public void OnBatchEnded(ref BatchInfo info)
{
    // ReSharper disable once ForCanBeConvertedToForeach
    for (var i = 0; i < _batchAware.Length; i++)
    {
        _batchAware[i].OnBatchEnded(ref info);
    }
}
protected override void PrepareRendering(IDrawDevice device, BatchInfo material)
{
    base.PrepareRendering(device, material);
    material.SetTexture("mainTex", TextureOne);
    material.SetTexture("samp1", TextureTwo);
    material.SetTexture("samp2", TextureThree);
}
public void SpinOnce(out BatchInfo info)
{
    var start = Stopwatch.GetTimestamp();
    var processed = Buffer.Read(_handler, _batchSize);
    var stop = Stopwatch.GetTimestamp();
    info = new BatchInfo(_batchSize, processed, stop - start);
    _batchAware?.OnBatchEnded(ref info);
}
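// A minimal sketch of a consumer for the BatchInfo produced above. The
// IBatchAware interface name and the BatchInfo members (BatchSize, Processed,
// Ticks) are assumptions inferred from the surrounding snippets, not a
// confirmed API.
public sealed class ThroughputLogger : IBatchAware
{
    public void OnBatchEnded(ref BatchInfo info)
    {
        // Convert raw Stopwatch ticks to seconds before computing throughput.
        double seconds = (double)info.Ticks / Stopwatch.Frequency;
        double itemsPerSecond = seconds > 0 ? info.Processed / seconds : 0;
        Console.WriteLine($"Processed {info.Processed}/{info.BatchSize} items at {itemsPerSecond:F0}/s");
    }
}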
public override void PrepareRendering(IDrawDevice device, BatchInfo material)
{
    base.PrepareRendering(device, material);

    Vector3 camPos = device.RefCoord;
    float camRefDist = MathF.Abs(device.FocusDist); // Don't pass RefDist, see note in Light.cs
    material.SetUniform("_camRefDist", camRefDist);
    material.SetUniform("_camWorldPos", camPos.X, camPos.Y, camPos.Z);
    DynamicLighting.Light.SetupLighting(device, material);
}
public void Draw(IDrawDevice device)
{
    var triangles = env.Triangles;
    var triangleSideLength = env.TriangleSideLength;
    var half = triangleSideLength / 2;
    Canvas canvas = new Canvas(device);

    // Four triangles covering the quadrants of a square cell, all meeting at its center
    Vector2[][] vectors = new Vector2[4][];
    vectors[0] = new[] { new Vector2(-half, -half), new Vector2(half, -half), new Vector2(0, 0) };
    vectors[1] = new[] { new Vector2(half, -half), new Vector2(half, half), new Vector2(0, 0) };
    vectors[2] = new[] { new Vector2(half, half), new Vector2(-half, half), new Vector2(0, 0) };
    vectors[3] = new[] { new Vector2(-half, half), new Vector2(-half, -half), new Vector2(0, 0) };

    BatchInfo[] infos = new BatchInfo[4];
    infos[0] = new BatchInfo(DrawTechnique.Alpha, ColorRgba.Red, null);
    infos[1] = new BatchInfo(DrawTechnique.Alpha, ColorRgba.Blue, null);
    infos[2] = new BatchInfo(DrawTechnique.Alpha, ColorRgba.White, null);
    infos[3] = new BatchInfo(DrawTechnique.Alpha, ColorRgba.Green, null);

    for (int x = 0; x < triangles.GetLength(0); x++)
    {
        for (int y = 0; y < triangles.GetLength(1); y++)
        {
            for (int i = 0; i < triangles.GetLength(2); i++)
            {
                canvas.State.SetMaterial(infos[i]);
                if (true /*triangles[x,y,i] != null*/) // null check disabled, presumably for debugging
                {
                    canvas.FillPolygon(vectors[i],
                        half + triangleSideLength * x,
                        half + triangleSideLength * y);
                }
            }
        }
    }
}
public static void Editbatch(BatchInfo _batch)
{
    SqlConnection con = ConnectionHelper.GetConnection();
    SqlCommand cmd = new SqlCommand("USP_Update_Batch", con);
    cmd.CommandType = CommandType.StoredProcedure;
    cmd.Parameters.Add(new SqlParameter("@BatchID", _batch.BatchID));
    cmd.Parameters.Add(new SqlParameter("@Year", _batch.Year));
    try
    {
        if (con.State != ConnectionState.Open)
            con.Open();
        cmd.ExecuteNonQuery();
    }
    catch (Exception)
    {
        throw; // rethrow, preserving the original stack trace
    }
}
public static BatchInfo getBatchByID(int id)
{
    SqlConnection con = ConnectionHelper.GetConnection();
    SqlCommand cmd = new SqlCommand("USP_GetBatch_ByID", con);
    cmd.CommandType = CommandType.StoredProcedure;
    cmd.Parameters.Add(new SqlParameter("@ID", id));
    try
    {
        if (con.State != ConnectionState.Open)
            con.Open();
        using (SqlDataReader _Reader = cmd.ExecuteReader())
        {
            _Reader.Read();
            BatchInfo _batch = new BatchInfo();
            _batch.BatchID = int.Parse(_Reader["BatchID"].ToString());
            _batch.Year = int.Parse(_Reader["BatchYear"].ToString());
            return _batch;
        }
    }
    catch (Exception)
    {
        throw; // rethrow, preserving the original stack trace
    }
}
/// <summary>
/// Creates a new Material.
/// </summary>
public Material()
{
    this.info = new BatchInfo();
}
/// <summary>
/// Creates a new complex Material.
/// </summary>
/// <param name="technique">The <see cref="Duality.Resources.DrawTechnique"/> to use.</param>
/// <param name="mainColor">The <see cref="MainColor"/> to use.</param>
/// <param name="textures">A set of <see cref="Duality.Resources.Texture">Textures</see> to use.</param>
/// <param name="uniforms">A set of <see cref="Duality.Resources.ShaderFieldInfo">uniform values</see> to use.</param>
public Material(ContentRef<DrawTechnique> technique, ColorRgba mainColor,
    Dictionary<string, ContentRef<Texture>> textures = null,
    Dictionary<string, float[]> uniforms = null)
{
    this.info = new BatchInfo(technique, mainColor, textures, uniforms);
}
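// A minimal usage sketch for the two constructors above, assuming the Duality
// built-in DrawTechnique and ColorRgba defaults; the values are illustrative.
Material tintedAlpha = new Material(DrawTechnique.Alpha, ColorRgba.Red); // textures and uniforms default to null
Material plain = new Material(); // starts from an empty BatchInfo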
/// <summary>
/// Function for call from productRegister
/// </summary>
public void CallFromProductRegister(decimal decId, frmProductRegister frmProRegister)
{
    try
    {
        base.Show();
        this.frmProductRegisterObj = frmProRegister;
        frmProductRegisterObj.Enabled = false;

        ProductCreationBll BllProductCreation = new ProductCreationBll();
        ProductInfo infoProduct = new ProductInfo();
        StockPostingInfo infoStockPosting = new StockPostingInfo();
        BatchInfo infoBatch = new BatchInfo();
        StockPostingBll BllStockPosting = new StockPostingBll();
        BatchBll BllBatch = new BatchBll();
        UnitBll bllUnit = new UnitBll();
        List<DataTable> ListObj = new List<DataTable>();

        decProductIdForEdit = decId;
        infoProduct = BllProductCreation.ProductView(decId);
        strUnitNameForGrid = bllUnit.UnitName(infoProduct.UnitId);

        btnSave.Text = "Update";
        btnDelete.Enabled = true;
        txtName.Text = infoProduct.ProductName;
        txtProductCode.Text = infoProduct.ProductCode;
        cmbGroup.SelectedValue = infoProduct.GroupId;
        cmbBrand.SelectedValue = infoProduct.BrandId;
        cmbUnit.SelectedValue = infoProduct.UnitId;
        if (BllProductCreation.ProductReferenceCheck(decId))
        {
            cmbUnit.Enabled = false;
        }
        decUnitIdForUpdate = infoProduct.UnitId;
        cmbSize.SelectedValue = infoProduct.SizeId;
        cmbModalNo.SelectedValue = infoProduct.ModelNoId;
        cmbTax.SelectedValue = infoProduct.TaxId;
        cmbTaxApplicableOn.SelectedItem = infoProduct.TaxapplicableOn;
        txtPurchaseRate.Text = infoProduct.PurchaseRate.ToString();
        txtSalesRate.Text = infoProduct.SalesRate.ToString();
        txtMrp.Text = infoProduct.Mrp.ToString();
        txtMinimumStock.Text = infoProduct.MinimumStock.ToString();
        txtMaximumStock.Text = infoProduct.MaximumStock.ToString();
        txtReorderLevel.Text = infoProduct.ReorderLevel.ToString();
        txtPartNo.Text = infoProduct.PartNo;
        cmbDefaultGodown.SelectedValue = infoProduct.GodownId;
        cmbDefaultRack.SelectedValue = infoProduct.RackId;

        if (infoProduct.IsBom)
        {
            cmbBom.SelectedIndex = 1;
            isBomFromRegister = true;
        }
        if (infoProduct.Ismultipleunit)
        {
            cmbMultipleUnit.SelectedIndex = 1;
            isMulUnitFromRgister = true;
        }
        if (infoProduct.Isopeningstock)
        {
            isOpeningStockForUpdate = true;
            OpeningStockGridFill();
        }
        if (infoProduct.IsallowBatch)
        {
            OpeningStockGridWithBathFill();
        }
        else
        {
            cmbAllowBatch.SelectedIndex = 0;
            txtPartNo.Text = BllBatch.PartNoReturn(decProductIdForEdit);
        }

        cmbMultipleUnit.SelectedIndex = infoProduct.Ismultipleunit ? 1 : 0;
        cmbBom.SelectedIndex = infoProduct.IsBom ? 1 : 0;
        cmbOpeningStock.SelectedIndex = infoProduct.Isopeningstock ? 1 : 0;
        cbxActive.Checked = infoProduct.IsActive;
        cbxReminder.Checked = infoProduct.IsshowRemember;
        txtNarration.Text = infoProduct.Narration;
    }
    catch (Exception ex)
    {
        MessageBox.Show("PC:63" + ex.Message, "OpenMiracle", MessageBoxButtons.OK, MessageBoxIcon.Information);
    }
}
/// <summary>
/// Gets a batch of changes to synchronize when given batch size,
/// destination knowledge, and change data retriever parameters.
/// </summary>
/// <returns>A DbSyncContext object that will be used to retrieve the modified data.</returns>
internal virtual async Task<(SyncContext, BatchInfo, DatabaseChangesSelected)> InternalGetChangesAsync(
    SyncContext context, MessageGetChangesBatch message,
    DbConnection connection, DbTransaction transaction,
    CancellationToken cancellationToken, IProgress<ProgressArgs> progress)
{
    // batch info containing changes
    BatchInfo batchInfo;

    // Statistics about changes that are selected
    DatabaseChangesSelected changesSelected;

    if (context.SyncWay == SyncWay.Upload && context.SyncType == SyncType.Reinitialize)
    {
        (batchInfo, changesSelected) = await this.InternalGetEmptyChangesAsync(message).ConfigureAwait(false);
        return (context, batchInfo, changesSelected);
    }

    // Call interceptor
    await this.InterceptAsync(new DatabaseChangesSelectingArgs(context, message, connection, transaction), cancellationToken).ConfigureAwait(false);

    // create local directory
    if (message.BatchSize > 0 && !string.IsNullOrEmpty(message.BatchDirectory) && !Directory.Exists(message.BatchDirectory))
        Directory.CreateDirectory(message.BatchDirectory);

    changesSelected = new DatabaseChangesSelected();

    // number of batch files generated
    var batchIndex = 0;

    // Check if we are in batch mode
    var isBatch = message.BatchSize > 0;

    // Create a batch info in memory (if !isBatch) or serialized on disk (if isBatch).
    // BatchInfo generates a schema clone with scope columns if needed.
    batchInfo = new BatchInfo(!isBatch, message.Schema, message.BatchDirectory);

    // Clean SyncSet, we will add only the tables we need in the batch info
    var changesSet = new SyncSet();

    var cptSyncTable = 0;
    var currentProgress = context.ProgressPercentage;

    foreach (var syncTable in message.Schema.Tables)
    {
        // temporary count of tables, used to report progress percentage
        cptSyncTable++;

        // Only the table schema is replicated; no data is applied
        if (syncTable.SyncDirection == SyncDirection.None)
            continue;

        // if we are in the upload stage, check that the table is not download-only
        if (context.SyncWay == SyncWay.Upload && syncTable.SyncDirection == SyncDirection.DownloadOnly)
            continue;

        // if we are in the download stage, check that the table is not upload-only
        if (context.SyncWay == SyncWay.Download && syncTable.SyncDirection == SyncDirection.UploadOnly)
            continue;

        // Get Command
        var selectIncrementalChangesCommand = await this.GetSelectChangesCommandAsync(context, syncTable, message.Setup, message.IsNew, connection, transaction);

        // Set parameters
        this.SetSelectChangesCommonParameters(context, syncTable, message.ExcludingScopeId, message.IsNew, message.LastTimestamp, selectIncrementalChangesCommand);

        // launch interceptor if any
        var args = new TableChangesSelectingArgs(context, syncTable, selectIncrementalChangesCommand, connection, transaction);
        await this.InterceptAsync(args, cancellationToken).ConfigureAwait(false);

        if (!args.Cancel && args.Command != null)
        {
            // Statistics
            var tableChangesSelected = new TableChangesSelected(syncTable.TableName, syncTable.SchemaName);

            // Create a changes table with scope columns
            var changesSetTable = DbSyncAdapter.CreateChangesTable(message.Schema.Tables[syncTable.TableName, syncTable.SchemaName], changesSet);

            // Get the reader
            using var dataReader = await args.Command.ExecuteReaderAsync().ConfigureAwait(false);

            // total memory size
            double rowsMemorySize = 0L;

            while (dataReader.Read())
            {
                // Create a row from the dataReader
                var row = CreateSyncRowFromReader(dataReader, changesSetTable);

                // Add the row to the changes set
                changesSetTable.Rows.Add(row);

                // Set the correct state to be applied
                if (row.RowState == DataRowState.Deleted)
                    tableChangesSelected.Deletes++;
                else if (row.RowState == DataRowState.Modified)
                    tableChangesSelected.Upserts++;

                // calculate row size if in batch mode
                if (isBatch)
                {
                    var fieldsSize = ContainerTable.GetRowSizeFromDataRow(row.ToArray());
                    var finalFieldSize = fieldsSize / 1024d;

                    if (finalFieldSize > message.BatchSize)
                        throw new RowOverSizedException(finalFieldSize.ToString());

                    // Calculate the new memory size
                    rowsMemorySize += finalFieldSize;

                    // Move to the next row if we haven't reached the batch size yet
                    if (rowsMemorySize <= message.BatchSize)
                        continue;

                    // Check interceptor
                    var batchTableChangesSelectedArgs = new TableChangesSelectedArgs(context, changesSetTable, tableChangesSelected, connection, transaction);
                    await this.InterceptAsync(batchTableChangesSelectedArgs, cancellationToken).ConfigureAwait(false);

                    // add changes to batchinfo
                    await batchInfo.AddChangesAsync(changesSet, batchIndex, false, this).ConfigureAwait(false);

                    // increment batch index
                    batchIndex++;

                    // the data is serialized at this point, so we can flush the set
                    changesSet.Clear();

                    // Recreate an empty ContainerSet and a ContainerTable
                    changesSet = new SyncSet();
                    changesSetTable = DbSyncAdapter.CreateChangesTable(message.Schema.Tables[syncTable.TableName, syncTable.SchemaName], changesSet);

                    // Reset the row memory size
                    rowsMemorySize = 0L;
                }
            }

            dataReader.Close();

            // We don't report progress for empty table changes, to limit verbosity
            if (tableChangesSelected.Deletes > 0 || tableChangesSelected.Upserts > 0)
                changesSelected.TableChangesSelected.Add(tableChangesSelected);

            // even if there are no rows, raise the interceptor
            var tableChangesSelectedArgs = new TableChangesSelectedArgs(context, changesSetTable, tableChangesSelected, connection, transaction);
            await this.InterceptAsync(tableChangesSelectedArgs, cancellationToken).ConfigureAwait(false);

            context.ProgressPercentage = currentProgress + (cptSyncTable * 0.2d / message.Schema.Tables.Count);

            // only report progress if we have something
            if (tableChangesSelectedArgs.TableChangesSelected.TotalChanges > 0)
                this.ReportProgress(context, progress, tableChangesSelectedArgs);
        }
    }

    // We are in batch mode, and we are at the last batch part info.
    // Even if we don't have rows inside, we return the changesSet, since it contains at least the schema.
    if (changesSet != null && changesSet.HasTables && changesSet.HasRows)
        await batchInfo.AddChangesAsync(changesSet, batchIndex, true, this).ConfigureAwait(false);

    // Set the total rows count contained in the batch info
    batchInfo.RowsCount = changesSelected.TotalChangesSelected;

    // Mark the last index as the last batch
    batchInfo.EnsureLastBatch();

    // Raise database changes selected
    if (changesSelected.TotalChangesSelected > 0 || changesSelected.TotalChangesSelectedDeletes > 0 || changesSelected.TotalChangesSelectedUpdates > 0)
    {
        var databaseChangesSelectedArgs = new DatabaseChangesSelectedArgs(context, message.LastTimestamp, batchInfo, changesSelected, connection);
        this.ReportProgress(context, progress, databaseChangesSelectedArgs);
        await this.InterceptAsync(databaseChangesSelectedArgs, cancellationToken).ConfigureAwait(false);
    }

    return (context, batchInfo, changesSelected);
}
public override void PrepareRendering(IDrawDevice device, BatchInfo material)
{
    base.PrepareRendering(device, material);
    DynamicLighting.Light.SetupLighting(device, material);
}
/// <summary>
/// Function to add the products to grid
/// </summary>
public void AddToGrid()
{
    BatchBll BllBatch = new BatchBll();
    GodownBll BllGodown = new GodownBll();
    try
    {
        SettingsBll BllSettings = new SettingsBll();
        if (string.IsNullOrEmpty(txtProductCode.Text.Trim()))
        {
            Messages.InformationMessage("Enter product code");
            txtProductCode.Focus();
        }
        else if (cmbItem.SelectedIndex == -1 && cmbItem.SelectedValue == null)
        {
            Messages.InformationMessage("Select a product");
            cmbItem.Focus();
        }
        else if (txtQuantity.Text.Trim() == string.Empty || Convert.ToDecimal(txtQuantity.Text.Trim()) <= 0)
        {
            Messages.InformationMessage("Enter quantity");
            txtQuantity.Focus();
        }
        else if (cmbUnit.SelectedValue == null)
        {
            Messages.InformationMessage("Select a unit");
            cmbUnit.Focus();
        }
        else if (txtRate.Text.Trim() == string.Empty || (BllSettings.SettingsStatusCheck("AllowZeroValueEntry") == "No" && decimal.Parse(txtRate.Text.Trim()) <= 0))
        {
            Messages.InformationMessage("Enter rate");
            txtRate.Focus();
        }
        else
        {
            int inCurrentRowIndex = 0;
            bool isExecutef = false;
            if (rowIdToEdit == 0)
            {
                dgvPointOfSales.Rows.Add();
                inCurrentRowIndex = dgvPointOfSales.Rows.Count - 1;
                isExecutef = true;
            }
            else
            {
                for (int i = 0; i < dgvPointOfSales.Rows.Count; ++i)
                {
                    if (dgvPointOfSales.Rows[i].Cells["rowId"].Value.ToString() == rowIdToEdit.ToString())
                    {
                        isExecutef = true;
                        inCurrentRowIndex = i;
                        break;
                    }
                }
            }
            if (!isExecutef)
            {
                dgvPointOfSales.Rows.Add();
                inCurrentRowIndex = dgvPointOfSales.Rows.Count - 1;
            }

            ProductInfo infoProduct = new ProductInfo();
            BatchInfo infoBatch = new BatchInfo();
            RackInfo infoRack = new RackInfo();
            ProductCreationBll BllProductCreation = new ProductCreationBll();
            UnitConvertionInfo InfoUnitConvertion = new UnitConvertionInfo();

            infoProduct = BllProductCreation.ProductView(decProductId);
            decimal dcProductBatch = BllBatch.BatchIdViewByProductId(decProductId);
            InfoUnitConvertion = new UnitConvertionBll().UnitViewAllByProductId(decProductId);
            infoBatch = BllBatch.BatchView(dcProductBatch);
            decimal dcGodownId = infoProduct.GodownId;
            GodownInfo infoGodown = new GodownInfo();
            infoGodown = BllGodown.GodownView(dcGodownId);
            decimal dcRackId = infoProduct.RackId;
            infoRack = new RackBll().RackView(dcRackId);

            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtProductCode"].Value = txtProductCode.Text;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtProductName"].Value = cmbItem.Text;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtQuantity"].Value = txtQuantity.Text;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtUnit"].Value = cmbUnit.Text;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtRate"].Value = txtRate.Text;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtGrossValue"].Value = txtGrossValue.Text;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtTaxPercentage"].Value = cmbTax.Text;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtTaxAmount"].Value = txtTaxAmount.Text;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtNetAmount"].Value = txtNetAmount.Text;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtDiscount"].Value = txtDiscountAmount.Text;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtTotalAmount"].Value = txtAmount.Text;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxttaxid"].Value = Convert.ToDecimal(cmbTax.SelectedValue);
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtProductId"].Value = infoProduct.ProductId;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtBatchId"].Value = dcProductBatch;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtRackId"].Value = infoProduct.RackId;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtGodownId"].Value = infoProduct.GodownId;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtUnitId"].Value = Convert.ToDecimal(cmbUnit.SelectedValue);
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtunitconversionId"].Value = InfoUnitConvertion.UnitconvertionId;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtBarcode"].Value = txtBarcode.Text;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtBatchno"].Value = infoBatch.BatchNo;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtGodownName"].Value = infoGodown.GodownName;
            dgvPointOfSales.Rows[inCurrentRowIndex].Cells["dgvtxtRackName"].Value = infoRack.RackName;

            TotalAmountCalculation();
            ClearGroupbox();
            dgvPointOfSales.CurrentCell = dgvPointOfSales[0, dgvPointOfSales.Rows.Count - 1];
            txtBarcode.Focus();
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show("POS:34" + ex.Message, "OpenMiracle", MessageBoxButtons.OK, MessageBoxIcon.Information);
    }
}
private async Task<HttpMessage> ApplyChangesAsync(HttpMessage httpMessage)
{
    if (httpMessage.ApplyChanges == null)
        throw new ArgumentException("ApplyChanges message could not be null");

    var scopeInfo = httpMessage.ApplyChanges.ScopeInfo;
    if (scopeInfo == null)
        throw new ArgumentException("ApplyChanges ScopeInfo could not be null");

    BatchInfo batchInfo;
    var bpi = httpMessage.ApplyChanges.BatchPartInfo;

    if (httpMessage.ApplyChanges.InMemory)
    {
        batchInfo = new BatchInfo
        {
            BatchIndex = 0,
            BatchPartsInfo = new List<BatchPartInfo>(new[] { bpi }),
            InMemory = true
        };

        bpi.Set = httpMessage.ApplyChanges.Set.ConvertToDmSet();

        httpMessage.ApplyChanges.Set.Dispose();
        httpMessage.ApplyChanges.Set = null;

        var (c, s) = await this.ApplyChangesAsync(httpMessage.SyncContext, scopeInfo, batchInfo);
        httpMessage.SyncContext = c;
        httpMessage.ApplyChanges.ChangesStatistics = s;

        httpMessage.ApplyChanges.BatchPartInfo.Clear();
        httpMessage.ApplyChanges.BatchPartInfo.FileName = null;

        return httpMessage;
    }

    // not in memory
    batchInfo = this.LocalProvider.CacheManager.GetValue<BatchInfo>("ApplyChanges_BatchInfo");

    if (batchInfo == null)
    {
        batchInfo = new BatchInfo
        {
            BatchIndex = 0,
            BatchPartsInfo = new List<BatchPartInfo>(new[] { bpi }),
            InMemory = false,
            Directory = BatchInfo.GenerateNewDirectoryName()
        };
    }
    else
    {
        batchInfo.BatchPartsInfo.Add(bpi);
    }

    var bpId = BatchInfo.GenerateNewFileName(httpMessage.ApplyChanges.BatchIndex.ToString());
    var fileName = Path.Combine(this.LocalProvider.GetCacheConfiguration().BatchDirectory, batchInfo.Directory, bpId);
    BatchPart.Serialize(httpMessage.ApplyChanges.Set, fileName);
    bpi.FileName = fileName;

    this.LocalProvider.CacheManager.Set("ApplyChanges_BatchInfo", batchInfo);

    // Clear the httpMessage set
    if (httpMessage.ApplyChanges != null)
    {
        httpMessage.ApplyChanges.Set.Dispose();
        httpMessage.ApplyChanges.Set = null;
    }

    // if it's the last batch sent
    if (bpi.IsLastBatch)
    {
        var (c, s) = await this.ApplyChangesAsync(httpMessage.SyncContext, scopeInfo, batchInfo);
        this.LocalProvider.CacheManager.Remove("ApplyChanges_BatchInfo");
        httpMessage.SyncContext = c;
        httpMessage.ApplyChanges.ChangesStatistics = s;
    }

    httpMessage.ApplyChanges.BatchPartInfo.Clear();
    httpMessage.ApplyChanges.BatchPartInfo.FileName = null;

    return httpMessage;
}
/// <summary>
/// Updates the price depending on whether the delivery note is billed
/// per kilo or per package, and on whether the client has a special
/// price for the product.
/// </summary>
public virtual void SetPrecio(ClienteInfo client, ProductInfo product, BatchInfo batch)
{
    Precio = client.GetPrecio(product, batch, ETipoFacturacion);
    PDescuento = client.GetDescuento(product, batch);
    CalculateTotal();
}
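// A hypothetical call site for SetPrecio above; the invoice-line object and
// the loaded ClienteInfo/ProductInfo/BatchInfo instances are illustrative
// assumptions, not part of the original code.
lineaAlbaran.SetPrecio(cliente, producto, lote); // recalculates Precio, PDescuento and the line total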
public async Task<(SyncContext, BatchInfo, ChangesSelected)> GetChangeBatchAsync(SyncContext context, ScopeInfo scopeInfo)
{
    // While we have another batch to process
    var isLastBatch = false;

    // Create the BatchInfo and SyncContext to return at the end
    BatchInfo changes = new BatchInfo();
    changes.Directory = BatchInfo.GenerateNewDirectoryName();
    SyncContext syncContext = null;
    ChangesSelected changesSelected = null;

    while (!isLastBatch)
    {
        HttpMessage httpMessage = new HttpMessage();
        httpMessage.SyncContext = context;
        httpMessage.Step = HttpStep.GetChangeBatch;
        httpMessage.GetChangeBatch = new HttpGetChangeBatchMessage
        {
            ScopeInfo = scopeInfo,
            BatchIndexRequested = changes.BatchIndex
        };

        var httpMessageResponse = await this.httpRequestHandler.ProcessRequest(httpMessage, cancellationToken);

        if (httpMessageResponse == null)
            throw new Exception("Can't have an empty body");
        if (httpMessageResponse.GetChangeBatch == null)
            throw new Exception("Can't have an empty GetChangeBatch");

        changesSelected = httpMessageResponse.GetChangeBatch.ChangesSelected;
        changes.InMemory = httpMessageResponse.GetChangeBatch.InMemory;
        syncContext = httpMessageResponse.SyncContext;

        // get the bpi and add it to the BatchInfo
        var bpi = httpMessageResponse.GetChangeBatch.BatchPartInfo;
        if (bpi != null)
        {
            changes.BatchIndex = bpi.Index;
            changes.BatchPartsInfo.Add(bpi);
            isLastBatch = bpi.IsLastBatch;
        }
        else
        {
            changes.BatchIndex = 0;
            isLastBatch = true;

            // exit the while loop
            break;
        }

        if (changes.InMemory)
        {
            // load the DmSet in memory
            bpi.Set = httpMessageResponse.GetChangeBatch.Set.ConvertToDmSet();
        }
        else
        {
            // Serialize the file!
            var bpId = BatchInfo.GenerateNewFileName(changes.BatchIndex.ToString());
            var fileName = Path.Combine(this.syncConfiguration.BatchDirectory, changes.Directory, bpId);
            BatchPart.Serialize(httpMessageResponse.GetChangeBatch.Set, fileName);
            bpi.FileName = fileName;
            bpi.Clear();
        }

        // Clear the DmSetSurrogate from the response, we don't need it anymore
        if (httpMessageResponse.GetChangeBatch.Set != null)
        {
            httpMessageResponse.GetChangeBatch.Set.Dispose();
            httpMessageResponse.GetChangeBatch.Set = null;
        }

        // if not the last batch, increment batchIndex for the next request
        if (!isLastBatch)
            changes.BatchIndex++;
    }

    return (syncContext, changes, changesSelected);
}
/// <summary>
/// Send changes to server
/// </summary>
public async Task<(SyncContext, ChangesApplied)> ApplyChangesAsync(SyncContext context, ScopeInfo fromScope, BatchInfo changes)
{
    if (changes == null || changes.BatchPartsInfo.Count == 0)
        return (context, new ChangesApplied());

    SyncContext syncContext = null;
    ChangesApplied changesApplied = null;

    // For each part, send it to the remote;
    // once finished, return the context
    foreach (var bpi in changes.BatchPartsInfo.OrderBy(bpi => bpi.Index))
    {
        HttpMessage httpMessage = new HttpMessage();
        httpMessage.Step = HttpStep.ApplyChanges;
        httpMessage.SyncContext = context;
        httpMessage.ApplyChanges = new HttpApplyChangesMessage();
        httpMessage.ApplyChanges.ScopeInfo = fromScope;

        // If the BPI is in memory, no need to deserialize from disk:
        // the set is already contained in part.Set
        if (!changes.InMemory)
        {
            // get the batch
            var partBatch = bpi.GetBatch();

            // get the surrogate dmSet
            if (partBatch != null)
                httpMessage.ApplyChanges.Set = partBatch.DmSetSurrogate;
        }
        else if (bpi.Set != null)
        {
            httpMessage.ApplyChanges.Set = new DmSetSurrogate(bpi.Set);
        }

        if (httpMessage.ApplyChanges.Set == null || httpMessage.ApplyChanges.Set.Tables == null)
            throw new ArgumentException("No changes to upload found.");

        // no need to send the filename
        httpMessage.ApplyChanges.BatchPartInfo = new BatchPartInfo
        {
            FileName = null,
            Index = bpi.Index,
            IsLastBatch = bpi.IsLastBatch,
            Tables = bpi.Tables
        };
        httpMessage.ApplyChanges.InMemory = changes.InMemory;
        httpMessage.ApplyChanges.BatchIndex = bpi.Index;

        // Post request and get response
        var httpMessageResponse = await this.httpRequestHandler.ProcessRequest(httpMessage, cancellationToken);

        // Clear surrogate
        httpMessage.ApplyChanges.Set.Dispose();
        httpMessage.ApplyChanges.Set = null;

        if (httpMessageResponse == null)
            throw new Exception("Can't have an empty body");

        syncContext = httpMessageResponse.SyncContext;
        changesApplied = httpMessageResponse.ApplyChanges.ChangesApplied;
    }

    return (syncContext, changesApplied);
}
public override void Draw(IDrawDevice device)
{
    Vector3 posTemp = this.gameobj.Transform.Pos;
    float scaleTemp = 1.0f;
    device.PreprocessCoords(ref posTemp, ref scaleTemp);

    Vector2 xDot, yDot;
    MathF.GetTransformDotVec(this.GameObj.Transform.Angle, this.gameobj.Transform.Scale * scaleTemp, out xDot, out yDot);

    // Apply block alignment
    Vector2 textOffset = Vector2.Zero;
    Vector2 textSize = this.text.Size;
    if (this.text.MaxWidth > 0) textSize.X = this.text.MaxWidth;
    this.blockAlign.ApplyTo(ref textOffset, textSize);
    MathF.TransformDotVec(ref textOffset, ref xDot, ref yDot);
    posTemp.X += textOffset.X;
    posTemp.Y += textOffset.Y;
    if (this.text.Fonts != null && this.text.Fonts.Any(r => r.IsAvailable && r.Res.IsPixelGridAligned))
    {
        posTemp.X = MathF.Round(posTemp.X);
        posTemp.Y = MathF.Round(posTemp.Y);
        if (MathF.RoundToInt(device.TargetSize.X) != (MathF.RoundToInt(device.TargetSize.X) / 2) * 2)
            posTemp.X += 0.5f;
        if (MathF.RoundToInt(device.TargetSize.Y) != (MathF.RoundToInt(device.TargetSize.Y) / 2) * 2)
            posTemp.Y += 0.5f;
    }

    // Draw design time metrics data
    if (DualityApp.ExecContext == DualityApp.ExecutionContext.Editor)
    {
        bool showLimits = true;
        bool showLines = false;
        bool showElements = false;
        Vector3 metricsOffset = new Vector3(0.0f, 0.0f, 0.01f);
        Vector3 lineOffset = new Vector3(0.5f, 0.5f, 0.0f);
        Vector3 tUnitX = Vector3.UnitX;
        Vector3 tUnitY = Vector3.UnitY;
        MathF.TransformDotVec(ref tUnitX, ref xDot, ref yDot);
        MathF.TransformDotVec(ref tUnitY, ref xDot, ref yDot);

        // Actual text size and maximum text size
        if (showLimits)
        {
            Vector3 textWidth = tUnitX * this.text.Size.X;
            Vector3 textHeight = tUnitY * this.text.Size.Y;
            Vector3 textMaxWidth = tUnitX * this.text.MaxWidth;
            Vector3 textMaxHeight = tUnitY * MathF.Max(this.text.MaxHeight, this.text.Size.Y);
            ColorRgba clrSize = ColorRgba.Green.WithAlpha(128);
            ColorRgba clrMaxSize = ColorRgba.Red.WithAlpha(128);
            device.AddVertices(new BatchInfo(DrawTechnique.Alpha, ColorRgba.White), VertexMode.LineLoop,
                new VertexC1P3(metricsOffset + lineOffset + posTemp, clrSize),
                new VertexC1P3(metricsOffset + lineOffset + posTemp + textWidth, clrSize),
                new VertexC1P3(metricsOffset + lineOffset + posTemp + textWidth + textHeight, clrSize),
                new VertexC1P3(metricsOffset + lineOffset + posTemp + textHeight, clrSize));
            device.AddVertices(new BatchInfo(DrawTechnique.Alpha, ColorRgba.White), VertexMode.LineLoop,
                new VertexC1P3(metricsOffset + lineOffset + posTemp, clrMaxSize),
                new VertexC1P3(metricsOffset + lineOffset + posTemp + textMaxWidth, clrMaxSize),
                new VertexC1P3(metricsOffset + lineOffset + posTemp + textMaxWidth + textMaxHeight, clrMaxSize),
                new VertexC1P3(metricsOffset + lineOffset + posTemp + textMaxHeight, clrMaxSize));
        }

        // Individual line sizes
        if (showLines)
        {
            ColorRgba clrLineBg = (ColorRgba.Blue + ColorRgba.Red).WithAlpha(64);
            for (int i = 0; i < this.text.TextMetrics.LineBounds.Count; i++)
            {
                Rect lineRect = this.text.TextMetrics.LineBounds[i];
                device.AddVertices(new BatchInfo(DrawTechnique.Alpha, ColorRgba.White), VertexMode.Quads,
                    new VertexC1P3(metricsOffset + posTemp + lineRect.TopLeft.X * tUnitX + lineRect.TopLeft.Y * tUnitY, clrLineBg),
                    new VertexC1P3(metricsOffset + posTemp + lineRect.BottomLeft.X * tUnitX + lineRect.BottomLeft.Y * tUnitY, clrLineBg),
                    new VertexC1P3(metricsOffset + posTemp + lineRect.BottomRight.X * tUnitX + lineRect.BottomRight.Y * tUnitY, clrLineBg),
                    new VertexC1P3(metricsOffset + posTemp + lineRect.TopRight.X * tUnitX + lineRect.TopRight.Y * tUnitY, clrLineBg));
            }
        }

        // Individual element sizes
        if (showElements)
        {
            ColorRgba clrElementBg = (ColorRgba.Blue + ColorRgba.Green).WithAlpha(128);
            for (int i = 0; i < this.text.TextMetrics.ElementBounds.Count; i++)
            {
                Rect elemRect = this.text.TextMetrics.ElementBounds[i];
                device.AddVertices(new BatchInfo(DrawTechnique.Alpha, ColorRgba.White), VertexMode.LineLoop,
                    new VertexC1P3(metricsOffset + lineOffset + posTemp + elemRect.TopLeft.X * tUnitX + elemRect.TopLeft.Y * tUnitY, clrElementBg),
                    new VertexC1P3(metricsOffset + lineOffset + posTemp + elemRect.BottomLeft.X * tUnitX + elemRect.BottomLeft.Y * tUnitY, clrElementBg),
                    new VertexC1P3(metricsOffset + lineOffset + posTemp + elemRect.BottomRight.X * tUnitX + elemRect.BottomRight.Y * tUnitY, clrElementBg),
                    new VertexC1P3(metricsOffset + lineOffset + posTemp + elemRect.TopRight.X * tUnitX + elemRect.TopRight.Y * tUnitY, clrElementBg));
            }
        }
    }

    ColorRgba matColor = this.customMat != null ? this.customMat.MainColor : ColorRgba.White;
    int[] vertLen = this.text.EmitVertices(ref this.vertFont, ref this.vertIcon, posTemp.X, posTemp.Y, posTemp.Z + this.VertexZOffset, this.colorTint * matColor, xDot, yDot);
    if (this.text.Fonts != null)
    {
        for (int i = 0; i < this.text.Fonts.Length; i++)
        {
            if (this.text.Fonts[i] != null && this.text.Fonts[i].IsAvailable)
            {
                if (this.customMat == null)
                {
                    device.AddVertices(this.text.Fonts[i].Res.Material, VertexMode.Quads, this.vertFont[i], vertLen[i + 1]);
                }
                else
                {
                    BatchInfo cm = new BatchInfo(this.customMat);
                    cm.Textures = this.text.Fonts[i].Res.Material.Textures;
                    device.AddVertices(cm, VertexMode.Quads, this.vertFont[i], vertLen[i + 1]);
                }
            }
        }
    }
    if (this.text.Icons != null && this.iconMat.IsAvailable)
    {
        device.AddVertices(this.iconMat, VertexMode.Quads, this.vertIcon, vertLen[0]);
    }
}
public override void Draw(IDrawDevice device)
{
    Vector3 posTemp = this.gameobj.Transform.Pos;
    float scaleTemp = 1.0f;
    device.PreprocessCoords(ref posTemp, ref scaleTemp);

    Vector2 xDot, yDot;
    MathF.GetTransformDotVec(this.GameObj.Transform.Angle, this.gameobj.Transform.Scale * scaleTemp, out xDot, out yDot);

    // Apply block alignment
    Vector2 textOffset = Vector2.Zero;
    Vector2 textSize = this.text.Size;
    if (this.text.MaxWidth > 0) textSize.X = this.text.MaxWidth;
    this.blockAlign.ApplyTo(ref textOffset, textSize);
    MathF.TransformDotVec(ref textOffset, ref xDot, ref yDot);
    posTemp.X += textOffset.X;
    posTemp.Y += textOffset.Y;
    if (this.text.Fonts != null && this.text.Fonts.Any(r => r.IsAvailable && r.Res.IsPixelGridAligned))
    {
        posTemp.X = MathF.Round(posTemp.X);
        posTemp.Y = MathF.Round(posTemp.Y);
        if (MathF.RoundToInt(device.TargetSize.X) != (MathF.RoundToInt(device.TargetSize.X) / 2) * 2)
            posTemp.X += 0.5f;
        if (MathF.RoundToInt(device.TargetSize.Y) != (MathF.RoundToInt(device.TargetSize.Y) / 2) * 2)
            posTemp.Y += 0.5f;
    }

    ColorRgba matColor = this.customMat != null ? this.customMat.MainColor : ColorRgba.White;
    int[] vertLen = this.text.EmitVertices(ref this.vertFont, ref this.vertIcon, posTemp.X, posTemp.Y, posTemp.Z + this.VertexZOffset, this.colorTint * matColor, xDot, yDot);
    if (this.text.Fonts != null)
    {
        for (int i = 0; i < this.text.Fonts.Length; i++)
        {
            if (this.text.Fonts[i] != null && this.text.Fonts[i].IsAvailable)
            {
                if (this.customMat == null)
                {
                    device.AddVertices(this.text.Fonts[i].Res.Material, VertexMode.Quads, this.vertFont[i], vertLen[i + 1]);
                }
                else
                {
                    BatchInfo cm = new BatchInfo(this.customMat);
                    cm.Textures = this.text.Fonts[i].Res.Material.Textures;
                    device.AddVertices(cm, VertexMode.Quads, this.vertFont[i], vertLen[i + 1]);
                }
            }
        }
    }
    if (this.text.Icons != null && this.iconMat.IsAvailable)
    {
        device.AddVertices(this.iconMat, VertexMode.Quads, this.vertIcon, vertLen[0]);
    }
}
/// <summary>
/// Gets a batch of changes to synchronize when given batch size,
/// destination knowledge, and change data retriever parameters.
/// </summary>
/// <returns>A DbSyncContext object that will be used to retrieve the modified data.</returns>
internal virtual async Task<(SyncContext, BatchInfo, DatabaseChangesSelected)> InternalGetChangesAsync(
    IScopeInfo scopeInfo, SyncContext context, bool isNew, long? fromLastTimestamp, long? toNewTimestamp, Guid? excludingScopeId,
    bool supportsMultiActiveResultSets, string batchRootDirectory, string batchDirectoryName,
    DbConnection connection, DbTransaction transaction,
    CancellationToken cancellationToken, IProgress<ProgressArgs> progress)
{
    // batch info containing changes
    BatchInfo batchInfo;

    // Statistics about changes that are selected
    DatabaseChangesSelected changesSelected;

    context.SyncStage = SyncStage.ChangesSelecting;

    if (context.SyncWay == SyncWay.Upload && context.SyncType == SyncType.Reinitialize)
    {
        (batchInfo, changesSelected) = await this.InternalGetEmptyChangesAsync(scopeInfo, batchRootDirectory).ConfigureAwait(false);
        return (context, batchInfo, changesSelected);
    }

    // create local directory
    if (!string.IsNullOrEmpty(batchRootDirectory) && !Directory.Exists(batchRootDirectory))
        Directory.CreateDirectory(batchRootDirectory);

    changesSelected = new DatabaseChangesSelected();

    // Create a batch.
    // BatchInfo generates a schema clone with scope columns if needed.
    batchInfo = new BatchInfo(scopeInfo.Schema, batchRootDirectory, batchDirectoryName);
    batchInfo.TryRemoveDirectory();
    batchInfo.CreateDirectory();

    // Call interceptor
    var databaseChangesSelectingArgs = new DatabaseChangesSelectingArgs(context, batchInfo.GetDirectoryFullPath(), this.Options.BatchSize, isNew,
        fromLastTimestamp, toNewTimestamp, connection, transaction);
    await this.InterceptAsync(databaseChangesSelectingArgs, progress, cancellationToken).ConfigureAwait(false);

    var cptSyncTable = 0;
    var currentProgress = context.ProgressPercentage;

    var schemaTables = scopeInfo.Schema.Tables.SortByDependencies(tab => tab.GetRelations().Select(r => r.GetParentTable()));

    var lstAllBatchPartInfos = new ConcurrentBag<BatchPartInfo>();
    var lstTableChangesSelected = new ConcurrentBag<TableChangesSelected>();

    var threadNumberLimits = supportsMultiActiveResultSets ? 16 : 1;

    if (supportsMultiActiveResultSets)
    {
        await schemaTables.ForEachAsync(async syncTable =>
        {
            if (cancellationToken.IsCancellationRequested)
                return;

            // temporary count of tables, used to report progress percentage
            cptSyncTable++;

            List<BatchPartInfo> syncTableBatchPartInfos;
            TableChangesSelected tableChangesSelected;
            (context, syncTableBatchPartInfos, tableChangesSelected) = await InternalReadSyncTableChangesAsync(
                scopeInfo, context, excludingScopeId, syncTable, batchInfo, isNew, fromLastTimestamp, connection, transaction, cancellationToken, progress).ConfigureAwait(false);

            if (syncTableBatchPartInfos == null)
                return;

            // We don't report progress for empty table changes, to limit verbosity
            if (tableChangesSelected != null && (tableChangesSelected.Deletes > 0 || tableChangesSelected.Upserts > 0))
                lstTableChangesSelected.Add(tableChangesSelected);

            // Add sync table bpi to all bpi
            syncTableBatchPartInfos.ForEach(bpi => lstAllBatchPartInfos.Add(bpi));

            context.ProgressPercentage = currentProgress + (cptSyncTable * 0.2d / scopeInfo.Schema.Tables.Count);
        }, threadNumberLimits);
    }
    else
    {
        foreach (var syncTable in schemaTables)
        {
            if (cancellationToken.IsCancellationRequested)
                continue;

            // temporary count of tables, used to report progress percentage
            cptSyncTable++;

            List<BatchPartInfo> syncTableBatchPartInfos;
            TableChangesSelected tableChangesSelected;
            (context, syncTableBatchPartInfos, tableChangesSelected) = await InternalReadSyncTableChangesAsync(
                scopeInfo, context, excludingScopeId, syncTable, batchInfo, isNew, fromLastTimestamp, connection, transaction, cancellationToken, progress).ConfigureAwait(false);

            if (syncTableBatchPartInfos == null)
                continue;

            // We don't report progress for empty table changes, to limit verbosity
            if (tableChangesSelected != null && (tableChangesSelected.Deletes > 0 || tableChangesSelected.Upserts > 0))
                lstTableChangesSelected.Add(tableChangesSelected);

            // Add sync table bpi to all bpi
            syncTableBatchPartInfos.ForEach(bpi => lstAllBatchPartInfos.Add(bpi));

            context.ProgressPercentage = currentProgress + (cptSyncTable * 0.2d / scopeInfo.Schema.Tables.Count);
        }
    }

    while (!lstTableChangesSelected.IsEmpty)
    {
        if (lstTableChangesSelected.TryTake(out var tableChangesSelected))
            changesSelected.TableChangesSelected.Add(tableChangesSelected);
    }

    // delete all empty batch parts (empty tables)
    foreach (var bpi in lstAllBatchPartInfos.Where(bpi => bpi.RowsCount <= 0))
        File.Delete(Path.Combine(batchInfo.GetDirectoryFullPath(), bpi.FileName));

    // Generate a good index order to be compliant with previous versions
    var tmpLstBatchPartInfos = new List<BatchPartInfo>();
    foreach (var table in schemaTables)
    {
        // get all bpi where count > 0, ordered by index
        foreach (var bpi in lstAllBatchPartInfos.Where(bpi => bpi.RowsCount > 0 && bpi.Tables[0].EqualsByName(new BatchPartTableInfo(table.TableName, table.SchemaName))).OrderBy(bpi => bpi.Index).ToArray())
        {
            batchInfo.BatchPartsInfo.Add(bpi);
            batchInfo.RowsCount += bpi.RowsCount;
            tmpLstBatchPartInfos.Add(bpi);
        }
    }

    var newBatchIndex = 0;
    foreach (var bpi in tmpLstBatchPartInfos)
    {
        bpi.Index = newBatchIndex;
        newBatchIndex++;
        bpi.IsLastBatch = newBatchIndex == tmpLstBatchPartInfos.Count;
    }

    // Ensure the last batch part is flagged as the last batch
    batchInfo.EnsureLastBatch();

    if (batchInfo.RowsCount <= 0)
    {
        var cleanFolder = await this.InternalCanCleanFolderAsync(scopeInfo.Name, context.Parameters, batchInfo, cancellationToken).ConfigureAwait(false);
        batchInfo.Clear(cleanFolder);
    }

    var databaseChangesSelectedArgs = new DatabaseChangesSelectedArgs(context, fromLastTimestamp, toNewTimestamp, batchInfo, changesSelected, connection);
    await this.InterceptAsync(databaseChangesSelectedArgs, progress, cancellationToken).ConfigureAwait(false);

    return (context, batchInfo, changesSelected);
}
public async Task<(SyncContext, ChangesStatistics)> ApplyChangesAsync(SyncContext ctx, ScopeInfo fromScope, BatchInfo changes)
    => await this.LocalProvider.ApplyChangesAsync(ctx, fromScope, changes);
InternalApplyThenGetChangesAsync(ClientScopeInfo clientScopeInfo, SyncContext context, BatchInfo clientBatchInfo,
    DbConnection connection = default, DbTransaction transaction = default,
    CancellationToken cancellationToken = default, IProgress<ProgressArgs> progress = null)
{
    await using var runner = await this.GetConnectionAsync(context, SyncMode.Reading, SyncStage.ChangesApplying, connection, transaction, cancellationToken, progress).ConfigureAwait(false);

    SyncSet schema;
    ServerScopeInfo serverScopeInfo;

    // could this ever happen?
    if (clientScopeInfo.Schema == null)
    {
        // Make a remote call to get the schema from the remote provider
        (context, serverScopeInfo) = await this.InternalGetServerScopeInfoAsync(
            context, null, false, runner.Connection, runner.Transaction, runner.CancellationToken, runner.Progress).ConfigureAwait(false);
        schema = serverScopeInfo.Schema;
    }
    else
    {
        schema = clientScopeInfo.Schema;
    }
    schema.EnsureSchema();

    // if we don't have any BatchPartsInfo, just generate a new one to get, at least, something to send to the server
    // and get a response with new data from the server
    if (clientBatchInfo == null)
        clientBatchInfo = new BatchInfo(schema);

    // --------------------------------------------------------------
    // STEP 1 : Send everything to the server side
    // --------------------------------------------------------------

    HttpResponseMessage response = null;

    // If not in memory and BatchPartsInfo.Count == 0, there is nothing to send.
    // But we need to send something, so generate a little batch part
    if (clientBatchInfo.BatchPartsInfo.Count == 0)
    {
        var changesToSend = new HttpMessageSendChangesRequest(context, clientScopeInfo);
        var containerSet = new ContainerSet();
        changesToSend.Changes = containerSet;
        changesToSend.IsLastBatch = true;
        changesToSend.BatchIndex = 0;
        changesToSend.BatchCount = clientBatchInfo.BatchPartsInfo == null ? 0 : clientBatchInfo.BatchPartsInfo.Count;
        var inMemoryRowsCount = changesToSend.Changes.RowsCount();

        context.ProgressPercentage += 0.125;

        await this.InterceptAsync(new HttpSendingClientChangesRequestArgs(changesToSend, inMemoryRowsCount, inMemoryRowsCount, this.GetServiceHost()), progress, cancellationToken).ConfigureAwait(false);

        // serialize message
        var serializer = this.SerializerFactory.GetSerializer<HttpMessageSendChangesRequest>();
        var binaryData = await serializer.SerializeAsync(changesToSend);

        response = await this.httpRequestHandler.ProcessRequestAsync(
            this.HttpClient, context, this.ServiceUri, binaryData, HttpStep.SendChangesInProgress,
            this.SerializerFactory, this.Converter, this.Options.BatchSize, this.SyncPolicy, cancellationToken, progress).ConfigureAwait(false);
    }
    else
    {
        int tmpRowsSendedCount = 0;

        // For each part, send it to the remote;
        // once finished, return the context
        var initialPctProgress1 = context.ProgressPercentage;

        var localSerializer = new LocalJsonSerializer();

        var interceptorsReading = this.interceptors.GetInterceptors<DeserializingRowArgs>();
        if (interceptorsReading.Count > 0)
        {
            localSerializer.OnReadingRow(async (schemaTable, rowString) =>
            {
                var args = new DeserializingRowArgs(context, schemaTable, rowString);
                await this.InterceptAsync(args);
                return args.Result;
            });
        }

        foreach (var bpi in clientBatchInfo.BatchPartsInfo.OrderBy(bpi => bpi.Index))
        {
            // Get the updatable schema for the only table contained in the batchpartinfo
            var schemaTable = DbSyncAdapter.CreateChangesTable(schema.Tables[bpi.Tables[0].TableName, bpi.Tables[0].SchemaName]);

            // Generate the ContainerSet containing rows to send to the user
            var containerSet = new ContainerSet();
            var containerTable = new ContainerTable(schemaTable);
            var fullPath = Path.Combine(clientBatchInfo.GetDirectoryFullPath(), bpi.FileName);
            containerSet.Tables.Add(containerTable);

            // read rows from file
            foreach (var row in localSerializer.ReadRowsFromFile(fullPath, schemaTable))
                containerTable.Rows.Add(row.ToArray());

            // Call the converter if needed
            if (this.Converter != null && containerTable.HasRows)
                BeforeSerializeRows(containerTable, schemaTable, this.Converter);

            // Create the send changes request
            var changesToSend = new HttpMessageSendChangesRequest(context, clientScopeInfo)
            {
                Changes = containerSet,
                IsLastBatch = bpi.IsLastBatch,
                BatchIndex = bpi.Index,
                BatchCount = clientBatchInfo.BatchPartsInfo.Count
            };

            tmpRowsSendedCount += containerTable.Rows.Count;

            context.ProgressPercentage = initialPctProgress1 + ((changesToSend.BatchIndex + 1) * 0.2d / changesToSend.BatchCount);
            await this.InterceptAsync(new HttpSendingClientChangesRequestArgs(changesToSend, tmpRowsSendedCount, clientBatchInfo.RowsCount, this.GetServiceHost()), progress, cancellationToken).ConfigureAwait(false);

            // serialize message
            var serializer = this.SerializerFactory.GetSerializer<HttpMessageSendChangesRequest>();
            var binaryData = await serializer.SerializeAsync(changesToSend);

            response = await this.httpRequestHandler.ProcessRequestAsync(
                this.HttpClient, context, this.ServiceUri, binaryData, HttpStep.SendChangesInProgress,
                this.SerializerFactory, this.Converter, this.Options.BatchSize, this.SyncPolicy, cancellationToken, progress).ConfigureAwait(false);

            // See #721 for issue and #721 for PR from slagtejn
            if (!bpi.IsLastBatch)
                response.Dispose();
        }
    }

    // --------------------------------------------------------------
    // STEP 2 : Receive everything from the server side
    // --------------------------------------------------------------

    // Now we have sent all the data to the server, and we have a FIRST response from the server with new data:
    // 1) It could be the only response
    // 2) It could be the first response, and we need to download all batches

    context.SyncStage = SyncStage.ChangesSelecting;
    var initialPctProgress = 0.55;
    context.ProgressPercentage = initialPctProgress;

    // Create the BatchInfo
    var serverBatchInfo = new BatchInfo(schema);

    HttpMessageSummaryResponse summaryResponseContent = null;

    // Deserialize the response incoming from the server
    using (var streamResponse = await response.Content.ReadAsStreamAsync().ConfigureAwait(false))
    {
        var responseSerializer = this.SerializerFactory.GetSerializer<HttpMessageSummaryResponse>();
        summaryResponseContent = await responseSerializer.DeserializeAsync(streamResponse);
    }

    serverBatchInfo.RowsCount = summaryResponseContent.BatchInfo.RowsCount;
    serverBatchInfo.Timestamp = summaryResponseContent.RemoteClientTimestamp;
    context = summaryResponseContent.SyncContext;

    if (summaryResponseContent.BatchInfo.BatchPartsInfo != null)
    {
        foreach (var bpi in summaryResponseContent.BatchInfo.BatchPartsInfo)
            serverBatchInfo.BatchPartsInfo.Add(bpi);
    }

    // From here, we need to serialize everything on disk

    // Generate the batch directory
    var batchDirectoryRoot = this.Options.BatchDirectory;
    var batchDirectoryName = string.Concat(DateTime.UtcNow.ToString("yyyy_MM_dd_ss"), Path.GetRandomFileName().Replace(".", ""));
    serverBatchInfo.DirectoryRoot = batchDirectoryRoot;
    serverBatchInfo.DirectoryName = batchDirectoryName;

    if (!Directory.Exists(serverBatchInfo.GetDirectoryFullPath()))
        Directory.CreateDirectory(serverBatchInfo.GetDirectoryFullPath());

    // If we have a snapshot, raise the batches-downloading process that will occur
    await this.InterceptAsync(new HttpBatchesDownloadingArgs(context, serverBatchInfo, this.GetServiceHost()), progress, cancellationToken).ConfigureAwait(false);

    // function used to download one part
    var dl = new Func<BatchPartInfo, Task>(async (bpi) =>
    {
        if (cancellationToken.IsCancellationRequested)
            return;

        var changesToSend3 = new HttpMessageGetMoreChangesRequest(context, bpi.Index);

        var serializer3 = this.SerializerFactory.GetSerializer<HttpMessageGetMoreChangesRequest>();
        var binaryData3 = await serializer3.SerializeAsync(changesToSend3).ConfigureAwait(false);
        var step3 = HttpStep.GetMoreChanges;

        await this.InterceptAsync(new HttpGettingServerChangesRequestArgs(bpi.Index, serverBatchInfo.BatchPartsInfo.Count, summaryResponseContent.SyncContext, this.GetServiceHost()), progress, cancellationToken).ConfigureAwait(false);

        // Raise get changes request
        context.ProgressPercentage = initialPctProgress + ((bpi.Index + 1) * 0.2d / serverBatchInfo.BatchPartsInfo.Count);

        var response = await this.httpRequestHandler.ProcessRequestAsync(
            this.HttpClient, context, this.ServiceUri, binaryData3, step3,
            this.SerializerFactory, this.Converter, 0, this.SyncPolicy, cancellationToken, progress).ConfigureAwait(false);

        if (this.SerializerFactory.Key != "json")
        {
            var webSerializer = this.SerializerFactory.GetSerializer<HttpMessageSendChangesResponse>();
            using var responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false);
            var getMoreChanges = await webSerializer.DeserializeAsync(responseStream);
            context = getMoreChanges.SyncContext;

            if (getMoreChanges != null && getMoreChanges.Changes != null && getMoreChanges.Changes.HasRows)
            {
                var localSerializer = new LocalJsonSerializer();

                var interceptorsWriting = this.interceptors.GetInterceptors<SerializingRowArgs>();
                if (interceptorsWriting.Count > 0)
                {
                    localSerializer.OnWritingRow(async (syncTable, rowArray) =>
                    {
                        var args = new SerializingRowArgs(context, syncTable, rowArray);
                        await this.InterceptAsync(args, progress, cancellationToken).ConfigureAwait(false);
                        return args.Result;
                    });
                }

                // Should have only one table
                var table = getMoreChanges.Changes.Tables[0];
                var schemaTable = DbSyncAdapter.CreateChangesTable(schema.Tables[table.TableName, table.SchemaName]);

                var fullPath = Path.Combine(serverBatchInfo.GetDirectoryFullPath(), bpi.FileName);

                // open the file and write the table header
                await localSerializer.OpenFileAsync(fullPath, schemaTable).ConfigureAwait(false);

                foreach (var row in table.Rows)
                    await localSerializer.WriteRowToFileAsync(new SyncRow(schemaTable, row), schemaTable).ConfigureAwait(false);

                // Close the file
                await localSerializer.CloseFileAsync(fullPath, schemaTable).ConfigureAwait(false);
            }
        }
        else
        {
            // Serialize
            await SerializeAsync(response, bpi.FileName, serverBatchInfo.GetDirectoryFullPath(), this).ConfigureAwait(false);
        }

        // Raise response from server containing a batch of changes
        await this.InterceptAsync(new HttpGettingServerChangesResponseArgs(serverBatchInfo, bpi.Index, bpi.RowsCount, summaryResponseContent.SyncContext, this.GetServiceHost()), progress, cancellationToken).ConfigureAwait(false);
    });

    // Parallel download of all bpis (which will launch the directory deletion on the server side)
    await serverBatchInfo.BatchPartsInfo.ForEachAsync(bpi => dl(bpi), this.MaxDownladingDegreeOfParallelism).ConfigureAwait(false);

    // Send order of end of download
    var lastBpi = serverBatchInfo.BatchPartsInfo.FirstOrDefault(bpi => bpi.IsLastBatch);

    if (lastBpi != null)
    {
        var endOfDownloadChanges = new HttpMessageGetMoreChangesRequest(context, lastBpi.Index);

        var serializerEndOfDownloadChanges = this.SerializerFactory.GetSerializer<HttpMessageGetMoreChangesRequest>();
        var binaryData3 = await serializerEndOfDownloadChanges.SerializeAsync(endOfDownloadChanges).ConfigureAwait(false);

        var endResponse = await this.httpRequestHandler.ProcessRequestAsync(
            this.HttpClient, context, this.ServiceUri, binaryData3, HttpStep.SendEndDownloadChanges,
            this.SerializerFactory, this.Converter, 0, this.SyncPolicy, cancellationToken, progress).ConfigureAwait(false);

        // Deserialize the response incoming from the server.
        // This is the last response and should contain the step HttpStep.SendEndDownloadChanges
        using var streamResponse = await endResponse.Content.ReadAsStreamAsync().ConfigureAwait(false);
        var endResponseSerializer = this.SerializerFactory.GetSerializer<HttpMessageSendChangesResponse>();
        var endResponseContent = await endResponseSerializer.DeserializeAsync(streamResponse);
        context = endResponseContent.SyncContext;
    }

    // generate the new scope item
    this.CompleteTime = DateTime.UtcNow;

    await this.InterceptAsync(new HttpBatchesDownloadedArgs(summaryResponseContent, summaryResponseContent.SyncContext, this.GetServiceHost()), progress, cancellationToken).ConfigureAwait(false);

    var serverSyncChanges = new ServerSyncChanges(summaryResponseContent.RemoteClientTimestamp, serverBatchInfo, summaryResponseContent.ServerChangesSelected);

    return (context, serverSyncChanges, summaryResponseContent.ClientChangesApplied, summaryResponseContent.ConflictResolutionPolicy);
}
public void OneTimeSetupBeforeEntireTestClass()
{
    // This will create a new batch in Applitools and store your results there
    MyBatchInfo = new BatchInfo("UltimateQA-DifferentResolutions");
}
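// A minimal sketch of how the batch above might be wired into an Applitools
// Eyes instance, assuming the standard Applitools C# SDK; the Eyes instance
// and its configuration are illustrative, not part of the original test.
var eyes = new Eyes();
eyes.Batch = MyBatchInfo; // every checkpoint from this test run is grouped under the shared batch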
/// <summary>
/// Function to save batch
/// </summary>
public void BatchWithBarCode()
{
    BatchBll BllBatch = new BatchBll();
    BatchInfo infoBatch = new BatchInfo();
    Int32 inBarcode = BllBatch.AutomaticBarcodeGeneration();
    infoBatch.BatchNo = "NA";
    infoBatch.ExpiryDate = DateTime.Now;
    infoBatch.ManufacturingDate = DateTime.Now;
    infoBatch.partNo = txtPartNo.Text.Trim();
    if (btnSave.Text == "Update")
    {
        infoBatch.ProductId = decProductIdForEdit;
    }
    else
    {
        infoBatch.ProductId = decSaveProduct;
    }
    infoBatch.narration = string.Empty;
    infoBatch.ExtraDate = DateTime.Now;
    infoBatch.barcode = Convert.ToString(inBarcode);
    infoBatch.Extra1 = string.Empty;
    infoBatch.Extra2 = string.Empty;
    inBatchIdWithPartNoNA = BllBatch.BatchAddWithBarCode(infoBatch);
}
private void GenerateMaterial()
{
    if (this.material != null) this.material.Dispose();
    if (this.texture == null) return;

    // Select the DrawTechnique to use
    ContentRef<DrawTechnique> technique;
    if (this.renderMode == RenderMode.MonochromeBitmap)
        technique = DrawTechnique.Mask;
    else if (this.renderMode == RenderMode.GrayscaleBitmap)
        technique = DrawTechnique.Alpha;
    else if (this.renderMode == RenderMode.SmoothBitmap)
        technique = DrawTechnique.Alpha;
    else
        technique = DrawTechnique.SharpAlpha;

    // Create and configure the internal BatchInfo
    BatchInfo matInfo = new BatchInfo(technique, ColorRgba.White, this.texture);
    if (technique == DrawTechnique.SharpAlpha)
    {
        matInfo.SetUniform("smoothness", this.size * 4.0f);
    }

    this.material = new Material(matInfo);
}
private void GenerateResources()
{
    if (this.mat != null || this.texture != null || this.pixelData != null)
        this.ReleaseResources();

    TextRenderingHint textRenderingHint;
    if (this.renderMode == RenderMode.MonochromeBitmap)
        textRenderingHint = TextRenderingHint.SingleBitPerPixelGridFit;
    else
        textRenderingHint = TextRenderingHint.AntiAliasGridFit;

    int cols;
    int rows;
    cols = rows = (int)Math.Ceiling(Math.Sqrt(SupportedChars.Length));

    Pixmap.Layer pixelLayer = new Pixmap.Layer(
        MathF.RoundToInt(cols * this.internalFont.Size * 1.2f),
        MathF.RoundToInt(rows * this.internalFont.Height * 1.2f));
    Pixmap.Layer glyphTemp;
    Pixmap.Layer glyphTempTypo;
    Bitmap bm;
    Bitmap measureBm = new Bitmap(1, 1);
    Rect[] atlas = new Rect[SupportedChars.Length];

    using (Graphics measureGraphics = Graphics.FromImage(measureBm))
    {
        Brush fntBrush = new SolidBrush(Color.Black);

        StringFormat formatDef = StringFormat.GenericDefault;
        formatDef.LineAlignment = StringAlignment.Near;
        formatDef.FormatFlags = 0;
        StringFormat formatTypo = StringFormat.GenericTypographic;
        formatTypo.LineAlignment = StringAlignment.Near;

        int x = 1;
        int y = 1;
        for (int i = 0; i < SupportedChars.Length; ++i)
        {
            string str = SupportedChars[i].ToString(CultureInfo.InvariantCulture);
            bool isSpace = str == " ";
            SizeF charSize = measureGraphics.MeasureString(str, this.internalFont, pixelLayer.Width, formatDef);

            // Rasterize a single glyph for rendering
            bm = new Bitmap((int)Math.Ceiling(Math.Max(1, charSize.Width)), this.internalFont.Height + 1);
            using (Graphics glyphGraphics = Graphics.FromImage(bm))
            {
                glyphGraphics.Clear(Color.Transparent);
                glyphGraphics.TextRenderingHint = textRenderingHint;
                glyphGraphics.DrawString(str, this.internalFont, fntBrush, new RectangleF(0, 0, bm.Width, bm.Height), formatDef);
            }
            glyphTemp = new Pixmap.Layer(bm);

            // Rasterize a single glyph in typographic mode for metric analysis
            if (!isSpace)
            {
                Rectangle glyphTempBounds = glyphTemp.OpaqueBounds();
                glyphTemp.SubImage(glyphTempBounds.X, 0, glyphTempBounds.Width, glyphTemp.Height);
                if (BodyAscentRef.Contains(SupportedChars[i]))
                    this.bodyAscent += glyphTempBounds.Height;

                bm = new Bitmap((int)Math.Ceiling(Math.Max(1, charSize.Width)), this.internalFont.Height + 1);
                using (Graphics glyphGraphics = Graphics.FromImage(bm))
                {
                    glyphGraphics.Clear(Color.Transparent);
                    glyphGraphics.TextRenderingHint = textRenderingHint;
                    glyphGraphics.DrawString(str, this.internalFont, fntBrush, new RectangleF(0, 0, bm.Width, bm.Height), formatTypo);
                }
                glyphTempTypo = new Pixmap.Layer(bm);
                glyphTempTypo.Crop(true, false);
            }
            else
            {
                glyphTempTypo = glyphTemp;
            }

            // Move to the next atlas row if the glyph doesn't fit anymore
            if (x + glyphTemp.Width + 2 > pixelLayer.Width)
            {
                x = 1;
                y += this.internalFont.Height + MathF.Clamp((int)MathF.Ceiling(this.internalFont.Height * 0.1875f), 3, 10);
            }

            // Memorize atlas coordinates & glyph data
            this.maxGlyphWidth = Math.Max(this.maxGlyphWidth, glyphTemp.Width);
            this.glyphs[i].width = glyphTemp.Width;
            this.glyphs[i].height = glyphTemp.Height;
            this.glyphs[i].offsetX = glyphTemp.Width - glyphTempTypo.Width;
            if (isSpace)
            {
                this.glyphs[i].width /= 2;
                this.glyphs[i].offsetX /= 2;
            }
            atlas[i].X = x;
            atlas[i].Y = y;
            atlas[i].W = glyphTemp.Width;
            atlas[i].H = (this.internalFont.Height + 1);

            // Draw it onto the font surface
            glyphTemp.DrawOnto(pixelLayer, BlendMode.Solid, x, y);
            x += glyphTemp.Width + MathF.Clamp((int)MathF.Ceiling(this.internalFont.Height * 0.125f), 2, 10);
        }
    }

    // White out the texture, except the alpha channel
    for (int i = 0; i < pixelLayer.Data.Length; i++)
    {
        pixelLayer.Data[i].R = 255;
        pixelLayer.Data[i].G = 255;
        pixelLayer.Data[i].B = 255;
    }

    // Determine Font properties
    this.height = this.internalFont.Height;
    this.ascent = (int)Math.Round(this.internalFont.FontFamily.GetCellAscent(this.internalFont.Style) * this.internalFont.Size / this.internalFont.FontFamily.GetEmHeight(this.internalFont.Style));
    this.bodyAscent /= BodyAscentRef.Length;
    this.descent = (int)Math.Round(this.internalFont.FontFamily.GetCellDescent(this.internalFont.Style) * this.internalFont.GetHeight() / this.internalFont.FontFamily.GetLineSpacing(this.internalFont.Style));
    this.baseLine = (int)Math.Round(this.internalFont.FontFamily.GetCellAscent(this.internalFont.Style) * this.internalFont.GetHeight() / this.internalFont.FontFamily.GetLineSpacing(this.internalFont.Style));

    // Create internal Pixmap and Texture Resources
    this.pixelData = new Pixmap(pixelLayer);
    this.pixelData.Atlas = new List<Rect>(atlas);
    this.texture = new Texture(this.pixelData,
        Texture.SizeMode.Enlarge,
        this.IsPixelGridAligned ? TextureMagFilter.Nearest : TextureMagFilter.Linear,
        this.IsPixelGridAligned ? TextureMinFilter.Nearest : TextureMinFilter.LinearMipmapLinear);

    // Select the DrawTechnique to use
    ContentRef<DrawTechnique> technique;
    if (this.renderMode == RenderMode.MonochromeBitmap)
        technique = DrawTechnique.Mask;
    else if (this.renderMode == RenderMode.GrayscaleBitmap)
        technique = DrawTechnique.Alpha;
    else if (this.renderMode == RenderMode.SmoothBitmap)
        technique = DrawTechnique.Alpha;
    else
        technique = DrawTechnique.SharpAlpha;

    // Create and configure the internal BatchInfo
    BatchInfo matInfo = new BatchInfo(technique, ColorRgba.White, this.texture);
    if (technique == DrawTechnique.SharpAlpha)
    {
        matInfo.SetUniform("smoothness", this.size * 3.0f);
    }

    this.mat = new Material(matInfo);
}
public async Task <(SyncContext, BatchInfo, ChangesSelected)> GetChangeBatchAsync( SyncContext context, MessageGetChangesBatch message, CancellationToken cancellationToken = default) { // While we have another batch to process var isLastBatch = false; // Create the BatchInfo and SyncContext to return at the end BatchInfo changes = new BatchInfo { Directory = BatchInfo.GenerateNewDirectoryName() }; SyncContext syncContext = null; ChangesSelected changesSelected = null; while (!isLastBatch) { HttpMessage httpMessage = new HttpMessage { SyncContext = context, Step = HttpStep.GetChangeBatch, Content = new HttpMessageGetChangesBatch { ScopeInfo = message.ScopeInfo, BatchIndexRequested = changes.BatchIndex, DownloadBatchSizeInKB = message.DownloadBatchSizeInKB, BatchDirectory = message.BatchDirectory, Schema = new DmSetSurrogate(message.Schema), Filters = message.Filters, Policy = message.Policy, SerializationFormat = message.SerializationFormat } }; var httpMessageResponse = await this.httpRequestHandler.ProcessRequest(httpMessage, message.SerializationFormat, cancellationToken); if (httpMessageResponse == null) { throw new Exception("Can't have an empty body"); } HttpMessageGetChangesBatch httpMessageContent; if (httpMessageResponse.Content is HttpMessageGetChangesBatch) { httpMessageContent = httpMessageResponse.Content as HttpMessageGetChangesBatch; } else { httpMessageContent = (httpMessageResponse.Content as JObject).ToObject <HttpMessageGetChangesBatch>(); } if (httpMessageContent == null) { throw new Exception("Can't have an empty GetChangeBatch"); } changesSelected = httpMessageContent.ChangesSelected; changes.InMemory = httpMessageContent.InMemory; syncContext = httpMessageResponse.SyncContext; // get the bpi and add it to the BatchInfo var bpi = httpMessageContent.BatchPartInfo; if (bpi != null) { changes.BatchIndex = bpi.Index; changes.BatchPartsInfo.Add(bpi); isLastBatch = bpi.IsLastBatch; } else { changes.BatchIndex = 0; isLastBatch = true; // no batch part info returned, so exit the while loop break; } if (changes.InMemory) { // load the DmSet in memory bpi.Set = httpMessageContent.Set.ConvertToDmSet(); } else { // Serialize the file ! var bpId = BatchInfo.GenerateNewFileName(changes.BatchIndex.ToString()); var fileName = Path.Combine(message.BatchDirectory, changes.Directory, bpId); BatchPart.Serialize(httpMessageContent.Set, fileName); bpi.FileName = fileName; bpi.Clear(); } // Clear the DmSetSurrogate from the response, we don't need it anymore if (httpMessageContent.Set != null) { httpMessageContent.Set.Dispose(); httpMessageContent.Set = null; } // if not last, increment batchIndex for the next request if (!isLastBatch) { changes.BatchIndex++; } } return(syncContext, changes, changesSelected); }
internal static FabLot CreateFrontInLot(FabProduct product, ProductType prodType, int inQty, BatchInfo batch) { FabStep step = product.Process.FirstStep as FabStep; //TODO : 2019.8.27 check whether this is still unused //FabWipInfo info = CreateHelper.CreateWipInfoDummy( // CreateFrontInLotID(product), // batch, // product, // product.Process as FabProcess, // step, // prodType, // Constants.NULL_ID, // 2, // inQty, // EntityState.WAIT, // null, // string.Empty, // AoFactory.Current.NowDT, // DateTime.MinValue); FabWipInfo info = new FabWipInfo(); EntityHelper.AddBatchInfo(batch, step, inQty); FabLot lot = CreateHelper.CreateLot(info, LotState.CREATE); lot.LotState = Mozart.SeePlan.Simulation.LotState.CREATE; return(lot); }
public ViewResultBase Details() { if (!string.IsNullOrEmpty(Request["isTooltip"])) { Guid id; if (Guid.TryParse(Request["id"], out id)) { var data = new BatchInfo(base.EntityType.GetData(id)); return new PartialViewResult { ViewName = "Partials/Details", ViewData = new ViewDataDictionary(data) }; } else { throw new ValidationException("Invalid Guid identifier: " + Request["id"]); } } else if (!string.IsNullOrEmpty(Request["isInner"])) { return new PartialViewResult { ViewName = "Partials/Details" }; } else { return this.View(); } }
public static void SetupLighting(IDrawDevice device, BatchInfo material) { DeviceLightInfo info = UpdateLighting(device); // Prepare shader data float[] _lightPos = new float[4 * MaxVisible]; float[] _lightDir = new float[4 * MaxVisible]; float[] _lightColor = new float[3 * MaxVisible]; int _lightCount = MathF.Min(MaxVisible, info.PriorizedLights.Count); int i = 0; foreach (Light light in info.PriorizedLights) { if (light.Disposed) { continue; } Vector3 dir; Vector3 pos; float uniformScale; bool directional = light.IsDirectional; if (directional) { dir = light.dir; pos = Vector3.Zero; uniformScale = 1.0f; } else { dir = light.dir; pos = light.GameObj.Transform.Pos; uniformScale = light.GameObj.Transform.Scale; MathF.TransformCoord(ref dir.X, ref dir.Y, light.GameObj.Transform.Angle); } if (directional) { _lightPos[i * 4 + 0] = (float)light.ambientColor.R * light.ambientIntensity / 255.0f; _lightPos[i * 4 + 1] = (float)light.ambientColor.G * light.ambientIntensity / 255.0f; _lightPos[i * 4 + 2] = (float)light.ambientColor.B * light.ambientIntensity / 255.0f; _lightPos[i * 4 + 3] = 0.0f; } else { _lightPos[i * 4 + 0] = pos.X; _lightPos[i * 4 + 1] = pos.Y; _lightPos[i * 4 + 2] = pos.Z; _lightPos[i * 4 + 3] = light.range * uniformScale; } _lightDir[i * 4 + 0] = dir.X; _lightDir[i * 4 + 1] = dir.Y; _lightDir[i * 4 + 2] = dir.Z; _lightDir[i * 4 + 3] = dir == Vector3.Zero ? 0.0f : MathF.Max(light.spotFocus, 1.0f); _lightColor[i * 3 + 0] = (float)light.color.R * light.intensity / 255.0f; _lightColor[i * 3 + 1] = (float)light.color.G * light.intensity / 255.0f; _lightColor[i * 3 + 2] = (float)light.color.B * light.intensity / 255.0f; i++; if (i >= _lightCount) { break; } } if (i + 1 < _lightCount) { _lightCount = i + 1; } material.SetUniform("_lightCount", _lightCount); material.SetUniform("_lightPos", _lightPos); material.SetUniform("_lightDir", _lightDir); material.SetUniform("_lightColor", _lightColor); }
/// <summary> /// Enumerate all internal changes, no batch mode /// </summary> internal async Task <(BatchInfo, DatabaseChangesSelected)> EnumerateChangesInternalAsync( SyncContext context, ScopeInfo scopeInfo, DmSet configTables, string batchDirectory, ConflictResolutionPolicy policy, ICollection <FilterClause> filters) { // create the in memory changes set var changesSet = new DmSet(SyncConfiguration.DMSET_NAME); // Create the batch info, in memory // No need to generate a directory name, since we are in memory var batchInfo = new BatchInfo(true, batchDirectory); using (var connection = this.CreateConnection()) { // Open the connection await connection.OpenAsync(); using (var transaction = connection.BeginTransaction()) { try { // changes that will be returned as selected changes var changes = new DatabaseChangesSelected(); foreach (var tableDescription in configTables.Tables) { // if we are in upload stage, check that the table is not download only if (context.SyncWay == SyncWay.Upload && tableDescription.SyncDirection == SyncDirection.DownloadOnly) { continue; } // if we are in download stage, check that the table is not upload only if (context.SyncWay == SyncWay.Download && tableDescription.SyncDirection == SyncDirection.UploadOnly) { continue; } var builder = this.GetDatabaseBuilder(tableDescription); var syncAdapter = builder.CreateSyncAdapter(connection, transaction); // raise before event context.SyncStage = SyncStage.TableChangesSelecting; // launch any interceptor await this.InterceptAsync(new TableChangesSelectingArgs(context, tableDescription.TableName, connection, transaction)); // selected changes for the current table var tableSelectedChanges = new TableChangesSelected { TableName = tableDescription.TableName }; // Get Command DbCommand selectIncrementalChangesCommand; DbCommandType dbCommandType; if (this.CanBeServerProvider && context.Parameters != null && context.Parameters.Count > 0 && filters != null && filters.Count > 0) { var tableFilters = filters .Where(f => f.TableName.Equals(tableDescription.TableName, StringComparison.InvariantCultureIgnoreCase)); if (tableFilters != null && tableFilters.Count() > 0) { dbCommandType = DbCommandType.SelectChangesWitFilters; selectIncrementalChangesCommand = syncAdapter.GetCommand(dbCommandType, tableFilters); if (selectIncrementalChangesCommand == null) { throw new Exception("Missing command 'SelectIncrementalChangesCommand'"); } syncAdapter.SetCommandParameters(dbCommandType, selectIncrementalChangesCommand, tableFilters); } else { dbCommandType = DbCommandType.SelectChanges; selectIncrementalChangesCommand = syncAdapter.GetCommand(dbCommandType); if (selectIncrementalChangesCommand == null) { throw new Exception("Missing command 'SelectIncrementalChangesCommand'"); } syncAdapter.SetCommandParameters(dbCommandType, selectIncrementalChangesCommand); } } else { dbCommandType = DbCommandType.SelectChanges; selectIncrementalChangesCommand = syncAdapter.GetCommand(dbCommandType); if (selectIncrementalChangesCommand == null) { throw new Exception("Missing command 'SelectIncrementalChangesCommand'"); } syncAdapter.SetCommandParameters(dbCommandType, selectIncrementalChangesCommand); } // Get a clone of the table with tracking columns var dmTableChanges = this.BuildChangesTable(tableDescription.TableName, configTables); SetSelectChangesCommonParameters(context, scopeInfo, selectIncrementalChangesCommand); // Set filter parameters if any if (this.CanBeServerProvider && context.Parameters != null && context.Parameters.Count > 0 && filters != 
null && filters.Count > 0) { var tableFilters = filters .Where(f => f.TableName.Equals(tableDescription.TableName, StringComparison.InvariantCultureIgnoreCase)).ToList(); if (tableFilters != null && tableFilters.Count > 0) { foreach (var filter in tableFilters) { var parameter = context.Parameters.FirstOrDefault(p => p.ColumnName.Equals(filter.ColumnName, StringComparison.InvariantCultureIgnoreCase) && p.TableName.Equals(filter.TableName, StringComparison.InvariantCultureIgnoreCase)); if (parameter != null) { DbManager.SetParameterValue(selectIncrementalChangesCommand, parameter.ColumnName, parameter.Value); } } } } this.AddTrackingColumns <int>(dmTableChanges, "sync_row_is_tombstone"); // Get the reader using (var dataReader = selectIncrementalChangesCommand.ExecuteReader()) { while (dataReader.Read()) { var dataRow = this.CreateRowFromReader(dataReader, dmTableChanges); //DmRow dataRow = dmTableChanges.NewRow(); // assuming the row is not inserted / modified var state = DmRowState.Unchanged; // get if the current row is inserted, modified, deleted state = this.GetStateFromDmRow(dataRow, scopeInfo); if (state != DmRowState.Deleted && state != DmRowState.Modified && state != DmRowState.Added) { continue; } // add row dmTableChanges.Rows.Add(dataRow); // accept changes before modifying dataRow.AcceptChanges(); tableSelectedChanges.TotalChanges++; // Set the correct state to be applied if (state == DmRowState.Deleted) { dataRow.Delete(); tableSelectedChanges.Deletes++; } else if (state == DmRowState.Added) { dataRow.SetAdded(); tableSelectedChanges.Inserts++; } else if (state == DmRowState.Modified) { dataRow.SetModified(); tableSelectedChanges.Updates++; } } // Since we don't need this column anymore, remove it this.RemoveTrackingColumns(dmTableChanges, "sync_row_is_tombstone"); // add it to the DmSet changesSet.Tables.Add(dmTableChanges); } // add the stats to global stats changes.TableChangesSelected.Add(tableSelectedChanges); // Progress & Interceptor context.SyncStage = SyncStage.TableChangesSelected; var args = new TableChangesSelectedArgs(context, tableSelectedChanges, connection, transaction); this.ReportProgress(context, args); await this.InterceptAsync(args); } transaction.Commit(); // generate the batchpartinfo batchInfo.GenerateBatchInfo(0, changesSet); // Create a new in-memory batch info with the changes DmSet return(batchInfo, changes); } catch (Exception) { throw; } finally { if (connection != null && connection.State == ConnectionState.Open) { connection.Close(); } } } } }
public HttpBatchesDownloadingArgs(SyncContext context, BatchInfo serverBatchInfo, string host) : base(context, null) { this.ServerBatchInfo = serverBatchInfo; this.Host = host; }
/// <summary> /// Enumerate all internal changes, in batch mode /// </summary> internal async Task <(BatchInfo, DatabaseChangesSelected)> EnumerateChangesInBatchesInternalAsync (SyncContext context, ScopeInfo scopeInfo, int downloadBatchSizeInKB, DmSet configTables, string batchDirectory, ConflictResolutionPolicy policy, ICollection <FilterClause> filters) { DmTable dmTable = null; // memory size total double memorySizeFromDmRows = 0L; var batchIndex = 0; // this batch info won't be in memory, it will be batched var batchInfo = new BatchInfo(false, batchDirectory); // directory where all files will be stored batchInfo.GenerateNewDirectoryName(); // Create stats object to store changes count var changes = new DatabaseChangesSelected(); using (var connection = this.CreateConnection()) { try { // Open the connection await connection.OpenAsync(); using (var transaction = connection.BeginTransaction()) { // create the in memory changes set var changesSet = new DmSet(configTables.DmSetName); foreach (var tableDescription in configTables.Tables) { // if we are in upload stage, check that the table is not download only if (context.SyncWay == SyncWay.Upload && tableDescription.SyncDirection == SyncDirection.DownloadOnly) { continue; } // if we are in download stage, check that the table is not upload only if (context.SyncWay == SyncWay.Download && tableDescription.SyncDirection == SyncDirection.UploadOnly) { continue; } var builder = this.GetDatabaseBuilder(tableDescription); var syncAdapter = builder.CreateSyncAdapter(connection, transaction); // raise before event context.SyncStage = SyncStage.TableChangesSelecting; var tableChangesSelectingArgs = new TableChangesSelectingArgs(context, tableDescription.TableName, connection, transaction); // launch interceptor if any await this.InterceptAsync(tableChangesSelectingArgs); // Get Command DbCommand selectIncrementalChangesCommand; DbCommandType dbCommandType; if (this.CanBeServerProvider && context.Parameters != null && context.Parameters.Count > 0 && filters != null && filters.Count > 0) { var tableFilters = filters .Where(f => f.TableName.Equals(tableDescription.TableName, StringComparison.InvariantCultureIgnoreCase)); if (tableFilters != null && tableFilters.Count() > 0) { dbCommandType = DbCommandType.SelectChangesWitFilters; selectIncrementalChangesCommand = syncAdapter.GetCommand(dbCommandType, tableFilters); if (selectIncrementalChangesCommand == null) { throw new Exception("Missing command 'SelectIncrementalChangesCommand'"); } syncAdapter.SetCommandParameters(dbCommandType, selectIncrementalChangesCommand, tableFilters); } else { dbCommandType = DbCommandType.SelectChanges; selectIncrementalChangesCommand = syncAdapter.GetCommand(dbCommandType); if (selectIncrementalChangesCommand == null) { throw new Exception("Missing command 'SelectIncrementalChangesCommand'"); } syncAdapter.SetCommandParameters(dbCommandType, selectIncrementalChangesCommand); } } else { dbCommandType = DbCommandType.SelectChanges; selectIncrementalChangesCommand = syncAdapter.GetCommand(dbCommandType); if (selectIncrementalChangesCommand == null) { throw new Exception("Missing command 'SelectIncrementalChangesCommand'"); } syncAdapter.SetCommandParameters(dbCommandType, selectIncrementalChangesCommand); } dmTable = this.BuildChangesTable(tableDescription.TableName, configTables); try { // Set common parameters SetSelectChangesCommonParameters(context, scopeInfo, selectIncrementalChangesCommand); // Set filter parameters if any // Only on server side if 
(this.CanBeServerProvider && context.Parameters != null && context.Parameters.Count > 0 && filters != null && filters.Count > 0) { var filterTable = filters.Where(f => f.TableName.Equals(tableDescription.TableName, StringComparison.InvariantCultureIgnoreCase)).ToList(); if (filterTable != null && filterTable.Count > 0) { foreach (var filter in filterTable) { var parameter = context.Parameters.FirstOrDefault(p => p.ColumnName.Equals(filter.ColumnName, StringComparison.InvariantCultureIgnoreCase) && p.TableName.Equals(filter.TableName, StringComparison.InvariantCultureIgnoreCase)); if (parameter != null) { DbManager.SetParameterValue(selectIncrementalChangesCommand, parameter.ColumnName, parameter.Value); } } } } this.AddTrackingColumns <int>(dmTable, "sync_row_is_tombstone"); // Statistics var tableChangesSelected = new TableChangesSelected { TableName = tableDescription.TableName }; changes.TableChangesSelected.Add(tableChangesSelected); // Get the reader using (var dataReader = selectIncrementalChangesCommand.ExecuteReader()) { while (dataReader.Read()) { var dmRow = this.CreateRowFromReader(dataReader, dmTable); var state = DmRowState.Unchanged; state = this.GetStateFromDmRow(dmRow, scopeInfo); // If the row is not deleted, inserted or modified, go to the next one if (state != DmRowState.Deleted && state != DmRowState.Modified && state != DmRowState.Added) { continue; } var fieldsSize = DmTableSurrogate.GetRowSizeFromDataRow(dmRow); var dmRowSize = fieldsSize / 1024d; if (dmRowSize > downloadBatchSizeInKB) { var exc = $"Row is too big ({dmRowSize} kb.) for the current Configuration.DownloadBatchSizeInKB ({downloadBatchSizeInKB} kb.) Aborting Sync..."; throw new Exception(exc); } // Calculate the new memory size memorySizeFromDmRows = memorySizeFromDmRows + dmRowSize; // add row dmTable.Rows.Add(dmRow); tableChangesSelected.TotalChanges++; // accept changes before modifying dmRow.AcceptChanges(); // Set the correct state to be applied if (state == DmRowState.Deleted) { dmRow.Delete(); tableChangesSelected.Deletes++; } else if (state == DmRowState.Added) { dmRow.SetAdded(); tableChangesSelected.Inserts++; } else if (state == DmRowState.Modified) { dmRow.SetModified(); tableChangesSelected.Updates++; } // We exceeded the memory size, so we can add it to a batch if (memorySizeFromDmRows > downloadBatchSizeInKB) { // Since we don't need this column anymore, remove it this.RemoveTrackingColumns(dmTable, "sync_row_is_tombstone"); changesSet.Tables.Add(dmTable); // generate the batch part info batchInfo.GenerateBatchInfo(batchIndex, changesSet); // increment batch index batchIndex++; changesSet.Clear(); // Recreate an empty DmSet, then a dmTable clone changesSet = new DmSet(configTables.DmSetName); dmTable = dmTable.Clone(); this.AddTrackingColumns <int>(dmTable, "sync_row_is_tombstone"); // Init the row memory size memorySizeFromDmRows = 0L; // SyncProgress & interceptor context.SyncStage = SyncStage.TableChangesSelected; var loopTableChangesSelectedArgs = new TableChangesSelectedArgs(context, tableChangesSelected, connection, transaction); this.ReportProgress(context, loopTableChangesSelectedArgs); await this.InterceptAsync(loopTableChangesSelectedArgs); } } // Since we don't need this column anymore, remove it this.RemoveTrackingColumns(dmTable, "sync_row_is_tombstone"); context.SyncStage = SyncStage.TableChangesSelected; changesSet.Tables.Add(dmTable); // Init the row memory size memorySizeFromDmRows = 0L; // Event progress & interceptor context.SyncStage = SyncStage.TableChangesSelected; var 
tableChangesSelectedArgs = new TableChangesSelectedArgs(context, tableChangesSelected, connection, transaction); this.ReportProgress(context, tableChangesSelectedArgs); await this.InterceptAsync(tableChangesSelectedArgs); } } catch (Exception) { throw; } finally { } } // We are in batch mode, and we are at the last batchpart info if (changesSet != null && changesSet.HasTables && changesSet.HasChanges()) { var batchPartInfo = batchInfo.GenerateBatchInfo(batchIndex, changesSet); if (batchPartInfo != null) { batchPartInfo.IsLastBatch = true; } } transaction.Commit(); } } catch (Exception) { throw; } finally { if (connection != null && connection.State == ConnectionState.Open) { connection.Close(); } } } return(batchInfo, changes); }
/// <summary> /// Creates a new single-texture Material. /// </summary> /// <param name="technique">The <see cref="Duality.Resources.DrawTechnique"/> to use.</param> /// <param name="mainColor">The <see cref="MainColor"/> to use.</param> /// <param name="mainTex">The main <see cref="Duality.Resources.Texture"/> to use.</param> public Material(ContentRef<DrawTechnique> technique, ColorRgba mainColor, ContentRef<Texture> mainTex) { this.info = new BatchInfo(technique, mainColor, mainTex); }
public void OnBatchEnded(ref BatchInfo info) { BatchFinished = true; }
/// <summary> /// Creates a new Material based on the specified BatchInfo. /// </summary> /// <param name="info">The BatchInfo whose settings are copied into the new Material.</param> public Material(BatchInfo info) { this.info = new BatchInfo(info); }
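A minimal usage sketch for the two Material constructors above, assuming Duality's built-in DrawTechnique.Mask and an already-loaded texture reference named myTexture (a hypothetical placeholder):
// Create a single-texture material in one call...
Material direct = new Material(DrawTechnique.Mask, ColorRgba.White, myTexture);
// ...or configure a BatchInfo first, then wrap it. Material copies the
// BatchInfo, so later changes to 'info' do not leak into 'fromInfo'.
BatchInfo info = new BatchInfo(DrawTechnique.Mask, ColorRgba.White, myTexture);
info.SetUniform("smoothness", 4.0f);
Material fromInfo = new Material(info);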
public virtual void AjustaCantidad(ProductInfo producto, BatchInfo partida) { AjustaCantidad(producto, partida, 0); }
/// <summary> /// Create a response message content based on a requested index in a server batch info /// </summary> private async Task <HttpMessageSendChangesResponse> GetChangesResponseAsync(SyncContext syncContext, long remoteClientTimestamp, BatchInfo serverBatchInfo, DatabaseChangesApplied clientChangesApplied, DatabaseChangesSelected serverChangesSelected, int batchIndexRequested) { // 1) Create the http message content response var changesResponse = new HttpMessageSendChangesResponse(syncContext); changesResponse.ServerChangesSelected = serverChangesSelected; changesResponse.ClientChangesApplied = clientChangesApplied; changesResponse.ServerStep = HttpStep.GetMoreChanges; changesResponse.ConflictResolutionPolicy = this.Options.ConflictResolutionPolicy; // If nothing to do, just send back if (serverBatchInfo.InMemory || serverBatchInfo.BatchPartsInfo.Count == 0) { if (this.ClientConverter != null && serverBatchInfo.InMemoryData != null && serverBatchInfo.InMemoryData.HasRows) { BeforeSerializeRows(serverBatchInfo.InMemoryData, this.ClientConverter); } changesResponse.Changes = serverBatchInfo.InMemoryData == null ? new ContainerSet() : serverBatchInfo.InMemoryData.GetContainerSet(); changesResponse.BatchIndex = 0; changesResponse.IsLastBatch = true; changesResponse.RemoteClientTimestamp = remoteClientTimestamp; return(changesResponse); } // Get the batch part index requested var batchPartInfo = serverBatchInfo.BatchPartsInfo.First(d => d.Index == batchIndexRequested); // if we are not in memory, we set the BI in session, to be able to get it back on next request // create the in memory changes set var changesSet = new SyncSet(); foreach (var table in Schema.Tables) { SyncAdapter.CreateChangesTable(Schema.Tables[table.TableName, table.SchemaName], changesSet); } await batchPartInfo.LoadBatchAsync(changesSet, serverBatchInfo.GetDirectoryFullPath(), this); // if client request a conversion on each row, apply the conversion if (this.ClientConverter != null && batchPartInfo.Data.HasRows) { BeforeSerializeRows(batchPartInfo.Data, this.ClientConverter); } changesResponse.Changes = batchPartInfo.Data.GetContainerSet(); changesResponse.BatchIndex = batchIndexRequested; changesResponse.IsLastBatch = batchPartInfo.IsLastBatch; changesResponse.RemoteClientTimestamp = remoteClientTimestamp; changesResponse.ServerStep = batchPartInfo.IsLastBatch ? HttpStep.GetMoreChanges : HttpStep.GetChangesInProgress; // If we have only one bpi, we can safely delete it if (batchPartInfo.IsLastBatch) { // delete the folder (not the BatchPartInfo, because we have a reference on it) if (this.Options.CleanFolder) { var shouldDeleteFolder = true; if (!string.IsNullOrEmpty(this.Options.SnapshotsDirectory)) { var dirInfo = new DirectoryInfo(serverBatchInfo.DirectoryRoot); var snapInfo = new DirectoryInfo(this.Options.SnapshotsDirectory); shouldDeleteFolder = dirInfo.FullName != snapInfo.FullName; } if (shouldDeleteFolder) { serverBatchInfo.TryRemoveDirectory(); } } } return(changesResponse); }
public void FixtureSetUp() { batch_ = new BatchInfo("Layout2"); }
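In the Applitools Eyes SDK, a BatchInfo created once in a fixture setup like this is typically shared by every test so that they are grouped under a single batch in the dashboard. A minimal sketch of that pattern, reusing the SetBatch configuration call shown in the test further below (runner choice and names are placeholders):
// Reuse one BatchInfo for all tests in the fixture
BatchInfo fixtureBatch = new BatchInfo("Layout2");
Eyes eyes = new Eyes(new ClassicRunner());
Configuration conf = new Configuration();
conf.SetAppName("app").SetTestName("grouped test").SetBatch(fixtureBatch);
eyes.SetConfiguration(conf);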
private async Task <HttpMessage> ApplyChangesAsync(HttpMessage httpMessage) { HttpMessageApplyChanges httpMessageContent; if (httpMessage.Content is HttpMessageApplyChanges) { httpMessageContent = httpMessage.Content as HttpMessageApplyChanges; } else { httpMessageContent = (httpMessage.Content as JObject).ToObject <HttpMessageApplyChanges>(); } if (httpMessageContent == null) { throw new ArgumentException("ApplyChanges message cannot be null"); } var scopeInfo = httpMessageContent.FromScope; if (scopeInfo == null) { throw new ArgumentException("ApplyChanges ScopeInfo cannot be null"); } var schema = httpMessageContent.Schema.ConvertToDmSet(); BatchInfo batchInfo; var bpi = httpMessageContent.BatchPartInfo; if (httpMessageContent.InMemory) { batchInfo = new BatchInfo(true, this.Configuration.BatchDirectory) { BatchIndex = 0, BatchPartsInfo = new List <BatchPartInfo>(new[] { bpi }), }; bpi.Set = httpMessageContent.Set.ConvertToDmSet(); httpMessageContent.Set.Dispose(); httpMessageContent.Set = null; var(c, s) = await this.ApplyChangesAsync(httpMessage.SyncContext, new MessageApplyChanges { FromScope = scopeInfo, Schema = schema, Policy = httpMessageContent.Policy, UseBulkOperations = httpMessageContent.UseBulkOperations, CleanMetadatas = httpMessageContent.CleanMetadatas, ScopeInfoTableName = httpMessageContent.ScopeInfoTableName, Changes = batchInfo }); httpMessageContent.ChangesApplied = s; httpMessageContent.BatchPartInfo.Clear(); httpMessageContent.BatchPartInfo.FileName = null; httpMessage.SyncContext = c; httpMessage.Content = httpMessageContent; return(httpMessage); } // not in memory batchInfo = this.LocalProvider.CacheManager.GetValue <BatchInfo>("ApplyChanges_BatchInfo"); if (batchInfo == null) { batchInfo = new BatchInfo(false, this.Configuration.BatchDirectory) { BatchIndex = 0, BatchPartsInfo = new List <BatchPartInfo>(new[] { bpi }), InMemory = false, }; batchInfo.GenerateNewDirectoryName(); } else { batchInfo.BatchPartsInfo.Add(bpi); } var bpId = batchInfo.GenerateNewFileName(httpMessageContent.BatchIndex.ToString()); // to save the file, we should use the local configuration batch directory var fileName = Path.Combine(batchInfo.GetDirectoryFullPath(), bpId); BatchPart.Serialize(httpMessageContent.Set, fileName); bpi.FileName = fileName; this.LocalProvider.CacheManager.Set("ApplyChanges_BatchInfo", batchInfo); // Clear the httpMessage set if (httpMessageContent != null) { httpMessageContent.Set.Dispose(); httpMessageContent.Set = null; } // if it's the last batch sent if (bpi.IsLastBatch) { var(c, s) = await this.ApplyChangesAsync(httpMessage.SyncContext, new MessageApplyChanges { FromScope = scopeInfo, Schema = schema, Policy = httpMessageContent.Policy, UseBulkOperations = httpMessageContent.UseBulkOperations, CleanMetadatas = httpMessageContent.CleanMetadatas, ScopeInfoTableName = httpMessageContent.ScopeInfoTableName, Changes = batchInfo }); this.LocalProvider.CacheManager.Remove("ApplyChanges_BatchInfo"); httpMessage.SyncContext = c; httpMessageContent.ChangesApplied = s; } httpMessageContent.BatchPartInfo.Clear(); httpMessageContent.BatchPartInfo.FileName = null; httpMessage.Content = httpMessageContent; return(httpMessage); }
private void SetupMaterial(BatchInfo material, BatchInfo lastMaterial) { DrawTechnique tech = material.Technique.Res ?? DrawTechnique.Solid.Res; DrawTechnique lastTech = lastMaterial != null ? lastMaterial.Technique.Res : null; // Setup BlendType if (lastTech == null || tech.Blending != lastTech.Blending) { this.SetupBlendState(tech.Blending); } // Bind Shader NativeShaderProgram nativeShader = tech.NativeShader as NativeShaderProgram; NativeShaderProgram.Bind(nativeShader); // Setup shader data ShaderFieldInfo[] varInfo = nativeShader.Fields; int[] locations = nativeShader.FieldLocations; // Setup sampler bindings and uniform data int curSamplerIndex = this.sharedSamplerBindings; for (int i = 0; i < varInfo.Length; i++) { ShaderFieldInfo field = varInfo[i]; int location = locations[i]; if (field.Scope == ShaderFieldScope.Attribute) { continue; } if (this.sharedShaderParameters.Contains(field.Name)) { continue; } if (field.Type == ShaderFieldType.Sampler2D) { ContentRef <Texture> texRef = material.GetInternalTexture(field.Name); if (texRef == null) { this.internalShaderState.TryGetInternal(field.Name, out texRef); } NativeTexture.Bind(texRef, curSamplerIndex); GL.Uniform1(location, curSamplerIndex); curSamplerIndex++; } else { float[] data = material.GetInternalData(field.Name); if (data == null && !this.internalShaderState.TryGetInternal(field.Name, out data)) { continue; } NativeShaderProgram.SetUniform(field, location, data); } } NativeTexture.ResetBinding(curSamplerIndex); }
public virtual void Vende(Budget proforma, SerieInfo serie, ClienteInfo cliente, ProductInfo producto, BatchInfo partida) { if (cliente == null) { throw new iQException(Library.Invoice.Resources.Messages.NO_CLIENTE_SELECTED); } if (cliente.Productos == null) { cliente.LoadChilds(typeof(ProductoCliente), true); } ProductoClienteInfo productoCliente = cliente.Productos.GetByProducto(producto.Oid); if (partida == null) { CopyFrom(proforma, producto); } else { CopyFrom(partida); } SetTipoFacturacion(productoCliente, producto); SetImpuestos(serie, cliente, producto); Precio = producto.GetPrecioVenta(productoCliente, partida, ETipoFacturacion); }
public DatabaseChangesSelectedArgs(SyncContext context, long?fromLastTimestamp, long?toLastTimestamp, BatchInfo clientBatchInfo, DatabaseChangesSelected changesSelected, DbConnection connection = null, DbTransaction transaction = null) : base(context, connection, transaction) { this.FromTimestamp = fromLastTimestamp; this.ToTimestamp = toLastTimestamp; this.BatchInfo = clientBatchInfo; this.ChangesSelected = changesSelected; }
private void GenerateTexMat() { if (this.material != null) this.material.Dispose(); if (this.texture != null) this.texture.Dispose(); if (this.pixelData == null) return; this.texture = new Texture(this.pixelData, TextureSizeMode.Enlarge, this.IsPixelGridAligned ? TextureMagFilter.Nearest : TextureMagFilter.Linear, this.IsPixelGridAligned ? TextureMinFilter.Nearest : TextureMinFilter.LinearMipmapLinear); // Select DrawTechnique to use ContentRef<DrawTechnique> technique; if (this.renderMode == RenderMode.MonochromeBitmap) technique = DrawTechnique.Mask; else if (this.renderMode == RenderMode.GrayscaleBitmap) technique = DrawTechnique.Alpha; else if (this.renderMode == RenderMode.SmoothBitmap) technique = DrawTechnique.Alpha; else technique = DrawTechnique.SharpAlpha; // Create and configure internal BatchInfo BatchInfo matInfo = new BatchInfo(technique, ColorRgba.White, this.texture); if (technique == DrawTechnique.SharpAlpha) { matInfo.SetUniform("smoothness", this.size * 4.0f); } this.material = new Material(matInfo); }
//[TestCase(true, "Test Sequence", "Test Sequence Name Env Var")] //[TestCase(true, "Test Sequence", null)] //[TestCase(true, null, "Test Sequence Name Env Var")] //[TestCase(true, null, null)] public void TestEyesConfiguration(bool useVisualGrid, string sequenceName, string sequenceNameEnvVar) { ILogHandler logHandler = TestUtils.InitLogHandler(); EyesRunner runner = useVisualGrid ? (EyesRunner) new VisualGridRunner(10, logHandler) : new ClassicRunner(logHandler); Eyes eyes = new Eyes(runner); IWebDriver driver = SeleniumUtils.CreateChromeDriver(); driver.Url = "https://applitools.github.io/demo/TestPages/FramesTestPage/"; string originalBatchSequence = Environment.GetEnvironmentVariable("APPLITOOLS_BATCH_SEQUENCE"); if (sequenceNameEnvVar != null) { Environment.SetEnvironmentVariable("APPLITOOLS_BATCH_SEQUENCE", sequenceNameEnvVar); } string effectiveSequenceName = sequenceName ?? sequenceNameEnvVar; BatchInfo batchInfo = new BatchInfo() { Id = TestDataProvider.BatchInfo.Id + "_" + effectiveSequenceName, Name = TestDataProvider.BatchInfo.Name + "_" + effectiveSequenceName }; if (sequenceName != null) { batchInfo.SequenceName = sequenceName; } if (sequenceNameEnvVar != null) { Environment.SetEnvironmentVariable("APPLITOOLS_BATCH_SEQUENCE", originalBatchSequence); } try { Assert.AreEqual(effectiveSequenceName, batchInfo.SequenceName, "SequenceName"); Configuration conf = new Configuration(); string testName = "Test - " + (useVisualGrid ? "Visual Grid" : "Selenium"); conf.SetAppName("app").SetTestName(testName) .SetHostApp("someHostApp").SetHostOS("someHostOs") //.SetBaselineBranchName("baseline branch") //.SetBaselineEnvName("baseline env") .SetEnvironmentName("env name") .SetBatch(batchInfo); eyes.SetConfiguration(conf); eyes.Open(driver); eyes.MatchLevel = MatchLevel.Layout; eyes.Check(Target.Window()); eyes.MatchLevel = MatchLevel.Content; eyes.Check(Target.Window()); } finally { driver.Quit(); } TestResults results = eyes.Close(false); Metadata.SessionResults sessionResults = TestUtils.GetSessionResults(eyes.ApiKey, results); Assert.NotNull(sessionResults, "SessionResults"); Assert.AreEqual("someHostOs", sessionResults.Env.Os, "OS"); Assert.AreEqual("someHostApp", sessionResults.Env.HostingApp, "Hosting App"); Assert.AreEqual(batchInfo.SequenceName, sessionResults.StartInfo.BatchInfo.SequenceName, "Sequence Name"); //Assert.AreEqual("baseline branch", sessionResults.BaselineBranchName); //Assert.AreEqual("baseline env", sessionResults.BaselineEnvId); Assert.NotNull(sessionResults.ActualAppOutput, "Actual App Output"); Assert.AreEqual(2, sessionResults.ActualAppOutput.Length, "Actual App Output"); Assert.AreEqual(MatchLevel.Layout2, sessionResults.ActualAppOutput[0].ImageMatchSettings.MatchLevel, "Actual App Output (Layout)"); Assert.AreEqual(MatchLevel.Content, sessionResults.ActualAppOutput[1].ImageMatchSettings.MatchLevel, "Actual App Output (Content)"); TestResultsSummary resultsSummary = runner.GetAllTestResults(false); eyes.Abort(); }
/// <summary> /// Applying changes message. /// Be careful: the policy could be different from the schema's (especially on the client side, where it's the reverse one by default) /// </summary> public MessageApplyChanges(Guid localScopeId, Guid senderScopeId, bool isNew, long lastTimestamp, SyncSet schema, ConflictResolutionPolicy policy, bool disableConstraintsOnApplyChanges, bool useBulkOperations, bool cleanMetadatas, bool cleanFolder, BatchInfo changes) { this.LocalScopeId = localScopeId; this.SenderScopeId = senderScopeId; this.IsNew = isNew; this.LastTimestamp = lastTimestamp; this.Schema = schema ?? throw new ArgumentNullException(nameof(schema)); this.Policy = policy; this.DisableConstraintsOnApplyChanges = disableConstraintsOnApplyChanges; this.UseBulkOperations = useBulkOperations; this.CleanMetadatas = cleanMetadatas; this.CleanFolder = cleanFolder; this.Changes = changes ?? throw new ArgumentNullException(nameof(changes)); }
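Since this constructor throws for a null schema or changes, both must be in hand before the message is built. A hedged construction sketch with placeholder values (the scope ids, timestamp, schema and batch info would come from the surrounding sync session):
var message = new MessageApplyChanges(
    localScopeId: Guid.NewGuid(),      // placeholder; real code passes the local scope id
    senderScopeId: Guid.NewGuid(),     // placeholder; real code passes the sender's scope id
    isNew: false,
    lastTimestamp: 0,
    schema: schema,                    // SyncSet describing the synced tables; must not be null
    policy: ConflictResolutionPolicy.ServerWins,
    disableConstraintsOnApplyChanges: true,
    useBulkOperations: true,
    cleanMetadatas: true,
    cleanFolder: true,
    changes: clientBatchInfo);         // BatchInfo carrying the change batches; must not be null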
/// <summary> /// Fill Items into the particular controls for Update or delete /// </summary> public void FillControls() { try { StandardRateInfo infoStandardRate = new StandardRateInfo(); standardRateBll BllStandardRate = new standardRateBll(); infoStandardRate = BllStandardRate.StandardRateView(decStandardRateId); dtpFromDate.Value = Convert.ToDateTime(infoStandardRate.ApplicableFrom.ToString()); dtpToDate.Value = Convert.ToDateTime(infoStandardRate.ApplicableTo.ToString()); dtpFromDate.Text = infoStandardRate.ApplicableFrom.ToString(); dtpToDate.Text = infoStandardRate.ApplicableTo.ToString(); txtRate.Text = infoStandardRate.Rate.ToString(); decProductId = infoStandardRate.ProductId; decUnitId = infoStandardRate.UnitId; ProductCreationBll BllProductCreation = new ProductCreationBll(); ProductInfo infoProduct = new ProductInfo(); infoProduct = BllProductCreation.ProductViewForStandardRate(decProductId); txtProductCode.Text = infoProduct.ProductCode; txtProductName.Text = infoProduct.ProductName; decStandardRateId = infoStandardRate.StandardRateId; UnitInfo infoUnit = new UnitInfo(); UnitBll bllUnit = new UnitBll(); infoUnit = bllUnit.UnitView(decUnitId); txtUnitName.Text = infoUnit.UnitName; txtProductName.ReadOnly = true; txtProductCode.ReadOnly = true; txtUnitName.ReadOnly = true; BatchInfo infoBatch = new BatchInfo(); BatchBll BllBatch = new BatchBll(); decBatchId = infoStandardRate.BatchId; infoBatch = BllBatch.BatchView(decBatchId); cmbBatch.SelectedValue = infoBatch.BatchId; } catch (Exception ex) { MessageBox.Show("SRP5:" + ex.Message, "OpenMiracle", MessageBoxButtons.OK, MessageBoxIcon.Information); } }
private static void DrawTileHighlights(Canvas canvas, ICmpTilemapRenderer renderer, Point2 origin, IReadOnlyGrid<bool> highlight, ColorRgba fillTint, ColorRgba outlineTint, TileHighlightMode mode, List<Vector2[]> outlineCache = null) { if (highlight.Width == 0 || highlight.Height == 0) return; // Generate strippled line texture if not available yet if (strippledLineTex == null) { PixelData pixels = new PixelData(8, 1); for (int i = 0; i < pixels.Width / 2; i++) pixels[i, 0] = ColorRgba.White; for (int i = pixels.Width / 2; i < pixels.Width; i++) pixels[i, 0] = ColorRgba.TransparentWhite; using (Pixmap pixmap = new Pixmap(pixels)) { strippledLineTex = new Texture(pixmap, TextureSizeMode.Default, TextureMagFilter.Nearest, TextureMinFilter.Nearest, TextureWrapMode.Repeat, TextureWrapMode.Repeat, TexturePixelFormat.Rgba); } } BatchInfo defaultMaterial = new BatchInfo(DrawTechnique.Alpha, canvas.State.Material.MainColor); BatchInfo strippleMaterial = new BatchInfo(DrawTechnique.Alpha, canvas.State.Material.MainColor, strippledLineTex); bool uncertain = (mode & TileHighlightMode.Uncertain) != 0; bool selection = (mode & TileHighlightMode.Selection) != 0; Component component = renderer as Component; Transform transform = component.GameObj.Transform; Tilemap tilemap = renderer.ActiveTilemap; Tileset tileset = tilemap != null ? tilemap.Tileset.Res : null; Vector2 tileSize = tileset != null ? tileset.TileSize : Tileset.DefaultTileSize; Rect localRect = renderer.LocalTilemapRect; // Determine the object's local coordinate system (rotated, scaled) in world space Vector2 worldAxisX = Vector2.UnitX; Vector2 worldAxisY = Vector2.UnitY; MathF.TransformCoord(ref worldAxisX.X, ref worldAxisX.Y, transform.Angle, transform.Scale); MathF.TransformCoord(ref worldAxisY.X, ref worldAxisY.Y, transform.Angle, transform.Scale); Vector2 localOriginPos = tileSize * origin; Vector2 worldOriginPos = localOriginPos.X * worldAxisX + localOriginPos.Y * worldAxisY; canvas.PushState(); { // Configure the canvas so our shapes are properly rotated and scaled canvas.State.TransformHandle = -localRect.TopLeft; canvas.State.TransformAngle = transform.Angle; canvas.State.TransformScale = new Vector2(transform.Scale); // Fill all highlighted tiles that are currently visible { canvas.State.SetMaterial(defaultMaterial); canvas.State.ColorTint = fillTint * ColorRgba.White.WithAlpha(selection ? 
0.2f : 0.375f); // Determine tile visibility Vector2 worldTilemapOriginPos = localRect.TopLeft; MathF.TransformCoord(ref worldTilemapOriginPos.X, ref worldTilemapOriginPos.Y, transform.Angle, transform.Scale); TilemapCulling.TileInput cullingIn = new TilemapCulling.TileInput { // Remember: All these transform values are in world space TilemapPos = transform.Pos + new Vector3(worldTilemapOriginPos) + new Vector3(worldOriginPos), TilemapScale = transform.Scale, TilemapAngle = transform.Angle, TileCount = new Point2(highlight.Width, highlight.Height), TileSize = tileSize }; TilemapCulling.TileOutput cullingOut = TilemapCulling.GetVisibleTileRect(canvas.DrawDevice, cullingIn); int renderedTileCount = cullingOut.VisibleTileCount.X * cullingOut.VisibleTileCount.Y; // Draw all visible highlighted tiles { Point2 tileGridPos = cullingOut.VisibleTileStart; Vector2 renderStartPos = worldOriginPos + tileGridPos.X * tileSize.X * worldAxisX + tileGridPos.Y * tileSize.Y * worldAxisY;; Vector2 renderPos = renderStartPos; Vector2 tileXStep = worldAxisX * tileSize.X; Vector2 tileYStep = worldAxisY * tileSize.Y; int lineMergeCount = 0; int totalRects = 0; for (int tileIndex = 0; tileIndex < renderedTileCount; tileIndex++) { bool current = highlight[tileGridPos.X, tileGridPos.Y]; if (current) { // Try to merge consecutive rects in the same line to reduce drawcalls / CPU load bool hasNext = (tileGridPos.X + 1 < highlight.Width) && ((tileGridPos.X + 1 - cullingOut.VisibleTileStart.X) < cullingOut.VisibleTileCount.X); bool next = hasNext ? highlight[tileGridPos.X + 1, tileGridPos.Y] : false; if (next) { lineMergeCount++; } else { totalRects++; canvas.FillRect( transform.Pos.X + renderPos.X - lineMergeCount * tileXStep.X, transform.Pos.Y + renderPos.Y - lineMergeCount * tileXStep.Y, transform.Pos.Z, tileSize.X * (1 + lineMergeCount), tileSize.Y); lineMergeCount = 0; } } tileGridPos.X++; renderPos += tileXStep; if ((tileGridPos.X - cullingOut.VisibleTileStart.X) >= cullingOut.VisibleTileCount.X) { tileGridPos.X = cullingOut.VisibleTileStart.X; tileGridPos.Y++; renderPos = renderStartPos; renderPos += tileYStep * (tileGridPos.Y - cullingOut.VisibleTileStart.Y); } } } } // Draw highlight area outlines, unless flagged as uncertain if (!uncertain) { // Determine the outlines of individual highlighted tile patches if (outlineCache == null) outlineCache = new List<Vector2[]>(); if (outlineCache.Count == 0) { GetTileAreaOutlines(highlight, tileSize, ref outlineCache); } // Draw outlines around all highlighted tile patches canvas.State.SetMaterial(selection ? strippleMaterial : defaultMaterial); canvas.State.ColorTint = outlineTint; foreach (Vector2[] outline in outlineCache) { // For strippled-line display, determine total length of outline if (selection) { float totalLength = 0.0f; for (int i = 1; i < outline.Length; i++) { totalLength += (outline[i - 1] - outline[i]).Length; } canvas.State.TextureCoordinateRect = new Rect(totalLength / strippledLineTex.PixelWidth, 1.0f); } // Draw the outline canvas.DrawPolygon( outline, transform.Pos.X + worldOriginPos.X, transform.Pos.Y + worldOriginPos.Y, transform.Pos.Z); } } // If this is an uncertain highlight, i.e. not actually reflecting the represented action, // draw a gizmo to indicate this for the user. 
if (uncertain) { Vector2 highlightSize = new Vector2(highlight.Width * tileSize.X, highlight.Height * tileSize.Y); Vector2 highlightCenter = highlightSize * 0.5f; Vector3 circlePos = transform.Pos + new Vector3(worldOriginPos + worldAxisX * highlightCenter + worldAxisY * highlightCenter); float circleRadius = MathF.Min(tileSize.X, tileSize.Y) * 0.2f; canvas.State.SetMaterial(defaultMaterial); canvas.State.ColorTint = outlineTint; canvas.FillCircle( circlePos.X, circlePos.Y, circlePos.Z, circleRadius); } } canvas.PopState(); }
/// <summary> /// Function to fill Controls based on the barcode /// </summary> /// <param name="strBarcode"></param> public void FillControlsByBarcode(string strBarcode) { try { BatchInfo infoBatch = new BatchInfo(); BatchBll BllBatch = new BatchBll(); PriceListInfo InfoPriceList = new PriceListInfo(); PriceListBll BllPriceList = new PriceListBll(); infoBatch = BllBatch.BatchAndProductViewByBarcode(strBarcode); cmbBatch.Text = infoBatch.BatchNo; decProductId = infoBatch.ProductId; decBatchId = infoBatch.BatchId; InfoPriceList = BllPriceList.PriceListViewByBatchIdORProduct(decBatchId); ProductInfo infoProduct = new ProductInfo(); ProductCreationBll BllProductCreation = new ProductCreationBll(); infoProduct = BllProductCreation.ProductView(decProductId); txtProductCode.Text = infoProduct.ProductCode; string strProductCode = infoProduct.ProductCode; isFromBarcode = true; cmbItem.Text = infoProduct.ProductName; isFromBarcode = false; cmbGodown.SelectedValue = infoProduct.GodownId; cmbRack.SelectedValue = infoProduct.RackId; UnitComboFill(); UnitInfo infoUnit = new UnitInfo(); infoUnit = new UnitBll().unitVieWForStandardRate(decProductId); cmbUnit.SelectedValue = infoUnit.UnitId; if (InfoPriceList.PricinglevelId != 0) { cmbPricingLevel.SelectedValue = InfoPriceList.PricinglevelId; } else { cmbPricingLevel.SelectedIndex = 0; } ComboTaxFill(); cmbTax.SelectedValue = infoProduct.TaxId; if (txtProductCode.Text.Trim() != string.Empty && cmbItem.SelectedIndex != -1) { decimal decNodecplaces = PublicVariables._inNoOfDecimalPlaces; decimal dcRate = BllProductCreation.ProductRateForSales(decProductId, Convert.ToDateTime(txtDate.Text), decBatchId, decNodecplaces); txtRate.Text = dcRate.ToString(); try { if (decimal.Parse(txtQuantity.Text) == 0) txtQuantity.Text = "1"; } catch { txtQuantity.Text = "1"; } txtQuantity.Focus(); } else { txtRate.Text = string.Empty; } } catch (Exception ex) { MessageBox.Show("POS:25" + ex.Message, "OpenMiracle", MessageBoxButtons.OK, MessageBoxIcon.Information); } }
BatchInfo GetCreateBatchInfo(ResourceHandle<Material> materialHandle) { if (!batches.ContainsKey(materialHandle.Id)) { var batch = new RenderBatch { GeometryBuffer = gb, RenderLayer = RenderLayer.Overlays, PrimitiveType = PrimitiveType.Quads, Material = materialHandle }; var batchInfo = new BatchInfo {Batch = batch, Ranges = new List<int>()}; batches[materialHandle.Id] = batchInfo; } return batches[materialHandle.Id]; }
private void SetupMaterial(BatchInfo material, BatchInfo lastMaterial) { if (material == lastMaterial) { return; } DrawTechnique tech = material.Technique.Res ?? DrawTechnique.Solid.Res; DrawTechnique lastTech = lastMaterial != null ? lastMaterial.Technique.Res : null; // Prepare Rendering if (tech.NeedsPreparation) { material = new BatchInfo(material); tech.PrepareRendering(this.currentDevice, material); } // Setup BlendType if (lastTech == null || tech.Blending != lastTech.Blending) { this.SetupBlendType(tech.Blending, this.currentDevice.DepthWrite); } // Bind Shader NativeShaderProgram shader = (tech.Shader.Res != null ? tech.Shader.Res.Native : null) as NativeShaderProgram; NativeShaderProgram.Bind(shader); // Setup shader data if (shader != null) { ShaderFieldInfo[] varInfo = shader.Fields; int[] locations = shader.FieldLocations; // Setup sampler bindings automatically int curSamplerIndex = 0; if (material.Textures != null) { for (int i = 0; i < varInfo.Length; i++) { if (locations[i] == -1) { continue; } if (varInfo[i].Type != ShaderFieldType.Sampler2D) { continue; } // Bind Texture ContentRef <Texture> texRef = material.GetTexture(varInfo[i].Name); NativeTexture.Bind(texRef, curSamplerIndex); GL.Uniform1(locations[i], curSamplerIndex); curSamplerIndex++; } } NativeTexture.ResetBinding(curSamplerIndex); // Transfer uniform data from material to actual shader if (material.Uniforms != null) { for (int i = 0; i < varInfo.Length; i++) { if (locations[i] == -1) { continue; } float[] data = material.GetUniform(varInfo[i].Name); if (data == null) { continue; } NativeShaderProgram.SetUniform(ref varInfo[i], locations[i], data); } } } // Setup fixed function data else { // Fixed function texture binding if (material.Textures != null) { int samplerIndex = 0; foreach (var pair in material.Textures) { NativeTexture.Bind(pair.Value, samplerIndex); samplerIndex++; } NativeTexture.ResetBinding(samplerIndex); } else { NativeTexture.ResetBinding(); } } }
BatchInfo GetCreateBatchInfo(ResourceHandle<Image> imageHandle) { var hId = imageHandle.Id; if (!batches.ContainsKey(hId)) { var batch = new RenderBatch(); batch.SetGeometryBuffer(gb); batch.SetRenderLayer(RenderLayer.Overlays); batch.SetPrimitiveType(PrimitiveType.Quads); var mat = GetCreateMaterial(imageHandle); batch.SetMaterial(mat); var bInfo = new BatchInfo {Batch = batch}; batches[hId] = bInfo; } return batches[hId]; }
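Both GetCreateBatchInfo overloads above implement the same get-or-create cache, but pay for two dictionary lookups (ContainsKey, then the indexer). A sketch of the equivalent single-lookup form, assuming the same batches dictionary and a hypothetical CreateBatchInfo factory standing in for the RenderBatch setup:
BatchInfo GetCreateBatchInfo(int handleId)
{
    // TryGetValue performs one hash lookup instead of the two in ContainsKey + indexer
    if (!batches.TryGetValue(handleId, out BatchInfo batchInfo))
    {
        batchInfo = CreateBatchInfo(handleId); // hypothetical factory wrapping the batch setup
        batches[handleId] = batchInfo;
    }
    return batchInfo;
}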
/// <summary> /// Prepares rendering using this DrawTechnique. /// </summary> /// <param name="device"></param> /// <param name="material"></param> public virtual void PrepareRendering(IDrawDevice device, BatchInfo material) { }
/// <summary> /// Apply changes internal method for one Insert or Update or Delete for every dbSyncAdapter /// </summary> internal ChangeApplicationAction ApplyChangesInternal(SyncContext context, DbConnection connection, DbTransaction transaction, ScopeInfo fromScope, BatchInfo changes, DmRowState applyType, ChangesApplied changesApplied) { ChangeApplicationAction changeApplicationAction = ChangeApplicationAction.Continue; var configuration = GetCacheConfiguration(); // for each adapter (Zero to End for Inserts / Updates -- End to Zero for Deletes) for (int i = 0; i < configuration.Count; i++) { // If we have a delete we must go from Up to Down, otherwise Down to Up var tableDescription = (applyType != DmRowState.Deleted ? configuration[i] : configuration[configuration.Count - i - 1]); // if we are in upload stage, check that the table is not download only if (context.SyncWay == SyncWay.Upload && tableDescription.SyncDirection == SyncDirection.DownloadOnly) { continue; } // if we are in download stage, check that the table is not upload only if (context.SyncWay == SyncWay.Download && tableDescription.SyncDirection == SyncDirection.UploadOnly) { continue; } var builder = this.GetDatabaseBuilder(tableDescription); var syncAdapter = builder.CreateSyncAdapter(connection, transaction); syncAdapter.ConflictApplyAction = configuration.GetApplyAction(); // Set syncAdapter properties syncAdapter.applyType = applyType; // Get conflict handler resolver if (syncAdapter.ConflictActionInvoker == null && this.ApplyChangedFailed != null) { syncAdapter.ConflictActionInvoker = GetConflictAction; } if (changes.BatchPartsInfo != null && changes.BatchPartsInfo.Count > 0) { // getting the table to be applied // we may have multiple batch files, so we can have multiple dmTables with the same name // We can say that a dmTable may be contained in several files foreach (DmTable dmTablePart in changes.GetTable(tableDescription.TableName)) { if (dmTablePart == null || dmTablePart.Rows.Count == 0) { continue; } // check and filter var dmChangesView = new DmView(dmTablePart, (r) => r.RowState == applyType); if (dmChangesView.Count == 0) { dmChangesView.Dispose(); dmChangesView = null; continue; } // Conflicts occurred when trying to apply rows List <SyncConflict> conflicts = new List <SyncConflict>(); // Raise event progress only if there are rows to be applied context.SyncStage = SyncStage.TableChangesApplying; var args = new TableChangesApplyingEventArgs(this.ProviderTypeName, context.SyncStage, tableDescription.TableName, applyType); this.TryRaiseProgressEvent(args, this.TableChangesApplying); int rowsApplied; // applying the bulkchanges command if (configuration.UseBulkOperations && this.SupportBulkOperations) { rowsApplied = syncAdapter.ApplyBulkChanges(dmChangesView, fromScope, conflicts); } else { rowsApplied = syncAdapter.ApplyChanges(dmChangesView, fromScope, conflicts); } // If conflicts occurred // Eventually, conflicts are resolved on the server side. 
if (conflicts != null && conflicts.Count > 0) { foreach (var conflict in conflicts) { var scopeBuilder = this.GetScopeBuilder(); var scopeInfoBuilder = scopeBuilder.CreateScopeInfoBuilder(connection, transaction); var localTimeStamp = scopeInfoBuilder.GetLocalTimestamp(); changeApplicationAction = syncAdapter.HandleConflict(conflict, fromScope, localTimeStamp, out DmRow resolvedRow); if (changeApplicationAction == ChangeApplicationAction.Continue) { // row resolved if (resolvedRow != null) { rowsApplied++; } } else { context.TotalSyncErrors++; // TODO : Should we break at the first error ? return(ChangeApplicationAction.Rollback); } } } // Get all conflicts resolved context.TotalSyncConflicts = conflicts.Where(c => c.Type != ConflictType.ErrorsOccurred).Sum(c => 1); // Handle sync progress for this syncadapter (so this table) var changedFailed = dmChangesView.Count - rowsApplied; // raise SyncProgress Event var existAppliedChanges = changesApplied.TableChangesApplied.FirstOrDefault( sc => string.Equals(sc.TableName, tableDescription.TableName) && sc.State == applyType); if (existAppliedChanges == null) { existAppliedChanges = new TableChangesApplied { TableName = tableDescription.TableName, Applied = rowsApplied, Failed = changedFailed, State = applyType }; changesApplied.TableChangesApplied.Add(existAppliedChanges); } else { existAppliedChanges.Applied += rowsApplied; existAppliedChanges.Failed += changedFailed; } // Event progress context.SyncStage = SyncStage.TableChangesApplied; var progressEventArgs = new TableChangesAppliedEventArgs(this.ProviderTypeName, context.SyncStage, existAppliedChanges); this.TryRaiseProgressEvent(progressEventArgs, this.TableChangesApplied); } } // Dispose conflict handler resolver if (syncAdapter.ConflictActionInvoker != null) { syncAdapter.ConflictActionInvoker = null; } } return(ChangeApplicationAction.Continue); }
public override void Draw(IDrawDevice device)
{
    Vector3 posTemp = this.gameobj.Transform.Pos;
    float scaleTemp = 1.0f;
    device.PreprocessCoords(ref posTemp, ref scaleTemp);

    Vector2 xDot, yDot;
    MathF.GetTransformDotVec(this.GameObj.Transform.Angle, this.gameobj.Transform.Scale * scaleTemp, out xDot, out yDot);

    // Apply block alignment
    Vector2 textOffset = Vector2.Zero;
    Vector2 textSize = this.text.Size;
    if (this.text.MaxWidth > 0) textSize.X = this.text.MaxWidth;
    this.blockAlign.ApplyTo(ref textOffset, textSize);
    MathF.TransformDotVec(ref textOffset, ref xDot, ref yDot);
    posTemp.X += textOffset.X;
    posTemp.Y += textOffset.Y;

    // Snap pixel-grid-aligned fonts to full pixel coordinates
    if (this.text.Fonts != null && this.text.Fonts.Any(r => r.IsAvailable && r.Res.IsPixelGridAligned))
    {
        posTemp.X = MathF.Round(posTemp.X);
        posTemp.Y = MathF.Round(posTemp.Y);
        if (MathF.RoundToInt(device.TargetSize.X) != (MathF.RoundToInt(device.TargetSize.X) / 2) * 2) posTemp.X += 0.5f;
        if (MathF.RoundToInt(device.TargetSize.Y) != (MathF.RoundToInt(device.TargetSize.Y) / 2) * 2) posTemp.Y += 0.5f;
    }

    // Draw design-time metrics data
    if (DualityApp.ExecContext == DualityApp.ExecutionContext.Editor)
    {
        bool showLimits = true;
        bool showLines = false;
        bool showElements = false;
        Vector3 metricsOffset = new Vector3(0.0f, 0.0f, 0.01f);
        Vector3 lineOffset = new Vector3(0.5f, 0.5f, 0.0f);
        Vector3 tUnitX = Vector3.UnitX;
        Vector3 tUnitY = Vector3.UnitY;
        MathF.TransformDotVec(ref tUnitX, ref xDot, ref yDot);
        MathF.TransformDotVec(ref tUnitY, ref xDot, ref yDot);

        // Actual text size and maximum text size
        if (showLimits)
        {
            Vector3 textWidth = tUnitX * this.text.Size.X;
            Vector3 textHeight = tUnitY * this.text.Size.Y;
            Vector3 textMaxWidth = tUnitX * this.text.MaxWidth;
            Vector3 textMaxHeight = tUnitY * MathF.Max(this.text.MaxHeight, this.text.Size.Y);
            ColorRgba clrSize = ColorRgba.Green.WithAlpha(128);
            ColorRgba clrMaxSize = ColorRgba.Red.WithAlpha(128);
            device.AddVertices(new BatchInfo(DrawTechnique.Alpha, ColorRgba.White), VertexMode.LineLoop,
                new VertexC1P3(metricsOffset + lineOffset + posTemp, clrSize),
                new VertexC1P3(metricsOffset + lineOffset + posTemp + textWidth, clrSize),
                new VertexC1P3(metricsOffset + lineOffset + posTemp + textWidth + textHeight, clrSize),
                new VertexC1P3(metricsOffset + lineOffset + posTemp + textHeight, clrSize));
            device.AddVertices(new BatchInfo(DrawTechnique.Alpha, ColorRgba.White), VertexMode.LineLoop,
                new VertexC1P3(metricsOffset + lineOffset + posTemp, clrMaxSize),
                new VertexC1P3(metricsOffset + lineOffset + posTemp + textMaxWidth, clrMaxSize),
                new VertexC1P3(metricsOffset + lineOffset + posTemp + textMaxWidth + textMaxHeight, clrMaxSize),
                new VertexC1P3(metricsOffset + lineOffset + posTemp + textMaxHeight, clrMaxSize));
        }

        // Individual line sizes
        if (showLines)
        {
            ColorRgba clrLineBg = (ColorRgba.Blue + ColorRgba.Red).WithAlpha(64);
            for (int i = 0; i < this.text.TextMetrics.LineBounds.Count; i++)
            {
                Rect lineRect = this.text.TextMetrics.LineBounds[i];
                device.AddVertices(new BatchInfo(DrawTechnique.Alpha, ColorRgba.White), VertexMode.Quads,
                    new VertexC1P3(metricsOffset + posTemp + lineRect.TopLeft.X * tUnitX + lineRect.TopLeft.Y * tUnitY, clrLineBg),
                    new VertexC1P3(metricsOffset + posTemp + lineRect.BottomLeft.X * tUnitX + lineRect.BottomLeft.Y * tUnitY, clrLineBg),
                    new VertexC1P3(metricsOffset + posTemp + lineRect.BottomRight.X * tUnitX + lineRect.BottomRight.Y * tUnitY, clrLineBg),
                    new VertexC1P3(metricsOffset + posTemp + lineRect.TopRight.X * tUnitX + lineRect.TopRight.Y * tUnitY, clrLineBg));
            }
        }

        // Individual element sizes
        if (showElements)
        {
            ColorRgba clrElementBg = (ColorRgba.Blue + ColorRgba.Green).WithAlpha(128);
            for (int i = 0; i < this.text.TextMetrics.ElementBounds.Count; i++)
            {
                Rect elemRect = this.text.TextMetrics.ElementBounds[i];
                device.AddVertices(new BatchInfo(DrawTechnique.Alpha, ColorRgba.White), VertexMode.LineLoop,
                    new VertexC1P3(metricsOffset + lineOffset + posTemp + elemRect.TopLeft.X * tUnitX + elemRect.TopLeft.Y * tUnitY, clrElementBg),
                    new VertexC1P3(metricsOffset + lineOffset + posTemp + elemRect.BottomLeft.X * tUnitX + elemRect.BottomLeft.Y * tUnitY, clrElementBg),
                    new VertexC1P3(metricsOffset + lineOffset + posTemp + elemRect.BottomRight.X * tUnitX + elemRect.BottomRight.Y * tUnitY, clrElementBg),
                    new VertexC1P3(metricsOffset + lineOffset + posTemp + elemRect.TopRight.X * tUnitX + elemRect.TopRight.Y * tUnitY, clrElementBg));
            }
        }
    }

    ColorRgba matColor = this.customMat != null ? this.customMat.MainColor : ColorRgba.White;
    int[] vertLen = this.text.EmitVertices(ref this.vertFont, ref this.vertIcon, posTemp.X, posTemp.Y, posTemp.Z, this.colorTint * matColor, xDot, yDot);

    // Emit font glyph vertices, one batch per font
    if (this.text.Fonts != null)
    {
        for (int i = 0; i < this.text.Fonts.Length; i++)
        {
            if (this.text.Fonts[i] != null && this.text.Fonts[i].IsAvailable)
            {
                if (this.customMat == null)
                {
                    device.AddVertices(this.text.Fonts[i].Res.Material, VertexMode.Quads, this.vertFont[i], vertLen[i + 1]);
                }
                else
                {
                    // Use the custom material, but keep the font's own textures
                    BatchInfo cm = new BatchInfo(this.customMat);
                    cm.Textures = this.text.Fonts[i].Res.Material.Textures;
                    device.AddVertices(cm, VertexMode.Quads, this.vertFont[i], vertLen[i + 1]);
                }
            }
        }
    }

    // Emit icon vertices
    if (this.text.Icons != null && this.iconMat.IsAvailable)
    {
        device.AddVertices(this.iconMat, VertexMode.Quads, this.vertIcon, vertLen[0]);
    }
}
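The custom-material branch above is a useful BatchInfo idiom on its own: copy every setting from an existing BatchInfo, then swap the textures. A hedged standalone sketch of that idiom, where `device`, `font`, `vertices`, and `vertexCount` are assumed to exist in scope:

// Assumed locals: device (IDrawDevice), font (a ContentRef to a Font), vertices, vertexCount.
BatchInfo custom = new BatchInfo(DrawTechnique.Alpha, ColorRgba.Red); // desired technique / tint
BatchInfo glyphs = new BatchInfo(custom);                             // copy all material settings...
glyphs.Textures = font.Res.Material.Textures;                         // ...but keep the font's glyph textures
device.AddVertices(glyphs, VertexMode.Quads, vertices, vertexCount);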
/// <summary>
/// Apply changes: Inserts / Updates / Deletes.
/// fromScope is the local client scope when this method is called from the server;
/// fromScope is the server scope when this method is called from the client.
/// </summary>
public virtual async Task<(SyncContext, ChangesApplied)> ApplyChangesAsync(SyncContext context, ScopeInfo fromScope, BatchInfo changes)
{
    ChangeApplicationAction changeApplicationAction;
    DbTransaction applyTransaction = null;
    ChangesApplied changesApplied = new ChangesApplied();
    DbConnection connection = null;

    try
    {
        using (connection = this.CreateConnection())
        {
            await connection.OpenAsync();

            // Create a transaction
            applyTransaction = connection.BeginTransaction();

            // -----------------------------------------------------
            // 0) Check if we are in a reinit mode
            // -----------------------------------------------------
            if (context.SyncWay == SyncWay.Download && context.SyncType != SyncType.Normal)
            {
                changeApplicationAction = this.ResetInternal(context, connection, applyTransaction, fromScope);

                // Rollback
                if (changeApplicationAction == ChangeApplicationAction.Rollback)
                    throw new SyncException("Rollback during reset tables", context.SyncStage, this.ProviderTypeName, SyncExceptionType.Rollback);
            }

            // -----------------------------------------------------
            // 1) Applying deletes. Do not apply deletes if we are in a new database
            // -----------------------------------------------------
            if (!fromScope.IsNewScope)
            {
                changeApplicationAction = this.ApplyChangesInternal(context, connection, applyTransaction, fromScope, changes, DmRowState.Deleted, changesApplied);

                // Rollback
                if (changeApplicationAction == ChangeApplicationAction.Rollback)
                    throw new SyncException("Rollback during applying deletes", context.SyncStage, this.ProviderTypeName, SyncExceptionType.Rollback);
            }

            // -----------------------------------------------------
            // 2) Applying inserts
            // -----------------------------------------------------
            changeApplicationAction = this.ApplyChangesInternal(context, connection, applyTransaction, fromScope, changes, DmRowState.Added, changesApplied);

            // Rollback
            if (changeApplicationAction == ChangeApplicationAction.Rollback)
                throw new SyncException("Rollback during applying inserts", context.SyncStage, this.ProviderTypeName, SyncExceptionType.Rollback);

            // -----------------------------------------------------
            // 3) Applying updates
            // -----------------------------------------------------
            changeApplicationAction = this.ApplyChangesInternal(context, connection, applyTransaction, fromScope, changes, DmRowState.Modified, changesApplied);

            // Rollback
            if (changeApplicationAction == ChangeApplicationAction.Rollback)
                throw new SyncException("Rollback during applying updates", context.SyncStage, this.ProviderTypeName, SyncExceptionType.Rollback);

            applyTransaction.Commit();

            return (context, changesApplied);
        }
    }
    catch (SyncException)
    {
        throw;
    }
    catch (Exception ex)
    {
        throw new SyncException(ex, SyncStage.TableChangesApplying, this.ProviderTypeName);
    }
    finally
    {
        if (applyTransaction != null)
        {
            applyTransaction.Dispose();
            applyTransaction = null;
        }

        if (connection != null && connection.State == ConnectionState.Open)
            connection.Close();

        if (changes != null)
            changes.Clear();
    }
}
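Given the signature above, a caller receives both the updated SyncContext and the ChangesApplied summary as a tuple. A minimal hedged call-site sketch; `provider`, `context`, `fromScope`, and `incomingBatch` are hypothetical locals, not names from the library:

// Hypothetical call site; all four locals are assumed to be set up elsewhere.
(SyncContext updatedContext, ChangesApplied applied) =
    await provider.ApplyChangesAsync(context, fromScope, incomingBatch);

// TableChangesApplied holds one stats entry per (table, row state) pair.
foreach (var tableStats in applied.TableChangesApplied)
    Console.WriteLine($"{tableStats.TableName}: {tableStats.Applied} applied, {tableStats.Failed} failed");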
public Pass(Pass copyFrom, BatchInfo inputOverride)
{
    this.input = inputOverride;
    this.output = copyFrom.output;
    this.clearColor = copyFrom.clearColor;
    this.clearDepth = copyFrom.clearDepth;
    this.clearFlags = copyFrom.clearFlags;
    this.matrixMode = copyFrom.matrixMode;
    this.visibilityMask = copyFrom.visibilityMask;

    this.MakeAvailable();
}
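A short sketch of what this copy constructor is for: reusing an existing pass's configuration while feeding it a different input material. `basePass` is a hypothetical, previously configured Pass:

// 'basePass' is assumed to be an already configured rendering pass.
BatchInfo overrideInput = new BatchInfo(DrawTechnique.Alpha, ColorRgba.White);
Pass overriddenPass = new Pass(basePass, overrideInput); // same output, clear and visibility settings, new input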
/// <summary>
/// Fills the controls when the user double-clicks a row in the DataGridView
/// </summary>
public void FillControls()
{
    try
    {
        PriceListInfo infoPricelist = new PriceListInfo();
        PriceListBll BllPriceList = new PriceListBll();
        infoPricelist.PricelistId = decpriceListId;
        infoPricelist = BllPriceList.PriceListView(decpriceListId);
        txtRate.Text = infoPricelist.Rate.ToString();

        ProductInfo infoProduct = new ProductInfo();
        ProductCreationBll BllProductCreation = new ProductCreationBll();
        infoProduct = BllProductCreation.PriceListPopUpView(decProductId);
        txtProductCode.Text = infoProduct.ProductCode;
        txtProductName.Text = infoProduct.ProductName;

        BatchInfo infobatch = new BatchInfo();
        BatchBll BllBatch = new BatchBll();
        infobatch = BllBatch.BatchView(decBatchId);
        cmbBatch.SelectedValue = infobatch.BatchId;

        UnitInfo infoUnit = new UnitInfo();
        UnitBll bllUnit = new UnitBll();
        infoUnit = bllUnit.UnitView(decUnitId);
        txtUnitName.Text = infoUnit.UnitName;

        PricingLevelInfo infoPricingLevel = new PricingLevelInfo();
        PricingLevelBll BllPricingLevel = new PricingLevelBll();
        infoPricingLevel = BllPricingLevel.PricingLevelView(decPriceLevelId);
        txtPricingLevel.Text = infoPricingLevel.PricinglevelName;

        txtProductName.ReadOnly = true;
        txtProductCode.ReadOnly = true;
        txtUnitName.ReadOnly = true;
        txtPricingLevel.ReadOnly = true;
    }
    catch (Exception ex)
    {
        MessageBox.Show("PLP9:" + ex.Message, "OpenMiracle", MessageBoxButtons.OK, MessageBoxIcon.Information);
    }
}
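The batch lookup above follows the Info/Bll pairing used throughout this codebase: a Bll class fetches a row and returns it as an Info object. A minimal hedged sketch of just that lookup; `decBatchId` and `cmbBatch` are assumed to exist as in the method above:

// Assumed: decBatchId holds a valid batch id and cmbBatch is a data-bound combo box.
BatchBll bllBatch = new BatchBll();
BatchInfo infoBatch = bllBatch.BatchView(decBatchId); // fetches the batch row as a BatchInfo
cmbBatch.SelectedValue = infoBatch.BatchId;           // select it in the UI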