/// <summary>
/// Serializes a <see cref="ContainerSet"/> instance to a JSON file on disk.
/// A null set is a no-op; the target directory is created when missing.
/// </summary>
private static void Serialize(ContainerSet set, string fileName, string directoryFullPath)
{
    if (set == null)
        return;

    var targetPath = Path.Combine(directoryFullPath, fileName);

    // Ensure the destination directory exists before opening the stream.
    var targetDirectory = new FileInfo(targetPath).Directory.FullName;
    if (!Directory.Exists(targetDirectory))
        Directory.CreateDirectory(targetDirectory);

    // Serialize to JSON bytes and write them out.
    // FileMode.CreateNew: fails if the file already exists (same contract as before).
    var converter = new JsonConverter<ContainerSet>();
    using var stream = new FileStream(targetPath, FileMode.CreateNew, FileAccess.ReadWrite);
    var payload = converter.Serialize(set);
    stream.Write(payload, 0, payload.Length);
}
/// <summary>
/// Rebuilds the manual command text (txbManually) from the current values of the
/// parameter controls hosted in tlpAssemblyUI, using the device-specific
/// code_format of the selected device (SANWA / ATEL / HST / COGNEX).
/// </summary>
private void FormulaCalculation()
{
    ContainerSet container = new ContainerSet();
    try
    {
        var sbTemp = new StringBuilder();

        // Collect one comma-separated value per input control, in layout order.
        // Type pattern matching replaces the original string comparison against
        // GetType().FullName's last segment (CreateUI only ever adds plain
        // ComboBox / NumericUpDown instances, so behavior is unchanged).
        foreach (var item in tlpAssemblyUI.Controls)
        {
            switch (item)
            {
                case ComboBox comboBox:
                    // "EMPTY" is a placeholder entry that maps to an empty value.
                    sbTemp.Append(comboBox.Text.ToUpper().Equals("EMPTY") ? string.Empty : comboBox.Text);
                    sbTemp.Append(",");
                    break;
                case NumericUpDown numericUpDown:
                    if (numericUpDown.Tag == null)
                    {
                        sbTemp.Append(numericUpDown.Value.ToString());
                    }
                    else
                    {
                        // Tag carries the zero-padding width (e.g. Tag = 3 -> "007").
                        sbTemp.Append(Convert.ToInt32(numericUpDown.Value).ToString("D" + numericUpDown.Tag.ToString()));
                    }
                    sbTemp.Append(",");
                    break;
            }
        }

        // All appends end with ','; trim the trailing one once.
        var parameterValues = sbTemp.ToString().TrimEnd(',');

        // The device family (prefix of the selected value) picks the format rule.
        switch (lsbDeviceName.SelectedValue.ToString().Split(',')[0])
        {
            case "SANWA":
                txbManually.Text = string.Format(dtCommandParameter.Rows[0]["code_format"].ToString(), lsbDeviceName.Text.Split(',')[1], string.Empty)
                    + container.StringFormat(dtCommandParameter.Select(), parameterValues.Split(','));
                break;
            case "ATEL":
                // ATEL expects the device code as the first format argument.
                var strsTemps = (lsbDeviceName.Text.Split(',')[1] + "," + parameterValues).Split(',');
                txbManually.Text = string.Format(dtCommandParameter.Rows[0]["code_format"].ToString(), strsTemps);
                break;
            case "HST":
            case "COGNEX":
                txbManually.Text = string.Format(dtCommandParameter.Rows[0]["code_format"].ToString(), parameterValues.Split(','));
                break;
        }
    }
    catch (Exception ex)
    {
        logger.Error(ex.ToString());
        // Fix: rethrow the original exception instead of wrapping it in a new
        // Exception(ex.ToString()), which discarded the type and the stack trace.
        throw;
    }
}
/// <summary>
/// Serializes a <see cref="ContainerSet"/> instance to disk, optionally letting an
/// orchestrator interceptor produce the serialized bytes instead of the serializer.
/// </summary>
/// <param name="set">Container set to serialize; a null set is a no-op.</param>
/// <param name="fileName">Target file name.</param>
/// <param name="directoryFullPath">Target directory (created when missing).</param>
/// <param name="serializerFactory">Optional factory; defaults to the JSON serializer.</param>
/// <param name="orchestrator">Optional orchestrator whose SerializingSet interceptors may supply the bytes.</param>
private static async Task SerializeAsync(ContainerSet set, string fileName, string directoryFullPath, ISerializerFactory serializerFactory = default, BaseOrchestrator orchestrator = null)
{
    if (set == null)
        return;

    var fullPath = Path.Combine(directoryFullPath, fileName);
    var fi = new FileInfo(fullPath);

    if (!Directory.Exists(fi.Directory.FullName))
        Directory.CreateDirectory(fi.Directory.FullName);

    // Backward compatibility: fall back to the default JSON serializer.
    if (serializerFactory == default)
        serializerFactory = SerializersCollection.JsonSerializer;

    var serializer = serializerFactory.GetSerializer<ContainerSet>();

    // FileMode.CreateNew: fails if the file already exists (unchanged contract).
    using var f = new FileStream(fullPath, FileMode.CreateNew, FileAccess.ReadWrite);

    byte[] serializedBytes = null;

    // An interceptor may take over serialization entirely.
    if (orchestrator != null)
    {
        var interceptorArgs = new SerializingSetArgs(orchestrator.GetContext(), set, serializerFactory, fileName, directoryFullPath);
        await orchestrator.InterceptAsync(interceptorArgs, default);
        serializedBytes = interceptorArgs.Result;
    }

    if (serializedBytes == null)
        serializedBytes = await serializer.SerializeAsync(set);

    // Fix: this method is async — use the asynchronous write instead of the
    // blocking FileStream.Write call the original used.
    await f.WriteAsync(serializedBytes, 0, serializedBytes.Length);
}
/// <summary>
/// Deserializes a <see cref="ContainerSet"/> from a JSON file on disk, optionally
/// letting an orchestrator interceptor supply the result.
/// </summary>
/// <param name="fileName">Name of the file to read.</param>
/// <param name="directoryFullPath">Directory containing the file.</param>
/// <param name="orchestrator">Optional orchestrator whose DeserializingSet interceptors may supply the result.</param>
/// <returns>The deserialized container set.</returns>
/// <exception cref="ArgumentNullException">When fileName or directoryFullPath is null or empty.</exception>
/// <exception cref="MissingFileException">When the file does not exist.</exception>
private static async Task<ContainerSet> DeserializeAsync(string fileName, string directoryFullPath, BaseOrchestrator orchestrator = null)
{
    // Fix: pass the parameter NAME to ArgumentNullException, not its value —
    // the value is null or empty here, which produced a useless message.
    if (string.IsNullOrEmpty(fileName))
        throw new ArgumentNullException(nameof(fileName));

    if (string.IsNullOrEmpty(directoryFullPath))
        throw new ArgumentNullException(nameof(directoryFullPath));

    var fullPath = Path.Combine(directoryFullPath, fileName);

    if (!File.Exists(fullPath))
        throw new MissingFileException(fullPath);

    var jsonConverter = new JsonConverter<ContainerSet>();

    using var fs = new FileStream(fullPath, FileMode.Open, FileAccess.Read);

    ContainerSet set = null;

    // An interceptor may take over deserialization entirely.
    if (orchestrator != null)
    {
        var interceptorArgs = new DeserializingSetArgs(orchestrator.GetContext(), fs, fileName, directoryFullPath);
        await orchestrator.InterceptAsync(interceptorArgs, default);
        set = interceptorArgs.Result;
    }

    if (set == null)
        set = await jsonConverter.DeserializeAsync(fs);

    return set;
}
/// <summary>
/// Serializes a <see cref="ContainerSet"/> instance to disk using the UTF-8 JSON
/// converter, optionally letting an orchestrator interceptor produce the bytes.
/// </summary>
/// <param name="set">Container set to serialize; a null set is a no-op.</param>
/// <param name="fileName">Target file name.</param>
/// <param name="directoryFullPath">Target directory (created when missing).</param>
/// <param name="orchestrator">Optional orchestrator whose SerializingSet interceptors may supply the bytes.</param>
private static async Task SerializeAsync(ContainerSet set, string fileName, string directoryFullPath, BaseOrchestrator orchestrator = null)
{
    if (set == null)
        return;

    var fullPath = Path.Combine(directoryFullPath, fileName);
    var fi = new FileInfo(fullPath);

    if (!Directory.Exists(fi.Directory.FullName))
        Directory.CreateDirectory(fi.Directory.FullName);

    // Serialize on disk with the UTF-8 JSON converter.
    var jsonConverter = new Utf8JsonConverter<ContainerSet>();

    // FileMode.CreateNew: fails if the file already exists (unchanged contract).
    using var f = new FileStream(fullPath, FileMode.CreateNew, FileAccess.ReadWrite);

    byte[] serializedBytes = null;

    // An interceptor may take over serialization entirely.
    if (orchestrator != null)
    {
        var interceptorArgs = new SerializingSetArgs(orchestrator.GetContext(), set, fileName, directoryFullPath);
        await orchestrator.InterceptAsync(interceptorArgs, default);
        serializedBytes = interceptorArgs.Result;
    }

    if (serializedBytes == null)
        serializedBytes = await jsonConverter.SerializeAsync(set);

    // Fix: this method is async — use the asynchronous write instead of the
    // blocking FileStream.Write call the original used.
    await f.WriteAsync(serializedBytes, 0, serializedBytes.Length);
}
/// <summary>
/// Sends every local (client) change batch to the server, then downloads all
/// server change batches into a freshly generated local batch directory.
/// NOTE(review): the declaration's return type is not visible in this chunk; the
/// method returns (context, serverSyncChanges, clientChangesApplied, conflictResolutionPolicy) — confirm against the full file.
/// </summary>
InternalApplyThenGetChangesAsync(ClientScopeInfo clientScopeInfo, SyncContext context, BatchInfo clientBatchInfo,
    DbConnection connection = default, DbTransaction transaction = default,
    CancellationToken cancellationToken = default, IProgress<ProgressArgs> progress = null)
{
    await using var runner = await this.GetConnectionAsync(context, SyncMode.Reading, SyncStage.ChangesApplying,
        connection, transaction, cancellationToken, progress).ConfigureAwait(false);

    SyncSet schema;
    ServerScopeInfo serverScopeInfo;

    // is it something that could happens ? (the client scope may not carry a schema yet)
    if (clientScopeInfo.Schema == null)
    {
        // Make a remote call to get Schema from remote provider
        (context, serverScopeInfo) = await this.InternalGetServerScopeInfoAsync(
            context, null, runner.Connection, runner.Transaction, runner.CancellationToken, runner.Progress).ConfigureAwait(false);
        schema = serverScopeInfo.Schema;
    }
    else
    {
        schema = clientScopeInfo.Schema;
    }

    schema.EnsureSchema();

    // if we don't have any BatchPartsInfo, just generate a new one to get, at least, something to send to the server
    // and get a response with new data from server
    if (clientBatchInfo == null)
    {
        clientBatchInfo = new BatchInfo(schema);
    }

    // --------------------------------------------------------------
    // STEP 1 : Send everything to the server side
    // --------------------------------------------------------------
    HttpResponseMessage response = null;

    // If not in memory and BatchPartsInfo.Count == 0, nothing to send.
    // But we need to send something, so generate a little batch part
    if (clientBatchInfo.BatchPartsInfo.Count == 0)
    {
        var changesToSend = new HttpMessageSendChangesRequest(context, clientScopeInfo);
        var containerSet = new ContainerSet();
        changesToSend.Changes = containerSet;
        changesToSend.IsLastBatch = true;
        changesToSend.BatchIndex = 0;
        changesToSend.BatchCount = clientBatchInfo.BatchPartsInfo == null ? 0 : clientBatchInfo.BatchPartsInfo.Count;
        var inMemoryRowsCount = changesToSend.Changes.RowsCount();

        context.ProgressPercentage += 0.125;

        await this.InterceptAsync(new HttpSendingClientChangesRequestArgs(changesToSend, inMemoryRowsCount, inMemoryRowsCount, this.GetServiceHost()), progress, cancellationToken).ConfigureAwait(false);

        // serialize message
        var serializer = this.SerializerFactory.GetSerializer<HttpMessageSendChangesRequest>();
        var binaryData = await serializer.SerializeAsync(changesToSend);

        response = await this.httpRequestHandler.ProcessRequestAsync
            (this.HttpClient, context, this.ServiceUri, binaryData, HttpStep.SendChangesInProgress,
             this.SerializerFactory, this.Converter, this.Options.BatchSize, this.SyncPolicy, cancellationToken, progress).ConfigureAwait(false);
    }
    else
    {
        // Running total of rows sent so far, reported to interceptors for progress.
        int tmpRowsSendedCount = 0;

        // Foreach part, will have to send them to the remote
        // once finished, return context
        var initialPctProgress1 = context.ProgressPercentage;

        var localSerializer = new LocalJsonSerializer();

        // Wire the row-reading interceptor only when at least one is registered.
        var interceptorsReading = this.interceptors.GetInterceptors<DeserializingRowArgs>();
        if (interceptorsReading.Count > 0)
        {
            localSerializer.OnReadingRow(async (schemaTable, rowString) =>
            {
                var args = new DeserializingRowArgs(context, schemaTable, rowString);
                await this.InterceptAsync(args);
                return (args.Result);
            });
        }

        // Send batch parts in index order; only the last response is kept alive.
        foreach (var bpi in clientBatchInfo.BatchPartsInfo.OrderBy(bpi => bpi.Index))
        {
            // Get the updatable schema for the only table contained in the batchpartinfo
            var schemaTable = DbSyncAdapter.CreateChangesTable(schema.Tables[bpi.Tables[0].TableName, bpi.Tables[0].SchemaName]);

            // Generate the ContainerSet containing rows to send to the user
            var containerSet = new ContainerSet();
            var containerTable = new ContainerTable(schemaTable);
            var fullPath = Path.Combine(clientBatchInfo.GetDirectoryFullPath(), bpi.FileName);
            containerSet.Tables.Add(containerTable);

            // read rows from file
            foreach (var row in localSerializer.ReadRowsFromFile(fullPath, schemaTable))
            {
                containerTable.Rows.Add(row.ToArray());
            }

            // Call the converter if needed
            if (this.Converter != null && containerTable.HasRows)
            {
                BeforeSerializeRows(containerTable, schemaTable, this.Converter);
            }

            // Create the send changes request
            var changesToSend = new HttpMessageSendChangesRequest(context, clientScopeInfo)
            {
                Changes = containerSet,
                IsLastBatch = bpi.IsLastBatch,
                BatchIndex = bpi.Index,
                BatchCount = clientBatchInfo.BatchPartsInfo.Count
            };

            tmpRowsSendedCount += containerTable.Rows.Count;

            context.ProgressPercentage = initialPctProgress1 + ((changesToSend.BatchIndex + 1) * 0.2d / changesToSend.BatchCount);
            await this.InterceptAsync(new HttpSendingClientChangesRequestArgs(changesToSend, tmpRowsSendedCount, clientBatchInfo.RowsCount, this.GetServiceHost()), progress, cancellationToken).ConfigureAwait(false);

            // serialize message
            var serializer = this.SerializerFactory.GetSerializer<HttpMessageSendChangesRequest>();
            var binaryData = await serializer.SerializeAsync(changesToSend);

            response = await this.httpRequestHandler.ProcessRequestAsync
                (this.HttpClient, context, this.ServiceUri, binaryData, HttpStep.SendChangesInProgress,
                 this.SerializerFactory, this.Converter, this.Options.BatchSize, this.SyncPolicy, cancellationToken, progress).ConfigureAwait(false);

            // See #721 for issue and #721 for PR from slagtejn
            // (dispose intermediate responses; the LAST one carries the summary read below)
            if (!bpi.IsLastBatch)
            {
                response.Dispose();
            }
        }
    }

    // --------------------------------------------------------------
    // STEP 2 : Receive everything from the server side
    // --------------------------------------------------------------

    // Now we have sent all the datas to the server and now :
    // We have a FIRST response from the server with new datas
    // 1) Could be the only one response
    // 2) Could be the first response and we need to download all batchs

    context.SyncStage = SyncStage.ChangesSelecting;
    var initialPctProgress = 0.55;
    context.ProgressPercentage = initialPctProgress;

    // Create the BatchInfo
    var serverBatchInfo = new BatchInfo(schema);

    HttpMessageSummaryResponse summaryResponseContent = null;

    // Deserialize response incoming from server
    using (var streamResponse = await response.Content.ReadAsStreamAsync().ConfigureAwait(false))
    {
        var responseSerializer = this.SerializerFactory.GetSerializer<HttpMessageSummaryResponse>();
        summaryResponseContent = await responseSerializer.DeserializeAsync(streamResponse);
    }

    serverBatchInfo.RowsCount = summaryResponseContent.BatchInfo.RowsCount;
    serverBatchInfo.Timestamp = summaryResponseContent.RemoteClientTimestamp;
    context = summaryResponseContent.SyncContext;

    if (summaryResponseContent.BatchInfo.BatchPartsInfo != null)
    {
        foreach (var bpi in summaryResponseContent.BatchInfo.BatchPartsInfo)
        {
            serverBatchInfo.BatchPartsInfo.Add(bpi);
        }
    }

    // From here, we need to serialize everything on disk

    // Generate the batch directory
    var batchDirectoryRoot = this.Options.BatchDirectory;
    var batchDirectoryName = string.Concat(DateTime.UtcNow.ToString("yyyy_MM_dd_ss"), Path.GetRandomFileName().Replace(".", ""));

    serverBatchInfo.DirectoryRoot = batchDirectoryRoot;
    serverBatchInfo.DirectoryName = batchDirectoryName;

    if (!Directory.Exists(serverBatchInfo.GetDirectoryFullPath()))
    {
        Directory.CreateDirectory(serverBatchInfo.GetDirectoryFullPath());
    }

    // If we have a snapshot we are raising the batches downloading process that will occurs
    await this.InterceptAsync(new HttpBatchesDownloadingArgs(context, serverBatchInfo, this.GetServiceHost()), progress, cancellationToken).ConfigureAwait(false);

    // function used to download one part
    var dl = new Func<BatchPartInfo, Task>(async (bpi) =>
    {
        if (cancellationToken.IsCancellationRequested)
        {
            return;
        }

        var changesToSend3 = new HttpMessageGetMoreChangesRequest(context, bpi.Index);

        var serializer3 = this.SerializerFactory.GetSerializer<HttpMessageGetMoreChangesRequest>();
        var binaryData3 = await serializer3.SerializeAsync(changesToSend3).ConfigureAwait(false);
        var step3 = HttpStep.GetMoreChanges;

        await this.InterceptAsync(new HttpGettingServerChangesRequestArgs(bpi.Index, serverBatchInfo.BatchPartsInfo.Count, summaryResponseContent.SyncContext, this.GetServiceHost()), progress, cancellationToken).ConfigureAwait(false);

        // Raise get changes request
        context.ProgressPercentage = initialPctProgress + ((bpi.Index + 1) * 0.2d / serverBatchInfo.BatchPartsInfo.Count);

        var response = await this.httpRequestHandler.ProcessRequestAsync(
            this.HttpClient, context, this.ServiceUri, binaryData3, step3,
            this.SerializerFactory, this.Converter, 0, this.SyncPolicy, cancellationToken, progress).ConfigureAwait(false);

        // Non-json wire formats are deserialized here and re-written locally as json;
        // a plain json payload is streamed straight to disk in the else branch below.
        if (this.SerializerFactory.Key != "json")
        {
            var webSerializer = this.SerializerFactory.GetSerializer<HttpMessageSendChangesResponse>();
            using var responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false);
            var getMoreChanges = await webSerializer.DeserializeAsync(responseStream);
            context = getMoreChanges.SyncContext;

            if (getMoreChanges != null && getMoreChanges.Changes != null && getMoreChanges.Changes.HasRows)
            {
                var localSerializer = new LocalJsonSerializer();

                // Wire the row-writing interceptor only when at least one is registered.
                var interceptorsWriting = this.interceptors.GetInterceptors<SerializingRowArgs>();
                if (interceptorsWriting.Count > 0)
                {
                    localSerializer.OnWritingRow(async (syncTable, rowArray) =>
                    {
                        var args = new SerializingRowArgs(context, syncTable, rowArray);
                        await this.InterceptAsync(args, progress, cancellationToken).ConfigureAwait(false);
                        return (args.Result);
                    });
                }

                // Should have only one table
                var table = getMoreChanges.Changes.Tables[0];
                var schemaTable = DbSyncAdapter.CreateChangesTable(schema.Tables[table.TableName, table.SchemaName]);

                var fullPath = Path.Combine(serverBatchInfo.GetDirectoryFullPath(), bpi.FileName);

                // open the file and write table header
                await localSerializer.OpenFileAsync(fullPath, schemaTable).ConfigureAwait(false);

                foreach (var row in table.Rows)
                {
                    await localSerializer.WriteRowToFileAsync(new SyncRow(schemaTable, row), schemaTable).ConfigureAwait(false);
                }

                // Close file
                await localSerializer.CloseFileAsync(fullPath, schemaTable).ConfigureAwait(false);
            }
        }
        else
        {
            // Serialize
            await SerializeAsync(response, bpi.FileName, serverBatchInfo.GetDirectoryFullPath(), this).ConfigureAwait(false);
        }

        // Raise response from server containing a batch changes
        await this.InterceptAsync(new HttpGettingServerChangesResponseArgs(serverBatchInfo, bpi.Index, bpi.RowsCount, summaryResponseContent.SyncContext, this.GetServiceHost()), progress, cancellationToken).ConfigureAwait(false);
    });

    // Parrallel download of all bpis (which will launch the delete directory on the server side)
    await serverBatchInfo.BatchPartsInfo.ForEachAsync(bpi => dl(bpi), this.MaxDownladingDegreeOfParallelism).ConfigureAwait(false);

    // Send order of end of download
    var lastBpi = serverBatchInfo.BatchPartsInfo.FirstOrDefault(bpi => bpi.IsLastBatch);

    if (lastBpi != null)
    {
        var endOfDownloadChanges = new HttpMessageGetMoreChangesRequest(context, lastBpi.Index);

        var serializerEndOfDownloadChanges = this.SerializerFactory.GetSerializer<HttpMessageGetMoreChangesRequest>();
        var binaryData3 = await serializerEndOfDownloadChanges.SerializeAsync(endOfDownloadChanges).ConfigureAwait(false);

        var endResponse = await this.httpRequestHandler.ProcessRequestAsync(
            this.HttpClient, context, this.ServiceUri, binaryData3, HttpStep.SendEndDownloadChanges,
            this.SerializerFactory, this.Converter, 0, this.SyncPolicy, cancellationToken, progress).ConfigureAwait(false);

        // Deserialize response incoming from server
        // This is the last response
        // Should contains step HttpStep.SendEndDownloadChanges
        using var streamResponse = await endResponse.Content.ReadAsStreamAsync().ConfigureAwait(false);
        var endResponseSerializer = this.SerializerFactory.GetSerializer<HttpMessageSendChangesResponse>();
        var endResponseContent = await endResponseSerializer.DeserializeAsync(streamResponse);
        context = endResponseContent.SyncContext;
    }

    // generate the new scope item
    this.CompleteTime = DateTime.UtcNow;

    await this.InterceptAsync(new HttpBatchesDownloadedArgs(summaryResponseContent, summaryResponseContent.SyncContext, this.GetServiceHost()), progress, cancellationToken).ConfigureAwait(false);

    var serverSyncChanges = new ServerSyncChanges(
        summaryResponseContent.RemoteClientTimestamp,
        serverBatchInfo,
        summaryResponseContent.ServerChangesSelected
    );

    return (context, serverSyncChanges, summaryResponseContent.ClientChangesApplied, summaryResponseContent.ConflictResolutionPolicy);
}
/// <summary>
/// Sends a single empty change batch to the server (the client has nothing to
/// upload here), then downloads every server change batch to a new local batch
/// directory and returns the resulting <c>ServerSyncChanges</c>.
/// NOTE(review): the declaration's return type is not visible in this chunk —
/// presumably Task&lt;ServerSyncChanges&gt;; confirm against the full file.
/// </summary>
GetChangesAsync(ClientScopeInfo clientScopeInfo, SyncParameters parameters = default,
    DbConnection connection = default, DbTransaction transaction = default,
    CancellationToken cancellationToken = default, IProgress<ProgressArgs> progress = null)
{
    var context = new SyncContext(Guid.NewGuid(), clientScopeInfo.Name);

    if (parameters != null)
    {
        context.Parameters = parameters;
    }

    SyncSet schema;
    ServerScopeInfo serverScopeInfo;

    // Need the server scope
    (context, serverScopeInfo) = await this.InternalGetServerScopeInfoAsync(context, clientScopeInfo.Setup, connection, transaction, cancellationToken, progress).ConfigureAwait(false);

    schema = serverScopeInfo.Schema;
    schema.EnsureSchema();

    // Refresh the client scope with the server's schema / setup / version.
    clientScopeInfo.Schema = schema;
    clientScopeInfo.Setup = serverScopeInfo.Setup;
    clientScopeInfo.Version = serverScopeInfo.Version;

    // Single, empty "last batch" request: nothing to upload, just ask for changes.
    var changesToSend = new HttpMessageSendChangesRequest(context, clientScopeInfo);
    var containerSet = new ContainerSet();
    changesToSend.Changes = containerSet;
    changesToSend.IsLastBatch = true;
    changesToSend.BatchIndex = 0;
    changesToSend.BatchCount = 0;

    context.ProgressPercentage += 0.125;

    await this.InterceptAsync(new HttpSendingClientChangesRequestArgs(changesToSend, 0, 0, this.GetServiceHost()), progress, cancellationToken).ConfigureAwait(false);

    // serialize message
    var serializer = this.SerializerFactory.GetSerializer<HttpMessageSendChangesRequest>();
    var binaryData = await serializer.SerializeAsync(changesToSend);

    var response = await this.httpRequestHandler.ProcessRequestAsync
        (this.HttpClient, context, this.ServiceUri, binaryData, HttpStep.SendChangesInProgress,
         this.SerializerFactory, this.Converter, this.Options.BatchSize, this.SyncPolicy, cancellationToken, progress).ConfigureAwait(false);

    // --------------------------------------------------------------
    // STEP 2 : Receive everything from the server side
    // --------------------------------------------------------------

    // Now we have sent all the datas to the server and now :
    // We have a FIRST response from the server with new datas
    // 1) Could be the only one response (enough or InMemory is set on the server side)
    // 2) Could bt the first response and we need to download all batchs

    context.SyncStage = SyncStage.ChangesSelecting;
    var initialPctProgress = 0.55;
    context.ProgressPercentage = initialPctProgress;

    // Create the BatchInfo
    var serverBatchInfo = new BatchInfo(schema);

    HttpMessageSummaryResponse summaryResponseContent = null;

    // Deserialize response incoming from server
    using (var streamResponse = await response.Content.ReadAsStreamAsync().ConfigureAwait(false))
    {
        var responseSerializer = this.SerializerFactory.GetSerializer<HttpMessageSummaryResponse>();
        summaryResponseContent = await responseSerializer.DeserializeAsync(streamResponse);
    }

    serverBatchInfo.RowsCount = summaryResponseContent.BatchInfo.RowsCount;
    serverBatchInfo.Timestamp = summaryResponseContent.RemoteClientTimestamp;
    context = summaryResponseContent.SyncContext;

    if (summaryResponseContent.BatchInfo.BatchPartsInfo != null)
    {
        foreach (var bpi in summaryResponseContent.BatchInfo.BatchPartsInfo)
        {
            serverBatchInfo.BatchPartsInfo.Add(bpi);
        }
    }

    //-----------------------
    // In Batch Mode
    //-----------------------
    // From here, we need to serialize everything on disk

    // Generate the batch directory
    var batchDirectoryRoot = this.Options.BatchDirectory;
    var batchDirectoryName = string.Concat(DateTime.UtcNow.ToString("yyyy_MM_dd_ss"), Path.GetRandomFileName().Replace(".", ""));

    serverBatchInfo.DirectoryRoot = batchDirectoryRoot;
    serverBatchInfo.DirectoryName = batchDirectoryName;

    if (!Directory.Exists(serverBatchInfo.GetDirectoryFullPath()))
    {
        Directory.CreateDirectory(serverBatchInfo.GetDirectoryFullPath());
    }

    // hook to get the last batch part info at the end
    // NOTE(review): First(...) throws when no part is flagged IsLastBatch — presumably
    // the server always marks one; confirm against the server implementation.
    var bpis = serverBatchInfo.BatchPartsInfo.Where(bpi => !bpi.IsLastBatch);
    var lstbpi = serverBatchInfo.BatchPartsInfo.First(bpi => bpi.IsLastBatch);

    // function used to download one part
    var dl = new Func<BatchPartInfo, Task>(async (bpi) =>
    {
        var changesToSend3 = new HttpMessageGetMoreChangesRequest(context, bpi.Index);

        var serializer3 = this.SerializerFactory.GetSerializer<HttpMessageGetMoreChangesRequest>();
        var binaryData3 = await serializer3.SerializeAsync(changesToSend3).ConfigureAwait(false);
        var step3 = HttpStep.GetMoreChanges;

        await this.InterceptAsync(new HttpGettingServerChangesRequestArgs(bpi.Index, serverBatchInfo.BatchPartsInfo.Count, summaryResponseContent.SyncContext, this.GetServiceHost()), progress, cancellationToken).ConfigureAwait(false);

        // Raise get changes request
        context.ProgressPercentage = initialPctProgress + ((bpi.Index + 1) * 0.2d / serverBatchInfo.BatchPartsInfo.Count);

        var response = await this.httpRequestHandler.ProcessRequestAsync(
            this.HttpClient, context, this.ServiceUri, binaryData3, step3,
            this.SerializerFactory, this.Converter, 0, this.SyncPolicy, cancellationToken, progress).ConfigureAwait(false);

        // Serialize
        await SerializeAsync(response, bpi.FileName, serverBatchInfo.GetDirectoryFullPath(), this).ConfigureAwait(false);

        // Raise response from server containing a batch changes
        await this.InterceptAsync(new HttpGettingServerChangesResponseArgs(serverBatchInfo, bpi.Index, bpi.RowsCount, summaryResponseContent.SyncContext, this.GetServiceHost()), progress, cancellationToken).ConfigureAwait(false);

        response.Dispose();
    });

    // Parrallel download of all bpis except the last one (which will launch the delete directory on the server side)
    await bpis.ForEachAsync(bpi => dl(bpi), this.MaxDownladingDegreeOfParallelism).ConfigureAwait(false);

    // Download last batch part that will launch the server deletion of the tmp dir
    await dl(lstbpi).ConfigureAwait(false);

    // generate the new scope item
    this.CompleteTime = DateTime.UtcNow;

    // Reaffect context
    context = summaryResponseContent.SyncContext;

    return (new ServerSyncChanges(summaryResponseContent.RemoteClientTimestamp, serverBatchInfo, summaryResponseContent.ServerChangesSelected));
}
/// <summary>
/// Moves the currently selected command row up or down inside dtCommandAssembly
/// (direction read from the clicked button's Tag: "UP" or "Down"), keeps the moved
/// row selected in the grid, then renumbers the command list.
/// </summary>
private void btnUP_Click(object sender, EventArgs e)
{
    // Nothing selected: nothing to move.
    if (dgvCommandList.SelectedCells.Count == 0)
        return;

    try
    {
        var containerSet = new ContainerSet();
        int rowIndex = dgvCommandList.SelectedCells[0].OwningRow.Index;
        var currentDataRowView = (DataRowView)dgvCommandList.Rows[rowIndex].DataBoundItem;

        switch (((Button)sender).Tag.ToString())
        {
            case "Down":
                containerSet.MoveRow(ref dtCommandAssembly, currentDataRowView.Row, ContainerSet.DataTableMoveRow.Down);
                dgvCommandList.ClearSelection();
                // Follow the row to its new position; clamp at the last row.
                if (rowIndex + 1 < dgvCommandList.Rows.Count)
                    dgvCommandList.Rows[rowIndex + 1].Selected = true;
                else
                    dgvCommandList.Rows[rowIndex].Selected = true;
                break;
            case "UP":
                containerSet.MoveRow(ref dtCommandAssembly, currentDataRowView.Row, ContainerSet.DataTableMoveRow.Up);
                dgvCommandList.ClearSelection();
                // Follow the row to its new position; clamp at the first row.
                if (rowIndex - 1 >= 0)
                    dgvCommandList.Rows[rowIndex - 1].Selected = true;
                else
                    dgvCommandList.Rows[rowIndex].Selected = true;
                break;
        }

        CommandListReNo();
    }
    catch (Exception ex)
    {
        logger.Error(ex.ToString());
        // Fix: rethrow the original exception instead of wrapping it in a new
        // Exception(ex.ToString()), which discarded the type and the stack trace.
        throw;
    }
}
/// <summary>
/// Rebuilds the dynamic parameter UI (labels + NumericUpDown / ComboBox inputs)
/// in tlpAssemblyUI from the rows of dtCommandParameter, then initializes the
/// manual command text (txbManually) for the selected device family.
/// </summary>
private void CreateUI()
{
    tlpAssemblyUI.Controls.Clear();
    txbManually.Text = string.Empty;

    // No parameter rows loaded: leave the panel empty.
    if (dtCommandParameter.Rows.Count == 0)
    {
        return;
    }

    // NOTE(review): this clear/reset pair repeats the two statements above.
    tlpAssemblyUI.Controls.Clear();
    txbManually.Text = string.Empty;

    Label lbTemp;
    ComboBox cmbTemp;
    NumericUpDown nudTemp;
    string[] strsTemp;
    string[] strsTemps;
    StringBuilder sbTemp;                    // accumulates default values, comma separated
    ContainerSet container = new ContainerSet();
    string strCommandFormatParameter = string.Empty;   // NOTE(review): never read afterwards
    ToolTip toolTip = new ToolTip();

    try
    {
        sbTemp = new StringBuilder();

        // One label + one input control per parameter row.
        foreach (DataRow dr in dtCommandParameter.Rows)
        {
            // A "Null" Parameter_ID marks the end of the parameter list.
            if (dr["Parameter_ID"].ToString().Equals("Null"))
            {
                break;
            }

            lbTemp = new Label();
            lbTemp.Name = dr["UI_Name"].ToString();
            lbTemp.Dock = DockStyle.Fill;
            lbTemp.Text = dr["Parameter_ID"].ToString();
            lbTemp.TextAlign = ContentAlignment.MiddleRight;
            toolTip.SetToolTip(lbTemp, dr["Parameter_Description"].ToString());
            tlpAssemblyUI.Controls.Add(lbTemp);

            // Empty Data_Value -> numeric parameter; otherwise a fixed choice list.
            if (dr["Data_Value"].ToString().Equals(string.Empty))
            {
                nudTemp = new NumericUpDown();
                nudTemp.Maximum = Convert.ToInt32(dr["Max_Value"].ToString());
                nudTemp.Minimum = Convert.ToInt32(dr["Min_Value"].ToString());
                nudTemp.Value = Convert.ToInt32(dr["Default_Value"].ToString());
                nudTemp.Dock = DockStyle.Fill;

                // Is_Fill = "Y" with a width > 1 means zero-padded output; the pad
                // width is stashed in Tag for FormulaCalculation to reuse.
                if (Convert.ToInt32(dr["Values_length"].ToString()) > 1 && dr["Is_Fill"].ToString() == "Y")
                {
                    sbTemp.Append(Convert.ToInt32(dr["Default_Value"].ToString()).ToString("D" + Convert.ToInt32(dr["Values_length"].ToString()).ToString()));
                    sbTemp.Append(",");
                    nudTemp.Tag = Convert.ToInt32(dr["Values_length"].ToString());
                }
                else
                {
                    sbTemp.Append(dr["Default_Value"].ToString());
                    sbTemp.Append(",");
                }

                nudTemp.ValueChanged += new EventHandler(numericUpDown_ValueChanged);
                toolTip.SetToolTip(nudTemp, dr["Parameter_Description"].ToString());
                tlpAssemblyUI.Controls.Add(nudTemp);
            }
            else
            {
                strsTemp = dr["Data_Value"].ToString().Split(',');
                cmbTemp = new ComboBox();
                cmbTemp.Items.AddRange(strsTemp);
                cmbTemp.Dock = DockStyle.Fill;
                cmbTemp.AutoCompleteMode = AutoCompleteMode.SuggestAppend;
                cmbTemp.AutoCompleteSource = AutoCompleteSource.ListItems;
                cmbTemp.SelectedIndex = 0;

                // "EMPTY" is a placeholder entry that maps to an empty value.
                sbTemp.Append(cmbTemp.Text.ToUpper().Equals("EMPTY") ? string.Empty : cmbTemp.Text);
                sbTemp.Append(",");

                cmbTemp.SelectedIndexChanged += new EventHandler(comboBox_DropDownClosed);
                toolTip.SetToolTip(cmbTemp, dr["Parameter_Description"].ToString());
                tlpAssemblyUI.Controls.Add(cmbTemp);
            }
        }

        // No input controls were generated: format the command without parameters.
        if (tlpAssemblyUI.Controls.Count == 0)
        {
            switch (lsbDeviceName.SelectedValue.ToString().Split(',')[0].ToString())
            {
                case "SANWA":
                    txbManually.Text = string.Format(dtCommandParameter.Rows[0]["code_format"].ToString(), lsbDeviceName.Text.Split(',')[1].ToString(), string.Empty);
                    break;
                case "TDK":
                    txbManually.Text = dtCommandParameter.Rows[0]["code_format"].ToString();
                    break;
                case "ATEL":
                    txbManually.Text = string.Format(dtCommandParameter.Rows[0]["code_format"].ToString(), lsbDeviceName.Text.Split(',')[1].ToString());
                    break;
                case "HST":
                case "COGNEX":
                    txbManually.Text = dtCommandParameter.Rows[0]["code_format"].ToString();
                    break;
            }
        }
        else
        {
            // Same device dispatch, but with the collected default values appended
            // (sbTemp always ends in ',' here, so the trailing trim is safe).
            switch (lsbDeviceName.SelectedValue.ToString().Split(',')[0].ToString())
            {
                case "SANWA":
                    txbManually.Text = string.Format(dtCommandParameter.Rows[0]["code_format"].ToString(), lsbDeviceName.Text.Split(',')[1].ToString(), string.Empty)
                        + container.StringFormat(dtCommandParameter.Select(), sbTemp.ToString().TrimEnd(',').Split(','));
                    break;
                case "TDK":
                    txbManually.Text = dtCommandParameter.Rows[0]["code_format"].ToString();
                    break;
                case "ATEL":
                    // ATEL expects the device code as the first format argument.
                    strsTemps = (lsbDeviceName.Text.Split(',')[1].ToString() + "," + sbTemp.ToString().Substring(0, sbTemp.ToString().Length - 1)).Split(',');
                    txbManually.Text = string.Format(dtCommandParameter.Rows[0]["code_format"].ToString(), strsTemps);
                    break;
                case "HST":
                case "COGNEX":
                    txbManually.Text = string.Format(dtCommandParameter.Rows[0]["code_format"].ToString(), sbTemp.ToString().Substring(0, sbTemp.ToString().Length - 1).Split(','));
                    break;
            }
        }
    }
    catch (Exception ex)
    {
        logger.Error(ex.ToString());
        // NOTE(review): wrapping in a new Exception(ex.ToString()) loses the original
        // exception type and stack trace; `throw;` would preserve both.
        throw new Exception(ex.ToString());
    }
}
/// <summary>
/// Deserializes a <see cref="ContainerSet"/> from a file on disk, optionally via an
/// orchestrator interceptor or a boiler-plate envelope type for older payloads.
/// </summary>
/// <param name="fileName">Name of the file to read.</param>
/// <param name="directoryFullPath">Directory containing the file.</param>
/// <param name="serializerFactory">Optional serializer factory; defaults to the JSON serializer.</param>
/// <param name="orchestrator">Optional orchestrator whose DeserializingSet interceptors may supply the result.</param>
/// <returns>The deserialized container set.</returns>
/// <exception cref="ArgumentNullException">When fileName or directoryFullPath is null or empty.</exception>
/// <exception cref="MissingFileException">When the file does not exist.</exception>
private async Task<ContainerSet> DeserializeAsync(string fileName, string directoryFullPath, ISerializerFactory serializerFactory = default, BaseOrchestrator orchestrator = null)
{
    // Fix: pass the parameter NAME to ArgumentNullException, not its value —
    // the value is null or empty here, which produced a useless message.
    if (string.IsNullOrEmpty(fileName))
        throw new ArgumentNullException(nameof(fileName));

    if (string.IsNullOrEmpty(directoryFullPath))
        throw new ArgumentNullException(nameof(directoryFullPath));

    var fullPath = Path.Combine(directoryFullPath, fileName);

    if (!File.Exists(fullPath))
        throw new MissingFileException(fullPath);

    // backward compatibility
    if (serializerFactory == default)
        serializerFactory = SerializersCollection.JsonSerializer;

    // backward compatibility
    if (this.SerializedType == default)
        this.SerializedType = typeof(ContainerSet);

    Debug.WriteLine($"Deserialize file {fileName}");

    using var fs = new FileStream(fullPath, FileMode.Open, FileAccess.Read);

    ContainerSet set = null;

    // An interceptor may take over deserialization entirely.
    if (orchestrator != null)
    {
        var interceptorArgs = new DeserializingSetArgs(orchestrator.GetContext(), fs, serializerFactory, fileName, directoryFullPath);
        await orchestrator.InterceptAsync(interceptorArgs, default);
        set = interceptorArgs.Result;
    }

    if (set == null)
    {
        if (this.SerializedType == typeof(ContainerSet))
        {
            var serializer = serializerFactory.GetSerializer<ContainerSet>();
            set = await serializer.DeserializeAsync(fs);
        }
        else
        {
            // Older payloads wrap the changes in a boiler-plate envelope.
            var serializer = serializerFactory.GetSerializer<ContainerSetBoilerPlate>();
            var jobject = await serializer.DeserializeAsync(fs);
            set = jobject.Changes;
        }
    }

    return set;
}