/// <summary>
/// Enumerate all internal changes, in batch mode
/// </summary>
internal async Task<(BatchInfo, ChangesSelected)> EnumerateChangesInBatchesInternal(
    SyncContext context, ScopeInfo scopeInfo, int downloadBatchSizeInKB, DmSet configTables,
    string batchDirectory, ConflictResolutionPolicy policy, ICollection<FilterClause> filters)
{
    DmTable dmTable = null;

    // running memory size of the rows buffered so far, in KB
    double memorySizeFromDmRows = 0d;

    int batchIndex = 0;

    // this batch info won't be held in memory, it will be batched to disk
    BatchInfo batchInfo = new BatchInfo
    {
        // directory where all files will be stored
        Directory = BatchInfo.GenerateNewDirectoryName(),

        // not in memory, since we serialize all files to the tmp directory
        InMemory = false
    };

    // Create stats object to store changes count
    ChangesSelected changes = new ChangesSelected();

    using (var connection = this.CreateConnection())
    {
        try
        {
            // Open the connection
            await connection.OpenAsync();

            using (var transaction = connection.BeginTransaction())
            {
                // create the in-memory changes set
                DmSet changesSet = new DmSet(configTables.DmSetName);

                foreach (var tableDescription in configTables.Tables)
                {
                    // if we are in the upload stage, skip download-only tables
                    if (context.SyncWay == SyncWay.Upload && tableDescription.SyncDirection == SyncDirection.DownloadOnly)
                        continue;

                    // if we are in the download stage, skip upload-only tables
                    if (context.SyncWay == SyncWay.Download && tableDescription.SyncDirection == SyncDirection.UploadOnly)
                        continue;

                    var builder = this.GetDatabaseBuilder(tableDescription);
                    var syncAdapter = builder.CreateSyncAdapter(connection, transaction);
                    syncAdapter.ConflictApplyAction = SyncConfiguration.GetApplyAction(policy);

                    // raise the before event
                    context.SyncStage = SyncStage.TableChangesSelecting;
                    var beforeArgs = new TableChangesSelectingEventArgs(this.ProviderTypeName, context.SyncStage, tableDescription.TableName);
                    this.TryRaiseProgressEvent(beforeArgs, this.TableChangesSelecting);

                    // Get the command: filtered if we are on the server side and filters apply to this table
                    DbCommand selectIncrementalChangesCommand;
                    DbCommandType dbCommandType;

                    if (this.CanBeServerProvider && context.Parameters != null && context.Parameters.Count > 0
                        && filters != null && filters.Count > 0)
                    {
                        var filtersName = filters
                            .Where(f => f.TableName.Equals(tableDescription.TableName, StringComparison.InvariantCultureIgnoreCase))
                            .Select(f => f.ColumnName);

                        if (filtersName.Any())
                        {
                            dbCommandType = DbCommandType.SelectChangesWitFilters;
                            selectIncrementalChangesCommand = syncAdapter.GetCommand(dbCommandType, filtersName);
                        }
                        else
                        {
                            dbCommandType = DbCommandType.SelectChanges;
                            selectIncrementalChangesCommand = syncAdapter.GetCommand(dbCommandType);
                        }
                    }
                    else
                    {
                        dbCommandType = DbCommandType.SelectChanges;
                        selectIncrementalChangesCommand = syncAdapter.GetCommand(dbCommandType);
                    }

                    // check the command before using it
                    if (selectIncrementalChangesCommand == null)
                        throw new Exception("Missing command 'SelectIncrementalChangesCommand'");

                    // Deriving parameters: pass the command type actually resolved above,
                    // so a filtered command also gets its filter parameters
                    syncAdapter.SetCommandParameters(dbCommandType, selectIncrementalChangesCommand);

                    dmTable = BuildChangesTable(tableDescription.TableName, configTables);

                    // Set common parameters
                    SetSelectChangesCommonParameters(context, scopeInfo, selectIncrementalChangesCommand);

                    // Set filter parameters, if any (server side only)
                    if (this.CanBeServerProvider && context.Parameters != null && context.Parameters.Count > 0
                        && filters != null && filters.Count > 0)
                    {
                        var filterTable = filters
                            .Where(f => f.TableName.Equals(tableDescription.TableName, StringComparison.InvariantCultureIgnoreCase))
                            .ToList();

                        foreach (var filter in filterTable)
                        {
                            var parameter = context.Parameters.FirstOrDefault(p =>
                                p.ColumnName.Equals(filter.ColumnName, StringComparison.InvariantCultureIgnoreCase) &&
                                p.TableName.Equals(filter.TableName, StringComparison.InvariantCultureIgnoreCase));

                            if (parameter != null)
                                DbManager.SetParameterValue(selectIncrementalChangesCommand, parameter.ColumnName, parameter.Value);
                        }
                    }

                    this.AddTrackingColumns<int>(dmTable, "sync_row_is_tombstone");

                    // Statistics
                    TableChangesSelected tableChangesSelected = new TableChangesSelected
                    {
                        TableName = tableDescription.TableName
                    };
                    changes.TableChangesSelected.Add(tableChangesSelected);

                    // Get the reader
                    using (var dataReader = selectIncrementalChangesCommand.ExecuteReader())
                    {
                        while (dataReader.Read())
                        {
                            DmRow dmRow = CreateRowFromReader(dataReader, dmTable);
                            DmRowState state = GetStateFromDmRow(dmRow, scopeInfo);

                            // If the row is not deleted, inserted or modified, skip it
                            if (state != DmRowState.Deleted && state != DmRowState.Modified && state != DmRowState.Added)
                                continue;

                            var fieldsSize = DmTableSurrogate.GetRowSizeFromDataRow(dmRow);
                            var dmRowSize = fieldsSize / 1024d;

                            // A single row larger than the batch size can never fit in a batch
                            if (dmRowSize > downloadBatchSizeInKB)
                                throw new Exception($"Row is too big ({dmRowSize} kb) for the current Configuration.DownloadBatchSizeInKB ({downloadBatchSizeInKB} kb). Aborting Sync...");

                            // Calculate the new memory size
                            memorySizeFromDmRows = memorySizeFromDmRows + dmRowSize;

                            // add the row
                            dmTable.Rows.Add(dmRow);
                            tableChangesSelected.TotalChanges++;

                            // accept changes before modifying the row state
                            dmRow.AcceptChanges();

                            // Set the correct state to be applied
                            if (state == DmRowState.Deleted)
                            {
                                dmRow.Delete();
                                tableChangesSelected.Deletes++;
                            }
                            else if (state == DmRowState.Added)
                            {
                                dmRow.SetAdded();
                                tableChangesSelected.Inserts++;
                            }
                            else if (state == DmRowState.Modified)
                            {
                                dmRow.SetModified();
                                tableChangesSelected.Updates++;
                            }

                            // We exceeded the memory size: flush the buffered rows to a batch file
                            if (memorySizeFromDmRows > downloadBatchSizeInKB)
                            {
                                // Since we don't need this column anymore, remove it
                                this.RemoveTrackingColumns(dmTable, "sync_row_is_tombstone");

                                changesSet.Tables.Add(dmTable);

                                // generate the batch part info
                                batchInfo.GenerateBatchInfo(batchIndex, changesSet, batchDirectory);

                                // increment the batch index
                                batchIndex++;
                                changesSet.Clear();

                                // Recreate an empty DmSet, then a dmTable clone
                                changesSet = new DmSet(configTables.DmSetName);
                                dmTable = dmTable.Clone();
                                this.AddTrackingColumns<int>(dmTable, "sync_row_is_tombstone");

                                // Reset the row memory size
                                memorySizeFromDmRows = 0d;

                                // add stats for a SyncProgress event
                                context.SyncStage = SyncStage.TableChangesSelected;
                                var args2 = new TableChangesSelectedEventArgs(this.ProviderTypeName, SyncStage.TableChangesSelected, tableChangesSelected);
                                this.TryRaiseProgressEvent(args2, this.TableChangesSelected);
                            }
                        }

                        // Since we don't need this column anymore, remove it
                        this.RemoveTrackingColumns(dmTable, "sync_row_is_tombstone");

                        context.SyncStage = SyncStage.TableChangesSelected;

                        changesSet.Tables.Add(dmTable);

                        // Reset the row memory size
                        memorySizeFromDmRows = 0d;

                        // Event progress
                        var args = new TableChangesSelectedEventArgs(this.ProviderTypeName, SyncStage.TableChangesSelected, tableChangesSelected);
                        this.TryRaiseProgressEvent(args, this.TableChangesSelected);
                    }
                }

                // We are in batch mode, and we are at the last batch part info
                if (changesSet != null && changesSet.HasTables && changesSet.HasChanges())
                {
                    var batchPartInfo = batchInfo.GenerateBatchInfo(batchIndex, changesSet, batchDirectory);

                    if (batchPartInfo != null)
                        batchPartInfo.IsLastBatch = true;
                }

                transaction.Commit();
            }
        }
        finally
        {
            if (connection != null && connection.State == ConnectionState.Open)
                connection.Close();
        }
    }

    return (batchInfo, changes);
}
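
// ------------------------------------------------------------------------------
// Illustrative usage sketch (an assumption, not part of the provider's public
// surface): a caller owning a configured provider instance could drive the
// batched enumeration like this. All argument values below are hypothetical.
//
//   var (batchInfo, changesSelected) = await this.EnumerateChangesInBatchesInternal(
//       context, scopeInfo,
//       downloadBatchSizeInKB: 500,             // flush a batch file every ~500 KB
//       configTables,
//       batchDirectory,
//       ConflictResolutionPolicy.ServerWins,
//       filters: null);                         // no server-side filters
//
//   // batchInfo.Directory now holds one serialized file per batch part;
//   // the part flagged IsLastBatch == true closes the set.
// ------------------------------------------------------------------------------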
private static void TestSerializerSize()
{
    DmTable dtClient = new DmTable("Client");
    DmColumn dcId = new DmColumn<int>("Id");
    DmColumn dcName = new DmColumn<string>("Name");
    DmColumn dcBirthdate = new DmColumn<DateTime>("Date");
    DmColumn dcMoney = new DmColumn<double>("Money");
    DmColumn dcInt = new DmColumn<long>("i64");
    DmColumn dcJob = new DmColumn<string>("Job");
    dtClient.Columns.AddRange(new[] { dcId, dcName, dcBirthdate, dcMoney, dcInt, dcJob });

    long rowsSize = 0;

    // Schema size: one surrogate per column
    foreach (var c in dtClient.Columns)
    {
        DmColumnSurrogate dcs = new DmColumnSurrogate(c);
        Console.WriteLine($"Column {dcs.ColumnName} Surrogate length : " + dcs.GetBytesLength());
        rowsSize += dcs.GetBytesLength();
    }

    var dmTableS = new DmTableSurrogate(dtClient);
    var dmTableSLength = dmTableS.GetEmptyBytesLength();
    Console.WriteLine("DmTableSurrogate length = " + dmTableSLength);
    rowsSize += dmTableSLength;

    // Create the Random once, outside the loop: re-seeding with DateTime.Now.Second
    // on every iteration would yield long runs of identical "random" values
    Random r = new Random();

    for (int i = 0; i < 50000; i++)
    {
        var newRow = dtClient.NewRow();

        var l = r.NextDouble() * 100;
        string name = new string('a', (int)Math.Ceiling(l));

        newRow["Id"] = 12;
        newRow["Name"] = name;
        newRow["Date"] = DateTime.Now;
        newRow["Money"] = l;

        var l2 = r.NextDouble() * 100;
        string job = new string('b', (int)Math.Ceiling(l2));

        newRow["Job"] = job;
        newRow["i64"] = l2;

        rowsSize += DmTableSurrogate.GetRowSizeFromDataRow(newRow);
        dtClient.Rows.Add(newRow);
    }

    var serializer = new DmSerializer();
    long dtSize = 0;

    if (File.Exists("dt.bin"))
        File.Delete("dt.bin");

    using (var fs = new FileStream("dt.bin", FileMode.OpenOrCreate, FileAccess.ReadWrite))
    {
        serializer.Serialize(new DmTableSurrogate(dtClient), fs);
        dtSize = fs.Length;
    }

    //using (var fs = new FileStream("dt.bin", FileMode.Open, FileAccess.Read))
    //{
    //    var ds = serializer.Deserialize<DmTableSurrogate>(fs);
    //}

    Console.WriteLine("Rows Size : " + rowsSize);
    Console.WriteLine("Table Size : " + dtSize);
}
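
/// <summary>
/// Hedged companion sketch (not in the original file): reads back the "dt.bin"
/// file written by TestSerializerSize and compares the on-disk size with the
/// deserialized surrogate's empty length. It relies only on calls already
/// present above (DmSerializer.Deserialize&lt;T&gt;, hinted at by the
/// commented-out block, and DmTableSurrogate.GetEmptyBytesLength); treat it
/// as illustrative, not a canonical API reference.
/// </summary>
private static void TestSerializerRoundtrip()
{
    var serializer = new DmSerializer();

    using (var fs = new FileStream("dt.bin", FileMode.Open, FileAccess.Read))
    {
        // Deserialize the surrogate that TestSerializerSize serialized
        var dts = serializer.Deserialize<DmTableSurrogate>(fs);

        Console.WriteLine("File length : " + fs.Length);
        Console.WriteLine("Deserialized surrogate empty length : " + dts.GetEmptyBytesLength());
    }
}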