/// <summary>
/// Event args raised while a database is being provisioned for sync;
/// carries the requested provision flags and the schema being applied.
/// </summary>
public DatabaseProvisioningArgs(SyncContext context, SyncProvision provision, DmSet schema, DbConnection connection, DbTransaction transaction)
    : base(context, connection, transaction)
{
    this.Provision = provision;
    this.Schema = schema;
}
/// <summary>
/// Create a new BatchPartInfo. When the part is not kept in memory,
/// the change set is serialized to <paramref name="fileName"/> first and
/// only the file reference is retained.
/// </summary>
internal static BatchPartInfo CreateBatchPartInfo(int batchIndex, DmSet changesSet, string fileName, Boolean isLastBatch, Boolean inMemory)
{
    BatchPartInfo bpi;

    if (inMemory)
    {
        // keep the whole change set in memory
        bpi = new BatchPartInfo { Set = changesSet };
    }
    else
    {
        // persist the change set on disk; only the file name is kept on the part
        BatchPart.Serialize(new DmSetSurrogate(changesSet), fileName);
        bpi = new BatchPartInfo { FileName = fileName };
    }

    bpi.Index = batchIndex;
    bpi.IsLastBatch = isLastBatch;

    // Even when the data itself lives on disk, always retain the table names
    bpi.Tables = changesSet.Tables.Select(t => t.TableName).ToArray();

    return bpi;
}
/// <summary>
/// Builds a three-table model (Menus / MenusCategories / MenusCategoriesRows)
/// wired together with composite foreign keys, for relation tests.
/// </summary>
private static DmSet BuildCompositeFKDatabaseModel()
{
    var set = new DmSet();

    var menus = new DmTable("Menus");
    menus.Columns.Add(new DmColumn<int>("Id"));
    menus.Columns.Add(new DmColumn<string>("Description"));
    menus.PrimaryKey = new DmKey(menus.Columns[0]);
    set.Tables.Add(menus);

    var categories = new DmTable("MenusCategories");
    categories.Columns.Add(new DmColumn<int>("Id"));
    categories.Columns.Add(new DmColumn<int>("MenuId"));
    categories.Columns.Add(new DmColumn<string>("Description"));
    // composite primary key: (Id, MenuId)
    categories.PrimaryKey = new DmKey(new[] { categories.Columns[0], categories.Columns[1] });
    set.Tables.Add(categories);

    // NOTE: "MenuCatogoryId" spelling is intentional — it must match the sample data
    var rows = new DmTable("MenusCategoriesRows");
    rows.Columns.Add(new DmColumn<int>("Id"));
    rows.Columns.Add(new DmColumn<int>("MenuId"));
    rows.Columns.Add(new DmColumn<int>("MenuCatogoryId"));
    rows.Columns.Add(new DmColumn<string>("Description"));
    rows.Columns.Add(new DmColumn<decimal>("Price"));
    // composite primary key: (Id, MenuId, MenuCatogoryId)
    rows.PrimaryKey = new DmKey(new[] { rows.Columns[0], rows.Columns[1], rows.Columns[2] });
    set.Tables.Add(rows);

    // Menus(Id) -> MenusCategories(MenuId)
    set.Relations.Add(new DmRelation("FK_MenusCategories_Menu", menus.Columns[0], categories.Columns[1]));

    // MenusCategories(Id, MenuId) -> MenusCategoriesRows(MenuCatogoryId, MenuId)
    set.Relations.Add(new DmRelation(
        "FK_MenusCategoriesRows_MenusCategories",
        new DmColumn[] { categories.Columns[0], categories.Columns[1] },
        new DmColumn[] { rows.Columns[2], rows.Columns[1] }));

    return set;
}
/// <summary>
/// Clones the original DmSet (schema only), then copies this surrogate's
/// row data into the clone.
/// </summary>
public DmSet ConvertToDmSet(DmSet originalDmSet)
{
    var clone = originalDmSet.Clone();
    this.ReadDataIntoDmSet(clone);
    return clone;
}
/// <summary>
/// Deserialize the BatchPartInfo WITHOUT its DmSet payload.
/// Returns null when the set is missing or has no "DotmimSync__BatchPartsInfo" table.
/// </summary>
public static BatchPartInfo DeserializeFromDmSet(DmSet set)
{
    if (set == null || !set.Tables.Contains("DotmimSync__BatchPartsInfo"))
        return null;

    var row = set.Tables["DotmimSync__BatchPartsInfo"].Rows[0];

    var bpi = new BatchPartInfo
    {
        Index = (int)row["Index"],
        FileName = row["FileName"] as string,
        IsLastBatch = (Boolean)row["IsLastBatch"]
    };

    if (row["Tables"] != null)
    {
        // table names are stored as a single '|'-separated string
        var joined = row["Tables"] as string;
        bpi.Tables = joined.Split(new char[] { '|' }, StringSplitOptions.RemoveEmptyEntries);
    }

    return bpi;
}
/// <summary>
/// Rebuilds the list of ScopeInfo stored in the "DotmimSync__ScopeInfo" table.
/// Returns null when the set is missing or has no scope table.
/// </summary>
public static List<ScopeInfo> DeserializeFromDmSet(DmSet set)
{
    if (set == null || !set.Tables.Contains("DotmimSync__ScopeInfo"))
        return null;

    var scopes = new List<ScopeInfo>();

    foreach (var row in set.Tables["DotmimSync__ScopeInfo"].Rows)
    {
        scopes.Add(new ScopeInfo
        {
            Id = (Guid)row["Id"],
            IsLocal = (bool)row["IsLocal"],
            IsNewScope = (bool)row["IsNewScope"],
            LastTimestamp = (long)row["LastTimestamp"],
            Name = row["Name"] as string
        });
    }

    return scopes;
}
/// <summary>
/// Ad-hoc test: serialize a DmSet surrogate, POST the bytes to a local sync
/// endpoint, then deserialize the response stream back into a DmSet.
/// </summary>
private async static Task TestWebPostStream()
{
    var dmSet = new DmSet();
    dmSet.Tables.Add(GetATestTable());

    var surrogate = new DmSetSurrogate(dmSet);
    var serializer = new DmSerializer();
    var payload = serializer.Serialize(surrogate);

    // NOTE(review): one HttpClient per call is acceptable for a one-shot test,
    // but production code should reuse a single instance.
    var client = new HttpClient();
    var target = new Uri("http://localhost:5000/api/sync");
    var response = await client.PostAsync(target, new ByteArrayContent(payload));

    if (response.IsSuccessStatusCode)
    {
        using (var stream = await response.Content.ReadAsStreamAsync())
        {
            var roundTripped = serializer.Deserialize<DmSetSurrogate>(stream);
            var newDs = roundTripped.ConvertToDmSet();
        }
    }
}
/// <summary>
/// Generate an empty BatchInfo (no changes selected).
/// </summary>
internal (BatchInfo, ChangesSelected) GetEmptyChanges(SyncContext context, ScopeInfo scopeInfo, int downloadBatchSizeInKB, string batchDirectory)
{
    // batching is enabled when a positive download batch size is configured
    var isBatched = downloadBatchSizeInKB > 0;

    // the in-memory changes set (intentionally empty)
    DmSet changesSet = new DmSet(SyncConfiguration.DMSET_NAME);

    var batchInfo = new BatchInfo { InMemory = !isBatched };

    // a directory is only needed when parts are serialized to disk
    if (isBatched)
        batchInfo.Directory = BatchInfo.GenerateNewDirectoryName();

    // generate the batch part info; GenerateBatchInfo may return null when the
    // change set contains no rows (as this empty one does), so guard before
    // flagging the last batch — previously this could throw a NullReferenceException
    var bpi = batchInfo.GenerateBatchInfo(0, changesSet, batchDirectory);

    if (bpi != null)
        bpi.IsLastBatch = true;

    return (batchInfo, new ChangesSelected());
}
/// <summary>
/// Test fixture setup: builds a Client / TypeClient schema plus the
/// composite-FK sample model with data.
/// </summary>
public DmRelationTests()
{
    _set = new DmSet();

    clientTable = new DmTable("Client");
    clientTypeTable = new DmTable("TypeClient");
    _set.Tables.Add(clientTable);
    _set.Tables.Add(clientTypeTable);

    var clientId = new DmColumn<int>("ClientId");
    var clientType = new DmColumn<int>("ClientType");
    var clientName = new DmColumn<String>("ClientName");
    clientTable.Columns.Add(clientId);
    clientTable.Columns.Add(clientType);
    clientTable.Columns.Add(clientName);
    clientTable.PrimaryKey = new DmKey(clientId);

    var typeId = new DmColumn<int>("TypeId");
    var typeName = new DmColumn<string>("TypeName");
    clientTypeTable.Columns.Add(typeId);
    clientTypeTable.Columns.Add(typeName);
    clientTypeTable.PrimaryKey = new DmKey(typeId);

    _compositeFkSet = BuildCompositeFKDatabaseModel();
    BuilCompositeFKdSampleData(_compositeFkSet);
}
/// <summary>
/// Generate an empty BatchInfo (no changes selected).
/// </summary>
internal (BatchInfo, ChangesSelected) GetEmptyChanges(SyncContext context, ScopeInfo scopeInfo)
{
    // Get config
    var configuration = GetCacheConfiguration();
    var isBatched = configuration.DownloadBatchSizeInKB > 0;

    // the in-memory changes set (intentionally empty)
    DmSet changesSet = new DmSet(configuration.ScopeSet.DmSetName);

    var batchInfo = new BatchInfo { InMemory = !isBatched };

    // BUGFIX: the directory is needed precisely when batching IS enabled
    // (parts are serialized to disk); the previous condition (!isBatched)
    // was inverted — compare the sibling overload taking batchDirectory.
    if (isBatched)
        batchInfo.Directory = BatchInfo.GenerateNewDirectoryName();

    // generate the batch part info; GenerateBatchInfo may return null when the
    // change set contains no rows, so guard before flagging the last batch
    var bpi = batchInfo.GenerateBatchInfo(0, changesSet, configuration.BatchDirectory);

    if (bpi != null)
        bpi.IsLastBatch = true;

    return (batchInfo, new ChangesSelected());
}
/// <summary>
/// Event args raised once a database has been provisioned for sync;
/// carries the applied provision flags, the schema and the generated script.
/// </summary>
public DatabaseProvisionedArgs(SyncContext context, SyncProvision provision, DmSet schema, string script, DbConnection connection, DbTransaction transaction)
    : base(context, connection, transaction)
{
    this.Provision = provision;
    this.Script = script;
    this.Schema = schema;
}
/// <summary>
/// Copies row data from each table surrogate into the matching table of the
/// target DmSet; tables are matched by position, so the schemas must align.
/// </summary>
internal void ReadDataIntoDmSet(DmSet ds)
{
    for (var index = 0; index < ds.Tables.Count; index++)
        this.Tables[index].ReadDatasIntoDmTable(ds.Tables[index]);
}
/// <summary>
/// Serializes the configuration into a DmSet surrogate and stores it in the cache.
/// </summary>
public void SetCacheConfiguration(SyncConfiguration configuration)
{
    var confSet = new DmSet();
    SyncConfiguration.SerializeInDmSet(confSet, configuration);
    this.CacheManager.Set(SYNC_CONF, new DmSetSurrogate(confSet));
}
/// <summary>
/// Rebuilds a SyncConfiguration from its DmSet representation.
/// Returns null when the set is missing or has no "DotmimSync__ServiceConfiguration" table.
/// </summary>
internal static SyncConfiguration DeserializeFromDmSet(DmSet set)
{
    if (set == null)
        return null;

    if (!set.Tables.Contains("DotmimSync__ServiceConfiguration"))
        return null;

    SyncConfiguration configuration = new SyncConfiguration();
    var dmRowConfiguration = set.Tables["DotmimSync__ServiceConfiguration"].Rows[0];

    // (stray double semicolons removed on the two String reads below)
    configuration.BatchDirectory = dmRowConfiguration["BatchDirectory"] as String;
    configuration.Archive = dmRowConfiguration["Archive"] as String;
    configuration.ConflictResolutionPolicy = (ConflictResolutionPolicy)dmRowConfiguration["ConflictResolutionPolicy"];
    configuration.DownloadBatchSizeInKB = (int)dmRowConfiguration["DownloadBatchSizeInKB"];
    configuration.UseBulkOperations = (bool)dmRowConfiguration["UseBulkOperations"];
    configuration.UseVerboseErrors = (bool)dmRowConfiguration["UseVerboseErrors"];
    // NOTE: the serialization format is stored under the legacy column name "SerializationConverter"
    configuration.SerializationFormat = (SerializationFormat)dmRowConfiguration["SerializationConverter"];

    // optional filter clauses
    if (set.Tables.Contains("DotmimSync__Filters"))
    {
        var dmTableFilterParameters = set.Tables["DotmimSync__Filters"];

        foreach (var dmRowFilter in dmTableFilterParameters.Rows)
        {
            var tableName = dmRowFilter["TableName"] as String;
            var columnName = dmRowFilter["ColumnName"] as String;
            configuration.Filters.Add(tableName, columnName);
        }
    }

    // every non-internal table belongs to the scope set
    var configTables = set.Tables.Where(tbl => !tbl.TableName.StartsWith("DotmimSync__"));

    if (configTables != null)
    {
        foreach (var configTable in configTables)
            configuration.ScopeSet.Tables.Add(configTable.Clone());
    }

    // clone the relations into the scope set
    if (set.Relations != null && set.Relations.Count > 0)
    {
        foreach (var r in set.Relations)
        {
            var relation = r.Clone(configuration.ScopeSet);
            configuration.ScopeSet.Relations.Add(relation);
        }
    }

    return configuration;
}
/// <summary>
/// Handles an EnsureSchema HTTP message: resolves the schema (a server-hosted
/// configuration schema wins over the client-provided surrogate), runs the
/// provider's EnsureSchemaAsync, then writes the resulting schema back into
/// the message as a surrogate.
/// </summary>
private async Task <HttpMessage> EnsureSchemaAsync(HttpMessage httpMessage)
{
    HttpMessageEnsureSchema httpMessageContent;

    // Content may arrive already typed, or as a raw JObject that still needs materializing
    if (httpMessage.Content is HttpMessageEnsureSchema)
    {
        httpMessageContent = httpMessage.Content as HttpMessageEnsureSchema;
    }
    else
    {
        httpMessageContent = (httpMessage.Content as JObject).ToObject <HttpMessageEnsureSchema>();
    }

    if (httpMessageContent == null)
    {
        throw new ArgumentException("EnsureSchema message could not be null");
    }

    if (this.Configuration == null)
    {
        throw new InvalidOperationException("No sync configuration was provided. Make sure you create a SyncConfiguration object and pass it to the WebProxyServerProvider!");
    }

    // If the Conf is hosted by the server, we try to get the tables from it, overriding the client schema, if passed
    DmSet schema = null;

    if (this.Configuration.Schema != null)
    {
        schema = this.Configuration.Schema;
    }
    else if (httpMessageContent.Schema != null)
    {
        schema = httpMessageContent.Schema.ConvertToDmSet();
    }

    // release the incoming surrogate: it has either been converted above or superseded
    if (httpMessageContent.Schema != null)
    {
        httpMessageContent.Schema.Dispose();
        httpMessageContent.Schema = null;
    }

    (httpMessage.SyncContext, schema) = await this.EnsureSchemaAsync(httpMessage.SyncContext, new MessageEnsureSchema { Schema = schema });

    // ship the (possibly updated) schema back to the client as a surrogate
    httpMessageContent.Schema = new DmSetSurrogate(schema);
    schema.Clear();
    schema = null;

    // Dont forget to re-assign since it's a JObject, until now
    httpMessage.Content = httpMessageContent;

    return (httpMessage);
}
private void DmTable_CheckDmSet_Properties()
{
    // a table added to a set should inherit the set's culture and case sensitivity
    var parentSet = new DmSet();
    var childTable = new DmTable();
    parentSet.Tables.Add(childTable);

    Assert.Equal(parentSet, childTable.DmSet);
    Assert.Equal(parentSet.Culture, childTable.Culture);
    Assert.Equal(parentSet.CaseSensitive, childTable.CaseSensitive);
}
/// <summary>
/// Materializes a full DmSet (name, culture, schema and row data) from this surrogate.
/// </summary>
public DmSet ConvertToDmSet()
{
    var dmSet = new DmSet
    {
        Culture = new CultureInfo(this.CultureInfoName),
        CaseSensitive = this.CaseSensitive,
        DmSetName = this.DmSetName
    };

    // schema first, then the row data
    this.ReadSchemaIntoDmSet(dmSet);
    this.ReadDataIntoDmSet(dmSet);

    return dmSet;
}
/// <summary>
/// Initializes a new DmSetSurrogate from an existing DmSet, capturing its
/// name, culture, case sensitivity, tables and relations.
/// </summary>
public DmSetSurrogate(DmSet ds)
{
    if (ds == null)
        throw new ArgumentNullException(nameof(ds));

    this.DmSetName = ds.DmSetName;
    this.CultureInfoName = ds.Culture.Name;
    this.CaseSensitive = ds.CaseSensitive;

    // snapshot every table
    this.Tables = new List<DmTableSurrogate>(ds.Tables.Count);
    for (var t = 0; t < ds.Tables.Count; t++)
        this.Tables.Add(new DmTableSurrogate(ds.Tables[t]));

    if (ds.Relations == null || ds.Relations.Count == 0)
        return;

    // snapshot every relation as parent/child key column surrogates
    this.Relations = new List<DmRelationSurrogate>(ds.Relations.Count);

    for (var r = 0; r < ds.Relations.Count; r++)
    {
        DmRelation relation = ds.Relations[r];
        var drs = new DmRelationSurrogate { RelationName = relation.RelationName };

        drs.ChildKeySurrogates = new DmColumnSurrogate[relation.ChildKey.Columns.Length];
        for (var k = 0; k < relation.ChildKey.Columns.Length; k++)
            drs.ChildKeySurrogates[k] = new DmColumnSurrogate(relation.ChildKey.Columns[k]);

        drs.ParentKeySurrogates = new DmColumnSurrogate[relation.ParentKey.Columns.Length];
        for (var k = 0; k < relation.ParentKey.Columns.Length; k++)
            drs.ParentKeySurrogates[k] = new DmColumnSurrogate(relation.ParentKey.Columns[k]);

        this.Relations.Add(drs);
    }
}
/// <summary>
/// Recreates each surrogate table's schema inside the target DmSet (no row data).
/// </summary>
internal void ReadSchemaIntoDmSet(DmSet ds)
{
    foreach (var surrogate in this.Tables)
    {
        var table = new DmTable();
        surrogate.ReadSchemaIntoDmTable(table);

        table.Culture = new CultureInfo(surrogate.CultureInfoName);
        table.CaseSensitive = surrogate.CaseSensitive;
        table.TableName = surrogate.TableName;

        ds.Tables.Add(table);
    }
}
public void DmTable_CaseSensitive()
{
    // Case-sensitive set: "CaseSensitive" and "CASESENSITIVE" are distinct names, no error
    var set0 = new DmSet("CaseSensitive");
    var tbl0 = new DmTable("CASESENSITIVE");
    set0.Tables.Add(tbl0);

    // No error throws, it's ok
    Assert.NotEqual(set0.DmSetName, tbl0.TableName);

    // Case-insensitive set: the same two names now collide.
    // Generic Assert.Throws<T> replaces the obsolete typeof-based overload.
    Assert.Throws<ArgumentException>(() =>
    {
        var set1 = new DmSet("CaseSensitive");
        set1.CaseSensitive = false;
        var tbl1 = new DmTable("CASESENSITIVE");
        set1.Tables.Add(tbl1);
    });
}
/// <summary>
/// Re-enabling all constraints on synced tables.
/// (The previous summary said "Disabling" — this method calls EnableConstraints.)
/// </summary>
private ChangeApplicationAction EnableConstraints(SyncContext context, DmSet configTables, DbConnection connection, DbTransaction transaction, ScopeInfo fromScope)
{
    if (configTables == null || configTables.Tables.Count <= 0)
        return ChangeApplicationAction.Continue;

    // walk the tables in reverse order
    for (var i = 0; i < configTables.Tables.Count; i++)
    {
        var tableDescription = configTables.Tables[configTables.Tables.Count - i - 1];

        var builder = this.GetDatabaseBuilder(tableDescription);
        var syncAdapter = builder.CreateSyncAdapter(connection, transaction);

        // re-enable constraints on this table
        syncAdapter.EnableConstraints();
    }

    return ChangeApplicationAction.Continue;
}
/// <summary>
/// Recreates the surrogate schema — tables first, then relations — inside
/// the target DmSet.
/// </summary>
internal void ReadSchemaIntoDmSet(DmSet ds)
{
    // rebuild each table's schema
    foreach (var surrogate in this.Tables)
    {
        var table = new DmTable();
        surrogate.ReadSchemaIntoDmTable(table);
        table.Culture = new CultureInfo(surrogate.CultureInfoName);
        table.CaseSensitive = surrogate.CaseSensitive;
        table.TableName = surrogate.TableName;
        ds.Tables.Add(table);
    }

    if (this.Relations == null || this.Relations.Count == 0)
        return;

    // rebuild each relation by resolving its key columns against the new tables
    foreach (var relationSurrogate in this.Relations)
    {
        var parentColumns = new DmColumn[relationSurrogate.ParentKeySurrogates.Length];
        var childColumns = new DmColumn[relationSurrogate.ChildKeySurrogates.Length];

        for (var i = 0; i < parentColumns.Length; i++)
        {
            var parentKey = relationSurrogate.ParentKeySurrogates[i];
            parentColumns[i] = ds.Tables[parentKey.TableName].Columns[parentKey.ColumnName];

            var childKey = relationSurrogate.ChildKeySurrogates[i];
            childColumns[i] = ds.Tables[childKey.TableName].Columns[childKey.ColumnName];
        }

        ds.Relations.Add(new DmRelation(relationSurrogate.RelationName, parentColumns, childColumns));
    }
}
/// <summary>
/// Generate a new BatchPartInfo and add it to the current batch info.
/// Returns null when the change set contains no rows at all.
/// </summary>
internal BatchPartInfo GenerateBatchInfo(int batchIndex, DmSet changesSet, string batchDirectory)
{
    var hasData = changesSet != null
        && changesSet.Tables.Count != 0
        && changesSet.Tables.Any(t => t.Rows.Count > 0);

    if (!hasData)
        return null;

    BatchPartInfo bpi;

    if (InMemory)
    {
        // in-memory part: last batch by definition
        bpi = BatchPartInfo.CreateBatchPartInfo(batchIndex, changesSet, null, true, true);
    }
    else
    {
        // on-disk part: serialize the change set under this batch's directory
        var partId = GenerateNewFileName(batchIndex.ToString());
        var fileName = Path.Combine(batchDirectory, this.Directory, partId);
        bpi = BatchPartInfo.CreateBatchPartInfo(batchIndex, changesSet, fileName, false, false);
    }

    // register the part on the current batch info
    this.BatchPartsInfo.Add(bpi);

    return bpi;
}
/// <summary>
/// Generate a new BatchPartInfo and add it to the current batch info.
/// Even an empty change set produces a part, so the IsLastBatch flag always
/// has a part to live on. (The original dead "hasData" computation, left over
/// from a removed early-return, has been dropped — it had no effect.)
/// </summary>
internal BatchPartInfo GenerateBatchInfo(int batchIndex, DmSet changesSet, string batchDirectory)
{
    BatchPartInfo bpi;

    if (InMemory)
    {
        // in-memory part: last batch by definition
        bpi = BatchPartInfo.CreateBatchPartInfo(batchIndex, changesSet, null, true, true);
    }
    else
    {
        // on-disk part: serialize the change set under this batch's directory
        var partId = GenerateNewFileName(batchIndex.ToString());
        var fileName = Path.Combine(batchDirectory, this.Directory, partId);
        bpi = BatchPartInfo.CreateBatchPartInfo(batchIndex, changesSet, fileName, false, false);
    }

    // register the part on the current batch info
    this.BatchPartsInfo.Add(bpi);

    return bpi;
}
/// <summary>
/// Check if the database is already created.
/// If so, we won't do any check on the structure.
/// Edit this value after EnsureScopes to force checking.
/// </summary>
//public Boolean IsDatabaseCreated { get; set; }

/// <summary>
/// Serialize a list of ScopeInfo into the "DotmimSync__ScopeInfo" table of the given DmSet.
/// </summary>
public static void SerializeInDmSet(DmSet set, IEnumerable<ScopeInfo> scopesInfo)
{
    if (set == null)
        return;

    DmTable dmTable;

    if (!set.Tables.Contains("DotmimSync__ScopeInfo"))
    {
        dmTable = new DmTable("DotmimSync__ScopeInfo");
        set.Tables.Add(dmTable);

        // BUGFIX: define the schema only when the table is first created;
        // previously the columns were re-added on every call, duplicating
        // the definitions when the table already existed in the set
        dmTable.Columns.Add<Guid>("Id");
        dmTable.Columns.Add<Boolean>("IsDatabaseCreated");
        dmTable.Columns.Add<Boolean>("IsLocal");
        dmTable.Columns.Add<Boolean>("IsNewScope");
        dmTable.Columns.Add<Int64>("LastTimestamp");
        dmTable.Columns.Add<String>("Name");
    }
    else
    {
        dmTable = set.Tables["DotmimSync__ScopeInfo"];
    }

    foreach (var scopeInfo in scopesInfo)
    {
        DmRow dmRow = dmTable.NewRow();
        dmRow["Id"] = scopeInfo.Id;
        // "IsDatabaseCreated" is kept in the schema for compatibility but is no longer populated
        dmRow["IsLocal"] = scopeInfo.IsLocal;
        dmRow["IsNewScope"] = scopeInfo.IsNewScope;
        dmRow["LastTimestamp"] = scopeInfo.LastTimestamp;
        dmRow["Name"] = scopeInfo.Name;
        dmTable.Rows.Add(dmRow);
    }
}
/// <summary>
/// Serialize the BatchPartInfo WITHOUT its DmSet payload into the
/// "DotmimSync__BatchPartsInfo" table of the given DmSet.
/// </summary>
internal static void SerializeInDmSet(DmSet set, BatchPartInfo bpi)
{
    if (set == null)
        return;

    DmTable dmTableBatchPartsInfo;

    if (!set.Tables.Contains("DotmimSync__BatchPartsInfo"))
    {
        dmTableBatchPartsInfo = new DmTable("DotmimSync__BatchPartsInfo");
        set.Tables.Add(dmTableBatchPartsInfo);

        // BUGFIX: define the schema only when the table is first created;
        // previously the columns were re-added on every call, duplicating
        // the definitions when the table already existed in the set
        dmTableBatchPartsInfo.Columns.Add<String>("FileName");
        dmTableBatchPartsInfo.Columns.Add<int>("Index");
        dmTableBatchPartsInfo.Columns.Add<Boolean>("IsLastBatch");
        dmTableBatchPartsInfo.Columns.Add<String>("Tables");
    }
    else
    {
        dmTableBatchPartsInfo = set.Tables["DotmimSync__BatchPartsInfo"];
    }

    var dmRow = dmTableBatchPartsInfo.NewRow();
    dmRow["FileName"] = bpi.FileName;
    dmRow["Index"] = bpi.Index;
    dmRow["IsLastBatch"] = bpi.IsLastBatch;

    if (bpi.Tables != null && bpi.Tables.Length > 0)
    {
        // table names are stored as a single '|'-separated string
        dmRow["Tables"] = String.Join("|", bpi.Tables);
    }

    dmTableBatchPartsInfo.Rows.Add(dmRow);
}
/// <summary>
/// Clones the configured table schema and augments it with sync tracking
/// columns. Non primary-key columns become nullable so deleted rows (which
/// only carry their key values) can be represented.
/// </summary>
private DmTable BuildChangesTable(string tableName, DmSet configTables)
{
    var dmTable = configTables.Tables[tableName].Clone();

    // tracking columns carrying scope / timestamp metadata
    AddTrackingColumns<Guid>(dmTable, "create_scope_id");
    AddTrackingColumns<long>(dmTable, "create_timestamp");
    AddTrackingColumns<Guid>(dmTable, "update_scope_id");
    AddTrackingColumns<long>(dmTable, "update_timestamp");

    foreach (var column in dmTable.Columns)
    {
        var isPrimaryKey = dmTable.PrimaryKey.Columns
            .Any(pk => dmTable.IsEqual(pk.ColumnName, column.ColumnName));

        if (!isPrimaryKey)
            column.AllowDBNull = true;
    }

    return dmTable;
}
/// <summary>
/// Test fixture setup: builds the Client / TypeClient schema used by the relation tests.
/// </summary>
public DmRelationTests()
{
    _set = new DmSet();

    clientTable = new DmTable("Client");
    clientTypeTable = new DmTable("TypeClient");
    _set.Tables.Add(clientTable);
    _set.Tables.Add(clientTypeTable);

    var clientId = new DmColumn<int>("ClientId");
    var clientType = new DmColumn<int>("ClientType");
    var clientName = new DmColumn<String>("ClientName");
    clientTable.Columns.Add(clientId);
    clientTable.Columns.Add(clientType);
    clientTable.Columns.Add(clientName);
    clientTable.PrimaryKey = new DmKey(clientId);

    var typeId = new DmColumn<int>("TypeId");
    var typeName = new DmColumn<string>("TypeName");
    clientTypeTable.Columns.Add(typeId);
    clientTypeTable.Columns.Add(typeName);
    clientTypeTable.PrimaryKey = new DmKey(typeId);
}
/// <summary>
/// Enumerate all internal changes, in batch mode: rows are accumulated table
/// by table and flushed into disk-backed batch parts whenever the configured
/// download batch size (in KB) is exceeded.
/// (Previous summary said "no batch mode" — that belongs to the sibling
/// EnumerateChangesInternal.)
/// </summary>
internal async Task <(BatchInfo, ChangesSelected)> EnumerateChangesInBatchesInternal
    (SyncContext context, ScopeInfo scopeInfo, int downloadBatchSizeInKB, DmSet configTables, string batchDirectory, ConflictResolutionPolicy policy, ICollection <FilterClause> filters)
{
    DmTable dmTable = null;
    // memory size total accumulated (in KB) for the current batch part
    double memorySizeFromDmRows = 0L;
    int batchIndex = 0;

    // this batch info won't be in memory, it will be batched to disk
    BatchInfo batchInfo = new BatchInfo
    {
        // directory where all files will be stored
        Directory = BatchInfo.GenerateNewDirectoryName(),
        // not in memory since we serialized all files in the tmp directory
        InMemory = false
    };

    // Create stats object to store changes count
    ChangesSelected changes = new ChangesSelected();

    using (var connection = this.CreateConnection())
    {
        try
        {
            // Open the connection
            await connection.OpenAsync();

            using (var transaction = connection.BeginTransaction())
            {
                // create the in memory changes set
                DmSet changesSet = new DmSet(configTables.DmSetName);

                foreach (var tableDescription in configTables.Tables)
                {
                    // if we are in upload stage, skip tables that are download only
                    if (context.SyncWay == SyncWay.Upload && tableDescription.SyncDirection == SyncDirection.DownloadOnly)
                    {
                        continue;
                    }

                    // if we are in download stage, skip tables that are upload only
                    if (context.SyncWay == SyncWay.Download && tableDescription.SyncDirection == SyncDirection.UploadOnly)
                    {
                        continue;
                    }

                    var builder = this.GetDatabaseBuilder(tableDescription);
                    var syncAdapter = builder.CreateSyncAdapter(connection, transaction);
                    syncAdapter.ConflictApplyAction = SyncConfiguration.GetApplyAction(policy);

                    // raise before event
                    context.SyncStage = SyncStage.TableChangesSelecting;
                    var beforeArgs = new TableChangesSelectingEventArgs(this.ProviderTypeName, context.SyncStage, tableDescription.TableName);
                    this.TryRaiseProgressEvent(beforeArgs, this.TableChangesSelecting);

                    // Get Command: use the filtered select when server-side filters apply to this table
                    DbCommand selectIncrementalChangesCommand;
                    DbCommandType dbCommandType;

                    if (this.CanBeServerProvider && context.Parameters != null && context.Parameters.Count > 0 && filters != null && filters.Count > 0)
                    {
                        var filtersName = filters
                            .Where(f => f.TableName.Equals(tableDescription.TableName, StringComparison.InvariantCultureIgnoreCase))
                            .Select(f => f.ColumnName);

                        if (filtersName != null && filtersName.Count() > 0)
                        {
                            dbCommandType = DbCommandType.SelectChangesWitFilters;
                            selectIncrementalChangesCommand = syncAdapter.GetCommand(dbCommandType, filtersName);
                        }
                        else
                        {
                            dbCommandType = DbCommandType.SelectChanges;
                            selectIncrementalChangesCommand = syncAdapter.GetCommand(dbCommandType);
                        }
                    }
                    else
                    {
                        dbCommandType = DbCommandType.SelectChanges;
                        selectIncrementalChangesCommand = syncAdapter.GetCommand(dbCommandType);
                    }

                    // Deriving Parameters
                    syncAdapter.SetCommandParameters(DbCommandType.SelectChanges, selectIncrementalChangesCommand);

                    if (selectIncrementalChangesCommand == null)
                    {
                        var exc = "Missing command 'SelectIncrementalChangesCommand' ";
                        throw new Exception(exc);
                    }

                    dmTable = BuildChangesTable(tableDescription.TableName, configTables);

                    try
                    {
                        // Set commons parameters
                        SetSelectChangesCommonParameters(context, scopeInfo, selectIncrementalChangesCommand);

                        // Set filter parameters if any
                        // Only on server side
                        if (this.CanBeServerProvider && context.Parameters != null && context.Parameters.Count > 0 && filters != null && filters.Count > 0)
                        {
                            var filterTable = filters.Where(f => f.TableName.Equals(tableDescription.TableName, StringComparison.InvariantCultureIgnoreCase)).ToList();

                            if (filterTable != null && filterTable.Count > 0)
                            {
                                foreach (var filter in filterTable)
                                {
                                    var parameter = context.Parameters.FirstOrDefault(p => p.ColumnName.Equals(filter.ColumnName, StringComparison.InvariantCultureIgnoreCase) && p.TableName.Equals(filter.TableName, StringComparison.InvariantCultureIgnoreCase));

                                    if (parameter != null)
                                    {
                                        DbManager.SetParameterValue(selectIncrementalChangesCommand, parameter.ColumnName, parameter.Value);
                                    }
                                }
                            }
                        }

                        this.AddTrackingColumns <int>(dmTable, "sync_row_is_tombstone");

                        // Statistics
                        TableChangesSelected tableChangesSelected = new TableChangesSelected { TableName = tableDescription.TableName };
                        changes.TableChangesSelected.Add(tableChangesSelected);

                        // Get the reader
                        using (var dataReader = selectIncrementalChangesCommand.ExecuteReader())
                        {
                            while (dataReader.Read())
                            {
                                DmRow dmRow = CreateRowFromReader(dataReader, dmTable);
                                DmRowState state = DmRowState.Unchanged;
                                state = GetStateFromDmRow(dmRow, scopeInfo);

                                // If the row is not deleted, inserted or modified, go next
                                if (state != DmRowState.Deleted && state != DmRowState.Modified && state != DmRowState.Added)
                                {
                                    continue;
                                }

                                var fieldsSize = DmTableSurrogate.GetRowSizeFromDataRow(dmRow);
                                var dmRowSize = fieldsSize / 1024d;

                                // a single row larger than the whole batch budget can never fit
                                if (dmRowSize > downloadBatchSizeInKB)
                                {
                                    var exc = $"Row is too big ({dmRowSize} kb.) for the current Configuration.DownloadBatchSizeInKB ({downloadBatchSizeInKB} kb.) Aborting Sync...";
                                    throw new Exception(exc);
                                }

                                // Calculate the new memory size
                                memorySizeFromDmRows = memorySizeFromDmRows + dmRowSize;

                                // add row
                                dmTable.Rows.Add(dmRow);
                                tableChangesSelected.TotalChanges++;

                                // acceptchanges before modifying
                                dmRow.AcceptChanges();

                                // Set the correct state to be applied
                                if (state == DmRowState.Deleted)
                                {
                                    dmRow.Delete();
                                    tableChangesSelected.Deletes++;
                                }
                                else if (state == DmRowState.Added)
                                {
                                    dmRow.SetAdded();
                                    tableChangesSelected.Inserts++;
                                }
                                else if (state == DmRowState.Modified)
                                {
                                    dmRow.SetModified();
                                    tableChangesSelected.Updates++;
                                }

                                // We exceed the memorySize, so we can flush the rows to a batch part
                                if (memorySizeFromDmRows > downloadBatchSizeInKB)
                                {
                                    // Since we dont need this column anymore, remove it
                                    this.RemoveTrackingColumns(dmTable, "sync_row_is_tombstone");

                                    changesSet.Tables.Add(dmTable);

                                    // generate the batch part info (serializes changesSet to disk)
                                    batchInfo.GenerateBatchInfo(batchIndex, changesSet, batchDirectory);

                                    // increment batch index
                                    batchIndex++;
                                    changesSet.Clear();

                                    // Recreate an empty DmSet, then a dmTable clone to keep filling
                                    changesSet = new DmSet(configTables.DmSetName);
                                    dmTable = dmTable.Clone();
                                    this.AddTrackingColumns <int>(dmTable, "sync_row_is_tombstone");

                                    // Init the row memory size
                                    memorySizeFromDmRows = 0L;

                                    // add stats for a SyncProgress event
                                    context.SyncStage = SyncStage.TableChangesSelected;
                                    var args2 = new TableChangesSelectedEventArgs (this.ProviderTypeName, SyncStage.TableChangesSelected, tableChangesSelected);
                                    this.TryRaiseProgressEvent(args2, this.TableChangesSelected);
                                }
                            }

                            // Since we dont need this column anymore, remove it
                            this.RemoveTrackingColumns(dmTable, "sync_row_is_tombstone");
                            context.SyncStage = SyncStage.TableChangesSelected;

                            // keep the (possibly partial) table for the final batch part below
                            changesSet.Tables.Add(dmTable);

                            // Init the row memory size
                            memorySizeFromDmRows = 0L;

                            // Event progress
                            context.SyncStage = SyncStage.TableChangesSelected;
                            var args = new TableChangesSelectedEventArgs(this.ProviderTypeName, SyncStage.TableChangesSelected, tableChangesSelected);
                            this.TryRaiseProgressEvent(args, this.TableChangesSelected);
                        }
                    }
                    catch (Exception)
                    {
                        throw;
                    }
                    finally
                    {
                    }
                }

                // We are in batch mode, and we are at the last batchpart info
                if (changesSet != null && changesSet.HasTables && changesSet.HasChanges())
                {
                    var batchPartInfo = batchInfo.GenerateBatchInfo(batchIndex, changesSet, batchDirectory);

                    if (batchPartInfo != null)
                    {
                        batchPartInfo.IsLastBatch = true;
                    }
                }

                transaction.Commit();
            }
        }
        catch (Exception)
        {
            throw;
        }
        finally
        {
            if (connection != null && connection.State == ConnectionState.Open)
            {
                connection.Close();
            }
        }
    }

    return (batchInfo, changes);
}
/// <summary>
/// Enumerate all internal changes, no batch mode: every selected row is
/// accumulated into a single in-memory DmSet and returned as one in-memory
/// batch part.
/// </summary>
internal async Task <(BatchInfo, ChangesSelected)> EnumerateChangesInternal(
    SyncContext context, ScopeInfo scopeInfo, DmSet configTables, string batchDirectory, ConflictResolutionPolicy policy, ICollection <FilterClause> filters)
{
    // create the in memory changes set
    DmSet changesSet = new DmSet(SyncConfiguration.DMSET_NAME);

    // Create the batch info, in memory
    var batchInfo = new BatchInfo
    {
        InMemory = true
    };

    using (var connection = this.CreateConnection())
    {
        // Open the connection
        await connection.OpenAsync();

        using (var transaction = connection.BeginTransaction())
        {
            try
            {
                // changes that will be returned as selected changes
                ChangesSelected changes = new ChangesSelected();

                foreach (var tableDescription in configTables.Tables)
                {
                    // if we are in upload stage, skip tables that are download only
                    if (context.SyncWay == SyncWay.Upload && tableDescription.SyncDirection == SyncDirection.DownloadOnly)
                    {
                        continue;
                    }

                    // if we are in download stage, skip tables that are upload only
                    if (context.SyncWay == SyncWay.Download && tableDescription.SyncDirection == SyncDirection.UploadOnly)
                    {
                        continue;
                    }

                    var builder = this.GetDatabaseBuilder(tableDescription);
                    var syncAdapter = builder.CreateSyncAdapter(connection, transaction);
                    syncAdapter.ConflictApplyAction = SyncConfiguration.GetApplyAction(policy);

                    // raise before event
                    context.SyncStage = SyncStage.TableChangesSelecting;
                    var beforeArgs = new TableChangesSelectingEventArgs(this.ProviderTypeName, context.SyncStage, tableDescription.TableName);
                    this.TryRaiseProgressEvent(beforeArgs, this.TableChangesSelecting);

                    // selected changes for the current table
                    TableChangesSelected tableSelectedChanges = new TableChangesSelected
                    {
                        TableName = tableDescription.TableName
                    };

                    // Get Command: use the filtered select when server-side filters apply to this table
                    DbCommand selectIncrementalChangesCommand;
                    DbCommandType dbCommandType;

                    if (this.CanBeServerProvider && context.Parameters != null && context.Parameters.Count > 0 && filters != null && filters.Count > 0)
                    {
                        var filtersName = filters
                            .Where(f => f.TableName.Equals(tableDescription.TableName, StringComparison.InvariantCultureIgnoreCase))
                            .Select(f => f.ColumnName);

                        if (filtersName != null && filtersName.Count() > 0)
                        {
                            dbCommandType = DbCommandType.SelectChangesWitFilters;
                            selectIncrementalChangesCommand = syncAdapter.GetCommand(dbCommandType, filtersName);
                        }
                        else
                        {
                            dbCommandType = DbCommandType.SelectChanges;
                            selectIncrementalChangesCommand = syncAdapter.GetCommand(dbCommandType);
                        }
                    }
                    else
                    {
                        dbCommandType = DbCommandType.SelectChanges;
                        selectIncrementalChangesCommand = syncAdapter.GetCommand(dbCommandType);
                    }

                    if (selectIncrementalChangesCommand == null)
                    {
                        var exc = "Missing command 'SelectIncrementalChangesCommand' ";
                        throw new Exception(exc);
                    }

                    // Deriving Parameters
                    syncAdapter.SetCommandParameters(dbCommandType, selectIncrementalChangesCommand);

                    // Get a clone of the table with tracking columns
                    var dmTableChanges = BuildChangesTable(tableDescription.TableName, configTables);

                    SetSelectChangesCommonParameters(context, scopeInfo, selectIncrementalChangesCommand);

                    // Set filter parameters if any (server side only)
                    if (this.CanBeServerProvider && context.Parameters != null && context.Parameters.Count > 0 && filters != null && filters.Count > 0)
                    {
                        var tableFilters = filters.Where(f => f.TableName.Equals(tableDescription.TableName, StringComparison.InvariantCultureIgnoreCase)).ToList();

                        if (tableFilters != null && tableFilters.Count > 0)
                        {
                            foreach (var filter in tableFilters)
                            {
                                var parameter = context.Parameters.FirstOrDefault(p => p.ColumnName.Equals(filter.ColumnName, StringComparison.InvariantCultureIgnoreCase) && p.TableName.Equals(filter.TableName, StringComparison.InvariantCultureIgnoreCase));

                                if (parameter != null)
                                {
                                    DbManager.SetParameterValue(selectIncrementalChangesCommand, parameter.ColumnName, parameter.Value);
                                }
                            }
                        }
                    }

                    this.AddTrackingColumns <int>(dmTableChanges, "sync_row_is_tombstone");

                    // Get the reader
                    using (var dataReader = selectIncrementalChangesCommand.ExecuteReader())
                    {
                        while (dataReader.Read())
                        {
                            DmRow dataRow = CreateRowFromReader(dataReader, dmTableChanges);

                            // assuming the row is not inserted / modified
                            DmRowState state = DmRowState.Unchanged;

                            // get if the current row is inserted, modified, deleted
                            state = GetStateFromDmRow(dataRow, scopeInfo);

                            if (state != DmRowState.Deleted && state != DmRowState.Modified && state != DmRowState.Added)
                            {
                                continue;
                            }

                            // add row
                            dmTableChanges.Rows.Add(dataRow);

                            // acceptchanges before modifying
                            dataRow.AcceptChanges();
                            tableSelectedChanges.TotalChanges++;

                            // Set the correct state to be applied
                            if (state == DmRowState.Deleted)
                            {
                                dataRow.Delete();
                                tableSelectedChanges.Deletes++;
                            }
                            else if (state == DmRowState.Added)
                            {
                                dataRow.SetAdded();
                                tableSelectedChanges.Inserts++;
                            }
                            else if (state == DmRowState.Modified)
                            {
                                dataRow.SetModified();
                                tableSelectedChanges.Updates++;
                            }
                        }

                        // Since we dont need this column anymore, remove it
                        this.RemoveTrackingColumns(dmTableChanges, "sync_row_is_tombstone");

                        // add it to the DmSet
                        changesSet.Tables.Add(dmTableChanges);
                    }

                    // add the stats to global stats
                    changes.TableChangesSelected.Add(tableSelectedChanges);

                    // Raise event for this table
                    context.SyncStage = SyncStage.TableChangesSelected;
                    var args = new TableChangesSelectedEventArgs(this.ProviderTypeName, SyncStage.TableChangesSelected, tableSelectedChanges);
                    this.TryRaiseProgressEvent(args, this.TableChangesSelected);
                }

                transaction.Commit();

                // generate the (single, in-memory) batchpartinfo
                batchInfo.GenerateBatchInfo(0, changesSet, batchDirectory);

                // Create a new in-memory batch info with the changes DmSet
                return (batchInfo, changes);
            }
            catch (Exception)
            {
                throw;
            }
            finally
            {
                if (connection != null && connection.State == ConnectionState.Open)
                {
                    connection.Close();
                }
            }
        }
    }
}