static internal bool OpenConnection(IDbConnection connection)
{
    WebUtil.CheckArgumentNull(connection, "connection");

    bool openedConnection = false;

    switch (connection.State)
    {
        case ConnectionState.Open:
            break;

        case ConnectionState.Broken:
            SyncServiceTracer.TraceVerbose("Closing broken connection");
            connection.Close();
            goto case ConnectionState.Closed;

        case ConnectionState.Closed:
            if (connection is System.Data.SqlClient.SqlConnection)
            {
                // Blank out the password before tracing the connection string.
                System.Data.SqlClient.SqlConnectionStringBuilder builder = new System.Data.SqlClient.SqlConnectionStringBuilder();
                builder.ConnectionString = connection.ConnectionString;

                if (!String.IsNullOrEmpty(builder.Password))
                {
                    builder.Password = "******";
                }

                SyncServiceTracer.TraceVerbose("Connecting using string: {0}", builder.ConnectionString);
            }
            else
            {
                SyncServiceTracer.TraceVerbose("Connecting to database: {0}", connection.Database);
            }

            // Check for SqlConnection; other providers are opened directly.
            if (connection is SqlConnection)
            {
                TryOpenConnection(connection);
            }
            else
            {
                connection.Open();
            }

            openedConnection = true;
            break;

        default:
            throw SyncServiceException.CreateInternalServerError(String.Format("Unhandled ConnectionState {0}", connection.State));
    }

    return openedConnection;
}
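// Illustrative usage sketch (not part of the original file): callers are expected to pair the
// return value with a matching close, so the connection is closed only if this helper actually
// opened it. The "connection" variable and the command execution shown here are hypothetical.
//
//     bool openedConnection = OpenConnection(connection);
//     try
//     {
//         // ... execute commands against the connection ...
//     }
//     finally
//     {
//         if (openedConnection)
//         {
//             connection.Close();
//         }
//     }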
/// <summary> /// Utility function to get the SELECT command for a given entity type. /// This will return a command of format SELECT [Fieldname1, FieldName2] from [TableName] WHERE PK1 = @PK1 [AND PK2= @pk2] /// where FieldNameN is the actual columns included in the scope /// </summary> /// <param name="t">Entity type</param> /// <returns>TSQL Select command text</returns> internal string GetSelectScriptForType(Type t) { if (!_typeToTableGlobalNameMapping.ContainsKey(t)) { throw SyncServiceException.CreateInternalServerError( String.Format(CultureInfo.InvariantCulture, "DataSetToEntitiesConverter.GetSelectScriptForType: Unknown type {0}", t)); } if (!this._globalToLocalPropertyMapping.ContainsKey(t)) { BuildPropertyMappingInfo(t); } Dictionary <string, string> mappingInfo = this._globalToLocalPropertyMapping[t]; string delimiter = string.Empty; StringBuilder colsList = new StringBuilder(); // Build cols list foreach (PropertyInfo pinfo in ReflectionUtility.GetPropertyInfoMapping(t)) { colsList.Append(delimiter).Append((mappingInfo.ContainsKey(pinfo.Name)) ? mappingInfo[pinfo.Name] : pinfo.Name); if (string.IsNullOrEmpty(delimiter)) { delimiter = ", "; } } delimiter = string.Empty; StringBuilder pkList = new StringBuilder(); int index = 1; // Build primary keys list foreach (PropertyInfo pinfo in ReflectionUtility.GetPrimaryKeysPropertyInfoMapping(t)) { pkList.Append(delimiter).Append((mappingInfo.ContainsKey(pinfo.Name)) ? mappingInfo[pinfo.Name] : pinfo.Name); pkList.Append(" = @p").Append(index++); if (string.IsNullOrEmpty(delimiter)) { delimiter = " AND "; } } // Get the table local name. string tableName = _typeToTableLocalNameMapping[t]; return(string.Format(CultureInfo.InvariantCulture, SelectFromTableFormat, colsList, tableName, pkList)); }
/// <summary> /// Convert from a list of OfflineCapableEntities to a dataset. /// </summary> /// <param name="entities">Entity list</param> /// <returns>Dataset which contains all data from the entity list.</returns> /// <exception cref="SyncServiceException">For an unknown type that is passed in the input.</exception> internal DataSet ConvertEntitiesToDataSet(List <IOfflineEntity> entities) { var resultSet = new DataSet(); foreach (var entity in entities) { if (!_typeToTableGlobalNameMapping.ContainsKey(entity.GetType())) { throw SyncServiceException.CreateInternalServerError( String.Format(CultureInfo.InvariantCulture, "DataSetToEntitiesConverter.ConvertEntitiesToDataSet: Unknown type {0}", entity.GetType())); } BuildPropertyMappingInfo(entity.GetType()); string tableName = _typeToTableGlobalNameMapping[entity.GetType()]; AddEntityToDataSet(entity, resultSet, tableName); } return(resultSet); }
//
// **** Methods for pulling sorted data
//
public IEnumerable<SortedBatch> PullSortedBatches()
{
    // Start the first batch and range.
    SortedBatch pendingBatch = new SortedBatch();
    long sizeOfBatch = 0;

    // Initialize the range set builder because we will be pulling
    // rows now and need to calculate the correct range sets.
    BatchRangeSetBuilder rangeSetBuilder = new BatchRangeSetBuilder(_srcKnowledge.GetSyncIdFormatGroup().ItemIdFormat, _tablesInApplyOrder);
    rangeSetBuilder.StartBuildingFirstBatchRangeSet();

    // For each table in apply order
    foreach (string tableName in _tablesInApplyOrder)
    {
        // Start the next table range.
        rangeSetBuilder.StartNextTable(tableName);

        // If we have a data table for this name
        SortedTable curTable;
        if (_sortedTables.TryGetValue(tableName, out curTable) && curTable._schema != null)
        {
            // Add the current table to the batch we are working on.
            DataTable curDataTable = curTable._schema.Clone();
            pendingBatch.sortedDataSet.Tables.Add(curDataTable);
            curDataTable.BeginLoadData();

            // If there are no rows in the table we just add it to
            // the current dataset and move on.
            SyncId maxIdInCurrentTable = null;

            // Pull the rows in SyncId order.
            foreach (KeyValuePair<SyncId, SortedRow> kvp in curTable._rows)
            {
                long curRowSize = SyncUtil.GetRowSizeForObjectArray(kvp.Value._rowValues);

                if (curRowSize > (_maxSortedBatchSizeInKB * 1024))
                {
                    // Note: This code is modified to throw a more specific exception.
                    // If we end up merging this code with the provider, then the caller has to be tested to
                    // make sure it works with the logic in the provider codebase.
                    throw SyncServiceException.CreateInternalServerError(
                        String.Format(Strings.RowExceedsConfiguredBatchSize, _maxSortedBatchSizeInKB, tableName, curRowSize));
                }

                // fixme: if this row won't fit then return the current batch
                if ((sizeOfBatch + curRowSize) > (_maxSortedBatchSizeInKB * 1024))
                {
                    // * done loading data
                    curDataTable.EndLoadData();

                    // * add last sync id in batch
                    if (maxIdInCurrentTable == null)
                    {
                        // We have not added any rows to the current table,
                        // so create a dummy id in the current table for the range.
                        maxIdInCurrentTable = rangeSetBuilder.MakeDummyFirstRowID(tableName);
                    }

                    rangeSetBuilder.AddSyncId(tableName, maxIdInCurrentTable);

                    // Finish the current range set and hand back the batch.
                    BatchRangeSet curRS = rangeSetBuilder.FinishBuildingBatchRangeSet();
                    pendingBatch.sortedDataSetKnowledge = curRS.ProjectOnKnowledge(_srcKnowledge);

                    yield return pendingBatch;

                    // *** tricky
                    // After yielding the current batch we need to start a new one
                    // for the rest of the rows in this table.
                    // We must reset all the needed state, and this is tricky.
                    maxIdInCurrentTable = null;

                    // Start a new batch.
                    pendingBatch = new SortedBatch();
                    sizeOfBatch = 0;

                    // Start a new range after the current one.
                    rangeSetBuilder.StartBuildingBatchRangeSet(curRS);

                    // Add the current table to the new batch we are working on.
                    curDataTable = curTable._schema.Clone();
                    pendingBatch.sortedDataSet.Tables.Add(curDataTable);
                    curDataTable.BeginLoadData();
                }

                AddSortedRowToDataTable(curDataTable, kvp.Value);
                sizeOfBatch += curRowSize;
                maxIdInCurrentTable = kvp.Key;
            }

            curDataTable.EndLoadData();
        }
    }

    // We should always be working on a batch.
    {
        Debug.Assert(pendingBatch != null);

        BatchRangeSet curRS = rangeSetBuilder.FinishLastBatchRangeSet();
        pendingBatch.sortedDataSetKnowledge = curRS.ProjectOnKnowledge(_srcKnowledge);
    }

    yield return pendingBatch;
}
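// Illustrative usage sketch (not part of the original file): because this is an iterator,
// each SortedBatch is produced lazily, sized against _maxSortedBatchSizeInKB, and carries
// the knowledge projected for its range set. The SendBatchToClient call below is hypothetical;
// a caller would typically serialize each batch's data together with its knowledge:
//
//     foreach (SortedBatch batch in PullSortedBatches())
//     {
//         SendBatchToClient(batch.sortedDataSet, batch.sortedDataSetKnowledge);
//     }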