Example #1
        private async Task SaveSyncData(IDbConnection connection, ISyncableItemInfo remoteSyncableItemInfo,
                                        SyncStatus status)
        {
            var data = JObject.Parse("{item:{itemRefs:[]}}");

            if (!remoteSyncableItemInfo.Deleted)
            {
                var request = new JObject
                {
                    { "sessionID", _remoteSessionId },
                    { "item", SyncUtil.SyncableItemInfoToJson(remoteSyncableItemInfo) }
                };
                var response = await _transport.TransportAsync(SyncEndpoint.GetItemData, request);

                data = response;
            }

            SessionDbHelper.SaveItemData(connection, remoteSyncableItemInfo, status, data);

            var itemRefs = (JArray)data["item"]["itemRefs"];

            foreach (var item in itemRefs)
            {
                ISyncableItemInfo itemRefInfo      = SyncUtil.SyncableItemInfoFromJson(item);
                ISyncableItemInfo localItemRefInfo = _store.LocateCurrentItemInfo(itemRefInfo);
                if (localItemRefInfo != null && localItemRefInfo.Deleted)
                {
                    await SaveSyncData(connection, itemRefInfo, SyncStatus.MayBeNeeded);
                }
            }
        }
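
The tail of this method, which walks data["item"]["itemRefs"] and re-fetches any referenced item whose local copy is deleted, reappears verbatim in Example #15 below. A hypothetical refactor (not part of the original code) could pull that loop into a shared helper on the same class:

        // Hypothetical helper, assumed to live on the same class as SaveSyncData and
        // SaveChangesBatch: re-fetch every referenced item whose local copy is deleted,
        // marking it as MayBeNeeded.
        private async Task SaveDataForDeletedLocalItemRefs(IDbConnection connection, JObject data)
        {
            var itemRefs = (JArray)data["item"]["itemRefs"];

            foreach (var itemRef in itemRefs)
            {
                ISyncableItemInfo itemRefInfo      = SyncUtil.SyncableItemInfoFromJson(itemRef);
                ISyncableItemInfo localItemRefInfo = _store.LocateCurrentItemInfo(itemRefInfo);
                if (localItemRefInfo != null && localItemRefInfo.Deleted)
                {
                    await SaveSyncData(connection, itemRefInfo, SyncStatus.MayBeNeeded);
                }
            }
        }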
Example #2
        public void ResolveConflictLocalWins(SyncConflict conflict)
        {
            if (_closed)
            {
                throw new InvalidOperationException();
            }

            var tickCount       = _store.IncrementLocalRepilcaTickCount();
            var modifiedReplica = new ReplicaInfo
            {
                ReplicaId        = _store.GetLocalReplicaId(),
                ReplicaTickCount = tickCount
            };
            var resolvedStatus = conflict.LocalItemInfo.Deleted ? SyncStatus.Delete : SyncStatus.Update;
            var data           = JObject.Parse("{item:{itemRefs:[]}}");

            if (resolvedStatus != SyncStatus.Delete)
            {
                var builder = SyncUtil.JsonItemFromSyncableItemInfo(conflict.LocalItemInfo);
                _store.BuildItemData(conflict.LocalItemInfo, builder);
                data = new JObject {
                    { "item", builder }
                };
            }

            using (var connection = _syncSessionDbConnectionProvider.GetSyncSessionDbConnection(_localSessionId))
            {
                SessionDbHelper.ResolveItemWithData(connection, conflict.RemoteItemInfo, resolvedStatus, modifiedReplica,
                                                    data);
            }
        }
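
A minimal, hypothetical caller for the method above. Here `session` and `conflicts` are placeholders for whatever object exposes ResolveConflictLocalWins and for the conflict list produced by a pull; that surface is not shown in these examples.

        // Hypothetical usage sketch: resolve every conflict in favour of the local copy.
        // `session` and `conflicts` are placeholders, not names from the library.
        foreach (SyncConflict conflict in conflicts)
        {
            session.ResolveConflictLocalWins(conflict);
        }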
Example #3
        private void CommitChanges()
        {
            if (_remoteKnowledge == null)
            {
                return;
            }

            ReportProgressAndCheckCacellation(new SyncProgress
            {
                Stage           = SyncStage.ApplyingChangesLocally,
                PercentComplete = 0,
                Message         = "Applying changes locally"
            });
            using (
                IDbConnection connection = _syncSessionDbConnectionProvider.GetSyncSessionDbConnection(_localSessionId))
            {
                SyncUtil.ApplyChangesAndUpdateKnowledge(connection, _store, _remoteKnowledge);
            }
            ReportProgressAndCheckCacellation(new SyncProgress
            {
                Stage           = SyncStage.ApplyingChangesLocally,
                PercentComplete = 100,
                Message         = "Applied all changes locally"
            });
        }
Example #4
        public async Task LoadVocs_NewSession_SendFromClient_DoctorOk()
        {
            // sync voc
            var voc = sSession.Get <Vocabulary>(IntToGuid <Vocabulary>(2));
            await s.OnlySelectedVocs(sSession, voc.ToEnumerable())
            .SendFrom(Side.Server, Scope.Voc);

            // close app
            SyncUtil.Clear();

            // sync user
            bool errorWas = false;
            EventHandler <StringEventArgs> h = (s1, e) =>
            {
                errorWas = e.str.Contains("Error");
            };

            Poster.MessagePosted += h;

            await s.WithoutCustomVocsInDoc().SendFrom(Side.Client, Scope.User);

            Poster.MessagePosted -= h;

            // doctor table already provisioned in user scope
            Assert.IsTrue(!errorWas);
        }
Example #5
        internal void Scale(double factor, TimeSpan baseTime)
        {
            PreciseStart = SyncUtil.Scale(PreciseStart, baseTime, factor);
            PreciseEnd   = SyncUtil.Scale(PreciseEnd, baseTime, factor);

            subtitle.UpdateFramesFromTimes();
        }
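
SyncUtil.Scale itself is not shown in these examples. A minimal sketch of what a helper with this call shape typically does, assuming the times are TimeSpan values: stretch each time's offset from `baseTime` by `factor`.

        // Hypothetical implementation sketch, not the library's code: scales `time`
        // around `baseTime` so that its offset from the base grows or shrinks by `factor`.
        internal static TimeSpan Scale(TimeSpan time, TimeSpan baseTime, double factor)
        {
            long scaledOffsetTicks = (long)((time - baseTime).Ticks * factor);
            return baseTime + TimeSpan.FromTicks(scaledOffsetTicks);
        }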
Example #6
        public void Sync(SyncOptions options)
        {
            var totalIndexedUsers = GetAllUsers(options.UserLimit, options.UserAccountFilter);

            Console.WriteLine(@"""fullSync"":{""getUserProfileByIndex"":" + totalIndexedUsers.Count + ",");
            Console.WriteLine(string.Concat(@"""users"" : [", string.Join(",\n", totalIndexedUsers), "]"));

            SyncUtil.JsonClose(",");
        }
Example #7
        //
        // ****** Methods for adding unsorted data
        //
        public void AddUnsortedDataSet(DataSet dataSet)
        {
            // for each table in the DataSet
            foreach (DataTable curTable in dataSet.Tables)
            {
                int numColumns = curTable.Columns.Count;
                // this table should already be in the list of added
                // tables
                SortedTable sortedTable;
                if (!_sortedTables.TryGetValue(curTable.TableName, out sortedTable))
                {
                    SyncTracer.Error("Cannot Apply Changes since Adapters are missing for the following tables: {0}.  " +
                                     "Please ensure that the local and global names on the Adapters are set properly.", curTable);

                    throw new DbSyncException("MissingProviderAdapter");
                }
                if (sortedTable._schema == null)
                {
                    // add a new row storage dictionary and schema if we
                    // need one
                    sortedTable._schema = curTable.Clone();
                    Debug.Assert(sortedTable._schema.DataSet == null);
                    sortedTable._rows = new SortedDictionary <SyncId, SortedRow>(_syncIdComparer);
                }
                if (curTable.Rows.Count == 0)
                {
                    continue;
                }
                object[] idColVals = new object[sortedTable._idCols.Count];
                // for each row
                foreach (DataRow curRow in curTable.Rows)
                {
                    DataRowVersion viewVersion =
                        (curRow.RowState == DataRowState.Deleted)
                        ? DataRowVersion.Original
                        : DataRowVersion.Current;
                    for (int idx = 0; idx < idColVals.Length; idx += 1)
                    {
                        idColVals[idx] = curRow[sortedTable._idCols[idx], viewVersion];
                    }

                    // Add the row to this table's row storage dictionary
                    SyncId curRowId = SyncUtil.InitRowId(curTable.TableName, idColVals);

                    // Note: There is an issue with batching in the provider which causes the same primary key to be repeated across files.
                    // This crashes the .Add call below. To work-around this problem, we need to check if the key already exists.
                    // If it does, then remove it and add it again so that the latest record is always used.

                    if (sortedTable._rows.ContainsKey(curRowId))
                    {
                        sortedTable._rows.Remove(curRowId);
                    }

                    sortedTable._rows.Add(curRowId, new SortedRow(curRow, numColumns));
                }
            }
        }
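
The ContainsKey/Remove/Add sequence near the end of this method exists only so that a repeated primary key keeps the latest record. The SortedDictionary indexer gives the same last-write-wins behaviour in a single call; a hypothetical one-line equivalent:

        // Equivalent to the ContainsKey/Remove/Add workaround above: assigning through
        // the indexer overwrites an existing key, so the latest record is always kept.
        sortedTable._rows[curRowId] = new SortedRow(curRow, numColumns);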
Example #8
        public void Sync(SyncOptions options)
        {
            var totalIndexedUsers = GetChanges(options);

            Console.WriteLine(@"""incrementalSync"":{""userProfileChangeServiceGetChanges"":" + totalIndexedUsers.Count + ",");
            Console.WriteLine(@"""changeToken"":""" + _changeToken + @""",");
            Console.WriteLine(string.Concat(@"""users"" : [", string.Join(",\n", totalIndexedUsers), "]"));

            SyncUtil.JsonClose();
        }
Example #9
        public void Sync(SyncOptions options)
        {
            NetworkCredential nc = null;

            if (!string.IsNullOrEmpty(_domain) && !string.IsNullOrEmpty(_username) && !string.IsNullOrEmpty(_password))
            {
                nc = new NetworkCredential {
                    Domain = _domain, UserName = _username, Password = _password
                };
                SyncUtil.WriteLine("Using Credentials for {0}\\{1}.", _domain, _username);
            }
            else
            {
                SyncUtil.WriteLine("Using Default Credentials.");
            }

            try
            {
                if (Settings.Default.ConsoleUPS_MyProfileUPSService_UserProfileService.StartsWith(DefaultSite))
                {
                    SyncUtil.WriteLine("Verify ConsoleUPS.exe.config settings before running.");
                    return;
                }

                using (var ups = new UserProfileService {
                    PreAuthenticate = false
                })
                {
                    if (nc != null)
                    {
                        ups.Credentials = nc;
                    }
                    else
                    {
                        ups.UseDefaultCredentials = true;
                    }

                    var total = ups.GetUserProfileCount();
                    Console.WriteLine(@"""getUserProfileCount"":" + total + ",");

                    SyncFull.Instance(ups).Sync(options);
                    SyncIncremental.Instance(ups).Sync(options);
                }
            }
            catch (Exception ex)
            {
                if (ex.Message.Contains("status 401"))
                {
                    SyncUtil.WriteLine("Login failed please check ConsoleUPS.exe.config");
                }

                throw;
            }
        }
Example #10
        private List <string> GetChanges(SyncOptions options)
        {
            var users = new List <string>();

            try
            {
                _changeToken = options.IgnoreChangeToken ? string.Empty : Settings.Default.ChangeToken;

                var changeTokenStart = new UserProfileChangeToken();
                var profileChanges   = _changeService.GetChanges(_changeToken, new UserProfileChangeQuery
                {
                    ChangeTokenStart = changeTokenStart,
                    Add                 = true,
                    Update              = true,
                    UserProfile         = true,
                    SingleValueProperty = true,
                    MultiValueProperty  = true,
                });

                Settings.Default.ChangeToken = profileChanges.ChangeToken;
                Settings.Default.Save();

                var accountNameChanges = profileChanges.Changes.GroupBy(d => d.UserAccountName).Select(gr => gr.Key).ToList();

                foreach (var accountChanged in accountNameChanges)
                {
                    try
                    {
                        var userProfile = _ups.GetUserProfileByName(accountChanged);
                        if (options.UserLimit > 0 && users.Count >= options.UserLimit)
                        {
                            return(users);
                        }
                        if (IsValidProfile(userProfile, options.UserAccountFilter))
                        {
                            users.Add(FieldsToJson(userProfile));
                        }
                    }
                    catch (Exception ex)
                    {
                        SyncUtil.WriteLine("Error : {0} AccountChanged: {1}", ex.Message, accountChanged);
                    }
                }
            }
            catch (Exception ex)
            {
                SyncUtil.WriteLine("IncrementalSyncGetChangesFailed : {0} {1}", ex.Message, ex.StackTrace);
                Settings.Default.ChangeToken = string.Empty;
                Settings.Default.Save();
            }

            return(users);
        }
Example #11
        private List <string> GetAllUsers(int userLimit, string userAccountFilter = null)
        {
            var users     = new List <string>();
            var nextIndex = -1;

            try
            {
                GetUserProfileByIndexResult userInstance;

                do
                {
                    userInstance = _ups.GetUserProfileByIndex(nextIndex);
                    if (userInstance == null || userInstance.UserProfile == null)
                    {
                        continue;
                    }

                    try
                    {
                        if (userLimit > 0 && users.Count >= userLimit)
                        {
                            return(users);
                        }
                        if (IsValidProfile(userInstance.UserProfile, userAccountFilter))
                        {
                            users.Add(FieldsToJson(userInstance.UserProfile));
                            if (users.Count % 100 == 0)
                            {
                                SyncUtil.WriteLine("Saving {0] users.", users.Count);
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        var fieldCount = userInstance.UserProfile.Length;
                        SyncUtil.WriteLine("Error : {0} FieldCount: {1}", ex.Message, fieldCount);
                    }

                    var nextValue = userInstance.NextValue ?? string.Empty;

                    if (!int.TryParse(nextValue.Replace(",", ""), out nextIndex))
                    {
                        SyncUtil.WriteLine("Error with next index : {0}", nextValue);
                    }
                }while (userInstance != null && userInstance.UserProfile != null);
            }
            catch (Exception ex)
            {
                SyncUtil.WriteLine("FullSyncGetUserProfileByIndexFailed : {0} {1}", ex.Message, ex.StackTrace);
            }

            return(users);
        }
Example #12
        public override void BuildItemData(ISyncableItemInfo itemInfo, JObject builder)
        {
            IBook book = Adapter.BookRepository.getBookByRowID(GetRowIdFromItemInfo(itemInfo));

            builder.Add("title", book.Title);
            JArray authors = new JArray();

            foreach (IPerson author in book.Authors)
            {
                ReplicaItemId id = ((Person)author).Id;
                authors.Add(SyncUtil.GenerateItemRefAndIndex(builder, getSyncableItemInfoFrom(id)));
            }
            builder.Add("authors", authors);
        }
Example #13
        /// <summary>
        /// Creates a range set builder and pre-calculates the effective
        /// table ranges for use later when building a BatchRangeSet.
        /// </summary>
        /// <param name="idFormat">
        /// The id format for the SyncIds
        /// </param>
        /// <param name="tableNames">
        /// All the table names that will be in the sum of the
        /// batches. Order does not matter.
        /// </param>
        public BatchRangeSetBuilder(SyncIdFormat idFormat, List <string> tableNames)
        {
            _tableRanges = new Dictionary <string, BatchRange>(tableNames.Count);
            _idFormat    = idFormat;

            int numTables = tableNames.Count;
            SortedList <SyncId, string> tablesSortedBySyncId
                = new SortedList <SyncId, string>(numTables);

            List <object> emptyPkVals = new List <object>(0);

            foreach (string curTable in tableNames)
            {
                SyncId idBeforeTable = SyncUtil.InitRowId(curTable, emptyPkVals);
                tablesSortedBySyncId.Add(idBeforeTable, curTable);
            }

            // for each table we need to determine the:
            // - starting SyncId (zero or first id in table)
            // - ending SyncId (just before next table or infinity)
            BatchRange prevTableRange = null;

            foreach (KeyValuePair <SyncId, string> curElem in tablesSortedBySyncId)
            {
                BatchRange curTableRange = new BatchRange();
                curTableRange.TableName = curElem.Value;
                // assume this is the last table and then fix this if
                // there is another table
                curTableRange.End = _idFormat.Infinity;
                if (prevTableRange == null)
                {
                    // first table starts at zero
                    curTableRange.Start = _idFormat.Zero;
                }
                else
                {
                    // fix up the previous range's end:
                    // set its end to be one before the current table's
                    // starting SyncId
                    prevTableRange.End  = curElem.Key;
                    curTableRange.Start = IdPlusOne(_idFormat, curElem.Key);
                }
                prevTableRange = curTableRange;
                _tableRanges.Add(curTableRange.TableName, curTableRange);
            }
        }
Example #14
        public override void SaveItemData(ISyncableItemInfo itemInfo, JObject itemData)
        {
            long       rowId   = GetRowIdFromItemInfo(itemInfo);
            IDbCommand command = Adapter.Connection.CreateCommand();

            if (rowId == -1)
            {
                command.CommandText = "INSERT INTO Books(BookTitle, CreatedReplica, CreatedTickCount, ModifiedReplica, ModifiedTickCount ) VALUES(@BookTitle, @CreatedReplica, @CreatedTickCount, @ModifiedReplica, @ModifiedTickCount)";
                command.AddParameter("@CreatedReplica", Adapter.GetLocalReplicaIdForGlobalReplicaId(itemInfo.Created.ReplicaId));
                command.AddParameter("@CreatedTickCount", itemInfo.Created.ReplicaTickCount);
            }
            else
            {
                command.CommandText = "UPDATE Books SET BookTitle=@BookTitle,ModifiedReplica=@ModifiedReplica, ModifiedTickCount=@ModifiedTickCount WHERE BookID=@BookID";
                command.AddParameter("@BookID", rowId);
            }
            command.AddParameter("@BookTitle", (string)itemData["item"]["title"]);
            command.AddParameter("@ModifiedReplica", Adapter.GetLocalReplicaIdForGlobalReplicaId(itemInfo.Modified.ReplicaId));
            command.AddParameter("@ModifiedTickCount", itemInfo.Modified.ReplicaTickCount);
            command.ExecuteNonQuery();
            command.Parameters.Clear();

            if (rowId == -1)
            {
                rowId = GetRowIdFromItemInfo(itemInfo);
            }

            command.CommandText = "DELETE FROM BookAuthors WHERE BookID = @ID;";
            command.AddParameter("@ID", rowId);
            command.ExecuteNonQuery();
            command.Parameters.Clear();


            int authorPriority = 0;

            foreach (var authors in itemData["item"]["authors"])
            {
                ISyncableItemInfo authorItemInfo = SyncUtil.SyncableItemInfoFromJsonItemRef(itemData["item"], authors);
                long authorRowId = Adapter.HandlerForItemType(authorItemInfo.ItemType).GetRowIdFromItemInfo(authorItemInfo);
                authorPriority++;
                command.CommandText = String.Format("INSERT INTO BookAuthors(BookID, PersonID, AuthorPriority) VALUES ({0},{1},{2})", rowId, authorRowId, authorPriority);
                command.ExecuteNonQuery();
            }
        }
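
The BookAuthors insert at the end of this method formats the values straight into the SQL text. A hypothetical parameterized variant of just that statement, reusing the same AddParameter extension seen above:

                // Hypothetical parameterized form of the BookAuthors insert above;
                // same behaviour, but the values travel as parameters instead of
                // being formatted into the SQL string.
                command.CommandText = "INSERT INTO BookAuthors(BookID, PersonID, AuthorPriority) VALUES (@BookID, @PersonID, @AuthorPriority)";
                command.Parameters.Clear();
                command.AddParameter("@BookID", rowId);
                command.AddParameter("@PersonID", authorRowId);
                command.AddParameter("@AuthorPriority", authorPriority);
                command.ExecuteNonQuery();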
Example #15
        private async Task <int> SaveChangesBatch(IDbConnection connection, IList <IReplicaInfo> localKnowledge,
                                                  int startItem)
        {
            var request = new JObject
            {
                { "sessionID", _remoteSessionId },
                { "startItem", startItem },
                { "maxBatchCount", PullMaxBatchCount },
                { "maxBatchSize", PullMaxBatchSize }
            };

            JObject response = await _transport.TransportAsync(SyncEndpoint.GetItemDataBatch, request);

            var batch = (JArray)response["batch"];

            foreach (var item in batch)
            {
                ISyncableItemInfo remoteSyncableItemInfo = SyncUtil.SyncableItemInfoFromJson(item["item"]);
                var itemData = new JObject {
                    { "item", item["item"] }
                };

                var status = SyncStatus.MayBeNeeded;
                if (!SyncUtil.KnowledgeContains(localKnowledge, remoteSyncableItemInfo.Modified))
                {
                    ISyncableItemInfo localSyncableItemInfo = _store.LocateCurrentItemInfo(remoteSyncableItemInfo);
                    status = SyncUtil.CalculateSyncStatus(remoteSyncableItemInfo, localSyncableItemInfo,
                                                          _remoteKnowledge);
                }
                SessionDbHelper.SaveItemData(connection, remoteSyncableItemInfo, status, itemData);
                var itemRefs = (JArray)itemData["item"]["itemRefs"];
                foreach (var itemRef in itemRefs)
                {
                    ISyncableItemInfo itemRefInfo      = SyncUtil.SyncableItemInfoFromJson(itemRef);
                    ISyncableItemInfo localItemRefInfo = _store.LocateCurrentItemInfo(itemRefInfo);
                    if (localItemRefInfo != null && localItemRefInfo.Deleted)
                    {
                        await SaveSyncData(connection, itemRefInfo, SyncStatus.MayBeNeeded);
                    }
                }
            }
            return(batch.Count);
        }
Example #16
        public virtual string FieldsToJson(IEnumerable<PropertyData> properties)
        {
            var fields = new List<string>();
            var currentName = string.Empty;

            foreach (var propertyData in properties)
            {
                try
                {
                    if (propertyData == null) continue;
                    
                    currentName = propertyData.Name;
                    fields.Add(string.Format(@"""{0}"" : ""{1}""", propertyData.Name.Replace(@"""", "'"), GetValueData(propertyData.Values.FirstOrDefault())));
                }
                catch (Exception ex)
                {
                    SyncUtil.WriteLine("{0} Warning : {1}", currentName, ex.Message);
                }
            }

            return string.Concat("{", string.Join(",", fields), "}");
        }
Example #17
        public string GetDbState()
        {
            JObject state     = new JObject();
            var     knowledge = GenerateLocalKnowledge().OrderBy((ri) => { return(ri.ReplicaId + ri.ReplicaTickCount.ToString()); });

            state.Add("knowledge", SyncUtil.KnowledgeToJson(knowledge));
            foreach (var itemType in GetItemTypes())
            {
                var        handler       = HandlerForItemType(itemType);
                JArray     items         = new JArray();
                IDbCommand selectCommand = _connection.CreateCommand();
                selectCommand.CommandText = String.Format("SELECT CreatedReplica, CreatedTickCount, ModifiedReplica, ModifiedTickCount FROM {0}", handler.DbTable);
                IList <ISyncableItemInfo> itemInfos = new List <ISyncableItemInfo>();
                using (IDataReader reader = selectCommand.ExecuteReader())
                {
                    while (reader.Read())
                    {
                        IReplicaInfo createdReplicaInfo  = ReplicaInfoFromDataReader(reader, "Created");
                        IReplicaInfo modifiedReplicaInfo = ReplicaInfoFromDataReader(reader, "Modified");

                        itemInfos.Add(new SyncableItemInfo {
                            ItemType = handler.TypeName, Created = createdReplicaInfo, Modified = modifiedReplicaInfo, Deleted = false
                        });
                    }
                }


                var sortedItemInfos = itemInfos.OrderBy((ii) => { return(ii.Created.ReplicaId + ii.Created.ReplicaTickCount.ToString()); });
                foreach (var syncItemInfo in sortedItemInfos)
                {
                    JObject builder = SyncUtil.JsonItemFromSyncableItemInfo(syncItemInfo);
                    BuildItemData(syncItemInfo, builder);
                    items.Add(builder);
                }
                state.Add(itemType, items);
            }
            return(state.ToString());
        }
Example #18
        public override DuplicateStatus GetDuplicateStatus(JObject localItemData, JObject remoteItemData)
        {
            string localName  = (string)localItemData["item"]["title"];
            string remoteName = (string)remoteItemData["item"]["title"];

            if (localName != remoteName)
            {
                return(DuplicateStatus.None);
            }

            JArray localAuthors  = (JArray)localItemData["item"]["authors"];
            JArray remoteAuthors = (JArray)remoteItemData["item"]["authors"];

            if (localAuthors.Count != remoteAuthors.Count)
            {
                return(DuplicateStatus.None);
            }

            for (int i = 0; i < localAuthors.Count; i++)
            {
                ISyncableItemInfo localAuthorItemInfo  = SyncUtil.SyncableItemInfoFromJsonItemRef(localItemData["item"], localAuthors[i]);
                ISyncableItemInfo remoteAuthorItemInfo = SyncUtil.SyncableItemInfoFromJsonItemRef(remoteItemData["item"], remoteAuthors[i]);

                if (localAuthorItemInfo.Created.ReplicaId != remoteAuthorItemInfo.Created.ReplicaId)
                {
                    return(DuplicateStatus.None);
                }
                if (localAuthorItemInfo.Created.ReplicaTickCount != remoteAuthorItemInfo.Created.ReplicaTickCount)
                {
                    return(DuplicateStatus.None);
                }
            }


            return(DuplicateStatus.Exact);
        }
Example #19
        //
        // **** Methods for pulling sorted data
        //
        public IEnumerable <SortedBatch> PullSortedBatches()
        {
            // start the first batch and range
            SortedBatch pendingBatch = new SortedBatch();
            long        sizeOfBatch  = 0;

            // initialize the range set builder because we will be pulling
            // rows now and need to calculate the correct range sets
            BatchRangeSetBuilder rangeSetBuilder = new BatchRangeSetBuilder(_srcKnowledge.GetSyncIdFormatGroup().ItemIdFormat, _tablesInApplyOrder);

            rangeSetBuilder.StartBuildingFirstBatchRangeSet();

            // for each table in apply order
            foreach (string tableName in _tablesInApplyOrder)
            {
                // start the next table range
                rangeSetBuilder.StartNextTable(tableName);

                // if we have a datatable for this name
                SortedTable curTable;
                if (_sortedTables.TryGetValue(tableName, out curTable) &&
                    curTable._schema != null)
                {
                    // add the current table to the batch we are working
                    // on
                    DataTable curDataTable = curTable._schema.Clone();
                    pendingBatch.sortedDataSet.Tables.Add(curDataTable);
                    curDataTable.BeginLoadData();

                    // if there are no rows in the table just add it to
                    // the current dataset and move on
                    SyncId maxIdInCurrentTable = null;
                    // pull the rows in SyncId order
                    foreach (KeyValuePair <SyncId, SortedRow> kvp in curTable._rows)
                    {
                        long curRowSize = SyncUtil.GetRowSizeForObjectArray(kvp.Value._rowValues);
                        if (curRowSize > (_maxSortedBatchSizeInKB * 1024))
                        {
                            // Note: This code is modified to throw a more specific exception.
                            // If we end up merging this code with the provider, then the caller has to be tested to
                            // make sure it works with the logic in the provider codebase.
                            throw SyncServiceException.CreateInternalServerError(
                                      String.Format(Strings.RowExceedsConfiguredBatchSize, _maxSortedBatchSizeInKB, tableName, curRowSize));
                        }
                        // fixme: if this row won't fit then return
                        // the current batch
                        if ((sizeOfBatch + curRowSize) > (_maxSortedBatchSizeInKB * 1024))
                        {
                            // * done loading data
                            curDataTable.EndLoadData();
                            // * add last sync id in batch
                            if (maxIdInCurrentTable == null)
                            {
                                // we have not added any rows to the
                                // current table so we should create a
                                // dummy id in the current table for
                                // the range
                                maxIdInCurrentTable = rangeSetBuilder.MakeDummyFirstRowID(tableName);
                            }

                            rangeSetBuilder.AddSyncId(tableName, maxIdInCurrentTable);
                            // start a new batch
                            BatchRangeSet curRS = rangeSetBuilder.FinishBuildingBatchRangeSet();
                            pendingBatch.sortedDataSetKnowledge = curRS.ProjectOnKnowledge(_srcKnowledge);

                            yield return(pendingBatch);

                            // *** tricky
                            // after yielding the current batch we
                            // need to start a new one for the rest of
                            // the rows in this table.
                            // we must reset all the needed state and
                            // this is tricky
                            maxIdInCurrentTable = null;
                            // start a new batch
                            pendingBatch = new SortedBatch();
                            sizeOfBatch  = 0;
                            // start a new range after the current one
                            rangeSetBuilder.StartBuildingBatchRangeSet(curRS);
                            // add the current table to the batch we are working
                            // on
                            curDataTable = curTable._schema.Clone();
                            pendingBatch.sortedDataSet.Tables.Add(curDataTable);
                            curDataTable.BeginLoadData();
                        }
                        AddSortedRowToDataTable(curDataTable, kvp.Value);
                        sizeOfBatch        += curRowSize;
                        maxIdInCurrentTable = kvp.Key;
                    }
                    curDataTable.EndLoadData();
                }
            }
            // we should always be working on a batch
            {
                Debug.Assert(pendingBatch != null);
                BatchRangeSet curRS = rangeSetBuilder.FinishLastBatchRangeSet();
                pendingBatch.sortedDataSetKnowledge = curRS.ProjectOnKnowledge(_srcKnowledge);
            }
            yield return(pendingBatch);
        }
Example #20
        private async Task PushChanges()
        {
            ReportProgressAndCheckCacellation(new SyncProgress
            {
                Stage           = SyncStage.FindingLocalChanges,
                PercentComplete = 0,
                Message         = "Finding local changes"
            });

            var request = new JObject {
                { "sessionID", _remoteSessionId }
            };

            var localKnowledge   = _store.GenerateLocalKnowledge();
            var changedItemInfos = _store.LocateChangedItems(_remoteKnowledge).ToList();

            int totalChanges = changedItemInfos.Count();

            request.Add(new JProperty("knowledge", SyncUtil.KnowledgeToJson(localKnowledge)));
            request.Add(new JProperty("changeCount", totalChanges));

            await _transport.TransportAsync(SyncEndpoint.PutChanges, request);

            ReportProgressAndCheckCacellation(new SyncProgress
            {
                Stage           = SyncStage.FindingLocalChanges,
                PercentComplete = 100,
                Message         = String.Format("Found {0} local changes", totalChanges)
            });

            ReportProgressAndCheckCacellation(new SyncProgress
            {
                Stage           = SyncStage.UploadingLocalChanges,
                PercentComplete = 0,
                Message         = String.Format("Uploading {0} local changes", totalChanges)
            });

            int  maxBatchCount           = PushMaxBatchCount;
            int  maxBatchSize            = PushMaxBatchSize;
            long startTick               = Environment.TickCount;
            int  previousPercentComplete = -1;
            int  i          = 0;
            var  batchArray = new JArray();
            int  batchSize  = 0;

            foreach (ISyncableItemInfo syncItemInfo in changedItemInfos)
            {
                i++;
                int percentComplete = ((i * 100) / totalChanges);
                if (percentComplete != previousPercentComplete)
                {
                    ReportProgressAndCheckCacellation(new SyncProgress
                    {
                        Stage           = SyncStage.UploadingLocalChanges,
                        PercentComplete = percentComplete,
                        Message         =
                            String.Format("Uploading local changes, {0}% complete ({1})", percentComplete,
                                          String.Format("Averaging {0}ms/item over {1} items",
                                                        (Environment.TickCount - startTick) / i, i))
                    });
                }
                previousPercentComplete = percentComplete;

                var builder = SyncUtil.JsonItemFromSyncableItemInfo(syncItemInfo);
                if (!syncItemInfo.Deleted)
                {
                    _store.BuildItemData(syncItemInfo, builder);
                }

                var singleItemRequest = new JObject {
                    { "changeNumber", i }, { "item", builder }
                };

                batchSize += singleItemRequest.ToString().Length;
                batchArray.Add(singleItemRequest);

                if (i == totalChanges || (i % maxBatchCount) == 0 || batchSize >= maxBatchSize)
                {
                    var batchRequest = new JObject {
                        { "sessionID", _remoteSessionId }, { "batch", batchArray }
                    };
                    await _transport.TransportAsync(SyncEndpoint.PutItemDataBatch, batchRequest);

                    batchArray = new JArray();
                    batchSize  = 0;
                }
            }
            ReportProgressAndCheckCacellation(new SyncProgress
            {
                Stage           = SyncStage.UploadingLocalChanges,
                PercentComplete = 100,
                Message         = String.Format("Uploaded {0} local changes", totalChanges)
            });

            await ApplyChanges(totalChanges);
        }
Example #21
        /// <summary>
        /// Returns whether the original should be cancelled
        /// </summary>
        public bool DoSync(object target, params object[] args)
        {
            if (!Multiplayer.ShouldSync)
            {
                return(false);
            }

            // todo limit per specific target/argument
            //if (Utils.MillisNow - lastSendTime < minTime)
            //    return true;

            LoggingByteWriter writer  = new LoggingByteWriter();
            MpContext         context = writer.MpContext();

            writer.Log.Node(ToString());

            writer.WriteInt32(syncId);
            SyncUtil.WriteContext(this, writer);

            var map = context.map;

            void SyncObj(object obj, SyncType type, string debugInfo)
            {
                writer.Log.Enter(debugInfo);

                try
                {
                    SyncSerialization.WriteSyncObject(writer, obj, type);
                }
                finally
                {
                    writer.Log.Exit();
                }

                if (type.contextMap && obj is Map contextMap)
                {
                    map = contextMap;
                }

                if (context.map is Map newMap)
                {
                    if (map != null && map != newMap)
                    {
                        throw new Exception($"{this}: map mismatch ({map?.uniqueID} and {newMap?.uniqueID})");
                    }
                    map = newMap;
                }
            }

            if (targetTransformer != null)
            {
                SyncObj(targetTransformer.Writer.DynamicInvoke(target, target, args), targetTransformer.NetworkType, "Target (transformed)");
            }
            else
            {
                WriteTarget(target, args, SyncObj);
            }

            for (int i = 0; i < argTypes.Length; i++)
            {
                if (argTransformers[i] == null)
                {
                    SyncObj(args[i], argTypes[i], $"Arg {i} {argNames[i]}");
                }
            }

            for (int i = 0; i < argTypes.Length; i++)
            {
                if (argTransformers[i] is { } trans)
                {
                    SyncObj(trans.Writer.DynamicInvoke(args[i], target, args), trans.NetworkType, $"Arg {i} {argNames[i]} (transformed)");
                }
            }

            int mapId = map?.uniqueID ?? ScheduledCommand.Global;

            writer.Log.Node("Map id: " + mapId);
            Multiplayer.WriterLog.AddCurrentNode(writer);

            Multiplayer.Client.SendCommand(CommandType.Sync, mapId, writer.ToArray());

            lastSendTime = Utils.MillisNow;

            return(true);
        }
Example #22
        private IEnumerable <SyncConflict> CheckForDuplicates(IDbConnection connection)
        {
            var conflicts = new List <SyncConflict>();

            var changedItemInfos = _store.LocateChangedItems(_remoteKnowledge).ToList();

            foreach (string itemType in _store.GetItemTypes())
            {
                IDbCommand getInsertedItemsCommand = connection.CreateCommand();
                getInsertedItemsCommand.CommandText =
                    String.Format(
                        "SELECT ItemID, SyncStatus, ItemType, GlobalCreatedReplica, CreatedTickCount, GlobalModifiedReplica, ModifiedTickCount, ItemData  FROM SyncItems WHERE SyncStatus={0} AND ItemType='{1}'",
                        (int)SyncStatus.Insert, itemType);
                using (IDataReader reader = getInsertedItemsCommand.ExecuteReader())
                {
                    while (reader != null && reader.Read())
                    {
                        long              itemId              = Convert.ToInt64(reader["ItemID"]);
                        IReplicaInfo      createdReplicaInfo  = SessionDbHelper.ReplicaInfoFromDataReader(reader, "Created");
                        IReplicaInfo      modifiedReplicaInfo = SessionDbHelper.ReplicaInfoFromDataReader(reader, "Modified");
                        ISyncableItemInfo remoteItemInfo      = new SyncableItemInfo
                        {
                            ItemType = itemType,
                            Created  = createdReplicaInfo,
                            Modified = modifiedReplicaInfo,
                            Deleted  = false
                        };
                        var remoteItemData = JObject.Parse((string)reader["ItemData"]);

                        foreach (var changedItemInfo in changedItemInfos)
                        {
                            if (changedItemInfo.ItemType != remoteItemInfo.ItemType)
                            {
                                continue;
                            }
                            if (SyncUtil.KnowledgeContains(_remoteKnowledge, changedItemInfo.Created))
                            {
                                continue;
                            }

                            // Inserted here without remote knowledge, could be a dup
                            var builder = SyncUtil.JsonItemFromSyncableItemInfo(changedItemInfo);
                            _store.BuildItemData(changedItemInfo, builder);

                            var localItemData = new JObject {
                                { "item", builder }
                            };

                            DuplicateStatus dupStatus = _store.GetDuplicateStatus(remoteItemInfo.ItemType, localItemData,
                                                                                  remoteItemData);
                            if (dupStatus == DuplicateStatus.Exact)
                            {
                                SessionDbHelper.ReplaceAllItemRefs(connection, _store, remoteItemInfo, changedItemInfo);
                                long         tickCount       = _store.IncrementLocalRepilcaTickCount();
                                IReplicaInfo modifiedReplica = new ReplicaInfo
                                {
                                    ReplicaId        = _store.GetLocalReplicaId(),
                                    ReplicaTickCount = tickCount
                                };
                                SessionDbHelper.ResolveItemNoData(connection, remoteItemInfo,
                                                                  SyncStatus.DeleteNonExisting, modifiedReplica);
                                break;
                            }
                            if (dupStatus == DuplicateStatus.Possible)
                            {
                                // TODO: clean this up, this call does more than we need
                                SessionDbHelper.ResolveItemNoData(connection, remoteItemInfo, SyncStatus.InsertConflict,
                                                                  remoteItemInfo.Modified);
                                conflicts.Add(new SyncConflict(itemId, SyncStatus.InsertConflict, changedItemInfo,
                                                               remoteItemInfo));
                                break;
                            }
                        }
                    }
                }
            }
            return(conflicts);
        }
Example #23
        private IEnumerable <SyncConflict> LoadConflicts(IDbConnection connection)
        {
            var        conflicts = new List <SyncConflict>();
            IDbCommand getInsertedItemsCommand = connection.CreateCommand();

            getInsertedItemsCommand.CommandText =
                String.Format(
                    "SELECT ItemID, SyncStatus, ItemType, GlobalCreatedReplica, CreatedTickCount, GlobalModifiedReplica, ModifiedTickCount, ItemData  FROM SyncItems WHERE SyncStatus NOT IN ({0},{1},{2},{3},{4},{5})",
                    (int)SyncStatus.Insert,
                    (int)SyncStatus.Update,
                    (int)SyncStatus.Delete,
                    (int)SyncStatus.DeleteNonExisting,
                    (int)SyncStatus.MayBeNeeded,
                    (int)SyncStatus.InsertConflict
                    );
            using (var reader = getInsertedItemsCommand.ExecuteReader())
            {
                while (reader != null && reader.Read())
                {
                    var createdReplicaInfo  = SessionDbHelper.ReplicaInfoFromDataReader(reader, "Created");
                    var modifiedReplicaInfo = SessionDbHelper.ReplicaInfoFromDataReader(reader, "Modified");
                    var itemType            = (string)reader["ItemType"];
                    var remoteItemInfo      = new SyncableItemInfo
                    {
                        ItemType = itemType,
                        Created  = createdReplicaInfo,
                        Modified = modifiedReplicaInfo,
                        Deleted  = false
                    };
                    var itemId = Convert.ToInt64(reader["ItemID"]);
                    var status = (SyncStatus)reader["SyncStatus"];

                    ISyncableItemInfo localItemInfo = _store.LocateCurrentItemInfo(remoteItemInfo);

                    if (status == SyncStatus.UpdateConflict)
                    {
                        // Check to see if the "conflict" is actually an exact same update
                        var builder = SyncUtil.JsonItemFromSyncableItemInfo(localItemInfo);
                        _store.BuildItemData(localItemInfo, builder);

                        var localItemData = new JObject {
                            { "item", builder }
                        };

                        var remoteItemData = JObject.Parse((string)reader["ItemData"]);
                        var dupStatus      = _store.GetDuplicateStatus(remoteItemInfo.ItemType, localItemData,
                                                                       remoteItemData);
                        if (dupStatus == DuplicateStatus.Exact)
                        {
                            var tickCount       = _store.IncrementLocalRepilcaTickCount();
                            var modifiedReplica = new ReplicaInfo
                            {
                                ReplicaId        = _store.GetLocalReplicaId(),
                                ReplicaTickCount = tickCount
                            };
                            SessionDbHelper.ResolveItemNoData(connection, remoteItemInfo, SyncStatus.Update,
                                                              modifiedReplica);
                            // TODO: Really should have an update status that just updates the modified repos without doing everything else, but this should work
                            continue;
                        }
                    }

                    conflicts.Add(new SyncConflict(itemId, status, localItemInfo, remoteItemInfo));
                }
            }

            return(conflicts);
        }
Example #24
        private async Task <IEnumerable <SyncConflict> > PullChanges()
        {
            ReportProgressAndCheckCacellation(new SyncProgress
            {
                Stage           = SyncStage.FindingRemoteChanges,
                PercentComplete = 0,
                Message         = "Looking for remote changes"
            });

            var request = new JObject {
                { "sessionID", _remoteSessionId }
            };

            var localKnowledge = _store.GenerateLocalKnowledge().ToList();

            request.Add(new JProperty("knowledge", SyncUtil.KnowledgeToJson(localKnowledge)));

            JObject response = await _transport.TransportAsync(SyncEndpoint.GetChanges, request);

            _remoteKnowledge = SyncUtil.KnowledgeFromJson(response["knowledge"]);
            var totalChanges = (int)response["totalChanges"];

            ReportProgressAndCheckCacellation(new SyncProgress
            {
                Stage           = SyncStage.FindingRemoteChanges,
                PercentComplete = 100,
                Message         = String.Format("Found {0} remote changes", totalChanges)
            });

            ReportProgressAndCheckCacellation(new SyncProgress
            {
                Stage           = SyncStage.DownloadingRemoteChanges,
                PercentComplete = 0,
                Message         = String.Format("Downloading {0} remote changes", totalChanges)
            });
            using (var connection = _syncSessionDbConnectionProvider.GetSyncSessionDbConnection(_localSessionId))
            {
                connection.ExecuteNonQuery("BEGIN");
                SessionDbHelper.ClearSyncItems(connection);

                long startTick = Environment.TickCount;
                int  previousPercentComplete = -1;
                for (int i = 1; i <= totalChanges;)
                {
                    i += await SaveChangesBatch(connection, localKnowledge, i);

                    int percentComplete = ((i * 100) / totalChanges);
                    if (percentComplete != previousPercentComplete)
                    {
                        ReportProgressAndCheckCacellation(new SyncProgress
                        {
                            Stage           = SyncStage.DownloadingRemoteChanges,
                            PercentComplete = percentComplete,
                            Message         =
                                String.Format("Downloading remote changes, {0}% complete ({1})", percentComplete,
                                              String.Format("Averaging {0}ms/item over {1} items",
                                                            (Environment.TickCount - startTick) / i, i))
                        });
                    }
                    previousPercentComplete = percentComplete;
                }
                connection.ExecuteNonQuery("COMMIT");
                ReportProgressAndCheckCacellation(new SyncProgress
                {
                    Stage           = SyncStage.DownloadingRemoteChanges,
                    PercentComplete = 100,
                    Message         = String.Format("Downloaded all {0} remote changes", totalChanges)
                });

                ReportProgressAndCheckCacellation(new SyncProgress
                {
                    Stage           = SyncStage.CheckingForConflicts,
                    PercentComplete = 0,
                    Message         = "Looking for conflicts"
                });
                var conflicts = new List <SyncConflict>();
                conflicts.AddRange(CheckForDuplicates(connection));
                conflicts.AddRange(LoadConflicts(connection));
                ReportProgressAndCheckCacellation(new SyncProgress
                {
                    Stage           = SyncStage.CheckingForConflicts,
                    PercentComplete = 100,
                    Message         = String.Format("Found {0} conflicts", conflicts.Count)
                });
                return(conflicts);
            }
        }
Example #25
        /// <summary>
        /// Syncs data from GeniusDB into the transfer DB hosted on .144
        /// </summary>
        /// <seealso cref="M:Spring.Scheduling.Quartz.QuartzJobObject.Execute(Quartz.JobExecutionContext)"/>
        protected override void ExecuteInternal(JobExecutionContext context)
        {
            var startTime = DateTime.UtcNow;
            var logEntity = new SCHEDULERLOG {
                STARTTIME = startTime
            };

            var    strInfo         = new StringBuilder();
            string settingFilePath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory,
                                                  @"config\EJVMigration2.xml");
            var settingManager = new XmlSettingManager(File.ReadAllText(settingFilePath));

            // Set Connection string
            settingManager.Init(string.Empty); // initialize the mapping
            strInfo.AppendFormat("Source [Type: {0} Address: {1}]\n", settingManager.SourceDb.Type,
                                 settingManager.SourceDb.Conn);
            strInfo.AppendFormat("Destination [Type: {0} Address: {1}]\n", settingManager.DestinationDb.Type,
                                 settingManager.DestinationDb.Conn);

            var lastSyncTime    = new DateTime(2017, 3, 17);
            var currentSyncTime = startTime.AddHours(8);

            try
            {
                using (var dataSync = new DataSynchronizer(settingManager, lastSyncTime, currentSyncTime))
                {
                    dataSync.PostTaskExecuted +=
                        (sender, e) =>
                        strInfo.AppendFormat
                            ("Post sync task {0} is executed.\n",
                            e.TaskName);

                    dataSync.Init();
                    var sourceTable = settingManager.SourceTableName.Split(',');
                    strInfo.Append(dataSync.SyncEachTableFromMaxMtime(sourceTable, "asset_last_chg_dt"));
                    //strInfo.Append(dataSync.Sync(sourceTable));
                }

                var endTime = DateTime.UtcNow;

                //Update Bond Info
                strInfo.AppendFormat("Update Bond Info en&cn start at {0}.\n", DateTime.UtcNow.ToGMT8String());
                SyncUtil.UpdateBondInfo(lastSyncTime, currentSyncTime);
                strInfo.AppendFormat("Update Bond Info en&cn completed at {0}.\n ", DateTime.UtcNow.ToGMT8String());

                //Rebuild Index
                strInfo.AppendFormat("Rebuild Index at {0}.\n", DateTime.UtcNow.ToGMT8String());
                var result1 = SolrClient.RebuildIndex("full");
                endTime = DateTime.UtcNow;
                strInfo.AppendFormat("Rebuild Index completed at {0}.\n Result: {1}\n", DateTime.UtcNow.ToGMT8String(), result1);


                logEntity.ENDTIME   = endTime;
                logEntity.JobStatus = (result1 == "Success") ? JobStatus.Success : JobStatus.Fail;
                logEntity.RUNDETAIL = strInfo.ToString();
                WriteLogEntity(logEntity);
            }
            catch (Exception exception)
            {
                logEntity.ENDTIME   = DateTime.UtcNow;
                logEntity.JobStatus = JobStatus.Fail;
                logEntity.RUNDETAIL = strInfo + "\n" + exception;
                WriteLogEntity(logEntity);
            }
        }
Example #26
        static void Main(string[] args)
        {
            var waitForUser       = true;
            var userLimit         = 0;
            var userAccount       = string.Empty;
            var ignoreChangeToken = false;

            if (args != null && args.Length > 0)
            {
                bool.TryParse(args[0], out waitForUser);

                if (args.Length > 1)
                {
                    int.TryParse(args[1], out userLimit);
                }

                if (args.Length > 2)
                {
                    userAccount = args[2];
                }

                if (args.Length > 3)
                {
                    bool.TryParse(args[3], out ignoreChangeToken);
                }
            }

            SyncUtil.JsonOpen();

            try
            {
                var userProfileSync = new ProfileSync();
                userProfileSync.Sync(new SyncOptions
                {
                    UserLimit         = userLimit,
                    UserAccountFilter = userAccount,
                    IgnoreChangeToken = ignoreChangeToken
                });
            }
            catch (Exception ex)
            {
                Console.ForegroundColor = ConsoleColor.Black;
                Console.WriteLine("/*");

                Console.ForegroundColor = ConsoleColor.Red;
                Console.WriteLine("An error occurred while running ProfileSync.\n");

                Console.ResetColor();
                Console.WriteLine(ex.ToString());

                Console.ForegroundColor = ConsoleColor.Black;
                Console.WriteLine("*/");
                Console.ResetColor();
            }

            SyncUtil.JsonClose();

            if (!waitForUser)
            {
                return;
            }

            Console.WriteLine("\nPress Enter to exit");
            Console.ReadLine();
        }
Example #27
        /// <summary>
        /// Execute the actual job. The job data map will already have been
        ///             applied as object property values by execute. The contract is
        ///             exactly the same as for the standard Quartz execute method.
        /// </summary>
        /// <seealso cref="M:Spring.Scheduling.Quartz.QuartzJobObject.Execute(Quartz.JobExecutionContext)"/>
        protected override void ExecuteInternal(JobExecutionContext context)
        {
            var startTime = DateTime.UtcNow;
            var logEntity = new SCHEDULERLOG {
                STARTTIME = startTime, JOBTYPE = JobType
            };

            var    strInfo         = new StringBuilder();
            string settingFilePath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory,
                                                  @"config\File-To-IPP-sync.xml");
            var settingManager = new XmlSettingManager(File.ReadAllText(settingFilePath));

            // Set Connection string
            var destinationDbConn = string.Empty;

            settingManager.Init(destinationDbConn); // initialize the mapping

            strInfo.AppendFormat("<p>Source [Type: {0} Address: {1}]\n", settingManager.SourceDb.Type,
                                 settingManager.SourceDb.Conn);

            strInfo.AppendFormat("Destination [Type: {0} Address: {1}]</p>\n", settingManager.DestinationDb.Type,
                                 settingManager.DestinationDb.Conn);


            var from = getMaxDateTime(settingManager);
            var to   = startTime.AddHours(8).AddHours(-settingManager.DeltaHours);

            strInfo.AppendFormat("<p>Max '{0}' of table '{1}' : {2}.</p>", settingManager.DateKeyColumn, settingManager.DateKeyTable, from);

            strInfo.AppendFormat("<p>Sync duration : {0} to : {1} .</p>", from, to);


            try
            {
                using (var dataSync = new DataSynchronizer(settingManager, from, to))
                {
                    dataSync.TableSynched +=
                        (sender, e) =>
                        strInfo.AppendFormat
                            ("{0} rows have been synchronized from {1} view in CMAFileDB to {2} table in IPP DB.\n",
                            e.NumOfRowsSynched, e.Source, e.Dest);

                    dataSync.PostTaskExecuted +=
                        (sender, e) =>
                        strInfo.AppendFormat
                            ("Post sync task {0} is executed.\n",
                            e.TaskName);

                    dataSync.Init();
                    strInfo.Append(dataSync.Sync(new[] { "GetNewInstitution", "GetNewFile" }));
                }

                strInfo.AppendFormat("{0} table(s) be synchronized.\n", settingManager.TableMappings.Count());

                //Update File Topic
                strInfo.AppendFormat("<p>Update File Topic start at {0}.\n", DateTime.UtcNow.ToGMT8String());
                var result = SyncUtil.UpdateFileTopic(from, to);
                strInfo.AppendFormat("Update File Topic completed at {0}.\n Result: {1}</p>", DateTime.UtcNow.ToGMT8String(), result);

                //Rebuild Index
                strInfo.AppendFormat("<p>Rebuild Index at {0}.\n", DateTime.UtcNow.ToGMT8String());
                var result1 = SolrClient.RebuildIndex("full");
                var endTime = DateTime.UtcNow;
                strInfo.AppendFormat("Rebuild Index completed at {0}.\n Result: {1}</p>", DateTime.UtcNow.ToGMT8String(), result);
                if (result1 != "Success")
                {
                    strInfo.AppendFormat("<p style=\"color:red;\">Solr rebuild failed:<br />{0}</p>", result1);
                }

                logEntity.ENDTIME   = endTime;
                logEntity.JobStatus = (result == "Success" && result1 == "Success") ? JobStatus.Success : JobStatus.Fail;
                logEntity.RUNDETAIL = strInfo.ToString();
                WriteLogEntity(logEntity);
            }
            catch (Exception exception)
            {
                logEntity.ENDTIME   = DateTime.UtcNow;
                logEntity.JobStatus = JobStatus.Fail;
                logEntity.RUNDETAIL = strInfo + "\n<b>Exception detail:</b>\n" + exception + "\n<p>No tables synchronized.</p>";
                WriteLogEntity(logEntity);
            }
        }