/// <summary>
/// Initializes a new instance of the <see cref="SyncManager"/> class.
/// All arguments are stored as-is; no validation or normalization is performed here.
/// </summary>
/// <param name="replica1RootPath">Root path of the first replica.</param>
/// <param name="replica2RootPath">Root path of the second replica.</param>
/// <param name="fileNameExcludes">File names to exclude (presumably from synchronization — confirm against usage).</param>
/// <param name="syncOption">Flags controlling the synchronization behavior.</param>
/// <param name="logger">Logger used for reporting.</param>
public SyncManager(string replica1RootPath, string replica2RootPath, ICollection<string> fileNameExcludes, SyncOptions syncOption, ILogger logger)
{
    this.replica1RootPath = replica1RootPath;
    this.replica2RootPath = replica2RootPath;
    this.fileNameExcludes = fileNameExcludes;
    this.syncOption = syncOption;
    this.logger = logger;
}
/// <summary>
/// Initializes the dialog for the given sync operation.
/// If the <see cref="SyncOptions.Silenceable"/> flag is requested but the user has not
/// enabled silent mode in the roaming configuration, the flag is stripped so the dialog
/// stays visible.
/// </summary>
/// <param name="syncOperator">The operator that performs the actual synchronization. Must not be null.</param>
/// <param name="options">Flags controlling dialog behavior (silencing, error propagation).</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="syncOperator"/> is null.</exception>
private SyncDialog(SyncOperator syncOperator, SyncOptions options)
{
    if (syncOperator == null)
        throw new ArgumentNullException(nameof(syncOperator)); // nameof is refactor-safe, unlike the old string literal

    // Silencing is only honored when the user opted into silent mode.
    if ((options & SyncOptions.Silenceable) == SyncOptions.Silenceable && !RoamiePlugin.Singleton.RoamingContext.Configuration.SilentMode)
        options &= ~SyncOptions.Silenceable;

    SyncOperator = syncOperator;
    Options = options;
    InitializeComponent();
}
// TODO: localOnly if no update available? - so for local diagnose etc..
/// <summary>
/// Ensures the named mod exists at <paramref name="destination"/>: clones it when the
/// target folder is missing, otherwise updates the existing repository unless it is
/// already at the right version (and <paramref name="force"/> is not set).
/// </summary>
public async Task GetMod(string name, IAbsoluteDirectoryPath destination, IAbsoluteDirectoryPath packPath, StatusRepo status, bool force = false)
{
    var modInfo = GetMod(name);
    var modFolder = destination.GetChildDirectoryWithName(modInfo.Key);
    var configure = GetOpts(packPath, status, modInfo);

    // No folder yet: fresh clone and we are done.
    if (!modFolder.Exists)
    {
        var cloneOptions = new SyncOptions();
        configure(cloneOptions);
        await Repository.Factory.Clone(cloneOptions.Hosts, modFolder.ToString(), configure)
            .ConfigureAwait(false);
        return;
    }

    var repoDir = modFolder.GetChildDirectoryWithName(Repository.RepoFolderName);

    // Already up to date and not forced: nothing to do.
    if (!force && repoDir.Exists && IsRightVersion(repoDir, modInfo))
        return;

    var repository = GetRepo(repoDir, modFolder, configure);
    await repository.Update(configure).ConfigureAwait(false);
}
/// <summary>
/// Builds the full database key for the property reached by <paramref name="propertyGetter"/>.
/// Returns (full key path, deepest property name, isPrimitivePatch) where isPrimitivePatch
/// indicates a Patch sync of a primitive/nullable/string property, in which case the deepest
/// property name is excluded from the path.
/// </summary>
private Tuple<string, string, bool> GenerateFullKey<TProperty>(string key, Expression<Func<T, TProperty>> propertyGetter, SyncOptions syncOptions)
{
    var visitor = new MemberAccessVisitor();
    visitor.Visit(propertyGetter);

    var deepestName = visitor.PropertyNames.First();

    // "Primitive-like" covers primitives, nullable value types and strings.
    var typeInfo = typeof(TProperty).GetTypeInfo();
    var isPrimitiveLike = typeInfo.IsPrimitive
        || Nullable.GetUnderlyingType(typeof(TProperty)) != null
        || typeof(TProperty) == typeof(string);

    if (syncOptions == SyncOptions.Patch && isPrimitiveLike)
    {
        // Patch of a leaf value: the leaf name is kept out of the path (it becomes the JSON field).
        var parentPath = key + "/" + string.Join("/", visitor.PropertyNames.Skip(1).Reverse());
        return Tuple.Create(parentPath, deepestName, true);
    }

    var fullPath = key + "/" + string.Join("/", visitor.PropertyNames.Reverse());
    return Tuple.Create(fullPath, deepestName, false);
}
/// <summary>
/// Converts (if needed) and stages a video for a sync job item, leaving the item in the
/// ReadyToTransfer state. Transcodes when the stream builder decides on transcoding,
/// extracts/converts external subtitles, or direct-copies/downloads the source otherwise.
/// </summary>
/// <param name="jobItem">The sync job item being processed; mutated throughout (status, paths, progress).</param>
/// <param name="item">The video being synced.</param>
/// <param name="user">The user the media sources are resolved for.</param>
/// <param name="enableConversion">When false, the method bails out early if any conversion would be required.</param>
/// <param name="syncOptions">Transcoding limits (CPU cores, full-speed flag).</param>
/// <param name="progress">Overall progress reporter.</param>
/// <param name="cancellationToken">Cancellation token; cancellation re-queues the item.</param>
private async Task Sync(SyncJobItem jobItem, Video item, User user, bool enableConversion, SyncOptions syncOptions, IProgress<double> progress, CancellationToken cancellationToken)
{
    var job = _syncManager.GetJob(jobItem.JobId);
    var jobOptions = _syncManager.GetVideoOptions(jobItem, job);
    var conversionOptions = new VideoOptions { Profile = jobOptions.DeviceProfile };
    conversionOptions.DeviceId = jobItem.TargetId;
    conversionOptions.Context = EncodingContext.Static;
    conversionOptions.ItemId = item.Id.ToString("N");
    conversionOptions.MediaSources = _mediaSourceManager.GetStaticMediaSources(item, false, user).ToList();

    var streamInfo = new StreamBuilder(_mediaEncoder, _logger).BuildVideoItem(conversionOptions);
    var mediaSource = streamInfo.MediaSource;

    // No sense creating external subs if we're already burning one into the video
    var externalSubs = streamInfo.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode
        ? new List<SubtitleStreamInfo>()
        : streamInfo.GetExternalSubtitles(false, true, null, null);

    // Mark as requiring conversion if transcoding the video, or if any subtitles need to be extracted
    var requiresVideoTranscoding = streamInfo.PlayMethod == PlayMethod.Transcode && jobOptions.IsConverting;
    var requiresConversion = requiresVideoTranscoding || externalSubs.Any(i => RequiresExtraction(i, mediaSource));

    // Caller disabled conversion for this pass: leave the item untouched.
    if (requiresConversion && !enableConversion)
    {
        return;
    }

    jobItem.MediaSourceId = streamInfo.MediaSourceId;
    jobItem.TemporaryPath = GetTemporaryPath(jobItem);

    if (requiresConversion)
    {
        jobItem.Status = SyncJobItemStatus.Converting;
    }

    if (requiresVideoTranscoding)
    {
        // Save the job item now since conversion could take a while
        await _syncManager.UpdateSyncJobItemInternal(jobItem).ConfigureAwait(false);
        await UpdateJobStatus(jobItem.JobId).ConfigureAwait(false);

        try
        {
            var lastJobUpdate = DateTime.MinValue;
            var innerProgress = new ActionableProgress<double>();
            innerProgress.RegisterAction(async pct =>
            {
                progress.Report(pct);

                // NOTE(review): lastJobUpdate is never reassigned after DateTime.MinValue,
                // so this throttle condition is always true and the db is updated on every
                // progress report — confirm whether an update of lastJobUpdate is missing here.
                if ((DateTime.UtcNow - lastJobUpdate).TotalSeconds >= DatabaseProgressUpdateIntervalSeconds)
                {
                    jobItem.Progress = pct / 2; // transcode accounts for the first half of overall progress
                    await _syncManager.UpdateSyncJobItemInternal(jobItem).ConfigureAwait(false);
                    await UpdateJobStatus(jobItem.JobId).ConfigureAwait(false);
                }
            });

            jobItem.OutputPath = await _mediaEncoder.EncodeVideo(new EncodingJobOptions(streamInfo, conversionOptions.Profile)
            {
                OutputDirectory = jobItem.TemporaryPath,
                CpuCoreLimit = syncOptions.TranscodingCpuCoreLimit,
                ReadInputAtNativeFramerate = !syncOptions.EnableFullSpeedTranscoding
            }, innerProgress, cancellationToken);

            jobItem.ItemDateModifiedTicks = item.DateModified.Ticks;
            _syncManager.OnConversionComplete(jobItem);
        }
        catch (OperationCanceledException)
        {
            // Cancellation is not a failure: put the item back in the queue.
            jobItem.Status = SyncJobItemStatus.Queued;
            jobItem.Progress = 0;
        }
        catch (Exception ex)
        {
            jobItem.Status = SyncJobItemStatus.Failed;
            _logger.ErrorException("Error during sync transcoding", ex);
        }

        // Persist failure/requeue and stop; only a successful transcode continues below.
        if (jobItem.Status == SyncJobItemStatus.Failed || jobItem.Status == SyncJobItemStatus.Queued)
        {
            await _syncManager.UpdateSyncJobItemInternal(jobItem).ConfigureAwait(false);
            return;
        }

        jobItem.MediaSource = await GetEncodedMediaSource(jobItem.OutputPath, user, true).ConfigureAwait(false);
    }
    else
    {
        // Direct copy path: local file is referenced in place, http is downloaded,
        // anything else is unsupported.
        if (mediaSource.Protocol == MediaProtocol.File)
        {
            jobItem.OutputPath = mediaSource.Path;
        }
        else if (mediaSource.Protocol == MediaProtocol.Http)
        {
            jobItem.OutputPath = await DownloadFile(jobItem, mediaSource, cancellationToken).ConfigureAwait(false);
        }
        else
        {
            throw new InvalidOperationException(string.Format("Cannot direct stream {0} protocol", mediaSource.Protocol));
        }

        jobItem.ItemDateModifiedTicks = item.DateModified.Ticks;
        jobItem.MediaSource = mediaSource;
    }

    // The staged output is final; downstream consumers must not transcode it again.
    jobItem.MediaSource.SupportsTranscoding = false;

    if (externalSubs.Count > 0)
    {
        // Save the job item now since conversion could take a while
        await _syncManager.UpdateSyncJobItemInternal(jobItem).ConfigureAwait(false);
        await ConvertSubtitles(jobItem, externalSubs, streamInfo, cancellationToken).ConfigureAwait(false);
    }

    jobItem.Progress = 50;
    jobItem.Status = SyncJobItemStatus.ReadyToTransfer;
    await _syncManager.UpdateSyncJobItemInternal(jobItem).ConfigureAwait(false);
}
/// <summary>
/// Verifies the tracking-table interceptor counts: a first creation fires
/// creating/created per table, a second call is a no-op, and an overwrite
/// (argument true) drops and recreates every tracking table.
/// </summary>
public async Task TrackingTable_Create_All()
{
    // Fresh database for this run.
    var databaseName = HelperDatabase.GetRandomName("tcp_lo_");
    await HelperDatabase.CreateDatabaseAsync(ProviderType.Sql, databaseName, true);

    var connectionString = HelperDatabase.GetConnectionString(ProviderType.Sql, databaseName);
    var provider = new SqlSyncProvider(connectionString);

    var context = new AdventureWorksContext((databaseName, ProviderType.Sql, provider), true, false);
    await context.Database.EnsureCreatedAsync();

    var options = new SyncOptions();
    var setup = new SyncSetup(new string[] { "SalesLT.ProductCategory", "SalesLT.ProductModel", "SalesLT.Product", "Posts" })
    {
        TrackingTablesPrefix = "t_",
        TrackingTablesSuffix = "_t"
    };

    var localOrchestrator = new LocalOrchestrator(provider, options, setup);

    var onCreating = 0;
    var onCreated = 0;
    var onDropping = 0;
    var onDropped = 0;

    void ResetCounters()
    {
        onCreating = 0;
        onCreated = 0;
        onDropping = 0;
        onDropped = 0;
    }

    localOrchestrator.OnTrackingTableCreating(ttca => onCreating++);
    localOrchestrator.OnTrackingTableCreated(ttca => onCreated++);
    localOrchestrator.OnTrackingTableDropping(ttca => onDropping++);
    localOrchestrator.OnTrackingTableDropped(ttca => onDropped++);

    // First creation: one creating/created pair per table, no drops.
    await localOrchestrator.CreateTrackingTablesAsync();
    Assert.Equal(4, onCreating);
    Assert.Equal(4, onCreated);
    Assert.Equal(0, onDropping);
    Assert.Equal(0, onDropped);

    // Second call without overwrite: nothing happens.
    ResetCounters();
    await localOrchestrator.CreateTrackingTablesAsync();
    Assert.Equal(0, onCreating);
    Assert.Equal(0, onCreated);
    Assert.Equal(0, onDropping);
    Assert.Equal(0, onDropped);

    // Overwrite: every table is dropped and recreated.
    ResetCounters();
    await localOrchestrator.CreateTrackingTablesAsync(true);
    Assert.Equal(4, onCreating);
    Assert.Equal(4, onCreated);
    Assert.Equal(4, onDropping);
    Assert.Equal(4, onDropped);

    HelperDatabase.DropDatabase(ProviderType.Sql, databaseName);
}
/// <summary>
/// Integration test: Bob and Sally share changes only through a USB-key clone.
/// Bob and Sally each make a conflicting edit; after the syncs, Bob's version wins
/// ("WeWin") and each round of conflicting edits adds one mergeConflict annotation
/// to the ChorusNotes file on the USB copy.
/// </summary>
public void CanShareConflictingChangeViaUsb()
{
    ConsoleProgress progress = new ConsoleProgress();
    BobSetup bobSetup = new BobSetup(progress, _pathToTestRoot);
    bobSetup.ChangeTextFile();

    // Simulated USB key with a clone of Bob's project.
    string usbSourcePath = Path.Combine(_pathToTestRoot, "USB-A");
    Directory.CreateDirectory(usbSourcePath);
    string usbProjectPath = bobSetup.SetupClone(usbSourcePath);
    Synchronizer usbRepo = Synchronizer.FromProjectConfiguration(BobSetup.CreateFolderConfig(usbProjectPath), progress);
    Synchronizer bobSynchronizer = bobSetup.GetSynchronizer();

    //Sally gets the usb and uses it to clone herself a repository
    string sallySourcePath = Path.Combine(_pathToTestRoot, "sally");
    Directory.CreateDirectory(sallySourcePath);
    //string sallyRepoPath = usbRepo.MakeClone(Path.Combine(sallySourcePath, BobSetup.ProjectFolderName), true);
    string sallyRepoPath = HgHighLevel.MakeCloneFromLocalToLocal(usbRepo.Repository.PathToRepo, Path.Combine(sallySourcePath, BobSetup.ProjectFolderName), true, progress);

    //Now bob sets up the conflict
    File.WriteAllText(bobSetup.PathToText, "Bob's new idea");
    SyncOptions bobOptions = new SyncOptions();
    bobOptions.CheckinDescription = "changed my mind";
    bobOptions.DoMergeWithOthers = false; // pretend the usb key isn't there
    bobOptions.DoPullFromOthers = false; // pretend the usb key isn't there
    bobOptions.DoSendToOthers = false;
    RepositoryAddress usbPath = RepositoryAddress.Create("usba source", Path.Combine(usbSourcePath, RepositoryAddress.ProjectNameVariable), false);
    bobOptions.RepositorySourcesToTry.Add(usbPath);
    bobSynchronizer.SyncNow(bobOptions);

    ProjectFolderConfiguration sallyProject = BobSetup.CreateFolderConfig(sallyRepoPath);
    Synchronizer sallySynchronizer = Synchronizer.FromProjectConfiguration(sallyProject, progress);
    sallySynchronizer.Repository.SetUserNameInIni("sally", new NullProgress());

    //now she modifies a file
    File.WriteAllText(Path.Combine(sallyRepoPath, Path.Combine("lexicon", "foo.abc")), "Sally was here");

    //and syncs, which pushes back to the usb key
    SyncOptions sallyOptions = new SyncOptions();
    sallyOptions.CheckinDescription = "making sally's mark on foo.abc";
    sallyOptions.RepositorySourcesToTry.Add(usbPath);
    sallyOptions.DoPullFromOthers = true;
    sallyOptions.DoMergeWithOthers = true;
    sallyOptions.DoSendToOthers = true;
    sallySynchronizer.SyncNow(sallyOptions);

    //bob still doesn't have direct access to sally's repo... it's in some other city
    // but now the usb comes back to him
    // SyncOptions bobOptions = new SyncOptions();
    bobOptions.CheckinDescription = "Getting from sally, i hope";
    bobOptions.DoPullFromOthers = true;
    bobOptions.DoSendToOthers = true;
    bobOptions.DoMergeWithOthers = true;
    bobSynchronizer.SyncNow(bobOptions);

    // With sync set as 'WeWin'
    Assert.AreEqual("Bob's new idea", File.ReadAllText(bobSetup.PathToText));

    // One mergeConflict annotation is expected after the first conflicting round.
    var notesPath = Path.Combine(Path.Combine(Path.Combine(usbSourcePath, BobSetup.ProjectFolderName), "lexicon"), "foo.abc.ChorusNotes");
    AssertThatXmlIn.File(notesPath).HasSpecifiedNumberOfMatchesForXpath("//notes/annotation[@class='mergeConflict']", 1);

    //The conflict has now been created, in the merge with Bob, make a new conflict and make sure that when Sally does the next sync both conflicts are
    //present in the ChorusNotes.
    File.WriteAllText(Path.Combine(sallyRepoPath, Path.Combine("lexicon", "foo.abc")), "Sally changed her mind");
    File.WriteAllText(bobSetup.PathToText, "Bob changed his mind.");
    bobOptions.CheckinDescription = "Bob makes conflicting change.";
    bobSynchronizer.SyncNow(bobOptions);
    sallyOptions.CheckinDescription = "Sally makes conflicting change.";
    sallySynchronizer.SyncNow(sallyOptions);
    AssertThatXmlIn.File(notesPath).HasSpecifiedNumberOfMatchesForXpath("//notes/annotation[@class='mergeConflict']", 2);
}
/// <summary>
/// Exports every workflow of the current app to numbered JSON files
/// (workflows/workflow0.json, workflow1.json, ...), ordered by workflow name.
/// Each export is wrapped in DoSafeAsync so one failure does not abort the rest.
/// </summary>
public async Task ExportAsync(DirectoryInfo directoryInfo, JsonHelper jsonHelper, SyncOptions options, ISession session)
{
    var workflows = await session.Apps.GetWorkflowsAsync(session.App);

    var fileIndex = 0;

    foreach (var workflow in workflows.Items.OrderBy(x => x.Name))
    {
        var name = workflow.Name;

        await log.DoSafeAsync($"Exporting '{name}' ({workflow.Id})", async () =>
        {
            await jsonHelper.WriteWithSchemaAs<UpdateWorkflowDto>(directoryInfo, $"workflows/workflow{fileIndex}.json", workflow, "../__json/workflow");
        });

        fileIndex++;
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="OfflineEntry"/> class.
/// Serializes <paramref name="obj"/> to JSON and delegates to the main constructor.
/// </summary>
/// <param name="key"> The key. </param>
/// <param name="obj"> The object; its JSON serialization is stored alongside it. </param>
/// <param name="priority"> The priority. Objects with higher priority will be synced first. Higher number indicates higher priority. </param>
/// <param name="syncOptions"> The sync options. </param>
/// <param name="isPartial"> A value indicating whether this is only a partial object. </param>
public OfflineEntry(string key, object obj, int priority, SyncOptions syncOptions, bool isPartial = false)
    : this(key, obj, JsonConvert.SerializeObject(obj), priority, syncOptions, isPartial)
{
}
/// <summary>
/// Demo scenario: after an initial sync, a new column is added to the Address table on
/// both server and client. The Address stored procedures/triggers are deprovisioned and
/// reprovisioned on both sides (tracking tables are kept to preserve history), the
/// refreshed schema is written back to the server and client scopes, and a new sync is run.
/// </summary>
private static async Task SynchronizeThenDeprovisionThenProvisionAsync()
{
    // Create 2 Sql Sync providers
    var serverProvider = new SqlSyncProvider(serverConnectionString);
    var clientProvider = new SqlSyncProvider(clientConnectionString);

    // Create standard Setup and Options
    var setup = new SyncSetup(new string[] { "Address", "Customer", "CustomerAddress" });
    var options = new SyncOptions();

    // Creating an agent that will handle all the process
    var agent = new SyncAgent(clientProvider, serverProvider, options, setup);

    // Using the Progress pattern to handle progession during the synchronization
    var progress = new SynchronousProgress<ProgressArgs>(s => Console.WriteLine($"{s.Context.SyncStage}:\t{s.Message}"));

    // First sync to have a starting point
    var s1 = await agent.SynchronizeAsync(progress);
    Console.WriteLine(s1);

    // -----------------------------------------------------------------
    // Migrating a table by adding a new column
    // -----------------------------------------------------------------

    // Adding a new column called CreatedDate to Address table, on the server, and on the client.
    await Helper.AddNewColumnToAddressAsync(serverProvider.CreateConnection());
    await Helper.AddNewColumnToAddressAsync(clientProvider.CreateConnection());

    // -----------------------------------------------------------------
    // Server side
    // -----------------------------------------------------------------

    // Creating a setup regarding only the table Address
    var setupAddress = new SyncSetup(new string[] { "Address" });

    // Create a server orchestrator used to Deprovision and Provision only table Address
    var remoteOrchestrator = new RemoteOrchestrator(serverProvider, options, setupAddress);

    // Unprovision the Address triggers / stored proc.
    // We can conserve the Address tracking table, since we just add a column,
    // that is not a primary key used in the tracking table
    // That way, we are preserving historical data
    await remoteOrchestrator.DeprovisionAsync(SyncProvision.StoredProcedures | SyncProvision.Triggers);

    // Provision the Address triggers / stored proc again,
    // This provision method will fetch the address schema from the database,
    // so it will contains all the columns, including the new Address column added
    await remoteOrchestrator.ProvisionAsync(SyncProvision.StoredProcedures | SyncProvision.Triggers);

    // Now we need the full setup to get the full schema.
    // Setup includes [Address] [Customer] and [CustomerAddress]
    remoteOrchestrator.Setup = setup;
    var newSchema = await remoteOrchestrator.GetSchemaAsync();

    // Now we need to save this new schema to the serverscope table
    // get the server scope again
    var serverScope = await remoteOrchestrator.GetServerScopeAsync();

    // affect good values
    serverScope.Setup = setup;
    serverScope.Schema = newSchema;

    // save it
    await remoteOrchestrator.WriteServerScopeAsync(serverScope);

    // -----------------------------------------------------------------
    // Client side
    // -----------------------------------------------------------------

    // Now go for local orchestrator
    var localOrchestrator = new LocalOrchestrator(clientProvider, options, setupAddress);

    // Unprovision the Address triggers / stored proc. We can conserve the tracking table,
    // since we just add a column that is not a primary key used in the tracking table
    await localOrchestrator.DeprovisionAsync(SyncProvision.StoredProcedures | SyncProvision.Triggers);

    // Provision the Address triggers / stored proc again,
    // This provision method will fetch the address schema from the database, so it will contains all the columns, including the new one added
    await localOrchestrator.ProvisionAsync(SyncProvision.StoredProcedures | SyncProvision.Triggers);

    // Now we need to save this to clientscope
    // get the server scope again
    var clientScope = await localOrchestrator.GetClientScopeAsync();

    // At this point, if you need the schema and you are not able to create a RemoteOrchestrator,
    // You can create a WebClientOrchestrator and get the schema as well
    // var proxyClientProvider = new WebClientOrchestrator("https://localhost:44369/api/Sync");
    // var newSchema = proxyClientProvider.GetSchemaAsync();

    // affect good values (newSchema comes from the server-side fetch above)
    clientScope.Setup = setup;
    clientScope.Schema = newSchema;

    // save it
    await localOrchestrator.WriteClientScopeAsync(clientScope);

    // Now test a new sync, everything should work as expected.
    do
    {
        // Console.Clear();
        Console.WriteLine("Sync Start");
        try
        {
            var s2 = await agent.SynchronizeAsync();

            // Write results
            Console.WriteLine(s2);
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
        }
    } while (Console.ReadKey().Key != ConsoleKey.Escape);

    Console.WriteLine("End");
}
/// <summary>
/// Imports assets from the sync file system: uploads each asset's blob, then issues one
/// bulk update per file containing a move job and an annotate job for every asset, and
/// logs the per-asset outcome.
/// </summary>
public async Task ImportAsync(ISyncService sync, SyncOptions options, ISession session)
{
    var assetFiles = GetFiles(sync.FileSystem)
        .Select(file => (file, sync.Read<AssetsModel>(file, log)));

    var batchIndex = 0;

    foreach (var (_, batch) in assetFiles)
    {
        if (batch?.Assets?.Count > 0)
        {
            var pipeline = new UploadPipeline(session, log, sync.FileSystem)
            {
                FilePathProvider = asset => asset.Id.GetBlobPath()
            };

            // Upload all blobs first; always flush the pipeline, even on failure.
            try
            {
                foreach (var asset in batch.Assets)
                {
                    await pipeline.UploadAsync(asset);
                }
            }
            finally
            {
                await pipeline.CompleteAsync();
            }

            // Two jobs per asset: a move into its folder and an annotation update.
            var bulkRequest = new BulkUpdateAssetsDto();

            foreach (var asset in batch.Assets)
            {
                var parentId = await sync.Folders.GetIdAsync(asset.FolderPath);

                bulkRequest.Jobs.Add(asset.ToMove(parentId));
                bulkRequest.Jobs.Add(asset.ToAnnotate());
            }

            var results = await session.Assets.BulkUpdateAssetsAsync(session.App, bulkRequest);

            var assetIndex = 0;

            foreach (var asset in batch.Assets)
            {
                // We created two commands per asset, so the job indexes come in pairs.
                var moveResult = results.FirstOrDefault(x => x.JobIndex == (assetIndex * 2));
                var annotateResult = results.FirstOrDefault(x => x.JobIndex == (assetIndex * 2) + 1);

                log.StepStart($"Upserting #{batchIndex}/{assetIndex}");

                if (moveResult?.Error != null)
                {
                    log.StepFailed(moveResult.Error.ToString());
                }
                else if (annotateResult?.Error != null)
                {
                    log.StepFailed(annotateResult.Error.ToString());
                }
                else if (moveResult?.Id != null && annotateResult?.Id != null)
                {
                    log.StepSuccess();
                }
                else
                {
                    log.StepSkipped("Unknown Reason");
                }

                assetIndex++;
            }
        }

        batchIndex++;
    }
}
/// <summary>
/// Sends a local file to a sync provider. Providers that support direct copy receive the
/// path; all others receive a read stream, throttled when an upload speed limit is set
/// and the provider is remote.
/// </summary>
private async Task<SyncedFileInfo> SendFile(IServerSyncProvider provider, string inputPath, string[] pathParts, SyncTarget target, SyncOptions options, IProgress<double> progress, CancellationToken cancellationToken)
{
    _logger.Debug("Sending {0} to {1}. Remote path: {2}", inputPath, provider.Name, string.Join("/", pathParts));

    // Fast path: the provider can copy the file directly from its path.
    if (provider is ISupportsDirectCopy directCopyProvider)
    {
        return await directCopyProvider.SendFile(inputPath, pathParts, target, progress, cancellationToken).ConfigureAwait(false);
    }

    using (var fileStream = _fileSystem.GetFileStream(inputPath, FileMode.Open, FileAccess.Read, FileShare.Read, true))
    {
        Stream uploadStream = fileStream;

        var throttle = options.UploadSpeedLimitBytes > 0 && provider is IRemoteSyncProvider;
        if (throttle)
        {
            uploadStream = new ThrottledStream(uploadStream, options.UploadSpeedLimitBytes);
        }

        return await provider.SendFile(uploadStream, pathParts, target, progress, cancellationToken).ConfigureAwait(false);
    }
}
/// <summary>
/// Runs the sync dialog modally. When the Silenceable flag survived construction the
/// dialog is hidden before showing. A dialog error is rethrown unless the caller passed
/// the NoThrow flag; otherwise the dialog result is returned.
/// </summary>
internal static object RunModal(SyncOperator syncOperator, SyncOptions options)
{
    using (SyncDialog dialog = new SyncDialog(syncOperator, options))
    {
        bool runSilently = (dialog.Options & SyncOptions.Silenceable) == SyncOptions.Silenceable;
        if (runSilently)
            dialog.ToggleVisibility(false);

        dialog.ShowDialog();

        bool throwOnError = (options & SyncOptions.NoThrow) != SyncOptions.NoThrow;
        if (dialog.Error != null && throwOnError)
            throw dialog.Error;

        return dialog.Result;
    }
}
/// <summary>
/// Creates a sync-up state via the native SDK. The PCL wrapper types are translated to
/// their native SDK counterparts (and the result back) by JSON round-tripping, since the
/// two type hierarchies share shape but not identity.
/// </summary>
public static SyncState CreateSyncUp(SmartStore.SmartStore store, SyncUpTarget target, SyncOptions options, string soupName)
{
    // Serialize one type and deserialize into its counterpart.
    T RoundTrip<T>(object source) => JsonConvert.DeserializeObject<T>(JsonConvert.SerializeObject(source));

    var nativeState = SDK.SmartSync.Model.SyncState.CreateSyncUp(
        RoundTrip<SDK.SmartStore.Store.SmartStore>(store),
        RoundTrip<SDK.SmartSync.Model.SyncUpTarget>(target),
        RoundTrip<SDK.SmartSync.Model.SyncOptions>(options),
        soupName);

    return RoundTrip<SyncState>(nativeState);
}
/// <summary>
/// Writes a value for the property selected by <paramref name="propertyExpression"/> into
/// the local cache and the offline database, then publishes an offline Firebase event.
/// </summary>
/// <param name="key"> The top-level database key. </param>
/// <param name="propertyExpression"> Expression selecting the property being set. </param>
/// <param name="value"> The value to store. </param>
/// <param name="syncOptions"> How this write should be synchronized. </param>
/// <param name="priority"> Sync priority; higher numbers sync first. </param>
public void Set<TProperty>(string key, Expression<Func<T, TProperty>> propertyExpression, object value, SyncOptions syncOptions, int priority = 1)
{
    // fullKey: (full path, leaf property name, isPrimitivePatch) — see GenerateFullKey.
    var fullKey = this.GenerateFullKey(key, propertyExpression, syncOptions);

    // Strip surrounding quotes/escapes so the value can be embedded into the JSON body below.
    var serializedObject = JsonConvert.SerializeObject(value).Trim('"', '\\');

    if (fullKey.Item3)
    {
        // Primitive patch: wrap the value in a one-field JSON object keyed by the leaf name.
        if (typeof(TProperty) != typeof(string) || value == null)
        {
            // don't escape non-string primitives and null;
            serializedObject = $"{{ \"{fullKey.Item2}\" : {serializedObject} }}";
        }
        else
        {
            serializedObject = $"{{ \"{fullKey.Item2}\" : \"{serializedObject}\" }}";
        }
    }

    var setObject = this.firebaseCache.PushData(fullKey.Item1, serializedObject).First();

    // NOTE(review): '&&' binds tighter than '||', so this reads as
    // !ContainsKey(key) || (existing != Patch && existing != Put) — confirm the
    // precedence is intended and parenthesize for clarity if so.
    if (!this.Database.ContainsKey(key) || this.Database[key].SyncOptions != SyncOptions.Patch && this.Database[key].SyncOptions != SyncOptions.Put)
    {
        this.Database[fullKey.Item1] = new OfflineEntry(fullKey.Item1, value, serializedObject, priority, syncOptions, true);
    }

    // NOTE(review): setObject comes from .First() and is dereferenced above
    // (setObject.Object), so the 'setObject == null' check below can never choose
    // Delete — looks like dead code; verify the intended delete-event path.
    this.subject.OnNext(new FirebaseEvent<T>(key, setObject.Object, setObject == null ? FirebaseEventType.Delete : FirebaseEventType.InsertOrUpdate, FirebaseEventSource.Offline));
}
/// <summary>
/// Launch a simple sync over a TCP network; both SQL Server instances (client and server)
/// are reachable directly. Configures naming prefixes/suffixes and batch options, installs
/// a provisioned-database interceptor on both providers, then loops syncs until Escape.
/// </summary>
/// <returns></returns>
private static async Task SynchronizeExistingTablesAsync()
{
    string serverName = "ServerTablesExist";
    string clientName = "ClientsTablesExist";

    await DbHelper.EnsureDatabasesAsync(serverName);
    await DbHelper.EnsureDatabasesAsync(clientName);

    // Create 2 Sql Sync providers
    var serverProvider = new SqlSyncProvider(DbHelper.GetDatabaseConnectionString(serverName));
    var clientProvider = new SqlSyncProvider(DbHelper.GetDatabaseConnectionString(clientName));

    // Tables involved in the sync process:
    var tables = allTables;

    // Creating an agent that will handle all the process
    var agent = new SyncAgent(clientProvider, serverProvider, tables);

    // Using the Progress pattern to handle progession during the synchronization
    var progress = new Progress<ProgressArgs>(s => Console.WriteLine($"[client]: {s.Context.SyncStage}:\t{s.Message}"));

    // Setting configuration options
    agent.SetConfiguration(s =>
    {
        s.ScopeInfoTableName = "tscopeinfo";
        s.SerializationFormat = Dotmim.Sync.Enumerations.SerializationFormat.Binary;
        s.StoredProceduresPrefix = "s";
        s.StoredProceduresSuffix = "";
        s.TrackingTablesPrefix = "t";
        s.TrackingTablesSuffix = "";
    });

    agent.SetOptions(opt =>
    {
        opt.BatchDirectory = Path.Combine(SyncOptions.GetDefaultUserBatchDiretory(), "sync");
        opt.BatchSize = 100;
        opt.CleanMetadatas = true;
        opt.UseBulkOperations = true;
        opt.UseVerboseErrors = false;
    });

    var remoteProvider = agent.RemoteProvider as CoreProvider;

    // After provisioning, reset the local scope timestamp so the next sync re-reads everything.
    var dpAction = new Action<DatabaseProvisionedArgs>(args =>
    {
        Console.WriteLine($"-- [InterceptDatabaseProvisioned] -- ");

        var sql = $"Update tscopeinfo set scope_last_sync_timestamp = 0 where [scope_is_local] = 1";
        var cmd = args.Connection.CreateCommand();
        cmd.Transaction = args.Transaction;
        cmd.CommandText = sql;
        cmd.ExecuteNonQuery();
    });

    // Same interceptor on both sides.
    remoteProvider.InterceptDatabaseProvisioned(dpAction);
    agent.LocalProvider.InterceptDatabaseProvisioned(dpAction);

    do
    {
        Console.Clear();
        Console.WriteLine("Sync Start");
        try
        {
            // Launch the sync process
            var s1 = await agent.SynchronizeAsync(progress);

            // Write results
            Console.WriteLine(s1);
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
        }

        //Console.WriteLine("Sync Ended. Press a key to start again, or Escapte to end");
    } while (Console.ReadKey().Key != ConsoleKey.Escape);

    Console.WriteLine("End");
}
/// <summary>
/// Starts a SOQL sync-down through the native sync manager. The JSON-encoded target,
/// callback and options are converted to their native SDK types, and the resulting state
/// is round-tripped back into the wrapper model.
/// </summary>
public Models.SyncState SyncDown(string target, string soupName, string callback, Models.SyncOptions options)
{
    var targetJson = JObject.Parse(target);
    SyncDownTarget downTarget = new SoqlSyncDownTarget(targetJson);

    var callbackAction = JsonConvert.DeserializeObject<Action<SyncState>>(callback);

    var optionsJson = JsonConvert.SerializeObject(options);
    var nativeOptions = SyncOptions.FromJson(JObject.Parse(optionsJson));

    var nativeState = _syncManager.SyncDown(downTarget, soupName, callbackAction, nativeOptions);

    var stateJson = JsonConvert.SerializeObject(nativeState);
    return JsonConvert.DeserializeObject<Models.SyncState>(stateJson);
}
/// <summary>
/// Interactive demo: syncs AdventureWorks tables over HTTP through an in-process Kestrel
/// test server. The server side hosts a WebProxyServerProvider; the client side loops,
/// letting the user pick normal / reinitialize / reinitialize-with-upload sync until Escape.
/// </summary>
public static async Task SyncHttpThroughKestellAsync()
{
    // server provider
    var serverProvider = new SqlSyncProvider(DbHelper.GetDatabaseConnectionString(serverDbName));
    // client provider
    var client1Provider = new SqlSyncProvider(DbHelper.GetDatabaseConnectionString(clientDbName));
    // proxy client provider
    var proxyClientProvider = new WebProxyClientProvider();

    var tables = new string[] { "ProductCategory", "ProductDescription", "ProductModel", "Product", "ProductModelProductDescription", "Address", "Customer", "CustomerAddress", "SalesOrderHeader", "SalesOrderDetail" };

    // Shared sync configuration (scope naming, serialization, db object prefixes).
    var configuration = new Action<SyncConfiguration>(conf =>
    {
        conf.ScopeName = "AdventureWorks";
        conf.ScopeInfoTableName = "tscopeinfo";
        conf.SerializationFormat = Dotmim.Sync.Enumerations.SerializationFormat.Binary;
        conf.StoredProceduresPrefix = "s";
        conf.StoredProceduresSuffix = "";
        conf.TrackingTablesPrefix = "t";
        conf.TrackingTablesSuffix = "";
        conf.Add(tables);
    });

    // Client and server use distinct batch directories.
    var optionsClient = new Action<SyncOptions>(opt =>
    {
        opt.BatchDirectory = Path.Combine(SyncOptions.GetDefaultUserBatchDiretory(), "client");
        opt.BatchSize = 100;
        opt.CleanMetadatas = true;
        opt.UseBulkOperations = true;
        opt.UseVerboseErrors = false;
    });

    var optionsServer = new Action<SyncOptions>(opt =>
    {
        opt.BatchDirectory = Path.Combine(SyncOptions.GetDefaultUserBatchDiretory(), "server");
        opt.BatchSize = 100;
        opt.CleanMetadatas = true;
        opt.UseBulkOperations = true;
        opt.UseVerboseErrors = false;
    });

    // Server-side request handler: one proxy provider per request.
    var serverHandler = new RequestDelegate(async context =>
    {
        var proxyServerProvider = WebProxyServerProvider.Create(context, serverProvider, configuration, optionsServer);
        await proxyServerProvider.HandleRequestAsync(context);
    });

    using (var server = new KestrellTestServer())
    {
        var clientHandler = new ResponseDelegate(async (serviceUri) =>
        {
            proxyClientProvider.ServiceUri = new Uri(serviceUri);
            var syncAgent = new SyncAgent(client1Provider, proxyClientProvider);

            do
            {
                Console.Clear();
                Console.WriteLine("Sync Start");
                try
                {
                    var cts = new CancellationTokenSource();

                    Console.WriteLine("--------------------------------------------------");
                    Console.WriteLine("1 : Normal synchronization.");
                    Console.WriteLine("2 : Synchronization with reinitialize");
                    Console.WriteLine("3 : Synchronization with upload and reinitialize");
                    Console.WriteLine("--------------------------------------------------");
                    Console.WriteLine("What's your choice ? ");
                    Console.WriteLine("--------------------------------------------------");
                    var choice = Console.ReadLine();

                    if (int.TryParse(choice, out var choiceNumber))
                    {
                        Console.WriteLine($"You choose {choice}. Start operation....");
                        switch (choiceNumber)
                        {
                            case 1:
                                var s1 = await syncAgent.SynchronizeAsync(cts.Token);
                                Console.WriteLine(s1);
                                break;
                            case 2:
                                s1 = await syncAgent.SynchronizeAsync(SyncType.Reinitialize, cts.Token);
                                Console.WriteLine(s1);
                                break;
                            case 3:
                                s1 = await syncAgent.SynchronizeAsync(SyncType.ReinitializeWithUpload, cts.Token);
                                Console.WriteLine(s1);
                                break;
                            default:
                                break;
                        }
                    }
                }
                catch (SyncException e)
                {
                    Console.WriteLine(e.ToString());
                }
                catch (Exception e)
                {
                    Console.WriteLine("UNKNOW EXCEPTION : " + e.Message);
                }

                Console.WriteLine("--------------------------------------------------");
                Console.WriteLine("Press a key to choose again, or Escapte to end");
            } while (Console.ReadKey().Key != ConsoleKey.Escape);
        });

        await server.Run(serverHandler, clientHandler);
    }
}
/// <summary>
/// Convenience overload: packs the individual arguments into a
/// <see cref="NewCookie2Request"/> and forwards it to the explicit
/// <see cref="IDirectorySync"/> implementation.
/// </summary>
public Task<NewCookie2Response> NewCookie2Async(int schemaRevision, string serviceInstance, SyncOptions options, string[] objectClassesOfInterest, string[] propertiesOfInterest, string[] linkClassesOfInterest, string[] alwaysReturnProperties)
{
    var request = new NewCookie2Request
    {
        schemaRevision = schemaRevision,
        serviceInstance = serviceInstance,
        options = options,
        objectClassesOfInterest = objectClassesOfInterest,
        propertiesOfInterest = propertiesOfInterest,
        linkClassesOfInterest = linkClassesOfInterest,
        alwaysReturnProperties = alwaysReturnProperties
    };

    return ((IDirectorySync)this).NewCookie2Async(request);
}
/// <summary>
/// Verifies the scope-related interceptors of <see cref="RemoteOrchestrator"/>:
/// creating/created for scope tables, loading/loaded for server scope info, and
/// saving/saved for scope writes, with the exact counts produced by a
/// GetServerScopeInfoAsync followed by a SaveServerScopeInfoAsync.
/// </summary>
public async Task RemoteOrchestrator_Scope()
{
    // Fresh database for this run.
    var dbName = HelperDatabase.GetRandomName("tcp_lo_");
    await HelperDatabase.CreateDatabaseAsync(ProviderType.Sql, dbName, true);
    var cs = HelperDatabase.GetConnectionString(ProviderType.Sql, dbName);
    var sqlProvider = new SqlSyncProvider(cs);

    var ctx = new AdventureWorksContext((dbName, ProviderType.Sql, sqlProvider), true, false);
    await ctx.Database.EnsureCreatedAsync();

    var scopeName = "scope";
    var options = new SyncOptions();
    var setup = new SyncSetup(this.Tables);
    var remoteOrchestrator = new RemoteOrchestrator(sqlProvider, options);

    // Interceptor hit counters.
    var scopeTableCreating = 0;
    var scopeTableCreated = 0;
    var scopeLoading = 0;
    var scopeLoaded = 0;
    var scopeSaving = 0;
    var scopeSaved = 0;

    remoteOrchestrator.OnScopeSaving(ssa =>
    {
        Assert.NotNull(ssa.Command);
        scopeSaving++;
    });

    remoteOrchestrator.OnScopeSaved(ssa => scopeSaved++);

    remoteOrchestrator.OnScopeTableCreating(stca =>
    {
        Assert.NotNull(stca.Command);
        scopeTableCreating++;
    });

    remoteOrchestrator.OnScopeTableCreated(stca =>
    {
        scopeTableCreated++;
    });

    remoteOrchestrator.OnServerScopeInfoLoading(args =>
    {
        // Loading must happen inside an open connection/transaction with a real command.
        Assert.NotNull(args.Command);
        Assert.Equal(scopeName, args.Context.ScopeName);
        Assert.Equal(scopeName, args.ScopeName);
        Assert.NotNull(args.Connection);
        Assert.NotNull(args.Transaction);
        Assert.Equal(ConnectionState.Open, args.Connection.State);
        scopeLoading++;
    });

    remoteOrchestrator.OnServerScopeInfoLoaded(args =>
    {
        Assert.Equal(scopeName, args.Context.ScopeName);
        Assert.NotNull(args.Connection);
        Assert.NotNull(args.Transaction);
        scopeLoaded++;
    });

    var serverScopeInfo = await remoteOrchestrator.GetServerScopeInfoAsync(scopeName, setup);

    // TODO : if serverScope.Schema is null, should we Provision here ?
    serverScopeInfo.Version = "2.0";

    await remoteOrchestrator.SaveServerScopeInfoAsync(serverScopeInfo);

    // Expected interceptor totals across the Get + Save calls above.
    Assert.Equal(2, scopeTableCreating);
    Assert.Equal(2, scopeTableCreated);
    Assert.Equal(2, scopeLoading);
    Assert.Equal(3, scopeLoaded);
    Assert.Equal(3, scopeSaving);
    Assert.Equal(3, scopeSaved);

    HelperDatabase.DropDatabase(ProviderType.Sql, dbName);
}
// Runs one synchronization pass for this provider run.
// Two transports, selected by this.NetworkType:
//  - Tcp: an in-process SyncAgent is created (or reused when reuseAgent is true)
//    directly against the fixture's server provider; optional configuration,
//    filters and filter parameters are applied before syncing.
//  - Http: an in-memory Kestrel test server hosts a WebProxyServerProvider; the
//    client agent talks to it through a WebProxyClientProvider whose ServiceUri is
//    set to the test server's address. The server-side batch directory is pointed
//    at a "server" subfolder to exercise distinct client/server batch directories.
// Exceptions from the sync itself are captured into this.Exception (and logged to
// the console) rather than rethrown; server-side handler exceptions are only
// logged. Returns 'this' so callers can chain on the run result.
// NOTE(review): BeginRun/EndRun are invoked with the remote (Tcp) or local server
// (Http) provider respectively — confirm that asymmetry is intentional.
public async Task <ProviderRun> RunAsync(ProviderFixture serverFixture, string[] tables = null, Action <SyncConfiguration> conf = null, bool reuseAgent = true) { // server proxy var proxyClientProvider = new WebProxyClientProvider(); var syncTables = tables ?? serverFixture.Tables; // local test, through tcp if (this.NetworkType == NetworkType.Tcp) { // create agent if (this.Agent == null || !reuseAgent) { this.Agent = new SyncAgent(this.ClientProvider, serverFixture.ServerProvider, syncTables); } // copy conf settings if (conf != null) { this.Agent.SetConfiguration(conf); } // Add Filers if (serverFixture.Filters != null && serverFixture.Filters.Count > 0) { serverFixture.Filters.ForEach(f => { if (!this.Agent.LocalProvider.Configuration.Filters.Contains(f)) { this.Agent.LocalProvider.Configuration.Filters.Add(f); } }); } // Add Filers values if (serverFixture.FilterParameters != null && serverFixture.FilterParameters.Count > 0) { foreach (var syncParam in serverFixture.FilterParameters) { if (!this.Agent.Parameters.Contains(syncParam)) { this.Agent.Parameters.Add(syncParam); } } } // sync try { this.BeginRun?.Invoke(this.Agent.RemoteProvider); this.Results = await this.Agent.SynchronizeAsync(); this.EndRun?.Invoke(this.Agent.RemoteProvider); } catch (Exception ex) { this.Exception = ex; Console.WriteLine(ex); } } // ----------------------------------------------------------------------- // HTTP // ----------------------------------------------------------------------- // tests through http proxy if (this.NetworkType == NetworkType.Http) { using (var server = new KestrellTestServer()) { // server handler var serverHandler = new RequestDelegate(async context => { // test if <> directory name works var options = new Action <SyncOptions>(o => o.BatchDirectory = Path.Combine(SyncOptions.GetDefaultUserBatchDiretory(), "server")); // sync try { var proxyServerProvider = WebProxyServerProvider.Create( context, serverFixture.ServerProvider, conf, options); var 
serverProvider = proxyServerProvider.GetLocalProvider(context); serverProvider.Configuration.Add(syncTables); this.BeginRun?.Invoke(serverProvider); await proxyServerProvider.HandleRequestAsync(context); this.EndRun?.Invoke(serverProvider); } catch (Exception ew) { Console.WriteLine(ew); } }); var clientHandler = new ResponseDelegate(async(serviceUri) => { // create agent if (this.Agent == null || !reuseAgent) { this.Agent = new SyncAgent(this.ClientProvider, proxyClientProvider); } if (serverFixture.FilterParameters != null && serverFixture.FilterParameters.Count > 0) { foreach (var syncParam in serverFixture.FilterParameters) { if (!this.Agent.Parameters.Contains(syncParam)) { this.Agent.Parameters.Add(syncParam); } } } ((WebProxyClientProvider)this.Agent.RemoteProvider).ServiceUri = new Uri(serviceUri); try { this.Results = await this.Agent.SynchronizeAsync(); } catch (Exception ew) { this.Exception = ew; Console.WriteLine(ew); } }); await server.Run(serverHandler, clientHandler); } } return(this); }
// Integration test for the LocalOrchestrator scope interceptors, in two phases.
// Phase 1: GetClientScopeInfoAsync on a fresh database must create the scope table
// once (creating/created = 1), load twice (loading/loaded = 2) and save once
// (saving/saved = 1); the loading interceptor also checks stage, scope name and
// that the connection is open inside an active transaction.
// Phase 2: counters are reset, the version is bumped to "2.0" and
// SaveClientScopeInfoAsync must trigger ONLY the save interceptors (1/1) — no
// table creation and no reload.
// NOTE(review): DropDatabase only runs when all asserts pass — a failure leaks the
// temporary database.
public async Task LocalOrchestrator_Scope() { var dbName = HelperDatabase.GetRandomName("tcp_lo_"); await HelperDatabase.CreateDatabaseAsync(ProviderType.Sql, dbName, true); var cs = HelperDatabase.GetConnectionString(ProviderType.Sql, dbName); var sqlProvider = new SqlSyncProvider(cs); var ctx = new AdventureWorksContext((dbName, ProviderType.Sql, sqlProvider), true, false); await ctx.Database.EnsureCreatedAsync(); var scopeName = "scope"; var options = new SyncOptions(); var setup = new SyncSetup(this.Tables); var localOrchestrator = new LocalOrchestrator(sqlProvider, options); var scopeTableCreating = 0; var scopeTableCreated = 0; var scopeLoading = 0; var scopeLoaded = 0; var scopeSaving = 0; var scopeSaved = 0; localOrchestrator.OnScopeSaving(ssa => { Assert.NotNull(ssa.Command); scopeSaving++; }); localOrchestrator.OnScopeSaved(ssa => scopeSaved++); localOrchestrator.OnScopeTableCreating(stca => { Assert.NotNull(stca.Command); scopeTableCreating++; }); localOrchestrator.OnScopeTableCreated(stca => { scopeTableCreated++; }); localOrchestrator.OnClientScopeInfoLoading(args => { Assert.NotNull(args.Command); Assert.Equal(SyncStage.ScopeLoading, args.Context.SyncStage); Assert.Equal(scopeName, args.Context.ScopeName); Assert.Equal(scopeName, args.ScopeName); Assert.NotNull(args.Connection); Assert.NotNull(args.Transaction); Assert.Equal(ConnectionState.Open, args.Connection.State); scopeLoading++; }); localOrchestrator.OnClientScopeInfoLoaded(args => { Assert.Equal(SyncStage.ScopeLoading, args.Context.SyncStage); Assert.Equal(scopeName, args.Context.ScopeName); Assert.NotNull(args.Connection); Assert.NotNull(args.Transaction); scopeLoaded++; }); var localScopeInfo = await localOrchestrator.GetClientScopeInfoAsync(scopeName); Assert.Equal(1, scopeTableCreating); Assert.Equal(1, scopeTableCreated); Assert.Equal(2, scopeLoading); Assert.Equal(2, scopeLoaded); Assert.Equal(1, scopeSaving); Assert.Equal(1, scopeSaved); scopeTableCreating = 0; scopeTableCreated = 0; 
scopeLoading = 0; scopeLoaded = 0; scopeSaving = 0; scopeSaved = 0; localScopeInfo.Version = "2.0"; await localOrchestrator.SaveClientScopeInfoAsync(localScopeInfo); Assert.Equal(0, scopeTableCreating); Assert.Equal(0, scopeTableCreated); Assert.Equal(0, scopeLoading); Assert.Equal(0, scopeLoaded); Assert.Equal(1, scopeSaving); Assert.Equal(1, scopeSaved); HelperDatabase.DropDatabase(ProviderType.Sql, dbName); }
/// <summary>
/// Imports workflow definitions from <paramref name="directoryInfo"/> into the current app:
/// optionally deletes workflows that are no longer present, creates missing ones, then
/// pushes the full definition of every target workflow.
/// </summary>
/// <param name="directoryInfo">Directory containing the workflow JSON files.</param>
/// <param name="jsonHelper">Helper used to read and validate the JSON files.</param>
/// <param name="options">Sync options; <c>NoDeletion</c> suppresses the delete pass.</param>
/// <param name="session">Authenticated session for the target app.</param>
public async Task ImportAsync(DirectoryInfo directoryInfo, JsonHelper jsonHelper, SyncOptions options, ISession session)
{
    var newWorkflows = GetWorkflowModels(directoryInfo, jsonHelper).ToList();

    // Name is the sync key on both sides, so duplicates make the mapping ambiguous.
    if (!newWorkflows.HasDistinctNames(x => x.Name))
    {
        log.WriteLine("ERROR: Can only sync workflows when all target workflows have distinct names.");
        return;
    }

    var current = await session.Apps.GetWorkflowsAsync(session.App);

    if (!current.Items.HasDistinctNames(x => x.Name))
    {
        log.WriteLine("ERROR: Can only sync workflows when all current workflows have distinct names.");
        return;
    }

    var workflowsByName = current.Items.ToDictionary(x => x.Name);

    // BUGFIX (matches the sibling rule importer): the delete pass must run when
    // deletion is allowed (NoDeletion == false) — the condition was inverted — and a
    // workflow is deleted only when NO incoming workflow has the SAME name (the
    // predicate was 'x.Name != name', which misfired whenever >= 2 workflows existed).
    if (!options.NoDeletion)
    {
        foreach (var (name, workflow) in workflowsByName.ToList())
        {
            if (!newWorkflows.Any(x => x.Name == name))
            {
                await log.DoSafeAsync($"Workflow '{name}' deleting", async () =>
                {
                    await session.Apps.DeleteWorkflowAsync(session.App, workflow.Id.ToString());

                    workflowsByName.Remove(name);
                });
            }
        }
    }

    // Create pass: add workflows that do not exist on the server yet.
    foreach (var newWorkflow in newWorkflows)
    {
        if (workflowsByName.ContainsKey(newWorkflow.Name))
        {
            continue;
        }

        await log.DoSafeAsync($"Workflow '{newWorkflow.Name}' creating", async () =>
        {
            // Re-check inside the guarded action so a concurrent add surfaces as an error.
            if (workflowsByName.ContainsKey(newWorkflow.Name))
            {
                throw new CLIException("Name already used.");
            }

            var request = new AddWorkflowDto { Name = newWorkflow.Name };

            var created = await session.Apps.PostWorkflowAsync(session.App, request);

            workflowsByName[newWorkflow.Name] = created.Items.FirstOrDefault(x => x.Name == newWorkflow.Name);
        });
    }

    // Update pass: push the full definition for every workflow we now know about.
    foreach (var newWorkflow in newWorkflows)
    {
        var workflow = workflowsByName.GetValueOrDefault(newWorkflow.Name);

        // NOTE(review): aborts the whole update pass on the first missing workflow
        // (same as the sibling rule importers); 'continue' may be intended — verify.
        if (workflow == null)
        {
            return;
        }

        await log.DoSafeAsync($"Workflow '{newWorkflow.Name}' updating", async () =>
        {
            await session.Apps.PutWorkflowAsync(session.App, workflow.Id.ToString(), newWorkflow);
        });
    }
}
// Integration test for LocalOrchestrator.GetEstimatedChangesCountAsync.
// After a first full sync has provisioned both sides, two rows are inserted on the
// CLIENT database (one ProductCategory, one Product); the estimated change count
// must then report exactly two changed tables, and both table names must appear in
// the selection. 'remoteOrchestrator' is retrieved but intentionally unused here.
// NOTE(review): the temporary server/client databases are never dropped in this
// test (unlike the sibling tests) — confirm whether cleanup was omitted on purpose.
public async Task LocalOrchestrator_GetEstimatedChanges_AfterInitialized_ShouldReturnEstimatedRowsCount() { var dbNameSrv = HelperDatabase.GetRandomName("tcp_lo_srv"); await HelperDatabase.CreateDatabaseAsync(ProviderType.Sql, dbNameSrv, true); var dbNameCli = HelperDatabase.GetRandomName("tcp_lo_cli"); await HelperDatabase.CreateDatabaseAsync(ProviderType.Sql, dbNameCli, true); var csServer = HelperDatabase.GetConnectionString(ProviderType.Sql, dbNameSrv); var serverProvider = new SqlSyncProvider(csServer); var csClient = HelperDatabase.GetConnectionString(ProviderType.Sql, dbNameCli); var clientProvider = new SqlSyncProvider(csClient); await new AdventureWorksContext((dbNameSrv, ProviderType.Sql, serverProvider), true, false).Database.EnsureCreatedAsync(); await new AdventureWorksContext((dbNameCli, ProviderType.Sql, clientProvider), true, false).Database.EnsureCreatedAsync(); var scopeName = "scopesnap1"; var syncOptions = new SyncOptions(); var setup = new SyncSetup(); // Make a first sync to be sure everything is in place var agent = new SyncAgent(clientProvider, serverProvider, this.Tables, scopeName); // Making a first sync, will initialize everything we need await agent.SynchronizeAsync(); // Get the orchestrators var localOrchestrator = agent.LocalOrchestrator; var remoteOrchestrator = agent.RemoteOrchestrator; // Client side : Create a product category and a product // Create a productcategory item // Create a new product on server var productId = Guid.NewGuid(); var productName = HelperDatabase.GetRandomName(); var productNumber = productName.ToUpperInvariant().Substring(0, 10); var productCategoryName = HelperDatabase.GetRandomName(); var productCategoryId = productCategoryName.ToUpperInvariant().Substring(0, 6); using (var ctx = new AdventureWorksContext((dbNameCli, ProviderType.Sql, clientProvider))) { var pc = new ProductCategory { ProductCategoryId = productCategoryId, Name = productCategoryName }; ctx.Add(pc); var product = new Product { ProductId 
= productId, Name = productName, ProductNumber = productNumber }; ctx.Add(product); await ctx.SaveChangesAsync(); } // Get changes to be populated to the server var changes = await localOrchestrator.GetEstimatedChangesCountAsync(); Assert.NotNull(changes.ClientChangesSelected); Assert.Equal(2, changes.ClientChangesSelected.TableChangesSelected.Count); Assert.Contains("Product", changes.ClientChangesSelected.TableChangesSelected.Select(tcs => tcs.TableName).ToList()); Assert.Contains("ProductCategory", changes.ClientChangesSelected.TableChangesSelected.Select(tcs => tcs.TableName).ToList()); }
// Regression test: after a merge the repository tip must be updated, so a later
// pull-only sync picks up merged content.
// Scenario: Bob and Sally both work on the same non-default adjunct model version
// ("notdefault") against a shared Hub repository. Bob commits "dog"; Sally commits
// "cat" and syncs (pull + merge + push); Bob then commits "ant" and syncs; Sally
// finally syncs with DoSendToOthers = false (pull/merge only). Her working lift
// file must contain both "ant" and "dog" — proving the post-merge tip was used.
// NOTE(review): the two branches ('default' vs 'notdefault') are set up explicitly
// to trigger the original issue; keep SetAdjunctModelVersion calls in place.
public void TipUpdatedPostMerge() { ConsoleProgress progress = new ConsoleProgress(); BobSetup bobSetup = new BobSetup(progress, _pathToTestRoot); var bobSynchronizer = bobSetup.GetSynchronizer(); //set up two branches to trigger issue SetAdjunctModelVersion(bobSynchronizer, "notdefault"); // Bob is on 'default' branch bobSetup.ChangeTextFile(bobSynchronizer); //Ok, this is unrealistic, but we just clone Bob onto Sally var hubRoot = Path.Combine(_pathToTestRoot, "Hub"); var sallyMachineRoot = Path.Combine(_pathToTestRoot, "sally"); Directory.CreateDirectory(sallyMachineRoot); Directory.CreateDirectory(hubRoot); var sallyProjectRoot = bobSetup.SetupClone(sallyMachineRoot); var hubProjectRoot = bobSetup.SetupClone(hubRoot); var sallyProject = BobSetup.CreateFolderConfig(sallyProjectRoot); var hubProject = BobSetup.CreateFolderConfig(hubProjectRoot); var repository = HgRepository.CreateOrUseExisting(sallyProject.FolderPath, progress); repository.SetUserNameInIni("sally", progress); // bob makes a change and syncs File.WriteAllText(bobSetup._pathToLift, LiftFileStrings.lift12Dog); var bobOptions = new SyncOptions { CheckinDescription = "added 'dog'", DoMergeWithOthers = true, DoSendToOthers = true, DoPullFromOthers = true }; bobOptions.RepositorySourcesToTry.Add(RepositoryAddress.Create("Hub", hubProject.FolderPath, false)); //now Sally modifies the original file, not having seen Bob's changes yet var sallyPathToLift = Path.Combine(sallyProject.FolderPath, Path.Combine("lexicon", "foo.lift")); File.WriteAllText(sallyPathToLift, LiftFileStrings.lift12Cat); //Sally syncs, pulling in Bob's change, and encountering a need to merge (no conflicts) var sallyOptions = new SyncOptions { CheckinDescription = "adding cat", DoPullFromOthers = true, DoSendToOthers = true, DoMergeWithOthers = true }; sallyOptions.RepositorySourcesToTry.Add(RepositoryAddress.Create("Hub", hubProject.FolderPath, false)); var sallySyncer = Synchronizer.FromProjectConfiguration(sallyProject, progress); 
SetAdjunctModelVersion(sallySyncer, "notdefault"); sallySyncer.SyncNow(sallyOptions); bobSynchronizer.SyncNow(bobOptions); // bob makes a change and syncs File.WriteAllText(bobSetup._pathToLift, LiftFileStrings.lift12DogAnt); bobSynchronizer.SyncNow(bobOptions); sallyOptions.DoSendToOthers = false; sallySyncer.SyncNow(sallyOptions); //Debug.WriteLine("bob's: " + File.ReadAllText(bobSetup._pathToLift)); var contents = File.ReadAllText(sallyPathToLift); //Debug.WriteLine("sally's: " + contents); Assert.IsTrue(contents.Contains("ant")); Assert.IsTrue(contents.Contains("dog")); }
/// <summary>
/// Exports every rule of the app to <c>rules/rule{index}.json</c>, ordered by
/// creation date so the file indexes are stable across runs.
/// </summary>
/// <param name="directoryInfo">Target directory for the exported files.</param>
/// <param name="jsonHelper">Helper that writes the JSON with its schema reference.</param>
/// <param name="options">Sync options (unused by the export).</param>
/// <param name="session">Authenticated session for the target app.</param>
public async Task ExportAsync(DirectoryInfo directoryInfo, JsonHelper jsonHelper, SyncOptions options, ISession session)
{
    var current = await session.Rules.GetRulesAsync();

    var index = 0;

    foreach (var rule in current.Items.OrderBy(x => x.Created))
    {
        var ruleName = rule.Name;

        // Rules may exist without a name; use a placeholder so the log line stays readable.
        // FIX: corrected the typo in the placeholder ('<Unnammed>' -> '<Unnamed>').
        if (string.IsNullOrWhiteSpace(ruleName))
        {
            ruleName = "<Unnamed>";
        }

        await log.DoSafeAsync($"Exporting {ruleName} ({rule.Id})", async () =>
        {
            await jsonHelper.WriteWithSchemaAs <RuleModel>(directoryInfo, $"rules/rule{index}.json", rule, "../__json/rule");
        });

        index++;
    }
}
/// <summary>
/// Synchronizes rule definitions read from the sync file system into the app:
/// an optional delete pass (when <c>options.Delete</c> is set), then create,
/// update and enable/disable passes keyed by rule name.
/// </summary>
/// <param name="sync">Sync service providing the file system and readers.</param>
/// <param name="options">Sync options; <c>Delete</c> enables the delete pass.</param>
/// <param name="session">Authenticated session for the target app.</param>
public async Task ImportAsync(ISyncService sync, SyncOptions options, ISession session)
{
    var targetRules =
        GetFiles(sync.FileSystem)
            .Select(file => sync.Read <RuleModel>(file, log))
            .ToList();

    // Names are the sync keys on both sides, so they must be unambiguous.
    if (!targetRules.HasDistinctNames(x => x.Name))
    {
        log.WriteLine("ERROR: Can only sync rules when all target rules have distinct names.");
        return;
    }

    var current = await session.Rules.GetRulesAsync();

    if (!current.Items.HasDistinctNames(x => x.Name))
    {
        log.WriteLine("ERROR: Can only sync rules when all current rules have distinct names.");
        return;
    }

    var rulesByName = current.Items.ToDictionary(x => x.Name);

    if (options.Delete)
    {
        // Remove every existing rule whose name no longer appears in the target set.
        foreach (var (name, rule) in rulesByName.ToList())
        {
            var stillWanted = targetRules.Any(x => x.Name == name);

            if (stillWanted)
            {
                continue;
            }

            await log.DoSafeAsync($"Rule '{name}' deleting", async () =>
            {
                await session.Rules.DeleteRuleAsync(rule.Id);

                rulesByName.Remove(name);
            });
        }
    }

    // Rule triggers reference schemas by id; resolve names before creating/updating.
    await MapSchemaNamesToIdsAsync(session, targetRules);

    foreach (var newRule in targetRules)
    {
        if (rulesByName.ContainsKey(newRule.Name))
        {
            continue;
        }

        await log.DoSafeAsync($"Rule '{newRule.Name}' creating", async () =>
        {
            // Re-check inside the guarded action so a concurrent add surfaces as an error.
            if (rulesByName.ContainsKey(newRule.Name))
            {
                throw new CLIException("Name already used.");
            }

            rulesByName[newRule.Name] = await session.Rules.CreateRuleAsync(newRule.ToCreate());
        });
    }

    foreach (var newRule in targetRules)
    {
        var rule = rulesByName.GetValueOrDefault(newRule.Name);

        // A missing entry means the create step failed; stop the update pass entirely.
        if (rule == null)
        {
            return;
        }

        await log.DoVersionedAsync($"Rule '{newRule.Name}' updating", rule.Version, async () =>
        {
            rule = await session.Rules.UpdateRuleAsync(rule.Id, newRule.ToUpdate());

            return rule.Version;
        });

        if (newRule.IsEnabled == rule.IsEnabled)
        {
            continue;
        }

        if (newRule.IsEnabled)
        {
            await log.DoVersionedAsync($"Rule '{newRule.Name}' enabling", rule.Version, async () =>
            {
                var result = await session.Rules.EnableRuleAsync(rule.Id);

                return result.Version;
            });
        }
        else
        {
            await log.DoVersionedAsync($"Rule '{newRule.Name}' disabling", rule.Version, async () =>
            {
                var result = await session.Rules.DisableRuleAsync(rule.Id);

                return result.Version;
            });
        }
    }
}
/// <summary>
/// Imports rule definitions from the given directory into the app: deletes rules
/// that have no counterpart in the import set (unless deletion is disabled), then
/// creates missing rules, updates each one, and reconciles the enabled state.
/// </summary>
/// <param name="directoryInfo">Directory containing the rule JSON files.</param>
/// <param name="jsonHelper">Helper used to read and validate the JSON files.</param>
/// <param name="options">Sync options; <c>NoDeletion</c> suppresses the delete pass.</param>
/// <param name="session">Authenticated session for the target app.</param>
public async Task ImportAsync(DirectoryInfo directoryInfo, JsonHelper jsonHelper, SyncOptions options, ISession session)
{
    var newRules = GetRuleFiles(directoryInfo, jsonHelper).ToList();

    if (!newRules.HasDistinctNames(x => x.Name))
    {
        log.WriteLine("ERROR: Can only sync rules when all target rules have distinct names.");
        return;
    }

    var current = await session.Rules.GetRulesAsync();

    if (!current.Items.HasDistinctNames(x => x.Name))
    {
        log.WriteLine("ERROR: Can only sync rules when all current rules have distinct names.");
        return;
    }

    var rulesByName = current.Items.ToDictionary(x => x.Name);

    if (!options.NoDeletion)
    {
        // Collect the orphans first, then delete; the dictionary shrinks as we go.
        var orphaned = rulesByName.Where(kvp => !newRules.Any(x => x.Name == kvp.Key)).ToList();

        foreach (var (name, rule) in orphaned)
        {
            await log.DoSafeAsync($"Rule '{name}' deleting", async () =>
            {
                await session.Rules.DeleteRuleAsync(rule.Id);

                rulesByName.Remove(name);
            });
        }
    }

    // Create pass: only rules whose name is not present on the server yet.
    foreach (var newRule in newRules.Where(x => !rulesByName.ContainsKey(x.Name)))
    {
        await log.DoSafeAsync($"Rule '{newRule.Name}' creating", async () =>
        {
            if (rulesByName.ContainsKey(newRule.Name))
            {
                throw new CLIException("Name already used.");
            }

            var created = await session.Rules.CreateRuleAsync(newRule.ToCreate());

            rulesByName[newRule.Name] = created;
        });
    }

    // Update pass: push each definition, then align the enabled flag.
    foreach (var newRule in newRules)
    {
        if (!rulesByName.TryGetValue(newRule.Name, out var rule) || rule == null)
        {
            // Creation failed earlier; nothing sensible left to update.
            return;
        }

        await log.DoVersionedAsync($"Rule '{newRule.Name}' updating", rule.Version, async () =>
        {
            rule = await session.Rules.UpdateRuleAsync(rule.Id, newRule.ToUpdate());

            return rule.Version;
        });

        if (newRule.IsEnabled != rule.IsEnabled)
        {
            if (newRule.IsEnabled)
            {
                await log.DoVersionedAsync($"Rule '{newRule.Name}' enabling", rule.Version,
                    async () => (await session.Rules.EnableRuleAsync(rule.Id)).Version);
            }
            else
            {
                await log.DoVersionedAsync($"Rule '{newRule.Name}' disabling", rule.Version,
                    async () => (await session.Rules.DisableRuleAsync(rule.Id)).Version);
            }
        }
    }
}
// Converts or copies one audio item for a sync job.
// If the stream builder decides the item needs transcoding AND the job allows
// conversion (jobOptions.IsConverting), the item is encoded into
// jobItem.TemporaryPath, with progress persisted at most once every
// DatabaseProgressUpdateIntervalSeconds. Cancellation re-queues the item
// (Status = Queued, Progress = 0); any other failure marks it Failed; both cases
// persist the status and return early. When no transcode is needed, the source is
// used directly: file protocol copies the path, http downloads the file, anything
// else throws InvalidOperationException. On success the item ends with
// SupportsTranscoding = false, Progress = 50 and Status = ReadyToTransfer.
// NOTE(review): conversion progress is halved (pct / 2) and the final state is
// Progress = 50 — presumably the transfer step accounts for the remaining half;
// confirm against the transfer code.
// NOTE(review): when enableConversion is false the method returns without
// persisting any state change — verify callers retry such items later.
private async Task Sync(SyncJobItem jobItem, Audio item, User user, bool enableConversion, SyncOptions syncOptions, IProgress <double> progress, CancellationToken cancellationToken) { var job = _syncManager.GetJob(jobItem.JobId); var jobOptions = _syncManager.GetAudioOptions(jobItem, job); var conversionOptions = new AudioOptions { Profile = jobOptions.DeviceProfile }; conversionOptions.DeviceId = jobItem.TargetId; conversionOptions.Context = EncodingContext.Static; conversionOptions.ItemId = item.Id.ToString("N"); conversionOptions.MediaSources = _mediaSourceManager.GetStaticMediaSources(item, false, user).ToList(); var streamInfo = new StreamBuilder(_mediaEncoder, _logger).BuildAudioItem(conversionOptions); var mediaSource = streamInfo.MediaSource; jobItem.MediaSourceId = streamInfo.MediaSourceId; jobItem.TemporaryPath = GetTemporaryPath(jobItem); if (streamInfo.PlayMethod == PlayMethod.Transcode && jobOptions.IsConverting) { if (!enableConversion) { return; } jobItem.Status = SyncJobItemStatus.Converting; await _syncManager.UpdateSyncJobItemInternal(jobItem).ConfigureAwait(false); await UpdateJobStatus(jobItem.JobId).ConfigureAwait(false); try { var lastJobUpdate = DateTime.MinValue; var innerProgress = new ActionableProgress <double>(); innerProgress.RegisterAction(async pct => { progress.Report(pct); if ((DateTime.UtcNow - lastJobUpdate).TotalSeconds >= DatabaseProgressUpdateIntervalSeconds) { jobItem.Progress = pct / 2; await _syncManager.UpdateSyncJobItemInternal(jobItem).ConfigureAwait(false); await UpdateJobStatus(jobItem.JobId).ConfigureAwait(false); } }); jobItem.OutputPath = await _mediaEncoder.EncodeAudio(new EncodingJobOptions(streamInfo, conversionOptions.Profile) { OutputDirectory = jobItem.TemporaryPath, CpuCoreLimit = syncOptions.TranscodingCpuCoreLimit }, innerProgress, cancellationToken); jobItem.ItemDateModifiedTicks = item.DateModified.Ticks; _syncManager.OnConversionComplete(jobItem); } catch (OperationCanceledException) { jobItem.Status = 
SyncJobItemStatus.Queued; jobItem.Progress = 0; } catch (Exception ex) { jobItem.Status = SyncJobItemStatus.Failed; _logger.ErrorException("Error during sync transcoding", ex); } if (jobItem.Status == SyncJobItemStatus.Failed || jobItem.Status == SyncJobItemStatus.Queued) { await _syncManager.UpdateSyncJobItemInternal(jobItem).ConfigureAwait(false); return; } jobItem.MediaSource = await GetEncodedMediaSource(jobItem.OutputPath, user, false).ConfigureAwait(false); } else { if (mediaSource.Protocol == MediaProtocol.File) { jobItem.OutputPath = mediaSource.Path; } else if (mediaSource.Protocol == MediaProtocol.Http) { jobItem.OutputPath = await DownloadFile(jobItem, mediaSource, cancellationToken).ConfigureAwait(false); } else { throw new InvalidOperationException(string.Format("Cannot direct stream {0} protocol", mediaSource.Protocol)); } jobItem.ItemDateModifiedTicks = item.DateModified.Ticks; jobItem.MediaSource = mediaSource; } jobItem.MediaSource.SupportsTranscoding = false; jobItem.Progress = 50; jobItem.Status = SyncJobItemStatus.ReadyToTransfer; await _syncManager.UpdateSyncJobItemInternal(jobItem).ConfigureAwait(false); }
/// <summary>
/// Registers a server-side sync provider and a scoped <c>WebServerAgent</c> in the DI
/// container; inject the <c>WebServerAgent</c> into your controller to handle sync requests.
/// </summary>
/// <param name="serviceCollection">Service collection to register into.</param>
/// <param name="providerType">Provider type inherited from CoreProvider (SqlSyncProvider, MySqlSyncProvider, OracleSyncProvider). Should have [CanBeServerProvider=true].</param>
/// <param name="connectionString">Provider connection string.</param>
/// <param name="scopeName">Scope name served by the agent.</param>
/// <param name="setup">Server-side configuration; must list at least the tables to be synchronized.</param>
/// <param name="options">Options, not shared with clients, applied only locally. Can be null.</param>
/// <param name="webServerOptions">Web-server specific options. Can be null.</param>
public static IServiceCollection AddSyncServer(this IServiceCollection serviceCollection, Type providerType, string connectionString, string scopeName = SyncOptions.DefaultScopeName, SyncSetup setup = null, SyncOptions options = null, WebServerOptions webServerOptions = null)
{
    if (string.IsNullOrWhiteSpace(connectionString))
    {
        throw new ArgumentNullException(nameof(connectionString));
    }

    if (setup == null)
    {
        throw new ArgumentNullException(nameof(setup));
    }

    // Fall back to defaults when the optional option bags were not supplied.
    var effectiveWebOptions = webServerOptions ?? new WebServerOptions();
    var effectiveOptions = options ?? new SyncOptions();

    // Instantiate the concrete provider and attach the connection string.
    var syncProvider = (CoreProvider)Activator.CreateInstance(providerType);
    syncProvider.ConnectionString = connectionString;

    // One WebServerAgent per request scope.
    serviceCollection.AddScoped(sp => new WebServerAgent(syncProvider, setup, effectiveOptions, effectiveWebOptions, scopeName));

    return serviceCollection;
}
/// <summary>
/// Overwrites the existing object with the given key (wraps it in an
/// <c>OfflineEntry</c> and raises the change notification via <c>SetAndRaise</c>).
/// </summary>
/// <param name="key"> The key. </param>
/// <param name="obj"> The object to set. </param>
/// <param name="syncOptions"> Controls how the item is synced online. </param>
/// <param name="priority"> The priority. Objects with higher priority will be synced first. Higher number indicates higher priority. </param>
public void Set(string key, T obj, SyncOptions syncOptions, int priority = 1) { this.SetAndRaise(key, new OfflineEntry(key, obj, priority, syncOptions)); }
/// <summary>
/// Generic convenience overload: registers <typeparamref name="TProvider"/> as the
/// server sync provider under the default scope name, building the setup from the
/// given table list.
/// </summary>
/// <param name="serviceCollection">Service collection to register into.</param>
/// <param name="connectionString">Provider connection string.</param>
/// <param name="tables">Tables to synchronize; may be null/default.</param>
/// <param name="options">Server-local sync options. Can be null.</param>
/// <param name="webServerOptions">Web-server specific options. Can be null.</param>
public static IServiceCollection AddSyncServer <TProvider>(this IServiceCollection serviceCollection, string connectionString, string[] tables = default, SyncOptions options = null, WebServerOptions webServerOptions = null) where TProvider : CoreProvider, new()
{
    var setup = new SyncSetup(tables);

    return serviceCollection.AddSyncServer(typeof(TProvider), connectionString, SyncOptions.DefaultScopeName, setup, options, webServerOptions);
}
// Bridges the platform-agnostic Models.* sync types to the underlying SDK types by
// round-tripping each object through JSON: the Models.SyncUpTarget and
// Models.SyncOptions are serialized, re-materialized as SDK SyncUpTarget /
// SyncOptions, passed to _syncManager.SyncUp, and the resulting SyncState is
// converted back to Models.SyncState the same way.
// NOTE(review): deserializing 'callback' (a string) into an Action<SyncState> via
// JsonConvert is unusual — default Json.NET settings cannot materialize delegates;
// presumably a custom converter resolves it to a registered handler. Verify.
public Models.SyncState SyncUp(Models.SyncUpTarget target, Models.SyncOptions options, string soupName, string callback) { var syncUp = JsonConvert.SerializeObject(target); var action = JsonConvert.DeserializeObject <Action <SyncState> >(callback); var syncOptions = JsonConvert.SerializeObject(options); var state = _syncManager.SyncUp(JsonConvert.DeserializeObject <SyncUpTarget>(syncUp), SyncOptions.FromJson(JObject.Parse(syncOptions)), soupName, action); var syncState = JsonConvert.SerializeObject(state); return(JsonConvert.DeserializeObject <Models.SyncState>(syncState)); }
// Integration test for the LocalOrchestrator change-selection interceptors.
// After an initial full sync, two rows are inserted on the CLIENT database (one
// ProductCategory, one Product). GetChangesAsync must then fire the table-level
// selecting/selected interceptors once per synced table (this.Tables.Length) and
// the database-level selecting/selected interceptors exactly once, with the
// database-level 'selected' callback seeing a batch and two changed tables.
public async Task LocalOrchestrator_GetChanges() { var dbNameSrv = HelperDatabase.GetRandomName("tcp_lo_srv"); await HelperDatabase.CreateDatabaseAsync(ProviderType.Sql, dbNameSrv, true); var dbNameCli = HelperDatabase.GetRandomName("tcp_lo_cli"); await HelperDatabase.CreateDatabaseAsync(ProviderType.Sql, dbNameCli, true); var csServer = HelperDatabase.GetConnectionString(ProviderType.Sql, dbNameSrv); var serverProvider = new SqlSyncProvider(csServer); var csClient = HelperDatabase.GetConnectionString(ProviderType.Sql, dbNameCli); var clientProvider = new SqlSyncProvider(csClient); await new AdventureWorksContext((dbNameSrv, ProviderType.Sql, serverProvider), true, false).Database.EnsureCreatedAsync(); await new AdventureWorksContext((dbNameCli, ProviderType.Sql, clientProvider), true, false).Database.EnsureCreatedAsync(); var scopeName = "scopesnap1"; var syncOptions = new SyncOptions(); var setup = new SyncSetup(); // Make a first sync to be sure everything is in place var agent = new SyncAgent(clientProvider, serverProvider, this.Tables, scopeName); // Making a first sync, will initialize everything we need var s = await agent.SynchronizeAsync(); // Get the orchestrators var localOrchestrator = agent.LocalOrchestrator; var remoteOrchestrator = agent.RemoteOrchestrator; // Client side : Create a product category and a product // Create a productcategory item // Create a new product on server var productId = Guid.NewGuid(); var productName = HelperDatabase.GetRandomName(); var productNumber = productName.ToUpperInvariant().Substring(0, 10); var productCategoryName = HelperDatabase.GetRandomName(); var productCategoryId = productCategoryName.ToUpperInvariant().Substring(0, 6); using (var ctx = new AdventureWorksContext((dbNameCli, ProviderType.Sql, clientProvider))) { var pc = new ProductCategory { ProductCategoryId = productCategoryId, Name = productCategoryName }; ctx.Add(pc); var product = new Product { ProductId = productId, Name = productName, ProductNumber = 
productNumber }; ctx.Add(product); await ctx.SaveChangesAsync(); } var onDatabaseSelecting = 0; var onDatabaseSelected = 0; var onSelecting = 0; var onSelected = 0; localOrchestrator.OnDatabaseChangesSelecting(dcs => { onDatabaseSelecting++; }); localOrchestrator.OnDatabaseChangesSelected(dcs => { Assert.NotNull(dcs.BatchInfo); Assert.Equal(2, dcs.ChangesSelected.TableChangesSelected.Count); onDatabaseSelected++; }); localOrchestrator.OnTableChangesSelecting(action => { Assert.NotNull(action.Command); onSelecting++; }); localOrchestrator.OnTableChangesSelected(action => { Assert.NotNull(action.Changes); onSelected++; }); // Get changes to be populated to the server var changes = await localOrchestrator.GetChangesAsync(); Assert.Equal(this.Tables.Length, onSelecting); Assert.Equal(this.Tables.Length, onSelected); Assert.Equal(1, onDatabaseSelected); Assert.Equal(1, onDatabaseSelecting); HelperDatabase.DropDatabase(ProviderType.Sql, dbNameSrv); HelperDatabase.DropDatabase(ProviderType.Sql, dbNameCli); }
/// <summary>
/// Exports every schema of the app to <c>schemas/{name}.json</c>, after priming the
/// JSON helper with the name-to-id schema map so cross-schema references resolve.
/// </summary>
/// <param name="directoryInfo">Target directory for the exported files.</param>
/// <param name="jsonHelper">Helper that converts and writes the schema JSON.</param>
/// <param name="options">Sync options (unused by the export).</param>
/// <param name="session">Authenticated session for the target app.</param>
public async Task ExportAsync(DirectoryInfo directoryInfo, JsonHelper jsonHelper, SyncOptions options, ISession session)
{
    var current = await session.Schemas.GetSchemasAsync(session.App);

    // The helper needs the complete name -> id map before any schema is written.
    jsonHelper.SetSchemaMap(current.Items.ToDictionary(x => x.Name, x => x.Id));

    // Alphabetical order keeps the export deterministic across runs.
    var orderedSchemas = current.Items.OrderBy(x => x.Name);

    foreach (var schema in orderedSchemas)
    {
        await log.DoSafeAsync($"Exporting '{schema.Name}'", async () =>
        {
            var details = await session.Schemas.GetSchemaAsync(session.App, schema.Name);

            var model = new SchemeModel
            {
                Name = schema.Name,
                Schema = jsonHelper.Convert <SynchronizeSchemaDto>(details)
            };

            await jsonHelper.WriteWithSchema(directoryInfo, $"schemas/{schema.Name}.json", model, "../__json/schema");
        });
    }
}
/// <summary>
/// Convenience overload: creates a <c>SyncManager</c> with no file-name excludes,
/// delegating to the main constructor with a null exclude collection.
/// </summary>
/// <param name="replica1RootPath"> Root path of the first replica. </param>
/// <param name="replica2RootPath"> Root path of the second replica. </param>
/// <param name="syncOption"> Options controlling the synchronization. </param>
/// <param name="logger"> Logger for sync progress and errors. </param>
// NOTE(review): the target constructor stores the null excludes collection as-is;
// verify downstream code handles a null fileNameExcludes.
public SyncManager(string replica1RootPath, string replica2RootPath, SyncOptions syncOption, ILogger logger) : this(replica1RootPath, replica2RootPath, null, syncOption, logger) { }