/// <summary>
/// Shared test-fixture setup: builds the system under test with the mocked
/// repository, logger and client dependencies declared on the base class.
/// </summary>
public SqlServerConnectorTestsBase()
{
    Sut = new SqlServerConnector(Repo.Object, Logger.Object, Client.Object);
}
/// <summary>
/// Lazily creates the shared <see cref="SqlServerConnector"/> the first time a
/// <see cref="RawConnector"/> is constructed; subsequent instances reuse it.
/// NOTE(review): this lazy initialization is not thread-safe — confirm the type
/// is only constructed from a single thread.
/// </summary>
public RawConnector()
{
    // Idiomatic C# 8 null-coalescing assignment replaces the explicit null check.
    connector ??= new SqlServerConnector();
}
/// <summary>
/// Materializes <paramref name="simpleQuery"/> into a list while registering a
/// <see cref="SqlDependency"/> on the underlying command, so that
/// <paramref name="invalidation"/> is invoked when the data changes on the server.
/// </summary>
/// <typeparam name="T">Element type produced by the query projector.</typeparam>
/// <param name="simpleQuery">Query to execute; its provider must be a <c>DbQueryProvider</c>.</param>
/// <param name="type">Entity type whose table determines the target database.</param>
/// <param name="exceptionContext">Context string recorded in logs and logged exceptions.</param>
/// <param name="invalidation">Callback fired on a genuine change notification.</param>
/// <returns>The fully materialized result list.</returns>
/// <exception cref="InvalidOperationException">When SqlDependency support is disabled.</exception>
public static List<T> ToListWithInvalidation<T>(this IQueryable<T> simpleQuery, Type type, string exceptionContext, Action<SqlNotificationEventArgs> invalidation)
{
    if (!WithSqlDependency)
    {
        throw new InvalidOperationException("ToListWithInvalidation requires SqlDependency");
    }

    ITranslateResult tr;
    // Translate without database-qualified names: SqlDependency rejects three-part object names.
    using (ObjectName.OverrideOptions(new ObjectNameOptions { AvoidDatabaseName = true }))
        tr = ((DbQueryProvider)simpleQuery.Provider).GetRawTranslateResult(simpleQuery.Expression);

    OnChangeEventHandler onChange = (object sender, SqlNotificationEventArgs args) =>
    {
        try
        {
            // Anything other than a data change means the subscription itself is broken.
            if (args.Type != SqlNotificationType.Change)
                throw new InvalidOperationException(
                    "Problems with SqlDependency (Type : {0} Source : {1} Info : {2}) on query: \r\n{3}"
                    .FormatWith(args.Type, args.Source, args.Info, tr.MainCommand.PlainSql()));

            if (args.Info == SqlNotificationInfo.PreviousFire)
                throw new InvalidOperationException("The same transaction that loaded the data is invalidating it!")
                { Data = { { "query", tr.MainCommand.PlainSql() } } };

            if (CacheLogic.LogWriter != null)
                // FIX: exceptionContext was previously passed to WriteLine instead of FormatWith,
                // making the one-argument FormatWith throw on the "{0} {1}" pattern.
                CacheLogic.LogWriter.WriteLine("Change ToListWithInvalidations {0} {1}".FormatWith(typeof(T).TypeName(), exceptionContext));

            invalidation(args);
        }
        catch (Exception e)
        {
            e.LogException(c => c.ControllerName = exceptionContext);
        }
    };

    SimpleReader? reader = null;

    Expression<Func<IProjectionRow, T>> projectorExpression = (Expression<Func<IProjectionRow, T>>)tr.GetMainProjector();
    Func<IProjectionRow, T> projector = projectorExpression.Compile();

    List<T> list = new List<T>();

    CacheLogic.AssertSqlDependencyStarted();

    Table table = Schema.Current.Table(type);
    DatabaseName? db = table.Name.Schema?.Database;

    // Run the command against the connector of the database that owns the table.
    SqlServerConnector subConnector = (SqlServerConnector)Connector.Current.ForDatabase(db);

    if (CacheLogic.LogWriter != null)
        // FIX: same misplaced-parenthesis bug as above — both arguments belong to FormatWith.
        CacheLogic.LogWriter.WriteLine("Load ToListWithInvalidations {0} {1}".FormatWith(typeof(T).TypeName(), exceptionContext));

    using (new EntityCache())
    using (var r = EntityCache.NewRetriever())
    {
        subConnector.ExecuteDataReaderDependency(tr.MainCommand, onChange, StartSqlDependencyAndEnableBrocker, fr =>
        {
            // The reader wrapper is created lazily on the first row.
            if (reader == null)
                reader = new SimpleReader(fr, r);

            list.Add(projector(reader));
        }, CommandType.Text);

        r.CompleteAll();
    }

    return list;
}
/// <summary>
/// Starts <see cref="SqlDependency"/> listening for every database in the current
/// schema, cleaning up stale Service Broker objects first and recovering from the
/// two common startup failures (broker disabled, wrong default schema).
/// When SqlDependency support is disabled the method only marks itself as started.
/// </summary>
public static void StartSqlDependencyAndEnableBrocker()
{
    if (!WithSqlDependency)
    {
        started = true;
        return;
    }

    lock (startKeyLock)
    {
        SqlServerConnector connector = (SqlServerConnector)Connector.Current;
        bool isPostgree = false; // object names built here are always SQL Server style
        if (DropStaleServices)
        {
            //to avoid massive logs with SqlQueryNotificationStoredProcedure
            //http://rusanu.com/2007/11/10/when-it-rains-it-pours/
            // Service queues whose activation procedure no longer exists.
            var staleServices = (from s in Database.View<SysServiceQueues>()
                                 where s.activation_procedure != null && !Database.View<SysProcedures>().Any(p => "[" + p.Schema().name + "].[" + p.name + "]" == s.activation_procedure)
                                 select new ObjectName(new SchemaName(null, s.Schema().name, isPostgree), s.name, isPostgree)).ToList();

            foreach (var s in staleServices)
            {
                TryDropService(s.Name);
                TryDropQueue(s);
            }

            // Notification procedures orphaned by previous runs (their queue is gone).
            var oldProcedures = (from p in Database.View<SysProcedures>()
                                 where p.name.Contains("SqlQueryNotificationStoredProcedure-") && !Database.View<SysServiceQueues>().Any(s => "[" + p.Schema().name + "].[" + p.name + "]" == s.activation_procedure)
                                 select new ObjectName(new SchemaName(null, p.Schema().name, isPostgree), p.name, isPostgree)).ToList();

            foreach (var item in oldProcedures)
            {
                try
                {
                    Executor.ExecuteNonQuery(new SqlPreCommandSimple($"DROP PROCEDURE {item.ToString()}"));
                }
                catch (SqlException ex)
                {
                    // NOTE(review): error 15151 is tolerated — presumably "cannot find the
                    // object / already dropped"; confirm against SQL Server error docs.
                    if (ex.Number != 15151)
                    {
                        throw;
                    }
                }
            }
        }

        foreach (var database in Schema.Current.DatabaseNames())
        {
            SqlServerConnector sub = (SqlServerConnector)connector.ForDatabase(database);

            try
            {
                try
                {
                    SqlDependency.Start(sub.ConnectionString);
                }
                catch (InvalidOperationException ex)
                {
                    string databaseName = database?.ToString() ?? Connector.Current.DatabaseName();

                    // SqlDependency requires the Service Broker to be enabled on the database.
                    if (ex.Message.Contains("SQL Server Service Broker"))
                    {
                        EnableOrCreateBrocker(databaseName);
                        SqlDependency.Start(sub.ConnectionString);
                    }
                }
            }
            catch (SqlException e)
            {
                // 2797: the current user's default schema is not dbo, which breaks
                // SqlDependency's internal object creation — fix the schema and retry.
                if (e.Number == 2797)
                {
                    string currentUser = (string)Executor.ExecuteDataTable("SELECT CURRENT_USER").Rows[0][0];

                    Executor.ExecuteNonQuery("ALTER USER [{0}] WITH DEFAULT_SCHEMA = dbo;".FormatWith(currentUser));

                    SqlDependency.Start(sub.ConnectionString);
                }
                else
                {
                    throw;
                }
            }
        }

        RegisterOnShutdown();

        started = true;
    }
}
/// <summary>
/// Imports one table from the disconnected machine database: subset-upload
/// strategies first update existing rows, then the new rows are inserted.
/// </summary>
public override ImportResult Import(DisconnectedMachineEntity machine, Table table, IDisconnectedStrategy strategy, SqlServerConnector newDatabase)
{
    bool isPostgres = Schema.Current.Settings.IsPostgres;

    int updatedRows = 0;
    if (strategy.Upload == Upload.Subset)
    {
        var sourceDatabase = new DatabaseName(null, newDatabase.DatabaseName(), isPostgres);
        updatedRows = Update(machine, table, strategy, sourceDatabase);
    }

    int insertedRows = Insert(machine, table, strategy, newDatabase);

    return new ImportResult { Inserted = insertedRows, Updated = updatedRows };
}
/// <summary>
/// Starts an asynchronous import of a disconnected machine's database: optionally
/// restores the uploaded backup, verifies the schema is unchanged, imports every
/// upload table (with per-table timing/row counts), unlocks the machine's rows and
/// drops the temporary database. Progress is recorded on a
/// <see cref="DisconnectedImportEntity"/>.
/// </summary>
/// <param name="machine">Machine whose data is being imported.</param>
/// <param name="file">Optional backup stream uploaded by the machine; when null the
/// import runs without restoring a backup ("fix" mode — see the final state update).</param>
/// <returns>A lite reference to the tracking import entity; work continues in the background.</returns>
public virtual Lite<DisconnectedImportEntity> BeginImportDatabase(DisconnectedMachineEntity machine, Stream? file = null)
{
    // Create the tracking entity up-front so timings can be written to it as phases complete.
    Lite<DisconnectedImportEntity> import = new DisconnectedImportEntity
    {
        Machine = machine.ToLite(),
        Copies = uploadTables.Select(t => new DisconnectedImportTableEmbedded
        {
            Type = t.Type.ToTypeEntity().ToLite(),
            DisableForeignKeys = t.Strategy.DisableForeignKeys.Value,
        }).ToMList()
    }.Save().ToLite();

    // Persist the uploaded backup to the shared network location.
    if (file != null)
    {
        using (FileStream fs = File.OpenWrite(BackupNetworkFileName(machine, import)))
        {
            file.CopyTo(fs);
            file.Close();
        }
    }

    // NOTE(review): threadContext is captured but not referenced below — confirm
    // ExportThreadContext() is needed for its side effects only.
    var threadContext = Statics.ExportThreadContext();
    var cancelationSource = new CancellationTokenSource();
    var user = UserEntity.Current;
    var token = cancelationSource.Token;

    var task = Task.Factory.StartNew(() =>
    {
        lock (SyncLock) // only one import runs at a time
            using (UserHolder.UserSession(user))
            {
                OnStartImporting(machine);
                DisconnectedMachineEntity.Current = machine.ToLite();
                try
                {
                    // Phase 1: restore the uploaded backup (only when a file was provided).
                    if (file != null)
                    {
                        using (token.MeasureTime(l => import.InDB().UnsafeUpdate().Set(s => s.RestoreDatabase, s => l).Execute()))
                        {
                            DropDatabaseIfExists(machine);
                            RestoreDatabase(machine, import);
                        }
                    }

                    string connectionString = GetImportConnectionString(machine);

                    var newDatabase = new SqlServerConnector(connectionString, Schema.Current, ((SqlServerConnector)Connector.Current).Version);

                    // Phase 2: abort if the restored schema no longer matches the current one.
                    using (token.MeasureTime(l => import.InDB().UnsafeUpdate().Set(s => s.SynchronizeSchema, s => l).Execute()))
                    using (Connector.Override(newDatabase))
                    using (ObjectName.OverrideOptions(new ObjectNameOptions { AvoidDatabaseName = true }))
                    using (ExecutionMode.DisableCache())
                    {
                        var script = Administrator.TotalSynchronizeScript(interactive: false, schemaOnly: true);

                        if (script != null)
                        {
                            string fileName = BackupNetworkFileName(machine, import) + ".sql";
                            script.Save(fileName);
                            throw new InvalidOperationException("The schema has changed since the last export. A schema sync script has been saved on: {0}".FormatWith(fileName));
                        }
                    }

                    try
                    {
                        // Phase 3: temporarily drop FKs for the strategies that request it.
                        using (token.MeasureTime(l => import.InDB().UnsafeUpdate().Set(s => s.DisableForeignKeys, s => l).Execute()))
                            foreach (var item in uploadTables.Where(u => u.Strategy.DisableForeignKeys.Value))
                            {
                                DisableForeignKeys(item.Table);
                            }

                        // Phase 4: run each table's importer, recording timing and row counts.
                        foreach (var tuple in uploadTables)
                        {
                            ImportResult? result = null;
                            using (token.MeasureTime(l =>
                            {
                                // Written after the importer finished (result stays null on failure).
                                if (result != null)
                                {
                                    import.MListElementsLite(_ => _.Copies).Where(mle => mle.Element.Type.Is(tuple.Type.ToTypeEntity())).UnsafeUpdateMList()
                                    .Set(mle => mle.Element.CopyTable, mle => l)
                                    .Set(mle => mle.Element.DisableForeignKeys, mle => tuple.Strategy.DisableForeignKeys.Value)
                                    .Set(mle => mle.Element.InsertedRows, mle => result.Inserted)
                                    .Set(mle => mle.Element.UpdatedRows, mle => result.Updated)
                                    .Execute();
                                }
                            }))
                            {
                                result = tuple.Strategy.Importer!.Import(machine, tuple.Table, tuple.Strategy, newDatabase);
                            }
                        }

                        using (token.MeasureTime(l => import.InDB().UnsafeUpdate().Set(s => s.Unlock, s => l).Execute()))
                            UnlockTables(machine.ToLite());
                    }
                    finally
                    {
                        // Always restore the foreign keys, even when the import failed.
                        using (token.MeasureTime(l => import.InDB().UnsafeUpdate().Set(s => s.EnableForeignKeys, s => l).Execute()))
                            foreach (var item in uploadTables.Where(u => u.Strategy.DisableForeignKeys.Value))
                            {
                                EnableForeignKeys(item.Table);
                            }
                    }

                    using (token.MeasureTime(l => import.InDB().UnsafeUpdate().Set(s => s.DropDatabase, s => l).Execute()))
                        DropDatabase(newDatabase);

                    token.ThrowIfCancellationRequested();

                    import.InDB().UnsafeUpdate()
                    .Set(s => s.State, s => DisconnectedImportState.Completed)
                    .Set(s => s.Total, s => s.CalculateTotal())
                    .Execute();

                    // "Fixed" when no backup was uploaded, "Connected" after a full upload.
                    machine.InDB().UnsafeUpdate()
                    .Set(m => m.State, m => file == null ? DisconnectedMachineState.Fixed : DisconnectedMachineState.Connected)
                    .Execute();
                }
                catch (Exception e)
                {
                    var ex = e.LogException();

                    import.InDB().UnsafeUpdate()
                    .Set(m => m.Exception, m => ex.ToLite())
                    .Set(m => m.State, m => DisconnectedImportState.Error)
                    .Execute();

                    machine.InDB().UnsafeUpdate()
                    .Set(m => m.State, m => DisconnectedMachineState.Faulted)
                    .Execute();

                    OnImportingError(machine, import, e);
                }
                finally
                {
                    runningImports.Remove(import);
                    DisconnectedMachineEntity.Current = null;
                    OnEndImporting();
                }
            }
    });

    runningImports.Add(import, new RunningImports(task, cancelationSource));

    return(import);
}
/// <summary>
/// Inserts the new rows of <paramref name="table"/> (and its MList tables) found in
/// the disconnected machine database into the main database, inside one transaction.
/// </summary>
/// <returns>The number of main-table rows inserted (0 when there is nothing new).</returns>
protected virtual int Insert(DisconnectedMachineEntity machine, Table table, IDisconnectedStrategy strategy, SqlServerConnector newDatabase)
{
    bool isPostgres = Schema.Current.Settings.IsPostgres;
    DatabaseName sourceDatabase = new DatabaseName(null, newDatabase.DatabaseName(), isPostgres);

    // Skip the transaction entirely when the machine produced no new rows.
    int newItems = (int)CountNewItems(table, sourceDatabase).ExecuteScalar()!;
    if (newItems == 0)
    {
        return 0;
    }

    using (Transaction tr = new Transaction())
    {
        int insertedMainRows;
        using (DisableIdentityIfNecessary(table))
        {
            insertedMainRows = Executor.ExecuteNonQuery(InsertTableScript(table, sourceDatabase));
        }

        // Copy the dependent MList tables as well.
        foreach (var mlistTable in table.TablesMList())
        {
            using (DisableIdentityIfNecessary(mlistTable))
            {
                Executor.ExecuteNonQuery(InsertRelationalTableScript(table, sourceDatabase, mlistTable));
            }
        }

        return tr.Commit(insertedMainRows);
    }
}
/// <summary>
/// Default import behavior: inserts all new rows for <paramref name="table"/>;
/// no existing rows are updated.
/// </summary>
public virtual ImportResult Import(DisconnectedMachineEntity machine, Table table, IDisconnectedStrategy strategy, SqlServerConnector newDatabase)
{
    int insertedRows = Insert(machine, table, strategy, newDatabase);

    return new ImportResult { Inserted = insertedRows, Updated = 0 };
}
/// <summary>Drops the temporary machine database behind <paramref name="newDatabase"/>.</summary>
private void DropDatabase(SqlServerConnector newDatabase)
{
    bool isPostgres = Schema.Current.Settings.IsPostgres;
    var databaseToDrop = new DatabaseName(null, newDatabase.DatabaseName(), isPostgres);
    DisconnectedTools.DropDatabase(databaseToDrop);
}
/// <summary>
/// Loads the discounted simulated-occupancy experiment rows (pot = 0) from the
/// database and builds the classification and regression training/testing datasets
/// used by the portfolio learners. Points are split randomly: ~99% training, ~1% testing.
/// </summary>
private void init()
{
    var db = new SqlServerConnector();
    var rand = new Random();

    var Occupancies = db.ExecuteQuery("SELECT DISTINCT TOP (100) PERCENT dbo.experiments.algorithm_id, dbo.experiments.population_group, dbo.experiments.max_trust, dbo.experiments.pop, dbo.experiments.pou, dbo.experiments.averagePenalties_maxvote - dbo.experiments.averagePenalties_pred AS LessPenalties, dbo.SimulatedOccupancies.weekday, dbo.SimulatedOccupancies.hour, dbo.SimulatedOccupancies.real_tag, dbo.SimulatedOccupancies.predicted_tag, dbo.SimulatedOccupancies.maxvote_tag, dbo.SimulatedOccupancies.section FROM dbo.experiments INNER JOIN dbo.SimulatedOccupancies ON dbo.experiments.id = dbo.SimulatedOccupancies.experiment_id WHERE (dbo.experiments.discounted = 1) AND (dbo.SimulatedOccupancies.experiment_id IN (SELECT id FROM dbo.experiments AS experiments_1 WHERE (pot = 0))) ORDER BY dbo.experiments.population_group, dbo.experiments.max_trust, dbo.experiments.pop, dbo.experiments.pou, dbo.SimulatedOccupancies.weekday, dbo.SimulatedOccupancies.hour, dbo.experiments.algorithm_id");

    // Result column order (see SELECT above):
    // 0 algorithm_id, 1 population_group, 2 max_trust, 3 pop, 4 pou,
    // 5 LessPenalties, 6 weekday, 7 hour, 8 real_tag, 9 predicted_tag,
    // 10 maxvote_tag, 11 section
    dbdata = new List<DatabaseRecord>(Occupancies.Rows.Count);
    for (int i = 0; i < Occupancies.Rows.Count; i++)
    {
        var row = Occupancies.Rows[i];
        dbdata.Add(new DatabaseRecord
        {
            AlgorithmID = Convert.ToInt32(row[0]),
            PopulationGroup = Convert.ToInt32(row[1]),
            // FIX: previously read column 1 (population_group) again;
            // max_trust is column 2 of the SELECT.
            MaxTrust = Convert.ToInt32(row[2]) == 1,
            Pop = Convert.ToDouble(row[3]),
            Pou = Convert.ToDouble(row[4]),
            WeekDay = Convert.ToInt32((byte)row[6]),
            Hour = Convert.ToInt32((byte)row[7]),
            RealTag = Convert.ToInt32((byte)row[8]),
            PredictedTag = Convert.ToInt32((byte)row[9]),
            MaxvoteTag = Convert.ToInt32((byte)row[10]),
            Section = Convert.ToInt32((byte)row[11])
        });
    }

    // ---------- classification dataset ----------
    // Label: the algorithm id when its prediction matched reality, 0 otherwise.
    ClassificationDataset = new TrainingTestingDataSet();
    ClassificationDataset.TrainingDataSet = new DataSet();
    ClassificationDataset.TrainingDataSet.DataPoints = new List<DataPoint>();
    ClassificationDataset.TestingDataSet = new DataSet();
    ClassificationDataset.TestingDataSet.DataPoints = new List<DataPoint>();

    foreach (DatabaseRecord record in dbdata)
    {
        int label = 0;
        if (record.PredictedTag == record.RealTag)
        {
            label = record.AlgorithmID;
        }

        var point = new PortfolioClassificationDataPoint();
        point.Label = label;
        point.Features = new List<FeatureIndex>
        {
            new FeatureIndex() { ID = 0, Name = "Hour", Value = record.Hour },
            new FeatureIndex() { ID = 1, Name = "MaxTrust", Value = Convert.ToDouble(record.MaxTrust) },
            new FeatureIndex() { ID = 2, Name = "Pop", Value = record.Pop },
            new FeatureIndex() { ID = 3, Name = "Pou", Value = record.Pou },
            new FeatureIndex() { ID = 4, Name = "PopulationGroup", Value = record.PopulationGroup },
            new FeatureIndex() { ID = 5, Name = "Weekday", Value = record.WeekDay },
            new FeatureIndex() { ID = 6, Name = "Section", Value = record.Section }
        };
        point.DatabaseRecord = record;

        if (rand.NextDouble() < 0.99)
        {
            ClassificationDataset.TrainingDataSet.DataPoints.Add(point);
        }
        else
        {
            ClassificationDataset.TestingDataSet.DataPoints.Add(point);
        }
    }

    // ---------- regression dataset ----------
    // Label: the real tag; features include the algorithm and its predictions.
    RegressionDataset = new TrainingTestingDataSet();
    RegressionDataset.TrainingDataSet = new DataSet();
    RegressionDataset.TrainingDataSet.DataPoints = new List<DataPoint>();
    RegressionDataset.TestingDataSet = new DataSet();
    RegressionDataset.TestingDataSet.DataPoints = new List<DataPoint>();

    foreach (DatabaseRecord record in dbdata)
    {
        var point = new PortfolioClassificationDataPoint();
        point.Label = record.RealTag;
        point.Features = new List<FeatureIndex>
        {
            new FeatureIndex() { ID = 1, Name = "Hour", Value = record.Hour },
            new FeatureIndex() { ID = 2, Name = "MaxTrust", Value = Convert.ToDouble(record.MaxTrust) },
            new FeatureIndex() { ID = 3, Name = "Pop", Value = record.Pop },
            new FeatureIndex() { ID = 4, Name = "Pou", Value = record.Pou },
            new FeatureIndex() { ID = 5, Name = "PopulationGroup", Value = record.PopulationGroup },
            new FeatureIndex() { ID = 6, Name = "Algorithm", Value = record.AlgorithmID },
            new FeatureIndex() { ID = 7, Name = "PredictedTag", Value = record.PredictedTag },
            new FeatureIndex() { ID = 8, Name = "MaxvoteTag", Value = record.MaxvoteTag },
            new FeatureIndex() { ID = 9, Name = "Section", Value = record.Section },
            new FeatureIndex() { ID = 10, Name = "Weekday", Value = record.WeekDay }
        };
        point.DatabaseRecord = record;

        if (rand.NextDouble() < 0.99)
        {
            RegressionDataset.TrainingDataSet.DataPoints.Add(point);
        }
        else
        {
            RegressionDataset.TestingDataSet.DataPoints.Add(point);
        }
    }

    // Only the regression sets are de-duplicated; classification keeps duplicates.
    RegressionDataset.TrainingDataSet.DataPoints = RegressionDataset.TrainingDataSet.DataPoints.Distinct().ToList();
    RegressionDataset.TestingDataSet.DataPoints = RegressionDataset.TestingDataSet.DataPoints.Distinct().ToList();
}
/// <summary>
/// Starts an asynchronous export of the central database into a new machine-specific
/// database: locks subset rows, creates the database and schema, copies every
/// download table, reseeds identities into the machine's seed range, backs up and
/// finally drops the temporary database. Progress is recorded on a
/// <see cref="DisconnectedExportEntity"/>.
/// </summary>
/// <param name="machine">Machine that is going offline.</param>
/// <returns>A lite reference to the tracking export entity; work continues in the background.</returns>
public virtual Lite<DisconnectedExportEntity> BeginExportDatabase(DisconnectedMachineEntity machine)
{
    // Create the tracking entity up-front so timings can be written to it as phases complete.
    Lite<DisconnectedExportEntity> export = new DisconnectedExportEntity
    {
        Machine = machine.ToLite(),
        Copies = downloadTables.Select(t => new DisconnectedExportTableEmbedded
        {
            Type = t.Type.ToTypeEntity().ToLite()
        }).ToMList()
    }.Save().ToLite();

    var cancelationSource = new CancellationTokenSource();
    var user = UserHolder.Current;
    var token = cancelationSource.Token;

    var task = Task.Factory.StartNew(() =>
    {
        using (UserHolder.UserSession(user))
        {
            OnStartExporting(machine);
            DisconnectedMachineEntity.Current = machine.ToLite();
            try
            {
                // Phase 1: lock the subset rows this machine is taking offline.
                using (token.MeasureTime(l => export.InDB().UnsafeUpdate().Set(s => s.Lock, s => l).Execute()))
                {
                    foreach (var tuple in downloadTables)
                    {
                        token.ThrowIfCancellationRequested();
                        if (tuple.Strategy.Upload == Upload.Subset)
                        {
                            miUnsafeLock.MakeGenericMethod(tuple.Type).Invoke(this, new object[] { machine.ToLite(), tuple.Strategy, export });
                        }
                    }
                }

                // Phase 2: create the machine database and generate the full schema in it.
                string connectionString;
                using (token.MeasureTime(l => export.InDB().UnsafeUpdate().Set(s => s.CreateDatabase, s => l).Execute()))
                    connectionString = CreateDatabase(machine);

                var newDatabase = new SqlServerConnector(connectionString, Schema.Current, ((SqlServerConnector)Connector.Current).Version);

                using (token.MeasureTime(l => export.InDB().UnsafeUpdate().Set(s => s.CreateSchema, s => l).Execute()))
                using (Connector.Override(newDatabase))
                using (ObjectName.OverrideOptions(new ObjectNameOptions { AvoidDatabaseName = true }))
                {
                    Administrator.TotalGeneration();
                }

                // Phase 3: disable FKs while bulk-copying (enum tables keep theirs).
                using (token.MeasureTime(l => export.InDB().UnsafeUpdate().Set(s => s.DisableForeignKeys, s => l).Execute()))
                using (Connector.Override(newDatabase))
                using (ObjectName.OverrideOptions(new ObjectNameOptions { AvoidDatabaseName = true }))
                {
                    foreach (var tuple in downloadTables.Where(t => !t.Type.IsEnumEntity()))
                    {
                        token.ThrowIfCancellationRequested();
                        DisableForeignKeys(tuple.Table);
                    }
                }

                var isPostgres = Schema.Current.Settings.IsPostgres;
                DatabaseName newDatabaseName = new DatabaseName(null, newDatabase.DatabaseName(), isPostgres);

                // Phase 4: copy each download table, recording the per-table timing.
                foreach (var tuple in downloadTables)
                {
                    token.ThrowIfCancellationRequested();
                    int ms = 0;
                    using (token.MeasureTime(l => ms = l))
                    {
                        tuple.Strategy.Exporter!.Export(tuple.Table, tuple.Strategy, newDatabaseName, machine);
                    }

                    export.MListElementsLite(_ => _.Copies).Where(c => c.Element.Type.Is(tuple.Type.ToTypeEntity())).UnsafeUpdateMList()
                    .Set(mle => mle.Element.CopyTable, mle => ms)
                    .Execute();
                }

                using (token.MeasureTime(l => export.InDB().UnsafeUpdate().Set(s => s.EnableForeignKeys, s => l).Execute()))
                    foreach (var tuple in downloadTables.Where(t => !t.Type.IsEnumEntity()))
                    {
                        token.ThrowIfCancellationRequested();
                        EnableForeignKeys(tuple.Table);
                    }

                // Phase 5: reseed identities so rows created offline fall in the machine's seed range.
                using (token.MeasureTime(l => export.InDB().UnsafeUpdate().Set(s => s.ReseedIds, s => l).Execute()))
                {
                    var tablesToUpload = Schema.Current.Tables.Values.Where(t => DisconnectedLogic.GetStrategy(t.Type).Upload != Upload.None)
                                         .SelectMany(t => t.TablesMList().Cast<ITable>().And(t)).Where(t => t.PrimaryKey.Identity).ToList();

                    var maxIdDictionary = tablesToUpload.ToDictionary(t => t, t => DisconnectedTools.MaxIdInRange(t, machine.SeedMin, machine.SeedMax));

                    using (Connector.Override(newDatabase))
                    using (ObjectName.OverrideOptions(new ObjectNameOptions { AvoidDatabaseName = true }))
                    {
                        foreach (var table in tablesToUpload)
                        {
                            token.ThrowIfCancellationRequested();
                            long? max = maxIdDictionary.GetOrThrow(table);
                            DisconnectedTools.SetNextId(table, (max + 1) ?? machine.SeedMin);
                        }
                    }
                }

                CopyExport(export, newDatabase);

                // NOTE(review): the machine state is written twice — once against the main
                // database and once (below, under the override) against the new database;
                // confirm both writes are intentional.
                machine.InDB().UnsafeUpdate().Set(s => s.State, s => DisconnectedMachineState.Disconnected).Execute();

                using (SqlServerConnector.Override(newDatabase))
                using (ObjectName.OverrideOptions(new ObjectNameOptions { AvoidDatabaseName = true }))
                    machine.InDB().UnsafeUpdate().Set(s => s.State, s => DisconnectedMachineState.Disconnected).Execute();

                // Phase 6: back up the machine database, then drop the temporary copy.
                using (token.MeasureTime(l => export.InDB().UnsafeUpdate().Set(s => s.BackupDatabase, s => l).Execute()))
                    BackupDatabase(machine, export, newDatabase);

                using (token.MeasureTime(l => export.InDB().UnsafeUpdate().Set(s => s.DropDatabase, s => l).Execute()))
                    DropDatabase(newDatabase);

                token.ThrowIfCancellationRequested();

                export.InDB().UnsafeUpdate()
                .Set(s => s.State, s => DisconnectedExportState.Completed)
                .Set(s => s.Total, s => s.CalculateTotal())
                .Execute();
            }
            catch (Exception e)
            {
                var ex = e.LogException();

                export.InDB().UnsafeUpdate()
                .Set(s => s.Exception, s => ex.ToLite())
                .Set(s => s.State, s => DisconnectedExportState.Error)
                .Execute();

                OnExportingError(machine, export, e);
            }
            finally
            {
                runningExports.Remove(export);
                DisconnectedMachineEntity.Current = null;
                OnEndExporting();
            }
        }
    });

    runningExports.Add(export, new RunningExports(task, cancelationSource));

    return(export);
}