private Job Add(IJobSpecification specification)
{
    Job job;
    lock (_jobListSyncRoot)
    {
        var lockKey = specification.LockKey;
        List<Job> jobList = null;
        if (!string.IsNullOrEmpty(lockKey))
        {
            jobList = GetJobList(lockKey, true);
            if (jobList.Any(j => !j.EndDate.HasValue))
            {
                throw new InvalidOperationException(string.Format("Another job is running with the lock key '{0}'", lockKey));
            }
        }

        job = new Job(specification, JobExecutionSettings);
        if (!_jobs.TryAdd(job.Id, job))
        {
            throw new InvalidOperationException("Duplicate job ID");
        }

        if (jobList != null)
        {
            jobList.Add(job);
        }
    }

    PurgeOld();

    return job;
}
public void Validate(IEnumerable<TableData> tables, IJobSpecification job)
{
    using (var server = new Server())
    {
        server.Connect(ConnectionString);
    }
}
public void Process(string tempDirectory, IEnumerable<TableData> sourceTables, IJobSpecification job)
{
    var tables = sourceTables.Select(t => t.Schema).ToArray();

    using (var server = new Server())
    {
        server.Connect(ConnectionString);

        var db = server.Databases.Find(DbName);
        if (!Update)
        {
            if (db != null)
            {
                db.Drop();
            }
            CreateSchema(db, server, tables);
            db = server.Databases.Find(DbName);
        }

        //server.CaptureXml = true; <- Doesn't work with QueryBinding and errors from empty partitions marked as never processed.

        foreach (var table in sourceTables)
        {
            var processingType = !Update && IncrementalUpdate.HasFlag(
                table.Schema.IsDimension() ? SqlClearOptions.Dimensions : SqlClearOptions.Facts)
                    ? ProcessType.ProcessAdd
                    : ProcessType.ProcessFull;

            if (table.Schema.IsDimension())
            {
                ProcessPartition(db, table.Name, table.Name, processingType);
            }
            else
            {
                var partition = SqlUpdateUtil.GetPartitionField(table.Schema);
                if (partition != null)
                {
                    ProcessPartition(db, table.Name, GetTransientPartitionName(table.Schema), ProcessType.ProcessFull,
                        string.Format("SELECT [{0}].* FROM [{0}] {1}", table.Name,
                            SqlUpdateUtil.GetUpdateCriteria(table.Schema, partition, true, date: ReferenceDate)));

                    ProcessPartition(db, table.Name, table.Name, processingType,
                        string.Format("SELECT [{0}].* FROM [{0}] {1}", table.Name,
                            SqlUpdateUtil.GetUpdateCriteria(table.Schema, partition, false, date: ReferenceDate, cutoff: CutOff)));
                }
                else
                {
                    ProcessPartition(db, table.Name, table.Name, processingType);
                }
            }
        }

        //server.ExecuteCaptureLog(true, true);
    }
}
public void Process(string tempDirectory, IEnumerable<TableData> tables, IJobSpecification job)
{
    var model = BuildModel(tables);
    using (var xml = XmlWriter.Create(Path.Combine(tempDirectory, "edm.xml")))
    {
        IEnumerable<EdmError> errors;
        model.TryWriteCsdl(xml, out errors);
    }
}
public Job(IJobSpecification specification, JobExecutionSettings settings, Guid? id = null, int statusUpdateFrequency = 1000)
{
    Id = id ?? Guid.NewGuid();
    Created = DateTime.Now;
    Status = JobStatus.Pending;
    Specification = specification;
    StatusUpdateFrequency = statusUpdateFrequency;
    ExecutionSettings = settings;
    TempDirectory = Path.Combine(settings.TempDirectory, Id.ToString("N"));
}
public void Add(IJobSpecification jobSpecification, DateTime scheduledTime)
{
    // Just run the job.
    SystemProcessCall call = jobSpecification as SystemProcessCall;
    if (null == call)
    {
        throw new ArgumentException(String.Format("Only able to handle job types that are SystemProcessCalls at the moment. Job type found: '{0}'.", jobSpecification.GetType().FullName));
    }

    ProcessStartInfo procStartInfo = new ProcessStartInfo(call.Value);
    Process proc = new Process();
    proc.StartInfo = procStartInfo;
    proc.Start();
}
public JobInfo CreateJob(IJobSpecification specification, Action<JobInfo> jobEnded = null)
{
    var job = Add(specification);

    Task.Run(() =>
    {
        try
        {
            if (jobEnded != null)
            {
                job.JobEnded += (sender, args) => jobEnded(GetJobInfo((Job)sender));
            }
            job.Run();
        }
        catch (Exception ex)
        {
            Log.Error("Error running job", ex, this);
        }
    });

    return GetJobInfo(job);
}
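A minimal usage sketch for CreateJob. The names here are assumptions: jobManager stands for an instance of the class that defines CreateJob above, and ExportJobSpecification is a placeholder IJobSpecification implementation; only the CreateJob signature itself comes from the snippet.

// Hypothetical usage sketch - "jobManager" is an instance of the class defining CreateJob,
// and ExportJobSpecification is a stand-in IJobSpecification implementation.
IJobSpecification spec = new ExportJobSpecification();

// The callback runs on the background task once the job has ended.
var info = jobManager.CreateJob(spec, ended =>
    Console.WriteLine("Job ended: " + ended));

Console.WriteLine("Job started: " + info);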
public StaticJobLoader(IJobSpecification specification) { Specification = specification; }
public void Validate(IEnumerable<TableData> tables, IJobSpecification job) { }
public void Process(string tempDirectory, IEnumerable<TableData> tables, IJobSpecification job)
{
    if (tables.Any(t => (t as CsvTableData) == null))
    {
        throw new NotSupportedException("AccessExporter requires CsvTableData");
    }
    var csvTables = tables.Cast<CsvTableData>();

    var path = Path.Combine(tempDirectory, "Result.accdb");

    // Copy the embedded empty Access database template to the output path.
    using (var file = File.Create(path))
    using (var templateDb = typeof(AccessExporter).Assembly.GetManifestResourceStream("ExperienceExtractor.Components.Resources.Empty.accdb"))
    {
        templateDb.CopyTo(file);
    }

    using (var conn = new OleDbConnection(@"Provider=Microsoft.ACE.OLEDB.12.0;Data Source=" + path))
    {
        conn.Open();

        // Import each CSV file into the Access database as a table.
        foreach (var table in csvTables)
        {
            var csvFile = new FileInfo(table.Path);
            new OleDbCommand(
                string.Format(@"SELECT * INTO [{0}] FROM [Text;DATABASE={1}].{2}",
                    table.Name, csvFile.DirectoryName, csvFile.Name), conn)
                .ExecuteNonQuery();
        }

        // Add primary keys where the schema defines them.
        foreach (var t in tables)
        {
            var pks = t.Schema.Keys;
            if (pks.Length > 0)
            {
                new OleDbCommand(
                    string.Format(@"ALTER TABLE [{0}] ADD PRIMARY KEY ({1})", t.Name, FieldList(pks)),
                    conn).ExecuteNonQuery();
            }
        }

        // Add foreign key constraints for parent and dimension relations.
        var nextId = 1;
        foreach (var table in tables)
        {
            foreach (var rel in table.Schema.RelatedTables.Where(
                r => r.RelationType == RelationType.Parent || r.RelationType == RelationType.Dimension))
            {
                new OleDbCommand(
                    string.Format(@"ALTER TABLE [{0}] ADD CONSTRAINT {4} FOREIGN KEY ({1}) REFERENCES [{2}] ({3})",
                        table.Name, FieldList(rel.Fields), rel.RelatedTable.Name, FieldList(rel.RelatedFields), "FK" + nextId++),
                    conn).ExecuteNonQuery();
            }
        }
    }
}
public void Process(string tempDirectory, IEnumerable<TableData> tables, IJobSpecification job)
{
    CultureInfo.DefaultThreadCurrentCulture = CultureInfo.GetCultureInfo("en-US");

    using (var conn = new SqlConnection(ConnectionString))
    {
        conn.Open();

        if (!SsasOnly)
        {
            if (!string.IsNullOrEmpty(CreateDatabaseName))
            {
                if (!Update)
                {
                    //Drop the database if it already exists
                    try
                    {
                        new SqlCommand(string.Format(@"ALTER DATABASE [{0}] SET SINGLE_USER WITH ROLLBACK IMMEDIATE; DROP DATABASE [{0}]", CreateDatabaseName), conn).ExecuteNonQuery();
                    }
                    catch { }

                    //Create the database, and set it to simple recovery mode. This makes inserts faster.
                    new SqlCommand(string.Format("CREATE DATABASE [{0}];", CreateDatabaseName), conn)
                        .ExecuteNonQuery();
                    new SqlCommand(string.Format("ALTER DATABASE [{0}] SET RECOVERY SIMPLE", CreateDatabaseName), conn)
                        .ExecuteNonQuery();

                    conn.ChangeDatabase(CreateDatabaseName);

                    new SqlCommand(@"CREATE SCHEMA Staging;", conn).ExecuteNonQuery();

                    var s = new StringWriter();
                    WriteSchema(s, tables);
                    new SqlCommand(s.ToString(), conn).ExecuteNonQuery();

                    new SqlCommand(@"CREATE SCHEMA Sitecore;", conn).ExecuteNonQuery();
                    new SqlCommand(@"CREATE TABLE Sitecore.JobInfo ( [Schema] nvarchar(max), [Prototype] nvarchar(max), [LockDate] datetime2 null, LastCutoff datetime2 null );", conn)
                        .ExecuteNonQuery();

                    var cmd = new SqlCommand(@"INSERT Sitecore.JobInfo ([Schema], [Prototype]) VALUES (@Schema, @Prototype)", conn);
                    cmd.Parameters.AddWithValue("@Schema", tables.Select(t => t.Schema).Serialize());
                    cmd.Parameters.AddWithValue("@Prototype", job.ToString());
                    cmd.ExecuteNonQuery();

                    OnSchemaCreating(conn);
                }
                else
                {
                    conn.ChangeDatabase(CreateDatabaseName);
                    ValidateSchema(conn, tables);
                }
            }

            AcquireLock(conn, null); //TODO: Implement IDisposable...
            try
            {
                new SqlCommand(@"EXEC sp_msforeachtable 'ALTER TABLE ? NOCHECK CONSTRAINT all'", conn)
                    .ExecuteNonQuery();
                try
                {
                    UseStagingTables = true;
                    if (UseStagingTables)
                    {
                        Task.WaitAll(tables.Select(table => Task.Run(() => UploadData(table))).ToArray());
                    }

                    using (var tran = conn.BeginTransaction())
                    {
                        foreach (var table in tables)
                        {
                            InsertOrUpdateData(table, conn, tran, UseStagingTables ? GetStagingTableName(table) : null);
                        }

                        var cmd = new SqlCommand(@"UPDATE Sitecore.JobInfo SET LastCutoff=@LastCutoff", conn, tran);
                        cmd.Parameters.AddWithValue(@"LastCutoff", _nextCuttoff ?? (object)DBNull.Value);
                        cmd.ExecuteNonQuery();

                        OnBeforeCommit(tran);

                        tran.Commit();
                    }

                    if (UseStagingTables)
                    {
                        foreach (var table in tables)
                        {
                            new SqlCommand(string.Format(@"TRUNCATE TABLE {0}", GetStagingTableName(table)), conn) { CommandTimeout = Timeout }
                                .ExecuteNonQuery();
                        }
                    }
                }
                finally
                {
                    new SqlCommand(@"EXEC sp_msforeachtable 'ALTER TABLE ? CHECK CONSTRAINT all'", conn)
                        .ExecuteNonQuery();
                }

                if (_ssasExporter != null)
                {
                    _ssasExporter.CutOff = _cutoff;
                    _ssasExporter.Process(tempDirectory, tables, job);
                }
            }
            finally
            {
                ReleaseLock(conn, null);
            }
        }
        else
        {
            if (_ssasExporter != null)
            {
                _ssasExporter.Process(tempDirectory, tables, job);
            }
        }
    }
}
public UpdateJobWrapper(IJobSpecification prototype, bool rebuild = false, string lockKey = null)
{
    _lockKey = lockKey;
    Prototype = prototype;
    Rebuild = rebuild;
}
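A hedged usage sketch for this constructor. The prototypeSpec variable and the lock key value are assumptions for illustration; only the parameter names and defaults come from the snippet above, and the lock-key semantics (only one unfinished job per key) are those shown in the Add method at the top of this section.

// Hypothetical usage sketch - prototypeSpec is an IJobSpecification obtained elsewhere.
// The wrapper re-runs the prototype with the rebuild flag set; the lock key prevents a
// second job with the same key from running concurrently (see Add above).
var rebuildJob = new UpdateJobWrapper(prototypeSpec, rebuild: true, lockKey: "nightly-export");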
public void Add(IJobSpecification jobSpecification, DateTime scheduledTime) { throw new NotImplementedException(); }
public void Validate(IEnumerable<TableData> tables, IJobSpecification job)
{
    var hasPartitionKey = false;
    var staleTime = GetStaleTime(tables);

    using (var conn = new SqlConnection(ConnectionString))
    {
        conn.Open();

        if (!string.IsNullOrEmpty(CreateDatabaseName))
        {
            if (Update)
            {
                conn.ChangeDatabase(CreateDatabaseName);

                //Check lock. Release immediately upon success.
                AcquireLock(conn, null);
                ReleaseLock(conn, null);

                ValidateSchema(conn, tables);
            }

            if (staleTime > TimeSpan.Zero)
            {
                hasPartitionKey = true;
                if (Update && !Rebuild)
                {
                    var lastCutoff = new SqlCommand(@"SELECT TOP 1 LastCutoff FROM Sitecore.JobInfo", conn).ExecuteScalar();
                    _cutoff = DBNull.Value.Equals(lastCutoff)
                        ? (DateTime?)null
                        : ((DateTime)lastCutoff).SpecifyKind(DateTimeKind.Utc);

                    if (_cutoff.HasValue)
                    {
                        _cutoff = SqlUpdateUtil.GetPartitionDate(_cutoff.Value.Add(-staleTime), staleTime);
                    }
                }
                _nextCuttoff = DateTime.UtcNow;
            }
        }
    }

    if (!string.IsNullOrEmpty(SsasConnectionString))
    {
        var connectionStringBuilder = new SqlConnectionStringBuilder(ConnectionString);
        if (!string.IsNullOrEmpty(CreateDatabaseName))
        {
            connectionStringBuilder.InitialCatalog = CreateDatabaseName;
        }
        else
        {
            throw new Exception("Database must be specified either in the connection string or as the Database parameter");
        }

        _ssasExporter = new SsasExporter(SsasConnectionString, SsasDbName, "Provider=SQLOLEDB;" + connectionStringBuilder.ConnectionString);
        _ssasExporter.Update = Update;
        _ssasExporter.Validate(tables, job);
        _ssasExporter.ReferenceDate = (_nextCuttoff ?? DateTime.UtcNow).Add(-staleTime);

        if (Rebuild)
        {
            _ssasExporter.IncrementalUpdate = SqlClearOptions.None;
        }
        else
        {
            _ssasExporter.IncrementalUpdate = SqlClearOptions;
            if (hasPartitionKey)
            {
                _ssasExporter.IncrementalUpdate |= SqlClearOptions.Facts;
            }
        }
    }
    else if (!string.IsNullOrEmpty(SsasDbName))
    {
        throw new Exception("A connection string for SSAS Tabular is needed");
    }
}