/// <summary>
/// Loads the data: seeds a single <see cref="HospitalRegistry"/> row if none exists,
/// then records the current schema version so the check is not repeated.
/// Any failure is logged and swallowed (best-effort load).
/// </summary>
public override void LoadData()
{
    try
    {
        using (var session = DataProvider.SessionFactory.OpenSession())
        {
            // Add a new registry if it's not loaded yet.
            if (!session.Query<HospitalRegistry>().Any())
            {
                decimal version = 1;

                // FIX: GetCustomAttribute(...) returns null when the assembly has no
                // [AssemblyDescription]; the original dereferenced .Description directly
                // and would throw NullReferenceException. Fall back to an empty name.
                var description = GetType().Assembly
                    .GetCustomAttribute<AssemblyDescriptionAttribute>()?.Description
                    ?? string.Empty;

                HospitalRegistry registry = new HospitalRegistry(version)
                {
                    Name = description
                };
                session.SaveOrUpdate(registry);
            }

            // Called because this wasn't in the schema version table yet, so add it
            // so we don't check again.
            var sversion = VersionStrategy.GetVersion(session);
            session.SaveOrUpdate(sversion);
            session.Flush();
        }
    }
    catch (Exception ex)
    {
        Logger.Write(ex, "Error loading Hospital Registry data");
    }
}
/// <summary>
/// Loads the data: when this enum-backed base-data set is not yet loaded and the
/// version strategy reports it as the newest, truncates the target table and
/// bulk-inserts one entity per <typeparamref name="TEnum"/> value, then records
/// the schema version. Errors are logged and swallowed (best-effort load).
/// </summary>
public override void LoadData()
{
    try
    {
        if (!VersionStrategy.IsLoaded() && VersionStrategy.IsNewest())
        {
            Logger.Information($"Processing base data update for type {typeof(TEntity).Name}");
            var rows = 0;

            // start transaction
            using (var session = DataProvider.SessionFactory.OpenStatelessSession())
            {
                // Always truncate for enum types: the table is fully regenerated
                // from the enum's values below, so stale rows must not survive.
                using (var cmd = session.Connection.CreateCommand())
                {
                    if (cmd.Connection.State == ConnectionState.Closed)
                    {
                        cmd.Connection.Open();
                    }

                    // 15-minute timeout; presumably sized for large tables — TODO confirm.
                    cmd.CommandTimeout = 900;
                    cmd.CommandText = string.Format("TRUNCATE TABLE {0}", tableName);
                    cmd.ExecuteNonQuery();
                }

                // Loop through values in the enum.
                using (var bulkImporter = new BulkInsert<TEntity, TKey>(session.Connection))
                {
                    // Hand the importer the stateless session's connection so the
                    // bulk insert reuses it rather than opening its own.
                    bulkImporter.ConnectionRequested += (o, e) =>
                    {
                        e.Data = session.Connection as SqlConnection;
                    };
                    bulkImporter.Prepare();
                    bulkImporter.BatchSize = 1000;

                    Array values = Enum.GetValues(typeof(TEnum));
                    foreach (var val in values)
                    {
                        // GetEntity maps an enum value to its entity; null means "skip"
                        // and is not counted in rows.
                        var temp = GetEntity(val);
                        if (temp != null)
                        {
                            bulkImporter.Insert(temp);
                            rows++;
                        }
                    }
                }
            }

            // Record the schema version in a separate (stateful) session after the
            // bulk import has completed and its connection has been released.
            SchemaVersion version;
            using (var session = DataProvider.SessionFactory.OpenSession())
            {
                version = VersionStrategy.GetVersion(session);
                session.SaveOrUpdate(version);
                session.Flush();
            }

            Logger.Information($"Base data update completed for type {typeof(TEntity).Name}: {rows} rows inserted or updated; now at schema version {version}");
        }
    }
    catch (Exception ex)
    {
        Logger.Write(ex, "Error importing {0} enumeration values for data type {1}", typeof(TEnum).Name, typeof(TEntity).Name);
    }
}
/// <summary>
/// Loads the data: reads every MeasureTopics*.csv file in the base data directory
/// via the ACE OLE DB text driver and appends topic/measure rows through
/// <c>AddOrUpdateToDB</c> inside one transaction per file, then records the schema
/// version for that file. Per-file and overall errors are logged and swallowed.
/// </summary>
public override void LoadData()
{
    try
    {
        // Get list of files matching mask
        // TODO: Throw an error if the path doesn't exist?
        if (Directory.Exists(baseDataDir))
        {
            var files = Directory.GetFiles(baseDataDir, "MeasureTopics*.csv");
            foreach (var file in files)
            {
                try
                {
                    VersionStrategy.Filename = file;

                    // Appending measures to the topics tables, not replacing with the newest file.
                    if (!VersionStrategy.IsLoaded())
                    {
                        // Verify data file exists. (file is already an absolute path from
                        // Directory.GetFiles, so Path.Combine returns it unchanged.)
                        if (!File.Exists(Path.Combine(baseDataDir, file)))
                        {
                            Logger.Warning(
                                "Import file \"{0}\" missing from the base data resources directory.",
                                file);
                            return;
                        }

                        using (var session = DataProvider.SessionFactory.OpenSession())
                        using (var trans = session.BeginTransaction())
                        {
                            var builder = new OleDbConnectionStringBuilder()
                            {
                                Provider = "Microsoft.ACE.OLEDB.12.0",
                                DataSource = baseDataDir,
                            };
                            builder["Extended Properties"] = "text;HDR=YES;FMT=Delimited";

                            using (var conn = new OleDbConnection(builder.ConnectionString))
                            {
                                conn.Open();
                                var sql = string.Format("SELECT * FROM [{0}]", Path.GetFileName(file));

                                // NOTE: Not using the bulk importer here because we have two
                                // types of data being imported at the same time.
                                using (var cmd = new OleDbCommand(sql, conn))
                                // FIX: the data reader was never disposed in the original,
                                // leaking the OLE DB resources; hold it in a using block.
                                using (var reader = cmd.ExecuteReader())
                                {
                                    while (reader != null && reader.Read())
                                    {
                                        // Column names are case-insensitive to the driver but
                                        // kept exactly as in the original ("Longtitle",
                                        // "description") to match the CSV headers.
                                        var parentName = reader.Guard<string>("ParentName") ?? "";
                                        var name = reader.Guard<string>("Name") ?? "";
                                        var longTitle = reader.Guard<string>("Longtitle") ?? "";
                                        var description = reader.Guard<string>("description") ?? "";
                                        var consumerLongTitle = reader.Guard<string>("ConsumerLongTitle") ?? "";
                                        var consumerDescription = reader.Guard<string>("ConsumerDescription") ?? "";
                                        // NOTE(review): categoryType deliberately has no ?? ""
                                        // fallback in the original — null may be meaningful
                                        // downstream; preserved as-is.
                                        var categoryType = reader.Guard<string>("CategoryType");
                                        var topicFacts1Text = reader.Guard<string>("TopicFact1Text") ?? "";
                                        var topicFacts1Citation = reader.Guard<string>("TopicFact1Citation") ?? "";
                                        var topicFacts1Image = reader.Guard<string>("TopicFact1Image") ?? "";
                                        var topicFacts2Text = reader.Guard<string>("TopicFact2Text") ?? "";
                                        var topicFacts2Citation = reader.Guard<string>("TopicFact2Citation") ?? "";
                                        var topicFacts2Image = reader.Guard<string>("TopicFact2Image") ?? "";
                                        var topicFacts3Text = reader.Guard<string>("TopicFact3Text") ?? "";
                                        var topicFacts3Citation = reader.Guard<string>("TopicFact3Citation") ?? "";
                                        var topicFacts3Image = reader.Guard<string>("TopicFact3Image") ?? "";
                                        var tipsChecklist = reader.Guard<string>("TipsChecklist") ?? "";
                                        var topicIcon = reader.Guard<string>("TopicIcon") ?? "";

                                        // Rows flagged SKIP_ROW in ParentName are ignored.
                                        if (!parentName.Equals("SKIP_ROW"))
                                        {
                                            AddOrUpdateToDB(
                                                session,
                                                parentName,
                                                name,
                                                longTitle,
                                                description,
                                                consumerLongTitle,
                                                consumerDescription,
                                                categoryType,
                                                topicFacts1Text,
                                                topicFacts1Citation,
                                                topicFacts1Image,
                                                topicFacts2Text,
                                                topicFacts2Citation,
                                                topicFacts2Image,
                                                topicFacts3Text,
                                                topicFacts3Citation,
                                                topicFacts3Image,
                                                tipsChecklist,
                                                topicIcon);
                                        }
                                    }
                                }
                            }

                            trans.Commit();
                        }

                        // Record the schema version for this file in a fresh session.
                        using (var session = DataProvider.SessionFactory.OpenSession())
                        {
                            var version = VersionStrategy.GetVersion(session);
                            session.SaveOrUpdate(version);
                            session.Flush();
                        }
                    }
                }
                catch (Exception e)
                {
                    Logger.Write(e, "Error loading measure topic data from file {0}", file);
                }
            }
        }
    }
    catch (Exception ex)
    {
        Logger.Write(ex, "Error loading measure topics data");
    }
}
/// <summary>
/// Loads the data: for each CSV matching "{Fileprefix}-*.csv" in the base data
/// directory that the version strategy says needs loading, optionally disables
/// table indexes, truncates the table for Replace imports, streams the file via
/// the ACE OLE DB text driver into a bulk importer, then records the schema
/// version. Errors are logged and swallowed (best-effort load).
/// </summary>
public override void LoadData()
{
    var tableIndexIsOff = false;

    try
    {
        // Get list of files matching mask
        // TODO: Throw an error if the path doesn't exist?
        if (Directory.Exists(baseDataDir))
        {
            var files = Directory.GetFiles(baseDataDir, Fileprefix + "-*.csv");
            PreProcessFile(ref files);

            foreach (var file in files)
            {
                VersionStrategy.Filename = file;

                // Append imports load every unloaded file; Replace imports only the newest.
                if (!VersionStrategy.IsLoaded() &&
                    (ImportType == BaseDataImportStrategyType.Append || VersionStrategy.IsNewest()))
                {
                    // start transaction
                    Logger.Write($"Processing base data update for type {typeof(TEntity).Name} from file {file}");
                    var rows = 0;

                    // Verify data file exists.
                    if (!File.Exists(Path.Combine(baseDataDir, file)))
                    {
                        Logger.Warning("Import file \"{0}\" missing from the base data resources directory.", file);
                        return;
                    }

                    using (var session = DataProvider.SessionFactory.OpenStatelessSession())
                    {
                        // Turn off indexes (once, before the first file is imported).
                        if (TurnOffIndexesDuringImpport && !tableIndexIsOff)
                        {
                            DisableTableIndexes();
                            tableIndexIsOff = true;
                        }

                        // Truncate the table if it's a replace strategy
                        if (ImportType == BaseDataImportStrategyType.Replace)
                        {
                            using (var cmd = session.Connection.CreateCommand())
                            {
                                if (cmd.Connection.State == ConnectionState.Closed)
                                {
                                    cmd.Connection.Open();
                                }

                                cmd.CommandTimeout = 900;
                                cmd.CommandText = string.Format("TRUNCATE TABLE {0}", tableName);
                                cmd.ExecuteNonQuery();
                            }
                        }

                        var builder = new OleDbConnectionStringBuilder();
                        builder.Provider = "Microsoft.ACE.OLEDB.12.0";
                        builder.DataSource = baseDataDir;
                        builder["Extended Properties"] = "text;HDR=YES;FMT=Delimited";

                        using (var conn = new OleDbConnection(builder.ConnectionString))
                        {
                            conn.Open();
                            var sql = string.Format("SELECT * FROM [{0}]", Path.GetFileName(file));

                            using (var cmd = new OleDbCommand(sql, conn))
                            // FIX: the data reader was never disposed in the original; hold
                            // it in a using block so the OLE DB resources are released.
                            using (var reader = cmd.ExecuteReader())
                            using (var bulkImporter = new BulkInsert<TEntity, TKey>(session.Connection))
                            {
                                // Feed the importer the stateless session's connection.
                                bulkImporter.ConnectionRequested += (o, e) => e.Data = session.Connection as SqlConnection;
                                bulkImporter.Prepare();
                                bulkImporter.BatchSize = 1000; // TODO: Parameterize?

                                if (reader == null)
                                {
                                    Logger.Warning(
                                        "A problem occurred while trying to read CSV file \"{0}\". Please make sure that the file is properly formated and has data.",
                                        file);
                                    return;
                                }

                                if (reader.HasRows)
                                {
                                    while (reader.Read())
                                    {
                                        // LoadFromReader maps a CSV row to TEntity; null rows
                                        // are skipped but still counted (original behavior).
                                        var temp = LoadFromReader(reader);
                                        if (temp != null)
                                        {
                                            bulkImporter.Insert(temp);
                                        }

                                        rows++;
                                    }
                                }
                            }
                        }
                    }

                    // Record the schema version in a separate stateful session.
                    SchemaVersion version;
                    using (var session = DataProvider.SessionFactory.OpenSession())
                    {
                        version = VersionStrategy.GetVersion(session);
                        session.SaveOrUpdate(version);
                        session.Flush();
                    }

                    // commit transaction
                    Logger.Information(
                        $"Base data update completed for type {typeof(TEntity).Name}: {rows} rows inserted or updated; now at schema version {version}");
                }
            }
        }
    }
    catch (Exception ex)
    {
        Logger.Write(ex, "Error loading base data for entity {0}", typeof(TEntity).Name);
    }
    finally
    {
        // FIX: re-enable indexes in a finally block so they are restored even when
        // the import aborts early (the "return" paths above) or throws; previously
        // this ran only on the fully-successful path, leaving indexes disabled.
        if (TurnOffIndexesDuringImpport && tableIndexIsOff)
        {
            Task.Factory.StartNew(() => EnableTableIndexes(tableName), TaskCreationOptions.LongRunning);
        }
    }
}
/// <summary>
/// Loads the data: for each CSV matching "{Fileprefix}*.csv" that needs loading,
/// optionally disables table indexes, truncates the table for Replace imports,
/// runs a T-SQL BULK INSERT using the configured format file, then records the
/// schema version. Indexes are re-enabled in the finally block. Errors are
/// logged and swallowed (best-effort load).
/// </summary>
public override void LoadData()
{
    bool tableIndexIsOff = false;

    try
    {
        // Get list of files matching mask
        // TODO: Throw an error if the path doesn't exist?
        if (Directory.Exists(baseDataDir))
        {
            var files = Directory.GetFiles(baseDataDir, Fileprefix + "*.csv").ToList();

            // Turn off indexes (once, up front, for the whole batch).
            if (files.Any() && TurnOffIndexesDuringImpport && AllowDisableIndexesDuringImport)
            {
                DisableTableIndexes();
                tableIndexIsOff = true;
            }

            foreach (var file in files)
            {
                VersionStrategy.Filename = file;

                if (!VersionStrategy.IsLoaded() &&
                    (ImportType == BaseDataImportStrategyType.Append || VersionStrategy.IsNewest()))
                {
                    Logger.Write($"Processing base data update for type {typeof(TEntity).Name} from file {file}");
                    var rows = 0;

                    // start transaction
                    // Verify format file exists.
                    if (!File.Exists(Path.Combine(baseDataDir, FormatFile)))
                    {
                        Logger.Warning("Format file \"{0}\" missing from the base data resources directory.", FormatFile);
                        return;
                    }

                    // Verify data file exists.
                    if (!File.Exists(Path.Combine(baseDataDir, file)))
                    {
                        Logger.Warning("Import file \"{0}\" missing from the base data resources directory.", file);
                        return;
                    }

                    using (var session = DataProvider.SessionFactory.OpenStatelessSession())
                    {
                        // Turn off indexes.
                        // Please do not remove as we may add back. - Jason
                        //if (TurnOffIndexesDuringImpport && !tableIndexIsOff)
                        //{
                        //    DisableTableIndexes();
                        //    tableIndexIsOff = true;
                        //}

                        // Truncate the table if it's a replace strategy
                        if (ImportType == BaseDataImportStrategyType.Replace)
                        {
                            using (var cmd = session.Connection.CreateCommand())
                            {
                                if (cmd.Connection.State == ConnectionState.Closed)
                                {
                                    cmd.Connection.Open();
                                }

                                cmd.CommandTimeout = 900;
                                cmd.CommandText = string.Format("TRUNCATE TABLE {0}", tableName);
                                cmd.ExecuteNonQuery();
                            }
                        }

                        var con = session.Connection;
                        ProvideFeedback(string.Format("Importing file {0}", file));

                        // FIX: escape embedded single quotes in the file paths; BULK INSERT
                        // takes the paths as string literals and an apostrophe in a path
                        // previously produced malformed SQL. (BULK INSERT cannot take the
                        // paths as parameters, so escaping is the available defense.)
                        var dataPath = Path.Combine(baseDataDir, file).Replace("'", "''");
                        var formatPath = Path.Combine(baseDataDir, FormatFile).Replace("'", "''");

                        using (var cmd2 = con.CreateCommand())
                        {
                            // FIRSTROW = 2 skips the CSV header row.
                            cmd2.CommandText = "BULK INSERT " + tableName + " FROM '" +
                                               dataPath + "' WITH (FIRSTROW = 2, FORMATFILE = '" +
                                               formatPath + "')";
                            cmd2.CommandTimeout = 6000;
                            rows = cmd2.ExecuteNonQuery();
                        }
                    }

                    SchemaVersion version;
                    using (var session = DataProvider.SessionFactory.OpenSession())
                    {
                        // TODO: Add functionality to update existing version row in all scenarios to avoid multiple entries. - Jason
                        version = VersionStrategy.GetVersion(session);
                        session.SaveOrUpdate(version);
                        session.Flush();
                    }

                    Logger.Information(
                        $"Base data update completed for type {typeof(TEntity).Name}: {rows} rows inserted or updated; now at schema version {version}");
                }
            }
        }
    }
    catch (Exception ex)
    {
        Logger.Write(ex, "Error loading base data for type {0}", typeof(TEntity).Name);
    }
    finally
    {
        // Indexes are restored here so early returns and exceptions above still
        // re-enable them; done on a long-running task since rebuild can be slow.
        if (TurnOffIndexesDuringImpport && AllowEnableIndexesDuringImport && tableIndexIsOff)
        {
            Task.Factory.StartNew(() =>
            {
                EnableTableIndexes(tableName);
            }, TaskCreationOptions.LongRunning);
        }
    }
}