/// <summary> Completes any recently loaded items which were flagged in the database as
/// requiring additional work, by reprocessing them from the existing resource files
/// already on the image server </summary>
/// <remarks> Items whose resource folder or METS file can no longer be found are logged
/// as errors and have their additional-work flag cleared so they are not retried forever </remarks>
private void Complete_Any_Recent_Loads_Requiring_Additional_Work()
{
    // Get the list of recent loads requiring additional work
    DataTable additionalWorkRequired = Engine_Database.Items_Needing_Aditional_Work;
    if ((additionalWorkRequired != null) && (additionalWorkRequired.Rows.Count > 0))
    {
        Add_NonError_To_Log("Processing recently loaded items needing additional work", "Standard", String.Empty, String.Empty, -1);

        // Create the incoming digital folder object which will be used for all these
        Actionable_Builder_Source_Folder sourceFolder = new Actionable_Builder_Source_Folder();

        // Step through each one
        foreach (DataRow thisRow in additionalWorkRequired.Rows)
        {
            // Get the information about this item
            string bibID = thisRow["BibID"].ToString();
            string vid = thisRow["VID"].ToString();

            // Determine the file root for this
            // ( pairwise directory structure -- assumes a ten-character BibID; TODO confirm upstream guarantee )
            string file_root = bibID.Substring(0, 2) + "\\" + bibID.Substring(2, 2) + "\\" + bibID.Substring(4, 2) + "\\" + bibID.Substring(6, 2) + "\\" + bibID.Substring(8, 2);

            // Determine the source folder for this resource
            string resource_folder = settings.Servers.Image_Server_Network + file_root + "\\" + vid;

            // Determine the METS file name
            string mets_file = resource_folder + "\\" + bibID + "_" + vid + ".mets.xml";

            // Ensure these both exist
            if ((Directory.Exists(resource_folder)) && (File.Exists(mets_file)))
            {
                // Create the incoming digital resource object
                // ( FIX: re-use the already computed file_root rather than rebuilding the same substring chain )
                Incoming_Digital_Resource additionalWorkResource = new Incoming_Digital_Resource(resource_folder, sourceFolder)
                {
                    BibID = bibID,
                    VID = vid,
                    File_Root = file_root
                };

                Complete_Single_Recent_Load_Requiring_Additional_Work(additionalWorkResource);
            }
            else
            {
                // Log the missing files and clear the flag so this item is not endlessly retried
                Add_Error_To_Log("Unable to find valid resource files for reprocessing " + bibID + ":" + vid, bibID + ":" + vid, "Reprocess", -1);

                int itemID = Engine_Database.Get_ItemID_From_Bib_VID(bibID, vid);
                SobekCM_Item_Database.Update_Additional_Work_Needed_Flag(itemID, false);
            }
        }
    }
}
/// <summary> Constructor for a new instance of the Incoming_Digital_Resource class </summary>
/// <param name="ResourceFolder"> Folder for this incoming digital resource </param>
/// <param name="SourceFolder"> Parent source folder </param>
public Incoming_Digital_Resource(string ResourceFolder, Actionable_Builder_Source_Folder SourceFolder)
{
    // Store the folder information passed in
    resourceFolder = ResourceFolder;
    Source_Folder = SourceFolder;

    // Until the package is examined, the resource type remains unknown
    type = Incoming_Digital_Resource_Type.UNKNOWN;

    // Default all the remaining members
    bibid = String.Empty;
    vid = String.Empty;
    metsTypeOverride = String.Empty;
    packageTime = DateTime.Now;
    NewPackage = false;
    NewImageFiles = new List<string>();
    fileRoot = "collect/image_files/";
}
/// <summary> Completes any recently loaded items which were flagged in the database as
/// requiring additional work, by reprocessing them from the existing resource files
/// already on the image server </summary>
/// <remarks> Items whose resource folder or METS file can no longer be found are logged
/// as errors and have their additional-work flag cleared so they are not retried forever </remarks>
private void Complete_Any_Recent_Loads_Requiring_Additional_Work()
{
    // Get the list of recent loads requiring additional work
    DataTable additionalWorkRequired = Library.Database.SobekCM_Database.Items_Needing_Aditional_Work;
    if ((additionalWorkRequired != null) && (additionalWorkRequired.Rows.Count > 0))
    {
        Add_NonError_To_Log("Processing recently loaded items needing additional work", "Standard", String.Empty, String.Empty, -1);

        // Create the incoming digital folder object which will be used for all these
        Actionable_Builder_Source_Folder sourceFolder = new Actionable_Builder_Source_Folder();

        // Step through each one
        foreach (DataRow thisRow in additionalWorkRequired.Rows)
        {
            // Get the information about this item
            string bibID = thisRow["BibID"].ToString();
            string vid = thisRow["VID"].ToString();

            // Determine the file root for this
            // ( pairwise directory structure -- assumes a ten-character BibID; TODO confirm upstream guarantee )
            string file_root = bibID.Substring(0, 2) + "\\" + bibID.Substring(2, 2) + "\\" + bibID.Substring(4, 2) + "\\" + bibID.Substring(6, 2) + "\\" + bibID.Substring(8, 2);

            // Determine the source folder for this resource
            string resource_folder = settings.Servers.Image_Server_Network + file_root + "\\" + vid;

            // Determine the METS file name
            string mets_file = resource_folder + "\\" + bibID + "_" + vid + ".mets.xml";

            // Ensure these both exist
            if ((Directory.Exists(resource_folder)) && (File.Exists(mets_file)))
            {
                // Create the incoming digital resource object
                // ( FIX: re-use the already computed file_root rather than rebuilding the same substring chain )
                Incoming_Digital_Resource additionalWorkResource = new Incoming_Digital_Resource(resource_folder, sourceFolder)
                {
                    BibID = bibID,
                    VID = vid,
                    File_Root = file_root
                };

                Complete_Single_Recent_Load_Requiring_Additional_Work(resource_folder, additionalWorkResource);
            }
            else
            {
                // Log the missing files and clear the flag so this item is not endlessly retried
                Add_Error_To_Log("Unable to find valid resource files for reprocessing " + bibID + ":" + vid, bibID + ":" + vid, "Reprocess", -1);

                int itemID = Library.Database.SobekCM_Database.Get_ItemID_From_Bib_VID(bibID, vid);
                Library.Database.SobekCM_Database.Update_Additional_Work_Needed_Flag(itemID, false, null);
            }
        }
    }
}
/// <summary> Performs the bulk loader process and handles any incoming digital resources </summary>
/// <param name="Verbose"> Flag indicates if detailed, step-by-step log messages should be written </param>
/// <returns> TRUE if there are still pending items to be processed, otherwise FALSE </returns>
/// <remarks> Overall pipeline: refresh settings, run usage stats and pre-process modules,
/// finish items flagged for additional work, run the folder modules to collect incoming
/// packages and deletes, process both queues, then run post-process modules.
/// Abort requests from the database are checked between every major phase. </remarks>
public bool Perform_BulkLoader( bool Verbose )
{
    verbose = Verbose;
    finalmessage = String.Empty;
    stillPendingItems = false;

    Add_NonError_To_Log("Worker_BulkLoader.Perform_BulkLoader: Start", verbose, String.Empty, String.Empty, -1);

    // Refresh any settings and item lists
    // NOTE(review): this early return (and the abort return below) skip ReleaseResources(),
    // unlike the later returns -- confirm whether that is intentional
    if (!Refresh_Settings_And_Item_List())
    {
        Add_Error_To_Log("Worker_BulkLoader.Perform_BulkLoader: Error refreshing settings and item list", String.Empty, String.Empty, -1);
        finalmessage = "Error refreshing settings and item list";
        return false;
    }

    // If not already verbose, check settings
    if (!verbose)
    {
        verbose = settings.Builder.Verbose_Flag;
    }

    Add_NonError_To_Log("Worker_BulkLoader.Perform_BulkLoader: Refreshed settings and item list", verbose, String.Empty, String.Empty, -1);

    // Check for abort
    if (CheckForAbort())
    {
        Add_NonError_To_Log("Worker_BulkLoader.Perform_BulkLoader: Aborted (line 137)", verbose, String.Empty, String.Empty, -1);
        finalmessage = "Aborted per database request";
        return false;
    }

    // Set to standard operation then
    Abort_Database_Mechanism.Builder_Operation_Flag = Builder_Operation_Flag_Enum.STANDARD_OPERATION;

    // Run the usage stats module first, wiring its events into this loader's log
    CalculateUsageStatisticsModule statsModule = new CalculateUsageStatisticsModule();
    statsModule.Process += module_Process;
    statsModule.Error += module_Error;
    statsModule.DoWork(settings);

    // RUN ANY PRE-PROCESSING MODULES HERE
    if (BuilderSettings.PreProcessModules.Count > 0)
    {
        Add_NonError_To_Log("Running all pre-processing steps", verbose, String.Empty, String.Empty, -1);
        foreach (iPreProcessModule thisModule in BuilderSettings.PreProcessModules)
        {
            // Check for abort
            if (CheckForAbort())
            {
                Abort_Database_Mechanism.Builder_Operation_Flag = Builder_Operation_Flag_Enum.ABORTING;
                break;
            }
            thisModule.DoWork(settings);
        }
    }

    // Load the settings into all the item and folder processors
    foreach (iSubmissionPackageModule thisModule in BuilderSettings.ItemProcessModules)
        thisModule.Settings = settings;
    foreach (iSubmissionPackageModule thisModule in BuilderSettings.DeleteItemModules)
        thisModule.Settings = settings;
    foreach (iFolderModule thisModule in BuilderSettings.AllFolderModules)
        thisModule.Settings = settings;

    Add_NonError_To_Log("Worker_BulkLoader.Perform_BulkLoader: Begin completing any recent loads requiring additional work", verbose, String.Empty, String.Empty, -1);

    // Handle all packages already on the web server which are flagged for additional work required
    Complete_Any_Recent_Loads_Requiring_Additional_Work();

    Add_NonError_To_Log("Worker_BulkLoader.Perform_BulkLoader: Finished completing any recent loads requiring additional work", verbose, String.Empty, String.Empty, -1);

    // Check for abort
    // NOTE(review): the "(line ...)" numbers in these log messages are hard-coded and
    // appear stale -- "line 151" is used for two different abort points below
    if (CheckForAbort())
    {
        Add_NonError_To_Log("Worker_BulkLoader.Perform_BulkLoader: Aborted (line 151)", verbose, String.Empty, String.Empty, -1);
        finalmessage = "Aborted per database request";
        ReleaseResources();
        return false;
    }

    // Create the seperate queues for each type of incoming digital resource files
    List<Incoming_Digital_Resource> incoming_packages = new List<Incoming_Digital_Resource>();
    List<Incoming_Digital_Resource> deletes = new List<Incoming_Digital_Resource>();

    // Step through all the incoming folders, and run the folder modules
    if (BuilderSettings.IncomingFolders.Count == 0)
    {
        Add_NonError_To_Log("Worker_BulkLoader.Move_Appropriate_Inbound_Packages_To_Processing: There are no incoming folders set in the database", "Standard", String.Empty, String.Empty, -1);
    }
    else
    {
        Add_NonError_To_Log("Worker_BulkLoader.Perform_BulkLoader: Begin processing builder folders", verbose, String.Empty, String.Empty, -1);
        foreach (Builder_Source_Folder folder in BuilderSettings.IncomingFolders)
        {
            Actionable_Builder_Source_Folder actionFolder = new Actionable_Builder_Source_Folder(folder, BuilderSettings);
            foreach (iFolderModule thisModule in actionFolder.BuilderModules)
            {
                // Check for abort
                if (CheckForAbort())
                {
                    Add_NonError_To_Log("Worker_BulkLoader.Perform_BulkLoader: Aborted (line 151)", verbose, String.Empty, String.Empty, -1);
                    finalmessage = "Aborted per database request";
                    ReleaseResources();
                    return false;
                }

                // Each folder module may add to the incoming packages and/or the deletes queue
                thisModule.DoWork(actionFolder, incoming_packages, deletes);
            }
        }

        // Since all folder processing is complete, release resources
        foreach (iFolderModule thisModule in BuilderSettings.AllFolderModules)
            thisModule.ReleaseResources();
    }

    // Check for abort
    if (CheckForAbort())
    {
        Add_NonError_To_Log("Worker_BulkLoader.Perform_BulkLoader: Aborted (line 179)", verbose, String.Empty, String.Empty, -1);
        finalmessage = "Aborted per database request";
        ReleaseResources();
        return false;
    }

    // If there were no packages to process further stop here
    if ((incoming_packages.Count == 0) && (deletes.Count == 0))
    {
        Add_Complete_To_Log("No New Packages - Process Complete", "No Work", String.Empty, String.Empty, -1);
        if (finalmessage.Length == 0)
            finalmessage = "No New Packages - Process Complete";
        ReleaseResources();
        return false;
    }

    // Iterate through all non-delete resources ready for processing
    Add_NonError_To_Log("Worker_BulkLoader.Perform_BulkLoader: Process any incoming packages", verbose, String.Empty, String.Empty, -1);
    Process_All_Incoming_Packages(incoming_packages);

    // Can now release these resources
    foreach (iSubmissionPackageModule thisModule in BuilderSettings.ItemProcessModules)
    {
        thisModule.ReleaseResources();
    }

    // Process any delete requests ( iterate through all deletes )
    Add_NonError_To_Log("Worker_BulkLoader.Perform_BulkLoader: Process any deletes", verbose, String.Empty, String.Empty, -1);
    Process_All_Deletes(deletes);

    // Can now release these resources
    foreach (iSubmissionPackageModule thisModule in BuilderSettings.DeleteItemModules)
    {
        thisModule.ReleaseResources();
    }

    // RUN ANY POST-PROCESSING MODULES HERE
    if (BuilderSettings.PostProcessModules.Count > 0)
    {
        Add_NonError_To_Log("Running all post-processing steps", verbose, String.Empty, String.Empty, -1);
        foreach (iPostProcessModule thisModule in BuilderSettings.PostProcessModules)
        {
            // Check for abort
            if (CheckForAbort())
            {
                Abort_Database_Mechanism.Builder_Operation_Flag = Builder_Operation_Flag_Enum.ABORTING;
                break;
            }
            thisModule.DoWork(aggregationsToRefresh, processedItems, deletedItems, settings);
        }
    }

    // Add the complete entry for the log
    if (!CheckForAbort())
    {
        Add_Complete_To_Log("Process Complete", "Complete", String.Empty, String.Empty, -1);
        if (finalmessage.Length == 0)
            finalmessage = "Process completed successfully";
    }
    else
    {
        finalmessage = "Aborted per database request";
        Add_Complete_To_Log("Process Aborted Cleanly", "Complete", String.Empty, String.Empty, -1);
    }

    // Clear lots of collections and such from memory, since we are done processing
    ReleaseResources();

    Add_NonError_To_Log("Worker_BulkLoader.Perform_BulkLoader: Done", verbose, String.Empty, String.Empty, -1);
    return stillPendingItems;
}
/// <summary> Performs the bulk loader process and handles any incoming digital resources </summary>
/// <param name="Verbose"> Flag indicates if detailed, step-by-step log messages should be written </param>
/// <returns> TRUE if there are still pending items to be processed, otherwise FALSE </returns>
/// <remarks> Overall pipeline: refresh settings, run first-run maintenance modules,
/// run pre-process modules, finish items flagged for additional work, run the folder
/// modules to collect incoming packages and deletes, process both queues, then run
/// post-process modules. Abort requests are checked between every major phase, and
/// ReportLastRun() records the outcome on every exit path. </remarks>
public bool Perform_BulkLoader(bool Verbose)
{
    // Run the usage stats module first
    //CalculateUsageStatisticsModule statsModule2 = new CalculateUsageStatisticsModule();
    //statsModule2.Process += module_Process;
    //statsModule2.Error += module_Error;
    //statsModule2.DoWork(settings);

    verbose = Verbose;
    finalmessage = String.Empty;
    stillPendingItems = false;

    Add_NonError_To_Log("Starting to perform bulk load", verbose, String.Empty, String.Empty, -1);

    // Refresh any settings and item lists
    if (!Refresh_Settings_And_Item_List())
    {
        Add_Error_To_Log("Error refreshing settings and item list", String.Empty, String.Empty, -1);
        finalmessage = "Error refreshing settings and item list";
        ReportLastRun();
        return(false);
    }

    // If not already verbose, check settings
    if (!verbose)
    {
        verbose = settings.Builder.Verbose_Flag;
    }

    Add_NonError_To_Log("Refreshed settings and item list", verbose, String.Empty, String.Empty, -1);

    // Check for abort
    // NOTE(review): the "line ..." numbers in these abort log messages are hard-coded
    // and appear stale -- "line 151" is used for three different abort points below
    if (CheckForAbort())
    {
        Add_NonError_To_Log("Aborted (Worker_BulkLoader line 137)", verbose, String.Empty, String.Empty, -1);
        finalmessage = "Aborted per database request";
        ReportLastRun();
        return(false);
    }

    // Set to standard operation then
    Abort_Database_Mechanism.Builder_Operation_Flag = Builder_Operation_Flag_Enum.STANDARD_OPERATION;

    // Run some processes the first time it runs
    // These will be converted to scheduled tasks by version 5.0
    if (firstrun)
    {
        // Run the usage stats
        CalculateUsageStatisticsModule statsModule = new CalculateUsageStatisticsModule();
        statsModule.Process += module_Process;
        statsModule.Error += module_Error;
        statsModule.DoWork(settings);

        // Look for any aggregation tiles and cache the metadata for them
        CacheAggregationTileMetadataModule tileModule = new CacheAggregationTileMetadataModule();
        tileModule.Process += module_Process;
        tileModule.Error += module_Error;
        tileModule.DoWork(settings);

        // Clear the old logs files
        ExpireOldLogEntriesModule logsModule = new ExpireOldLogEntriesModule();
        logsModule.Process += module_Process;
        logsModule.Error += module_Error;
        logsModule.DoWork(settings);

        // Rebuild all the static pages
        RebuildAllAggregationBrowsesModule rebuildModule = new RebuildAllAggregationBrowsesModule();
        rebuildModule.Process += module_Process;
        rebuildModule.Error += module_Error;
        rebuildModule.DoWork(settings);

        firstrun = false;
    }

    // RUN ANY PRE-PROCESSING MODULES HERE
    if (builderModules.PreProcessModules.Count > 0)
    {
        Add_NonError_To_Log("Running all pre-processing steps", verbose, String.Empty, String.Empty, -1);
        foreach (iPreProcessModule thisModule in builderModules.PreProcessModules)
        {
            // Check for abort
            if (CheckForAbort())
            {
                Abort_Database_Mechanism.Builder_Operation_Flag = Builder_Operation_Flag_Enum.ABORTING;
                break;
            }
            thisModule.DoWork(settings);
        }
    }

    // Load the settings into all the item and folder processors
    foreach (iSubmissionPackageModule thisModule in builderModules.ItemProcessModules)
    {
        thisModule.Settings = settings;
    }
    foreach (iSubmissionPackageModule thisModule in builderModules.DeleteItemModules)
    {
        thisModule.Settings = settings;
    }
    foreach (iFolderModule thisModule in builderModules.AllFolderModules)
    {
        thisModule.Settings = settings;
    }

    Add_NonError_To_Log("Begin completing any recent loads requiring additional work", verbose, String.Empty, String.Empty, -1);

    // Handle all packages already on the web server which are flagged for additional work required
    Complete_Any_Recent_Loads_Requiring_Additional_Work();

    Add_NonError_To_Log("Finished completing any recent loads requiring additional work", verbose, String.Empty, String.Empty, -1);

    // Check for abort
    if (CheckForAbort())
    {
        Add_NonError_To_Log("Aborted (Worker_BulkLoader line 151)", verbose, String.Empty, String.Empty, -1);
        finalmessage = "Aborted per database request";
        ReleaseResources();
        ReportLastRun();
        return(false);
    }

    // Create the seperate queues for each type of incoming digital resource files
    List<Incoming_Digital_Resource> incoming_packages = new List<Incoming_Digital_Resource>();
    List<Incoming_Digital_Resource> deletes = new List<Incoming_Digital_Resource>();

    // Step through all the incoming folders, and run the folder modules
    if (builderSettings.IncomingFolders.Count == 0)
    {
        // Only report the missing-folders condition once per builder session
        if (!noFoldersReported)
        {
            Add_NonError_To_Log("There are no incoming folders set in the database", "Standard", String.Empty, String.Empty, -1);
            noFoldersReported = true;
        }
    }
    else
    {
        Add_NonError_To_Log("Begin processing builder folders", verbose, String.Empty, String.Empty, -1);
        foreach (Builder_Source_Folder folder in builderSettings.IncomingFolders)
        {
            Actionable_Builder_Source_Folder actionFolder = new Actionable_Builder_Source_Folder(folder, builderModules.AssemblyClassToModule);
            foreach (iFolderModule thisModule in actionFolder.BuilderModules)
            {
                // Check for abort
                if (CheckForAbort())
                {
                    Add_NonError_To_Log("Aborted (Worker_BulkLoader line 151)", verbose, String.Empty, String.Empty, -1);
                    finalmessage = "Aborted per database request";
                    ReleaseResources();
                    ReportLastRun();
                    return(false);
                }

                // Each folder module may add to the incoming packages and/or the deletes queue
                thisModule.DoWork(actionFolder, incoming_packages, deletes);
            }
        }

        // Since all folder processing is complete, release resources
        foreach (iFolderModule thisModule in builderModules.AllFolderModules)
        {
            thisModule.ReleaseResources();
        }
    }

    // Check for abort
    if (CheckForAbort())
    {
        Add_NonError_To_Log("Aborted (Worker_BulkLoader line 179)", verbose, String.Empty, String.Empty, -1);
        finalmessage = "Aborted per database request";
        ReleaseResources();
        ReportLastRun();
        return(false);
    }

    // If there were no packages to process further stop here
    if ((incoming_packages.Count == 0) && (deletes.Count == 0))
    {
        Add_Complete_To_Log("No New Packages - Process Complete", "No Work", String.Empty, String.Empty, -1);
        if (finalmessage.Length == 0)
        {
            finalmessage = "No New Packages - Process Complete";
        }
        ReleaseResources();
        ReportLastRun();
        return(false);
    }

    // Iterate through all non-delete resources ready for processing
    Add_NonError_To_Log("Process any incoming packages", verbose, String.Empty, String.Empty, -1);
    Process_All_Incoming_Packages(incoming_packages);

    // Can now release these resources
    foreach (iSubmissionPackageModule thisModule in builderModules.ItemProcessModules)
    {
        thisModule.ReleaseResources();
    }

    // Process any delete requests ( iterate through all deletes )
    Add_NonError_To_Log("Process any deletes", verbose, String.Empty, String.Empty, -1);
    Process_All_Deletes(deletes);

    // Can now release these resources
    foreach (iSubmissionPackageModule thisModule in builderModules.DeleteItemModules)
    {
        thisModule.ReleaseResources();
    }

    // RUN ANY POST-PROCESSING MODULES HERE
    if (builderModules.PostProcessModules.Count > 0)
    {
        Add_NonError_To_Log("Running all post-processing steps", verbose, String.Empty, String.Empty, -1);
        foreach (iPostProcessModule thisModule in builderModules.PostProcessModules)
        {
            // Check for abort
            if (CheckForAbort())
            {
                Abort_Database_Mechanism.Builder_Operation_Flag = Builder_Operation_Flag_Enum.ABORTING;
                break;
            }
            thisModule.DoWork(aggregationsToRefresh, processedItems, deletedItems, settings);
        }
    }

    // Add the complete entry for the log
    if (!CheckForAbort())
    {
        Add_Complete_To_Log("Process Complete", "Complete", String.Empty, String.Empty, -1);
        if (finalmessage.Length == 0)
        {
            finalmessage = "Process completed successfully";
        }
    }
    else
    {
        finalmessage = "Aborted per database request";
        Add_Complete_To_Log("Process Aborted Cleanly", "Complete", String.Empty, String.Empty, -1);
    }

    // Save information about this last run
    ReportLastRun();

    // Clear lots of collections and such from memory, since we are done processing
    ReleaseResources();

    Add_NonError_To_Log("Process Done", verbose, String.Empty, String.Empty, -1);
    return(stillPendingItems);
}