/// <summary> Constructor creates a new Page_File_Enumerator to iterate through
/// the <see cref="Builder_Page_File_Collection"/>. </summary>
/// <param name="NodeCollection"> Collection of nodes </param>
internal Builder_Page_File_Enumerator(Builder_Page_File_Collection NodeCollection)
{
    nodes = NodeCollection;
}
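// Usage sketch ( illustrative only, not part of this class ): a collection exposing
// this enumerator is normally consumed with a simple foreach, for example:
//
//    foreach (Builder_Page_File pageFile in pageFileCollection)
//    {
//        Console.WriteLine(pageFile.FullName);
//    }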
private static void get_files_from_current_directory(Builder_Page_File_Collection FileList, List<string> FileFilters, string SourceDirectory, string RelativeDirectory, bool RecursivelyIncludeSubfolders)
{
    // Get the files in this directory by using the file filters ( two different filters
    // may match the same file in this directory, so special code is added for this case )
    List<string> files_in_this_dir = new List<string>();
    foreach (string thisFilter in FileFilters)
    {
        string[] thisFilterFiles = Directory.GetFiles(SourceDirectory, thisFilter);
        foreach (string thisFilterFile in thisFilterFiles)
        {
            if (!files_in_this_dir.Contains(thisFilterFile))
                files_in_this_dir.Add(thisFilterFile);
        }
    }

    // All found files should be added to the builder page file collection
    foreach (string thisFile in files_in_this_dir)
    {
        // Exclude the archival and quality control files
        if ((thisFile.ToUpper().IndexOf("_ARCHIVE.") < 0) && (thisFile.ToUpper().IndexOf(".QC.JPG") < 0))
        {
            // Create the new page_file object
            Builder_Page_File newFile = new Builder_Page_File(thisFile, RelativeDirectory, true);

            // Add into the page file collection, sorting appropriately
            FileList.Insert(newFile);
        }
    }

    // Check subdirectories
    if (RecursivelyIncludeSubfolders)
    {
        string[] subdirs = Directory.GetDirectories(SourceDirectory);
        foreach (string thisSubDir in subdirs)
        {
            DirectoryInfo thisSubDirInfo = new DirectoryInfo(thisSubDir);
            string dir_name = thisSubDirInfo.Name;
            if (RelativeDirectory.Length == 0)
                get_files_from_current_directory(FileList, FileFilters, thisSubDir, dir_name, true);
            else
                get_files_from_current_directory(FileList, FileFilters, thisSubDir, RelativeDirectory + "\\" + dir_name, true);
        }
    }
}
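// Illustrative sketch ( hypothetical helper, not called within this library ):
// gathers page files for a package folder the same way Add_All_Files does below,
// using a typical image filter set. The directory path is an example only.
private static Builder_Page_File_Collection example_gather_page_files()
{
    // Filters are uppercased to match how Add_All_Files prepares them
    List<string> filters = new List<string>();
    filters.Add("*.TIF");
    filters.Add("*.JPG");
    filters.Add("*.JP2");

    // Walk the package folder and every subfolder beneath it
    Builder_Page_File_Collection collection = new Builder_Page_File_Collection();
    get_files_from_current_directory(collection, filters, @"C:\packages\UF00000001_00001", String.Empty, true);
    return collection;
}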
private static void recursively_add_all_METS_files(abstract_TreeNode RootNode, List<SobekCM_File_Info> METSFiles, Builder_Page_File_Collection METSFileCollection, Dictionary<SobekCM_File_Info, Page_TreeNode> FileToPage, Dictionary<Page_TreeNode, Division_TreeNode> PageToDiv, List<string> FileFilters)
{
    if (RootNode.Page)
    {
        Page_TreeNode pageNode = (Page_TreeNode) RootNode;
        foreach (SobekCM_File_Info thisFile in pageNode.Files)
        {
            // Determine if this file matches one of the file filters ( an empty
            // filter list matches everything )
            bool add_file = false;
            if (FileFilters.Count == 0)
            {
                add_file = true;
            }
            else
            {
                foreach (string file_filter in FileFilters)
                {
                    // Strip any wildcard from the filter ( i.e., "*.TIF" becomes ".TIF" )
                    // so the pattern can be matched by a simple substring search
                    string filter_compare = file_filter.Replace("*", "");
                    if (thisFile.System_Name.ToUpper().IndexOf(filter_compare) > 0)
                    {
                        add_file = true;
                        break;
                    }
                }
            }

            if (add_file)
            {
                // Add this file to the flat list and to the sorted page file collection
                METSFiles.Add(thisFile);
                Builder_Page_File newPageFile = new Builder_Page_File(thisFile.System_Name, true);
                newPageFile.METS_Page = pageNode;
                newPageFile.METS_Division = PageToDiv[pageNode];
                METSFileCollection.Insert(newPageFile);
                FileToPage.Add(thisFile, pageNode);
            }
        }
    }
    else
    {
        // This is a division node, so record the parent division for each child
        // page and then recurse into every child node
        Division_TreeNode divNode = (Division_TreeNode) RootNode;
        foreach (abstract_TreeNode thisNode in divNode.Nodes)
        {
            if (thisNode.Page)
            {
                try
                {
                    if (!PageToDiv.ContainsKey((Page_TreeNode) thisNode))
                    {
                        PageToDiv.Add((Page_TreeNode) thisNode, divNode);
                    }
                }
                catch
                {
                    // Defensive only; the ContainsKey check above should prevent
                    // duplicate key errors for pages under multiple divisions
                }
            }
            recursively_add_all_METS_files(thisNode, METSFiles, METSFileCollection, FileToPage, PageToDiv, FileFilters);
        }
    }
}
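// Illustrative sketch ( hypothetical helper, mirroring the call made from
// Add_All_Files below ): collects the files already listed in the METS by
// walking each root of the item's physical structure map.
private static List<SobekCM_File_Info> example_collect_mets_files(SobekCM_Item BIBPackage, List<string> FileFilters)
{
    List<SobekCM_File_Info> metsFiles = new List<SobekCM_File_Info>();
    Builder_Page_File_Collection metsFileCollection = new Builder_Page_File_Collection();
    Dictionary<SobekCM_File_Info, Page_TreeNode> fileToPage = new Dictionary<SobekCM_File_Info, Page_TreeNode>();
    Dictionary<Page_TreeNode, Division_TreeNode> pageToDiv = new Dictionary<Page_TreeNode, Division_TreeNode>();

    foreach (abstract_TreeNode rootNode in BIBPackage.Divisions.Physical_Tree.Roots)
    {
        recursively_add_all_METS_files(rootNode, metsFiles, metsFileCollection, fileToPage, pageToDiv, FileFilters);
    }
    return metsFiles;
}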
/// <summary> Adds all of the file information to a digital resource package by analyzing the directory </summary>
/// <param name="BIBPackage"> Digital resource package to enrich </param>
/// <param name="FilesFilter"> Files to be added as page image files ( such as "*.tif|*.jpg|*.jp2" ) </param>
/// <param name="RecursivelyIncludeSubfolders"> Flag indicates if all files in subfolders should also be added </param>
/// <param name="PageImagesInSeperateFoldersCanBeSamePage"> Flag indicates if two images with the same root found in separate subfolders should be considered the same page </param>
public static void Add_All_Files(SobekCM_Item BIBPackage, string FilesFilter, bool RecursivelyIncludeSubfolders, bool PageImagesInSeperateFoldersCanBeSamePage)
{
    // Get the set of file filters within a list
    List<string> file_filters = new List<string>();
    if (FilesFilter.IndexOf("|") < 0)
    {
        file_filters.Add(FilesFilter.ToUpper());
    }
    else
    {
        string[] splitter = FilesFilter.Split("|".ToCharArray());
        foreach (string thisFilter in splitter)
        {
            file_filters.Add(thisFilter.ToUpper());
        }
    }

    // Get the files from the current directory (or recursive directories)
    Builder_Page_File_Collection fileCollection = new Builder_Page_File_Collection();
    get_files_from_current_directory(fileCollection, file_filters, BIBPackage.Source_Directory, String.Empty, RecursivelyIncludeSubfolders);

    // Now, determine which files are already in the METS file.
    // Build a collection of file objects from the METS
    List<SobekCM_File_Info> metsFiles = new List<SobekCM_File_Info>();
    Builder_Page_File_Collection metsFileCollection = new Builder_Page_File_Collection();
    Dictionary<SobekCM_File_Info, Page_TreeNode> fileToPage = new Dictionary<SobekCM_File_Info, Page_TreeNode>();
    Dictionary<Page_TreeNode, Division_TreeNode> pageToDiv = new Dictionary<Page_TreeNode, Division_TreeNode>();
    foreach (abstract_TreeNode rootNode in BIBPackage.Divisions.Physical_Tree.Roots)
    {
        recursively_add_all_METS_files(rootNode, metsFiles, metsFileCollection, fileToPage, pageToDiv, file_filters);
    }

    // Determine which files to delete from the METS package ( system files listed
    // in the METS which no longer exist in the source directory )
    List<SobekCM_File_Info> deletes = new List<SobekCM_File_Info>();
    foreach (SobekCM_File_Info thisFile in metsFiles)
    {
        if ((thisFile.METS_LocType == SobekCM_File_Info_Type_Enum.SYSTEM) && (!File.Exists(BIBPackage.Source_Directory + "\\" + thisFile.System_Name)))
        {
            deletes.Add(thisFile);
        }
    }

    // Delete the files, and related pages
    foreach (SobekCM_File_Info thisFile in deletes)
    {
        metsFiles.Remove(thisFile);
        Page_TreeNode thisPage = fileToPage[thisFile];
        if (thisPage != null)
        {
            thisPage.Files.Remove(thisFile);
            Division_TreeNode thisDiv = pageToDiv[thisPage];
            if (thisDiv != null)
            {
                thisDiv.Nodes.Remove(thisPage);
            }
        }

        // Remove this from the other mets list as well
        int index = 0;
        int deleteIndex = -1;
        foreach (Builder_Page_File thisPageFile in metsFileCollection)
        {
            if (thisPageFile.FullName.ToUpper() == thisFile.System_Name.ToUpper())
            {
                deleteIndex = index;
                break;
            }
            index++;
        }
        if (deleteIndex >= 0)
        {
            metsFileCollection.RemoveAt(deleteIndex);
        }
    }

    // Now, recursively check each division and remove empty divisions
    int rootNodeCounter = 0;
    while (rootNodeCounter < BIBPackage.Divisions.Physical_Tree.Roots.Count)
    {
        abstract_TreeNode rootNode = BIBPackage.Divisions.Physical_Tree.Roots[rootNodeCounter];
        if (recursively_remove_empty_divisions(rootNode))
            BIBPackage.Divisions.Physical_Tree.Roots.Remove(rootNode);
        else
            rootNodeCounter++;
    }

    // Build the list of all the remaining files
    Hashtable filesPresent = new Hashtable();
    foreach (SobekCM_File_Info thisFile in metsFiles)
    {
        filesPresent[thisFile.System_Name] = thisFile;
    }

    // Determine which files need to be added
    Builder_Page_File_Collection addFiles = new Builder_Page_File_Collection();
    foreach (Builder_Page_File thisFile in fileCollection)
    {
        if (!filesPresent.Contains(thisFile.FullName_With_Relative_Directory))
        {
            addFiles.Add(thisFile);
        }
    }

    // Add files that need to be added
    if (addFiles.Count > 0)
    {
        // Make sure there is at least one division
        if (BIBPackage.Divisions.Physical_Tree.Roots.Count == 0)
        {
            Division_TreeNode newRootNode = new Division_TreeNode("Main", String.Empty);
            BIBPackage.Divisions.Physical_Tree.Roots.Add(newRootNode);
        }

        // Create the map of file name roots to pages
        Dictionary<string, Page_TreeNode> file_to_page_hash = new Dictionary<string, Page_TreeNode>();
        List<abstract_TreeNode> pageNodes = BIBPackage.Divisions.Physical_Tree.Pages_PreOrder;
        foreach (Page_TreeNode pageNode in pageNodes)
        {
            if (pageNode.Files.Count > 0)
            {
                // Use the root of the first file as the key for this page
                string first_page_name = pageNode.Files[0].File_Name_Sans_Extension;
                if (first_page_name.IndexOf(".") > 0)
                    first_page_name = first_page_name.Substring(0, first_page_name.IndexOf("."));

                // If images in separate folders can be the same page, or this is a
                // URL-based file, key off the file name alone without the folder
                if ((PageImagesInSeperateFoldersCanBeSamePage) || (pageNode.Files[0].METS_LocType == SobekCM_File_Info_Type_Enum.URL))
                {
                    if (first_page_name.IndexOf("\\") > 0)
                    {
                        string[] slash_splitter = first_page_name.Split("\\".ToCharArray());
                        first_page_name = slash_splitter[slash_splitter.Length - 1];
                    }
                }
                if (!file_to_page_hash.ContainsKey(first_page_name.ToUpper()))
                {
                    file_to_page_hash[first_page_name.ToUpper()] = pageNode;
                }
            }
        }

        // If there are no existing pages, this can be easily assembled
        if (metsFiles.Count == 0)
        {
            try
            {
                // Get the first division
                Division_TreeNode firstDiv = (Division_TreeNode) BIBPackage.Divisions.Physical_Tree.Roots[0];

                // Add each file
                foreach (Builder_Page_File thisFile in addFiles)
                {
                    // Create the new METS file object
                    SobekCM_File_Info newFileForMETS = new SobekCM_File_Info(thisFile.FullName_With_Relative_Directory);

                    // Get the root of this file, to put all files of the same root on the same page
                    string thisFileShort = newFileForMETS.File_Name_Sans_Extension;
                    if (PageImagesInSeperateFoldersCanBeSamePage)
                    {
                        if (thisFileShort.IndexOf("\\") > 0)
                        {
                            string[] slash_splitter = thisFileShort.Split("\\".ToCharArray());
                            thisFileShort = slash_splitter[slash_splitter.Length - 1];
                        }
                    }

                    // Normalize to upper case to match the keys stored in the hash above
                    thisFileShort = thisFileShort.ToUpper();

                    // Is this a pre-existing root ( therefore pre-existing page )?
                    if (file_to_page_hash.ContainsKey(thisFileShort))
                    {
                        // Just add this file to the pre-existing page
                        file_to_page_hash[thisFileShort].Files.Add(newFileForMETS);
                    }
                    else
                    {
                        // This needs a new page then
                        Page_TreeNode newPage = new Page_TreeNode();
                        newPage.Files.Add(newFileForMETS);
                        firstDiv.Nodes.Add(newPage);

                        // Add this page to the hash, so it is not added again later
                        file_to_page_hash[thisFileShort] = newPage;
                    }
                }
            }
            catch
            {
                // If the first root node is not a division, the cast above fails
                // and no pages are added
            }
        }
        else
        {
            // Configure the initial pointers
            Builder_Page_File previous_file = null;
            Builder_Page_File next_file = metsFileCollection[0];
            Builder_Page_File new_file = addFiles[0];
            int new_file_counter = 1;
            int next_file_counter = 1;

            // Loop through each file to be added
            while (new_file != null)
            {
                // Create the new METS file object
                SobekCM_File_Info newFileForMETS = new SobekCM_File_Info(new_file.FullName_With_Relative_Directory);

                // Get the root of this file, to put all files of the same root on the same page
                string thisFileShort = newFileForMETS.File_Name_Sans_Extension;
                if (PageImagesInSeperateFoldersCanBeSamePage)
                {
                    if (thisFileShort.IndexOf("\\") > 0)
                    {
                        string[] slash_splitter = thisFileShort.Split("\\".ToCharArray());
                        thisFileShort = slash_splitter[slash_splitter.Length - 1];
                    }
                }

                // Normalize to upper case to match the keys stored in the hash above
                thisFileShort = thisFileShort.ToUpper();

                // First, ensure that we have not already added a page for this
                if (file_to_page_hash.ContainsKey(thisFileShort))
                {
                    // Just add this file to the pre-existing page
                    file_to_page_hash[thisFileShort].Files.Add(newFileForMETS);
                }
                else
                {
                    // Move to the right part of the existing files list ( check for
                    // null BEFORE comparing, since next_file becomes null once the
                    // end of the existing list is passed )
                    while ((next_file != null) && (new_file.CompareTo(next_file) > 0))
                    {
                        previous_file = next_file;
                        if (next_file_counter < metsFileCollection.Count)
                        {
                            next_file = metsFileCollection[next_file_counter++];
                        }
                        else
                        {
                            next_file = null;
                        }
                    }

                    // Add the page for this and link the new file
                    Page_TreeNode newPage = new Page_TreeNode();
                    newPage.Files.Add(newFileForMETS);
                    file_to_page_hash[thisFileShort] = newPage;

                    // Get the parent division and add this page in the right place.
                    // Check there was a previous page, otherwise this inserts at the very beginning
                    if (previous_file == null)
                    {
                        // Walk down the first branch of the tree to the first division
                        // which holds a page, and insert the new page at its beginning
                        abstract_TreeNode abstractNode = BIBPackage.Divisions.Physical_Tree.Roots[0];
                        Division_TreeNode lastDivNode = (Division_TreeNode) abstractNode;
                        while (!abstractNode.Page)
                        {
                            lastDivNode = (Division_TreeNode) abstractNode;
                            if (lastDivNode.Nodes.Count > 0)
                            {
                                abstractNode = lastDivNode.Nodes[0];
                            }
                            else
                            {
                                break;
                            }
                        }
                        lastDivNode.Nodes.Insert(0, newPage);
                        metsFileCollection.Insert(new_file);
                        new_file.METS_Division = lastDivNode;
                        new_file.METS_Page = newPage;
                        next_file = metsFileCollection[0];
                    }
                    else
                    {
                        // Insert the new page immediately after the previous file's
                        // page, within the same parent division
                        Division_TreeNode parentDivNode = previous_file.METS_Division;
                        Page_TreeNode previousPageNode = previous_file.METS_Page;
                        int previousFileIndex = parentDivNode.Nodes.IndexOf(previousPageNode);
                        if (previousFileIndex + 1 >= parentDivNode.Nodes.Count)
                            parentDivNode.Nodes.Add(newPage);
                        else
                            parentDivNode.Nodes.Insert(previousFileIndex + 1, newPage);
                        next_file = previous_file;
                        next_file_counter--;
                        new_file.METS_Division = parentDivNode;
                        new_file.METS_Page = newPage;
                        metsFileCollection.Insert(new_file);
                    }
                }

                // Move to the next new file
                if (new_file_counter < addFiles.Count)
                {
                    new_file = addFiles[new_file_counter++];
                }
                else
                {
                    new_file = null;
                }
            }
        }
    }
}
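// Usage sketch ( illustrative only; the method name is hypothetical ): how
// Add_All_Files is typically invoked once a SobekCM_Item has been loaded and its
// Source_Directory points at the package folder on disk. The filter string shown
// is an example, not the only valid input.
private static void example_add_all_files(SobekCM_Item BIBPackage)
{
    // Add every TIFF, JPEG, and JPEG2000 file under the source directory and all
    // of its subfolders; images sharing the same root name in separate subfolders
    // are treated as the same page
    Add_All_Files(BIBPackage, "*.tif|*.jpg|*.jp2", true, true);
}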