/// <summary> Create a fully-populated test digital resource item, exercising essentially
/// every metadata field supported by the bib package (METS header, MODS, subjects,
/// related items, behaviors, coordinates, performing arts, oral history, IEEE-LOM,
/// divisions/files, and DAITSS archiving flags) </summary>
/// <param name="directory">Directory for the package source directory</param>
/// <returns>Fully built test bib package</returns>
public static SobekCM_Item Create(string directory)
{
    SobekCM_Item testPackage = new SobekCM_Item();

    // Add all the METS header information
    testPackage.METS_Header.Create_Date = new DateTime(2007, 1, 1);
    // NOTE(review): DateTime.Now makes the package nondeterministic between runs;
    // fine for a throwaway test fixture, but a fixed date would make output reproducible
    testPackage.METS_Header.Modify_Date = DateTime.Now;
    testPackage.METS_Header.Creator_Individual = "Mark Sullivan";
    testPackage.METS_Header.Add_Creator_Individual_Notes("Programmer of new SobekCM.Resource_Object");
    testPackage.METS_Header.Add_Creator_Individual_Notes("Adding coordinates");
    testPackage.METS_Header.Creator_Organization = "University of Florida";
    testPackage.METS_Header.Creator_Software = "SobekCM Bib Package Test";
    testPackage.METS_Header.RecordStatus_Enum = METS_Record_Status.COMPLETE;
    testPackage.METS_Header.Add_Creator_Org_Notes("This test package was done to test DLCs new METS package");

    // Add all the MODS elements
    // Abstracts: one English (with display label/type set afterwards) and one Finnish
    Abstract_Info testAbstract = testPackage.Bib_Info.Add_Abstract("This is a sample abstract", "en");
    testPackage.Bib_Info.Add_Abstract("Tämä on esimerkki abstrakteja", "fin");
    testAbstract.Display_Label = "Summary Abstract";
    testAbstract.Type = "summary";
    testPackage.Bib_Info.Access_Condition.Text = "All rights are reserved by source institution.";
    testPackage.Bib_Info.Access_Condition.Language = "en";
    testPackage.Bib_Info.Access_Condition.Type = "restrictions on use";
    testPackage.Bib_Info.Access_Condition.Display_Label = "Rights";
    testPackage.Bib_Info.Add_Identifier("000123234", "OCLC", "Electronic OCLC");
    testPackage.Bib_Info.Add_Identifier("182-asdsd-28k", "DOI");
    // Languages added three ways: by name+ISO code, by name only, and by ISO 639-2 code only
    testPackage.Bib_Info.Add_Language("English", String.Empty, "en");
    testPackage.Bib_Info.Add_Language("Finnish");
    testPackage.Bib_Info.Add_Language(String.Empty, "ita", String.Empty);
    testPackage.Bib_Info.Location.Holding_Code = "MVS";
    testPackage.Bib_Info.Location.Holding_Name = "From the Private Library of Mark Sullivan";
    testPackage.Bib_Info.Location.PURL = "http://www.uflib.ufl.edu/ufdc/?b=CA00000000";
    testPackage.Bib_Info.Location.Other_URL = "http://www.fnhm.edu";
    testPackage.Bib_Info.Location.Other_URL_Display_Label = "Specimen Information";
    testPackage.Bib_Info.Location.Other_URL_Note = "Specimen FLAS 125342 Database";
    testPackage.Bib_Info.Location.EAD_URL = "http://digital.uflib.ufl.edu/";
    testPackage.Bib_Info.Location.EAD_Name = "Digital Library Center Finding Guide";

    // Main entity (primary author) as a personal name with role
    testPackage.Bib_Info.Main_Entity_Name.Name_Type = Name_Info_Type_Enum.Personal;
    testPackage.Bib_Info.Main_Entity_Name.Full_Name = "Brown, B.F.";
    testPackage.Bib_Info.Main_Entity_Name.Terms_Of_Address = "Dr.";
    testPackage.Bib_Info.Main_Entity_Name.Display_Form = "B.F. Brown";
    testPackage.Bib_Info.Main_Entity_Name.Affiliation = "Chemistry Dept., American University";
    testPackage.Bib_Info.Main_Entity_Name.Description = "Chemistry Professor Emeritus";
    testPackage.Bib_Info.Main_Entity_Name.Add_Role("Author");

    // Zoological taxonomy metadata module ( module is registered first, then populated )
    Zoological_Taxonomy_Info taxonInfo = new Zoological_Taxonomy_Info();
    testPackage.Add_Metadata_Module(GlobalVar.ZOOLOGICAL_TAXONOMY_METADATA_MODULE_KEY, taxonInfo);
    taxonInfo.Scientific_Name = "Ctenomys sociabilis";
    taxonInfo.Higher_Classification = "Animalia; Chordata; Vertebrata; Mammalia; Theria; Eutheria; Rodentia; Hystricognatha; Hystricognathi; Ctenomyidae; Ctenomyini; Ctenomys";
    taxonInfo.Kingdom = "Animalia";
    taxonInfo.Phylum = "Chordata";
    taxonInfo.Class = "Mammalia";
    taxonInfo.Order = "Rodentia";
    taxonInfo.Family = "Ctenomyidae";
    taxonInfo.Genus = "Ctenomys";
    taxonInfo.Specific_Epithet = "sociabilis";
    taxonInfo.Taxonomic_Rank = "species";
    taxonInfo.Common_Name = "Social Tuco-Tuco";

    // Additional named entities: personal, conference, and corporate name forms
    Name_Info name1 = new Name_Info();
    name1.Name_Type = Name_Info_Type_Enum.Personal;
    name1.Given_Name = "John Paul";
    name1.Terms_Of_Address = "Pope; II";
    name1.Dates = "1920-2002";
    name1.User_Submitted = true;
    testPackage.Bib_Info.Add_Named_Entity(name1);
    Name_Info name2 = new Name_Info();
    name2.Name_Type = Name_Info_Type_Enum.Conference;
    name2.Full_Name = "Paris Peace Conference (1919-1920)";
    name2.Dates = "1919-1920";
    testPackage.Bib_Info.Add_Named_Entity(name2);
    Name_Info name3 = new Name_Info();
    name3.Name_Type = Name_Info_Type_Enum.Corporate;
    name3.Full_Name = "United States -- Court of Appeals (2nd Court)";
    testPackage.Bib_Info.Add_Named_Entity(name3);
    // name4 carries ID "NAM4", referenced below by affiliation1.Name_Reference
    Name_Info name4 = new Name_Info();
    name4.Name_Type = Name_Info_Type_Enum.Personal;
    name4.Full_Name = "Wilson, Mary";
    name4.Display_Form = "Mary 'Weels' Wilson";
    name4.Given_Name = "Mary";
    name4.Family_Name = "Wilson";
    name4.ID = "NAM4";
    name4.Terms_Of_Address = "2nd";
    name4.Add_Role("illustrator");
    name4.Add_Role("cartographer");
    testPackage.Bib_Info.Add_Named_Entity(name4);

    // Donor name ( stored separately from the named entities )
    Name_Info donor = new Name_Info();
    donor.Name_Type = Name_Info_Type_Enum.Personal;
    donor.Full_Name = "Livingston, Arthur";
    donor.Description = "Gift in honor of Arthur Livingston";
    donor.Terms_Of_Address = "3rd";
    donor.Add_Role("honoree", String.Empty);
    testPackage.Bib_Info.Donor = donor;

    // Main title plus translated, alternative, and uniform other-titles
    testPackage.Bib_Info.Main_Title.NonSort = "The ";
    testPackage.Bib_Info.Main_Title.Title = "Man Who Would Be King";
    testPackage.Bib_Info.Main_Title.Subtitle = "The story of succession in England";
    Title_Info title1 = new Title_Info("homme qui voulut être roi", Title_Type_Enum.Translated);
    title1.NonSort = "L'";
    title1.Language = "fr";
    testPackage.Bib_Info.Add_Other_Title(title1);
    Title_Info title2 = new Title_Info();
    title2.Title = "Man Who Be King";
    title2.Display_Label = "also known as";
    title2.NonSort = "The";
    title2.Title_Type = Title_Type_Enum.Alternative;
    testPackage.Bib_Info.Add_Other_Title(title2);
    Title_Info title3 = new Title_Info();
    title3.Title = "Great works of England";
    title3.Authority = "naf";
    title3.Add_Part_Name("Second Portion");
    title3.Add_Part_Number("2nd");
    title3.Title_Type = Title_Type_Enum.Uniform;
    title3.User_Submitted = true;
    testPackage.Bib_Info.Add_Other_Title(title3);

    // Notes of several types ( funding, plain, performers )
    testPackage.Bib_Info.Add_Note("Funded by the NEH", Note_Type_Enum.Funding);
    testPackage.Bib_Info.Add_Note("Based on a play which originally appeared in France as \"Un peu plus tard, un peu plus tôt\"").User_Submitted = true;
    testPackage.Bib_Info.Add_Note("Anne Baxter (Louise), Maria Perschy (Angela), Gustavo Rojo (Bill), Reginald Gilliam (Mr. Johnson), [Catherine Elliot?] (Aunt Sallie), Ben Tatar (waiter)", Note_Type_Enum.Performers, "Performed By");

    // Origin / publication information
    testPackage.Bib_Info.Origin_Info.Add_Place("New York", "nyu", "usa");
    testPackage.Bib_Info.Origin_Info.Date_Issued = "1992";
    testPackage.Bib_Info.Origin_Info.MARC_DateIssued_Start = "1992";
    testPackage.Bib_Info.Origin_Info.MARC_DateIssued_End = "1993";
    testPackage.Bib_Info.Origin_Info.Date_Copyrighted = "1999";
    testPackage.Bib_Info.Origin_Info.Edition = "2nd";
    Publisher_Info newPub = testPackage.Bib_Info.Add_Publisher("Published for the American Vacuum Society by the American Institute of Physics");
    newPub.Add_Place("New York, New York");
    newPub.User_Submitted = true;
    testPackage.Bib_Info.Add_Publisher("University of Florida Press House").Add_Place("Gainesville, FL");
    testPackage.Bib_Info.Add_Manufacturer("Addison Randly Publishing House");

    // Physical description of the original
    testPackage.Bib_Info.Original_Description.Extent = "1 sound disc (56 min.) : digital ; 3/4 in.";
    testPackage.Bib_Info.Original_Description.Add_Note("The sleeve of this sound disc was damaged in a fire");
    testPackage.Bib_Info.Original_Description.Add_Note("The disc has a moderate amount of scratches, but still plays");

    // Serial part information ( display strings plus sortable numeric indexes );
    // "Syyskuu" is Finnish for September, matching Month_Index = 9
    testPackage.Bib_Info.Series_Part_Info.Day = "18";
    testPackage.Bib_Info.Series_Part_Info.Day_Index = 18;
    testPackage.Bib_Info.Series_Part_Info.Month = "Syyskuu";
    testPackage.Bib_Info.Series_Part_Info.Month_Index = 9;
    testPackage.Bib_Info.Series_Part_Info.Year = "1992";
    testPackage.Bib_Info.Series_Part_Info.Year_Index = 1992;
    testPackage.Bib_Info.Series_Part_Info.Enum1 = "Volume 12";
    testPackage.Bib_Info.Series_Part_Info.Enum1_Index = 12;
    testPackage.Bib_Info.Series_Part_Info.Enum2 = "Issue 3";
    testPackage.Bib_Info.Series_Part_Info.Enum2_Index = 3;
    testPackage.Bib_Info.Series_Part_Info.Enum3 = "Part 1";
    testPackage.Bib_Info.Series_Part_Info.Enum3_Index = 1;
    // Matching three-level serial hierarchy ( year / month / day )
    testPackage.Behaviors.Serial_Info.Add_Hierarchy(1, 1992, "1992");
    testPackage.Behaviors.Serial_Info.Add_Hierarchy(2, 9, "Syyskuu");
    testPackage.Behaviors.Serial_Info.Add_Hierarchy(3, 18, "18");
    testPackage.Bib_Info.SeriesTitle.Title = "Shakespeare's most famous musicals";
    testPackage.Bib_Info.Add_Target_Audience("young adults");
    testPackage.Bib_Info.Add_Target_Audience("adolescent", "marctarget");
    testPackage.Bib_Info.SobekCM_Type = TypeOfResource_SobekCM_Enum.Newspaper;

    // Add cartographic subject
    Subject_Info_Cartographics newCartographics = testPackage.Bib_Info.Add_Cartographics_Subject();
    newCartographics.Scale = "1:2000";
    newCartographics.Projection = "Conical Projection";
    newCartographics.Coordinates = "E 72°--E 148°/N 13°--N 18°";

    // Add hierarchical geographic subject ( full continent-to-city chain )
    Subject_Info_HierarchicalGeographic hierarchical = testPackage.Bib_Info.Add_Hierarchical_Geographic_Subject();
    hierarchical.Continent = "North America";
    hierarchical.Country = "United States of America";
    hierarchical.State = "Kansas";
    hierarchical.County = "Butler";
    hierarchical.City = "Augusta";

    // Add hierarchical geographic subject ( region only )
    Subject_Info_HierarchicalGeographic hierarchical2 = testPackage.Bib_Info.Add_Hierarchical_Geographic_Subject();
    hierarchical2.Region = "Arctic Ocean";

    // Add hierarchical geographic subject ( island / province / territory / area fields )
    Subject_Info_HierarchicalGeographic hierarchical3 = testPackage.Bib_Info.Add_Hierarchical_Geographic_Subject();
    hierarchical3.Island = "Puerto Rico";
    hierarchical3.Language = "English";
    hierarchical3.Province = "Provincial";
    hierarchical3.Territory = "Puerto Rico";
    hierarchical3.Area = "Intercontinental areas (Western Hemisphere)";

    // Add a name subject
    Subject_Info_Name subjname1 = testPackage.Bib_Info.Add_Name_Subject();
    subjname1.Authority = "lcsh";
    subjname1.Full_Name = "Garcia Lorca, Federico";
    subjname1.Dates = "1898-1936";
    subjname1.Add_Geographic("Russia");
    subjname1.Add_Geographic("Moscow");
    subjname1.Add_Genre("maps");
    subjname1.User_Submitted = true;

    // Add a title information subject
    Subject_Info_TitleInfo subjtitle1 = testPackage.Bib_Info.Add_Title_Subject();
    subjtitle1.Title_Type = Title_Type_Enum.Uniform;
    subjtitle1.Authority = "naf";
    subjtitle1.Title = "Missale Carnotense";

    // Add a standard subject
    Subject_Info_Standard subject1 = testPackage.Bib_Info.Add_Subject();
    subject1.Authority = "lcsh";
    subject1.Add_Topic("Real property");
    subject1.Add_Geographic("Mississippi");
    subject1.Add_Geographic("Tippah County");
    subject1.Add_Genre("Maps");

    // Add a standard subject
    Subject_Info_Standard subject2 = testPackage.Bib_Info.Add_Subject();
    subject2.Add_Occupation("Migrant laborers");
    subject2.Add_Genre("School district case files");

    // Add a standard subject
    Subject_Info_Standard subject3 = testPackage.Bib_Info.Add_Subject();
    subject3.Authority = "lctgm";
    subject3.Add_Topic("Educational buildings");
    subject3.Add_Geographic("Washington (D.C.)");
    subject3.Add_Temporal("1890-1910");

    // Add a standard subject ( French-language, rvm authority )
    Subject_Info_Standard subject4 = testPackage.Bib_Info.Add_Subject();
    subject4.Authority = "rvm";
    subject4.Language = "french";
    subject4.Add_Topic("Église catholique");
    subject4.Add_Topic("Histoire");
    subject4.Add_Temporal("20e siècle");

    // Add record information
    testPackage.Bib_Info.Record.Add_Catalog_Language(new Language_Info("English", "eng", "en"));
    testPackage.Bib_Info.Record.Add_Catalog_Language(new Language_Info("French", "fre", "fr"));
    testPackage.Bib_Info.Record.MARC_Creation_Date = "080303";
    testPackage.Bib_Info.Record.Add_MARC_Record_Content_Sources("FUG");
    testPackage.Bib_Info.Record.Record_Origin = "Imported from (OCLC)001213124";

    // Test the items which are in the non-MODS portion of the Bib_Info object
    testPackage.BibID = "MVS0000001";
    testPackage.VID = "00001";
    testPackage.Bib_Info.SortDate = 1234;
    testPackage.Bib_Info.SortTitle = "MAN WHO WOULD BE KING";
    testPackage.Bib_Info.Add_Temporal_Subject(1990, 2002, "Recent history");
    testPackage.Bib_Info.Add_Temporal_Subject(1990, 2002, "Lähihistoria");
    testPackage.Bib_Info.Source.Code = "UF";
    testPackage.Bib_Info.Source.Statement = "University of Florida";

    // Add an affiliation ( Name_Reference "NAM4" links back to name4 above )
    Affiliation_Info affiliation1 = new Affiliation_Info();
    affiliation1.University = "University of Florida";
    affiliation1.Campus = "Gainesville Campus";
    affiliation1.College = "College of Engineering";
    affiliation1.Department = "Computer Engineering Department";
    affiliation1.Unit = "Robotics";
    affiliation1.Name_Reference = "NAM4";
    testPackage.Bib_Info.Add_Affiliation(affiliation1);

    // Add a related item ( the predecessor title )
    Related_Item_Info relatedItem1 = new Related_Item_Info();
    relatedItem1.SobekCM_ID = "UF00001234";
    relatedItem1.Relationship = Related_Item_Type_Enum.Preceding;
    relatedItem1.Publisher = "Gainesville Sun Publishing House";
    relatedItem1.Add_Note(new Note_Info("Digitized with funding from NEH", Note_Type_Enum.Funding));
    relatedItem1.Add_Note(new Note_Info("Gainesville Bee was the precursor to this item"));
    relatedItem1.Main_Title.NonSort = "The";
    relatedItem1.Main_Title.Title = "Gainesville Bee";
    relatedItem1.Add_Identifier("01234353", "oclc");
    relatedItem1.Add_Identifier("002232311", "aleph");
    Name_Info ri_name = new Name_Info();
    ri_name.Full_Name = "Hills, Bryan";
    ri_name.Terms_Of_Address = "Mr.";
    ri_name.Name_Type = Name_Info_Type_Enum.Personal;
    ri_name.Add_Role("author");
    relatedItem1.Add_Name(ri_name);
    relatedItem1.URL = @"http://www.uflib.ufl.edu/ufdc/?b=UF00001234";
    relatedItem1.URL_Display_Label = "Full Text";
    testPackage.Bib_Info.Add_Related_Item(relatedItem1);

    // Add another related item ( the successor title )
    Related_Item_Info relatedItem2 = new Related_Item_Info();
    relatedItem2.Relationship = Related_Item_Type_Enum.Succeeding;
    relatedItem2.SobekCM_ID = "UF00009999";
    relatedItem2.Main_Title.NonSort = "The";
    relatedItem2.Main_Title.Title = "Daily Sun";
    relatedItem2.Add_Identifier("0125437", "oclc");
    // NOTE(review): "occured" typo is inside the original test-data string; left untouched
    // since this is runtime data, not a comment
    relatedItem2.Add_Note("Name change occured in Fall 1933");
    relatedItem2.Start_Date = "Fall 1933";
    relatedItem2.End_Date = "December 31, 1945";
    testPackage.Bib_Info.Add_Related_Item(relatedItem2);

    // Add some processing parameters ( aggregations, skins, wordmarks, thumbnail )
    testPackage.Behaviors.Add_Aggregation("JUV");
    testPackage.Behaviors.Add_Aggregation("DLOC");
    testPackage.Behaviors.Add_Aggregation("DLOSA1");
    testPackage.Behaviors.Add_Aggregation("ALICE");
    testPackage.Behaviors.Add_Aggregation("ARTE");
    testPackage.Web.GUID = "GUID!";
    testPackage.Behaviors.Add_Wordmark("DLOC");
    testPackage.Behaviors.Add_Wordmark("UFSPEC");
    testPackage.Behaviors.Main_Thumbnail = "00001thm.jpg";

    // Add some downloads
    testPackage.Divisions.Download_Tree.Add_File("MVS_Complete.PDF");
    testPackage.Divisions.Download_Tree.Add_File("MVS_Complete.MP2");
    testPackage.Divisions.Download_Tree.Add_File("MVS_Part1.MP2");
    testPackage.Divisions.Download_Tree.Add_File("MVS_Part1.PDF");

    // Add some coordinate information ( points, a polygon with inner points, and a line )
    GeoSpatial_Information geoSpatial = new GeoSpatial_Information();
    testPackage.Add_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY, geoSpatial);
    geoSpatial.Add_Point(29.530151, -82.301459, "Lake Wauberg");
    geoSpatial.Add_Point(29.634352, -82.350640, "Veterinary School");
    Coordinate_Polygon polygon = new Coordinate_Polygon();
    polygon.Label = "University of Florida Campus";
    polygon.Add_Edge_Point(new Coordinate_Point(29.651435, -82.339869, String.Empty));
    polygon.Add_Edge_Point(new Coordinate_Point(29.641216, -82.340298, String.Empty));
    polygon.Add_Edge_Point(new Coordinate_Point(29.629503, -82.371969, String.Empty));
    polygon.Add_Edge_Point(new Coordinate_Point(29.649645, -82.371712, String.Empty));
    polygon.Add_Inner_Point(29.649794, -82.351971, "Stadium");
    polygon.Add_Inner_Point(29.650988, -82.341156, "Library");
    geoSpatial.Add_Polygon(polygon);
    Coordinate_Line line = new Coordinate_Line();
    line.Label = "Waldo Road";
    line.Add_Point(29.652852, -82.310944, "Gainesville");
    line.Add_Point(29.716681, -82.268372, String.Empty);
    line.Add_Point(29.791494, -82.167778, "Waldo");
    geoSpatial.Add_Line(line);

    // Add some performing arts information
    Performing_Arts_Info partInfo = new Performing_Arts_Info();
    testPackage.Add_Metadata_Module("PerformingArts", partInfo);
    partInfo.Performance = "Hamlet";
    partInfo.Performance_Date = "August 12, 1923";
    Performer performer1 = partInfo.Add_Performer("Sullivan, Mark");
    performer1.Sex = "M";
    performer1.LifeSpan = "1873-";
    performer1.Occupation = "actor";
    performer1.Title = "Mr.";
    Performer performer2 = partInfo.Add_Performer("Waldbart, Julia");
    performer2.Sex = "F";
    performer2.LifeSpan = "1876-";
    performer2.Occupation = "actress";
    performer2.Title = "Mrs.";

    // Add some oral history information
    Oral_Interview_Info oralInfo = new Oral_Interview_Info();
    testPackage.Add_Metadata_Module( "OralInterview", oralInfo);
    oralInfo.Interviewee = "Edwards, Herm";
    oralInfo.Interviewer = "Proctor, Samual";

    // Add some learning object resource information ( IEEE-LOM )
    LearningObjectMetadata lomInfo = new LearningObjectMetadata();
    testPackage.Add_Metadata_Module( GlobalVar.IEEE_LOM_METADATA_MODULE_KEY, lomInfo );
    lomInfo.AggregationLevel = AggregationLevelEnum.level3;
    lomInfo.Status = StatusEnum.draft;
    // Two system requirements: an operating-system range and a software range
    LOM_System_Requirements lomReq1 = new LOM_System_Requirements();
    lomReq1.RequirementType = RequirementTypeEnum.operating_system;
    lomReq1.Name.Value = "Windows";
    lomReq1.MinimumVersion = "Windows XP";
    lomReq1.MaximumVersion = "Windows 7";
    lomInfo.Add_SystemRequirements(lomReq1);
    LOM_System_Requirements lomReq2 = new LOM_System_Requirements();
    lomReq2.RequirementType = RequirementTypeEnum.software;
    lomReq2.Name.Value = "Java SDK";
    lomReq2.MinimumVersion = "1.7.1";
    lomReq2.MaximumVersion = "2.09";
    lomInfo.Add_SystemRequirements(lomReq2);
    lomInfo.InteractivityType = InteractivityTypeEnum.mixed;
    lomInfo.Add_LearningResourceType("exercise");
    lomInfo.Add_LearningResourceType("Tutorials", "encdlwebpedagogicaltype");
    lomInfo.InteractivityLevel = InteractivityLevelEnum.high;
    lomInfo.Add_IntendedEndUserRole(IntendedEndUserRoleEnum.learner);
    lomInfo.Add_Context("Undergraduate lower division", "enclearningcontext");
    lomInfo.Add_Context("15", "grade");
    lomInfo.Add_Context("16", "grade");
    lomInfo.Add_Context("5", "group");
    lomInfo.Add_TypicalAgeRange("suitable for children over 7", "en");
    lomInfo.Add_TypicalAgeRange("2-8");
    lomInfo.DifficultyLevel = DifficultyLevelEnum.medium;
    // ISO 8601 duration: 45 minutes
    lomInfo.TypicalLearningTime = "PT45M";

    // LOM classification 1: discipline taxonomy ( ARIADNE source, three-level taxon path )
    LOM_Classification lomClassification1 = new LOM_Classification();
    lomInfo.Add_Classification(lomClassification1);
    lomClassification1.Purpose.Value = "Discipline";
    LOM_TaxonPath lomTaxonPath1 = new LOM_TaxonPath();
    lomClassification1.Add_TaxonPath(lomTaxonPath1);
    lomTaxonPath1.Add_SourceName("ARIADNE");
    LOM_Taxon lomTaxon1 = new LOM_Taxon();
    lomTaxonPath1.Add_Taxon(lomTaxon1);
    lomTaxon1.ID = "BF120";
    lomTaxon1.Add_Entry("Work_History", "en");
    lomTaxon1.Add_Entry("Historie", "nl");
    LOM_Taxon lomTaxon2 = new LOM_Taxon();
    lomTaxonPath1.Add_Taxon(lomTaxon2);
    lomTaxon2.ID = "BF120.1";
    lomTaxon2.Add_Entry("American Work_History", "en");
    LOM_Taxon lomTaxon3 = new LOM_Taxon();
    lomTaxonPath1.Add_Taxon(lomTaxon3);
    lomTaxon3.ID = "BF120.1.4";
    lomTaxon3.Add_Entry("American Civil War", "en");

    // LOM classification 2: educational objective ( Common Core Standards, two taxon paths )
    LOM_Classification lomClassification2 = new LOM_Classification();
    lomInfo.Add_Classification(lomClassification2);
    lomClassification2.Purpose.Value = "Educational Objective";
    LOM_TaxonPath lomTaxonPath2 = new LOM_TaxonPath();
    lomClassification2.Add_TaxonPath(lomTaxonPath2);
    lomTaxonPath2.Add_SourceName("Common Core Standards", "en");
    LOM_Taxon lomTaxon4 = new LOM_Taxon();
    lomTaxonPath2.Add_Taxon(lomTaxon4);
    lomTaxon4.ID = "CCS.Math.Content";
    LOM_Taxon lomTaxon5 = new LOM_Taxon();
    lomTaxonPath2.Add_Taxon(lomTaxon5);
    lomTaxon5.ID = "3";
    lomTaxon5.Add_Entry("Grade 3", "en");
    LOM_Taxon lomTaxon6 = new LOM_Taxon();
    lomTaxonPath2.Add_Taxon(lomTaxon6);
    lomTaxon6.ID = "OA";
    lomTaxon6.Add_Entry("Operations and Algebraic Thinking", "en");
    LOM_Taxon lomTaxon7 = new LOM_Taxon();
    lomTaxonPath2.Add_Taxon(lomTaxon7);
    lomTaxon7.ID = "A";
    lomTaxon7.Add_Entry("Represent and solve problems involving multiplication and division.", "en");
    LOM_Taxon lomTaxon8 = new LOM_Taxon();
    lomTaxonPath2.Add_Taxon(lomTaxon8);
    lomTaxon8.ID = "3";
    lomTaxon8.Add_Entry("Use multiplication and division within 100 to solve word problems in situations involving equal groups, arrays, and measurement quantities, e.g., by using drawings and equations with a symbol for the unknown number to represent the problem.", "en");
    // Second taxon path under the same classification ( sibling standard CCS.Math.Content.3.OA.A.4 )
    LOM_TaxonPath lomTaxonPath3 = new LOM_TaxonPath();
    lomClassification2.Add_TaxonPath(lomTaxonPath3);
    lomTaxonPath3.Add_SourceName("Common Core Standards", "en");
    LOM_Taxon lomTaxon14 = new LOM_Taxon();
    lomTaxonPath3.Add_Taxon(lomTaxon14);
    lomTaxon14.ID = "CCS.Math.Content";
    LOM_Taxon lomTaxon15 = new LOM_Taxon();
    lomTaxonPath3.Add_Taxon(lomTaxon15);
    lomTaxon15.ID = "3";
    lomTaxon15.Add_Entry("Grade 3", "en");
    LOM_Taxon lomTaxon16 = new LOM_Taxon();
    lomTaxonPath3.Add_Taxon(lomTaxon16);
    lomTaxon16.ID = "OA";
    lomTaxon16.Add_Entry("Operations and Algebraic Thinking", "en");
    LOM_Taxon lomTaxon17 = new LOM_Taxon();
    lomTaxonPath3.Add_Taxon(lomTaxon17);
    lomTaxon17.ID = "A";
    lomTaxon17.Add_Entry("Represent and solve problems involving multiplication and division.", "en");
    LOM_Taxon lomTaxon18 = new LOM_Taxon();
    lomTaxonPath3.Add_Taxon(lomTaxon18);
    lomTaxon18.ID = "4";
    lomTaxon18.Add_Entry("Determine the unknown whole number in a multiplication or division equation relating three whole numbers. For example, determine the unknown number that makes the equation true in each of the equations 8 × ? = 48, 5 = _ ÷ 3, 6 × 6 = ?", "en");

    // Add some views and interfaces
    testPackage.Behaviors.Clear_Web_Skins();
    testPackage.Behaviors.Add_Web_Skin("dLOC");
    testPackage.Behaviors.Add_Web_Skin("UFDC");
    testPackage.Behaviors.Add_View(View_Enum.JPEG2000);
    testPackage.Behaviors.Add_View(View_Enum.JPEG);
    testPackage.Behaviors.Add_View(View_Enum.RELATED_IMAGES);
    testPackage.Behaviors.Add_View(View_Enum.HTML, "Full Document", "MVS001214.html");

    // Create the chapters and pages and link them
    Division_TreeNode chapter1 = new Division_TreeNode("Chapter", "First Chapter");
    Page_TreeNode page1 = new Page_TreeNode("First Page");
    Page_TreeNode page2 = new Page_TreeNode("Page 2");
    chapter1.Nodes.Add(page1);
    chapter1.Nodes.Add(page2);
    Division_TreeNode chapter2 = new Division_TreeNode("Chapter", "Last Chapter");
    Page_TreeNode page3 = new Page_TreeNode("Page 3");
    Page_TreeNode page4 = new Page_TreeNode("Last Page");
    chapter2.Nodes.Add(page3);
    chapter2.Nodes.Add(page4);
    testPackage.Divisions.Physical_Tree.Roots.Add(chapter1);
    testPackage.Divisions.Physical_Tree.Roots.Add(chapter2);

    // Create the files ( jp2/jpg carry width and height; tif has no dimensions )
    SobekCM_File_Info file1_1 = new SobekCM_File_Info("2000626_0001.jp2", 2120, 1100);
    SobekCM_File_Info file1_2 = new SobekCM_File_Info("2000626_0001.jpg", 630, 330);
    SobekCM_File_Info file1_3 = new SobekCM_File_Info("2000626_0001.tif");
    SobekCM_File_Info file2_1 = new SobekCM_File_Info("2000626_0002.jp2", 1754, 2453);
    SobekCM_File_Info file2_2 = new SobekCM_File_Info("2000626_0002.jpg", 630, 832);
    SobekCM_File_Info file2_3 = new SobekCM_File_Info("2000626_0002.tif");
    SobekCM_File_Info file3_1 = new SobekCM_File_Info("2000626_0003.jp2", 2321, 1232);
    SobekCM_File_Info file3_2 = new SobekCM_File_Info("2000626_0003.jpg", 630, 342);
    SobekCM_File_Info file3_3 = new SobekCM_File_Info("2000626_0003.tif");
    SobekCM_File_Info file4_1 = new SobekCM_File_Info("2000626_0004.jp2", 2145, 1024);
    SobekCM_File_Info file4_2 = new SobekCM_File_Info("2000626_0004.jpg", 630, 326);
    SobekCM_File_Info file4_3 = new SobekCM_File_Info("2000626_0004.tif");

    // Link the files to the pages
    page1.Files.Add(file1_1);
    page1.Files.Add(file1_2);
    page1.Files.Add(file1_3);
    page2.Files.Add(file2_1);
    page2.Files.Add(file2_2);
    page2.Files.Add(file2_3);
    page3.Files.Add(file3_1);
    page3.Files.Add(file3_2);
    page3.Files.Add(file3_3);
    page4.Files.Add(file4_1);
    page4.Files.Add(file4_2);
    page4.Files.Add(file4_3);

    // Add the DAITSS information
    DAITSS_Info daitssInfo = new DAITSS_Info();
    daitssInfo.Account = "FTU";
    daitssInfo.SubAccount = "CLAS";
    daitssInfo.Project = "UFDC";
    daitssInfo.toArchive = true;
    testPackage.Add_Metadata_Module(GlobalVar.DAITSS_METADATA_MODULE_KEY, daitssInfo);

    // Save this package
    testPackage.Source_Directory = directory;
    return testPackage;
}
/// <summary> Reads a GML-encoded geo-spatial metadata section from the open XML reader and
/// adds every point, line, polygon, and circle found to the provided geo-spatial object </summary>
/// <param name="Input_XmlReader"> Open XmlReader, positioned within the geo-spatial metadata section </param>
/// <param name="geoInfo"> Geo-spatial information object to which all parsed features are added </param>
/// <param name="Options"> Dictionary of any options for this metadata reader (not currently used here) </param>
/// <returns> TRUE, always </returns>
/// <remarks> All numeric values are parsed with the invariant culture, since GML coordinate
/// text is machine-written with '.' as the decimal separator; the previous culture-sensitive
/// parsing silently corrupted coordinates under comma-decimal locales (e.g. fi-FI, de-DE). </remarks>
private bool Read_Metadata_Section(XmlReader Input_XmlReader, GeoSpatial_Information geoInfo, Dictionary<string, object> Options)
{
    // Loop through reading each XML node
    do
    {
        // Only element nodes can start a new coordinate feature
        if (Input_XmlReader.NodeType == XmlNodeType.Element)
        {
            switch (Input_XmlReader.Name)
            {
                case "gml:Point":
                    // Read the optional featureType and label attributes
                    string pointFeatureType = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("featureType"))
                        pointFeatureType = Input_XmlReader.Value;
                    string pointLabel = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("label"))
                        pointLabel = Input_XmlReader.Value;
                    // Read until the matching end element, looking for the coordinates
                    do
                    {
                        if ((Input_XmlReader.NodeType == XmlNodeType.EndElement) && (Input_XmlReader.Name == "gml:Point"))
                            break;
                        if ((Input_XmlReader.NodeType == XmlNodeType.Element) && (Input_XmlReader.Name == "gml:Coordinates"))
                        {
                            Input_XmlReader.Read();
                            if ((Input_XmlReader.NodeType == XmlNodeType.Text) && (Input_XmlReader.Value.Trim().Length > 0))
                            {
                                // Expect a single "latitude, longitude" pair; guard against
                                // malformed text rather than throwing IndexOutOfRangeException
                                List<double> pointValues = Parse_Gml_Coordinates(Input_XmlReader.Value);
                                if (pointValues.Count >= 2)
                                    geoInfo.Add_Point(pointValues[0], pointValues[1], pointLabel, pointFeatureType);
                            }
                        }
                    } while (Input_XmlReader.Read());
                    break;

                case "gml:Line":
                    // Read the optional featureType and label attributes
                    string lineFeatureType = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("featureType"))
                        lineFeatureType = Input_XmlReader.Value;
                    string lineLabel = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("label"))
                        lineLabel = Input_XmlReader.Value;
                    // Read until the matching end element, looking for the coordinates
                    do
                    {
                        if ((Input_XmlReader.NodeType == XmlNodeType.EndElement) && (Input_XmlReader.Name == "gml:Line"))
                            break;
                        if ((Input_XmlReader.NodeType == XmlNodeType.Element) && (Input_XmlReader.Name == "gml:Coordinates"))
                        {
                            Input_XmlReader.Read();
                            if ((Input_XmlReader.NodeType == XmlNodeType.Text) && (Input_XmlReader.Value.Trim().Length > 0))
                            {
                                // Consume values in lat/long pairs; a trailing unpaired value is ignored
                                List<double> lineValues = Parse_Gml_Coordinates(Input_XmlReader.Value);
                                Coordinate_Line newline = new Coordinate_Line();
                                int i = 0;
                                while ((i + 2) <= lineValues.Count)
                                {
                                    // Point names keep the original "line0", "line2", ... convention
                                    newline.Add_Point(lineValues[i], lineValues[i + 1], "line" + i);
                                    i += 2;
                                }
                                newline.FeatureType = lineFeatureType;
                                newline.Label = lineLabel;
                                geoInfo.Add_Line(newline);
                            }
                        }
                    } while (Input_XmlReader.Read());
                    break;

                case "gml:Polygon":
                    // Read the optional featureType, polygonType, label, and rotation attributes
                    string polygonFeatureType = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("featureType"))
                        polygonFeatureType = Input_XmlReader.Value;
                    string polygonPolygonType = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("polygonType"))
                        polygonPolygonType = Input_XmlReader.Value;
                    string polygonLabel = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("label"))
                        polygonLabel = Input_XmlReader.Value;
                    double polygonRotation = 0;
                    if (Input_XmlReader.MoveToAttribute("rotation"))
                        polygonRotation = Convert.ToDouble(Input_XmlReader.Value, System.Globalization.CultureInfo.InvariantCulture);
                    // Read until the matching end element, looking for the coordinates
                    do
                    {
                        if ((Input_XmlReader.NodeType == XmlNodeType.EndElement) && (Input_XmlReader.Name == "gml:Polygon"))
                            break;
                        if ((Input_XmlReader.NodeType == XmlNodeType.Element) && (Input_XmlReader.Name == "gml:Coordinates"))
                        {
                            Input_XmlReader.Read();
                            if ((Input_XmlReader.NodeType == XmlNodeType.Text) && (Input_XmlReader.Value.Trim().Length > 0))
                            {
                                // Consume values in lat/long pairs as edge points of the polygon
                                List<double> polygonValues = Parse_Gml_Coordinates(Input_XmlReader.Value);
                                Coordinate_Polygon newPoly = new Coordinate_Polygon();
                                int i = 0;
                                while ((i + 2) <= polygonValues.Count)
                                {
                                    newPoly.Add_Edge_Point(polygonValues[i], polygonValues[i + 1]);
                                    i += 2;
                                }
                                newPoly.FeatureType = polygonFeatureType;
                                newPoly.PolygonType = polygonPolygonType;
                                newPoly.Label = polygonLabel;
                                newPoly.Rotation = polygonRotation;
                                geoInfo.Add_Polygon(newPoly);
                            }
                        }
                    } while (Input_XmlReader.Read());
                    break;

                case "gml:Circle":
                    // Read the optional featureType, label, and radius attributes
                    string circleFeatureType = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("featureType"))
                        circleFeatureType = Input_XmlReader.Value;
                    string circleLabel = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("label"))
                        circleLabel = Input_XmlReader.Value;
                    double circleRadius = 0;
                    if (Input_XmlReader.MoveToAttribute("radius"))
                        circleRadius = Convert.ToDouble(Input_XmlReader.Value, System.Globalization.CultureInfo.InvariantCulture);
                    // Read until the matching end element, looking for the center coordinates
                    do
                    {
                        if ((Input_XmlReader.NodeType == XmlNodeType.EndElement) && (Input_XmlReader.Name == "gml:Circle"))
                            break;
                        if ((Input_XmlReader.NodeType == XmlNodeType.Element) && (Input_XmlReader.Name == "gml:Coordinates"))
                        {
                            Input_XmlReader.Read();
                            if ((Input_XmlReader.NodeType == XmlNodeType.Text) && (Input_XmlReader.Value.Trim().Length > 0))
                            {
                                // Expect a single "latitude, longitude" center pair; guard against malformed text
                                List<double> circleValues = Parse_Gml_Coordinates(Input_XmlReader.Value);
                                if (circleValues.Count >= 2)
                                {
                                    Coordinate_Circle newCircle = new Coordinate_Circle(circleValues[0], circleValues[1], circleRadius, circleLabel, circleFeatureType);
                                    geoInfo.Add_Circle(newCircle);
                                }
                            }
                        }
                    } while (Input_XmlReader.Read());
                    break;
            }
        }
    } while (Input_XmlReader.Read());
    return true;
}

/// <summary> Parses every numeric token from a GML coordinate string into an ordered list of doubles </summary>
/// <param name="CoordinateText"> Raw text content of a gml:Coordinates element </param>
/// <returns> List of parsed values in document order; unparsable tokens (e.g. a lone '-') are skipped </returns>
/// <remarks> Uses the invariant culture so '.' is always the decimal separator, independent of the thread culture </remarks>
private static List<double> Parse_Gml_Coordinates(string CoordinateText)
{
    List<double> values = new List<double>();
    StringBuilder tokenBuilder = new StringBuilder();
    // Trailing space guarantees the final token is flushed by the loop below
    string padded = CoordinateText + ' ';
    foreach (char thisChar in padded)
    {
        if ((Char.IsNumber(thisChar)) || (thisChar == '.') || (thisChar == '-'))
        {
            tokenBuilder.Append(thisChar);
        }
        else if (tokenBuilder.Length > 0)
        {
            double parsedValue;
            // TryParse (instead of Parse) so malformed tokens are skipped rather than throwing
            if (double.TryParse(tokenBuilder.ToString(), System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture, out parsedValue))
            {
                values.Add(parsedValue);
            }
            tokenBuilder.Remove(0, tokenBuilder.Length);
        }
    }
    return values;
}
/// <summary> Reads metadata from an open stream and saves to the provided item/package </summary>
/// <param name="Input_Stream"> Open stream to read metadata from </param>
/// <param name="Return_Package"> Package into which to read the metadata </param>
/// <param name="Options"> Dictionary of any options which this metadata reader/writer may utilize </param>
/// <param name="Error_Message">[OUTPUT] Explanation of the error, if an error occurs during reading </param>
/// <returns> TRUE if successful, otherwise FALSE </returns>
/// <remarks> Accepts two options: (1) 'METS_File_ReaderWriter:Minimize_File_Info' which tells whether the reader
/// should just skip the file reading portion completely, and just read the bibliographic data ( Default is FALSE).
/// (2) 'METS_File_ReaderWriter:Support_Divisional_dmdSec_amdSec' </remarks>
public bool Read_Metadata(Stream Input_Stream, SobekCM_Item Return_Package, Dictionary<string, object> Options, out string Error_Message)
{
    Error_Message = String.Empty;

    // Read the options from the dictionary of options
    bool minimizeFileInfo = false;
    if (Options != null)
    {
        if (Options.ContainsKey("METS_File_ReaderWriter:Minimize_File_Info"))
            bool.TryParse(Options["METS_File_ReaderWriter:Minimize_File_Info"].ToString(), out minimizeFileInfo);

        if (Options.ContainsKey("METS_File_ReaderWriter:Support_Divisional_dmdSec_amdSec"))
        {
            // NOTE(review): this option is parsed but the resulting value is never used
            // anywhere in this method — confirm whether it should affect behavior
            bool supportDivisionalDmdSecAmdSec;
            bool.TryParse(Options["METS_File_ReaderWriter:Support_Divisional_dmdSec_amdSec"].ToString(), out supportDivisionalDmdSecAmdSec);
        }
    }

    // Keep a list of all the files created, by file id, as additional data is gathered
    // from the different locations ( amdSec, fileSec, structmap )
    Dictionary<string, SobekCM_File_Info> files_by_fileid = new Dictionary<string, SobekCM_File_Info>();

    // For now, to support the old way of doing downloads, build a list to hold
    // the deprecated download files.
    // NOTE(review): nothing in this method visibly adds to this list, so the
    // backward-compatibility region below may be dead code here — confirm against callers
    List<Download_Info_DEPRECATED> deprecatedDownloads = new List<Download_Info_DEPRECATED>();

    // Need to store the unanalyzed sections of dmdSec and amdSec until we determine if
    // the scope is the whole package, or the top-level div.  We use lists as the value since
    // several sections may have NO id and the METS may even (incorrectly) have multiple
    // sections with the same ID
    Dictionary<string, List<Unanalyzed_METS_Section>> dmdSec = new Dictionary<string, List<Unanalyzed_METS_Section>>();
    Dictionary<string, List<Unanalyzed_METS_Section>> amdSec = new Dictionary<string, List<Unanalyzed_METS_Section>>();

    // Dictionaries store the link between dmdSec and amdSec id's to single divisions
    Dictionary<string, abstract_TreeNode> division_dmdids = new Dictionary<string, abstract_TreeNode>();
    Dictionary<string, abstract_TreeNode> division_amdids = new Dictionary<string, abstract_TreeNode>();

    try
    {
        // Try to read the XML
        XmlReader r = new XmlTextReader(Input_Stream);

        // Begin stepping through each of the XML nodes
        while (r.Read())
        {
            #region Handle some processing instructions requested by Florida SUS's / FLVC (hope to deprecate)

            // Handle some processing instructions requested by Florida SUS's / FLVC
            if (r.NodeType == XmlNodeType.ProcessingInstruction)
            {
                if (r.Name.ToLower() == "fcla")
                {
                    string value = r.Value.ToLower();
                    if (value.IndexOf("fda=\"yes\"") >= 0)
                    {
                        DAITSS_Info daitssInfo = Return_Package.Get_Metadata_Module(GlobalVar.DAITSS_METADATA_MODULE_KEY) as DAITSS_Info;
                        if (daitssInfo == null)
                        {
                            daitssInfo = new DAITSS_Info();
                            Return_Package.Add_Metadata_Module(GlobalVar.DAITSS_METADATA_MODULE_KEY, daitssInfo);
                        }
                        daitssInfo.toArchive = true;
                    }
                    if (value.IndexOf("fda=\"no\"") >= 0)
                    {
                        DAITSS_Info daitssInfo2 = Return_Package.Get_Metadata_Module(GlobalVar.DAITSS_METADATA_MODULE_KEY) as DAITSS_Info;
                        if (daitssInfo2 == null)
                        {
                            daitssInfo2 = new DAITSS_Info();
                            Return_Package.Add_Metadata_Module(GlobalVar.DAITSS_METADATA_MODULE_KEY, daitssInfo2);
                        }
                        daitssInfo2.toArchive = false;
                    }
                }
            }

            #endregion

            if (r.NodeType == XmlNodeType.Element)
            {
                switch (r.Name.Replace("METS:", ""))
                {
                    case "mets":
                        if (r.MoveToAttribute("OBJID"))
                            Return_Package.METS_Header.ObjectID = r.Value;
                        break;

                    case "metsHdr":
                        read_mets_header(r.ReadSubtree(), Return_Package);
                        break;

                    case "dmdSec":
                    case "dmdSecFedora":
                        Unanalyzed_METS_Section thisDmdSec = store_dmd_sec(r.ReadSubtree());
                        if (dmdSec.ContainsKey(thisDmdSec.ID))
                            dmdSec[thisDmdSec.ID].Add(thisDmdSec);
                        else
                        {
                            // Collection initializer used here for consistency with the amdSec branch below
                            List<Unanalyzed_METS_Section> newDmdSecList = new List<Unanalyzed_METS_Section> { thisDmdSec };
                            dmdSec[thisDmdSec.ID] = newDmdSecList;
                        }
                        break;

                    case "amdSec":
                        Unanalyzed_METS_Section thisAmdSec = store_amd_sec(r.ReadSubtree());
                        if (amdSec.ContainsKey(thisAmdSec.ID))
                            amdSec[thisAmdSec.ID].Add(thisAmdSec);
                        else
                        {
                            List<Unanalyzed_METS_Section> newAmdSecList = new List<Unanalyzed_METS_Section> { thisAmdSec };
                            amdSec[thisAmdSec.ID] = newAmdSecList;
                        }
                        break;

                    case "fileSec":
                        read_file_sec(r.ReadSubtree(), minimizeFileInfo, files_by_fileid);
                        break;

                    case "structMap":
                        if (!r.IsEmptyElement)
                        {
                            read_struct_map(r.ReadSubtree(), Return_Package, files_by_fileid, division_dmdids, division_amdids);
                        }
                        break;

                    case "behaviorSec":
                        read_behavior_sec(r.ReadSubtree(), Return_Package);
                        break;
                }
            }
        }

        r.Close();
    }
    catch (Exception ee)
    {
        // FIX: this catch previously swallowed every exception silently ("Do nothing")
        // and the method still returned TRUE with an empty Error_Message, contradicting
        // the documented contract.  Report the failure and return FALSE instead.
        Error_Message = "Error caught reading METS-based metadata: " + ee.Message;
        Input_Stream.Close();
        return false;
    }

    Input_Stream.Close();

    // Load some options for interoperability
    Dictionary<string, object> options = new Dictionary<string, object>();
    options.Add("SobekCM_FileInfo_METS_amdSec_ReaderWriter:Files_By_FileID", files_by_fileid);

    #region Process the previously stored dmd sections

    // Now, process the previously stored dmd sections
    foreach (string thisDmdSecId in dmdSec.Keys)
    {
        // Could be multiple stored sections with the same (or no) ID
        foreach (Unanalyzed_METS_Section metsSection in dmdSec[thisDmdSecId])
        {
            XmlReader reader = XmlReader.Create(new StringReader(metsSection.Inner_XML));
            string mdtype = String.Empty;
            string othermdtype = String.Empty;
            while (reader.Read())
            {
                if (reader.NodeType == XmlNodeType.Element)
                {
                    if (reader.Name.ToLower().Replace("mets:", "") == "mdwrap")
                    {
                        if (reader.MoveToAttribute("MDTYPE"))
                            mdtype = reader.Value;
                        if (reader.MoveToAttribute("OTHERMDTYPE"))
                            othermdtype = reader.Value;

                        // Not crazy about this part, but sometimes people do not use the OTHERMDTYPE
                        // tag correctly, and just use the LABEL to differentiate the types
                        if ((mdtype == "OTHER") && (othermdtype.Length == 0) && (reader.MoveToAttribute("LABEL")))
                            othermdtype = reader.Value;

                        // Now, determine if this was a division-level read, or a package-wide
                        if (division_dmdids.ContainsKey(thisDmdSecId))
                        {
                            // Division level dmdSec — get the division
                            abstract_TreeNode node = division_dmdids[thisDmdSecId];

                            // Get an appropriate reader from the metadata configuration
                            iDivision_dmdSec_ReaderWriter rw = ResourceObjectSettings.MetadataConfig.Get_Division_DmdSec_ReaderWriter(mdtype, othermdtype);

                            // Is this dmdSec analyzable? (i.e., did we find an appropriate reader/writer?)
                            if (rw == null)
                            {
                                node.Add_Unanalyzed_DMDSEC(metsSection);
                            }
                            else
                            {
                                rw.Read_dmdSec(reader, node, options);
                            }
                        }
                        else
                        {
                            // Package-level dmdSec — get an appropriate reader from the metadata configuration
                            iPackage_dmdSec_ReaderWriter rw = ResourceObjectSettings.MetadataConfig.Get_Package_DmdSec_ReaderWriter(mdtype, othermdtype);

                            // Is this dmdSec analyzable? (i.e., did we find an appropriate reader/writer?)
                            if (rw == null)
                            {
                                Return_Package.Add_Unanalyzed_DMDSEC(metsSection);
                            }
                            else
                            {
                                rw.Read_dmdSec(reader, Return_Package, options);
                            }
                        }
                    }
                }
            }
        }
    }

    #endregion

    #region Process the previously stored amd sections

    // Now, process the previously stored amd sections
    foreach (string thisAmdSecId in amdSec.Keys)
    {
        // Could be multiple stored sections with the same (or no) ID
        foreach (Unanalyzed_METS_Section metsSection in amdSec[thisAmdSecId])
        {
            XmlReader reader = XmlReader.Create(new StringReader(metsSection.Inner_XML));
            string mdtype = String.Empty;
            string othermdtype = String.Empty;
            while (reader.Read())
            {
                if (reader.NodeType == XmlNodeType.Element)
                {
                    if (reader.Name.ToLower().Replace("mets:", "") == "mdwrap")
                    {
                        if (reader.MoveToAttribute("MDTYPE"))
                            mdtype = reader.Value;
                        if (reader.MoveToAttribute("OTHERMDTYPE"))
                            othermdtype = reader.Value;

                        // Package-level amdSec — get an appropriate reader from the metadata configuration
                        iPackage_amdSec_ReaderWriter rw = ResourceObjectSettings.MetadataConfig.Get_Package_AmdSec_ReaderWriter(mdtype, othermdtype);

                        // Is this amdSec analyzable? (i.e., did we find an appropriate reader/writer?)
                        if (rw == null)
                        {
                            Return_Package.Add_Unanalyzed_AMDSEC(metsSection);
                        }
                        else
                        {
                            rw.Read_amdSec(reader, Return_Package, options);
                        }
                    }
                }
            }
        }
    }

    #endregion

    #region Special code used for moving downloads into the structure map system, and out of the old SobekCM METS section

    // For backward compatability, move from the old download system to the
    // new structure. This has to happen here at the end so that we have access
    // Were there some downloads added here?
    if (deprecatedDownloads.Count > 0)
    {
        // Get the list of downloads from the download tree
        List<SobekCM_File_Info> newStructureDownloads = Return_Package.Divisions.Download_Tree.All_Files;

        // Step through each download in the old system
        foreach (Download_Info_DEPRECATED thisDownload in deprecatedDownloads)
        {
            // Get the label (if there is one)
            string label = thisDownload.Label;
            string filename = thisDownload.FileName;
            bool found = false;
            if ((filename.Length == 0) && (thisDownload.File_ID.Length > 0))
            {
                if (files_by_fileid.ContainsKey(thisDownload.File_ID))
                {
                    SobekCM_File_Info thisDownloadFile = files_by_fileid[thisDownload.File_ID];
                    filename = thisDownloadFile.System_Name;

                    // Ensure a file of this name doesn't already exist
                    foreach (SobekCM_File_Info existingFile in newStructureDownloads)
                    {
                        if (existingFile.System_Name.ToUpper().Trim() == filename.ToUpper().Trim())
                        {
                            found = true;
                            break;
                        }
                    }

                    // Not found, so add it
                    if (!found)
                    {
                        // Determine the label if it was missing or identical to file name
                        if ((label.Length == 0) || (label == filename))
                        {
                            label = filename;
                            int first_period_index = label.IndexOf('.');
                            if (first_period_index > 0)
                            {
                                label = label.Substring(0, first_period_index);
                            }
                        }

                        // Add the root to the download tree, if not existing
                        Division_TreeNode newRoot;
                        if (Return_Package.Divisions.Download_Tree.Roots.Count == 0)
                        {
                            newRoot = new Division_TreeNode("Main", String.Empty);
                            Return_Package.Divisions.Download_Tree.Roots.Add(newRoot);
                        }
                        else
                        {
                            newRoot = (Division_TreeNode) Return_Package.Divisions.Download_Tree.Roots[0];
                        }

                        // Add a page for this, with the provided label if there was one
                        Page_TreeNode newPage = new Page_TreeNode(label);
                        newRoot.Nodes.Add(newPage);

                        // Now, add this existing file
                        newPage.Files.Add(thisDownloadFile);

                        // Add to the list of files added (in case it appears twice)
                        newStructureDownloads.Add(thisDownloadFile);
                    }
                }
            }
            else
            {
                // Ensure a file of this name doesn't already exist
                foreach (SobekCM_File_Info existingFile in newStructureDownloads)
                {
                    if (existingFile.System_Name.ToUpper().Trim() == filename.ToUpper().Trim())
                    {
                        found = true;
                        break;
                    }
                }

                // Not found, so add it
                if (!found)
                {
                    // Determine the label if it was missing or identical to file name
                    if ((label.Length == 0) || (label == filename))
                    {
                        label = filename;
                        int first_period_index = label.IndexOf('.');
                        if (first_period_index > 0)
                        {
                            label = label.Substring(0, first_period_index);
                        }
                    }

                    // Add the root to the download tree, if not existing
                    Division_TreeNode newRoot;
                    if (Return_Package.Divisions.Download_Tree.Roots.Count == 0)
                    {
                        newRoot = new Division_TreeNode("Main", String.Empty);
                        Return_Package.Divisions.Download_Tree.Roots.Add(newRoot);
                    }
                    else
                    {
                        newRoot = (Division_TreeNode) Return_Package.Divisions.Download_Tree.Roots[0];
                    }

                    // Add a page for this, with the provided label if there was one
                    Page_TreeNode newPage = new Page_TreeNode(label);
                    newRoot.Nodes.Add(newPage);

                    // Now, add this new file by filename
                    SobekCM_File_Info thisDownloadFile = new SobekCM_File_Info(filename);
                    newPage.Files.Add(thisDownloadFile);

                    // Add to the list of files added (in case it appears twice)
                    newStructureDownloads.Add(thisDownloadFile);
                }
            }
        }
    }

    #endregion

    #region Special code for distributing any page-level coordinate information read from the old SobekCM coordinate metadata

    // Get the geospatial data
    GeoSpatial_Information geoSpatial = Return_Package.Get_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY) as GeoSpatial_Information;
    if ((geoSpatial != null) && (geoSpatial.Polygon_Count > 0))
    {
        // See if any has the page sequence filled out, which means it came from the old metadata system
        bool redistribute = false;
        foreach (Coordinate_Polygon thisPolygon in geoSpatial.Polygons)
        {
            if (thisPolygon.Page_Sequence > 0)
            {
                redistribute = true;
                break;
            }
        }

        // If we need to redistribute, get started!
        if (redistribute)
        {
            // Get the pages, by sequence
            List<abstract_TreeNode> pagesBySequence = Return_Package.Divisions.Physical_Tree.Pages_PreOrder;
            List<Coordinate_Polygon> polygonsToRemove = new List<Coordinate_Polygon>();

            // Step through each polygon
            foreach (Coordinate_Polygon thisPolygon in geoSpatial.Polygons)
            {
                if ((thisPolygon.Page_Sequence > 0) && (thisPolygon.Page_Sequence <= pagesBySequence.Count))
                {
                    // Get the page
                    abstract_TreeNode thisPageFromSequence = pagesBySequence[thisPolygon.Page_Sequence - 1];

                    // We can assume this page does not already have the coordinates
                    GeoSpatial_Information thisPageCoord = new GeoSpatial_Information();
                    thisPageFromSequence.Add_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY, thisPageCoord);
                    thisPageCoord.Add_Polygon(thisPolygon);

                    // Remove this from the package-level coordinates
                    polygonsToRemove.Add(thisPolygon);
                }
            }

            // Now, remove all polygons flagged to be removed
            foreach (Coordinate_Polygon thisPolygon in polygonsToRemove)
            {
                geoSpatial.Remove_Polygon(thisPolygon);
            }
        }
    }

    #endregion

    #region Copy any serial hierarchy in the Behaviors.Serial_Info part into the bib portion, if not there

    // Do some final cleanup on the SERIAL HIERARCHY
    if ((Return_Package.Behaviors.hasSerialInformation) && (Return_Package.Behaviors.Serial_Info.Count > 0))
    {
        // Only copy when the bib portion does not already carry serial information
        if ((Return_Package.Bib_Info.Series_Part_Info.Enum1.Length == 0) && (Return_Package.Bib_Info.Series_Part_Info.Year.Length == 0))
        {
            if (Return_Package.Bib_Info.SobekCM_Type == TypeOfResource_SobekCM_Enum.Newspaper)
            {
                // Newspapers use Year / Month / Day
                Return_Package.Bib_Info.Series_Part_Info.Year = Return_Package.Behaviors.Serial_Info[0].Display;
                Return_Package.Bib_Info.Series_Part_Info.Year_Index = Return_Package.Behaviors.Serial_Info[0].Order;
                if (Return_Package.Behaviors.Serial_Info.Count > 1)
                {
                    Return_Package.Bib_Info.Series_Part_Info.Month = Return_Package.Behaviors.Serial_Info[1].Display;
                    Return_Package.Bib_Info.Series_Part_Info.Month_Index = Return_Package.Behaviors.Serial_Info[1].Order;
                }

                // FIX: the Day assignment previously sat OUTSIDE this newspaper branch, and the
                // Enum1/2/3 'else' below was bound to the emptiness check above — which meant an
                // already-populated Enum1 was overwritten, contradicting this region's stated
                // intent ("if not there").  Braces corrected so Day belongs to newspapers and
                // the else pairs with the newspaper test.
                if (Return_Package.Behaviors.Serial_Info.Count > 2)
                {
                    Return_Package.Bib_Info.Series_Part_Info.Day = Return_Package.Behaviors.Serial_Info[2].Display;
                    Return_Package.Bib_Info.Series_Part_Info.Day_Index = Return_Package.Behaviors.Serial_Info[2].Order;
                }
            }
            else
            {
                // Non-newspapers use the generic enumeration levels Enum1 / Enum2 / Enum3
                Return_Package.Bib_Info.Series_Part_Info.Enum1 = Return_Package.Behaviors.Serial_Info[0].Display;
                Return_Package.Bib_Info.Series_Part_Info.Enum1_Index = Return_Package.Behaviors.Serial_Info[0].Order;
                if (Return_Package.Behaviors.Serial_Info.Count > 1)
                {
                    Return_Package.Bib_Info.Series_Part_Info.Enum2 = Return_Package.Behaviors.Serial_Info[1].Display;
                    Return_Package.Bib_Info.Series_Part_Info.Enum2_Index = Return_Package.Behaviors.Serial_Info[1].Order;
                }
                if (Return_Package.Behaviors.Serial_Info.Count > 2)
                {
                    Return_Package.Bib_Info.Series_Part_Info.Enum3 = Return_Package.Behaviors.Serial_Info[2].Display;
                    Return_Package.Bib_Info.Series_Part_Info.Enum3_Index = Return_Package.Behaviors.Serial_Info[2].Order;
                }
            }
        }
    }

    #endregion

    return true;
}
/// <summary> Parse and save an incoming map-edit message </summary>
/// <param name="SendData"> Message from page: '~'-delimited save commands, each '|'-delimited
/// into [handle, type, ...payload] fields </param>
/// <remarks> Processes non-POI saves/deletes first (order matters for database persistence),
/// then POI saves, and finally writes the METS file through an in-process working copy
/// with a timestamped backup. </remarks>
public static void SaveContent(String SendData)
{
    try
    {
        // Get rid of excess wrapper text around the payload
        SendData = SendData.Replace("{\"sendData\": \"", "").Replace("{\"sendData\":\"", "");

        // Validate
        if (SendData.Length == 0)
            return;

        // Ensure we have a geo-spatial module in the digital resource
        GeoSpatial_Information resourceGeoInfo = currentItem.Get_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY) as GeoSpatial_Information;
        if (resourceGeoInfo == null)
        {
            // Create new geo-spatial module, since we do not already have one
            resourceGeoInfo = new GeoSpatial_Information();
            currentItem.Add_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY, resourceGeoInfo);
        }

        // Get the pages
        List<abstract_TreeNode> pages = currentItem.Divisions.Physical_Tree.Pages_PreOrder;

        // Build a lookup of pages by label (unlabeled pages keyed "Page N").
        // NOTE(review): this lookup is never read in this method — possibly left over
        // from an earlier implementation; confirm before removing
        Dictionary<string, Page_TreeNode> pageLookup = new Dictionary<string, Page_TreeNode>();
        int page_index = 1;
        foreach (var abstractTreeNode in pages)
        {
            var pageNode = (Page_TreeNode) abstractTreeNode;
            if (pageNode.Label.Length == 0)
                pageLookup["Page " + page_index] = pageNode;
            else
                pageLookup[pageNode.Label] = pageNode;
            page_index++;
        }

        // Get the length of the incoming message and split into each save message
        int index1 = SendData.LastIndexOf("~", StringComparison.Ordinal);
        string[] allSaves = SendData.Substring(0, index1).Split('~');

        // First pass: handle overlays and item-level edits, NOT POIs.
        // (ORDER matters because these must be saved to the db before POIs are saved.)
        foreach (string t in allSaves)
        {
            // Get the length of the save message and split into save elements
            int index2 = t.LastIndexOf("|", StringComparison.Ordinal);
            string[] ar = t.Substring(0, index2).Split('|');

            // Save type handle (position 0) and save type (position 1)
            string saveTypeHandle = ar[0];
            string saveType = ar[1];

            if (saveTypeHandle == "save")
            {
                switch (saveType)
                {
                    #region item

                    case "item":
                    {
                        // Prep incoming lat/long, e.g. "(lat,long)"
                        string[] temp1 = ar[2].Split(',');
                        double temp1Lat = Convert.ToDouble(temp1[0].Replace("(", ""));
                        double temp1Long = Convert.ToDouble(temp1[1].Replace(")", ""));

                        // Clear all the previous main featureTypes
                        // (this works for an item because there is only ever one item)
                        resourceGeoInfo.Clear_NonPOIs();

                        // Add the new point
                        Coordinate_Point newPoint = new Coordinate_Point(temp1Lat, temp1Long, currentItem.METS_Header.ObjectID, "main");
                        resourceGeoInfo.Add_Point(newPoint);

                        // Save to db
                        SobekCM_Database.Save_Digital_Resource(currentItem, options);
                        break;
                    }

                    #endregion

                    #region overlay

                    case "overlay":
                    {
                        // Parse the array id of the page (human page id minus one — TODO confirm always true)
                        int arrayId = (Convert.ToInt32(ar[2]) - 1);

                        // Add the label to page obj
                        pages[arrayId].Label = ar[3];

                        // Get the geocoordinate object for that pageId
                        GeoSpatial_Information pageGeo = pages[arrayId].Get_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY) as GeoSpatial_Information;
                        if (pageGeo == null)
                        {
                            // No page geo yet: create a fresh module and rectangle polygon
                            pageGeo = new GeoSpatial_Information();
                            Coordinate_Polygon pagePolygon = new Coordinate_Polygon();

                            // Prep incoming bounds: two (lat,long) corner points
                            string[] temp2 = ar[4].Split(',');
                            pagePolygon.Clear_Edge_Points();
                            pagePolygon.Add_Edge_Point(Convert.ToDouble(temp2[0].Replace("(", "")), Convert.ToDouble(temp2[1].Replace(")", "")));
                            pagePolygon.Add_Edge_Point(Convert.ToDouble(temp2[2].Replace("(", "")), Convert.ToDouble(temp2[3].Replace(")", "")));
                            pagePolygon.Recalculate_Bounding_Box();

                            // Rotation (defaults to 0 if unparsable)
                            double result;
                            pagePolygon.Rotation = Double.TryParse(ar[6], out result) ? result : 0;

                            // Explicitly set featureType / label / polygon type
                            pagePolygon.FeatureType = "main";
                            pagePolygon.Label = ar[3];
                            pagePolygon.PolygonType = "rectangle";

                            // Add polygon to pagegeo
                            pageGeo.Add_Polygon(pagePolygon);
                        }
                        else
                        {
                            try
                            {
                                // Reuse the current polygon info
                                Coordinate_Polygon pagePolygon = pageGeo.Polygons[0];

                                // Prep incoming bounds
                                string[] temp2 = ar[4].Split(',');
                                pagePolygon.Clear_Edge_Points();
                                pagePolygon.Add_Edge_Point(Convert.ToDouble(temp2[0].Replace("(", "")), Convert.ToDouble(temp2[1].Replace(")", "")));
                                pagePolygon.Add_Edge_Point(Convert.ToDouble(temp2[2].Replace("(", "")), Convert.ToDouble(temp2[3].Replace(")", "")));
                                pagePolygon.Recalculate_Bounding_Box();

                                // Rotation (defaults to 0 if unparsable)
                                double result;
                                pagePolygon.Rotation = Double.TryParse(ar[6], out result) ? result : 0;

                                // Explicitly set featureType / label / polygon type
                                pagePolygon.FeatureType = "main";
                                pagePolygon.Label = ar[3];
                                pagePolygon.PolygonType = "rectangle";

                                // Clear all previous nonPOIs for this page
                                // (NOTE: this only works if there is only one main page item)
                                pageGeo.Clear_NonPOIs();

                                // Add polygon to pagegeo
                                pageGeo.Add_Polygon(pagePolygon);
                            }
                            catch (Exception)
                            {
                                // There were no polygons on the module — build one from scratch
                                try
                                {
                                    Coordinate_Polygon pagePolygon = new Coordinate_Polygon();

                                    // Prep incoming bounds
                                    string[] temp2 = ar[4].Split(',');
                                    pagePolygon.Clear_Edge_Points();
                                    pagePolygon.Add_Edge_Point(Convert.ToDouble(temp2[0].Replace("(", "")), Convert.ToDouble(temp2[1].Replace(")", "")));
                                    pagePolygon.Add_Edge_Point(Convert.ToDouble(temp2[2].Replace("(", "")), Convert.ToDouble(temp2[3].Replace(")", "")));
                                    pagePolygon.Recalculate_Bounding_Box();

                                    // Rotation (defaults to 0 if unparsable)
                                    double result;
                                    pagePolygon.Rotation = Double.TryParse(ar[6], out result) ? result : 0;

                                    // Explicitly set featureType / label / polygon type
                                    pagePolygon.FeatureType = "main";
                                    pagePolygon.Label = ar[3];
                                    pagePolygon.PolygonType = "rectangle";

                                    // Clear all previous nonPOIs for this page
                                    // (NOTE: this only works if there is only one main page item)
                                    pageGeo.Clear_NonPOIs();

                                    // Add polygon to pagegeo
                                    pageGeo.Add_Polygon(pagePolygon);
                                }
                                catch (Exception)
                                {
                                    // Deliberate best-effort: a malformed overlay message is skipped
                                }
                            }
                        }

                        // Attach the pagegeo obj and save to db
                        pages[arrayId].Add_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY, pageGeo);
                        SobekCM_Database.Save_Digital_Resource(currentItem, options);
                        break;
                    }

                    #endregion
                }
            }
            else
            {
                if (saveTypeHandle == "delete")
                {
                    switch (saveType)
                    {
                        #region item

                        case "item":
                        {
                            // Clear non-POI points and save to db
                            resourceGeoInfo.Clear_NonPOIPoints();
                            SobekCM_Database.Save_Digital_Resource(currentItem, options);
                            break;
                        }

                        #endregion

                        #region overlay

                        case "overlay":
                        {
                            try
                            {
                                // Parse the array id of the page (human page id minus one — TODO confirm always true)
                                int arrayId = (Convert.ToInt32(ar[2]) - 1);

                                // Get the geocoordinate object for that pageId
                                GeoSpatial_Information pageGeo = pages[arrayId].Get_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY) as GeoSpatial_Information;
                                if (pageGeo != null)
                                {
                                    // Hide the overlay polygon rather than removing it
                                    Coordinate_Polygon pagePolygon = pageGeo.Polygons[0];
                                    pagePolygon.Clear_Edge_Points();
                                    pagePolygon.Rotation = 0;
                                    pagePolygon.FeatureType = "hidden";
                                    pagePolygon.PolygonType = "hidden";

                                    // Clear all previous nonPOIs for this page
                                    // (NOTE: this only works if there is only one main page item)
                                    pageGeo.Clear_NonPOIs();
                                    pageGeo.Add_Polygon(pagePolygon);
                                }

                                // Attach the pagegeo obj (NOTE(review): pageGeo may still be null here —
                                // original behavior preserved; confirm Add_Metadata_Module tolerates null)
                                pages[arrayId].Add_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY, pageGeo);

                                // Save to db
                                SobekCM_Database.Save_Digital_Resource(currentItem, options);
                            }
                            catch (Exception)
                            {
                                // Deliberate best-effort: a malformed delete message is skipped
                            }
                            break;
                        }

                        #endregion
                    }
                }
            }
        }

        // Second pass: POIs only.  Flag ensures the POI-clearing fires exactly once.
        bool firedOnce = true;
        foreach (string t in allSaves)
        {
            // Get the length of the save message and split into save elements
            int index2 = t.LastIndexOf("|", StringComparison.Ordinal);
            string[] ar = t.Substring(0, index2).Split('|');

            // Save type handle (position 0) and save type (position 1)
            string saveTypeHandle = ar[0];
            string saveType = ar[1];

            if (saveTypeHandle == "save")
            {
                switch (saveType)
                {
                    #region poi

                    case "poi":
                        // Clear previous poi points only once per message batch
                        if (firedOnce)
                        {
                            resourceGeoInfo.Clear_POIs();
                            firedOnce = false;
                        }

                        // Get specific geometry (KML Standard)
                        switch (ar[2])
                        {
                            case "marker":
                            {
                                // Prep incoming lat/long and add the new point
                                string[] temp2 = ar[4].Split(',');
                                double temp2Lat = Convert.ToDouble(temp2[0].Replace("(", ""));
                                double temp2Long = Convert.ToDouble(temp2[1].Replace(")", ""));
                                resourceGeoInfo.Add_Point(temp2Lat, temp2Long, ar[3], "poi");
                                break;
                            }
                            case "circle":
                            {
                                // Create new circle with label / radius, then center lat/long
                                Coordinate_Circle poiCircle = new Coordinate_Circle { Label = ar[3], Radius = Convert.ToDouble(ar[5]), FeatureType = "poi" };
                                string[] temp3 = ar[4].Split(',');
                                poiCircle.Latitude = Convert.ToDouble(temp3[0].Replace("(", ""));
                                poiCircle.Longitude = Convert.ToDouble(temp3[1].Replace(")", ""));
                                resourceGeoInfo.Add_Circle(poiCircle);
                                break;
                            }
                            case "rectangle":
                            {
                                // Create new rectangle polygon from the incoming two-corner bounds
                                Coordinate_Polygon poiRectangle = new Coordinate_Polygon { Label = ar[3], FeatureType = "poi", PolygonType = "rectangle" };
                                string[] temp4 = ar[4].Split(',');
                                poiRectangle.Add_Edge_Point(Convert.ToDouble(temp4[0].Replace("(", "")), Convert.ToDouble(temp4[1].Replace(")", "")));
                                poiRectangle.Add_Edge_Point(Convert.ToDouble(temp4[2].Replace("(", "")), Convert.ToDouble(temp4[3].Replace(")", "")));
                                poiRectangle.Recalculate_Bounding_Box();
                                resourceGeoInfo.Add_Polygon(poiRectangle);
                                break;
                            }
                            case "polygon":
                            {
                                // Create new polygon; edge points start at field 5
                                Coordinate_Polygon poiPolygon = new Coordinate_Polygon { Label = ar[3], FeatureType = "poi" };
                                for (int i2 = 5; i2 < ar.Length; i2++)
                                {
                                    string[] temp5 = ar[i2].Split(',');
                                    poiPolygon.Add_Edge_Point(Convert.ToDouble(temp5[0].Replace("(", "")), Convert.ToDouble(temp5[1].Replace(")", "")));
                                }
                                resourceGeoInfo.Add_Polygon(poiPolygon);
                                break;
                            }
                            case "polyline":
                            {
                                // Create new line; points start at field 5
                                Coordinate_Line poiLine = new Coordinate_Line { Label = ar[3], FeatureType = "poi" };
                                for (int i2 = 5; i2 < ar.Length; i2++)
                                {
                                    string[] temp5 = ar[i2].Split(',');
                                    poiLine.Add_Point(Convert.ToDouble(temp5[0].Replace("(", "")), Convert.ToDouble(temp5[1].Replace(")", "")), "");
                                }
                                resourceGeoInfo.Add_Line(poiLine);
                                break;
                            }
                        }
                        break;

                    #endregion
                }
            }
        }

        #region prep saving dir

        // Create inprocessing directory
        string userInProcessDirectory = UI_ApplicationCache_Gateway.Settings.User_InProcess_Directory(currentUser, "mapwork");
        string backupDirectory = UI_ApplicationCache_Gateway.Settings.Servers.Image_Server_Network + currentItem.Web.AssocFilePath + UI_ApplicationCache_Gateway.Settings.Resources.Backup_Files_Folder_Name;

        // Ensure the user's process directory and the backup directory exist
        if (!Directory.Exists(userInProcessDirectory))
            Directory.CreateDirectory(userInProcessDirectory);
        if (!Directory.Exists(backupDirectory))
            Directory.CreateDirectory(backupDirectory);

        string resource_directory = UI_ApplicationCache_Gateway.Settings.Servers.Image_Server_Network + currentItem.Web.AssocFilePath;
        string current_mets = resource_directory + currentItem.METS_Header.ObjectID + ".mets.xml";
        string backup_mets = backupDirectory + "\\" + currentItem.METS_Header.ObjectID + "_" +
                             DateTime.Now.Year.ToString() + DateTime.Now.Month.ToString() + DateTime.Now.Day.ToString() + "_" +
                             DateTime.Now.Hour.ToString() + DateTime.Now.Minute.ToString() + DateTime.Now.Second.ToString() + ".mets.xml.BAK";
        string metsInProcessFile = userInProcessDirectory + "\\" + currentItem.BibID + "_" + currentItem.VID + ".mets.xml";

        #endregion

        #region Save mets and db

        // Save the item to the temporary location
        currentItem.Save_METS(metsInProcessFile);

        // Move temp mets to prod, then delete the in-process mets file
        File.Copy(metsInProcessFile, current_mets, true);
        File.Delete(metsInProcessFile);

        // Create a backup mets file
        // NOTE(review): the backup is taken AFTER production is overwritten, so it archives
        // the NEW file rather than preserving the old one — confirm this is intended
        File.Copy(current_mets, backup_mets, true);

        #endregion
    }
    catch (Exception ee)
    {
        // FIX: previously the original exception was discarded, losing the root cause and
        // stack trace; wrap it as the inner exception instead (message unchanged for callers)
        throw new ApplicationException("MapEdit Save Error", ee);
    }
}