/// <summary> Adds a special polygon for an aerial tile coverage from the point notation in the upper right corner </summary>
/// <param name="Latitude_Point"> Decimal degree latitude for the point notation for the aerial </param>
/// <param name="Longitude_Point"> Decimal degree longitude for the point notation for the aerial</param>
/// <param name="Scale"> Scale of the aerial tile ( i.e., for a map of scale 1:20000, set S = 20,000 )</param>
/// <param name="Tile_Width"> Width of the physical aerial tile </param>
/// <param name="Tile_Height"> Height of the physical aerial tile </param>
/// <param name="Earth_Radius"> Radius of the earth in the same units as the height and width above </param>
/// <param name="Label"> Label for this new aerial polygon notation from the point </param>
/// <returns> Fully built coordinate polygon </returns>
public Coordinate_Polygon Add_Aerial_Polygon(double Latitude_Point, double Longitude_Point, int Scale, double Tile_Width, double Tile_Height, ulong Earth_Radius, string Label)
{
    // Ground distance covered by the tile is ( Scale * physical dimension ); convert that
    // arc length to decimal degrees of latitude: degrees = arc * 180 / ( PI * radius )
    double lowerLeftLatitude = Latitude_Point - ((Scale * Tile_Height * 180) / (Math.PI * Earth_Radius));

    // Degrees of longitude shrink toward the poles, so scale the same conversion by the
    // cosine of the mean latitude between the two corners
    double meanLatitudeRadians = Math.PI * (Latitude_Point + lowerLeftLatitude) / 360;
    double lowerLeftLongitude = Longitude_Point - ((Scale * Tile_Width * 180) / (Math.PI * Earth_Radius * Math.Cos(meanLatitudeRadians)));

    // Assemble the rectangular coverage polygon, starting from the provided
    // upper right corner and walking around the four corners
    Coordinate_Polygon tilePolygon = new Coordinate_Polygon();
    tilePolygon.Add_Edge_Point(Latitude_Point, Longitude_Point);
    tilePolygon.Add_Edge_Point(Latitude_Point, lowerLeftLongitude);
    tilePolygon.Add_Edge_Point(lowerLeftLatitude, lowerLeftLongitude);
    tilePolygon.Add_Edge_Point(lowerLeftLatitude, Longitude_Point);
    tilePolygon.Label = Label;

    // Attach the polygon to this item's collection and hand it back to the caller
    polygons.Add(tilePolygon);
    return tilePolygon;
}
/// <summary> Create a test digital resource item </summary>
/// <param name="directory">Directory for the package source directory</param>
/// <returns>Fully built test bib package</returns>
/// <remarks> This factory populates essentially every metadata section supported by the
/// package ( METS header, MODS, taxonomy, geo-spatial, performing arts, oral history,
/// IEEE-LOM, DAITSS, divisions/pages/files ) with fixed sample values so readers and
/// writers can be exercised against a known, fully-populated object. </remarks>
public static SobekCM_Item Create(string directory)
{
    SobekCM_Item testPackage = new SobekCM_Item();

    // Add all the METS header information
    testPackage.METS_Header.Create_Date = new DateTime(2007, 1, 1);
    testPackage.METS_Header.Modify_Date = DateTime.Now;
    testPackage.METS_Header.Creator_Individual = "Mark Sullivan";
    testPackage.METS_Header.Add_Creator_Individual_Notes("Programmer of new SobekCM.Resource_Object");
    testPackage.METS_Header.Add_Creator_Individual_Notes("Adding coordinates");
    testPackage.METS_Header.Creator_Organization = "University of Florida";
    testPackage.METS_Header.Creator_Software = "SobekCM Bib Package Test";
    testPackage.METS_Header.RecordStatus_Enum = METS_Record_Status.COMPLETE;
    testPackage.METS_Header.Add_Creator_Org_Notes("This test package was done to test DLCs new METS package");

    // Add all the MODS elements
    // Abstracts in two languages; only the English one gets a display label and type
    Abstract_Info testAbstract = testPackage.Bib_Info.Add_Abstract("This is a sample abstract", "en");
    testPackage.Bib_Info.Add_Abstract("Tämä on esimerkki abstrakteja", "fin");
    testAbstract.Display_Label = "Summary Abstract";
    testAbstract.Type = "summary";
    testPackage.Bib_Info.Access_Condition.Text = "All rights are reserved by source institution.";
    testPackage.Bib_Info.Access_Condition.Language = "en";
    testPackage.Bib_Info.Access_Condition.Type = "restrictions on use";
    testPackage.Bib_Info.Access_Condition.Display_Label = "Rights";
    testPackage.Bib_Info.Add_Identifier("000123234", "OCLC", "Electronic OCLC");
    testPackage.Bib_Info.Add_Identifier("182-asdsd-28k", "DOI");
    // Languages added three ways: name+code, name only, and ISO code only
    testPackage.Bib_Info.Add_Language("English", String.Empty, "en");
    testPackage.Bib_Info.Add_Language("Finnish");
    testPackage.Bib_Info.Add_Language(String.Empty, "ita", String.Empty);
    testPackage.Bib_Info.Location.Holding_Code = "MVS";
    testPackage.Bib_Info.Location.Holding_Name = "From the Private Library of Mark Sullivan";
    testPackage.Bib_Info.Location.PURL = "http://www.uflib.ufl.edu/ufdc/?b=CA00000000";
    testPackage.Bib_Info.Location.Other_URL = "http://www.fnhm.edu";
    testPackage.Bib_Info.Location.Other_URL_Display_Label = "Specimen Information";
    testPackage.Bib_Info.Location.Other_URL_Note = "Specimen FLAS 125342 Database";
    testPackage.Bib_Info.Location.EAD_URL = "http://digital.uflib.ufl.edu/";
    testPackage.Bib_Info.Location.EAD_Name = "Digital Library Center Finding Guide";

    // Main entity (primary author) information
    testPackage.Bib_Info.Main_Entity_Name.Name_Type = Name_Info_Type_Enum.Personal;
    testPackage.Bib_Info.Main_Entity_Name.Full_Name = "Brown, B.F.";
    testPackage.Bib_Info.Main_Entity_Name.Terms_Of_Address = "Dr.";
    testPackage.Bib_Info.Main_Entity_Name.Display_Form = "B.F. Brown";
    testPackage.Bib_Info.Main_Entity_Name.Affiliation = "Chemistry Dept., American University";
    testPackage.Bib_Info.Main_Entity_Name.Description = "Chemistry Professor Emeritus";
    testPackage.Bib_Info.Main_Entity_Name.Add_Role("Author");

    // Add the zoological taxonomy metadata module with sample taxon data
    Zoological_Taxonomy_Info taxonInfo = new Zoological_Taxonomy_Info();
    testPackage.Add_Metadata_Module(GlobalVar.ZOOLOGICAL_TAXONOMY_METADATA_MODULE_KEY, taxonInfo);
    taxonInfo.Scientific_Name = "Ctenomys sociabilis";
    taxonInfo.Higher_Classification = "Animalia; Chordata; Vertebrata; Mammalia; Theria; Eutheria; Rodentia; Hystricognatha; Hystricognathi; Ctenomyidae; Ctenomyini; Ctenomys";
    taxonInfo.Kingdom = "Animalia";
    taxonInfo.Phylum = "Chordata";
    taxonInfo.Class = "Mammalia";
    taxonInfo.Order = "Rodentia";
    taxonInfo.Family = "Ctenomyidae";
    taxonInfo.Genus = "Ctenomys";
    taxonInfo.Specific_Epithet = "sociabilis";
    taxonInfo.Taxonomic_Rank = "species";
    taxonInfo.Common_Name = "Social Tuco-Tuco";

    // Additional named entities of each name type ( personal, conference, corporate )
    Name_Info name1 = new Name_Info();
    name1.Name_Type = Name_Info_Type_Enum.Personal;
    name1.Given_Name = "John Paul";
    name1.Terms_Of_Address = "Pope; II";
    name1.Dates = "1920-2002";
    name1.User_Submitted = true;
    testPackage.Bib_Info.Add_Named_Entity(name1);

    Name_Info name2 = new Name_Info();
    name2.Name_Type = Name_Info_Type_Enum.Conference;
    name2.Full_Name = "Paris Peace Conference (1919-1920)";
    name2.Dates = "1919-1920";
    testPackage.Bib_Info.Add_Named_Entity(name2);

    Name_Info name3 = new Name_Info();
    name3.Name_Type = Name_Info_Type_Enum.Corporate;
    name3.Full_Name = "United States -- Court of Appeals (2nd Court)";
    testPackage.Bib_Info.Add_Named_Entity(name3);

    // This named entity carries the ID referenced later by the affiliation ( "NAM4" )
    Name_Info name4 = new Name_Info();
    name4.Name_Type = Name_Info_Type_Enum.Personal;
    name4.Full_Name = "Wilson, Mary";
    name4.Display_Form = "Mary 'Weels' Wilson";
    name4.Given_Name = "Mary";
    name4.Family_Name = "Wilson";
    name4.ID = "NAM4";
    name4.Terms_Of_Address = "2nd";
    name4.Add_Role("illustrator");
    name4.Add_Role("cartographer");
    testPackage.Bib_Info.Add_Named_Entity(name4);

    // Donor information
    Name_Info donor = new Name_Info();
    donor.Name_Type = Name_Info_Type_Enum.Personal;
    donor.Full_Name = "Livingston, Arthur";
    donor.Description = "Gift in honor of Arthur Livingston";
    donor.Terms_Of_Address = "3rd";
    donor.Add_Role("honoree", String.Empty);
    testPackage.Bib_Info.Donor = donor;

    // Main title plus translated, alternative, and uniform other titles
    testPackage.Bib_Info.Main_Title.NonSort = "The ";
    testPackage.Bib_Info.Main_Title.Title = "Man Who Would Be King";
    testPackage.Bib_Info.Main_Title.Subtitle = "The story of succession in England";

    Title_Info title1 = new Title_Info("homme qui voulut être roi", Title_Type_Enum.Translated);
    title1.NonSort = "L'";
    title1.Language = "fr";
    testPackage.Bib_Info.Add_Other_Title(title1);

    Title_Info title2 = new Title_Info();
    title2.Title = "Man Who Be King";
    title2.Display_Label = "also known as";
    title2.NonSort = "The";
    title2.Title_Type = Title_Type_Enum.Alternative;
    testPackage.Bib_Info.Add_Other_Title(title2);

    Title_Info title3 = new Title_Info();
    title3.Title = "Great works of England";
    title3.Authority = "naf";
    title3.Add_Part_Name("Second Portion");
    title3.Add_Part_Number("2nd");
    title3.Title_Type = Title_Type_Enum.Uniform;
    title3.User_Submitted = true;
    testPackage.Bib_Info.Add_Other_Title(title3);

    // Notes of several types ( funding, untyped, performers )
    testPackage.Bib_Info.Add_Note("Funded by the NEH", Note_Type_Enum.Funding);
    testPackage.Bib_Info.Add_Note("Based on a play which originally appeared in France as \"Un peu plus tard, un peu plus tôt\"").User_Submitted = true;
    testPackage.Bib_Info.Add_Note("Anne Baxter (Louise), Maria Perschy (Angela), Gustavo Rojo (Bill), Reginald Gilliam (Mr. Johnson), [Catherine Elliot?] (Aunt Sallie), Ben Tatar (waiter)", Note_Type_Enum.Performers, "Performed By");

    // Origin / publication information
    testPackage.Bib_Info.Origin_Info.Add_Place("New York", "nyu", "usa");
    testPackage.Bib_Info.Origin_Info.Date_Issued = "1992";
    testPackage.Bib_Info.Origin_Info.MARC_DateIssued_Start = "1992";
    testPackage.Bib_Info.Origin_Info.MARC_DateIssued_End = "1993";
    testPackage.Bib_Info.Origin_Info.Date_Copyrighted = "1999";
    testPackage.Bib_Info.Origin_Info.Edition = "2nd";

    Publisher_Info newPub = testPackage.Bib_Info.Add_Publisher("Published for the American Vacuum Society by the American Institute of Physics");
    newPub.Add_Place("New York, New York");
    newPub.User_Submitted = true;
    testPackage.Bib_Info.Add_Publisher("University of Florida Press House").Add_Place("Gainesville, FL");
    testPackage.Bib_Info.Add_Manufacturer("Addison Randly Publishing House");

    // Physical description of the original item
    testPackage.Bib_Info.Original_Description.Extent = "1 sound disc (56 min.) : digital ; 3/4 in.";
    testPackage.Bib_Info.Original_Description.Add_Note("The sleeve of this sound disc was damaged in a fire");
    testPackage.Bib_Info.Original_Description.Add_Note("The disc has a moderate amount of scratches, but still plays");

    // Serial part information ( both display strings and sortable indexes )
    testPackage.Bib_Info.Series_Part_Info.Day = "18";
    testPackage.Bib_Info.Series_Part_Info.Day_Index = 18;
    testPackage.Bib_Info.Series_Part_Info.Month = "Syyskuu";
    testPackage.Bib_Info.Series_Part_Info.Month_Index = 9;
    testPackage.Bib_Info.Series_Part_Info.Year = "1992";
    testPackage.Bib_Info.Series_Part_Info.Year_Index = 1992;
    testPackage.Bib_Info.Series_Part_Info.Enum1 = "Volume 12";
    testPackage.Bib_Info.Series_Part_Info.Enum1_Index = 12;
    testPackage.Bib_Info.Series_Part_Info.Enum2 = "Issue 3";
    testPackage.Bib_Info.Series_Part_Info.Enum2_Index = 3;
    testPackage.Bib_Info.Series_Part_Info.Enum3 = "Part 1";
    testPackage.Bib_Info.Series_Part_Info.Enum3_Index = 1;
    testPackage.Behaviors.Serial_Info.Add_Hierarchy(1, 1992, "1992");
    testPackage.Behaviors.Serial_Info.Add_Hierarchy(2, 9, "Syyskuu");
    testPackage.Behaviors.Serial_Info.Add_Hierarchy(3, 18, "18");
    testPackage.Bib_Info.SeriesTitle.Title = "Shakespeare's most famous musicals";
    testPackage.Bib_Info.Add_Target_Audience("young adults");
    testPackage.Bib_Info.Add_Target_Audience("adolescent", "marctarget");
    testPackage.Bib_Info.SobekCM_Type = TypeOfResource_SobekCM_Enum.Newspaper;

    // Add cartographic subject
    Subject_Info_Cartographics newCartographics = testPackage.Bib_Info.Add_Cartographics_Subject();
    newCartographics.Scale = "1:2000";
    newCartographics.Projection = "Conical Projection";
    newCartographics.Coordinates = "E 72°--E 148°/N 13°--N 18°";

    // Add hierarchical geographic subject
    Subject_Info_HierarchicalGeographic hierarchical = testPackage.Bib_Info.Add_Hierarchical_Geographic_Subject();
    hierarchical.Continent = "North America";
    hierarchical.Country = "United States of America";
    hierarchical.State = "Kansas";
    hierarchical.County = "Butler";
    hierarchical.City = "Augusta";

    // Add hierarchical geographic subject
    Subject_Info_HierarchicalGeographic hierarchical2 = testPackage.Bib_Info.Add_Hierarchical_Geographic_Subject();
    hierarchical2.Region = "Arctic Ocean";

    // Add hierarchical geographic subject
    Subject_Info_HierarchicalGeographic hierarchical3 = testPackage.Bib_Info.Add_Hierarchical_Geographic_Subject();
    hierarchical3.Island = "Puerto Rico";
    hierarchical3.Language = "English";
    hierarchical3.Province = "Provincial";
    hierarchical3.Territory = "Puerto Rico";
    hierarchical3.Area = "Intercontinental areas (Western Hemisphere)";

    // Add a name subject
    Subject_Info_Name subjname1 = testPackage.Bib_Info.Add_Name_Subject();
    subjname1.Authority = "lcsh";
    subjname1.Full_Name = "Garcia Lorca, Federico";
    subjname1.Dates = "1898-1936";
    subjname1.Add_Geographic("Russia");
    subjname1.Add_Geographic("Moscow");
    subjname1.Add_Genre("maps");
    subjname1.User_Submitted = true;

    // Add a title information subject
    Subject_Info_TitleInfo subjtitle1 = testPackage.Bib_Info.Add_Title_Subject();
    subjtitle1.Title_Type = Title_Type_Enum.Uniform;
    subjtitle1.Authority = "naf";
    subjtitle1.Title = "Missale Carnotense";

    // Add a standard subject
    Subject_Info_Standard subject1 = testPackage.Bib_Info.Add_Subject();
    subject1.Authority = "lcsh";
    subject1.Add_Topic("Real property");
    subject1.Add_Geographic("Mississippi");
    subject1.Add_Geographic("Tippah County");
    subject1.Add_Genre("Maps");

    // Add a standard subject
    Subject_Info_Standard subject2 = testPackage.Bib_Info.Add_Subject();
    subject2.Add_Occupation("Migrant laborers");
    subject2.Add_Genre("School district case files");

    // Add a standard subject
    Subject_Info_Standard subject3 = testPackage.Bib_Info.Add_Subject();
    subject3.Authority = "lctgm";
    subject3.Add_Topic("Educational buildings");
    subject3.Add_Geographic("Washington (D.C.)");
    subject3.Add_Temporal("1890-1910");

    // Add a standard subject
    Subject_Info_Standard subject4 = testPackage.Bib_Info.Add_Subject();
    subject4.Authority = "rvm";
    subject4.Language = "french";
    subject4.Add_Topic("Église catholique");
    subject4.Add_Topic("Histoire");
    subject4.Add_Temporal("20e siècle");

    // Add record information
    testPackage.Bib_Info.Record.Add_Catalog_Language(new Language_Info("English", "eng", "en"));
    testPackage.Bib_Info.Record.Add_Catalog_Language(new Language_Info("French", "fre", "fr"));
    testPackage.Bib_Info.Record.MARC_Creation_Date = "080303";
    testPackage.Bib_Info.Record.Add_MARC_Record_Content_Sources("FUG");
    testPackage.Bib_Info.Record.Record_Origin = "Imported from (OCLC)001213124";

    // Test the items which are in the non-MODS portion of the Bib_Info object
    testPackage.BibID = "MVS0000001";
    testPackage.VID = "00001";
    testPackage.Bib_Info.SortDate = 1234;
    testPackage.Bib_Info.SortTitle = "MAN WHO WOULD BE KING";
    testPackage.Bib_Info.Add_Temporal_Subject(1990, 2002, "Recent history");
    testPackage.Bib_Info.Add_Temporal_Subject(1990, 2002, "Lähihistoria");
    testPackage.Bib_Info.Source.Code = "UF";
    testPackage.Bib_Info.Source.Statement = "University of Florida";

    // Add an affiliation ( Name_Reference points back to name4's ID above )
    Affiliation_Info affiliation1 = new Affiliation_Info();
    affiliation1.University = "University of Florida";
    affiliation1.Campus = "Gainesville Campus";
    affiliation1.College = "College of Engineering";
    affiliation1.Department = "Computer Engineering Department";
    affiliation1.Unit = "Robotics";
    affiliation1.Name_Reference = "NAM4";
    testPackage.Bib_Info.Add_Affiliation(affiliation1);

    // Add a related item
    Related_Item_Info relatedItem1 = new Related_Item_Info();
    relatedItem1.SobekCM_ID = "UF00001234";
    relatedItem1.Relationship = Related_Item_Type_Enum.Preceding;
    relatedItem1.Publisher = "Gainesville Sun Publishing House";
    relatedItem1.Add_Note(new Note_Info("Digitized with funding from NEH", Note_Type_Enum.Funding));
    relatedItem1.Add_Note(new Note_Info("Gainesville Bee was the precursor to this item"));
    relatedItem1.Main_Title.NonSort = "The";
    relatedItem1.Main_Title.Title = "Gainesville Bee";
    relatedItem1.Add_Identifier("01234353", "oclc");
    relatedItem1.Add_Identifier("002232311", "aleph");
    Name_Info ri_name = new Name_Info();
    ri_name.Full_Name = "Hills, Bryan";
    ri_name.Terms_Of_Address = "Mr.";
    ri_name.Name_Type = Name_Info_Type_Enum.Personal;
    ri_name.Add_Role("author");
    relatedItem1.Add_Name(ri_name);
    relatedItem1.URL = @"http://www.uflib.ufl.edu/ufdc/?b=UF00001234";
    relatedItem1.URL_Display_Label = "Full Text";
    testPackage.Bib_Info.Add_Related_Item(relatedItem1);

    // Add another related item
    Related_Item_Info relatedItem2 = new Related_Item_Info();
    relatedItem2.Relationship = Related_Item_Type_Enum.Succeeding;
    relatedItem2.SobekCM_ID = "UF00009999";
    relatedItem2.Main_Title.NonSort = "The";
    relatedItem2.Main_Title.Title = "Daily Sun";
    relatedItem2.Add_Identifier("0125437", "oclc");
    relatedItem2.Add_Note("Name change occured in Fall 1933");
    relatedItem2.Start_Date = "Fall 1933";
    relatedItem2.End_Date = "December 31, 1945";
    testPackage.Bib_Info.Add_Related_Item(relatedItem2);

    // Add some processing parameters
    testPackage.Behaviors.Add_Aggregation("JUV");
    testPackage.Behaviors.Add_Aggregation("DLOC");
    testPackage.Behaviors.Add_Aggregation("DLOSA1");
    testPackage.Behaviors.Add_Aggregation("ALICE");
    testPackage.Behaviors.Add_Aggregation("ARTE");
    testPackage.Web.GUID = "GUID!";
    testPackage.Behaviors.Add_Wordmark("DLOC");
    testPackage.Behaviors.Add_Wordmark("UFSPEC");
    testPackage.Behaviors.Main_Thumbnail = "00001thm.jpg";

    // Add some downloads
    testPackage.Divisions.Download_Tree.Add_File("MVS_Complete.PDF");
    testPackage.Divisions.Download_Tree.Add_File("MVS_Complete.MP2");
    testPackage.Divisions.Download_Tree.Add_File("MVS_Part1.MP2");
    testPackage.Divisions.Download_Tree.Add_File("MVS_Part1.PDF");

    // Add some coordinate information
    GeoSpatial_Information geoSpatial = new GeoSpatial_Information();
    testPackage.Add_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY, geoSpatial);
    geoSpatial.Add_Point(29.530151, -82.301459, "Lake Wauberg");
    geoSpatial.Add_Point(29.634352, -82.350640, "Veterinary School");
    Coordinate_Polygon polygon = new Coordinate_Polygon();
    polygon.Label = "University of Florida Campus";
    polygon.Add_Edge_Point(new Coordinate_Point(29.651435, -82.339869, String.Empty));
    polygon.Add_Edge_Point(new Coordinate_Point(29.641216, -82.340298, String.Empty));
    polygon.Add_Edge_Point(new Coordinate_Point(29.629503, -82.371969, String.Empty));
    polygon.Add_Edge_Point(new Coordinate_Point(29.649645, -82.371712, String.Empty));
    polygon.Add_Inner_Point(29.649794, -82.351971, "Stadium");
    polygon.Add_Inner_Point(29.650988, -82.341156, "Library");
    geoSpatial.Add_Polygon(polygon);
    Coordinate_Line line = new Coordinate_Line();
    line.Label = "Waldo Road";
    line.Add_Point(29.652852, -82.310944, "Gainesville");
    line.Add_Point(29.716681, -82.268372, String.Empty);
    line.Add_Point(29.791494, -82.167778, "Waldo");
    geoSpatial.Add_Line(line);

    // Add some performing arts information
    // NOTE(review): uses literal string keys ("PerformingArts", "OralInterview") rather than
    // GlobalVar constants as the other modules do — confirm these match the registered keys
    Performing_Arts_Info partInfo = new Performing_Arts_Info();
    testPackage.Add_Metadata_Module("PerformingArts", partInfo);
    partInfo.Performance = "Hamlet";
    partInfo.Performance_Date = "August 12, 1923";
    Performer performer1 = partInfo.Add_Performer("Sullivan, Mark");
    performer1.Sex = "M";
    performer1.LifeSpan = "1873-";
    performer1.Occupation = "actor";
    performer1.Title = "Mr.";
    Performer performer2 = partInfo.Add_Performer("Waldbart, Julia");
    performer2.Sex = "F";
    performer2.LifeSpan = "1876-";
    performer2.Occupation = "actress";
    performer2.Title = "Mrs.";

    // Add some oral history information
    Oral_Interview_Info oralInfo = new Oral_Interview_Info();
    testPackage.Add_Metadata_Module( "OralInterview", oralInfo);
    oralInfo.Interviewee = "Edwards, Herm";
    oralInfo.Interviewer = "Proctor, Samual";

    // Add some learning object resource information
    LearningObjectMetadata lomInfo = new LearningObjectMetadata();
    testPackage.Add_Metadata_Module( GlobalVar.IEEE_LOM_METADATA_MODULE_KEY, lomInfo );
    lomInfo.AggregationLevel = AggregationLevelEnum.level3;
    lomInfo.Status = StatusEnum.draft;

    // Two sample system requirements ( OS and software version ranges )
    LOM_System_Requirements lomReq1 = new LOM_System_Requirements();
    lomReq1.RequirementType = RequirementTypeEnum.operating_system;
    lomReq1.Name.Value = "Windows";
    lomReq1.MinimumVersion = "Windows XP";
    lomReq1.MaximumVersion = "Windows 7";
    lomInfo.Add_SystemRequirements(lomReq1);
    LOM_System_Requirements lomReq2 = new LOM_System_Requirements();
    lomReq2.RequirementType = RequirementTypeEnum.software;
    lomReq2.Name.Value = "Java SDK";
    lomReq2.MinimumVersion = "1.7.1";
    lomReq2.MaximumVersion = "2.09";
    lomInfo.Add_SystemRequirements(lomReq2);
    lomInfo.InteractivityType = InteractivityTypeEnum.mixed;
    lomInfo.Add_LearningResourceType("exercise");
    lomInfo.Add_LearningResourceType("Tutorials", "encdlwebpedagogicaltype");
    lomInfo.InteractivityLevel = InteractivityLevelEnum.high;
    lomInfo.Add_IntendedEndUserRole(IntendedEndUserRoleEnum.learner);
    lomInfo.Add_Context("Undergraduate lower division", "enclearningcontext");
    lomInfo.Add_Context("15", "grade");
    lomInfo.Add_Context("16", "grade");
    lomInfo.Add_Context("5", "group");
    lomInfo.Add_TypicalAgeRange("suitable for children over 7", "en");
    lomInfo.Add_TypicalAgeRange("2-8");
    lomInfo.DifficultyLevel = DifficultyLevelEnum.medium;
    lomInfo.TypicalLearningTime = "PT45M";

    // First LOM classification: discipline taxonomy ( ARIADNE source, three-level taxon path )
    LOM_Classification lomClassification1 = new LOM_Classification();
    lomInfo.Add_Classification(lomClassification1);
    lomClassification1.Purpose.Value = "Discipline";
    LOM_TaxonPath lomTaxonPath1 = new LOM_TaxonPath();
    lomClassification1.Add_TaxonPath(lomTaxonPath1);
    lomTaxonPath1.Add_SourceName("ARIADNE");
    LOM_Taxon lomTaxon1 = new LOM_Taxon();
    lomTaxonPath1.Add_Taxon(lomTaxon1);
    lomTaxon1.ID = "BF120";
    lomTaxon1.Add_Entry("Work_History", "en");
    lomTaxon1.Add_Entry("Historie", "nl");
    LOM_Taxon lomTaxon2 = new LOM_Taxon();
    lomTaxonPath1.Add_Taxon(lomTaxon2);
    lomTaxon2.ID = "BF120.1";
    lomTaxon2.Add_Entry("American Work_History", "en");
    LOM_Taxon lomTaxon3 = new LOM_Taxon();
    lomTaxonPath1.Add_Taxon(lomTaxon3);
    lomTaxon3.ID = "BF120.1.4";
    lomTaxon3.Add_Entry("American Civil War", "en");

    // Second LOM classification: educational objective with two Common Core taxon paths
    LOM_Classification lomClassification2 = new LOM_Classification();
    lomInfo.Add_Classification(lomClassification2);
    lomClassification2.Purpose.Value = "Educational Objective";
    LOM_TaxonPath lomTaxonPath2 = new LOM_TaxonPath();
    lomClassification2.Add_TaxonPath(lomTaxonPath2);
    lomTaxonPath2.Add_SourceName("Common Core Standards", "en");
    LOM_Taxon lomTaxon4 = new LOM_Taxon();
    lomTaxonPath2.Add_Taxon(lomTaxon4);
    lomTaxon4.ID = "CCS.Math.Content";
    LOM_Taxon lomTaxon5 = new LOM_Taxon();
    lomTaxonPath2.Add_Taxon(lomTaxon5);
    lomTaxon5.ID = "3";
    lomTaxon5.Add_Entry("Grade 3", "en");
    LOM_Taxon lomTaxon6 = new LOM_Taxon();
    lomTaxonPath2.Add_Taxon(lomTaxon6);
    lomTaxon6.ID = "OA";
    lomTaxon6.Add_Entry("Operations and Algebraic Thinking", "en");
    LOM_Taxon lomTaxon7 = new LOM_Taxon();
    lomTaxonPath2.Add_Taxon(lomTaxon7);
    lomTaxon7.ID = "A";
    lomTaxon7.Add_Entry("Represent and solve problems involving multiplication and division.", "en");
    LOM_Taxon lomTaxon8 = new LOM_Taxon();
    lomTaxonPath2.Add_Taxon(lomTaxon8);
    lomTaxon8.ID = "3";
    lomTaxon8.Add_Entry("Use multiplication and division within 100 to solve word problems in situations involving equal groups, arrays, and measurement quantities, e.g., by using drawings and equations with a symbol for the unknown number to represent the problem.", "en");
    LOM_TaxonPath lomTaxonPath3 = new LOM_TaxonPath();
    lomClassification2.Add_TaxonPath(lomTaxonPath3);
    lomTaxonPath3.Add_SourceName("Common Core Standards", "en");
    LOM_Taxon lomTaxon14 = new LOM_Taxon();
    lomTaxonPath3.Add_Taxon(lomTaxon14);
    lomTaxon14.ID = "CCS.Math.Content";
    LOM_Taxon lomTaxon15 = new LOM_Taxon();
    lomTaxonPath3.Add_Taxon(lomTaxon15);
    lomTaxon15.ID = "3";
    lomTaxon15.Add_Entry("Grade 3", "en");
    LOM_Taxon lomTaxon16 = new LOM_Taxon();
    lomTaxonPath3.Add_Taxon(lomTaxon16);
    lomTaxon16.ID = "OA";
    lomTaxon16.Add_Entry("Operations and Algebraic Thinking", "en");
    LOM_Taxon lomTaxon17 = new LOM_Taxon();
    lomTaxonPath3.Add_Taxon(lomTaxon17);
    lomTaxon17.ID = "A";
    lomTaxon17.Add_Entry("Represent and solve problems involving multiplication and division.", "en");
    LOM_Taxon lomTaxon18 = new LOM_Taxon();
    lomTaxonPath3.Add_Taxon(lomTaxon18);
    lomTaxon18.ID = "4";
    lomTaxon18.Add_Entry("Determine the unknown whole number in a multiplication or division equation relating three whole numbers. For example, determine the unknown number that makes the equation true in each of the equations 8 × ? = 48, 5 = _ ÷ 3, 6 × 6 = ?", "en");

    // Add some views and interfaces
    testPackage.Behaviors.Clear_Web_Skins();
    testPackage.Behaviors.Add_Web_Skin("dLOC");
    testPackage.Behaviors.Add_Web_Skin("UFDC");
    testPackage.Behaviors.Add_View(View_Enum.JPEG2000);
    testPackage.Behaviors.Add_View(View_Enum.JPEG);
    testPackage.Behaviors.Add_View(View_Enum.RELATED_IMAGES);
    testPackage.Behaviors.Add_View(View_Enum.HTML, "Full Document", "MVS001214.html");

    // Create the chapters and pages and link them
    Division_TreeNode chapter1 = new Division_TreeNode("Chapter", "First Chapter");
    Page_TreeNode page1 = new Page_TreeNode("First Page");
    Page_TreeNode page2 = new Page_TreeNode("Page 2");
    chapter1.Nodes.Add(page1);
    chapter1.Nodes.Add(page2);
    Division_TreeNode chapter2 = new Division_TreeNode("Chapter", "Last Chapter");
    Page_TreeNode page3 = new Page_TreeNode("Page 3");
    Page_TreeNode page4 = new Page_TreeNode("Last Page");
    chapter2.Nodes.Add(page3);
    chapter2.Nodes.Add(page4);
    testPackage.Divisions.Physical_Tree.Roots.Add(chapter1);
    testPackage.Divisions.Physical_Tree.Roots.Add(chapter2);

    // Create the files ( jp2 / jpg files carry width and height; tif files do not )
    SobekCM_File_Info file1_1 = new SobekCM_File_Info("2000626_0001.jp2", 2120, 1100);
    SobekCM_File_Info file1_2 = new SobekCM_File_Info("2000626_0001.jpg", 630, 330);
    SobekCM_File_Info file1_3 = new SobekCM_File_Info("2000626_0001.tif");
    SobekCM_File_Info file2_1 = new SobekCM_File_Info("2000626_0002.jp2", 1754, 2453);
    SobekCM_File_Info file2_2 = new SobekCM_File_Info("2000626_0002.jpg", 630, 832);
    SobekCM_File_Info file2_3 = new SobekCM_File_Info("2000626_0002.tif");
    SobekCM_File_Info file3_1 = new SobekCM_File_Info("2000626_0003.jp2", 2321, 1232);
    SobekCM_File_Info file3_2 = new SobekCM_File_Info("2000626_0003.jpg", 630, 342);
    SobekCM_File_Info file3_3 = new SobekCM_File_Info("2000626_0003.tif");
    SobekCM_File_Info file4_1 = new SobekCM_File_Info("2000626_0004.jp2", 2145, 1024);
    SobekCM_File_Info file4_2 = new SobekCM_File_Info("2000626_0004.jpg", 630, 326);
    SobekCM_File_Info file4_3 = new SobekCM_File_Info("2000626_0004.tif");

    // Link the files to the pages
    page1.Files.Add(file1_1);
    page1.Files.Add(file1_2);
    page1.Files.Add(file1_3);
    page2.Files.Add(file2_1);
    page2.Files.Add(file2_2);
    page2.Files.Add(file2_3);
    page3.Files.Add(file3_1);
    page3.Files.Add(file3_2);
    page3.Files.Add(file3_3);
    page4.Files.Add(file4_1);
    page4.Files.Add(file4_2);
    page4.Files.Add(file4_3);

    // Add the DAITSS information
    DAITSS_Info daitssInfo = new DAITSS_Info();
    daitssInfo.Account = "FTU";
    daitssInfo.SubAccount = "CLAS";
    daitssInfo.Project = "UFDC";
    daitssInfo.toArchive = true;
    testPackage.Add_Metadata_Module(GlobalVar.DAITSS_METADATA_MODULE_KEY, daitssInfo);

    // Save this package
    testPackage.Source_Directory = directory;
    return testPackage;
}
/// <summary> Reads the coordinates section of the XML and stores the geo-spatial data in the provided digital resource </summary>
/// <param name="Input_XmlReader"> Reader positioned inside the sobekcm Coordinates element </param>
/// <param name="Return_Package"> Digital resource package into which the points, lines, and polygons are stored </param>
private void read_coordinates_info(XmlReader Input_XmlReader, SobekCM_Item Return_Package)
{
    // Get the geo-spatial metadata module from the package, creating and
    // attaching a new one if it does not already exist
    GeoSpatial_Information geoInfo = Return_Package.Get_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY) as GeoSpatial_Information;
    if (geoInfo == null)
    {
        geoInfo = new GeoSpatial_Information();
        Return_Package.Add_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY, geoInfo);
    }

    while (Input_XmlReader.Read())
    {
        // Stop reading once the closing Coordinates tag is reached
        if ((Input_XmlReader.NodeType == XmlNodeType.EndElement) && (Input_XmlReader.Name == sobekcm_namespace + ":Coordinates"))
        {
            return;
        }

        if (Input_XmlReader.NodeType == XmlNodeType.Element)
        {
            // Strip the namespace prefix to branch on the local element name
            switch (Input_XmlReader.Name.Replace(sobekcm_namespace + ":", ""))
            {
                case "KML":
                    // Text element holding a reference to an external KML resource
                    if (!Input_XmlReader.IsEmptyElement)
                    {
                        Input_XmlReader.Read();
                        if (Input_XmlReader.NodeType == XmlNodeType.Text)
                            geoInfo.KML_Reference = Input_XmlReader.Value;
                    }
                    break;

                case "Point":
                    // Single stand-alone coordinate point
                    geoInfo.Add_Point(read_point(Input_XmlReader));
                    break;

                case "Line":
                    // A line is an optional label attribute plus a sequence of child Point elements
                    Coordinate_Line newLine = new Coordinate_Line();
                    if (Input_XmlReader.MoveToAttribute("label"))
                        newLine.Label = Input_XmlReader.Value;
                    while (Input_XmlReader.Read())
                    {
                        // Only add the completed line once its closing tag is found
                        if ((Input_XmlReader.NodeType == XmlNodeType.EndElement) && (Input_XmlReader.Name == sobekcm_namespace + ":Line"))
                        {
                            geoInfo.Add_Line(newLine);
                            break;
                        }
                        if ((Input_XmlReader.NodeType == XmlNodeType.Element) && (Input_XmlReader.Name == sobekcm_namespace + ":Point"))
                        {
                            newLine.Add_Point(read_point(Input_XmlReader));
                        }
                    }
                    break;

                case "Polygon":
                    // A polygon has optional label / ID / pageSeq attributes, then child
                    // Edge (boundary points) and Internal (labeled interior points) sections
                    Coordinate_Polygon newPolygon = new Coordinate_Polygon();
                    if (Input_XmlReader.MoveToAttribute("label"))
                        newPolygon.Label = Input_XmlReader.Value;
                    if (Input_XmlReader.MoveToAttribute("ID"))
                        newPolygon.ID = Input_XmlReader.Value;
                    if (Input_XmlReader.MoveToAttribute("pageSeq"))
                    {
                        try
                        {
                            newPolygon.Page_Sequence = Convert.ToUInt16(Input_XmlReader.Value);
                        }
                        catch
                        {
                            // Best-effort parse: a non-numeric page sequence is silently ignored
                        }
                    }
                    while (Input_XmlReader.Read())
                    {
                        // Only add the completed polygon once its closing tag is found
                        if ((Input_XmlReader.NodeType == XmlNodeType.EndElement) && (Input_XmlReader.Name == sobekcm_namespace + ":Polygon"))
                        {
                            geoInfo.Add_Polygon(newPolygon);
                            break;
                        }
                        if (Input_XmlReader.NodeType == XmlNodeType.Element)
                        {
                            // Edge section: each child Point becomes a boundary vertex
                            if (Input_XmlReader.Name == sobekcm_namespace + ":Edge")
                            {
                                while (Input_XmlReader.Read())
                                {
                                    if ((Input_XmlReader.NodeType == XmlNodeType.EndElement) && (Input_XmlReader.Name == sobekcm_namespace + ":Edge"))
                                    {
                                        break;
                                    }
                                    if ((Input_XmlReader.NodeType == XmlNodeType.Element) && (Input_XmlReader.Name == sobekcm_namespace + ":Point"))
                                    {
                                        newPolygon.Add_Edge_Point(read_point(Input_XmlReader));
                                    }
                                }
                            }
                            // Internal section: each child Point becomes an interior (inner) point
                            if (Input_XmlReader.Name == sobekcm_namespace + ":Internal")
                            {
                                while (Input_XmlReader.Read())
                                {
                                    if ((Input_XmlReader.NodeType == XmlNodeType.EndElement) && (Input_XmlReader.Name == sobekcm_namespace + ":Internal"))
                                    {
                                        break;
                                    }
                                    if ((Input_XmlReader.NodeType == XmlNodeType.Element) && (Input_XmlReader.Name == sobekcm_namespace + ":Point"))
                                    {
                                        newPolygon.Add_Inner_Point(read_point(Input_XmlReader));
                                    }
                                }
                            }
                        }
                    }
                    break;
            }
        }
    }
}
/// <summary> Reads the MARC Core-compliant section of XML and stores the data in the provided digital resource </summary> /// <param name="r"> XmlTextReader from which to read the marc data </param> /// <param name="thisBibInfo">Bibliographic object into which most the values are read</param> /// <param name="package"> Digital resource object to save the data to if this is reading the top-level bibDesc (OPTIONAL)</param> /// <param name="Importing_Record"> Importing record flag is used to determine if special treatment should be applied to the 001 identifier. If this is reading MarcXML from a dmdSec, this is set to false </param> /// <param name="Options"> Dictionary of any options which this metadata reader/writer may utilize </param> public static void Read_MarcXML_Info(XmlReader r, Bibliographic_Info thisBibInfo, SobekCM_Item package, bool Importing_Record, Dictionary<string, object> Options ) { // Create the MARC_XML_Reader to load everything into first MARC_Record record = new MARC_Record(); // Read from the file record.Read_MARC_Info(r); // Handle optional mapping first for retaining the 856 as a related link if ((Options != null) && (Options.ContainsKey("MarcXML_File_ReaderWriter.Retain_856_As_Related_Link"))) { if (Options["MarcXML_File_ReaderWriter.Retain_856_As_Related_Link"].ToString().ToUpper() == "TRUE") { if ((record.Get_Data_Subfield(856, 'u').Length > 0) && (record.Get_Data_Subfield(856, 'y').Length > 0)) { string url856 = record.Get_Data_Subfield(856, 'u'); string label856 = record.Get_Data_Subfield(856, 'y'); thisBibInfo.Location.Other_URL = url856; thisBibInfo.Location.Other_URL_Note = label856; } } } // Now, load values into the bib package // Load the date ( 260 |c ) thisBibInfo.Origin_Info.MARC_DateIssued = Remove_Trailing_Punctuation(record.Get_Data_Subfield(260, 'c')); // Load the descriptions and notes about this item Add_Descriptions(thisBibInfo, record); // Look for the 786 with special identifiers to map back into the source notes 
foreach (MARC_Field thisRecord in record[786]) { if ((thisRecord.Indicators == "0 ") && (thisRecord.Subfield_Count == 1) && (thisRecord.has_Subfield('n'))) thisBibInfo.Add_Note(thisRecord.Subfields[0].Data, Note_Type_Enum.Source); } // Add the contents (505) if (record.Get_Data_Subfield(505, 'a').Length > 2) { thisBibInfo.Add_TableOfContents(record.Get_Data_Subfield(505, 'a')); } // Get the scale information (034) if (record.Get_Data_Subfield(034, 'b').Length > 2) { thisBibInfo.Add_Scale(record.Get_Data_Subfield(034, 'b'), "SUBJ034"); } // Get the scale information (255) if ((record.Get_Data_Subfield(255, 'a').Length > 2) || (record.Get_Data_Subfield(255, 'b').Length > 2) || (record.Get_Data_Subfield(255, 'c').Length > 2)) { thisBibInfo.Add_Scale(record.Get_Data_Subfield(255, 'a'), record.Get_Data_Subfield(255, 'b'), record.Get_Data_Subfield(255, 'c'), "SUBJ255"); } // Get the coordinate information (034) if ((record.Get_Data_Subfield(034, 'd').Length > 0) && (record.Get_Data_Subfield(034, 'e').Length > 0) && (record.Get_Data_Subfield(034, 'f').Length > 0) && (record.Get_Data_Subfield(034, 'g').Length > 0)) { // This is an extra metadata component GeoSpatial_Information geoInfo = package.Get_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY) as GeoSpatial_Information; if (geoInfo == null) { geoInfo = new GeoSpatial_Information(); package.Add_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY, geoInfo); } if (geoInfo.Polygon_Count == 0) { try { string d_field = record.Get_Data_Subfield(034, 'd').Replace("O", "0"); string e_field = record.Get_Data_Subfield(034, 'e').Replace("O", "0"); string f_field = record.Get_Data_Subfield(034, 'f').Replace("O", "0"); string g_field = record.Get_Data_Subfield(034, 'g').Replace("O", "0"); double d_value = 1; double e_value = 1; double f_value = 1; double g_value = 1; if (d_field.Contains(".")) { if (d_field.Contains("W")) { d_value = -1*Convert.ToDouble(d_field.Replace("W", "")); } else { d_value = 
Convert.ToDouble(d_field.Replace("E", "")); } } else { d_value = Convert.ToDouble(d_field.Substring(1, 3)) + (Convert.ToDouble(d_field.Substring(4, 2))/60); if ((d_field[0] == '-') || (d_field[0] == 'W')) { d_value = -1*d_value; } } if (d_value < -180) d_value = d_value + 360; if (e_field.Contains(".")) { if (e_field.Contains("W")) { e_value = -1*Convert.ToDouble(e_field.Replace("W", "")); } else { e_value = Convert.ToDouble(e_field.Replace("E", "")); } } else { e_value = Convert.ToDouble(e_field.Substring(1, 3)) + (Convert.ToDouble(e_field.Substring(4, 2))/60); if ((e_field[0] == '-') || (e_field[0] == 'W')) { e_value = -1*e_value; } } if (e_value < -180) e_value = e_value + 360; if (f_field.Contains(".")) { if (f_field.Contains("S")) { f_value = -1*Convert.ToDouble(f_field.Replace("S", "")); } else { f_value = Convert.ToDouble(f_field.Replace("N", "")); } } else { f_value = Convert.ToDouble(f_field.Substring(1, 3)) + (Convert.ToDouble(f_field.Substring(4, 2))/60); if ((f_field[0] == '-') || (f_field[0] == 'S')) { f_value = -1*f_value; } } if (g_field.Contains(".")) { if (g_field.Contains("S")) { g_value = -1*Convert.ToDouble(g_field.Replace("S", "")); } else { g_value = Convert.ToDouble(g_field.Replace("N", "")); } } else { g_value = Convert.ToDouble(g_field.Substring(1, 3)) + (Convert.ToDouble(g_field.Substring(4, 2))/60); if ((g_field[0] == '-') || (g_field[0] == 'S')) { g_value = -1*g_value; } } Coordinate_Polygon polygon = new Coordinate_Polygon(); polygon.Add_Edge_Point(f_value, d_value); polygon.Add_Edge_Point(g_value, d_value); polygon.Add_Edge_Point(g_value, e_value); polygon.Add_Edge_Point(f_value, e_value); polygon.Label = "Map Coverage"; geoInfo.Add_Polygon(polygon); } catch { } } } // Add the abstract ( 520 |a ) foreach (MARC_Field thisRecord in record[520]) { if (thisRecord.has_Subfield('a')) { Abstract_Info newAbstract = new Abstract_Info(); switch (thisRecord.Indicator1) { case ' ': newAbstract.Type = "summary"; newAbstract.Display_Label = 
"Summary"; break; case '0': newAbstract.Type = "subject"; newAbstract.Display_Label = "Subject"; break; case '1': newAbstract.Type = "review"; newAbstract.Display_Label = "Review"; break; case '2': newAbstract.Type = "scope and content"; newAbstract.Display_Label = "Scope and Content"; break; case '4': newAbstract.Type = "content advice"; newAbstract.Display_Label = "Content Advice"; break; default: newAbstract.Display_Label = "Abstract"; break; } if (thisRecord.has_Subfield('b')) { newAbstract.Abstract_Text = thisRecord['a'] + " " + thisRecord['b']; } else { newAbstract.Abstract_Text = thisRecord['a']; } thisBibInfo.Add_Abstract(newAbstract); } } // Load the format ( 300 ) if (record.has_Field(300)) { StringBuilder builder300 = new StringBuilder(); if (record.Get_Data_Subfield(300, 'a').Length > 0) { builder300.Append(record.Get_Data_Subfield(300, 'a').Replace(":", "").Replace(";", "").Trim()); } builder300.Append(" : "); if (record.Get_Data_Subfield(300, 'b').Length > 0) { builder300.Append(record.Get_Data_Subfield(300, 'b').Replace(";", "").Trim()); } builder300.Append(" ; "); if (record.Get_Data_Subfield(300, 'c').Length > 0) { builder300.Append(record.Get_Data_Subfield(300, 'c')); } thisBibInfo.Original_Description.Extent = builder300.ToString().Trim(); if (thisBibInfo.Original_Description.Extent.Replace(" ", "").Replace(":", "").Replace(";", "") == "v.") thisBibInfo.Original_Description.Extent = String.Empty; } // Load the current frequency (310) foreach (MARC_Field thisRecord in record[310]) { if (thisRecord.has_Subfield('a')) { if (thisRecord.has_Subfield('b')) { thisBibInfo.Origin_Info.Add_Frequency(Remove_Trailing_Punctuation(thisRecord['a']).Replace("[", "(").Replace("]", ")") + "[" + thisRecord['b'].Replace("[", "(").Replace("]", ")") + "]"); } else { thisBibInfo.Origin_Info.Add_Frequency(Remove_Trailing_Punctuation(thisRecord['a']).Replace("[", "(").Replace("]", ")")); } } } // Load the previous frequency (321) foreach (MARC_Field thisRecord in 
record[321]) { if (thisRecord.has_Subfield('a')) { if (thisRecord.has_Subfield('b')) { thisBibInfo.Origin_Info.Add_Frequency(Remove_Trailing_Punctuation(thisRecord['a']).Replace("[", "(").Replace("]", ")") + "[ FORMER " + thisRecord['b'].Replace("[", "(").Replace("]", ")") + "]"); } else { thisBibInfo.Origin_Info.Add_Frequency(Remove_Trailing_Punctuation(thisRecord['a']).Replace("[", "(").Replace("]", ")") + "[ FORMER ]"); } } } // Load the edition ( 250 ) if (record.has_Field(250)) { if (record.Get_Data_Subfield(250, 'b').Length > 0) { thisBibInfo.Origin_Info.Edition = record.Get_Data_Subfield(250, 'a').Replace("/", "").Replace("=", "").Trim() + " -- " + record.Get_Data_Subfield(250, 'b'); } else { thisBibInfo.Origin_Info.Edition = record.Get_Data_Subfield(250, 'a'); } } // Load the language ( 008 ) if (record.has_Field(8)) { string field_08 = record[8][0].Control_Field_Value; if (field_08.Length > 5) { // Get the language code string languageCode = field_08.Substring(field_08.Length - 5, 3); // Add as the language of the item Language_Info thisLanguage = thisBibInfo.Add_Language(String.Empty, languageCode, String.Empty); // Add as the language of the cataloging thisBibInfo.Record.Add_Catalog_Language(new Language_Info(thisLanguage.Language_Text, thisLanguage.Language_ISO_Code, String.Empty)); } } // Load any additional languages (041) foreach (MARC_Field thisRecord in record[041]) { foreach (MARC_Subfield thisSubfield in thisRecord.Subfields) { if ((thisSubfield.Subfield_Code == 'a') || (thisSubfield.Subfield_Code == 'b') || (thisSubfield.Subfield_Code == 'd') || (thisSubfield.Subfield_Code == 'e') || (thisSubfield.Subfield_Code == 'f') || (thisSubfield.Subfield_Code == 'g') || (thisSubfield.Subfield_Code == 'h')) { thisBibInfo.Add_Language(thisSubfield.Data); } } } // Load the publisher ( 260 |b ) if (record.has_Field(260)) { string[] special_260_splitter = record[260][0].Control_Field_Value.Split("|".ToCharArray()); Publisher_Info thisInfo = new 
Publisher_Info(); foreach (string thisSplitter in special_260_splitter) { if (thisSplitter.Length > 2) { if (thisSplitter[0] == 'a') { thisInfo.Add_Place(Remove_Trailing_Punctuation(thisSplitter.Substring(2).Replace(" :", "").Trim())); thisInfo.Name = "[s.n.]"; thisBibInfo.Add_Publisher(thisInfo); } if (thisSplitter[0] == 'b') { string pubname = thisSplitter.Substring(2).Replace(";", "").Trim(); if ((pubname.Length > 1) && (pubname[pubname.Length - 1] == ',')) { pubname = pubname.Substring(0, pubname.Length - 1); } thisInfo.Name = pubname; thisBibInfo.Add_Publisher(thisInfo); thisInfo = new Publisher_Info(); } if (thisSplitter[0] == 'e') { thisInfo.Add_Place(thisSplitter.Substring(2).Replace("(", "").Replace(" :", "").Trim()); } if (thisSplitter[0] == 'f') { string manname = thisSplitter.Substring(2).Replace(")", "").Trim(); if ((manname.Length > 1) && (manname[manname.Length - 1] == ',')) { manname = manname.Substring(0, manname.Length - 1); } thisInfo.Name = manname; thisBibInfo.Add_Manufacturer(thisInfo); thisInfo = new Publisher_Info(); } } } } // Load the dates from the 008 string field_008 = String.Empty; if (record.has_Field(008)) { field_008 = record[8][0].Control_Field_Value; if (field_008.Length > 14) { // Save the two date points thisBibInfo.Origin_Info.MARC_DateIssued_Start = field_008.Substring(7, 4).Trim(); thisBibInfo.Origin_Info.MARC_DateIssued_End = field_008.Substring(11, 4).Trim(); // See what type of dates they are (if they are special) char date_type = field_008[6]; switch (date_type) { case 'r': thisBibInfo.Origin_Info.Date_Reprinted = thisBibInfo.Origin_Info.MARC_DateIssued_Start; break; case 't': thisBibInfo.Origin_Info.Date_Copyrighted = thisBibInfo.Origin_Info.MARC_DateIssued_End; break; } } if (field_008.Length > 5) { thisBibInfo.Record.MARC_Creation_Date = field_008.Substring(0, 6); } } // Load the location from the 008 if (field_008.Length > 17) { thisBibInfo.Origin_Info.Add_Place(String.Empty, field_008.Substring(15, 3), String.Empty); 
} // Load the main record number ( 001 ) string idValue; string oclc = String.Empty; if (record.has_Field(1)) { idValue = record[1][0].Control_Field_Value.Trim(); if (idValue.Length > 0) { thisBibInfo.Record.Main_Record_Identifier.Identifier = idValue; if (Importing_Record) { if (Char.IsNumber(idValue[0])) { // Add this ALEPH number if (thisBibInfo.ALEPH_Record != idValue) { thisBibInfo.Add_Identifier(idValue, "ALEPH"); } thisBibInfo.Record.Record_Origin = "Imported from (ALEPH)" + idValue; } else { if (idValue.Length >= 7) { if ((idValue.IndexOf("ocm") == 0) || (idValue.IndexOf("ocn") == 0)) { oclc = idValue.Replace("ocn", "").Replace("ocm", ""); if (thisBibInfo.OCLC_Record != oclc) { thisBibInfo.Add_Identifier(oclc, "OCLC"); } thisBibInfo.Record.Record_Origin = "Imported from (OCLC)" + oclc; } else { thisBibInfo.Add_Identifier(idValue.Substring(0, 7), "NOTIS"); thisBibInfo.Record.Record_Origin = "Imported from (NOTIS)" + idValue.Substring(0, 7); } } } } } } // If this was OCLC record (non-local) look for a 599 added during time of export if (oclc.Length > 0) { if (record.has_Field(599)) { // Tracking box number will be in the |a field if ((package != null) && (record[599][0].has_Subfield('a'))) { package.Tracking.Tracking_Box = record[599][0]['a']; } // Disposition advice will be in the |b field if ((package != null) && (record[599][0].has_Subfield('b'))) { package.Tracking.Disposition_Advice_Notes = record[599][0]['b']; string advice_notes_as_caps = package.Tracking.Disposition_Advice_Notes.ToUpper(); if ((advice_notes_as_caps.IndexOf("RETURN") >= 0) || (advice_notes_as_caps.IndexOf("RETAIN") >= 0)) { package.Tracking.Disposition_Advice = 1; } else { if (advice_notes_as_caps.IndexOf("WITHDRAW") >= 0) { package.Tracking.Disposition_Advice = 2; } else if (advice_notes_as_caps.IndexOf("DISCARD") >= 0) { package.Tracking.Disposition_Advice = 3; } } } // Do not overlay record in the future will be in the |c field if (record[599][0].has_Subfield('c')) { string 
record_overlay_notes = record[599][0]['c'].Trim(); if (record_overlay_notes.Length > 0) { if (package != null) { package.Tracking.Never_Overlay_Record = true; package.Tracking.Internal_Comments = record_overlay_notes; } thisBibInfo.Record.Record_Content_Source = thisBibInfo.Record.Record_Content_Source + " (" + record_overlay_notes + ")"; } } } } // Step through all of the identifiers foreach (MARC_Field thisRecord in record[35]) { // Only continue if there is an id in this record if (thisRecord.has_Subfield('a')) { // Was this the old NOTIS number? if (thisRecord.Indicators == "9 ") { thisBibInfo.Add_Identifier(thisRecord['a'], "NOTIS"); } // Was this the OCLC number? if ((oclc.Length == 0) && (thisRecord['a'].ToUpper().IndexOf("OCOLC") >= 0)) { thisBibInfo.Add_Identifier(thisRecord['a'].ToUpper().Replace("(OCOLC)", "").Trim(), "OCLC"); } // Was this the BIB ID? if ((package != null) && (thisRecord['a'].ToUpper().IndexOf("IID") >= 0)) { package.BibID = thisRecord['a'].ToUpper().Replace("(IID)", "").Trim(); } } } // Also, look for the old original OCLC in the 776 10 |w if (thisBibInfo.OCLC_Record.Length == 0) { foreach (MARC_Field thisRecord in record[776]) { if ((thisRecord.Indicators == "1 ") && (thisRecord.has_Subfield('w')) && (thisRecord['w'].ToUpper().IndexOf("OCOLC") >= 0)) { thisBibInfo.Add_Identifier(thisRecord['w'].ToUpper().Replace("(OCOLC)", "").Trim(), "OCLC"); } } } // Look for the LCCN in field 10 if (record.Get_Data_Subfield(10, 'a').Length > 0) thisBibInfo.Add_Identifier(record.Get_Data_Subfield(10, 'a'), "LCCN"); // Look for ISBN in field 20 if (record.Get_Data_Subfield(20, 'a').Length > 0) thisBibInfo.Add_Identifier(record.Get_Data_Subfield(20, 'a'), "ISBN"); // Look for ISSN in field 22 if (record.Get_Data_Subfield(22, 'a').Length > 0) thisBibInfo.Add_Identifier(record.Get_Data_Subfield(22, 'a'), "ISSN"); // Look for classification ( LCC ) in field 50 if (record.Get_Data_Subfield(50, 'a').Length > 0) { string subfield_3 = String.Empty; if 
(record.Get_Data_Subfield(50, '3').Length > 0) { subfield_3 = record.Get_Data_Subfield(50, '3'); } if (record.Get_Data_Subfield(50, 'b').Length > 0) thisBibInfo.Add_Classification(record.Get_Data_Subfield(50, 'a') + " " + record.Get_Data_Subfield(50, 'b'), "lcc").Display_Label = subfield_3; else thisBibInfo.Add_Classification(record.Get_Data_Subfield(50, 'a'), "lcc").Display_Label = subfield_3; } // Look for classification ( DDC ) in field 82 if (record.Get_Data_Subfield(82, 'a').Length > 0) { string subfield_2 = String.Empty; if (record.Get_Data_Subfield(82, '2').Length > 0) { subfield_2 = record.Get_Data_Subfield(82, '2'); } if (record.Get_Data_Subfield(82, 'b').Length > 0) thisBibInfo.Add_Classification(record.Get_Data_Subfield(82, 'a') + " " + record.Get_Data_Subfield(82, 'b'), "ddc").Edition = subfield_2; else thisBibInfo.Add_Classification(record.Get_Data_Subfield(82, 'a'), "ddc").Edition = subfield_2; } // Look for classification ( UDC ) in field 80 if (record.Get_Data_Subfield(80, 'a').Length > 0) { StringBuilder builder = new StringBuilder(); builder.Append(record.Get_Data_Subfield(80, 'a')); if (record.Get_Data_Subfield(80, 'b').Length > 0) builder.Append(" " + record.Get_Data_Subfield(80, 'b')); if (record.Get_Data_Subfield(80, 'x').Length > 0) builder.Append(" " + record.Get_Data_Subfield(80, 'x')); thisBibInfo.Add_Classification(builder.ToString(), "udc"); } // Look for classification ( NLM ) in field 60 if (record.Get_Data_Subfield(60, 'a').Length > 0) { if (record.Get_Data_Subfield(60, 'b').Length > 0) thisBibInfo.Add_Classification(record.Get_Data_Subfield(60, 'a') + " " + record.Get_Data_Subfield(60, 'b'), "nlm"); else thisBibInfo.Add_Classification(record.Get_Data_Subfield(60, 'a'), "nlm"); } // Look for classification ( SUDOCS or CANDOCS ) in field 86 foreach (MARC_Field thisRecord in record[84]) { string authority = String.Empty; switch (thisRecord.Indicator1) { case '0': authority = "sudocs"; break; case '1': authority = "candocs"; break; 
default: if (thisRecord.has_Subfield('2')) authority = thisRecord['2']; break; } if (thisRecord.has_Subfield('a')) thisBibInfo.Add_Classification(thisRecord['a'], authority); } // Look for other classifications in field 084 foreach (MARC_Field thisRecord in record[84]) { if (thisRecord.has_Subfield('a')) { string subfield_2 = String.Empty; if (thisRecord.has_Subfield('2')) { subfield_2 = thisRecord['2']; } if (thisRecord.has_Subfield('b')) thisBibInfo.Add_Classification(thisRecord['a'] + " " + thisRecord['b'], subfield_2); else thisBibInfo.Add_Classification(thisRecord['a'], subfield_2); } } // Look for any other identifiers in field 24 foreach (MARC_Field thisRecord in record[24]) { string identifier_source = String.Empty; switch (thisRecord.Indicator1) { case '0': identifier_source = "isrc"; break; case '1': identifier_source = "upc"; break; case '2': identifier_source = "ismn"; break; case '3': identifier_source = "ian"; break; case '4': identifier_source = "sici"; break; case '7': identifier_source = thisRecord['2']; break; } if (thisRecord.has_Subfield('d')) { thisBibInfo.Add_Identifier(thisRecord['a'] + " (" + thisRecord['d'] + ")", identifier_source); } else { thisBibInfo.Add_Identifier(thisRecord['a'], identifier_source); } } // Look for the ISSN in the 440 and 490 |x and LCCN in the 490 |l foreach (MARC_Field thisRecord in record[440]) { if (thisRecord.has_Subfield('x')) { thisBibInfo.Add_Identifier(thisRecord['x'], "ISSN"); } } foreach (MARC_Field thisRecord in record[490]) { if (thisRecord.has_Subfield('x')) { thisBibInfo.Add_Identifier(thisRecord['x'], "ISSN"); } if (thisRecord.has_Subfield('l')) { thisBibInfo.Add_Identifier(thisRecord['l'], "LCCN"); } } // Load all the MARC Content Sources (040) if (record.has_Field(40)) { if (record.Get_Data_Subfield(40, 'a').Length > 0) { thisBibInfo.Record.Add_MARC_Record_Content_Sources(record.Get_Data_Subfield(40, 'a')); } if (record.Get_Data_Subfield(40, 'b').Length > 0) { 
thisBibInfo.Record.Add_MARC_Record_Content_Sources(record.Get_Data_Subfield(40, 'b')); } if (record.Get_Data_Subfield(40, 'c').Length > 0) { thisBibInfo.Record.Add_MARC_Record_Content_Sources(record.Get_Data_Subfield(40, 'c')); } string modifying = record.Get_Data_Subfield(40, 'd'); if (modifying.Length > 0) { string[] modSplitter = modifying.Split("|".ToCharArray()); foreach (string split in modSplitter) { thisBibInfo.Record.Add_MARC_Record_Content_Sources(split.Trim()); } } if (record.Get_Data_Subfield(40, 'e').Length > 0) { thisBibInfo.Record.Description_Standard = record.Get_Data_Subfield(40, 'e'); } } // Add the spatial information ( 752, 662 ) Add_Hierarchical_Subject(thisBibInfo, record, 752); Add_Hierarchical_Subject(thisBibInfo, record, 662); // Add all the subjects ( 600... 658, excluding 655 ) Add_Personal_Name(thisBibInfo, record, 600, 4); Add_Corporate_Name(thisBibInfo, record, 610, 4); Add_Conference_Name(thisBibInfo, record, 611, 4); Add_Main_Title(thisBibInfo, record, 630, Title_Type_Enum.UNSPECIFIED, 1, 4); // Add all additional subjects // Letters indicate which fields are: TOPICAL, GEOGRAPHIC, TEMPORAL, GENRE, OCCUPATION Add_Subject(thisBibInfo, record, 648, "x", "z", "ay", "v", ""); Add_Subject(thisBibInfo, record, 650, "ax", "z", "y", "v", ""); Add_Subject(thisBibInfo, record, 651, "x", "az", "y", "v", ""); Add_Subject(thisBibInfo, record, 653, "a", "", "", "", ""); Add_Subject(thisBibInfo, record, 654, "av", "y", "z", "", ""); Add_Subject(thisBibInfo, record, 655, "x", "z", "y", "av", ""); Add_Subject(thisBibInfo, record, 656, "x", "z", "y", "v", "a"); Add_Subject(thisBibInfo, record, 657, "ax", "z", "y", "v", ""); Add_Subject(thisBibInfo, record, 690, "ax", "z", "y", "v", ""); Add_Subject(thisBibInfo, record, 691, "x", "az", "y", "v", ""); // Add the genres (655 -- again) foreach (MARC_Field thisRecord in record[655]) { if (thisRecord.has_Subfield('a')) { if (thisRecord.has_Subfield('2')) thisBibInfo.Add_Genre(thisRecord['a'], 
thisRecord['2']); else thisBibInfo.Add_Genre(thisRecord['a']); } } // Add the abbreviated title (210) foreach (MARC_Field thisRecord in record[210]) { if (thisRecord.has_Subfield('a')) { Title_Info abbrTitle = new Title_Info(thisRecord['a'], Title_Type_Enum.Abbreviated); if (thisRecord.has_Subfield('b')) abbrTitle.Subtitle = thisRecord['b']; thisBibInfo.Add_Other_Title(abbrTitle); } } // Add the title ( 245 |a, |b ) Add_Main_Title(thisBibInfo, record, 245, Title_Type_Enum.UNSPECIFIED, 2, 1); // Add the translated titles ( 242 ) Add_Main_Title(thisBibInfo, record, 242, Title_Type_Enum.Translated, 2, 2); // Add the alternative titles ( 246, 740 ) Add_Main_Title(thisBibInfo, record, 246, Title_Type_Enum.Alternative, 0, 2); Add_Main_Title(thisBibInfo, record, 740, Title_Type_Enum.Alternative, 1, 2); // Add the uniform titles (130, 240, 730 ) Add_Main_Title(thisBibInfo, record, 130, Title_Type_Enum.Uniform, 1, 2); Add_Main_Title(thisBibInfo, record, 240, Title_Type_Enum.Uniform, 2, 2); Add_Main_Title(thisBibInfo, record, 730, Title_Type_Enum.Uniform, 1, 2); // Add the series titles ( 440, 490 ) Add_Main_Title(thisBibInfo, record, 440, Title_Type_Enum.UNSPECIFIED, 2, 3); Add_Main_Title(thisBibInfo, record, 490, Title_Type_Enum.UNSPECIFIED, 0, 3); // Add the creators and contributors ( 100, 110 , 111, 700, 710, 711, 720, 796, 797 ) Add_Personal_Name(thisBibInfo, record, 100, 1); Add_Personal_Name(thisBibInfo, record, 700, 2); Add_Personal_Name(thisBibInfo, record, 796, 3); Add_Corporate_Name(thisBibInfo, record, 110, 1); Add_Corporate_Name(thisBibInfo, record, 710, 2); Add_Corporate_Name(thisBibInfo, record, 797, 3); Add_Conference_Name(thisBibInfo, record, 111, 1); Add_Conference_Name(thisBibInfo, record, 711, 2); // Add the Other Edition Value (775) foreach (MARC_Field thisRecord in record[775]) { Related_Item_Info otherEditionItem = new Related_Item_Info(); otherEditionItem.Relationship = Related_Item_Type_Enum.OtherVersion; if (thisRecord.has_Subfield('t')) 
otherEditionItem.Main_Title.Title = thisRecord['t']; if (thisRecord.has_Subfield('x')) otherEditionItem.Add_Identifier(thisRecord['x'], "issn"); if (thisRecord.has_Subfield('z')) otherEditionItem.Add_Identifier(thisRecord['z'], "isbn"); if (thisRecord.has_Subfield('w')) { string[] splitter = thisRecord['w'].Split("|".ToCharArray()); foreach (string thisSplitter in splitter) { if (thisSplitter.IndexOf("(DLC)sn") >= 0) { otherEditionItem.Add_Identifier(thisSplitter.Replace("(DLC)sn", "").Trim(), "lccn"); } if (thisSplitter.IndexOf("(OCoLC)") >= 0) { otherEditionItem.Add_Identifier(thisSplitter.Replace("(OCoLC)", "").Trim(), "oclc"); } } } thisBibInfo.Add_Related_Item(otherEditionItem); } // Add the Preceding Entry (780) foreach (MARC_Field thisRecord in record[780]) { Related_Item_Info precedingItem = new Related_Item_Info(); precedingItem.Relationship = Related_Item_Type_Enum.Preceding; if (thisRecord.has_Subfield('t')) precedingItem.Main_Title.Title = thisRecord['t']; if (thisRecord.has_Subfield('x')) precedingItem.Add_Identifier(thisRecord['x'], "issn"); if (thisRecord.has_Subfield('z')) precedingItem.Add_Identifier(thisRecord['z'], "isbn"); if (thisRecord.has_Subfield('w')) { string[] splitter = thisRecord['w'].Split("|".ToCharArray()); foreach (string thisSplitter in splitter) { if ((thisSplitter.IndexOf("(DLC)sn") >= 0) || (thisSplitter.IndexOf("(OCoLC)") >= 0)) { if (thisSplitter.IndexOf("(DLC)sn") >= 0) { precedingItem.Add_Identifier(thisSplitter.Replace("(DLC)sn", "").Trim(), "lccn"); } if (thisSplitter.IndexOf("(OCoLC)") >= 0) { precedingItem.Add_Identifier(thisSplitter.Replace("(OCoLC)", "").Trim(), "oclc"); } } else { precedingItem.Add_Identifier(thisSplitter.Trim(), String.Empty); } } if (thisRecord.has_Subfield('o')) { if (thisRecord['o'].IndexOf("(SobekCM)") >= 0) precedingItem.SobekCM_ID = thisRecord['o'].Replace("(SobekCM)", "").Trim(); } } thisBibInfo.Add_Related_Item(precedingItem); } // Add the Suceeding Entry (785) foreach (MARC_Field thisRecord 
in record[785]) { Related_Item_Info succeedingItem = new Related_Item_Info(); succeedingItem.Relationship = Related_Item_Type_Enum.Succeeding; if (thisRecord.has_Subfield('t')) succeedingItem.Main_Title.Title = thisRecord['t']; if (thisRecord.has_Subfield('x')) succeedingItem.Add_Identifier(thisRecord['x'], "issn"); if (thisRecord.has_Subfield('z')) succeedingItem.Add_Identifier(thisRecord['z'], "isbn"); if (thisRecord.has_Subfield('w')) { string[] splitter = thisRecord['w'].Split("|".ToCharArray()); foreach (string thisSplitter in splitter) { if ((thisSplitter.IndexOf("(DLC)sn") >= 0) || (thisSplitter.IndexOf("(OCoLC)") >= 0)) { if (thisSplitter.IndexOf("(DLC)sn") >= 0) { succeedingItem.Add_Identifier(thisSplitter.Replace("(DLC)sn", "").Trim(), "lccn"); } if (thisSplitter.IndexOf("(OCoLC)") >= 0) { succeedingItem.Add_Identifier(thisSplitter.Replace("(OCoLC)", "").Trim(), "oclc"); } } else { succeedingItem.Add_Identifier(thisSplitter.Trim(), String.Empty); } } } if (thisRecord.has_Subfield('o')) { if (thisRecord['o'].IndexOf("(SobekCM)") >= 0) succeedingItem.SobekCM_ID = thisRecord['o'].Replace("(SobekCM)", "").Trim(); } thisBibInfo.Add_Related_Item(succeedingItem); } // Add the Other Relationship Entry (787) foreach (MARC_Field thisRecord in record[787]) { Related_Item_Info otherRelationItem = new Related_Item_Info(); otherRelationItem.Relationship = Related_Item_Type_Enum.UNKNOWN; if (thisRecord.has_Subfield('t')) otherRelationItem.Main_Title.Title = thisRecord['t']; if (thisRecord.has_Subfield('x')) otherRelationItem.Add_Identifier(thisRecord['x'], "issn"); if (thisRecord.has_Subfield('z')) otherRelationItem.Add_Identifier(thisRecord['z'], "isbn"); if (thisRecord.has_Subfield('w')) { string[] splitter = thisRecord['w'].Split("|".ToCharArray()); foreach (string thisSplitter in splitter) { if ((thisSplitter.IndexOf("(DLC)sn") >= 0) || (thisSplitter.IndexOf("(OCoLC)") >= 0)) { if (thisSplitter.IndexOf("(DLC)sn") >= 0) { 
otherRelationItem.Add_Identifier(thisSplitter.Replace("(DLC)sn", "").Trim(), "lccn"); } if (thisSplitter.IndexOf("(OCoLC)") >= 0) { otherRelationItem.Add_Identifier(thisSplitter.Replace("(OCoLC)", "").Trim(), "oclc"); } } else { otherRelationItem.Add_Identifier(thisSplitter.Trim(), String.Empty); } } } if (thisRecord.has_Subfield('o')) { if (thisRecord['o'].IndexOf("(SobekCM)") >= 0) otherRelationItem.SobekCM_ID = thisRecord['o'].Replace("(SobekCM)", "").Trim(); } thisBibInfo.Add_Related_Item(otherRelationItem); } // Get the type of resource ( Leader/006, Leader/007, Serial 008/021 ) string marc_type = String.Empty; switch (record.Leader[6]) { case 'a': case 't': thisBibInfo.SobekCM_Type = TypeOfResource_SobekCM_Enum.Book; marc_type = "BKS"; break; case 'e': case 'f': thisBibInfo.SobekCM_Type = TypeOfResource_SobekCM_Enum.Map; marc_type = "MAP"; break; case 'c': case 'd': thisBibInfo.SobekCM_Type = TypeOfResource_SobekCM_Enum.Book; marc_type = "BKS"; break; case 'i': case 'j': thisBibInfo.SobekCM_Type = TypeOfResource_SobekCM_Enum.Audio; marc_type = "REC"; break; case 'k': thisBibInfo.SobekCM_Type = TypeOfResource_SobekCM_Enum.Photograph; marc_type = "VIS"; break; case 'g': thisBibInfo.SobekCM_Type = TypeOfResource_SobekCM_Enum.Video; marc_type = "VIS"; break; case 'r': thisBibInfo.SobekCM_Type = TypeOfResource_SobekCM_Enum.Artifact; marc_type = "VIS"; break; case 'm': thisBibInfo.SobekCM_Type = TypeOfResource_SobekCM_Enum.Archival; marc_type = "COM"; break; case 'p': thisBibInfo.SobekCM_Type = TypeOfResource_SobekCM_Enum.Archival; marc_type = "MIX"; break; case 'o': marc_type = "VIS"; thisBibInfo.SobekCM_Type = TypeOfResource_SobekCM_Enum.Archival; break; } if (record.Leader[7] == 'c') thisBibInfo.Type.Collection = true; if (record.Leader[7] == 's') { thisBibInfo.SobekCM_Type = TypeOfResource_SobekCM_Enum.Serial; if (field_008.Length > 22) { if (field_008[21] == 'n') thisBibInfo.SobekCM_Type = TypeOfResource_SobekCM_Enum.Newspaper; } marc_type = "CNR"; } 
thisBibInfo.EncodingLevel = record.Leader[17].ToString().Replace("^", "#").Replace(" ", "#"); if (field_008.Length > 35) { if ((marc_type == "BKS") || (marc_type == "CNR") || (marc_type == "MAP") || (marc_type == "COM") || (marc_type == "VIS")) { switch (field_008[28]) { case 'c': thisBibInfo.Add_Genre("multilocal government publication", "marcgt"); break; case 'f': thisBibInfo.Add_Genre("federal government publication", "marcgt"); break; case 'i': thisBibInfo.Add_Genre("international intergovernmental publication", "marcgt"); break; case 'l': thisBibInfo.Add_Genre("local government publication", "marcgt"); break; case 'm': thisBibInfo.Add_Genre("multistate government publication", "marcgt"); break; case 'o': thisBibInfo.Add_Genre("government publication", "marcgt"); break; case 's': thisBibInfo.Add_Genre("government publication (state, provincial, terriorial, dependent)", "marcgt"); break; case 'a': thisBibInfo.Add_Genre("government publication (autonomous or semiautonomous component)", "marcgt"); break; } } if ((marc_type == "BKS") || (marc_type == "CNR")) { string nature_of_contents = field_008.Substring(24, 4); if (nature_of_contents.IndexOf("a") >= 0) thisBibInfo.Add_Genre("abstract or summary", "marcgt"); if (nature_of_contents.IndexOf("b") >= 0) thisBibInfo.Add_Genre("bibliography", "marcgt"); if (nature_of_contents.IndexOf("c") >= 0) thisBibInfo.Add_Genre("catalog", "marcgt"); if (nature_of_contents.IndexOf("d") >= 0) thisBibInfo.Add_Genre("dictionary", "marcgt"); if (nature_of_contents.IndexOf("r") >= 0) thisBibInfo.Add_Genre("directory", "marcgt"); if (nature_of_contents.IndexOf("k") >= 0) thisBibInfo.Add_Genre("discography", "marcgt"); if (nature_of_contents.IndexOf("e") >= 0) thisBibInfo.Add_Genre("encyclopedia", "marcgt"); if (nature_of_contents.IndexOf("q") >= 0) thisBibInfo.Add_Genre("filmography", "marcgt"); if (nature_of_contents.IndexOf("f") >= 0) thisBibInfo.Add_Genre("handbook", "marcgt"); if (nature_of_contents.IndexOf("i") >= 0) 
thisBibInfo.Add_Genre("index", "marcgt"); if (nature_of_contents.IndexOf("w") >= 0) thisBibInfo.Add_Genre("law report or digest", "marcgt"); if (nature_of_contents.IndexOf("g") >= 0) thisBibInfo.Add_Genre("legal article", "marcgt"); if (nature_of_contents.IndexOf("v") >= 0) thisBibInfo.Add_Genre("legal case and case notes", "marcgt"); if (nature_of_contents.IndexOf("l") >= 0) thisBibInfo.Add_Genre("legislation", "marcgt"); if (nature_of_contents.IndexOf("j") >= 0) thisBibInfo.Add_Genre("patent", "marcgt"); if (nature_of_contents.IndexOf("p") >= 0) thisBibInfo.Add_Genre("programmed text", "marcgt"); if (nature_of_contents.IndexOf("o") >= 0) thisBibInfo.Add_Genre("review", "marcgt"); if (nature_of_contents.IndexOf("s") >= 0) thisBibInfo.Add_Genre("statistics", "marcgt"); if (nature_of_contents.IndexOf("n") >= 0) thisBibInfo.Add_Genre("survey of literature", "marcgt"); if (nature_of_contents.IndexOf("t") >= 0) thisBibInfo.Add_Genre("technical report", "marcgt"); if (nature_of_contents.IndexOf("m") >= 0) thisBibInfo.Add_Genre("theses", "marcgt"); if (nature_of_contents.IndexOf("z") >= 0) thisBibInfo.Add_Genre("treaty", "marcgt"); if (nature_of_contents.IndexOf("2") >= 0) thisBibInfo.Add_Genre("offprint", "marcgt"); if (nature_of_contents.IndexOf("y") >= 0) thisBibInfo.Add_Genre("yearbook", "marcgt"); if (nature_of_contents.IndexOf("5") >= 0) thisBibInfo.Add_Genre("calendar", "marcgt"); if (nature_of_contents.IndexOf("6") >= 0) thisBibInfo.Add_Genre("comic/graphic novel", "marcgt"); if (field_008[29] == '1') thisBibInfo.Add_Genre("conference publication", "marcgt"); } if (marc_type == "CNR") { if (field_008[21] == 'd') thisBibInfo.Add_Genre("database", "marcgt"); if (field_008[21] == 'l') thisBibInfo.Add_Genre("loose-leaf", "marcgt"); if (field_008[21] == 'n') thisBibInfo.Add_Genre("newspaper", "marcgt"); if (field_008[21] == 'p') thisBibInfo.Add_Genre("periodical", "marcgt"); if (field_008[21] == 's') thisBibInfo.Add_Genre("series", "marcgt"); if (field_008[21] == 'w') 
thisBibInfo.Add_Genre("web site", "marcgt"); // Get the frequency switch (field_008[18]) { case 'a': thisBibInfo.Origin_Info.Add_Frequency("annual", "marcfrequency"); break; case 'b': thisBibInfo.Origin_Info.Add_Frequency("bimonthly", "marcfrequency"); break; case 'c': thisBibInfo.Origin_Info.Add_Frequency("semiweekly", "marcfrequency"); break; case 'd': thisBibInfo.Origin_Info.Add_Frequency("daily", "marcfrequency"); break; case 'e': thisBibInfo.Origin_Info.Add_Frequency("biweekly", "marcfrequency"); break; case 'f': thisBibInfo.Origin_Info.Add_Frequency("semiannual", "marcfrequency"); break; case 'g': thisBibInfo.Origin_Info.Add_Frequency("biennial", "marcfrequency"); break; case 'h': thisBibInfo.Origin_Info.Add_Frequency("triennial", "marcfrequency"); break; case 'i': thisBibInfo.Origin_Info.Add_Frequency("three times a week", "marcfrequency"); break; case 'j': thisBibInfo.Origin_Info.Add_Frequency("three times a month", "marcfrequency"); break; case 'k': thisBibInfo.Origin_Info.Add_Frequency("continuously updated", "marcfrequency"); break; case 'm': thisBibInfo.Origin_Info.Add_Frequency("monthly", "marcfrequency"); break; case 'q': thisBibInfo.Origin_Info.Add_Frequency("quarterly", "marcfrequency"); break; case 's': thisBibInfo.Origin_Info.Add_Frequency("semimonthly", "marcfrequency"); break; case 't': thisBibInfo.Origin_Info.Add_Frequency("three times a year", "marcfrequency"); break; case 'w': thisBibInfo.Origin_Info.Add_Frequency("weekly", "marcfrequency"); break; case 'z': thisBibInfo.Origin_Info.Add_Frequency("other", "marcfrequency"); break; } // Get the regularity switch (field_008[19]) { case 'n': thisBibInfo.Origin_Info.Add_Frequency("normalized irregular", "marcfrequency"); break; case 'r': thisBibInfo.Origin_Info.Add_Frequency("regular", "marcfrequency"); break; case 'x': thisBibInfo.Origin_Info.Add_Frequency("completely irregular", "marcfrequency"); break; } } if (marc_type == "MAP") { // Get the form of item if (field_008[25] == 'e') 
thisBibInfo.Add_Genre("atlas", "marcgt"); if (field_008[25] == 'd') thisBibInfo.Add_Genre("globe", "marcgt"); if (field_008[25] == 'a') thisBibInfo.Add_Genre("single map", "marcgt"); if (field_008[25] == 'b') thisBibInfo.Add_Genre("map series", "marcgt"); if (field_008[25] == 'c') thisBibInfo.Add_Genre("map serial", "marcgt"); // Get the projection, if there is one if ((field_008.Substring(22, 2) != " ") && (field_008.Substring(22, 2) != "||") && (field_008.Substring(22, 2) != "^^") && (field_008.Substring(22, 2) != "||")) { Subject_Info_Cartographics cartographicsSubject = new Subject_Info_Cartographics(); cartographicsSubject.ID = "SUBJ008"; cartographicsSubject.Projection = field_008.Substring(22, 2); thisBibInfo.Add_Subject(cartographicsSubject); } // Get whether this is indexed if (field_008[31] == '1') { thisBibInfo.Add_Genre("indexed", "marcgt"); } } if (marc_type == "REC") { string nature_of_recording = field_008.Substring(30, 2); if (nature_of_recording.IndexOf("a") >= 0) thisBibInfo.Add_Genre("autobiography", "marcgt"); if (nature_of_recording.IndexOf("b") >= 0) thisBibInfo.Add_Genre("biography", "marcgt"); if (nature_of_recording.IndexOf("c") >= 0) thisBibInfo.Add_Genre("conference publication", "marcgt"); if (nature_of_recording.IndexOf("d") >= 0) thisBibInfo.Add_Genre("drama", "marcgt"); if (nature_of_recording.IndexOf("e") >= 0) thisBibInfo.Add_Genre("essay", "marcgt"); if (nature_of_recording.IndexOf("f") >= 0) thisBibInfo.Add_Genre("fiction", "marcgt"); if (nature_of_recording.IndexOf("o") >= 0) thisBibInfo.Add_Genre("folktale", "marcgt"); if (nature_of_recording.IndexOf("k") >= 0) thisBibInfo.Add_Genre("humor, satire", "marcgt"); if (nature_of_recording.IndexOf("i") >= 0) thisBibInfo.Add_Genre("instruction", "marcgt"); if (nature_of_recording.IndexOf("t") >= 0) thisBibInfo.Add_Genre("interview", "marcgt"); if (nature_of_recording.IndexOf("j") >= 0) thisBibInfo.Add_Genre("language instruction", "marcgt"); if (nature_of_recording.IndexOf("m") >= 0) 
thisBibInfo.Add_Genre("memoir", "marcgt"); if (nature_of_recording.IndexOf("p") >= 0) thisBibInfo.Add_Genre("poetry", "marcgt"); if (nature_of_recording.IndexOf("r") >= 0) thisBibInfo.Add_Genre("rehearsal", "marcgt"); if (nature_of_recording.IndexOf("g") >= 0) thisBibInfo.Add_Genre("reporting", "marcgt"); if (nature_of_recording.IndexOf("s") >= 0) thisBibInfo.Add_Genre("sound", "marcgt"); if (nature_of_recording.IndexOf("l") >= 0) thisBibInfo.Add_Genre("speech", "marcgt"); } if (marc_type == "COM") { switch (field_008[26]) { case 'e': thisBibInfo.Add_Genre("database", "marcgt"); break; case 'f': thisBibInfo.Add_Genre("font", "marcgt"); break; case 'g': thisBibInfo.Add_Genre("game", "marcgt"); break; case 'a': thisBibInfo.Add_Genre("numeric data", "marcgt"); break; case 'h': thisBibInfo.Add_Genre("sound", "marcgt"); break; } } if (marc_type == "VIS") { switch (field_008[33]) { case 'a': thisBibInfo.Add_Genre("art original", "marcgt"); break; case 'c': thisBibInfo.Add_Genre("art reproduction", "marcgt"); break; case 'n': thisBibInfo.Add_Genre("chart", "marcgt"); break; case 'd': thisBibInfo.Add_Genre("diorama", "marcgt"); break; case 'f': thisBibInfo.Add_Genre("filmstrip", "marcgt"); break; case 'o': thisBibInfo.Add_Genre("flash card", "marcgt"); break; case 'k': thisBibInfo.Add_Genre("graphic", "marcgt"); break; case 'b': thisBibInfo.Add_Genre("kit", "marcgt"); break; case 'p': thisBibInfo.Add_Genre("microscope slide", "marcgt"); break; case 'q': thisBibInfo.Add_Genre("model", "marcgt"); break; case 'm': thisBibInfo.Add_Genre("motion picture", "marcgt"); break; case 'i': thisBibInfo.Add_Genre("picture", "marcgt"); break; case 'r': thisBibInfo.Add_Genre("realia", "marcgt"); break; case 's': thisBibInfo.Add_Genre("slide", "marcgt"); break; case 'l': thisBibInfo.Add_Genre("technical drawing", "marcgt"); break; case 'w': thisBibInfo.Add_Genre("toy", "marcgt"); break; case 't': thisBibInfo.Add_Genre("transparency", "marcgt"); break; case 'v': thisBibInfo.Add_Genre("video 
recording", "marcgt"); break; } } if (marc_type == "BKS") { switch (field_008[34]) { case 'a': thisBibInfo.Add_Genre("autobiography", "marcgt"); break; case 'b': thisBibInfo.Add_Genre("individual biography", "marcgt"); break; case 'c': thisBibInfo.Add_Genre("collective biography", "marcgt"); break; } switch (field_008[33]) { case 'a': thisBibInfo.Add_Genre("comic strip", "marcgt"); break; case 'd': thisBibInfo.Add_Genre("drama", "marcgt"); break; case 'e': thisBibInfo.Add_Genre("essay", "marcgt"); break; case 'h': thisBibInfo.Add_Genre("humor, satire", "marcgt"); break; case 'i': thisBibInfo.Add_Genre("letter", "marcgt"); break; case 'p': thisBibInfo.Add_Genre("poetry", "marcgt"); break; case 'f': thisBibInfo.Add_Genre("novel", "marcgt"); break; case 'j': thisBibInfo.Add_Genre("short story", "marcgt"); break; case 's': thisBibInfo.Add_Genre("speech", "marcgt"); break; case '0': thisBibInfo.Add_Genre("non-fiction", "marcgt"); break; case '1': thisBibInfo.Add_Genre("fiction", "marcgt"); break; } if ((field_008[30] == 'h') || (field_008[31] == 'h')) { thisBibInfo.Add_Genre("history", "marcgt"); } if (field_008[30] == '1') { thisBibInfo.Add_Genre("festschrift", "marcgt"); } } } // Look for target audience (521) foreach (MARC_Field thisRecord in record[521]) { if (thisRecord.has_Subfield('a')) { if (thisRecord.has_Subfield('b')) { thisBibInfo.Add_Target_Audience(thisRecord['a'].Replace("[", "(").Replace("]", ")") + " [ " + thisRecord['b'].Replace("[", "(").Replace("]", ")") + " ]"); } else { thisBibInfo.Add_Target_Audience(thisRecord['a'].Replace("[", "(").Replace("]", ")")); } } } // Look for target audince (008/22) if ((marc_type == "BKS") || (marc_type == "COM") || (marc_type == "REC") || (marc_type == "SCO") || (marc_type == "VIS")) { if (field_008.Length > 22) { switch (field_008[22]) { case 'd': thisBibInfo.Add_Target_Audience("adolescent", "marctarget"); break; case 'e': thisBibInfo.Add_Target_Audience("adult", "marctarget"); break; case 'g': 
thisBibInfo.Add_Target_Audience("general", "marctarget"); break; case 'b': thisBibInfo.Add_Target_Audience("primary", "marctarget"); break; case 'c': thisBibInfo.Add_Target_Audience("pre-adolescent", "marctarget"); break; case 'j': thisBibInfo.Add_Target_Audience("juvenile", "marctarget"); break; case 'a': thisBibInfo.Add_Target_Audience("preschool", "marctarget"); break; case 'f': thisBibInfo.Add_Target_Audience("specialized", "marctarget"); break; } } } // Get any project codes ( 852 ) if ((package != null) && (package.Behaviors.Aggregation_Count == 0)) { foreach (MARC_Field thisRecord in record[852]) { if ((thisRecord.Indicators.Trim().Length == 0) && (thisRecord.has_Subfield('b'))) { string allCodes = thisRecord['b']; string[] splitAllCodes = allCodes.Split("|;".ToCharArray()); foreach (string splitCode in splitAllCodes) { package.Behaviors.Add_Aggregation(splitCode.Trim()); } } } } }
/// <summary> Adds one fully-constructed polygon to the collection of polygons tied to this digital resource </summary>
/// <param name="Polygon"> Completely built coordinate polygon to associate with this resource </param>
public void Add_Polygon(Coordinate_Polygon Polygon) => polygons.Add(Polygon);
/// <summary> Computes the main spatial footprint string and greatest spatial distance for this
/// digital resource from its polygons and points </summary>
/// <remarks> Polygon data is preferred; among several polygons one labeled DISPLAY or MAIN wins.
/// If no polygon footprint could be built, the point collection is used instead.
/// The footprint string is "A|lat,long|lat,long..." for areas and "P|lat,long" for a single point.
/// Results are written to sobekcm_main_spatial_string and sobekcm_main_spatial_distance. </remarks>
private void Compute_SobekCM_Main_Spatial()
{
    // Set the distance to zero initially
    sobekcm_main_spatial_distance = 0;

    // Fields to hold the boundary positions
    double least_latitude = 0;
    double most_latitude = 0;
    double least_longitude = 0;
    double most_longitude = 0;

    // Build the spatial footprint string for this resource
    StringBuilder spatial_kml_builder = new StringBuilder(50);

    // Best-effort: any failure here must never block metadata processing (original behavior)
    try
    {
        // Check for areas first
        if (Polygon_Count > 0)
        {
            // If only one polygon, easy to assign
            if (Polygon_Count == 1)
            {
                spatial_kml_builder.Append("A|");
                Coordinate_Point first_point = Get_Polygon(0).Edge_Points[0];
                least_latitude = first_point.Latitude;
                most_latitude = first_point.Latitude;
                least_longitude = first_point.Longitude;
                most_longitude = first_point.Longitude;
                foreach (Coordinate_Point thisPoint in Get_Polygon(0).Edge_Points)
                {
                    if (thisPoint.Latitude < least_latitude) least_latitude = thisPoint.Latitude;
                    if (thisPoint.Latitude > most_latitude) most_latitude = thisPoint.Latitude;
                    if (thisPoint.Longitude < least_longitude) least_longitude = thisPoint.Longitude;
                    if (thisPoint.Longitude > most_longitude) most_longitude = thisPoint.Longitude;

                    // "A|" is two characters, so a longer builder already holds at least one point
                    if (spatial_kml_builder.Length > 2)
                        spatial_kml_builder.Append("|");
                    spatial_kml_builder.Append(thisPoint.Latitude + "," + thisPoint.Longitude);
                }
            }
            else
            {
                // Try to find the display polygon
                Coordinate_Polygon polygon = null;
                for (int i = 0; i < Polygon_Count; i++)
                {
                    Coordinate_Polygon thisPolygon = Get_Polygon(i);
                    if ((thisPolygon.Label.ToUpper().IndexOf("DISPLAY") >= 0) || (thisPolygon.Label.ToUpper().IndexOf("MAIN") >= 0))
                    {
                        polygon = thisPolygon;
                        break;
                    }
                }

                if (polygon != null)
                {
                    // Either a DISPLAY or MAIN polygon was found, so use its edge points and bounding box
                    spatial_kml_builder.Append("A|");
                    foreach (Coordinate_Point thisPoint in polygon.Edge_Points)
                    {
                        if (spatial_kml_builder.Length > 2)
                            spatial_kml_builder.Append("|");
                        spatial_kml_builder.Append(thisPoint.Latitude + "," + thisPoint.Longitude);
                    }

                    ReadOnlyCollection<Coordinate_Point> bounding_boxes = polygon.Bounding_Box;
                    if (bounding_boxes.Count == 2)
                    {
                        least_latitude = Math.Min(bounding_boxes[0].Latitude, bounding_boxes[1].Latitude);
                        most_latitude = Math.Max(bounding_boxes[0].Latitude, bounding_boxes[1].Latitude);
                        least_longitude = Math.Min(bounding_boxes[0].Longitude, bounding_boxes[1].Longitude);
                        most_longitude = Math.Max(bounding_boxes[0].Longitude, bounding_boxes[1].Longitude);
                    }
                }
                else
                {
                    // No DISPLAY/MAIN polygon: determine an overall bounding box across every polygon
                    Coordinate_Polygon polygon2 = Get_Polygon(0);
                    Coordinate_Point first_point = polygon2.Bounding_Box[0];
                    least_latitude = first_point.Latitude;
                    most_latitude = first_point.Latitude;
                    least_longitude = first_point.Longitude;
                    most_longitude = first_point.Longitude;
                    foreach (Coordinate_Point thisPoint in polygon2.Bounding_Box)
                    {
                        if (thisPoint.Latitude < least_latitude) least_latitude = thisPoint.Latitude;
                        if (thisPoint.Latitude > most_latitude) most_latitude = thisPoint.Latitude;
                        if (thisPoint.Longitude < least_longitude) least_longitude = thisPoint.Longitude;
                        if (thisPoint.Longitude > most_longitude) most_longitude = thisPoint.Longitude;
                    }
                    for (int i = 1; i < Polygon_Count; i++)
                    {
                        Coordinate_Polygon thisPolygon = Get_Polygon(i);
                        foreach (Coordinate_Point thisPoint in thisPolygon.Bounding_Box)
                        {
                            if (thisPoint.Latitude < least_latitude) least_latitude = thisPoint.Latitude;
                            if (thisPoint.Latitude > most_latitude) most_latitude = thisPoint.Latitude;
                            if (thisPoint.Longitude < least_longitude) least_longitude = thisPoint.Longitude;
                            if (thisPoint.Longitude > most_longitude) most_longitude = thisPoint.Longitude;
                        }
                    }

                    // A degenerate (zero-area) box is written as a single point instead
                    if ((least_latitude != most_latitude) || (least_longitude != most_longitude))
                    {
                        spatial_kml_builder.Append("A|" + least_latitude + "," + least_longitude + "|" + most_latitude + "," + most_longitude);
                    }
                    else
                    {
                        spatial_kml_builder.Append("P|" + least_latitude + "," + least_longitude);
                    }
                }
            }

            // Since this is an area, compute the greatest distance (diagonal of the box, in degrees)
            double latitude_distance = Math.Abs(most_latitude - least_latitude);
            double longitude_distance = Math.Abs(most_longitude - least_longitude);
            sobekcm_main_spatial_distance = Math.Sqrt(Math.Pow(latitude_distance, 2) + Math.Pow(longitude_distance, 2));
        }
    }
    catch
    {
        // Swallowed by design: spatial computation must never throw to callers
    }

    try
    {
        // Try to build the spatial footprint from points if there was no area possible
        if ((spatial_kml_builder.Length == 0) && (Point_Count > 0))
        {
            // If only one point, this is easy!
            if (Point_Count == 1)
            {
                Coordinate_Point thisPoint = points[0];
                spatial_kml_builder.Append("P|" + thisPoint.Latitude + "," + thisPoint.Longitude);
            }
            else
            {
                Coordinate_Point first_point = points[0];
                least_latitude = first_point.Latitude;
                most_latitude = least_latitude;
                least_longitude = first_point.Longitude;
                // BUG FIX: previously initialized from least_latitude, which corrupted the
                // longitude extent of the bounding box for multi-point resources
                most_longitude = least_longitude;
                foreach (Coordinate_Point thisPoint in points)
                {
                    if (thisPoint.Latitude < least_latitude) least_latitude = thisPoint.Latitude;
                    if (thisPoint.Latitude > most_latitude) most_latitude = thisPoint.Latitude;
                    if (thisPoint.Longitude < least_longitude) least_longitude = thisPoint.Longitude;
                    if (thisPoint.Longitude > most_longitude) most_longitude = thisPoint.Longitude;
                }
                if ((least_latitude != most_latitude) || (least_longitude != most_longitude))
                {
                    // Since this is really points, the distance value deliberately remains zero
                    spatial_kml_builder.Append("A|" + least_latitude + "," + least_longitude + "|" + most_latitude + "," + most_longitude);
                }
                else
                {
                    spatial_kml_builder.Append("P|" + least_latitude + "," + least_longitude);
                }
            }
        }
    }
    catch
    {
        // Swallowed by design (see above)
    }

    sobekcm_main_spatial_string = spatial_kml_builder.ToString();
}
/// <summary> Chance for this metadata module to perform any additional database work
/// such as saving digital resource data into custom tables </summary>
/// <param name="ItemID"> Primary key for this item within the SobekCM database </param>
/// <param name="DB_ConnectionString"> Connection string for the current database </param>
/// <param name="BibObject"> Entire resource, in case there are dependencies between this module and something in the full resource </param>
/// <param name="Error_Message"> In the case of an error, this contains text of the error </param>
/// <returns> TRUE if no error occurred, otherwise FALSE </returns>
/// <remarks> This module currently saves the coordinate information to the database by calling
/// the stored procedure SobekCM_Save_Item_Footprint </remarks>
public bool Save_Additional_Info_To_Database(int ItemID, string DB_ConnectionString, SobekCM_Item BibObject, out string Error_Message)
{
    // Set the default error message
    Error_Message = String.Empty;

    // Save each and every coordinate point ( -1.0 marks the bounding-box arguments as unused )
    // NOTE(review): Error_Message is overwritten on each call, so only the last call's error survives
    foreach (Coordinate_Point thisPoint in Points)
    {
        Save_Item_Footprint(ItemID, thisPoint.Latitude, thisPoint.Longitude, -1.0, -1.0, -1.0, -1.0, DB_ConnectionString, out Error_Message);
    }

    // Add each polygon (bounding box only)
    double rect_latitude_a;   // minimum latitude encountered so far
    double rect_longitude_a;  // minimum longitude encountered so far
    double rect_latitude_b;   // maximum latitude encountered so far
    double rect_longitude_b;  // maximum longitude encountered so far
    for (int rect_index = 0; rect_index < Polygon_Count; rect_index++)
    {
        try
        {
            // Get the polygon
            Coordinate_Polygon polygon = Get_Polygon(rect_index);

            // Set initial values ( -1.0 acts as the "unset" sentinel checked below )
            // NOTE(review): a legitimate coordinate of exactly -1.0 would be mistaken for "unset" — confirm acceptable
            rect_latitude_a = -1.0;
            rect_longitude_a = -1.0;
            rect_latitude_b = -1.0;
            rect_longitude_b = -1.0;

            // Step through each point, accumulating the polygon's axis-aligned bounding box
            double this_latitude;
            double this_longitude;
            bool first_point = true;
            foreach (Coordinate_Point thisPoint in polygon.Edge_Points)
            {
                try
                {
                    this_latitude = thisPoint.Latitude;
                    this_longitude = thisPoint.Longitude;
                    if (first_point)
                    {
                        // Seed all four bounds from the first edge point
                        rect_latitude_a = this_latitude;
                        rect_longitude_a = this_longitude;
                        rect_latitude_b = this_latitude;
                        rect_longitude_b = this_longitude;
                        first_point = false;
                    }
                    else
                    {
                        // Widen the bounds as needed for each subsequent edge point
                        if (this_latitude < rect_latitude_a)
                        {
                            rect_latitude_a = this_latitude;
                        }
                        if (this_latitude > rect_latitude_b)
                        {
                            rect_latitude_b = this_latitude;
                        }
                        if (this_longitude < rect_longitude_a)
                        {
                            rect_longitude_a = this_longitude;
                        }
                        if (this_longitude > rect_longitude_b)
                        {
                            rect_longitude_b = this_longitude;
                        }
                    }
                }
                catch
                {
                    // Best-effort: a bad edge point is skipped rather than aborting the polygon
                }
            }

            // Ensure the values aren't all still -1.0 ( i.e., at least one edge point was read )
            if ((rect_latitude_a != -1.0) && (rect_longitude_a != -1.0))
            {
                Save_Item_Footprint(ItemID, -1.0, -1.0, rect_latitude_a, rect_longitude_a, rect_latitude_b, rect_longitude_b, DB_ConnectionString, out Error_Message);
            }
        }
        catch
        {
            // Best-effort: a failing polygon save does not stop the remaining polygons
        }
    }
    return(true);
}
/// <summary> Removes one specific polygon's spatial information from this digital resource </summary>
/// <param name="PolygonToRemove"> The polygon to remove from this resource's polygon collection </param>
public void Clear_Specific_Polygon(Coordinate_Polygon PolygonToRemove) => polygons.Remove(PolygonToRemove);
/// <summary> Removes the indicated polygon from the set of polygons associated with these coordinates </summary>
/// <param name="Polygon_To_Remove"> Polygon which should no longer be associated with these coordinates </param>
public void Remove_Polygon(Coordinate_Polygon Polygon_To_Remove) => polygons.Remove(Polygon_To_Remove);
/// <summary> Reads the geo-spatial metadata section from an open XML reader and populates the
/// provided geo-spatial information object with every point, line, polygon, and circle found </summary>
/// <param name="Input_XmlReader"> Open XML reader, positioned within the geo-spatial metadata section </param>
/// <param name="geoInfo"> Geo-spatial information object to populate from the read GML elements </param>
/// <param name="Options"> Dictionary of any options for this metadata reading ( currently unused ) </param>
/// <returns> TRUE always </returns>
/// <remarks> All numeric values are now parsed with the invariant culture, since GML coordinate
/// text always uses '.' as the decimal separator; previously the server's current culture was
/// used, which misparsed coordinates on comma-decimal locales. </remarks>
private bool Read_Metadata_Section(XmlReader Input_XmlReader, GeoSpatial_Information geoInfo, Dictionary<string, object> Options)
{
    do // Loop through reading each XML node
    {
        // Only opening elements start a new GML object
        if (Input_XmlReader.NodeType == XmlNodeType.Element)
        {
            switch (Input_XmlReader.Name)
            {
                case "gml:Point":
                    // Read the featureType and label attributes
                    string pointFeatureType = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("featureType"))
                        pointFeatureType = Input_XmlReader.Value;
                    string pointLabel = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("label"))
                        pointLabel = Input_XmlReader.Value;

                    // Read the interior of the point element until its end element
                    do
                    {
                        if (Input_XmlReader.NodeType == XmlNodeType.EndElement && Input_XmlReader.Name == "gml:Point")
                            break;
                        if (Input_XmlReader.NodeType == XmlNodeType.Element)
                        {
                            switch (Input_XmlReader.Name)
                            {
                                case "gml:Coordinates":
                                    Input_XmlReader.Read();
                                    if ((Input_XmlReader.NodeType == XmlNodeType.Text) && (Input_XmlReader.Value.Trim().Length > 0))
                                    {
                                        // Coordinates are "latitude,longitude"
                                        var items = Input_XmlReader.Value.Split(',');
                                        double latitude = Parse_Gml_Double(items[0]);
                                        double longitude = Parse_Gml_Double(items[1]);
                                        geoInfo.Add_Point(latitude, longitude, pointLabel, pointFeatureType);
                                    }
                                    break;
                            }
                        }
                    } while (Input_XmlReader.Read());
                    break;

                case "gml:Line":
                    // Read the featureType and label attributes
                    string lineFeatureType = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("featureType"))
                        lineFeatureType = Input_XmlReader.Value;
                    string lineLabel = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("label"))
                        lineLabel = Input_XmlReader.Value;

                    // Read the interior of the line element until its end element
                    do
                    {
                        if (Input_XmlReader.NodeType == XmlNodeType.EndElement && Input_XmlReader.Name == "gml:Line")
                            break;
                        if (Input_XmlReader.NodeType == XmlNodeType.Element)
                        {
                            switch (Input_XmlReader.Name)
                            {
                                case "gml:Coordinates":
                                    Input_XmlReader.Read();
                                    if ((Input_XmlReader.NodeType == XmlNodeType.Text) && (Input_XmlReader.Value.Trim().Length > 0))
                                    {
                                        // Parse the raw text into an alternating lat/long list
                                        List<double> latLongs = Parse_Coordinate_Doubles(Input_XmlReader.Value);

                                        // In pairs, assign the points to a new line
                                        Coordinate_Line newline = new Coordinate_Line();
                                        int i = 0;
                                        while ((i + 2) <= latLongs.Count)
                                        {
                                            newline.Add_Point(latLongs[i], latLongs[i + 1], "line" + i);
                                            i += 2;
                                        }
                                        newline.FeatureType = lineFeatureType;
                                        newline.Label = lineLabel;
                                        geoInfo.Add_Line(newline);
                                    }
                                    break;
                            }
                        }
                    } while (Input_XmlReader.Read());
                    break;

                case "gml:Polygon":
                    // Read the featureType, polygonType, label, and rotation attributes
                    string polygonFeatureType = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("featureType"))
                        polygonFeatureType = Input_XmlReader.Value;
                    string polygonPolygonType = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("polygonType"))
                        polygonPolygonType = Input_XmlReader.Value;
                    string polygonLabel = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("label"))
                        polygonLabel = Input_XmlReader.Value;
                    double polygonRotation = 0;
                    if (Input_XmlReader.MoveToAttribute("rotation"))
                        polygonRotation = Parse_Gml_Double(Input_XmlReader.Value);

                    // Read the interior of the polygon element until its end element
                    do
                    {
                        if (Input_XmlReader.NodeType == XmlNodeType.EndElement && Input_XmlReader.Name == "gml:Polygon")
                            break;
                        if (Input_XmlReader.NodeType == XmlNodeType.Element)
                        {
                            switch (Input_XmlReader.Name)
                            {
                                case "gml:Coordinates":
                                    Input_XmlReader.Read();
                                    if ((Input_XmlReader.NodeType == XmlNodeType.Text) && (Input_XmlReader.Value.Trim().Length > 0))
                                    {
                                        // Parse the raw text into an alternating lat/long list
                                        List<double> latLongs = Parse_Coordinate_Doubles(Input_XmlReader.Value);

                                        // In pairs, assign the edge points to a new polygon
                                        Coordinate_Polygon newPoly = new Coordinate_Polygon();
                                        int i = 0;
                                        while ((i + 2) <= latLongs.Count)
                                        {
                                            newPoly.Add_Edge_Point(latLongs[i], latLongs[i + 1]);
                                            i += 2;
                                        }
                                        newPoly.FeatureType = polygonFeatureType;
                                        newPoly.PolygonType = polygonPolygonType;
                                        newPoly.Label = polygonLabel;
                                        newPoly.Rotation = polygonRotation;
                                        geoInfo.Add_Polygon(newPoly);
                                    }
                                    break;
                            }
                        }
                    } while (Input_XmlReader.Read());
                    break;

                case "gml:Circle":
                    // Read the featureType, label, and radius attributes
                    string circleFeatureType = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("featureType"))
                        circleFeatureType = Input_XmlReader.Value;
                    string circleLabel = String.Empty;
                    if (Input_XmlReader.MoveToAttribute("label"))
                        circleLabel = Input_XmlReader.Value;
                    double circleRadius = 0;
                    if (Input_XmlReader.MoveToAttribute("radius"))
                        circleRadius = Parse_Gml_Double(Input_XmlReader.Value);

                    // Read the interior of the circle element until its end element
                    do
                    {
                        if (Input_XmlReader.NodeType == XmlNodeType.EndElement && Input_XmlReader.Name == "gml:Circle")
                            break;
                        if (Input_XmlReader.NodeType == XmlNodeType.Element)
                        {
                            switch (Input_XmlReader.Name)
                            {
                                case "gml:Coordinates":
                                    Input_XmlReader.Read();
                                    if ((Input_XmlReader.NodeType == XmlNodeType.Text) && (Input_XmlReader.Value.Trim().Length > 0))
                                    {
                                        // Coordinates are the "latitude,longitude" center of the circle
                                        var items = Input_XmlReader.Value.Split(',');
                                        double latitude = Parse_Gml_Double(items[0]);
                                        double longitude = Parse_Gml_Double(items[1]);
                                        Coordinate_Circle newCircle = new Coordinate_Circle(latitude, longitude, circleRadius, circleLabel, circleFeatureType);
                                        geoInfo.Add_Circle(newCircle);
                                    }
                                    break;
                            }
                        }
                    } while (Input_XmlReader.Read());
                    break;
            }
        }
    } while (Input_XmlReader.Read());
    return true;
}

/// <summary> Parses one numeric token using the invariant culture, since GML numeric text
/// always uses '.' as the decimal separator regardless of the server locale </summary>
/// <param name="Value"> Raw numeric text to parse </param>
/// <returns> Parsed double value </returns>
private static double Parse_Gml_Double(string Value)
{
    return double.Parse(Value, System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture);
}

/// <summary> Extracts every numeric value (alternating latitude/longitude) from a raw
/// GML coordinate string, accepting any non-numeric characters as separators </summary>
/// <param name="RawValue"> Raw coordinate text from the gml:Coordinates element </param>
/// <returns> List of the parsed doubles, in document order </returns>
private static List<double> Parse_Coordinate_Doubles(string RawValue)
{
    List<double> values = new List<double>();
    StringBuilder numberBuilder = new StringBuilder();
    // Trailing space acts as a sentinel so the final number is flushed
    foreach (char c in RawValue + ' ')
    {
        if ((Char.IsNumber(c)) || (c == '.') || (c == '-'))
        {
            numberBuilder.Append(c);
        }
        else if (numberBuilder.Length > 0)
        {
            values.Add(Parse_Gml_Double(numberBuilder.ToString()));
            numberBuilder.Remove(0, numberBuilder.Length);
        }
    }
    return values;
}
/// <summary> Chance for this metadata module to perform any additional database work
/// such as saving digital resource data into custom tables </summary>
/// <param name="ItemID"> Primary key for this item within the SobekCM database </param>
/// <param name="DB_ConnectionString"> Connection string for the current database </param>
/// <param name="BibObject"> Entire resource, in case there are dependencies between this module and something in the full resource </param>
/// <param name="Error_Message"> In the case of an error, this contains text of the error </param>
/// <returns> TRUE if no error occurred, otherwise FALSE </returns>
/// <remarks> This module currently saves the coordinate information to the database by calling
/// the stored procedure SobekCM_Save_Item_Footprint </remarks>
public bool Save_Additional_Info_To_Database(int ItemID, string DB_ConnectionString, SobekCM_Item BibObject, out string Error_Message)
{
    // Set the default error message
    Error_Message = String.Empty;

    // Clear all item footprint info

    // Save each and every coordinate point ( -1.0 marks the bounding-box arguments as unused )
    // NOTE(review): Error_Message is overwritten on each call, so only the last call's error survives
    foreach (Coordinate_Point thisPoint in Points)
    {
        //create kml string (just a sample)
        // NOTE(review): lat/long are formatted with the current culture here — confirm invariant formatting is not required by consumers
        string pointKMLString = "<Placemark><name>" + thisPoint.Label + "</name><point><coordinates>" + thisPoint.Latitude + "," + thisPoint.Longitude + "</coordinates></point></Placemark>";
        Save_Item_Footprint(ItemID, thisPoint.Latitude, thisPoint.Longitude, -1.0, -1.0, -1.0, -1.0, pointKMLString, DB_ConnectionString, out Error_Message);
    }

    // Add each polygon (bounding box only)
    for (int rect_index = 0; rect_index < Polygon_Count; rect_index++)
    {
        try
        {
            // Get the polygon
            Coordinate_Polygon polygon = Get_Polygon(rect_index);

            // Set initial values ( -1.0 acts as the "unset" sentinel checked below )
            // NOTE(review): a legitimate coordinate of exactly -1.0 would be mistaken for "unset" — confirm acceptable
            double rect_latitude_a = -1.0;
            double rect_longitude_a = -1.0;
            double rect_latitude_b = -1.0;
            double rect_longitude_b = -1.0;

            // Step through each point, accumulating the polygon's axis-aligned bounding box
            bool first_point = true;
            foreach (Coordinate_Point thisPoint in polygon.Edge_Points)
            {
                try
                {
                    double this_latitude = thisPoint.Latitude;
                    double this_longitude = thisPoint.Longitude;
                    if (first_point)
                    {
                        // Seed all four bounds from the first edge point
                        rect_latitude_a = this_latitude;
                        rect_longitude_a = this_longitude;
                        rect_latitude_b = this_latitude;
                        rect_longitude_b = this_longitude;
                        first_point = false;
                    }
                    else
                    {
                        // Widen the bounds as needed for each subsequent edge point
                        if (this_latitude < rect_latitude_a)
                        {
                            rect_latitude_a = this_latitude;
                        }
                        if (this_latitude > rect_latitude_b)
                        {
                            rect_latitude_b = this_latitude;
                        }
                        if (this_longitude < rect_longitude_a)
                        {
                            rect_longitude_a = this_longitude;
                        }
                        if (this_longitude > rect_longitude_b)
                        {
                            rect_longitude_b = this_longitude;
                        }
                    }
                }
                catch
                {
                    // Best-effort: a bad edge point is skipped rather than aborting the polygon
                }
            }

            // Ensure the values aren't all still -1.0 ( i.e., at least one edge point was read )
            if ((rect_latitude_a != -1.0) && (rect_longitude_a != -1.0))
            {
                //for just the polygon (notice long/lat sequence)
                string rect_KML_String = "<Placemark><name>" + polygon.Label + "</name><Polygon><outerBoundaryIs><LinearRing><coordinates>" + rect_longitude_a + "," + rect_latitude_a + " " + rect_longitude_b + "," + rect_latitude_b + "</coordinates></LinearRing></outerBoundaryIs></Polygon></Placemark>";
                //for an overlay kml (not finished, requires url and perhaps folder support
                //string rect_KML_String = "<GroundOverlay><name>" + polygon.Label + "</name><icon>url</icon><LatLonBox>" + rect_latitude_a + "," + rect_longitude_a + " " + rect_latitude_b + "," + rect_longitude_b + "</LatLonBox></GroundOverlay>";
                Save_Item_Footprint(ItemID, -1.0, -1.0, rect_latitude_a, rect_longitude_a, rect_latitude_b, rect_longitude_b, rect_KML_String, DB_ConnectionString, out Error_Message);
            }
        }
        catch
        {
            // Best-effort: a failing polygon save does not stop the remaining polygons
        }
    }
    return(true);
}
/// <summary> Builds and adds a rectangular polygon approximating an aerial tile's ground coverage,
/// given the point notation at the tile's upper right corner </summary>
/// <param name="Latitude_Point"> Decimal degree latitude of the upper right corner point notation </param>
/// <param name="Longitude_Point"> Decimal degree longitude of the upper right corner point notation </param>
/// <param name="Scale"> Scale of the aerial tile ( i.e., for a map of scale 1:20000, set S = 20,000 ) </param>
/// <param name="Tile_Width"> Width of the physical aerial tile </param>
/// <param name="Tile_Height"> Height of the physical aerial tile </param>
/// <param name="Earth_Radius"> Radius of the earth in the same units as the height and width above </param>
/// <param name="Label"> Label for this new aerial polygon </param>
/// <returns> Fully built coordinate polygon, already added to this item's polygons </returns>
public Coordinate_Polygon Add_Aerial_Polygon(double Latitude_Point, double Longitude_Point, int Scale, double Tile_Width, double Tile_Height, ulong Earth_Radius, string Label)
{
    // Derive the lower left corner: ground span = scale * physical size, converted to degrees
    double lowerLeftLat = Latitude_Point - ((Scale * Tile_Height * 180) / (Math.PI * Earth_Radius));

    // Longitude span must be corrected by cos(latitude) at the tile's mid-latitude
    double midLatRadians = Math.PI * (Latitude_Point + lowerLeftLat) / 360;
    double lowerLeftLong = Longitude_Point - ((Scale * Tile_Width * 180) / (Math.PI * Earth_Radius * Math.Cos(midLatRadians)));

    // Assemble the rectangle clockwise from the upper right corner
    Coordinate_Polygon tilePolygon = new Coordinate_Polygon();
    tilePolygon.Add_Edge_Point(Latitude_Point, Longitude_Point);
    tilePolygon.Add_Edge_Point(Latitude_Point, lowerLeftLong);
    tilePolygon.Add_Edge_Point(lowerLeftLat, lowerLeftLong);
    tilePolygon.Add_Edge_Point(lowerLeftLat, Longitude_Point);
    tilePolygon.Label = Label;

    // Track it with this item's other polygons before handing it back
    polygons.Add(tilePolygon);
    return tilePolygon;
}
/// <summary> Parses an incoming save message from the map-edit page and persists the resulting
/// geo-spatial data (item point, page overlays, POIs) to the METS file and the database </summary>
/// <param name="SendData"> Raw message from the page: a JSON-style wrapper around '~'-delimited
/// save commands, each of which is a '|'-delimited field list </param>
/// <exception cref="ApplicationException"> Thrown when any part of the parse/save process fails;
/// the original exception is preserved as the InnerException </exception>
public static void SaveContent(String SendData)
{
    try
    {
        // Strip the JSON-style wrapper the page places around the payload
        SendData = SendData.Replace("{\"sendData\": \"", "").Replace("{\"sendData\":\"", "");

        // Nothing to do for an empty message
        if (SendData.Length == 0)
            return;

        // Ensure we have a geo-spatial module in the digital resource
        GeoSpatial_Information resourceGeoInfo = currentItem.Get_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY) as GeoSpatial_Information;
        if (resourceGeoInfo == null)
        {
            // Create a new geo-spatial module, since one did not already exist
            resourceGeoInfo = new GeoSpatial_Information();
            currentItem.Add_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY, resourceGeoInfo);
        }

        // Get the pages (pre-order traversal of the physical structure tree)
        List<abstract_TreeNode> pages = currentItem.Divisions.Physical_Tree.Pages_PreOrder;

        // Build a label -> page lookup; unlabeled pages fall back to "Page N"
        // NOTE(review): pageLookup is never read below — kept for parity with the original; confirm before removing
        Dictionary<string, Page_TreeNode> pageLookup = new Dictionary<string, Page_TreeNode>();
        int page_index = 1;
        foreach (var abstractTreeNode in pages)
        {
            var pageNode = (Page_TreeNode) abstractTreeNode;
            if (pageNode.Label.Length == 0)
                pageLookup["Page " + page_index] = pageNode;
            else
                pageLookup[pageNode.Label] = pageNode;
            page_index++;
        }

        // Drop the trailing delimiter, then split the payload into individual save commands
        int index1 = SendData.LastIndexOf("~", StringComparison.Ordinal);
        string[] allSaves = SendData.Substring(0, index1).Split('~');

        // Holds the action field ("save"/"delete") of the command being processed
        string saveTypeHandle;

        // First pass: process item and overlay commands only — NOT POIs.
        // ORDER matters: these must be saved to the db before the POIs are saved.
        foreach (string t in allSaves)
        {
            // Drop the trailing delimiter, then split the command into its fields
            int index2 = t.LastIndexOf("|", StringComparison.Ordinal);
            string[] ar = t.Substring(0, index2).Split('|');

            // Field 0 is the action, field 1 the object type being saved/deleted
            saveTypeHandle = ar[0];
            string saveType = ar[1];

            if (saveTypeHandle == "save")
            {
                switch (saveType)
                {
                    #region item
                    case "item":
                        // Incoming lat/long arrives as "(lat,long)" — strip parens while converting
                        string[] temp1 = ar[2].Split(',');
                        double temp1Lat = Convert.ToDouble(temp1[0].Replace("(", ""));
                        double temp1Long = Convert.ToDouble(temp1[1].Replace(")", ""));

                        // Clear all previous main featureTypes (valid for an item because there is
                        // only ever one item), then add the replacement point
                        resourceGeoInfo.Clear_NonPOIs();
                        Coordinate_Point newPoint = new Coordinate_Point(temp1Lat, temp1Long, currentItem.METS_Header.ObjectID, "main");
                        resourceGeoInfo.Add_Point(newPoint);

                        // Persist to the database
                        SobekCM_Database.Save_Digital_Resource(currentItem, options);
                        break;
                    #endregion

                    #region overlay
                    case "overlay":
                        // Field 2 is the 1-based human page id; convert to a 0-based index
                        // NOTE(review): original questioned whether this is always minus-one of the human page id
                        int arrayId = (Convert.ToInt32(ar[2]) - 1);

                        // Apply the label to the page object
                        pages[arrayId].Label = ar[3];

                        // Get the geo-coordinate object for that page, if any
                        GeoSpatial_Information pageGeo = pages[arrayId].Get_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY) as GeoSpatial_Information;
                        if (pageGeo == null)
                        {
                            // No geo module on this page yet — create one and add a fresh polygon
                            pageGeo = new GeoSpatial_Information();
                            Coordinate_Polygon pagePolygon = new Coordinate_Polygon();
                            Apply_Overlay_Geometry(pagePolygon, ar);
                            pageGeo.Add_Polygon(pagePolygon);
                        }
                        else
                        {
                            try
                            {
                                // Update the existing polygon in place
                                Coordinate_Polygon pagePolygon = pageGeo.Polygons[0];
                                Apply_Overlay_Geometry(pagePolygon, ar);

                                // Clear all previous non-POIs for this page (NOTE: only valid while
                                // there is a single main page item), then re-add the polygon
                                pageGeo.Clear_NonPOIs();
                                pageGeo.Add_Polygon(pagePolygon);
                            }
                            catch (Exception)
                            {
                                // There were no polygons on the existing geo module — build one
                                try
                                {
                                    Coordinate_Polygon pagePolygon = new Coordinate_Polygon();
                                    Apply_Overlay_Geometry(pagePolygon, ar);

                                    // Clear previous non-POIs (see note above), then add the polygon
                                    pageGeo.Clear_NonPOIs();
                                    pageGeo.Add_Polygon(pagePolygon);
                                }
                                catch (Exception)
                                {
                                    // Deliberate best-effort: a malformed overlay command is skipped
                                }
                            }
                        }

                        // Attach the geo module to the page and persist to the database
                        pages[arrayId].Add_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY, pageGeo);
                        SobekCM_Database.Save_Digital_Resource(currentItem, options);
                        break;
                    #endregion
                }
            }
            else
            {
                if (saveTypeHandle == "delete")
                {
                    switch (saveType)
                    {
                        #region item
                        case "item":
                            // Remove the non-POI points for the item and persist
                            resourceGeoInfo.Clear_NonPOIPoints();
                            SobekCM_Database.Save_Digital_Resource(currentItem, options);
                            break;
                        #endregion

                        #region overlay
                        case "overlay":
                            try
                            {
                                // Field 2 is the 1-based human page id; convert to a 0-based index
                                int arrayId = (Convert.ToInt32(ar[2]) - 1);

                                // Get the geo-coordinate object for that page, if any
                                GeoSpatial_Information pageGeo = pages[arrayId].Get_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY) as GeoSpatial_Information;
                                if (pageGeo != null)
                                {
                                    // "Delete" is implemented by blanking the polygon and marking it hidden
                                    Coordinate_Polygon pagePolygon = pageGeo.Polygons[0];
                                    pagePolygon.Clear_Edge_Points();
                                    pagePolygon.Rotation = 0;
                                    pagePolygon.FeatureType = "hidden";
                                    pagePolygon.PolygonType = "hidden";

                                    // Clear previous non-POIs for this page (single-main-item assumption)
                                    pageGeo.Clear_NonPOIs();
                                    pageGeo.Add_Polygon(pagePolygon);
                                }

                                // Re-attach the geo module and persist to the database
                                pages[arrayId].Add_Metadata_Module(GlobalVar.GEOSPATIAL_METADATA_MODULE_KEY, pageGeo);
                                SobekCM_Database.Save_Digital_Resource(currentItem, options);
                            }
                            catch (Exception)
                            {
                                // Deliberate best-effort: a malformed delete command is skipped
                            }
                            break;
                        #endregion
                    }
                }
            }
        }

        // Guard so the POI-clear below fires only once across all commands
        bool firedOnce = true;

        // Second pass: process POI commands only
        foreach (string t in allSaves)
        {
            // Drop the trailing delimiter, then split the command into its fields
            int index2 = t.LastIndexOf("|", StringComparison.Ordinal);
            string[] ar = t.Substring(0, index2).Split('|');

            saveTypeHandle = ar[0];
            string saveType = ar[1];

            if (saveTypeHandle == "save")
            {
                switch (saveType)
                {
                    #region poi
                    case "poi":
                        // Clear previous POI points exactly once, before the first POI is added
                        if (firedOnce)
                        {
                            resourceGeoInfo.Clear_POIs();
                            firedOnce = false;
                        }

                        // Field 2 names the specific geometry (KML standard)
                        switch (ar[2])
                        {
                            case "marker":
                                // Incoming lat/long as "(lat,long)"
                                string[] temp2 = ar[4].Split(',');
                                double temp2Lat = Convert.ToDouble(temp2[0].Replace("(", ""));
                                double temp2Long = Convert.ToDouble(temp2[1].Replace(")", ""));
                                resourceGeoInfo.Add_Point(temp2Lat, temp2Long, ar[3], "poi");
                                break;

                            case "circle":
                                // Label in field 3, radius in field 5, center in field 4
                                Coordinate_Circle poiCircle = new Coordinate_Circle {Label = ar[3], Radius = Convert.ToDouble(ar[5]), FeatureType = "poi"};
                                string[] temp3 = ar[4].Split(',');
                                poiCircle.Latitude = Convert.ToDouble(temp3[0].Replace("(", ""));
                                poiCircle.Longitude = Convert.ToDouble(temp3[1].Replace(")", ""));
                                resourceGeoInfo.Add_Circle(poiCircle);
                                break;

                            case "rectangle":
                                // Bounds in field 4 as "(lat1,long1,lat2,long2)"
                                Coordinate_Polygon poiRectangle = new Coordinate_Polygon {Label = ar[3], FeatureType = "poi", PolygonType = "rectangle"};
                                string[] temp4 = ar[4].Split(',');
                                poiRectangle.Add_Edge_Point(Convert.ToDouble(temp4[0].Replace("(", "")), Convert.ToDouble(temp4[1].Replace(")", "")));
                                poiRectangle.Add_Edge_Point(Convert.ToDouble(temp4[2].Replace("(", "")), Convert.ToDouble(temp4[3].Replace(")", "")));
                                poiRectangle.Recalculate_Bounding_Box();
                                resourceGeoInfo.Add_Polygon(poiRectangle);
                                break;

                            case "polygon":
                                // Edge points begin at field 5, one "(lat,long)" pair per field
                                Coordinate_Polygon poiPolygon = new Coordinate_Polygon {Label = ar[3], FeatureType = "poi"};
                                for (int i2 = 5; i2 < ar.Length; i2++)
                                {
                                    string[] temp5 = ar[i2].Split(',');
                                    poiPolygon.Add_Edge_Point(Convert.ToDouble(temp5[0].Replace("(", "")), Convert.ToDouble(temp5[1].Replace(")", "")));
                                }
                                resourceGeoInfo.Add_Polygon(poiPolygon);
                                break;

                            case "polyline":
                                // Points begin at field 5, one "(lat,long)" pair per field
                                Coordinate_Line poiLine = new Coordinate_Line {Label = ar[3], FeatureType = "poi"};
                                for (int i2 = 5; i2 < ar.Length; i2++)
                                {
                                    string[] temp5 = ar[i2].Split(',');
                                    poiLine.Add_Point(Convert.ToDouble(temp5[0].Replace("(", "")), Convert.ToDouble(temp5[1].Replace(")", "")), "");
                                }
                                resourceGeoInfo.Add_Line(poiLine);
                                break;
                        }
                        break;
                    #endregion
                }
            }
        }

        #region prep saving dir
        // Resolve the user's in-process directory and the resource backup directory
        string userInProcessDirectory = UI_ApplicationCache_Gateway.Settings.User_InProcess_Directory(currentUser, "mapwork");
        string backupDirectory = UI_ApplicationCache_Gateway.Settings.Servers.Image_Server_Network + currentItem.Web.AssocFilePath + UI_ApplicationCache_Gateway.Settings.Resources.Backup_Files_Folder_Name;

        // Ensure the user's process directory exists
        if (!Directory.Exists(userInProcessDirectory))
            Directory.CreateDirectory(userInProcessDirectory);

        // Ensure the backup directory exists
        if (!Directory.Exists(backupDirectory))
            Directory.CreateDirectory(backupDirectory);

        // Build the production, timestamped-backup, and in-process METS file paths
        string resource_directory = UI_ApplicationCache_Gateway.Settings.Servers.Image_Server_Network + currentItem.Web.AssocFilePath;
        string current_mets = resource_directory + currentItem.METS_Header.ObjectID + ".mets.xml";
        string backup_mets = backupDirectory + "\\" + currentItem.METS_Header.ObjectID + "_" + DateTime.Now.Year.ToString() + DateTime.Now.Month.ToString() + DateTime.Now.Day.ToString() + "_" + DateTime.Now.Hour.ToString() + DateTime.Now.Minute.ToString() + DateTime.Now.Second.ToString() + ".mets.xml.BAK";
        string metsInProcessFile = userInProcessDirectory + "\\" + currentItem.BibID + "_" + currentItem.VID + ".mets.xml";
        #endregion

        #region Save mets and db
        // Save the item to the temporary (in-process) location
        currentItem.Save_METS(userInProcessDirectory + "\\" + currentItem.BibID + "_" + currentItem.VID + ".mets.xml");

        // Promote the temp METS to production, then remove the in-process copy
        File.Copy(metsInProcessFile, current_mets, true);
        File.Delete(metsInProcessFile);

        // Create a timestamped backup of the production METS
        File.Copy(current_mets, backup_mets, true);
        #endregion
    }
    catch (Exception ex)
    {
        // Wrap rather than swallow: the original exception is preserved as InnerException
        // so the root cause and stack trace survive for diagnostics
        throw new ApplicationException("MapEdit Save Error", ex);
    }
}

/// <summary> Applies the geometry fields of an overlay save command to a page polygon:
/// rectangle bounds (field 4), rotation (field 6), label (field 3), and the fixed
/// "main"/"rectangle" feature/polygon types </summary>
/// <param name="PagePolygon"> Polygon to (re)configure as the page's main rectangle overlay </param>
/// <param name="Fields"> '|'-split fields of the overlay save command </param>
private static void Apply_Overlay_Geometry(Coordinate_Polygon PagePolygon, string[] Fields)
{
    // Bounds arrive as "(lat1,long1,lat2,long2)" — strip the parens while converting
    string[] bounds = Fields[4].Split(',');
    PagePolygon.Clear_Edge_Points();
    PagePolygon.Add_Edge_Point(Convert.ToDouble(bounds[0].Replace("(", "")), Convert.ToDouble(bounds[1].Replace(")", "")));
    PagePolygon.Add_Edge_Point(Convert.ToDouble(bounds[2].Replace("(", "")), Convert.ToDouble(bounds[3].Replace(")", "")));
    PagePolygon.Recalculate_Bounding_Box();

    // Rotation is optional — default to 0 when field 6 is not parsable
    double rotation;
    PagePolygon.Rotation = Double.TryParse(Fields[6], out rotation) ? rotation : 0;

    // Explicitly set the feature type so it is always present
    PagePolygon.FeatureType = "main";
    PagePolygon.Label = Fields[3];
    PagePolygon.PolygonType = "rectangle";
}