public static SmashDataExplosion Load(DocumentParser file)
{
    SmashDataExplosion explosion = new SmashDataExplosion
    {
        Count = file.ReadInts(),
        StartDelay = file.ReadVector2(),
        Offset = file.ReadVector3(),
        XFactor = file.ReadVector2(),
        YFactor = file.ReadVector2(),
        ZFactor = file.ReadVector2(),
        FrameRate = file.ReadVector2(),
        ScalingFactor = file.ReadVector2(),
        RotationMode = file.ReadEnum<ExplosionRotationMode>()
    };

    int frameCount = file.ReadInt();

    for (int i = 0; i < frameCount; i++)
    {
        explosion.Frames.Add(new SmashDataExplosionFrame
        {
            Opacity = file.ReadInt(),
            Pixelmap = file.ReadLine()
        });
    }

    return explosion;
}
static void Main(string[] args)
{
    string inputDocument = @".\SampleInvoice.pdf";

    // Create DocumentParser instance
    using (DocumentParser documentParser = new DocumentParser("demo", "demo"))
    {
        // Add an internal generic template for typical invoices.
        // Note: if it does not parse all required fields, you should create
        // your own template using the Template Editor application.
        documentParser.AddTemplate("internal://invoice");

        Console.WriteLine($"Parsing \"{inputDocument}\"...");
        Console.WriteLine();

        // Parse document data in JSON format
        string jsonString = documentParser.ParseDocument(inputDocument, OutputFormat.JSON);

        // Display parsed data in console
        Console.WriteLine("Parsing results in JSON format:");
        Console.WriteLine();
        Console.WriteLine(jsonString);
    }

    Console.WriteLine();
    Console.WriteLine("Press any key to continue...");
    Console.ReadLine();
}
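A minimal follow-on sketch, reusing only the ByteScout.DocumentParser calls already shown in these examples: the same parse, with the JSON persisted to disk rather than printed. The output path and class name are hypothetical.

using System;
using System.IO;
using ByteScout.DocumentParser;

class SaveResultExample
{
    static void Main()
    {
        string inputDocument = @".\SampleInvoice.pdf";

        using (DocumentParser documentParser = new DocumentParser("demo", "demo"))
        {
            documentParser.AddTemplate("internal://invoice");

            // Parse to JSON, then persist instead of printing.
            string jsonString = documentParser.ParseDocument(inputDocument, OutputFormat.JSON);

            // The output path is an assumption for illustration.
            File.WriteAllText(@".\SampleInvoice.json", jsonString);
            Console.WriteLine(@"Saved results to .\SampleInvoice.json");
        }
    }
}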
public string DeleteObject(string text)
{
    var dp = new DocumentParser();
    var d = dp.Parse(text) as Document;

    if (d == null)
    {
        return "No object";
    }

    if (_lastCollectionUsed == null)
    {
        return "No collection";
    }

    using (var db = MDB.GetMongo())
    {
        if (d.Contains("_id") && d["_id"] != null)
        {
            var selector = new Document();
            selector["_id"] = d["_id"];
            db[DataBaseName][_lastCollectionUsed].Delete(selector);

            return string.Format("Object with _id: {1} deleted in collection '{0}'", _lastCollectionUsed, d["_id"]);
        }

        return "Object contains no id";
    }
}
//--------------------------------------------------------------------------
public string GetContents(LocationDetail locationDetail,
                          long fileOffsetFrom,
                          long fileOffsetTo,
                          out long contentsWordOffsetBegin,
                          out long contentsWordOffsetEnd)
{
    contentsWordOffsetBegin = 0;
    contentsWordOffsetEnd = 0;

    Document document = locationDetail.Document;
    string contents = DocumentParser.GetContents(document.LocalFile, fileOffsetFrom, fileOffsetTo);

    long binaryOffset = locationDetail.Location.FileOffset - fileOffsetFrom;
    contentsWordOffsetBegin = DocumentParser.IndexFromBinaryOffsetInContent(contents, binaryOffset);

    Word word = WordsService.Instance.GetWordById(locationDetail.Location.WordId);
    if (word == null)
    {
        throw new Exception("Word not found in DB?!");
    }

    contentsWordOffsetEnd = contentsWordOffsetBegin + word.Length;

    return contents;
}
protected override void ToolExportClick(object sender, EventArgs e)
{
    if (Procedure == null || Procedure.Data == null)
    {
        base.ToolExportClick(sender, e);
    }
    else
    {
        string fileName = Procedure.DataName.Replace(".", DateTime.Now.ToString("yyMMddHHmmss") + ".");

        using (var dialog = new SaveFileDialog() { InitialFileName = fileName })
        {
            if (dialog.Run(ParentWindow))
            {
                DocumentParser.Execute(Procedure, new ExecuteArgs() { Parameters = parameters, Result = Query });
                System.Diagnostics.Process.Start(dialog.FileName);
            }
        }
    }
}
public static async Task Run(Stream blobStream, string name, TraceWriter log)
{
    log.Info($"Processing blob: {name}");

    // parse the document to extract images
    IEnumerable<PageImage> pages = DocumentParser.Parse(blobStream).Pages;

    // create and apply the skill set to create annotations
    SkillSet<PageImage> skillSet = CreateCognitiveSkillSet();
    var annotations = await skillSet.ApplyAsync(pages);

    // commit them to Cosmos DB to be used by full-corpus skills such as Topics
    await cosmosDb.SaveAsync(annotations);

    // index the annotated document with Azure Search
    AnnotatedDocument document = new AnnotatedDocument(annotations.Select(a => a.Get<AnnotatedPage>("page-metadata")));
    var searchDocument = new SearchDocument(name)
    {
        Metadata = document.Metadata,
        Text = document.Text,
        LinkedEntities = annotations
            .SelectMany(a => a.Get<EntityLink[]>("linked-entities") ?? new EntityLink[0])
            .GroupBy(l => l.Name)
            .OrderByDescending(g => g.Max(l => l.Score))
            .Select(l => l.Key)
            .ToList(),
    };

    var batch = IndexBatch.MergeOrUpload(new[] { searchDocument });
    var result = await indexClient.Documents.IndexAsync(batch);

    if (!result.Results[0].Succeeded)
    {
        log.Error($"Index failed for {name}: {result.Results[0].ErrorMessage}");
    }
}
static void Main(string[] args)
{
    string inputDocument1 = @".\DigitalOcean-scanned.jpg";

    // Create DocumentParser instance
    using (DocumentParser documentParser = new DocumentParser("demo", "demo"))
    {
        // Enable Optical Character Recognition (OCR) in .Auto mode
        // (the SDK automatically checks whether OCR is needed or not)
        documentParser.OCRMode = OCRMode.Auto;

        // Set the location of the "tessdata" folder containing language data files
        documentParser.OCRLanguageDataFolder = @".\tessdata\";

        // Set OCR language: "eng" for English, "deu" for German, "fra" for French,
        // "spa" for Spanish, etc. - according to the files in /tessdata.
        // Find more language files at https://github.com/tesseract-ocr/tessdata/tree/3.04.00
        documentParser.OCRLanguage = "eng";

        Console.WriteLine($"Parsing \"{inputDocument1}\"...");
        Console.WriteLine();

        // Parse document data in JSON format
        string jsonString = documentParser.ParseDocument(inputDocument1, OutputFormat.JSON);

        // Display parsed data in console
        Console.WriteLine("Parsing results in JSON format:");
        Console.WriteLine();
        Console.WriteLine(jsonString);
    }

    Console.WriteLine();
    Console.WriteLine("Press any key to continue...");
    Console.ReadLine();
}
static void Main(string[] args)
{
    string customTemplate = @".\SampleTemplate.yml";
    string inputDocument = @".\SampleInvoice.pdf";

    // Create DocumentParser instance
    using (DocumentParser documentParser = new DocumentParser("demo", "demo"))
    {
        Console.WriteLine($"Loading template \"{customTemplate}\"...");
        Console.WriteLine();

        documentParser.AddTemplate(customTemplate);

        Console.WriteLine($"Parsing \"{inputDocument}\"...");
        Console.WriteLine();

        // Parse invoice data in JSON format
        string jsonString = documentParser.ParseDocument(inputDocument, OutputFormat.JSON);

        // Display parsed data in console
        Console.WriteLine("Parsing results in JSON format:");
        Console.WriteLine();
        Console.WriteLine(jsonString);
    }

    Console.WriteLine();
    Console.WriteLine("Press any key to continue...");
    Console.ReadLine();
}
//--------------------------------------------------------------------------
private Document GenerateDocumentModel(DocumentParser document)
{
    Document doc = new Document() { GutenbergId = document.GutenbergId };

    string value;

    if (!document.MetaData.TryGetValue("title", out value))
    {
        throw new Exception("Document is missing required field: title");
    }
    doc.Title = value;

    if (!document.MetaData.TryGetValue("author", out value))
    {
        throw new Exception("Document is missing required field: author");
    }
    doc.Author = value;

    if (document.MetaData.TryGetValue("release date", out value))
    {
        DateTime output;
        if (DateTime.TryParse(value, out output))
        {
            doc.ReleaseDate = output;
        }
    }

    // save the normalized document to our storage folder
    doc.LocalFile = SaveToStorage(document);
    doc.TableLoadState = Document.LoadState.NotComplete;

    return doc;
}
//--------------------------------------------------------------------------
public Phrase AddPhrase(string phrase)
{
    DocumentParser parsedPhrase = DocumentParser.FromPhrase(phrase);

    DocumentWord documentWord = null;
    List<Word> words = new List<Word>();

    while (parsedPhrase.GetNextWord(out documentWord))
    {
        Word word = WordsService.Instance.GetWord(documentWord.Text);
        words.Add(word);
    }

    if (words.Count < kMinimalWordsInPhrase)
    {
        throw new Exception("Phrases must have at least " + kMinimalWordsInPhrase + " words!");
    }

    Phrase resultPhrase = new Phrase() { Words = words };
    PhrasesDao.Insert(ref resultPhrase);

    return resultPhrase;
}
static void Main(string[] args)
{
    string inputDocument1 = @".\DigitalOcean.pdf";
    string inputDocument2 = @".\AmazonAWS.pdf";

    // Create DocumentParser instance
    using (DocumentParser documentParser = new DocumentParser("demo", "demo"))
    {
        Console.WriteLine($"Parsing \"{inputDocument1}\"...");
        Console.WriteLine();

        // Parse document data in JSON format
        string jsonString = documentParser.ParseDocument(inputDocument1, OutputFormat.JSON);

        // Display parsed data in console
        Console.WriteLine("Parsing results in JSON format:");
        Console.WriteLine();
        Console.WriteLine(jsonString);
        Console.WriteLine();

        Console.WriteLine($"Parsing \"{inputDocument2}\"...");
        Console.WriteLine();

        // Parse document data in YAML format
        string yamlString = documentParser.ParseDocument(inputDocument2, OutputFormat.YAML);

        // Display parsed data in console
        Console.WriteLine("Parsing results in YAML format:");
        Console.WriteLine();
        Console.WriteLine(yamlString);
    }

    Console.WriteLine();
    Console.WriteLine("Press any key to continue...");
    Console.ReadLine();
}
public static Noncar Load(string path)
{
    DocumentParser file = new DocumentParser(path);

    Noncar noncar = new Noncar { Name = Path.GetFileNameWithoutExtension(path) };

    noncar.Number = file.ReadInt();
    noncar.CentreOfMass = file.ReadVector3();
    noncar.CentreOfMassWhenAttached = file.ReadVector3();
    noncar.Bounds.Min = file.ReadVector3();
    noncar.Bounds.Max = file.ReadVector3();

    int numExtraPoints = file.ReadInt();
    for (int i = 0; i < numExtraPoints; i++)
    {
        noncar.ExtraPoints.Add(file.ReadVector3());
    }

    noncar.Mass = file.ReadVector2();
    noncar.AngularMomentum = file.ReadVector3();
    noncar.BendAngle = file.ReadSingle();
    noncar.TorqueToMove = file.ReadSingle();

    int numMaterials = file.ReadInt();
    for (int i = 0; i < numMaterials; i++)
    {
        noncar.Shrapnel.Add(file.ReadLine());
    }

    return noncar;
}
public static OpponentTXT Load(string path)
{
    DocumentParser file = new DocumentParser(path);
    OpponentTXT opponents = new OpponentTXT();

    int numOpponents = file.ReadInt();

    for (int i = 0; i < numOpponents; ++i)
    {
        opponents.Opponents.Add(new OpponentDetails
        {
            DriverName = file.ReadLine(),
            DriverShortName = file.ReadLine(),
            CarName = file.ReadLine(),
            StrengthRating = file.ReadInt(),
            CostToBuy = file.ReadInt(),
            NetworkAvailability = file.ReadEnum<NetworkAvailability>(),
            CarFilename = file.ReadLine(),
            TopSpeed = file.ReadLine(),
            KerbWeight = file.ReadLine(),
            To60 = file.ReadLine(),
            Bio = file.ReadLine()
        });
    }

    return opponents;
}
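A hypothetical usage sketch for the loader above; the data-file path is an illustrative assumption, not taken from the snippet.

// Sketch: load the opponent roster and report its size (path is an assumption).
OpponentTXT roster = OpponentTXT.Load(@"DATA\OPPONENT.TXT");
Console.WriteLine($"Loaded {roster.Opponents.Count} opponents");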
//--------------------------------------------------------------------------
private Document InsertDocument(DocumentParser document)
{
    // first, check if the document already exists
    Document doc = DocumentsDao.GetDocumentById(document.GutenbergId);
    if ((doc != null) && (doc.TableLoadState == Document.LoadState.Complete))
    {
        throw new Exception("document already exists!");
    }

    if (doc != null)
    {
        // if the document exists but the load state wasn't complete, it means
        // we probably failed to load it previously, so we run a clean-up
        // before trying to reload it
        CleanupDocument(doc);
        doc = null;
    }

    // check we have the minimal set of metadata
    doc = GenerateDocumentModel(document);
    DocumentsDao.Insert(doc);

    _documentCache[doc.Id] = doc;

    return doc;
}
public VehicleMaterialMap(DocumentParser doc)
{
    Name = doc.ReadNextLine();

    while (!doc.NextLineIsASection())
    {
        string[] mm = doc.ReadStringArray();

        switch (mm[0].ToLower())
        {
            case "shrapnel":
                Shrapnel = Vector3.Parse(mm[1]);
                break;

            case "localise":
                Localisation = mm[1];
                break;

            case "material_map_product_id":
                MaterialMapProductID = mm[1].ToInt();
                break;

            default:
                if (mm[1] == ":")
                {
                    Substitutions[mm[0]] = mm[2];
                }
                else
                {
                    throw new NotImplementedException($"Unknown MaterialMap parameter: {mm[0]}");
                }
                break;
        }
    }
}
static void Main(string[] args)
{
    // Step 1: Parse the PDF file with a template and generate JSON
    string inputPDF = "InputData/Test_Report_Format.pdf";
    string template = "InputData/TestReportFormat.yml";

    // Create DocumentParser instance
    DocumentParser docParser = new DocumentParser("demo", "demo");

    // Add template
    docParser.AddTemplate(template);

    // Parse document data in JSON format
    string jsonString = docParser.ParseDocument(inputPDF, OutputFormat.JSON);

    // Step 2: Parse JSON fields into a class model
    var oInpModel = JsonParserHelper.ParseJsonHL7Fields(jsonString);

    // Step 3: Get data in HL7 format
    var oHL7Format = Hl7Helper.GetHL7Format(oInpModel);

    // Store the HL7 file and open it with the default associated program
    var outputFile = "outputHl7.txt";
    System.IO.File.WriteAllText(outputFile, oHL7Format);
    Process.Start(outputFile);
}
static void Main(string[] args)
{
    // Step 1: Parse the PDF file with a template and generate JSON
    string inputPDF = "Sample_Files/Test_Report_Format.pdf";
    string template = "Sample_Files/TestReportFormat.yml";

    // Create DocumentParser instance
    DocumentParser docParser = new DocumentParser("demo", "demo");

    // Add template
    docParser.AddTemplate(template);

    // Parse document data in JSON format
    string jsonString = docParser.ParseDocument(inputPDF, ByteScout.DocumentParser.OutputFormat.JSON);

    // Step 2: Parse JSON fields into a class model
    var oInpModel = JsonParserHelper.ParseJsonHL7Fields(jsonString);

    // Step 3: Get data in HL7 format
    var oHL7Format = Hl7Helper.GetHL7Format(oInpModel);

    // Step 4: Write HL7 data to a spreadsheet
    var oResultFile = "output.xlsx";
    WriteToSpreadsheet(oHL7Format, oResultFile);

    // Open the generated file with the default application
    Process.Start(oResultFile);
}
public static ParsedDocument From(string fileName, string content)
{
    DocumentParser documentParser = new DocumentParser(content);
    ParsedNode rootNode = documentParser.Parse();

    return new ParsedDocument(fileName, rootNode);
}
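A hypothetical usage sketch for the factory above; the file path is an illustrative assumption, and only the signature shown is relied on.

// Sketch: read a file from disk and hand its name and text to the factory
// (the path is a hypothetical example).
string path = @".\example.txt";
ParsedDocument document = ParsedDocument.From(path, System.IO.File.ReadAllText(path));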
private void LoadButton_Click(object sender, EventArgs e)
{
    OpenFileDialog.Filter = "Microsoft Word 2007|*.docx|All files (*.*)|*.*";
    OpenFileDialog.FilterIndex = 1;
    OpenFileDialog.RestoreDirectory = true;

    if (OpenFileDialog.ShowDialog() == DialogResult.OK)
    {
        byte[] fileHash = FileHasher.HashMD5(OpenFileDialog.FileName);

        if (!fileRepository.CheckFileHash(fileHash) && fileRepository.InsertFileHashInDB(OpenFileDialog.FileName, fileHash))
        {
            Cursor.Current = Cursors.WaitCursor;

            parser = new DocumentParser(OpenFileDialog.FileName);

            // parse once and insert the resulting pairs
            var pairs = parser.ParseDocument();
            pairRepository.InsertPairs(pairs);

            Cursor.Current = Cursors.Arrow;
        }
        else
        {
            MessageBox.Show("File already exists in DB", "ScheduleApp");
        }
    }
}
public VehicleSuspensionFactors(DocumentParser doc)
{
    while (!doc.NextLineIsASection())
    {
        string[] sf = doc.ReadStringArray(2);

        switch (sf[0])
        {
            case "max_compression":
                MaxCompression = sf[1].ToSingle();
                break;

            case "ride_height":
                RideHeight = sf[1].ToSingle();
                break;

            case "max_steering_lock":
                MaxSteeringLock = sf[1].ToInt();
                break;

            case "max_extension":
                MaxExtension = sf[1].ToSingle();
                break;

            default:
                throw new NotImplementedException($"Unknown SuspensionFactor parameter: {sf[0]}");
        }
    }
}
private DocumentDAOImplementation()
{
    Logging.singlton(nameof(DocumentDAO));

    driver = DatabaseDriverImplementation.getInstance();
    parser = DocumentParserImplementation.getInstance();

    driver.createTable(DatabaseConstants.CREATE_DOCUMENT_TABLE);
}
public static CrushPointNeighbour Load(DocumentParser file)
{
    return new CrushPointNeighbour
    {
        VertexIndex = file.ReadInt(),
        Factor = file.ReadInt()
    };
}
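A sketch of how such a Load method is typically driven, assuming the enclosing file stores a neighbour count followed by that many entries, as the other count-prefixed loaders above do; the local list is hypothetical.

// Sketch: read a count-prefixed list of neighbours, mirroring the pattern of the
// other loaders ("file" is an open DocumentParser; the count prefix is an assumption).
int numNeighbours = file.ReadInt();
var neighbours = new List<CrushPointNeighbour>();
for (int i = 0; i < numNeighbours; i++)
{
    neighbours.Add(CrushPointNeighbour.Load(file));
}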
//--------------------------------------------------------------------------
// returns the filename
private FileInfo SaveToStorage(DocumentParser document)
{
    string filename = document.GutenbergId + ".txt";
    filename = Path.Combine(GlobalParamatersService.Configuration.Storage.FullName, filename);

    document.Save(new FileInfo(filename), true);

    return new FileInfo(filename);
}
public static void ParseBusinessLocation(this XElement element, EpcisEvent epcisEvent)
{
    foreach (var innerElement in element.Elements().Where(x => !new[] { "id", "corrective" }.Contains(x.Name.LocalName)))
    {
        epcisEvent.CustomFields.Add(DocumentParser.ParseCustomField(innerElement, epcisEvent, FieldType.BusinessLocationExtension));
    }

    epcisEvent.BusinessLocation = element.Element("id").Value;
}
public static void ParseReadPoint(this XElement element, EpcisEvent epcisEvent)
{
    epcisEvent.ReadPoint = element.Element("id").Value;

    foreach (var innerElement in element.Elements().Where(x => x.Name.Namespace != XNamespace.None))
    {
        epcisEvent.CustomFields.Add(DocumentParser.ParseCustomField(innerElement, epcisEvent, FieldType.ReadPointExtension));
    }
}
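A hypothetical usage sketch for the extension method above; the XML fragment, the SGLN value, and the bare EpcisEvent construction are illustrative assumptions, not taken from the library.

// Sketch: apply the extension to a minimal readPoint element
// (XML content and EpcisEvent construction are assumptions).
var element = XElement.Parse("<readPoint><id>urn:epc:id:sgln:0614141.00777.0</id></readPoint>");
var epcisEvent = new EpcisEvent();
element.ParseReadPoint(epcisEvent);
Console.WriteLine(epcisEvent.ReadPoint); // urn:epc:id:sgln:0614141.00777.0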
public void GivenContentFile_WhenGetEntryByPosition_ThenGetExpectedEntry()
{
    // Arrange
    var lines = new[]
    {
        "    _  _     _  _  _  _  _ ",
        "  | _| _||_||_ |_   ||_||_|",
        "  ||_  _|  | _||_|  ||_| _|",
        "                           ",
        " _  _  _  _  _  _  _  _  _ ",
        "| || || || || || || || || |",
        "|_||_||_||_||_||_||_||_||_|",
        "                           ",
        "                           ",
        "  |  |  |  |  |  |  |  |  |",
        "  |  |  |  |  |  |  |  |  |",
        "                           ",
        " _  _  _  _  _  _  _  _  _ ",
        " _| _| _| _| _| _| _| _| _|",
        "|_ |_ |_ |_ |_ |_ |_ |_ |_ ",
        "                           ",
    };

    var numberRecognizerMock = new Mock<INumberRecognizer>();

    // Act
    var documentParser = new DocumentParser(numberRecognizerMock.Object);
    var entry = documentParser.GetEntryByPosition(lines, 0).ToList();

    // Assert
    Assert.IsNotNull(entry, "entry should not be null");
    Assert.AreEqual(4, entry.Count, "4 lines should be returned");
    Assert.AreEqual(lines[0], entry[0]);
    Assert.AreEqual(lines[1], entry[1]);
    Assert.AreEqual(lines[2], entry[2]);

    entry = documentParser.GetEntryByPosition(lines, 1).ToList();
    Assert.IsNotNull(entry, "entry should not be null");
    Assert.AreEqual(4, entry.Count, "4 lines should be returned");
    Assert.AreEqual(lines[4], entry[0]);
    Assert.AreEqual(lines[5], entry[1]);
    Assert.AreEqual(lines[6], entry[2]);

    entry = documentParser.GetEntryByPosition(lines, 2).ToList();
    Assert.IsNotNull(entry, "entry should not be null");
    Assert.AreEqual(4, entry.Count, "4 lines should be returned");
    Assert.AreEqual(lines[8], entry[0]);
    Assert.AreEqual(lines[9], entry[1]);
    Assert.AreEqual(lines[10], entry[2]);

    entry = documentParser.GetEntryByPosition(lines, 3).ToList();
    Assert.IsNotNull(entry, "entry should not be null");
    Assert.AreEqual(4, entry.Count, "4 lines should be returned");
    Assert.AreEqual(lines[12], entry[0]);
    Assert.AreEqual(lines[13], entry[1]);
    Assert.AreEqual(lines[14], entry[2]);
}
public SemanticOperationHandler(SemanticsGlobalScope semanticsScope,
                                DocumentParser documentParser,
                                Scope<Object> semanticOperationVariableMap)
{
    this.semanticsScope = semanticsScope;
    this.documentParser = documentParser;

    // semanticOperationVariableMap.Add(
    //     SemanticOperationKeyWords.PURLCONNECTION_MIME,
    //     documentParser.purlConnection().mimeType());

    this.semanticOperationVariableMap = semanticOperationVariableMap;
}
public void CanParseBasePropertyValue()
{
    IEnumerable<ValueElement> values = DocumentParser.StronglyTypedArray(DocumentParser.Value)
        .Parse("[myValue]");

    IEnumerable<ValueElement> propertyValues = values as ValueElement[] ?? values.ToArray();

    Assert.Single(propertyValues);
    Assert.Equal("myValue", propertyValues.First().Value);
}
protected void ParseSingleResult<T>(string value, Action<JObject, T> assert)
{
    using (var reader = GenerateReader(value))
    {
        var documentParser = new DocumentParser(new ArangoDatabase());

        JObject jObject = null;
        var result = documentParser.ParseSingleResult<T>(reader, out jObject, true);

        assert(jObject, result);
    }
}
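A hypothetical usage sketch of the helper above, reusing the JsonSample.SingleResult payload and the Person field values exercised by the ParseSingle test that follows.

// Sketch: drive the generic helper and assert inside the callback
// (values match those used by the ParseSingle test below).
ParseSingleResult<Person>(JsonSample.SingleResult, (jObject, person) =>
{
    Assert.Equal(27, person.Age);
    Assert.Equal("raoof hojat", person.Fullname);
});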
public void ParseSingle()
{
    using (var reader = GenerateReader(JsonSample.SingleResult))
    {
        var documentParser = new DocumentParser(new ArangoDatabase());

        JObject jObject = null;
        var person = documentParser.ParseSingleResult<Person>(reader, out jObject, true);

        Assert.Equal(27, person.Age);
        Assert.Equal("raoof hojat", person.Fullname);
        Assert.Equal(172, person.Height);

        Assert.True(JObject.DeepEquals(jObject, JObject.Parse(JsonSample.SingleResult)));
    }
}
static void Main(string[] args)
{
    string templatePath = string.Format("{0}\\Template\\{1}", GetExecutionPath(), "Comments.docx");
    byte[] documentByte = File.ReadAllBytes(templatePath);

    DocumentParser parser = new DocumentParser();
    var comments = parser.GetCommentsFromDocument(documentByte);

    foreach (var comment in comments)
    {
        Console.WriteLine("----------------------------");
        Console.WriteLine(comment.CommentText);
        Console.WriteLine();
        Console.WriteLine(comment.CommentedText);
        Console.WriteLine("----------------------------");
    }

    Console.WriteLine("-- Ended --");
    Console.ReadLine();
}
public void ParseBatchList()
{
    using (var reader = GenerateReader(JsonSample.ListResult))
    {
        var db = new ArangoDatabase();
        var documentParser = new DocumentParser(db);

        BaseResult baseResult = null;
        var personList = documentParser.ParseBatchResult<Person>(reader, out baseResult);

        Assert.Equal(2, personList.Count);

        Assert.Equal(27, personList[0].Age);
        Assert.Equal("raoof hojat", personList[0].Fullname);
        Assert.Equal(172, personList[0].Height);

        Assert.Equal(7, personList[1].Age);
        Assert.Equal("hojat raoof", personList[1].Fullname);
        Assert.Equal(721, personList[1].Height);

        var info1 = db.FindDocumentInfo(personList[0]);
        Assert.NotNull(info1.Document);
        Assert.Equal("Person/KEY1", info1.Id);
        Assert.Equal("KEY1", info1.Key);
        Assert.Equal("REV1", info1.Rev);

        var info2 = db.FindDocumentInfo(personList[1]);
        Assert.NotNull(info2.Document);
        Assert.Equal("Person/KEY2", info2.Id);
        Assert.Equal("KEY2", info2.Key);
        Assert.Equal("REV2", info2.Rev);

        Assert.Equal(200, baseResult.Code);
        Assert.False(baseResult.Error);
    }
}
public void ParseBatchErrorDocumentWithNotThrowing()
{
    using (var reader = GenerateReader(JsonSample.Error))
    {
        var db = new ArangoDatabase();
        db.Setting.ThrowForServerErrors = false;

        var documentParser = new DocumentParser(db);

        BaseResult baseResult = null;
        var list = documentParser.ParseBatchResult<Person>(reader, out baseResult);

        Assert.Equal(0, list.Count);

        Assert.True(baseResult.Error);
        Assert.Equal(400, baseResult.Code);
        Assert.Equal("ERROR", baseResult.ErrorMessage);
        Assert.Equal(1202, baseResult.ErrorNum);
    }
}
public void ParseBatchErrorDocument()
{
    using (var reader = GenerateReader(JsonSample.Error))
    {
        var documentParser = new DocumentParser(new ArangoDatabase());

        BaseResult baseResult = null;
        Assert.Throws<ArangoServerException>(() => documentParser.ParseBatchResult<Person>(reader, out baseResult));
    }
}
public void ParseSingleError()
{
    using (var reader = GenerateReader(JsonSample.Error))
    {
        var documentParser = new DocumentParser(new ArangoDatabase());

        JObject jObject = null;
        var person = documentParser.ParseSingleResult<Person>(reader, out jObject, true);

        // person won't be null here because we have no base result yet
        Assert.Equal(0, person.Age);
        Assert.Null(person.Fullname);
        Assert.Equal(0, person.Height);
    }
}
public void ParseBatchDocument()
{
    using (var reader = GenerateReader(JsonSample.NestedSingleResult))
    {
        var documentParser = new DocumentParser(new ArangoDatabase());

        BaseResult baseResult = null;
        var personList = documentParser.ParseBatchResult<Person>(reader, out baseResult);

        Assert.Equal(1, personList.Count);

        var person = personList[0];
        Assert.Equal(27, person.Age);
        Assert.Equal("raoof hojat", person.Fullname);
        Assert.Equal(172, person.Height);

        Assert.Equal(200, baseResult.Code);
        Assert.False(baseResult.Error);
    }
}