// Verifies that a field written through the indexer is readable via the
// strongly-typed Title property.
public void TestProperty()
{
    const string title = "Mapreduce";

    var entry = new BibEntry();
    entry["Title"] = title;

    Assert.AreEqual(title, entry.Title);
}
// Verifies that Type normalizes its value to canonical casing ("InBook")
// regardless of the casing used by the caller.
public void TestSetType()
{
    var entry = new BibEntry { Type = "inbook" };
    Assert.AreEqual("InBook", entry.Type);

    entry.Type = "inBoOK";
    Assert.AreEqual("InBook", entry.Type);
}
// Verifies that the indexer is case-insensitive on the field name.
public void TestIndexer()
{
    const string title = "Mapreduce";
    var entry = new BibEntry { ["Title"] = title };

    foreach (var key in new[] { "title", "Title", "TitlE" })
    {
        Assert.AreEqual(title, entry[key]);
    }
}
// Converts a Misc record into a "misc" BibEntry and writes its textual
// form through mf.Escrever.
private void mapToBibEntry(Misc value)
{
    var entry = new BibEntry
    {
        Title = value.DocumentTitle,
        Key = value.Id,
        Year = value.Publication_Year,
        Type = "misc",
        Publisher = value.Publisher,
        Author = value.Authors
    };

    mf.Escrever(entry.ToString());
}
/// <summary>
/// Resolves an article for the given entry: uses its DOI directly, or
/// derives a DOI from its ISBN when only the ISBN is present.
/// </summary>
/// <param name="entry">The bibliography entry to look up.</param>
/// <returns>The result of the DOI-based <c>FindArticle</c> overload.</returns>
/// <exception cref="SciHubException">No DOI could be obtained.</exception>
public static async Task <string> FindArticle(BibEntry entry)
{
    var doi = entry["doi"];
    var isbn = entry["isbn"];
    // FIX: removed the unused `url` local (`entry["url"]` was read and never
    // used — dead code).
    if (doi == null && isbn != null)
    {
        doi = await IdentifierUtility.DoiFromIsbn(isbn);
    }
    if (doi != null)
    {
        return await FindArticle(doi);
    }
    throw new SciHubException(SciHubExceptionType.Article);
}
// Builds a minimal "online" BibEntry for a web page, taking the entry title
// from the page's <title> element, and returns its textual form.
public static async Task <string> SearchUrl(string url)
{
    var result = new BibEntry() { Type = "online" };

    var pageHtml = await WebUtility.SimpleTextRequest(url);
    var document = new HtmlDocument();
    document.LoadHtml(pageHtml);

    var pageTitle = document.DocumentNode.SelectSingleNode(".//title").InnerText;
    result.Title = pageTitle;
    return result.ToString();
}
/// <summary>
/// Compares two entries by "author + title" (lower-cased). At the first
/// differing character: Latin (a-z) and Cyrillic (а-я, ё) letters compare by
/// code point; a letter sorts after a non-letter; among two non-letters a
/// comma sorts first. Equal common prefixes are broken by length, longer
/// text first.
/// </summary>
private static int compareEntries(BibEntry a, BibEntry b)
{
    string textA = a.data["title"];
    string textB = b.data["title"];
    if (a.data.ContainsKey("author"))
    {
        textA = a.data["author"] + textA;
    }
    if (b.data.ContainsKey("author"))
    {
        textB = b.data["author"] + textB;
    }
    textA = textA.ToLower();
    textB = textB.ToLower();
    // FIX: was Math.Max(...), which indexed past the end of the shorter
    // string and threw IndexOutOfRangeException whenever one text was a
    // prefix of the other. Scanning only the common prefix is sufficient:
    // the length tie-break below handles the prefix case.
    int length = Math.Min(textA.Length, textB.Length);
    int result = 0;
    for (int i = 0; i < length; ++i)
    {
        if (textA[i] != textB[i])
        {
            bool isLetterA = ((textA[i] >= 'a') && (textA[i] <= 'z')) || ((textA[i] >= 'а') && (textA[i] <= 'я')) || (textA[i] == 'ё');
            bool isLetterB = ((textB[i] >= 'a') && (textB[i] <= 'z')) || ((textB[i] >= 'а') && (textB[i] <= 'я')) || (textB[i] == 'ё');
            if (isLetterA && isLetterB)
            {
                result = textA[i].CompareTo(textB[i]);
            }
            else if (isLetterA && !isLetterB)
            {
                // Letters sort after non-letters.
                result = 1;
            }
            else if (!isLetterA && isLetterB)
            {
                result = -1;
            }
            else
            {
                // Both non-letters: a comma wins (sorts first).
                if (textA[i] == ',')
                {
                    result = -1;
                }
                else if (textB[i] == ',')
                {
                    result = 1;
                }
                else
                {
                    result = 0;
                }
            }
            break;
        }
    }
    if (result == 0)
    {
        // Tie-break preserved from the original: longer text sorts first.
        result = textB.Length - textA.Length;
    }
    return result;
}
/// <summary>
/// Entry point. Reads a BibTeX file (Windows-1251), splits its entries into
/// "native" (Russian-titled or patents) and "foreign" groups, and writes a
/// sorted LaTeX thebibliography environment to the output file.
/// Usage: prog input.bib [output.tex]
/// </summary>
static void Main(string[] args)
{
    if (args.Length < 1)
    {
        Console.WriteLine("<ERROR> No file to process");
        return;
    }

    // Read the whole input in cp1251.
    string data = "";
    try
    {
        StreamReader file = new StreamReader(args[0], Encoding.GetEncoding(1251));
        data = file.ReadToEnd();
        file.Close();
    }
    catch (Exception e)
    {
        Console.WriteLine("<ERROR> {0}", e.ToString());
        return;
    }

    // Output defaults to the input name with a .tex extension; an explicit
    // second argument overrides it.
    // FIX: was `args.Length > 2`, which required THREE arguments before
    // args[1] was honored — an output file given as the second of two
    // arguments was silently ignored.
    string outputFile = Path.ChangeExtension(args[0], ".tex");
    if (args.Length > 1)
    {
        outputFile = args[1];
    }

    StreamWriter output;
    try
    {
        output = new StreamWriter(outputFile, false, Encoding.GetEncoding(1251));
    }
    catch (Exception e)
    {
        Console.WriteLine("<ERROR> {0}", e.ToString());
        return;
    }

    List<BibEntry> foreign = new List<BibEntry>();
    List<BibEntry> native = new List<BibEntry>();
    BibEntry entry;

    // Hand-rolled BibTeX scanner: consumes "@type{id, key = value, ...}"
    // records one at a time, tracking brace nesting in `level`.
    while (data.Length > 0)
    {
        int pos = data.IndexOf('@');
        if (pos < 0)
        {
            break;
        }
        data = data.Substring(pos + 1);
        pos = data.IndexOf('{');
        if (pos < 0)
        {
            break;
        }
        entry = new BibEntry();
        entry.type = data.Substring(0, pos).ToLower().Trim();
        data = data.Substring(pos + 1);
        pos = data.IndexOf(',');
        if (pos < 0)
        {
            break;
        }
        entry.id = data.Substring(0, pos).Trim();
        data = data.Substring(pos + 1);

        int level = 1;           // brace depth; 1 = directly inside the entry
        pos = 0;
        bool writeValue = false; // false: accumulating key, true: value
        string key = "";
        string value = "";
        while (level > 0)
        {
            if (pos >= data.Length)
            {
                break;
            }
            switch (data[pos])
            {
                case '\r':
                case '\n':
                    break;
                case ',':
                    if (level < 2)
                    {
                        // End of a "key = value" pair at entry level.
                        key = key.ToLower().Trim();
                        value = value.Replace('\t', ' ').Trim();
                        entry.data.Add(key, value);
                        key = "";
                        value = "";
                        writeValue = false;
                    }
                    else
                    {
                        goto default; // comma inside a braced value is literal
                    }
                    break;
                case '=':
                    if (level < 2)
                    {
                        writeValue = true;
                    }
                    else
                    {
                        goto default;
                    }
                    break;
                case '{':
                    level++;
                    break;
                case '}':
                    if (level < 2)
                    {
                        // Closing brace of the whole entry: flush last pair.
                        key = key.ToLower().Trim();
                        value = value.Replace('\t', ' ').Trim();
                        entry.data.Add(key, value);
                        key = "";
                        value = "";
                        writeValue = false;
                    }
                    level--;
                    break;
                default:
                    if (writeValue)
                    {
                        value += data[pos];
                    }
                    else
                    {
                        key += data[pos];
                    }
                    break;
            }
            pos++;
        }

        if (isNative(entry.data["title"]) || (entry.type == "patent"))
        {
            native.Add(entry);
        }
        else
        {
            foreign.Add(entry);
        }
        // NOTE(review): skips one character past the closing brace; could
        // overrun on a truncated final entry — preserved from the original.
        data = data.Substring(pos + 1);
    }

    //native.Sort(compareEntries);
    //foreign.Sort(compareEntries);

    output.WriteLine(@"\begin{thebibliography}{999}");
    output.WriteLine();
    List<KeyValuePair<string, string>> rNative = processEntries(native, true);
    List<KeyValuePair<string, string>> rForeign = processEntries(foreign, false);
    rNative.Sort(compareSimple);
    rForeign.Sort(compareSimple);
    foreach (KeyValuePair<string, string> pair in rNative)
    {
        output.WriteLine(@"\bibitem{" + pair.Key + @"}");
        output.WriteLine(pair.Value);
        output.WriteLine();
    }
    foreach (KeyValuePair<string, string> pair in rForeign)
    {
        output.WriteLine(@"\bibitem{" + pair.Key + @"}");
        output.WriteLine(pair.Value);
        output.WriteLine();
    }
    output.WriteLine(@"\end{thebibliography}");
    output.Close();
}
/// <summary>
/// Creates and seeds a new part.
/// </summary>
/// <param name="item">The item this part should belong to.</param>
/// <param name="roleId">The optional part role ID.</param>
/// <param name="factory">The part seeder factory. This is used
/// for layer parts, which need to seed a set of fragments.</param>
/// <returns>A new part.</returns>
/// <exception cref="ArgumentNullException">item or factory</exception>
public override IPart GetPart(IItem item, string roleId, PartSeederFactory factory)
{
    if (item == null)
    {
        throw new ArgumentNullException(nameof(item));
    }
    if (factory == null)
    {
        throw new ArgumentNullException(nameof(factory));
    }

    BibliographyPart part = new BibliographyPart();
    SetPartMetadata(part, roleId, item);

    Faker f = new Faker();
    // Seed 5 fake entries with random type, author(s), title and language.
    for (int i = 0; i < 5; i++)
    {
        // Next(1, 5) == 1 gives a 25% chance of picking two authors.
        BibEntry entry = new BibEntry
        {
            TypeId = SeedHelper.RandomPickOneOf(_typeIds),
            Authors = (from a in SeedHelper.RandomPickOf(_authors,
                Randomizer.Seed.Next(1, 5) == 1? 2:1)
                select ParseAuthor(a)).ToArray(),
            Title = f.Lorem.Sentence(3, 8),
            Language = SeedHelper.RandomPickOneOf(_languages)
        };
        // Non-site entries get a page range and a publication year within
        // the last 20 years.
        if (entry.TypeId != "site")
        {
            entry.FirstPage = (short)Randomizer.Seed.Next(1, 100);
            entry.LastPage = (short)(entry.FirstPage + Randomizer.Seed.Next(1, 20));
            entry.YearPub = (short)(DateTime.Now.Year - Randomizer.Seed.Next(0, 20));
        }
        // Type-specific fields: container/contributors for articles,
        // a web location for sites, an edition number otherwise.
        switch (entry.TypeId)
        {
            case "article-b":
                entry.Contributors = (from a in SeedHelper.RandomPickOf(_authors,
                    Randomizer.Seed.Next(1, 5) == 1 ? 2 : 1)
                    select ParseAuthor(a)).ToArray();
                entry.Container = f.Lorem.Sentence();
                break;
            case "article-j":
                entry.Container = SeedHelper.RandomPickOneOf(_journals);
                break;
            case "site":
                entry.Location = $"www.{f.Lorem.Word().ToLowerInvariant()}.com";
                break;
            default:
                entry.Edition = (short)Randomizer.Seed.Next(1, 3);
                break;
        }

        // keywords: 0-2 random keywords, only assigned when non-empty.
        List<Keyword> keywords = new List<Keyword>();
        for (int j = 0; j < Randomizer.Seed.Next(0, 3); j++)
        {
            keywords.Add(new Keyword
            {
                Language = SeedHelper.RandomPickOneOf(_languages),
                Value = f.Lorem.Word()
            });
        }
        if (keywords.Count > 0)
        {
            entry.Keywords = keywords.ToArray();
        }

        part.Entries.Add(entry);
    }
    return (part);
}
// Snapshots every readable, non-indexed string property of the entry into a
// name -> value dictionary via reflection.
static IDictionary <string, string?> ToPropertyDictionary(BibEntry entry)
{
    var stringProperties = entry.GetType()
        .GetProperties()
        .Where(p => p.CanRead
            && p.PropertyType == typeof(string)
            && p.GetIndexParameters().Length == 0);
    return stringProperties.ToDictionary(
        p => p.Name,
        p => p.GetValue(entry, null) as string);
}
/// <summary>
/// Token-driven parser: consumes the stream from Tokenizer, walks the
/// StateMap state machine and yields one BibEntry per parsed record.
/// Problems are appended to <paramref name="errors"/>; an unexpected token
/// aborts the iteration (yield break) instead of throwing.
/// </summary>
private IEnumerable <BibEntry> Parser(List <BibParseError> errors)
{
    var curState = ParserState.Begin;
    var nextState = ParserState.Begin;
    int lastTokenIndex = 0;  // source index of the previous token
    int len = 0;             // span length reported with each error
    var entryItems = new List <BibEntryItem>();
    BibEntry?bib = null;     // entry under construction (null until Create)
    var tagValueBuilder = new StringBuilder();
    var tagName = "";
    var keySet = new HashSet <string>();  // for duplicate-key detection

    // Fetch token from Tokenizer and build BibEntry
    foreach (var token in Tokenizer(errors))
    {
        len = token.Index - lastTokenIndex;

        // Transfer state: StateMap[state][tokenType] -> (nextState, action).
        if (StateMap[curState].ContainsKey(token.Type))
        {
            nextState = StateMap[curState][token.Type].Item1;
        }
        else
        {
            var expected = from pair in StateMap[curState] select pair.Key;
            //throw new UnexpectedTokenException(_lineCount, _colCount, token.Type, expected.ToArray());
            errors.Add(new BibParseError(lastTokenIndex, len, _lineCount, _colCount, "Bib.UnexpectedToken"));
            yield break;
        }
        var builderState = StateMap[curState][token.Type].Item2;

        // Build BibEntry
        switch (builderState)
        {
            case BibBuilderState.Create:
                // Start a new entry anchored at this token's source index.
                bib = new BibEntry(token.Index);
                break;
            case BibBuilderState.SetType:
                Debug.Assert(bib != null, "bib != null");
                // Unknown entry types are reported but still stored verbatim.
                if (!Enum.TryParse <EntryType>(token.Value, true, out var result))
                {
                    //throw new UnexpectedEntryTypeExpection(_lineCount, _colCount, token.Value);
                    errors.Add(new BibParseError(lastTokenIndex, len, _lineCount, _colCount, "Bib.EntryType", token.Value));
                }
                bib.Type = token.Value;
                AddItem(entryItems, lastTokenIndex, token.Index, EntryItemType.Type);
                break;
            case BibBuilderState.SetKey:
                Debug.Assert(bib != null, "bib != null");
                bib.Key = token.Value;
                if (!keySet.Add(bib.Key))
                {
                    errors.Add(new BibParseError(lastTokenIndex, len, _lineCount, _colCount, "Bib.DuplicateKey", token.Value));
                }
                AddItem(entryItems, lastTokenIndex + 1, token.Index, EntryItemType.Key);
                break;
            case BibBuilderState.SetTagName:
                tagName = token.Value;
                AddItem(entryItems, lastTokenIndex + 1, token.Index, EntryItemType.Name);
                break;
            case BibBuilderState.SetTagValue:
                // Values may span several tokens; accumulate until SetTag.
                tagValueBuilder.Append(token.Value);
                break;
            case BibBuilderState.SetTag:
                Debug.Assert(bib != null, "bib != null");
                bib[tagName] = CompositeTex(tagValueBuilder.ToString());
                tagValueBuilder.Clear();
                tagName = string.Empty;
                AddItem(entryItems, lastTokenIndex, token.Index, EntryItemType.Value);
                break;
            case BibBuilderState.Build:
                Debug.Assert(bib != null, "bib != null");
                // Flush a trailing tag that had no explicit SetTag action.
                if (!string.IsNullOrEmpty(tagName))
                {
                    bib[tagName] = CompositeTex(tagValueBuilder.ToString());
                    tagValueBuilder.Clear();
                    tagName = string.Empty;
                }
                bib.SourceLength = token.Index - bib.SourcePosition + 1;
                bib.Items.AddRange(entryItems);
                entryItems.Clear();
                yield return (bib);
                break;
        }
        // Value tokens do not advance the anchor, so a multi-token value is
        // reported as one span.
        if (builderState != BibBuilderState.SetTagValue)
        {
            lastTokenIndex = token.Index;
        }
        curState = nextState;
    }

    // EOF while still inside an entry: silently stop (the throwing variant
    // is kept commented out; `expected` only feeds it).
    if (curState != ParserState.OutEntry)
    {
        var expected = from pair in StateMap[curState] select pair.Key;
        //throw new UnexpectedTokenException(_lineCount, _colCount, TokenType.EOF, expected.ToArray());
        yield break;
    }
}
// Folding region spanning an entry's full source extent
// [SourcePosition, SourcePosition + SourceLength]; the fold header reads
// "@type{key..." ({{ is an escaped literal brace).
public BibFolding(BibEntry entry) : base(entry.SourcePosition, entry.SourcePosition + entry.SourceLength)
{
    Name = $"@{entry.Type}{{{entry.Key}...";
}
// A single entry touching every pinned field yields the expected pins with
// filtered (lower-cased, unaccented, punctuation-free) values.
public void GetDataPins_SingleEntryAllPinFields_Ok()
{
    // Arrange
    var part = GetPart(0);
    var entry = new BibEntry();
    entry.TypeId = "book-chapter";                    // type (1)
    entry.Authors = new BibAuthor[]                   // authors (2)
    {
        new BibAuthor { FirstName = "Steven", LastName = "Heller" },
        new BibAuthor { FirstName = "Karen", LastName = "Pomeroy" }
    };
    entry.Title = "A Survey: Perì Theôn";             // title (1)
    entry.Contributors = new BibAuthor[]              // contributors (1)
    {
        new BibAuthor { FirstName = "Homer", LastName = "Simpson", RoleId = "ed" }
    };
    entry.Container = "Theology, Today!";             // container (1)
    entry.Keywords = new Keyword[]                    // keywords (1)
    {
        new Keyword { Language = "eng", Value = "gods" }
    };
    entry.PlacePub = "New York";
    entry.YearPub = 2020;
    part.Entries.Add(entry);

    // Act
    var pins = part.GetDataPins().ToList();

    // Assert
    Assert.Equal(7, pins.Count);

    // authors + contributors share the biblio.author pin
    Assert.Equal(3, pins.Count(p => p.Name == "biblio.author"));
    Assert.NotNull(pins.Find(
        p => p.Name == "biblio.author" && p.Value == "heller"));
    Assert.NotNull(pins.Find(
        p => p.Name == "biblio.author" && p.Value == "pomeroy"));
    Assert.NotNull(pins.Find(
        p => p.Name == "biblio.author" && p.Value == "simpson"));

    // title
    Assert.Equal(1, pins.Count(p => p.Name == "biblio.title"));
    Assert.Equal("a survey peri theon", pins.Find(
        p => p.Name == "biblio.title").Value);

    // container
    Assert.Equal(1, pins.Count(p => p.Name == "biblio.container"));
    Assert.Equal("theology today", pins.Find(
        p => p.Name == "biblio.container").Value);

    // keyword
    Assert.Equal(1, pins.Count(p => p.Name == "biblio.keyword"));
    Assert.Equal("gods", pins.Find(
        p => p.Name == "biblio.keyword").Value);
}
// Two entries (a book and a journal paper) produce the expected pin counts
// per pin name.
public void GetDataPins_MultipleEntries_Ok()
{
    // Arrange: a book with two authors...
    var part = GetPart(0);
    var book = new BibEntry
    {
        TypeId = "book",
        Authors = new BibAuthor[]
        {
            new BibAuthor { FirstName = "Steven", LastName = "Heller" },
            new BibAuthor { FirstName = "Karen", LastName = "Pomeroy" }
        },
        Title = "Design Literacy: Understanding Graphic Design",
        PlacePub = "New York",
        YearPub = 2020
    };
    part.Entries.Add(book);

    // ...and a single-author journal paper with one keyword.
    var paper = new BibEntry
    {
        TypeId = "journal-paper",
        Authors = new BibAuthor[]
        {
            new BibAuthor { FirstName = "Daniele", LastName = "Fusi" }
        },
        Title = "Sailing for a Second Navigation: Paradigms in Producing Digital Content",
        Container = "SemRom",
        Number = "n.s.7",
        YearPub = 2018,
        FirstPage = 213,
        LastPage = 276,
        Keywords = new Keyword[]
        {
            new Keyword { Language = "eng", Value = "scholarly digital edition" }
        }
    };
    part.Entries.Add(paper);

    // Act
    var pins = part.GetDataPins().ToList();

    // Assert
    Assert.Equal(9, pins.Count);
    Assert.Equal(3, pins.Count(p => p.Name == "biblio.author"));
    Assert.Equal(2, pins.Count(p => p.Name == "biblio.title"));
    Assert.Equal(1, pins.Count(p => p.Name == "biblio.container"));
    Assert.Equal(1, pins.Count(p => p.Name == "biblio.keyword"));
}