/// <summary>
/// Creates a template parser over the given character source and, when the
/// source actually holds characters, starts the base parse pass immediately.
/// </summary>
/// <param name="charSrc">the character source to parse; a source with a null or empty charList is left unparsed</param>
public TemplateParser( CharSource charSrc ) : base(charSrc) {
    // Only begin parsing when there is at least one character to consume.
    bool hasInput = charSrc.charList != null && charSrc.charList.Length > 0;
    if (hasInput) {
        base.beginParse();
    }
}
/// <summary>
/// Creates a parser for a variable block and parses the block eagerly.
/// </summary>
/// <param name="objVar">the previously parsed variable label this block belongs to</param>
/// <param name="charSrc">the character source positioned at the block</param>
public VarBlockParser( VarLabelParsed objVar, CharSource charSrc ) : base(charSrc) {
    // Keep a reference to the owning label, then consume the block now.
    this.objVar = objVar;
    parse();
}
// Lexes an identifier token: the first character was already matched by the
// caller, so skip it, then greedily consume the [0-9A-Z_a-z] tail.
// Finally interns the matched text as the token's Symbol value.
private void Id() {
    int la0;
    Skip();
    // Line 111: ([0-9A-Z_a-z])*
    while (Id_set0.Contains(la0 = LA0)) {
        Skip();
    }
    // line 112
    _value = (Symbol)(CharSource.Slice(_startIndex, InputPosition - _startIndex).ToString());
}
/// <summary>
/// For command line input
/// </summary>
/// <param name="text">the raw command-line text to interpret</param>
/// <param name="executable">receives the parsed executable on success; null on failure</param>
/// <returns>true when parsing succeeded, false when an exception was caught and logged</returns>
public bool InterpretString(string text, out Executable executable) {
    var source = new CharSource(new StringReader(text));
    executable = null;
    try {
        executable = Evaluatable.Parse(source, functions);
    } catch (ParsingException p) {
        // Parsing errors carry a source line number worth reporting.
        Log($"Caught {p.GetType().Name} at line {source.Line}: {p.Message}", Error);
        return(false);
    } catch (Exception e) {
        // Anything else is unexpected; log it without position information.
        Log($"Caught {e.GetType().Name}: {e.Message}", Error);
        return(false);
    }
    return(true);
}
// Generated lexer rule: matches an identifier tail after the caller matched
// the first character, then interns the text as a Symbol in _value.
// The #line directives map compiler diagnostics back to the MyGrammars.ecs
// grammar source -- do not hand-edit this body.
void Id() {
    int la0;
    Skip();
    // Line 116: ([0-9A-Z_a-z])*
    for (;;) {
        la0 = LA0;
        if (Id_set0.Contains(la0)) {
            Skip();
        } else {
            break;
        }
    }
    #line 117 "MyGrammars.ecs"
    _value = (Symbol)(CharSource.Slice(_startIndex, InputPosition - _startIndex).ToString());
    #line default
}
//-------------------------------------------------------------------------
// Parsing two sources in one call should produce one CurveSensitivities per
// source, both keyed under the same (empty) identifier.
public void test_parse_multipleSources() {
    CharSource csvA = CharSource.wrap(
        "Reference,Sensitivity Tenor,Zero Rate Delta\n" +
        "GBP-LIBOR,P1M,1.1\n" +
        "GBP-LIBOR,P2M,1.2\n");
    CharSource csvB = CharSource.wrap(
        "Reference,Sensitivity Tenor,Zero Rate Delta\n" +
        "GBP-LIBOR,P3M,1.3\n" +
        "GBP-LIBOR,P6M,1.4\n");
    ValueWithFailures<ListMultimap<string, CurveSensitivities>> test = LOADER.parse(ImmutableList.of(csvA, csvB));
    assertEquals(test.Failures.size(), 0, test.Failures.ToString());
    assertEquals(test.Value.Keys.Count, 1);
    IList<CurveSensitivities> list = test.Value.get("");
    assertEquals(list.Count, 2);
    // First source: 1M/2M deltas.
    CurveSensitivities first = list[0];
    assertEquals(first.TypedSensitivities.size(), 1);
    assertSens(first, ZERO_RATE_DELTA, "GBP-LIBOR", GBP, "1M, 2M", 1.1, 1.2);
    // Second source: 3M/6M deltas.
    CurveSensitivities second = list[1];
    assertEquals(second.TypedSensitivities.size(), 1);
    assertSens(second, ZERO_RATE_DELTA, "GBP-LIBOR", GBP, "3M, 6M", 1.3, 1.4);
}
//-------------------------------------------------------------------------
// Batching rows by a predicate on the 'id' column should yield one batch per
// distinct id, covering every row exactly once.
public virtual void nextBatch_predicate() {
    using (CsvIterator csvFile = CsvIterator.of(CharSource.wrap(CSV5GROUPED), true)) {
        ImmutableList<string> headers = csvFile.headers();
        assertEquals(headers.size(), 2);
        assertEquals(headers.get(0), "id");
        assertEquals(headers.get(1), "value");
        int batchCount = 0;
        int rowCount = 0;
        while (csvFile.hasNext()) {
            CsvRow first = csvFile.peek();
            string id = first.getValue("id");
            IList<CsvRow> batch = csvFile.nextBatch(row => row.getValue("id").Equals(id));
            // Every row in the batch must share the peeked row's id.
            assertEquals(batch.Select(row => row.getValue("id")).Distinct().Count(), 1);
            batchCount++;
            rowCount += batch.Count;
        }
        assertEquals(batchCount, 3);
        assertEquals(rowCount, 6);
    }
}
// With a header row present, asStream() should return the two data rows and
// leave the iterator exhausted.
public virtual void test_asStream_simple_with_header() {
    using (CsvIterator csvFile = CsvIterator.of(CharSource.wrap(CSV1), true)) {
        ImmutableList<string> headers = csvFile.headers();
        assertEquals(headers.size(), 2);
        assertEquals(headers.get(0), "h1");
        assertEquals(headers.get(1), "h2");
        IList<CsvRow> rows = csvFile.asStream().collect(toList());
        // Streaming consumes everything, so the iterator is now empty.
        assertEquals(csvFile.hasNext(), false);
        assertEquals(rows.Count, 2);
        CsvRow firstRow = rows[0];
        assertEquals(firstRow.headers(), headers);
        assertEquals(firstRow.fieldCount(), 2);
        assertEquals(firstRow.field(0), "r11");
        assertEquals(firstRow.field(1), "r12");
        CsvRow secondRow = rows[1];
        assertEquals(secondRow.headers(), headers);
        assertEquals(secondRow.fieldCount(), 2);
        assertEquals(secondRow.field(0), "r21");
        assertEquals(secondRow.field(1), "r22");
    }
}
//-------------------------------------------------------------------------
// The standard-format sensitivities classpath file should load as a single
// instance with delta and gamma sensitivities for two references.
public void test_parse_standard() {
    CharSource source = ResourceLocator.ofClasspath("com/opengamma/strata/loader/csv/sensitivity-standard.csv").CharSource;
    assertEquals(LOADER.isKnownFormat(source), true);
    ValueWithFailures<ListMultimap<string, CurveSensitivities>> test = LOADER.parse(ImmutableList.of(source));
    assertEquals(test.Failures.size(), 0, test.Failures.ToString());
    assertEquals(test.Value.size(), 1);
    IList<CurveSensitivities> list = test.Value.get("");
    assertEquals(list.Count, 1);
    CurveSensitivities parsedSens = list[0];
    assertEquals(parsedSens.TypedSensitivities.size(), 2);
    // Same tenor ladder for each of the four (type, reference) combinations.
    string tenors = "1D, 1W, 2W, 1M, 3M, 6M, 12M, 2Y, 5Y, 10Y";
    assertSens(parsedSens, ZERO_RATE_DELTA, "GBP", GBP, tenors, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
    assertSens(parsedSens, ZERO_RATE_DELTA, "GBP-LIBOR", GBP, tenors, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
    assertSens(parsedSens, ZERO_RATE_GAMMA, "GBP", GBP, tenors, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1);
    assertSens(parsedSens, ZERO_RATE_GAMMA, "GBP-LIBOR", GBP, tenors, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1);
}
// A calendar date in the tenor column should be accepted by the date-aware
// loader and surfaced as LabelDateParameterMetadata.
public void test_parse_grid_dateInTenorColumn() {
    CharSource source = CharSource.wrap(
        "Sensitivity Type,Sensitivity Tenor,GBP\n" +
        "ZeroRateGamma,2018-06-30,1\n");
    assertEquals(LOADER_DATE.isKnownFormat(source), true);
    ValueWithFailures<ListMultimap<string, CurveSensitivities>> test = LOADER_DATE.parse(ImmutableList.of(source));
    assertEquals(test.Failures.size(), 0, test.Failures.ToString());
    assertEquals(test.Value.size(), 1);
    IList<CurveSensitivities> list = test.Value.get("");
    assertEquals(list.Count, 1);
    CurveSensitivities parsedSens = list[0];
    assertEquals(parsedSens.TypedSensitivities.size(), 1);
    CurrencyParameterSensitivities gamma = parsedSens.getTypedSensitivity(ZERO_RATE_GAMMA);
    assertEquals(gamma.Sensitivities.size(), 1);
    CurrencyParameterSensitivity sens = gamma.Sensitivities.get(0);
    assertEquals(sens.ParameterMetadata.size(), 1);
    assertEquals(sens.ParameterMetadata.get(0), LabelDateParameterMetadata.of(date(2018, 6, 30), "2018-06-30"));
}
// Parses an identifier or `backquoted` symbol spanning [start, InputPosition)
// and stores the interned Symbol in _value. Results are memoized in _idCache
// so repeated identifiers share one Symbol instance.
void ParseIdOrSymbol(int start, bool isBQString) {
    UString unparsed = CharSource.Slice(start, InputPosition - start);
    UString parsed;
    // The caller's isBQString flag must agree with the actual first character.
    Debug.Assert(isBQString == (CharSource.TryGet(start, '\0') == '`'));
    Debug.Assert(!_verbatim);
    if (!_idCache.TryGetValue(unparsed, out _value)) {
        // Cache miss: unescape/normalize as needed, then intern the symbol.
        if (isBQString) {
            parsed = ParseStringCore(start);
        } else if (_parseNeeded) {
            parsed = ScanNormalIdentifier(unparsed);
        } else {
            parsed = unparsed;
        }
        // ShedExcessMemory(50): presumably copies the key when it would pin a
        // much larger source buffer -- TODO confirm against UString docs.
        _idCache[unparsed.ShedExcessMemory(50)] = _value = GSymbol.Get(parsed.ToString());
    }
}
// Returns the value of the current quoted-string token, stripping the quote
// delimiters: one character each side, or three each side when triple-quoted.
// When the token contains escapes the unescaping pass produces the value
// instead of a simple slice.
protected UString GetUnescapedString(bool hasEscapes, bool isTripleQuoted) {
    UString value;
    if (hasEscapes) {
        UString original = Text();
        value = UnescapeQuotedString(ref original, Error, IndentString, true);
        // The unescaper is expected to consume the entire token text.
        Debug.Assert(original.IsEmpty);
    } else {
        // Sanity check: token starts and ends with the same quote character.
        Debug.Assert(CharSource.TryGet(InputPosition - 1, '?') == CharSource.TryGet(_startPosition, '!'));
        if (isTripleQuoted) {
            value = CharSource.Slice(_startPosition + 3, InputPosition - _startPosition - 6).ToString();
        } else {
            value = CharSource.Slice(_startPosition + 1, InputPosition - _startPosition - 2).ToString();
        }
    }
    return(value);
}
// Converts the current numeric-literal token into its boxed value (_value).
// The digit span excludes a leading '-', a two-character base prefix when the
// base is not 10, and any trailing type suffix.
void ParseNumberValue() {
    int start = _startPosition;
    if (_isNegative) {
        start++;
    }
    if (_numberBase != 10) {
        // Skip the base prefix, e.g. "0x" -- assumes all non-decimal
        // prefixes are exactly two characters.
        start += 2;
    }
    int stop = InputPosition;
    if (_typeSuffix != null) {
        stop -= _typeSuffix.Name.Length;
    }
    UString digits = CharSource.Slice(start, stop - start);
    string error;
    if ((_value = LesLexer.ParseNumberCore(digits, _isNegative, _numberBase, _isFloat, _typeSuffix, out error)) == null) {
        // Unparseable: fall back to zero so downstream code has a value.
        _value = 0;
    } else if (_value == CodeSymbols.Sub) {
        // The "number" was just a minus sign: back up and re-classify the
        // token as the subtraction operator.
        InputPosition = _startPosition + 1;
        _type = TT.Sub;
    }
    if (error != null) {
        Error(_startPosition, error);
    }
}
// Returns the unescaped value of the quoted string token starting at 'start'.
// Handles an optional leading '@' (verbatim marker), double/single/backquote
// delimiters, and triple-quoted styles; takes a fast slice when no escape
// processing is needed.
UString UnescapeQuotedString(int start) {
    Debug.Assert(_verbatim == (CharSource[start] == '@'));
    if (_verbatim) {
        start++;
    }
    char q;
    Debug.Assert((q = CharSource.TryGet(start, '\0')) == '"' || q == '\'' || q == '`');
    bool tripleQuoted = (_style & NodeStyle.BaseStyleMask) == NodeStyle.TDQStringLiteral || (_style & NodeStyle.BaseStyleMask) == NodeStyle.TQStringLiteral;
    if (!_parseNeeded) {
        // Fast path: no escapes, so just strip the two delimiter characters.
        // (Triple-quoted strings always take the slow path.)
        Debug.Assert(!tripleQuoted);
        return(CharSource.Slice(start + 1, InputPosition - start - 2));
    } else {
        UString original = CharSource.Slice(start, InputPosition - start);
        return(UnescapeQuotedString(ref original, _verbatim, Error, _indent));
    }
}
// Constructs a result argument by advancing the source one character;
// the consumed character is handed back to the caller through 'chr'.
public ResultArgument(CharSource source, ref char chr) { source.Advance(out chr); }
// An empty source cannot contain the required header row, so construction
// with headerRow=true must throw an IllegalArgumentException.
public virtual void test_of_empty_with_header() { assertThrowsIllegalArg(() => CsvIterator.of(CharSource.wrap(""), true)); }
/// <summary>
/// Parses a single curves CSV resource, grouping curve nodes by
/// (curve date, curve name) for every row whose date passes the filter.
/// </summary>
/// <param name="datePredicate">filter selecting which curve dates to load</param>
/// <param name="curvesResource">the curves CSV character source</param>
/// <param name="settingsMap">the curve settings, keyed by curve name</param>
/// <returns>the parsed curves, keyed by curve date</returns>
private static Multimap<LocalDate, Curve> parseSingle(System.Predicate<LocalDate> datePredicate, CharSource curvesResource, IDictionary<CurveName, LoadedCurveSettings> settingsMap) {
    CsvFile csv = CsvFile.of(curvesResource, true);
    IDictionary<LoadedCurveKey, IList<LoadedCurveNode>> allNodes = new Dictionary<LoadedCurveKey, IList<LoadedCurveNode>>();
    foreach (CsvRow row in csv.rows()) {
        string dateStr = row.getField(CURVE_DATE);
        string curveNameStr = row.getField(CURVE_NAME);
        string pointDateStr = row.getField(CURVE_POINT_DATE);
        string pointValueStr = row.getField(CURVE_POINT_VALUE);
        string pointLabel = row.getField(CURVE_POINT_LABEL);
        LocalDate date = LoaderUtils.parseDate(dateStr);
        if (datePredicate(date)) {
            LocalDate pointDate = LoaderUtils.parseDate(pointDateStr);
            // Parse with the invariant culture: CSV numeric data uses '.' as
            // the decimal separator, whereas Convert.ToDouble honours the
            // current thread culture and would misparse e.g. under de-DE.
            double pointValue = double.Parse(pointValueStr, System.Globalization.CultureInfo.InvariantCulture);
            LoadedCurveKey key = LoadedCurveKey.of(date, CurveName.of(curveNameStr));
            // One node list per (date, name) key, created on first use.
            IList<LoadedCurveNode> curveNodes = allNodes.computeIfAbsent(key, k => new List<LoadedCurveNode>());
            curveNodes.Add(LoadedCurveNode.of(pointDate, pointValue, pointLabel));
        }
    }
    return(buildCurves(settingsMap, allNodes));
}
/// <summary>
/// Creates a code parser bound to the given character source.
/// Parsing is not started here; callers drive it explicitly.
/// </summary>
/// <param name="charSrc">the character source to read code from</param>
public CodeParser( CharSource charSrc ) : base(charSrc) {
    this.charSrc = charSrc;
}
/// <summary>
/// Creates a scanner over the given reader, wrapping it in a
/// TextReaderCharSource so input can be consumed character by character.
/// </summary>
/// <param name="input">the reader supplying the text to scan</param>
public Scanner(TextReader input) {
    this.input = new TextReaderCharSource(input);
}
//-------------------------------------------------------------------------
/// <summary>
/// Parses the seasonality definition CSV file.
/// </summary>
/// <param name="charSource"> the seasonality CSV character source </param>
/// <returns> the map of seasonality definitions, keyed by curve name </returns>
public static IDictionary<CurveName, SeasonalityDefinition> parseSeasonalityDefinitions(CharSource charSource) {
    ImmutableMap.Builder<CurveName, SeasonalityDefinition> builder = ImmutableMap.builder();
    CsvFile csv = CsvFile.of(charSource, true);
    foreach (CsvRow row in csv.rows()) {
        string curveNameStr = row.getField(CURVE_NAME);
        string shiftTypeStr = row.getField(SHIFT_TYPE);
        // One value per month-pair column; parse with the invariant culture
        // because the CSV always uses '.' as the decimal separator -- the
        // plain double.Parse overload honours the current thread culture and
        // would misparse under comma-decimal locales.
        DoubleArray values = DoubleArray.of(12, i => double.Parse(row.getField(MONTH_PAIRS.get(i)), System.Globalization.CultureInfo.InvariantCulture));
        CurveName curveName = CurveName.of(curveNameStr);
        ShiftType shiftType = ShiftType.valueOf(shiftTypeStr.ToUpper(Locale.ENGLISH));
        builder.put(curveName, SeasonalityDefinition.of(values, shiftType));
    }
    return(builder.build());
}
/// <summary>
/// Creates a block parser that reads from the given character source.
/// </summary>
/// <param name="charSrc">the character source backing this parser</param>
public BlockParser( CharSource charSrc ) {
    this.charSrc = charSrc;
}
//------------------------------------------------------------------------
/// <summary>
/// Parses the specified source as a CSV file, using a comma as the separator.
/// <para>
/// CSV files sometimes contain a Unicode Byte Order Mark.
/// Callers are responsible for handling this, such as by using <seealso cref="UnicodeBom"/>.
/// </para>
/// </summary>
/// <param name="source"> the CSV file resource </param>
/// <param name="headerRow"> whether the source has a header row, an empty source must still contain the header </param>
/// <returns> the CSV file </returns>
/// <exception cref="UncheckedIOException"> if an IO exception occurs </exception>
/// <exception cref="IllegalArgumentException"> if the file cannot be parsed </exception>
public static CsvFile of(CharSource source, bool headerRow) {
    // Delegate to the general overload with the default comma separator.
    return(of(source, headerRow, ','));
}
/// <summary>
/// Creates a string-block parser and immediately consumes the string content.
/// </summary>
/// <param name="charSrc">the character source positioned at the string block</param>
public StringBlockParser(CharSource charSrc) : base(charSrc) {
    parseString();
}
// An unknown value in the 'Strata Position Type' column must be reported as
// a single PARSING failure naming the offending type and line number.
public virtual void test_load_invalidUnknownType() {
    PositionCsvLoader loader = PositionCsvLoader.standard();
    ValueWithFailures<IList<Position>> result = loader.parse(ImmutableList.of(CharSource.wrap("Strata Position Type\nFoo")));
    assertEquals(result.Failures.size(), 1);
    FailureItem failure = result.Failures.get(0);
    assertEquals(failure.Reason, FailureReason.PARSING);
    assertEquals(failure.Message, "CSV file position type 'Foo' is not known at line 2");
}
// Writing parsed sensitivities back to CSV text and re-parsing that text
// should reproduce an equal CurveSensitivities instance (lossless round trip).
public void test_write_standard_roundTrip() {
    CharSource source = ResourceLocator.ofClasspath("com/opengamma/strata/loader/csv/sensitivity-standard.csv").CharSource;
    ValueWithFailures<ListMultimap<string, CurveSensitivities>> parsed1 = LOADER.parse(ImmutableList.of(source));
    assertEquals(parsed1.Failures.size(), 0, parsed1.Failures.ToString());
    assertEquals(parsed1.Value.size(), 1);
    IList<CurveSensitivities> csensList1 = parsed1.Value.get("");
    assertEquals(csensList1.Count, 1);
    CurveSensitivities csens1 = csensList1[0];
    // Serialize to CSV text...
    StringBuilder buf = new StringBuilder();
    WRITER.write(csens1, buf);
    string content = buf.ToString();
    // ...then parse that text again and compare.
    ValueWithFailures<ListMultimap<string, CurveSensitivities>> parsed2 = LOADER.parse(ImmutableList.of(CharSource.wrap(content)));
    assertEquals(parsed2.Failures.size(), 0, parsed2.Failures.ToString());
    assertEquals(parsed2.Value.size(), 1);
    IList<CurveSensitivities> csensList2 = parsed2.Value.get("");
    assertEquals(csensList2.Count, 1);
    CurveSensitivities csens2 = csensList2[0];
    assertEquals(csens2, csens1);
}
// Returns the source text from startPosition up to (but excluding) the
// current input position.
protected UString Text(int startPosition) { return(CharSource.Slice(startPosition, InputPosition - startPosition)); }
/// <summary>
/// Creates a block parser over the supplied character source.
/// </summary>
/// <param name="charSrc">the source of characters to parse</param>
public BlockParser(CharSource charSrc) {
    this.charSrc = charSrc;
}
/// <summary>
/// Creates a string block parser and parses the string eagerly.
/// </summary>
/// <param name="charSrc">the character source positioned at the string</param>
public StringBlockParser( CharSource charSrc ) : base(charSrc) {
    parseString();
}
// Gets the text of the current token that has been parsed so far,
// i.e. the span from _startPosition up to the current input position.
protected UString Text() { return(CharSource.Slice(_startPosition, InputPosition - _startPosition)); }
// Generated lexer rule: matches a decimal number of the form
// [.]? [0-9]+ ([.] [0-9]+)? -- at most one dot overall -- and stores the
// parsed double in _value. The #line directives map diagnostics back to
// MyGrammars.ecs; do not hand-edit this body.
void Num() {
    int la0, la1;
    #line 121 "MyGrammars.ecs"
    bool dot = false;
    #line default
    // Line 122: ([.])?
    la0 = LA0;
    if (la0 == '.') {
        Skip();
        #line 122 "MyGrammars.ecs"
        dot = true;
        #line default
    }
    MatchRange('0', '9');
    // Line 123: ([0-9])*
    for (;;) {
        la0 = LA0;
        if (la0 >= '0' && la0 <= '9') {
            Skip();
        } else {
            break;
        }
    }
    // Line 124: (&!{dot} [.] [0-9] ([0-9])*)?
    la0 = LA0;
    if (la0 == '.') {
        if (!dot) {
            la1 = LA(1);
            if (la1 >= '0' && la1 <= '9') {
                Skip();
                Skip();
                // Line 124: ([0-9])*
                for (;;) {
                    la0 = LA0;
                    if (la0 >= '0' && la0 <= '9') {
                        Skip();
                    } else {
                        break;
                    }
                }
            }
        }
    }
    #line 125 "MyGrammars.ecs"
    _value = double.Parse(CharSource.Slice(_startIndex, InputPosition - _startIndex).ToString());
    #line default
}
// A file lacking the 'Strata Position Type' header must fail with a single
// PARSING failure describing the missing column.
public virtual void test_load_invalidNoType() {
    PositionCsvLoader loader = PositionCsvLoader.standard();
    ValueWithFailures<IList<Position>> result = loader.parse(ImmutableList.of(CharSource.wrap("Id")));
    assertEquals(result.Failures.size(), 1);
    FailureItem failure = result.Failures.get(0);
    assertEquals(failure.Reason, FailureReason.PARSING);
    assertEquals(failure.Message.Contains("CSV file does not contain 'Strata Position Type' header"), true);
}
// Generated lexer rule: matches a JSON-style number
// [-]? (0 | [1-9][0-9]*) ([.][0-9]*)? ([Ee][+-]?[0-9]+)?
// and returns its double value. The "// Line N" comments reference the
// grammar source; do not hand-edit this body.
double Number() {
    int la0;
    double result = default(double);
    // line 47
    int start = InputPosition;
    // Line 48: ([\-])?
    la0 = LA0;
    if (la0 == '-') {
        Skip();
    }
    // Line 49: ([0] | [1-9] ([0-9])*)
    la0 = LA0;
    if (la0 == '0') {
        Skip();
    } else {
        MatchRange('1', '9');
        // Line 49: ([0-9])*
        for (;;) {
            la0 = LA0;
            if (la0 >= '0' && la0 <= '9') {
                Skip();
            } else {
                break;
            }
        }
    }
    // Line 50: ([.] ([0-9])*)?
    la0 = LA0;
    if (la0 == '.') {
        Skip();
        // Line 50: ([0-9])*
        for (;;) {
            la0 = LA0;
            if (la0 >= '0' && la0 <= '9') {
                Skip();
            } else {
                break;
            }
        }
    }
    // Line 51: ([Ee] ([+\-])? [0-9] ([0-9])*)?
    la0 = LA0;
    if (la0 == 'E' || la0 == 'e') {
        Skip();
        // Line 51: ([+\-])?
        la0 = LA0;
        if (la0 == '+' || la0 == '-') {
            Skip();
        }
        MatchRange('0', '9');
        // Line 51: ([0-9])*
        for (;;) {
            la0 = LA0;
            if (la0 >= '0' && la0 <= '9') {
                Skip();
            } else {
                break;
            }
        }
    }
    // line 53: convert the matched span to a double (base 10).
    UString str = CharSource.Slice(start, InputPosition - start);
    result = ParseHelpers.TryParseDouble(ref str, 10);
    return(result);
}
// A future position row with no quantity column must fail with a PARSING
// failure explaining the accepted quantity column names.
public virtual void test_load_invalidNoQuantity() {
    EtdContractSpecId specId = EtdContractSpecId.of("OG-ETD", "F-ECAG-FGBL");
    EtdContractSpec contract = EtdContractSpec.builder()
        .id(specId)
        .type(EtdType.FUTURE)
        .exchangeId(ExchangeIds.ECAG)
        .contractCode(FGBL)
        .description("Dummy")
        .priceInfo(SecurityPriceInfo.of(Currency.GBP, 100))
        .build();
    ReferenceData refData = ImmutableReferenceData.of(specId, contract);
    PositionCsvLoader loader = PositionCsvLoader.of(refData);
    ValueWithFailures<IList<Position>> result = loader.parse(ImmutableList.of(CharSource.wrap("Strata Position Type,Exchange,Contract Code,Expiry\nFUT,ECAG,FGBL,2017-06")));
    assertEquals(result.Failures.size(), 1);
    FailureItem failure = result.Failures.get(0);
    assertEquals(failure.Reason, FailureReason.PARSING);
    assertEquals(failure.Message, "CSV file position could not be parsed at line 2: " + "Security must contain a quantity column, either 'Quantity' or 'Long Quantity' and 'Short Quantity'");
}
// Generated rule: recognizes the JSON keywords true/false/null (each must be
// followed by a delimiter character or EOF), and otherwise consumes a run of
// letters, reporting that JSON does not support identifiers. Lookahead is
// hand-unrolled by the generator; do not hand-edit this body.
object WordLiteral() {
    int la0, la1, la2, la3, la4;
    object result = default(object);
    int start = InputPosition;
    // Line 86: ( [t] [r] [u] [e] / [f] [a] [l] [s] [e] / [n] [u] [l] [l] / [A-Za-z] ([A-Za-z])* )
    do {
        la0 = LA0;
        if (la0 == 't') {
            // Candidate "true": verify remaining letters and the delimiter.
            la1 = LA(1);
            if (la1 == 'r') {
                la2 = LA(2);
                if (la2 == 'u') {
                    la3 = LA(3);
                    if (la3 == 'e') {
                        switch (LA(4)) {
                        case -1: case '\t': case '\n': case '\r': case ' ': case ',': case '/': case ']': case '}':
                            {
                                Skip();
                                Skip();
                                Skip();
                                Skip();
                                // line 86
                                result = G.BoxedTrue;
                            }
                            break;
                        default:
                            goto match4;
                        }
                    } else {
                        goto match4;
                    }
                } else {
                    goto match4;
                }
            } else {
                goto match4;
            }
        } else if (la0 == 'f') {
            // Candidate "false".
            la1 = LA(1);
            if (la1 == 'a') {
                la2 = LA(2);
                if (la2 == 'l') {
                    la3 = LA(3);
                    if (la3 == 's') {
                        la4 = LA(4);
                        if (la4 == 'e') {
                            switch (LA(5)) {
                            case -1: case '\t': case '\n': case '\r': case ' ': case ',': case '/': case ']': case '}':
                                {
                                    Skip();
                                    Skip();
                                    Skip();
                                    Skip();
                                    Skip();
                                    // line 87
                                    return(G.BoxedFalse);
                                }
                            default:
                                goto match4;
                            }
                        } else {
                            goto match4;
                        }
                    } else {
                        goto match4;
                    }
                } else {
                    goto match4;
                }
            } else {
                goto match4;
            }
        } else if (la0 == 'n') {
            // Candidate "null".
            la1 = LA(1);
            if (la1 == 'u') {
                la2 = LA(2);
                if (la2 == 'l') {
                    la3 = LA(3);
                    if (la3 == 'l') {
                        switch (LA(4)) {
                        case -1: case '\t': case '\n': case '\r': case ' ': case ',': case '/': case ']': case '}':
                            {
                                Skip();
                                Skip();
                                Skip();
                                Skip();
                                // line 88
                                return(null);
                            }
                        default:
                            goto match4;
                        }
                    } else {
                        goto match4;
                    }
                } else {
                    goto match4;
                }
            } else {
                goto match4;
            }
        } else {
            goto match4;
        }
        break;
    match4:
        {
            // Fallback: a bare identifier, which is an error in JSON; the
            // raw text is still returned so callers can report it.
            MatchRange('A', 'Z', 'a', 'z');
            // Line 89: ([A-Za-z])*
            for (;;) {
                la0 = LA0;
                if (la0 >= 'A' && la0 <= 'Z' || la0 >= 'a' && la0 <= 'z') {
                    Skip();
                } else {
                    break;
                }
            }
            // line 90
            Error(0, "JSON does not support identifiers");
            return(CharSource.Slice(start, InputPosition - start).ToString());
        }
    } while (false);
    return(result);
}
// Gets the text of the current token that has been parsed so far,
// i.e. the span from _startIndex up to the current input position.
private UString Text() { return(CharSource.Slice(_startIndex, InputPosition - _startIndex)); }
/// <summary>
/// Creates a function parser over the given character source.
/// </summary>
/// <param name="charSrc">the character source to read the function from</param>
public FunctionParser( CharSource charSrc ) : base(charSrc) {
    this.charSrc = charSrc;
}