/// <summary>
/// Initializes a new instance of the <see cref="TokenizerState{T}"/> class.
/// </summary>
/// <param name="instance">The instance the tokenizer state wraps; may not be null.</param>
/// <param name="definition">The tokenizer definition describing the instance's tokens; may not be null.</param>
/// <param name="args">The tokenizer arguments; may not be null.</param>
/// <param name="forCreate">if set to <c>true</c> the instance was freshly created by the tokenizer and
/// should be notified that initialization is starting.</param>
/// <remarks>NOTE(review): the earlier remarks claimed this calls <c>ISupportInitialize.BeginInit</c>, but the
/// code actually probes for <see cref="ITokenizerInitialize"/>. Additionally, the <see cref="TokenizerEventArgs"/>
/// built below is never passed to anything — a begin-init callback (e.g. <c>_init.OnBeginInitialize(e)</c>)
/// appears to be missing; confirm against the rest of this class before relying on the notification.</remarks>
public TokenizerState(T instance, TokenizerDefinition definition, TokenizerArgs args, bool forCreate)
{
    if (instance == null)
    {
        throw new ArgumentNullException("instance");
    }
    else if (definition == null)
    {
        throw new ArgumentNullException("definition");
    }
    else if (args == null)
    {
        throw new ArgumentNullException("args");
    }

    _instance = instance;
    _definition = definition;
    _args = args;

    if (forCreate)
    {
        // Only instances that opt in via ITokenizerInitialize participate in init notification.
        _init = instance as ITokenizerInitialize;

        if (_init != null)
        {
            TokenizerEventArgs e = new TokenizerEventArgs();
            e.Context = args.Context;
            // NOTE(review): 'e' is created and populated but never used — see remarks above.
        }
    }
}
/// <summary>
/// Loads the logfile at the specified path and parses it into a <see cref="TBLogFile"/> instance
/// </summary>
/// <param name="path">The path of the log file to read.</param>
/// <returns>The parsed <see cref="TBLogFile"/>, or <c>null</c> when the document root is not a
/// <c>TurtleBuildData</c> element in the expected namespace or when parsing fails.</returns>
public static TBLogFile Load(string path)
{
    using (XmlReader reader = XmlReader.Create(path))
    {
        XPathNavigator navigator = new XPathDocument(reader).CreateNavigator();
        navigator.MoveToRoot();
        navigator.MoveToFirstChild();

        // Reject documents whose root element is not <TurtleBuildData> in our namespace.
        if ((navigator.NamespaceURI != Namespace) || (navigator.LocalName != "TurtleBuildData"))
        {
            return null;
        }

        TokenizerArgs parseArgs = new TokenizerArgs();
        parseArgs.SkipUnknownNamedItems = true; // tolerate newer/unknown fields in the log

        TBLogFile result;
        return Tokenizer.TryParseXml(navigator, parseArgs, out result) ? result : null;
    }
}
/// <summary>
/// Loads the directory map stored in the specified directory.
/// </summary>
/// <param name="path">The directory whose map file should be read; may not be null or empty.</param>
/// <returns>The parsed <see cref="DirectoryMapData"/> with its directory recorded, or <c>null</c>
/// when the map file cannot be parsed.</returns>
public static DirectoryMapData Load(string path)
{
    if (string.IsNullOrEmpty(path))
    {
        throw new ArgumentNullException("path");
    }

    path = Path.GetFullPath(path);

    using (FileStream mapStream = File.OpenRead(QQnPath.Combine(path, DirMapFile)))
    {
        XPathNavigator navigator = new XPathDocument(mapStream).CreateNavigator();
        navigator.MoveToRoot();
        navigator.MoveToFirstChild();

        TokenizerArgs parseArgs = new TokenizerArgs();
        parseArgs.SkipUnknownNamedItems = true; // tolerate unknown fields in the map file

        DirectoryMapData map;
        if (Tokenizer.TryParseXml(navigator, parseArgs, out map))
        {
            // Remember which directory this map was loaded from.
            map._directory = path;
            return map;
        }
    }

    return null;
}
/// <summary>
/// Writes the specified value as XML by dispatching to the static non-public
/// <c>XmlTokenizer&lt;_groupType&gt;.TryWrite(writer, value, args)</c> via reflection.
/// </summary>
/// <param name="writer">The writer to emit XML to.</param>
/// <param name="args">The tokenizer arguments forwarded to the typed tokenizer.</param>
/// <param name="value">The value to serialize.</param>
/// <returns>The boolean result of the typed <c>TryWrite</c> call.</returns>
internal bool TryWriteXml(XmlWriter writer, TokenizerArgs args, object value)
{
    // Close the open generic over the group's element type; _groupType is only known at runtime.
    Type xmlTokenizer = typeof(XmlTokenizer<>).MakeGenericType(_groupType);

    // FIX: removed the unused 'MethodInfo[] mi = xmlTokenizer.GetMethods(...)' call — its result
    // was never read, so it only cost a reflection scan on every invocation.
    object[] arguments = new object[] { writer, value, args };

    return (bool)xmlTokenizer.InvokeMember("TryWrite", BindingFlags.Static | BindingFlags.InvokeMethod | BindingFlags.NonPublic, null, null, arguments, CultureInfo.InvariantCulture);
}
/// <summary>
/// Parses the XML the navigator is positioned on by dispatching to the static non-public
/// <c>XmlTokenizer&lt;_groupType&gt;.TryParse(nav, args, out value)</c> via reflection.
/// </summary>
/// <param name="nav">The navigator positioned on the element to parse.</param>
/// <param name="args">The tokenizer arguments forwarded to the typed tokenizer.</param>
/// <param name="value">Receives the parsed value (the out argument of the invoked method).</param>
/// <returns>The boolean result of the typed <c>TryParse</c> call.</returns>
internal bool TryParseXml(System.Xml.XPath.XPathNavigator nav, TokenizerArgs args, out object value)
{
    // Close the open generic over the group's element type; _groupType is only known at runtime.
    Type tokenizerType = typeof(XmlTokenizer<>).MakeGenericType(_groupType);

    // Slot 2 acts as the 'out' parameter of the invoked method.
    object[] invokeArguments = new object[] { nav, args, null };

    bool parsed = (bool)tokenizerType.InvokeMember(
        "TryParse",
        BindingFlags.Static | BindingFlags.InvokeMethod | BindingFlags.NonPublic,
        null,
        null,
        invokeArguments,
        CultureInfo.InvariantCulture);

    // InvokeMember writes the out argument back into the array.
    value = invokeArguments[2];

    return parsed;
}
/// <summary>
/// Initializes a new instance of the <see cref="TPack"/> class.
/// </summary>
/// <param name="baseStream">The base stream; may not be null.</param>
/// <param name="verificationMode">The verification mode applied to the outer assured stream.</param>
/// <param name="disposeAtClose">A list of objects to dispose in this order when the stream is closed</param>
/// <exception cref="ArgumentNullException">baseStream is null.</exception>
/// <exception cref="IOException">The embedded package definition cannot be parsed.</exception>
internal TPack(Stream baseStream, VerificationMode verificationMode, IDisposable[] disposeAtClose)
{
    if (baseStream == null)
    {
        throw new ArgumentNullException("baseStream");
    }

    _assuredStream = new AssuredStream(baseStream, verificationMode);

    // The assured wrapper is disposed before the stream it wraps; caller-supplied items follow.
    _disposeAtClose = new List<IDisposable>();
    _disposeAtClose.Add(_assuredStream);
    _disposeAtClose.Add(baseStream);

    if (disposeAtClose != null)
    {
        _disposeAtClose.AddRange(disposeAtClose);
    }

    // Inner streams are always fully verified, regardless of the outer verification mode.
    MultipleStreamCreateArgs msa = new MultipleStreamCreateArgs();
    msa.VerificationMode = VerificationMode.Full;

    _reader = new MultipleStreamReader(_assuredStream, msa);

    Pack pack;
    using (Stream r = _reader.GetNextStream(PackageDefinitionId))
    {
        XPathDocument doc = new XPathDocument(r);
        XPathNavigator nav = doc.CreateNavigator();
        nav.MoveToRoot();
        nav.MoveToFirstChild();

        TokenizerArgs ta = new TokenizerArgs();
        ta.SkipUnknownNamedItems = true;

        // FIX: 'ta' was constructed but never passed to TryParseXml, so unknown named items
        // failed the parse instead of being skipped. Pass it, matching the other loaders
        // in this codebase that call Tokenizer.TryParseXml(nav, args, out result).
        if (!Tokenizer.TryParseXml(nav, ta, out pack) || pack == null)
        {
            throw new IOException();
        }
        else
        {
            _pack = pack;
        }
    }
}
/// <summary>
/// Parses the specified command line arguments into a new <typeparamref name="T"/> instance.
/// </summary>
/// <param name="items">The raw argument words; may not be null.</param>
/// <param name="args">Tokenizer settings (prefix characters, value separator, response-file support, ...); may not be null.</param>
/// <param name="to">Receives the parsed instance on success; null on failure.</param>
/// <returns>true when every argument was recognized and the resulting state is complete;
/// otherwise false with the reason stored in <c>args.ErrorMessage</c>.</returns>
internal static bool TryParse(IList<string> items, TokenizerArgs args, out T to)
{
    if (items == null)
    {
        throw new ArgumentNullException("items");
    }
    else if (args == null)
    {
        throw new ArgumentNullException("args");
    }

    // Work on a copy: response-file expansion inserts extra words into the list mid-iteration.
    List<string> cArgs = new List<string>(items);

    using (TokenizerState<T> state = Tokenizer.NewState<T>(args))
    {
        TokenizerDefinition definition = state.Definition;
        to = null;
        int i;
        bool atEnd = false; // once set, only placed (positional) arguments are accepted

        // Characters that can split a direct-value argument ("-name=value"; optional '+'/'-' suffixes).
        char[] checkChars = args.PlusMinSuffixArguments ? new char[] { args.ArgumentValueSeparator, '+', '-' } : new char[] { args.ArgumentValueSeparator };

        int nPlaced = 0;
        for (i = 0; i < cArgs.Count; i++)
        {
            string a = cArgs[i];

            if (!atEnd && (a.Length > 1) && args.CommandLineChars.Contains(a[0]))
            {
                // Named argument: starts with a command-line prefix char ('-', '/', ...).
                bool twoStart = a[0] == a[1];

                if (a.Length == 2 && twoStart)
                {
                    // Bare "--": everything after it is positional.
                    if (!definition.HasPlacedArguments)
                    {
                        args.ErrorMessage = TokenizerMessages.NoPlacedArgumentsDefined;
                        return(false);
                    }
                    atEnd = true;
                    // NOTE(review): there is no 'continue' here, so the "--" marker itself falls
                    // through to the placed-argument handling below and is evaluated as a value.
                    // Confirm this is intended.
                }
                else
                {
                    int aFrom = twoStart ? 2 : 1;                                          // skip the prefix char(s)
                    int aTo = args.AllowDirectArgs ? a.IndexOfAny(checkChars, aFrom) : -1; // separator position, -1 when none
                    char cTo = (aTo > 0) ? a[aTo] : '\0';                                  // NOTE(review): 'cTo' is never read
                    string item = (aTo > 0) ? a.Substring(aFrom, aTo - aFrom) : a.Substring(aFrom);
                    TokenItem token;
                    string value = null;

                    if (definition.TryGetToken(item, args.CaseSensitive, out token))
                    {
                        // Exact token name match; value-tokens consume the next argument word.
                        if (token.RequiresValue)
                        {
                            if (i + 1 < cArgs.Count)
                            {
                                token.Evaluate(cArgs[++i], state);
                            }
                            else
                            {
                                args.ErrorMessage = TokenizerMessages.RequiredArgumentValueIsMissing;
                                return(false);
                            }
                        }
                        else
                        {
                            token.Evaluate(null, state);
                        }
                        continue;
                    }
                    else
                    {
                        // Look for a shorter argument: the longest token prefix whose remainder
                        // the token accepts as a direct value (e.g. "-v2" => token "v", value "2").
                        for (int ii = item.Length - 1; ii > 0; ii--)
                        {
                            if (definition.TryGetToken(item.Substring(0, ii), args.CaseSensitive, out token) && token.AllowDirectValue(item.Substring(ii), state))
                            {
                                token.EvaluateDirect(item.Substring(ii), state);
                                break;
                            }
                            else
                            {
                                token = null;
                            }
                        }
                    }

                    if (token == null)
                    {
                        args.ErrorMessage = string.Format(CultureInfo.InvariantCulture, TokenizerMessages.UnknownArgumentX, a);
                        return(false);
                    }

                    // NOTE(review): 'value' is consumed from the argument list here but is never
                    // evaluated against the token afterwards — this path looks unfinished; verify.
                    if (token.RequiresValue && value == null)
                    {
                        if (i < cArgs.Count - 1)
                        {
                            value = cArgs[i++];
                        }
                        else
                        {
                            args.ErrorMessage = string.Format(CultureInfo.InvariantCulture, TokenizerMessages.ValueExpectedForArgumentX, a);
                            return(false);
                        }
                    }
                    continue;
                }
            }
            else if (!atEnd && args.AllowResponseFile && a.Length > 1 && a[0] == '@')
            {
                // Response file ("@file"): splice its words into the list right after this position.
                string file = a.Substring(1);

                if (!File.Exists(file))
                {
                    args.ErrorMessage = string.Format(CultureInfo.InvariantCulture, TokenizerMessages.ResponseFileXNotFound, file);
                    return(false);
                }

                using (StreamReader sr = File.OpenText(a.Substring(1)))
                {
                    string line;
                    int n = i + 1;

                    while (null != (line = sr.ReadLine()))
                    {
                        line = line.TrimStart();

                        // Skip blank / single-character lines and '#' comment lines.
                        if (line.Length > 1)
                        {
                            if (line[0] != '#')
                            {
                                foreach (string word in Tokenizer.GetCommandlineWords(line))
                                {
                                    cArgs.Insert(n++, word);
                                }
                            }
                        }
                    }
                }
                continue;
            }
            else if (!args.AllowNamedBetweenPlaced)
            {
                // First positional argument seen: named arguments are no longer accepted.
                atEnd = true;
            }

            // Positional handling: fill declared placed items first, then the rest-token.
            if (state.Definition.HasPlacedArguments)
            {
                if (nPlaced < state.Definition.PlacedItems.Count)
                {
                    state.Definition.PlacedItems[nPlaced].Evaluate(cArgs[i], state);
                    nPlaced++;
                }
                else if (state.Definition.RestToken != null)
                {
                    state.Definition.RestToken.Evaluate(cArgs[i], state);
                }
                else
                {
                    args.ErrorMessage = string.Format(CultureInfo.InvariantCulture, TokenizerMessages.UnknownArgumentX, cArgs[i]);
                    return(false);
                }
            }
        }

        // All words consumed; the definition decides whether required items were satisfied.
        if (!state.IsComplete)
        {
            return(false);
        }

        to = state.Instance;
        return(true);
    }
}
/// <summary>
/// Writes the specified instance to the XML writer: single-valued token members become
/// attributes (step 1), token groups and multi-valued members become child elements (step 2).
/// </summary>
/// <param name="writer">The writer to emit to; may not be null.</param>
/// <param name="instance">The instance to serialize; may not be null.</param>
/// <param name="args">The tokenizer arguments; may not be null.</param>
/// <returns>true on success; false when a nested group fails to write.</returns>
internal static bool TryWrite(XmlWriter writer, T instance, TokenizerArgs args)
{
    if (writer == null)
    {
        throw new ArgumentNullException("writer");
    }
    else if (instance == null)
    {
        throw new ArgumentNullException("instance");
    }
    else if (args == null)
    {
        throw new ArgumentNullException("args");
    }

    // Members emitted as attributes in step 1; step 2 skips these.
    Hashtable written = new Hashtable();

    using (TokenizerState<T> state = Tokenizer.NewState<T>(args, instance))
    {
        // Step 1: Try to write tokens as attributes
        foreach (TokenMember member in state.Definition.AllTokenMembers)
        {
            object[] values = member.GetValues(state);

            if (member.Tokens.Count > 0 && member.Groups.Count > 0)
            {
                continue; // Write the members as element
            }

            // Attribute form only applies to members with exactly one value...
            if ((values == null) || (values.Length == 0) || values.Length > 1)
            {
                continue;
            }
            else if (member.Tokens.Count <= 0)
            {
                // ...and at least one token to name the attribute.
                continue;
            }

            written[member] = member;

            foreach (object value in values)
            {
                if (value == null)
                {
                    continue;
                }

                Type type = value.GetType();

                // First named token whose declared type accepts the value wins.
                foreach (TokenItem ti in member.Tokens)
                {
                    if (ti.Name == null)
                    {
                        continue;
                    }

                    if (ti.ValueType != null && !ti.ValueType.IsAssignableFrom(type))
                    {
                        continue;
                    }

                    // Will throw if multiple times written -> Definition bug, resolve there
                    writer.WriteAttributeString(ti.Name, ti.GetStringValue(value, state));
                    break;
                }
            }
        }

        // Step 2: Write tokengroups and members with multiple values
        foreach (TokenMember member in state.Definition.AllTokenMembers)
        {
            if (written.Contains(member))
            {
                continue;
            }

            object[] values = member.GetValues(state);

            if ((values == null) || (values.Length == 0))
            {
                continue;
            }

            foreach (object value in values)
            {
                if (value == null)
                {
                    continue;
                }

                Type type = value.GetType();
                bool writtenItem = false;

                // Prefer group serialization (nested element subtree) when a compatible group exists.
                foreach (TokenGroupItem tg in member.Groups)
                {
                    if (tg.ValueType != null && !tg.ValueType.IsAssignableFrom(type))
                    {
                        continue;
                    }

                    writer.WriteStartElement(tg.Name);

                    // Will throw if multiple times written -> Definition bug, resolve there
                    if (!tg.TryWriteXml(writer, args.Clone(state.Instance), value))
                    {
                        return(false);
                    }

                    writer.WriteEndElement();
                    writtenItem = true;
                    break;
                }

                // Fall back to a simple element per matching token when no group applied.
                if (!writtenItem)
                {
                    foreach (TokenItem ti in member.Tokens)
                    {
                        if (ti.Name == null)
                        {
                            continue;
                        }

                        if (ti.ValueType != null && !ti.ValueType.IsAssignableFrom(type))
                        {
                            continue;
                        }

                        // Will throw if multiple times written -> Definition bug, resolve there
                        writer.WriteElementString(ti.Name, ti.GetStringValue(value, state));
                        break;
                    }
                }
            }
        }
    }

    return(true);
}
/// <summary>
/// Parses an XML element into a new <typeparamref name="T"/> instance: attributes are evaluated
/// as tokens, child elements as token groups or (as a fallback) tokens.
/// </summary>
/// <param name="element">The element to parse.</param>
/// <param name="args">The tokenizer arguments; <c>args.SkipUnknownNamedItems</c> controls
/// whether unrecognized attributes/elements fail the parse.</param>
/// <param name="to">Receives the parsed instance on success; null on failure.</param>
/// <returns>true on success; otherwise false.</returns>
internal static bool TryParse(IXPathNavigable element, TokenizerArgs args, out T to)
{
    XPathNavigator nav = element.CreateNavigator();
    to = null;

    using (TokenizerState<T> state = Tokenizer.NewState<T>(args))
    {
        // Pass 1: attributes map directly onto tokens.
        if (nav.MoveToFirstAttribute())
        {
            do
            {
                TokenItem ti;

                if (!state.Definition.TryGetToken(nav.LocalName, args.CaseSensitive, out ti))
                {
                    if (args.SkipUnknownNamedItems)
                    {
                        continue;
                    }
                    else
                    {
                        return(false);
                    }
                }

                ti.Evaluate(nav.Value, state);
            }while (nav.MoveToNextAttribute());
            // Return to the element itself before walking its children.
            nav.MoveToParent();
        }

        // Pass 2: child elements map onto token groups, or onto tokens as a fallback.
        if (nav.HasChildren)
        {
            if (nav.MoveToFirstChild())
            {
                do
                {
                    string name = nav.LocalName;
                    TokenGroupItem group;
                    TokenItem ti;

                    if (state.Definition.TryGetGroup(name, args.CaseSensitive, out group))
                    {
                        // Groups parse their own subtree recursively.
                        object value;

                        if (!group.TryParseXml(nav, args.Clone(state.Instance), out value))
                        {
                            return(false);
                        }

                        group.Member.SetValue(state, value);
                    }
                    else if (state.Definition.TryGetToken(name, args.CaseSensitive, out ti))
                    {
                        // Allow tokens as element
                        ti.Evaluate(nav.Value, state);
                    }
                    else if (!args.SkipUnknownNamedItems)
                    {
                        return(false);
                    }
                }while (nav.MoveToNext(XPathNodeType.Element));
            }
        }

        to = state.Instance;
        return(true);
    }
}