// Parses a Retry-After style value: either a delta-seconds token or an
// HTTP-date (RFC 7231 §7.1.3). Returns true and sets parsedValue on
// success; returns false (parsedValue null) on malformed input.
public static bool TryParse (string input, out RetryConditionHeaderValue parsedValue)
{
	parsedValue = null;

	var lexer = new Lexer (input);
	var first = lexer.Scan ();
	if (first != Token.Type.Token)
		return false;

	var delta = lexer.TryGetTimeSpanValue (first);
	if (delta == null) {
		// Not a plain number of seconds: the entire input must then
		// be a well-formed HTTP-date.
		DateTimeOffset date;
		if (!Lexer.TryGetDateValue (input, out date))
			return false;

		parsedValue = new RetryConditionHeaderValue (date);
		return true;
	}

	// Delta-seconds form: nothing may follow the number.
	if (lexer.Scan () != Token.Type.End)
		return false;

	parsedValue = new RetryConditionHeaderValue (delta.Value);
	return true;
}
// Attempts to parse input as a retry condition: a delta-seconds value
// (e.g. "120") or an HTTP-date. On failure parsedValue is left null.
public static bool TryParse(string input, out RetryConditionHeaderValue parsedValue)
{
	parsedValue = null;

	var lexer = new Lexer(input);
	var token = lexer.Scan();
	if (token != Token.Type.Token)
		return false;

	var seconds = lexer.TryGetTimeSpanValue(token);
	if (seconds != null) {
		// Delta-seconds: the number must be the sole token in the header.
		if (lexer.Scan() != Token.Type.End)
			return false;

		parsedValue = new RetryConditionHeaderValue(seconds.Value);
		return true;
	}

	// Fall back to interpreting the whole input as an HTTP-date.
	DateTimeOffset when;
	if (!Lexer.TryGetDateValue(input, out when))
		return false;

	parsedValue = new RetryConditionHeaderValue(when);
	return true;
}
// Parses a Cache-Control header value (RFC 7234 §5.2) into a
// CacheControlHeaderValue. Returns true on success; a null input is
// considered valid and yields a null parsedValue. Returns false on any
// malformed directive, leaving parsedValue null.
public static bool TryParse(string input, out CacheControlHeaderValue parsedValue) {
	parsedValue = null;
	if (input == null)
		return true;

	var value = new CacheControlHeaderValue ();
	var lexer = new Lexer (input);
	Token t;
	do {
		// Each iteration consumes one comma-separated directive.
		t = lexer.Scan ();
		if (t != Token.Type.Token)
			return false;

		string s = lexer.GetStringValue (t);
		// token_read: set when a case has already scanned the token that
		// follows the directive, so the loop must not scan it again.
		bool token_read = false;
		TimeSpan? ts;
		switch (s) {
		case "no-store":
			value.NoStore = true;
			break;
		case "no-transform":
			value.NoTransform = true;
			break;
		case "only-if-cached":
			value.OnlyIfCached = true;
			break;
		case "public":
			value.Public = true;
			break;
		case "must-revalidate":
			value.MustRevalidate = true;
			break;
		case "proxy-revalidate":
			value.ProxyRevalidate = true;
			break;
		case "max-stale":
			// max-stale may appear bare or as "max-stale=<seconds>".
			value.MaxStale = true;
			t = lexer.Scan ();
			if (t != Token.Type.SeparatorEqual) {
				// No '=': the scanned token belongs to the loop condition.
				token_read = true;
				break;
			}
			t = lexer.Scan ();
			if (t != Token.Type.Token)
				return false;
			ts = lexer.TryGetTimeSpanValue (t);
			if (ts == null)
				return false;
			value.MaxStaleLimit = ts;
			break;
		case "max-age":
		case "s-maxage":
		case "min-fresh":
			// These three directives always require "=<delta-seconds>".
			t = lexer.Scan ();
			if (t != Token.Type.SeparatorEqual) {
				return false;
			}
			t = lexer.Scan ();
			if (t != Token.Type.Token)
				return false;
			ts = lexer.TryGetTimeSpanValue (t);
			if (ts == null)
				return false;
			// Disambiguate by name length:
			// "max-age" = 7, "s-maxage" = 8, "min-fresh" = 9.
			switch (s.Length) {
			case 7:
				value.MaxAge = ts;
				break;
			case 8:
				value.SharedMaxAge = ts;
				break;
			default:
				value.MinFresh = ts;
				break;
			}
			break;
		case "private":
		case "no-cache":
			// "private" has length 7, "no-cache" length 8 — the same
			// length test is reused below for the field-name lists.
			if (s.Length == 7) {
				value.Private = true;
			} else {
				value.NoCache = true;
			}
			// An optional quoted field-name list may follow, e.g.
			// no-cache="set-cookie, authorization".
			t = lexer.Scan ();
			if (t != Token.Type.SeparatorEqual) {
				token_read = true;
				break;
			}
			t = lexer.Scan ();
			if (t != Token.Type.QuotedString)
				return false;
			foreach (var entry in lexer.GetQuotedStringValue (t).Split (',')) {
				var qs = entry.Trim ('\t', ' ');
				if (s.Length == 7) {
					value.PrivateHeaders.Add (qs);
				} else {
					value.NoCache = true;
					value.NoCacheHeaders.Add (qs);
				}
			}
			break;
		default:
			// Unknown directive: stored as a name[=value] extension.
			string name = lexer.GetStringValue (t);
			string svalue = null;
			t = lexer.Scan ();
			if (t == Token.Type.SeparatorEqual) {
				t = lexer.Scan ();
				switch (t.Kind) {
				case Token.Type.Token:
				case Token.Type.QuotedString:
					svalue = lexer.GetStringValue (t);
					break;
				default:
					return false;
				}
			} else {
				token_read = true;
			}
			value.Extensions.Add (NameValueHeaderValue.Create (name, svalue));
			break;
		}
		// Advance past the directive unless a case already did.
		if (!token_read)
			t = lexer.Scan ();
	} while (t == Token.Type.SeparatorComma);
	// The directive list must be terminated by end-of-input.
	if (t != Token.Type.End)
		return false;

	parsedValue = value;
	return true;
}
// Parses a Cache-Control header value (RFC 7234 §5.2) into a
// CacheControlHeaderValue. A null input is treated as valid and yields a
// null parsedValue; any malformed directive makes the whole parse fail.
public static bool TryParse(string input, out CacheControlHeaderValue parsedValue) {
	parsedValue = null;
	if (input == null) {
		return(true);
	}

	var value = new CacheControlHeaderValue();
	var lexer = new Lexer(input);
	Token t;
	do {
		// One comma-separated directive per iteration.
		t = lexer.Scan();
		if (t != Token.Type.Token) {
			return(false);
		}
		string s = lexer.GetStringValue(t);
		// token_read: true when a case below has already scanned the token
		// following the directive, so the loop must not scan again.
		bool token_read = false;
		TimeSpan? ts;
		switch (s) {
		case "no-store":
			value.NoStore = true;
			break;
		case "no-transform":
			value.NoTransform = true;
			break;
		case "only-if-cached":
			value.OnlyIfCached = true;
			break;
		case "public":
			value.Public = true;
			break;
		case "must-revalidate":
			value.MustRevalidate = true;
			break;
		case "proxy-revalidate":
			value.ProxyRevalidate = true;
			break;
		case "max-stale":
			// max-stale may appear bare or as "max-stale=<seconds>".
			value.MaxStale = true;
			t = lexer.Scan();
			if (t != Token.Type.SeparatorEqual) {
				// No '=': the scanned token feeds the loop condition.
				token_read = true;
				break;
			}
			t = lexer.Scan();
			if (t != Token.Type.Token) {
				return(false);
			}
			ts = lexer.TryGetTimeSpanValue(t);
			if (ts == null) {
				return(false);
			}
			value.MaxStaleLimit = ts;
			break;
		case "max-age":
		case "s-maxage":
		case "min-fresh":
			// These three always require "=<delta-seconds>".
			t = lexer.Scan();
			if (t != Token.Type.SeparatorEqual) {
				return(false);
			}
			t = lexer.Scan();
			if (t != Token.Type.Token) {
				return(false);
			}
			ts = lexer.TryGetTimeSpanValue(t);
			if (ts == null) {
				return(false);
			}
			// Disambiguate by name length:
			// "max-age" = 7, "s-maxage" = 8, "min-fresh" = 9.
			switch (s.Length) {
			case 7:
				value.MaxAge = ts;
				break;
			case 8:
				value.SharedMaxAge = ts;
				break;
			default:
				value.MinFresh = ts;
				break;
			}
			break;
		case "private":
		case "no-cache":
			// "private" has length 7, "no-cache" length 8 — the same
			// length test is reused below for the field-name lists.
			if (s.Length == 7) {
				value.Private = true;
			} else {
				value.NoCache = true;
			}
			// An optional quoted field-name list may follow, e.g.
			// no-cache="set-cookie, authorization".
			t = lexer.Scan();
			if (t != Token.Type.SeparatorEqual) {
				token_read = true;
				break;
			}
			t = lexer.Scan();
			if (t != Token.Type.QuotedString) {
				return(false);
			}
			foreach (var entry in lexer.GetQuotedStringValue(t).Split(',')) {
				var qs = entry.Trim('\t', ' ');
				if (s.Length == 7) {
					value.PrivateHeaders.Add(qs);
				} else {
					value.NoCache = true;
					value.NoCacheHeaders.Add(qs);
				}
			}
			break;
		default:
			// Unknown directive: stored as a name[=value] extension.
			string name = lexer.GetStringValue(t);
			string svalue = null;
			t = lexer.Scan();
			if (t == Token.Type.SeparatorEqual) {
				t = lexer.Scan();
				switch (t.Kind) {
				case Token.Type.Token:
				case Token.Type.QuotedString:
					svalue = lexer.GetStringValue(t);
					break;
				default:
					return(false);
				}
			} else {
				token_read = true;
			}
			value.Extensions.Add(NameValueHeaderValue.Create(name, svalue));
			break;
		}
		// Advance past the directive unless a case already did.
		if (!token_read) {
			t = lexer.Scan();
		}
	} while (t == Token.Type.SeparatorComma);
	// The directive list must end exactly at end-of-input.
	if (t != Token.Type.End) {
		return(false);
	}
	parsedValue = value;
	return(true);
}