/// <summary>
/// Get Test Queries: reads one query per line from <paramref name="in"/> and
/// returns them as an enumerator. The stream is fully consumed and closed
/// before this method returns.
/// Line format: an optional "[comment]" prefix, then whitespace-separated ids,
/// then '@' followed by the spatial-args expression, e.g. "G1 G2 @ Intersects(...)".
/// </summary>
public static IEnumerator <SpatialTestQuery> GetTestQueries(
    SpatialArgsParser parser,
    SpatialContext ctx,
    string name,
    Stream @in)
{
    List <SpatialTestQuery> results = new List <SpatialTestQuery>();
    TextReader bufInput = new StreamReader(@in, Encoding.UTF8);
    try
    {
        String line;
        for (int lineNumber = 1; (line = bufInput.ReadLine()) != null; lineNumber++)
        {
            SpatialTestQuery test = new SpatialTestQuery();
            test.line = line;
            test.lineNumber = lineNumber;
            try
            {
                // skip a comment: a leading "[...]" bracket group is stripped
                // and the remainder of the line is still parsed.
                if (line.StartsWith("[", StringComparison.Ordinal))
                {
                    int idx2 = line.IndexOf(']');
                    if (idx2 > 0)
                    {
                        line = line.Substring(idx2 + 1);
                    }
                }
                // NOTE: if the line has no '@', idx is -1 and Substring throws;
                // the catch below then reports it as an invalid query line.
                int idx = line.IndexOf('@');
                // "idx - 0" is an artifact of the Java substring(begin, end)
                // conversion; it is simply the ids portion before the '@'.
                StringTokenizer st = new StringTokenizer(line.Substring(0, idx - 0));
                while (st.MoveNext())
                {
                    test.ids.Add(st.Current.Trim());
                }
                // Everything after the '@' is the spatial-args expression.
                test.args = parser.Parse(line.Substring(idx + 1).Trim(), ctx);
                results.Add(test);
            }
            catch (Exception ex)
            {
                // Wrap with the offending line so the bad input is identifiable.
                throw RuntimeException.Create("invalid query line: " + test.line, ex);
            }
        }
    }
    finally
    {
        bufInput.Dispose();
    }
    return (results.GetEnumerator());
}
/// <summary>
/// Parses the task params as a comma-separated culture spec
/// ("language[,country[,variant]]") and stores it as a .NET culture name
/// ("language" or "language-COUNTRY"); any third (variant) token is discarded.
/// </summary>
public override void SetParams(string @params)
{
    base.SetParams(@params);
    culture = "";
    StringTokenizer tokenizer = new StringTokenizer(@params, ",");
    // First token: the language part of the culture name.
    if (tokenizer.MoveNext())
    {
        culture = tokenizer.Current;
        // Second token: the country part, appended as "language-COUNTRY".
        if (tokenizer.MoveNext())
        {
            culture += "-" + tokenizer.Current;
            // Third token (the Java "variant"): read and deliberately ignored.
            if (tokenizer.MoveNext())
            {
                _ = tokenizer.Current;
            }
        }
    }
}
// Verifies that every line of a stemmer-table file round-trips through the
// trie: applying the stored Diff command for each token must reproduce the
// line's stem (first token). 'usefull' selects full-path vs. last-on-path
// lookup; 'storeorig' additionally checks that the stem maps back to itself.
private static void AssertTrie(Trie trie, string file, bool usefull, bool storeorig)
{
    using TextReader @in = new StreamReader(new FileStream(file, FileMode.Open), Encoding.UTF8);
    for (string line = @in.ReadLine(); line != null; line = @in.ReadLine())
    {
        line = line.ToLowerInvariant();
        using StringTokenizer st = new StringTokenizer(line);
        if (st.MoveNext())
        {
            // First token on the line is the stem; the rest are inflected forms.
            string stem = st.Current;
            if (storeorig)
            {
                // When originals were stored, the stem's own command must be a no-op.
                string cmd = (usefull) ? trie.GetFully(stem) : trie
                             .GetLastOnPath(stem);
                StringBuilder stm = new StringBuilder(stem);
                Diff.Apply(stm, cmd);
                assertEquals(stem.ToLowerInvariant(), stm.ToString().ToLowerInvariant());
            }
            while (st.MoveNext())
            {
                string token = st.Current;
                // A token identical to the stem carries no information; skip it.
                if (token.Equals(stem, StringComparison.Ordinal))
                {
                    continue;
                }
                // Applying the trie's patch command to the token must yield the stem.
                string cmd = (usefull) ? trie.GetFully(token) : trie
                             .GetLastOnPath(token);
                StringBuilder stm = new StringBuilder(token);
                Diff.Apply(stm, cmd);
                assertEquals(stem.ToLowerInvariant(), stm.ToString().ToLowerInvariant());
            }
        }
        else // LUCENENET: st.MoveNext() will return false rather than throwing a NoSuchElementException
        {
            // no base token (stem) on a line
        }
    }
}
/// <summary>
/// Compares two dot-separated numeric version strings segment by segment
/// ("1.10" &gt; "1.9"). Trailing zero segments are insignificant, so
/// "1.0" compares equal to "1.0.0".
/// </summary>
/// <param name="a">First version string, e.g. "1.2.3".</param>
/// <param name="b">Second version string.</param>
/// <returns>-1 if a &lt; b, 1 if a &gt; b, 0 if equivalent.</returns>
public int Compare(string a, string b)
{
    // Idiomatic: string.Split replaces the hand-rolled tokenizer.
    // RemoveEmptyEntries mirrors the tokenizer's skipping of empty tokens.
    string[] aTokens = a.Split(new[] { '.' }, StringSplitOptions.RemoveEmptyEntries);
    string[] bTokens = b.Split(new[] { '.' }, StringSplitOptions.RemoveEmptyEntries);

    int common = Math.Min(aTokens.Length, bTokens.Length);
    for (int i = 0; i < common; i++)
    {
        int aToken = Convert.ToInt32(aTokens[i], CultureInfo.InvariantCulture);
        int bToken = Convert.ToInt32(bTokens[i], CultureInfo.InvariantCulture);
        if (aToken != bToken)
        {
            return aToken < bToken ? -1 : 1;
        }
    }

    // a has some extra trailing tokens. if these are all zeroes, thats ok.
    for (int i = common; i < aTokens.Length; i++)
    {
        if (Convert.ToInt32(aTokens[i], CultureInfo.InvariantCulture) != 0)
        {
            return 1;
        }
    }
    // b has some extra trailing tokens. if these are all zeroes, thats ok.
    for (int i = common; i < bTokens.Length; i++)
    {
        if (Convert.ToInt32(bTokens[i], CultureInfo.InvariantCulture) != 0)
        {
            return -1;
        }
    }
    return 0;
}
/// <summary>
/// Returns the getter (property/field) type of <paramref name="memberName"/>
/// on <paramref name="type"/>. Dotted paths ("a.b.c") are supported: the chain
/// is walked segment by segment, narrowing the type at each step.
/// </summary>
public static Type GetMemberTypeForGetter(Type type, string memberName)
{
    // Simple (non-dotted) member name: resolve directly on the given type.
    if (memberName.IndexOf('.') < 0)
    {
        return ReflectionInfo.GetInstance(type).GetGetterType(memberName);
    }

    // Dotted path: each segment's getter type becomes the next lookup target.
    IEnumerator segments = new StringTokenizer(memberName, ".").GetEnumerator();
    Type currentType = type;
    while (segments.MoveNext())
    {
        string segment = (string)segments.Current;
        currentType = ReflectionInfo.GetInstance(currentType).GetGetterType(segment);
    }
    return currentType;
}
/// <summary>
/// Resolves the <c>MemberInfo</c> for the setter of <paramref name="memberName"/>
/// on <paramref name="type"/>. Dotted paths ("a.b.c") are supported: the chain
/// is walked via setter types and the setter is resolved on the type that
/// declares the LAST path segment.
/// </summary>
public static MemberInfo GetMemberInfoForSetter(Type type, string memberName)
{
    if (memberName.IndexOf('.') > -1)
    {
        IEnumerator enumerator = new StringTokenizer(memberName, ".").GetEnumerator();
        // type2 trails one step behind 'type': after the loop it holds the
        // declaring type of the final segment (whose name is left in memberName).
        Type type2 = null;
        while (enumerator.MoveNext())
        {
            memberName = (string)enumerator.Current;
            type2 = type;
            type = ReflectionInfo.GetInstance(type).GetSetterType(memberName);
        }
        return (ReflectionInfo.GetInstance(type2).GetSetter(memberName));
    }
    return (ReflectionInfo.GetInstance(type).GetSetter(memberName));
}
// extract a multi-valued property to an array, e.g. for "a:b:c" return
// string[] { "a", "b", "c" }; a value without ':' yields a one-element array.
private string[] PropToStringArray(string s)
{
    if (s.IndexOf(':') < 0)
    {
        return (new string[] { s });
    }
    // Idiomatic: Split replaces the hand-rolled tokenizer; RemoveEmptyEntries
    // mirrors StringTokenizer's skipping of empty tokens ("a::b" -> a, b).
    return s.Split(new[] { ':' }, StringSplitOptions.RemoveEmptyEntries);
}
public void ST005()
{
    // arrange: advance the tokenizer past the end of the 4-char source.
    var tokenizer = new StringTokenizer("1234");
    for (var step = 0; step < 5; step++)
    {
        tokenizer.MoveNext();
    }

    // act
    var token = tokenizer.CurrentToken;

    // assert: no characters were accumulated, so the token is empty.
    token.Should().BeEmpty();
}
public void ST009()
{
    // arrange: consume every character of the source, accumulating each one.
    var tokenizer = new StringTokenizer("1234");
    while (tokenizer.MoveNext())
    {
        tokenizer.AddCurrentChar();
    }

    // act
    var positionIsValid = tokenizer.PositionIsValid;

    // assert: the cursor has moved past the last character.
    positionIsValid.Should().BeFalse();
}
public void ST007()
{
    // arrange: append a literal 'A' for each of the four source characters.
    var tokenizer = new StringTokenizer("1234");
    while (tokenizer.MoveNext())
    {
        tokenizer.AddChar('A');
    }

    // act
    var token = tokenizer.CurrentToken;

    // assert: four iterations produce four appended characters.
    token.Should().Be("AAAA");
}
/// <summary>
/// Get the list of codecs discovered via a Java ServiceLoader, or
/// listed in the configuration.
/// </summary>
/// <remarks>
/// Get the list of codecs discovered via a Java ServiceLoader, or
/// listed in the configuration. Codecs specified in configuration come
/// later in the returned list, and are considered to override those
/// from the ServiceLoader.
/// </remarks>
/// <param name="conf">the configuration to look in</param>
/// <returns>
/// a list of the
/// <see cref="CompressionCodec"/>
/// classes
/// </returns>
public static IList <Type> GetCodecClasses(Configuration conf)
{
    IList <Type> result = new AList <Type>();
    // Add codec classes discovered via service loading
    lock (CodecProviders)
    {
        // CODEC_PROVIDERS is a lazy collection. Synchronize so it is
        // thread-safe. See HADOOP-8406.
        foreach (CompressionCodec codec in CodecProviders)
        {
            result.AddItem(codec.GetType());
        }
    }
    // Add codec classes from configuration
    string codecsString = conf.Get(CommonConfigurationKeys.IoCompressionCodecsKey);
    if (codecsString != null)
    {
        StringTokenizer codecSplit = new StringTokenizer(codecsString, ",");
        while (codecSplit.MoveNext())
        {
            // NOTE(review): NextToken() is called after MoveNext(). If this
            // StringTokenizer port follows IEnumerator semantics (MoveNext
            // consumes a token and Current reads it, as other call sites in
            // this codebase assume), this skips every other codec name —
            // confirm against the tokenizer implementation used here.
            string codecSubstring = codecSplit.NextToken().Trim();
            if (codecSubstring.Length != 0)
            {
                try
                {
                    Type cls = conf.GetClassByName(codecSubstring);
                    // Reject configured classes that are not codecs.
                    if (!typeof(CompressionCodec).IsAssignableFrom(cls))
                    {
                        throw new ArgumentException("Class " + codecSubstring + " is not a CompressionCodec"
                                                    );
                    }
                    result.AddItem(cls.AsSubclass <CompressionCodec>());
                }
                catch (TypeLoadException ex)
                {
                    throw new ArgumentException("Compression codec " + codecSubstring + " not found."
                                                , ex);
                }
            }
        }
    }
    return (result);
}
public void ST003()
{
    // arrange: advance the tokenizer past the end of the 4-char source.
    var tokenizer = new StringTokenizer("1234");
    for (var step = 0; step < 5; step++)
    {
        tokenizer.MoveNext();
    }

    // act
    char currentChar;
    Action readPastEnd = () => currentChar = tokenizer.CurrentChar;

    // assert: reading CurrentChar past the end must throw.
    readPastEnd.Should().ThrowExactly <InvalidOperationException>();
}
public void ST010()
{
    // arrange: accumulate all four source characters into the current token.
    var tokenizer = new StringTokenizer("1234");
    while (tokenizer.MoveNext())
    {
        tokenizer.AddCurrentChar();
    }

    // act: clearing must discard everything accumulated so far.
    tokenizer.ClearToken();
    var token = tokenizer.CurrentToken;

    // assert
    token.Should().BeEmpty();
}
/// <summary>
/// Reads the value of <paramref name="memberName"/> from <paramref name="obj"/>.
/// Dotted paths ("a.b.c") are dereferenced one segment at a time; a null
/// anywhere along the chain short-circuits and returns null.
/// </summary>
public static object GetMemberValue(object obj, string memberName, AccessorFactory accessorFactory)
{
    // Dotted path: walk the object graph segment by segment.
    if (memberName.IndexOf('.') >= 0)
    {
        IEnumerator segments = new StringTokenizer(memberName, ".").GetEnumerator();
        object value = obj;
        while (segments.MoveNext())
        {
            value = GetMember(value, (string)segments.Current, accessorFactory);
            if (value == null)
            {
                // Intermediate null: nothing further to dereference.
                return null;
            }
        }
        return value;
    }

    // Simple member name: single direct lookup.
    return GetMember(obj, memberName, accessorFactory);
}
// extract properties to array, e.g. for "10.7:100.4:-2.3" return
// double[] { 10.7, 100.4, -2.3 }. (Header previously said "int[]".)
private double[] PropToDoubleArray(string s)
{
    if (s.IndexOf(':') < 0)
    {
        return (new double[] { double.Parse(s, CultureInfo.InvariantCulture) });
    }
    // Idiomatic: Split replaces the hand-rolled tokenizer; RemoveEmptyEntries
    // mirrors StringTokenizer's skipping of empty tokens.
    string[] tokens = s.Split(new[] { ':' }, StringSplitOptions.RemoveEmptyEntries);
    double[] res = new double[tokens.Length];
    for (int i = 0; i < tokens.Length; i++)
    {
        res[i] = double.Parse(tokens[i], CultureInfo.InvariantCulture);
    }
    return res;
}
// extract properties to array, e.g. for "true:true:false" return
// bool[] { true, true, false }. (Header previously showed the wrong result.)
private bool[] PropToBooleanArray(string s)
{
    if (s.IndexOf(':') < 0)
    {
        return (new bool[] { bool.Parse(s) });
    }
    // Idiomatic: Split replaces the hand-rolled tokenizer; RemoveEmptyEntries
    // mirrors StringTokenizer's skipping of empty tokens.
    string[] tokens = s.Split(new[] { ':' }, StringSplitOptions.RemoveEmptyEntries);
    bool[] res = new bool[tokens.Length];
    for (int i = 0; i < tokens.Length; i++)
    {
        res[i] = bool.Parse(tokens[i]);
    }
    return res;
}
/// <summary>
/// Returns true when <paramref name="obj"/> exposes a writable member named
/// <paramref name="propertyName"/>. Dictionaries are checked by key presence;
/// dotted paths ("a.b.c") are walked through getter types segment by segment.
/// </summary>
public static bool HasWritableProperty(object obj, string propertyName)
{
    bool flag = false;
    if (obj is IDictionary)
    {
        // For dictionaries, "writable" simply means the key already exists.
        return (((IDictionary)obj).Contains(propertyName));
    }
    if (propertyName.IndexOf('.') > -1)
    {
        IEnumerator enumerator = new StringTokenizer(propertyName, ".").GetEnumerator();
        Type getterType = obj.GetType();
        while (enumerator.MoveNext())
        {
            propertyName = (string)enumerator.Current;
            // NOTE(review): getterType is advanced to the segment's own getter
            // type BEFORE HasWritableMember is consulted, so the writability
            // check runs against the segment's type rather than its declaring
            // type — confirm this matches the upstream iBATIS semantics.
            getterType = ReflectionInfo.GetInstance(getterType).GetGetterType(propertyName);
            flag = ReflectionInfo.GetInstance(getterType).HasWritableMember(propertyName);
        }
        return (flag);
    }
    return (ReflectionInfo.GetInstance(obj.GetType()).HasWritableMember(propertyName));
}
// extract properties to array, e.g. for "10:100:5" return int[] { 10, 100, 5 }.
private int[] PropToInt32Array(string s)
{
    if (s.IndexOf(':') < 0)
    {
        return (new int[] { int.Parse(s, CultureInfo.InvariantCulture) });
    }
    // Idiomatic: Split replaces the hand-rolled tokenizer; RemoveEmptyEntries
    // mirrors StringTokenizer's skipping of empty tokens.
    string[] tokens = s.Split(new[] { ':' }, StringSplitOptions.RemoveEmptyEntries);
    int[] res = new int[tokens.Length];
    for (int i = 0; i < tokens.Length; i++)
    {
        res[i] = int.Parse(tokens[i], CultureInfo.InvariantCulture);
    }
    return res;
}
/// <summary>
/// Parses "a=b c=d f" (whitespace separated) into name-value pairs. If there
/// is no '=' as in 'f' above then it's short for f=f. Later duplicates of a
/// key overwrite earlier ones.
/// </summary>
protected static IDictionary <string, string> ParseMap(string body)
{
    var map = new Dictionary <string, string>();
    // Idiomatic: Split on the same whitespace set the hand-rolled tokenizer
    // used; RemoveEmptyEntries mirrors its skipping of empty tokens.
    foreach (string pair in body.Split(new[] { ' ', '\n', '\t' }, StringSplitOptions.RemoveEmptyEntries))
    {
        int idx = pair.IndexOf('=');
        if (idx > 0)
        {
            // "k=v" form.
            map[pair.Substring(0, idx)] = pair.Substring(idx + 1);
        }
        else
        {
            // Bare token (or one starting with '='): shorthand for f=f.
            map[pair] = pair;
        }
    }
    return map;
}
/// <summary>
/// Entry point to the Compile application.
/// <para/>
/// This program takes any number of arguments: the first is the name of the
/// desired stemming algorithm to use (a list is available in the package
/// description), all of the rest should be the path or paths to a file or
/// files containing a stemmer table to compile. An optional
/// <c>-e/--encoding &lt;charset&gt;</c> pair overrides the table encoding.
/// </summary>
/// <param name="args">the command line arguments</param>
public static void Main(string[] args)
{
    if (args.Length < 1)
    {
        return;
    }

    // Decode the mode flags packed into the first argument:
    // leading '-' = backward, '0' = store originals, 'M' = multi.
    backward = args[0][0] == '-';
    int qq = (backward) ? 1 : 0;
    bool storeorig = false;
    if (args[0][qq] == '0')
    {
        storeorig = true;
        qq++;
    }
    multi = args[0][qq] == 'M';
    if (multi)
    {
        qq++;
    }

    // LUCENENET specific - reformatted with :
    string charset = SystemProperties.GetProperty("egothor:stemmer:charset", "UTF-8");
    var stemmerTables = new List <string>(); // LUCENENET specific

    // command line argument overrides environment variable or default, if supplied
    for (int i = 1; i < args.Length; i++)
    {
        if ("-e".Equals(args[i], StringComparison.Ordinal) || "--encoding".Equals(args[i], StringComparison.Ordinal))
        {
            // BUGFIX: the encoding value is the argument FOLLOWING the
            // -e/--encoding flag; previously the flag string itself was
            // assigned to charset. A trailing flag with no value is ignored.
            if (i + 1 < args.Length)
            {
                charset = args[++i];
            }
        }
        else
        {
            stemmerTables.Add(args[i]);
        }
    }

    // The remaining characters of the first argument name the optimizer passes.
    char[] optimizer = new char[args[0].Length - qq];
    for (int i = 0; i < optimizer.Length; i++)
    {
        optimizer[i] = args[0][qq + i];
    }

    foreach (var stemmerTable in stemmerTables)
    {
        Diff diff = new Diff();
        int words = 0;
        AllocTrie();
        Console.WriteLine(stemmerTable);
        using (TextReader input = new StreamReader(
                   new FileStream(stemmerTable, FileMode.Open, FileAccess.Read),
                   Encoding.GetEncoding(charset)))
        {
            string line;
            while ((line = input.ReadLine()) != null)
            {
                try
                {
                    line = line.ToLowerInvariant();
                    StringTokenizer st = new StringTokenizer(line);
                    // First token on the line is the stem.
                    st.MoveNext();
                    string stem = st.Current;
                    if (storeorig)
                    {
                        trie.Add(stem, "-a");
                        words++;
                    }
                    // Remaining tokens are inflected forms mapped to the stem.
                    while (st.MoveNext())
                    {
                        string token = st.Current;
                        if (token.Equals(stem, StringComparison.Ordinal) == false)
                        {
                            trie.Add(token, diff.Exec(token, stem));
                            words++;
                        }
                    }
                }
                catch (InvalidOperationException /*x*/)
                {
                    // no base token (stem) on a line
                }
            }
        }

        // Apply the requested reduction passes in the order given.
        Optimizer o = new Optimizer();
        Optimizer2 o2 = new Optimizer2();
        Lift l = new Lift(true);
        Lift e = new Lift(false);
        Gener g = new Gener();
        for (int j = 0; j < optimizer.Length; j++)
        {
            string prefix;
            switch (optimizer[j])
            {
                case 'G':
                    trie = trie.Reduce(g);
                    prefix = "G: ";
                    break;

                case 'L':
                    trie = trie.Reduce(l);
                    prefix = "L: ";
                    break;

                case 'E':
                    trie = trie.Reduce(e);
                    prefix = "E: ";
                    break;

                case '2':
                    trie = trie.Reduce(o2);
                    prefix = "2: ";
                    break;

                case '1':
                    trie = trie.Reduce(o);
                    prefix = "1: ";
                    break;

                default:
                    continue;
            }
            trie.PrintInfo(Console.Out, prefix + " ");
        }

        // Persist the compiled trie next to the input table.
        using (DataOutputStream os = new DataOutputStream(
                   new FileStream(stemmerTable + ".out", FileMode.OpenOrCreate, FileAccess.Write)))
        {
            os.WriteUTF(args[0]);
            trie.Store(os);
        }
    }
}
/// <summary>
/// Called at the end of a deathmatch level: chooses the next map from the
/// sv_maplist cvar (advancing past the current map, wrapping to the first
/// entry at the end), from level.nextmap, or from a target_changelevel
/// entity, then begins the intermission.
/// </summary>
public static void EndDMLevel()
{
    edict_t ent;
    string s, t, f;
    // NOTE: the "\\n\\r" here are the literal characters '\', 'n', 'r'
    // (port-era escaping), not newline/carriage-return — preserved as-is.
    string seps = " ,\\n\\r";
    // Stay on the current map when DF_SAME_LEVEL is set.
    if (((int)dmflags.value & Defines.DF_SAME_LEVEL) != 0)
    {
        PlayerHud.BeginIntermission(CreateTargetChangeLevel(level.mapname));
        return;
    }
    if (sv_maplist.string_renamed.Length > 0)
    {
        s = sv_maplist.string_renamed;
        f = null; // first map in the list, used to wrap around
        StringTokenizer tk = new StringTokenizer(s, seps);
        while (tk.RemainingTokens > 0)
        {
            tk.MoveNext();
            t = tk.Current;
            if (f == null)
            {
                f = t;
            }
            if (t.EqualsIgnoreCase(level.mapname))
            {
                // Current map found in the list: take the next entry, or wrap
                // to the first when the current map was the last entry.
                if (tk.RemainingTokens == 0)
                {
                    if (f == null)
                    {
                        PlayerHud.BeginIntermission(CreateTargetChangeLevel(level.mapname));
                    }
                    else
                    {
                        PlayerHud.BeginIntermission(CreateTargetChangeLevel(f));
                    }
                }
                else
                {
                    tk.MoveNext();
                    PlayerHud.BeginIntermission(CreateTargetChangeLevel(tk.Current));
                }
                return;
            }
        }
    }
    // Not in a map list: fall back to nextmap, then to a changelevel entity.
    if (level.nextmap.Length > 0)
    {
        PlayerHud.BeginIntermission(CreateTargetChangeLevel(level.nextmap));
    }
    else
    {
        EdictIterator edit = null;
        edit = G_Find(edit, findByClass, "target_changelevel");
        if (edit == null)
        {
            // No changelevel target either: restart the current map.
            PlayerHud.BeginIntermission(CreateTargetChangeLevel(level.mapname));
            return;
        }
        ent = edit.o;
        PlayerHud.BeginIntermission(ent);
    }
}
// Splits 'word' on special Unicode space characters and emits a WordArea for
// each text fragment plus an InlineSpace for each NBSP-like separator. Areas
// are queued on the pending list or added as children depending on
// 'addToPending'; link rectangles are registered on 'ls' when it is non-null.
private void addSpacedWord(string word, LinkSet ls, int startw, int spacew, TextState textState, bool addToPending)
{
    /*
     * Split string based on four delimeters:
     * \u00A0 - Latin1 NBSP (Non breaking space)
     * \u202F - unknown reserved character according to Unicode Standard
     * \u3000 - CJK IDSP (Ideographic space)
     * \uFEFF - Arabic ZWN BSP (Zero width no break space)
     */
    StringTokenizer st = new StringTokenizer(word, "\u00A0\u202F\u3000\uFEFF", true);
    int extraw = 0; // accumulated width of emitted spaces (offsets link rects)
    while (st.MoveNext())
    {
        string currentWord = (string)st.Current;
        if (currentWord.Length == 1 && (isNBSP(currentWord[0])))
        {
            // Add an InlineSpace
            int spaceWidth = getCharWidth(currentWord[0]);
            if (spaceWidth > 0)
            {
                InlineSpace ispace = new InlineSpace(spaceWidth);
                extraw += spaceWidth;
                // Carry the preceding word's decorations across the space so
                // underline/overline/line-through runs are continuous.
                if (prevUlState)
                {
                    ispace.setUnderlined(textState.getUnderlined());
                }
                if (prevOlState)
                {
                    ispace.setOverlined(textState.getOverlined());
                }
                if (prevLTState)
                {
                    ispace.setLineThrough(textState.getLineThrough());
                }
                if (addToPending)
                {
                    pendingAreas.Add(ispace);
                    pendingWidth += spaceWidth;
                }
                else
                {
                    addChild(ispace);
                }
            }
        }
        else
        {
            // Ordinary text fragment: build a WordArea with the current
            // font/color state and remember its decoration flags.
            WordArea ia = new WordArea(currentFontState, this.red, this.green,
                                       this.blue, currentWord,
                                       getWordWidth(currentWord));
            ia.setYOffset(placementOffset);
            ia.setUnderlined(textState.getUnderlined());
            prevUlState = textState.getUnderlined();
            ia.setOverlined(textState.getOverlined());
            prevOlState = textState.getOverlined();
            ia.setLineThrough(textState.getLineThrough());
            prevLTState = textState.getLineThrough();
            ia.setVerticalAlign(vAlign);
            if (addToPending)
            {
                pendingAreas.Add(ia);
                pendingWidth += getWordWidth(currentWord);
            }
            else
            {
                addChild(ia);
            }
            if (ls != null)
            {
                // Register the word's rectangle for link hit-testing.
                Rectangle lr = new Rectangle(startw + extraw, spacew,
                                             ia.getContentWidth(), fontState.FontSize);
                ls.addRect(lr, this, ia);
            }
        }
    }
}
public void Verification_Simple()
{
    StringTokenizer st = new StringTokenizer("This is a simple test!");
    // Before the first MoveNext there is no current token.
    Assert.IsNull(st.Current);

    // Expected token stream: words and punctuation separated by whitespace.
    StringTokenType[] expectedTypes =
    {
        StringTokenType.Word, StringTokenType.Whitespace,
        StringTokenType.Word, StringTokenType.Whitespace,
        StringTokenType.Word, StringTokenType.Whitespace,
        StringTokenType.Word, StringTokenType.Whitespace,
        StringTokenType.Word, StringTokenType.Punctuation
    };
    string[] expectedStrings =
    {
        "This", " ", "is", " ", "a", " ", "simple", " ", "test", "!"
    };

    for (int i = 0; i < expectedTypes.Length; i++)
    {
        Assert.IsTrue(st.MoveNext());
        StringToken token = st.Current;
        Assert.AreEqual(expectedTypes[i], token.Type);
        Assert.AreEqual(expectedStrings[i], token.String);
    }

    // The tokenizer is exhausted after the final token.
    Assert.IsFalse(st.MoveNext());
}
/// <summary>
/// Sends a generic OpenID Connect request to the given endpoint and
/// converts the returned response to an OpenID Connect response.
/// Supported response shapes: a redirect carrying parameters in the fragment
/// or query, an application/json body, a text/html form, or text/plain
/// "name:value" lines; anything else yields an empty response.
/// </summary>
/// <param name="method">The HTTP method used to send the OpenID Connect request.</param>
/// <param name="uri">The endpoint to which the request is sent.</param>
/// <param name="request">The OpenID Connect request to send.</param>
/// <returns>The OpenID Connect response returned by the server.</returns>
public virtual async Task <OpenIdConnectResponse> SendAsync(
    [NotNull] HttpMethod method, [NotNull] Uri uri,
    [NotNull] OpenIdConnectRequest request)
{
    if (method == null)
    {
        throw new ArgumentNullException(nameof(method));
    }
    if (uri == null)
    {
        throw new ArgumentNullException(nameof(uri));
    }
    if (request == null)
    {
        throw new ArgumentNullException(nameof(request));
    }
    if (HttpClient.BaseAddress == null && !uri.IsAbsoluteUri)
    {
        throw new ArgumentException("The address cannot be a relative URI when no base address " +
                                    "is associated with the HTTP client.", nameof(uri));
    }
    // Flatten the request into string parameters, dropping empty values.
    var parameters = new Dictionary <string, string>();
    foreach (var parameter in request.GetParameters())
    {
        var value = (string)parameter.Value;
        if (string.IsNullOrEmpty(value))
        {
            continue;
        }
        parameters.Add(parameter.Key, value);
    }
    // GET requests carry the parameters URL-encoded in the query string.
    if (method == HttpMethod.Get && parameters.Count != 0)
    {
        var builder = new StringBuilder();
        foreach (var parameter in parameters)
        {
            if (builder.Length != 0)
            {
                builder.Append('&');
            }
            builder.Append(UrlEncoder.Default.Encode(parameter.Key));
            builder.Append('=');
            builder.Append(UrlEncoder.Default.Encode(parameter.Value));
        }
        if (!uri.IsAbsoluteUri)
        {
            uri = new Uri(HttpClient.BaseAddress, uri);
        }
        uri = new UriBuilder(uri) { Query = builder.ToString() }.Uri;
    }
    var message = new HttpRequestMessage(method, uri);
    // Non-GET requests carry the parameters as a form-encoded body.
    if (method != HttpMethod.Get)
    {
        message.Content = new FormUrlEncodedContent(parameters);
    }
    var response = await HttpClient.SendAsync(message, HttpCompletionOption.ResponseHeadersRead);
    if (response.Headers.Location != null)
    {
        // Redirect: parameters live in the fragment, or failing that, the query.
        var payload = response.Headers.Location.Fragment;
        if (string.IsNullOrEmpty(payload))
        {
            payload = response.Headers.Location.Query;
        }
        if (string.IsNullOrEmpty(payload))
        {
            return (new OpenIdConnectResponse());
        }
        var result = new OpenIdConnectResponse();
        using (var tokenizer = new StringTokenizer(payload, OpenIdConnectConstants.Separators.Ampersand).GetEnumerator())
        {
            while (tokenizer.MoveNext())
            {
                var parameter = tokenizer.Current;
                if (parameter.Length == 0)
                {
                    continue;
                }
                // Always skip the first char (# or ?).
                if (parameter.Offset == 0)
                {
                    parameter = parameter.Subsegment(1, parameter.Length - 1);
                }
                var index = parameter.IndexOf('=');
                if (index == -1)
                {
                    continue;
                }
                var name = parameter.Substring(0, index);
                if (string.IsNullOrEmpty(name))
                {
                    continue;
                }
                var value = parameter.Substring(index + 1, parameter.Length - (index + 1));
                if (string.IsNullOrEmpty(value))
                {
                    continue;
                }
                // Form-url-encoded values: '+' denotes a space.
                result.AddParameter(
                    Uri.UnescapeDataString(name.Replace('+', ' ')),
                    Uri.UnescapeDataString(value.Replace('+', ' ')));
            }
        }
        return (result);
    }
    else if (string.Equals(response.Content?.Headers?.ContentType?.MediaType,
                           "application/json", StringComparison.OrdinalIgnoreCase))
    {
        // JSON body: deserialize directly into an OpenIdConnectResponse.
        using (var stream = await response.Content.ReadAsStreamAsync())
        using (var reader = new JsonTextReader(new StreamReader(stream)))
        {
            var serializer = JsonSerializer.CreateDefault();
            return (serializer.Deserialize <OpenIdConnectResponse>(reader));
        }
    }
    else if (string.Equals(response.Content?.Headers?.ContentType?.MediaType,
                           "text/html", StringComparison.OrdinalIgnoreCase))
    {
        // HTML form post: harvest the <input> name/value pairs from the body.
        using (var stream = await response.Content.ReadAsStreamAsync())
        {
            var result = new OpenIdConnectResponse();
            var document = await new HtmlParser().ParseAsync(stream);
            foreach (var element in document.Body.GetElementsByTagName("input"))
            {
                var name = element.GetAttribute("name");
                if (string.IsNullOrEmpty(name))
                {
                    continue;
                }
                var value = element.GetAttribute("value");
                if (string.IsNullOrEmpty(value))
                {
                    continue;
                }
                result.AddParameter(name, value);
            }
            return (result);
        }
    }
    else if (string.Equals(response.Content?.Headers?.ContentType?.MediaType,
                           "text/plain", StringComparison.OrdinalIgnoreCase))
    {
        // Plain text: one "name:value" pair per line; lines without ':' are skipped.
        using (var stream = await response.Content.ReadAsStreamAsync())
        using (var reader = new StreamReader(stream))
        {
            var result = new OpenIdConnectResponse();
            for (var line = await reader.ReadLineAsync(); line != null; line = await reader.ReadLineAsync())
            {
                var index = line.IndexOf(':');
                if (index == -1)
                {
                    continue;
                }
                result.AddParameter(line.Substring(0, index), line.Substring(index + 1));
            }
            return (result);
        }
    }
    // Unrecognized content type: return an empty response.
    return (new OpenIdConnectResponse());
}
/// <summary>
/// Parses an inline parameter mapping of the form
/// "propertyName,key1=value1,key2=value2,..." into a ParameterProperty.
/// Recognized keys: type, dbType, direction, nullValue, handler; anything
/// else (or a dangling key without a value) raises a DataMapperException.
/// </summary>
private ParameterProperty NewParseMapping(string token, Type parameterClassType, IScope scope)
{
    ParameterProperty property = new ParameterProperty();
    // Tokenize on both '=' and ',' so the stream alternates key/value after
    // the leading property name.
    IEnumerator enumerator = new StringTokenizer(token, "=,", false).GetEnumerator();
    enumerator.MoveNext();
    property.PropertyName = ((string)enumerator.Current).Trim();
    while (enumerator.MoveNext())
    {
        string current = (string)enumerator.Current;
        // Each key must be followed by a value token.
        if (!enumerator.MoveNext())
        {
            throw new DataMapperException("Incorrect inline parameter map format (missmatched name=value pairs): " + token);
        }
        string str2 = (string)enumerator.Current;
        // Decompiled nested dispatch on the key name; the 'continue'
        // statements are what keep each branch from falling through.
        if (!"type".Equals(current))
        {
            if (!"dbType".Equals(current))
            {
                if (!"direction".Equals(current))
                {
                    if (!"nullValue".Equals(current))
                    {
                        if (!"handler".Equals(current))
                        {
                            throw new DataMapperException("Unrecognized parameter mapping field: '" + current + "' in " + token);
                        }
                        property.CallBackName = str2;
                    }
                    else
                    {
                        property.NullValue = str2;
                    }
                }
                else
                {
                    property.DirectionAttribute = str2;
                }
                continue;
            }
            property.DbType = str2;
        }
        else
        {
            property.CLRType = str2;
            continue;
        }
    }
    // A custom handler takes precedence: initialize and return immediately.
    if (property.CallBackName.Length > 0)
    {
        property.Initialize(scope, parameterClassType);
        return (property);
    }
    // Otherwise resolve a type handler (unknown-type handler when no
    // parameter class is available).
    ITypeHandler unkownTypeHandler = null;
    if (parameterClassType == null)
    {
        unkownTypeHandler = scope.DataExchangeFactory.TypeHandlerFactory.GetUnkownTypeHandler();
    }
    else
    {
        unkownTypeHandler = this.ResolveTypeHandler(scope.DataExchangeFactory.TypeHandlerFactory,
                                                    parameterClassType, property.PropertyName,
                                                    property.CLRType, property.DbType);
    }
    property.TypeHandler = unkownTypeHandler;
    property.Initialize(scope, parameterClassType);
    return (property);
}
/// <summary>
/// (Re)registers all renderer resources for the current level: the map
/// model, pics, temp-entity models, world/weapon models, images, client
/// skins and the sky, then marks the refresh as prepped.
/// </summary>
public static void PrepRefresh()
{
    string mapname;
    int i;
    string name;
    float rotate;
    float[] axis = new float[3];
    // No map configstring yet — nothing to register.
    if ((i = Globals.cl.configstrings[Defines.CS_MODELS + 1].Length) == 0)
    {
        return;
    }
    SCR.AddDirtyPoint(0, 0);
    SCR.AddDirtyPoint(Globals.viddef.GetWidth() - 1, Globals.viddef.GetHeight() - 1);
    // BUGFIX: the Java original used substring(5, i - 4) with begin/END
    // indices to strip "maps/" and ".bsp"; the C# overload takes a LENGTH,
    // so the count is (i - 4) - 5 = i - 9. The previous "i - 4" always
    // overran the string and threw ArgumentOutOfRangeException.
    mapname = Globals.cl.configstrings[Defines.CS_MODELS + 1].Substring(5, i - 9);
    Com.Printf("Map: " + mapname + "\\r");
    SCR.UpdateScreen();
    Globals.re.BeginRegistration(mapname);
    Com.Printf(" \\r");
    Com.Printf("pics\\r");
    SCR.UpdateScreen();
    SCR.TouchPics();
    Com.Printf(" \\r");
    CL_tent.RegisterTEntModels();
    num_cl_weaponmodels = 1;
    cl_weaponmodels[0] = "weapon.md2";
    for (i = 1; i < Defines.MAX_MODELS && Globals.cl.configstrings[Defines.CS_MODELS + i].Length != 0; i++)
    {
        // Strings are immutable in .NET; the Java-port defensive copy
        // (new string(...)) was unnecessary and not a valid C# constructor.
        name = Globals.cl.configstrings[Defines.CS_MODELS + i];
        if (name.Length > 37)
        {
            name = name.Substring(0, 36);
        }
        if (name[0] != '*')
        {
            Com.Printf(name + "\\r");
        }
        SCR.UpdateScreen();
        CoreSys.SendKeyEvents();
        if (name[0] == '#')
        {
            // '#'-prefixed entries are view weapon models (bounded set).
            if (num_cl_weaponmodels < Defines.MAX_CLIENTWEAPONMODELS)
            {
                cl_weaponmodels[num_cl_weaponmodels] = Globals.cl.configstrings[Defines.CS_MODELS + i].Substring(1);
                num_cl_weaponmodels++;
            }
        }
        else
        {
            Globals.cl.model_draw[i] = Globals.re.RegisterModel(Globals.cl.configstrings[Defines.CS_MODELS + i]);
            if (name[0] == '*')
            {
                // '*'-prefixed models are inline brush models with clip data.
                Globals.cl.model_clip[i] = CM.InlineModel(Globals.cl.configstrings[Defines.CS_MODELS + i]);
            }
            else
            {
                Globals.cl.model_clip[i] = null;
            }
        }
        if (name[0] != '*')
        {
            Com.Printf(" \\r");
        }
    }
    Com.Printf("images\\r");
    SCR.UpdateScreen();
    for (i = 1; i < Defines.MAX_IMAGES && Globals.cl.configstrings[Defines.CS_IMAGES + i].Length > 0; i++)
    {
        Globals.cl.image_precache[i] = Globals.re.RegisterPic(Globals.cl.configstrings[Defines.CS_IMAGES + i]);
        CoreSys.SendKeyEvents();
    }
    Com.Printf(" \\r");
    for (i = 0; i < Defines.MAX_CLIENTS; i++)
    {
        if (Globals.cl.configstrings[Defines.CS_PLAYERSKINS + i].Length == 0)
        {
            continue;
        }
        Com.Printf("client " + i + '\\');
        SCR.UpdateScreen();
        CoreSys.SendKeyEvents();
        CL_parse.ParseClientinfo(i);
        Com.Printf(" \\r");
    }
    CL_parse.LoadClientinfo(Globals.cl.baseclientinfo, "unnamed\\\\male/grunt");
    Com.Printf("sky\\r");
    SCR.UpdateScreen();
    // ROBUSTNESS: parse sky rotation/axis with the invariant culture so
    // configstrings like "0.5" parse correctly under comma-decimal locales.
    rotate = float.Parse(Globals.cl.configstrings[Defines.CS_SKYROTATE],
                         System.Globalization.CultureInfo.InvariantCulture);
    StringTokenizer st = new StringTokenizer(Globals.cl.configstrings[Defines.CS_SKYAXIS]);
    st.MoveNext();
    axis[0] = float.Parse(st.Current, System.Globalization.CultureInfo.InvariantCulture);
    st.MoveNext();
    axis[1] = float.Parse(st.Current, System.Globalization.CultureInfo.InvariantCulture);
    st.MoveNext();
    axis[2] = float.Parse(st.Current, System.Globalization.CultureInfo.InvariantCulture);
    Globals.re.SetSky(Globals.cl.configstrings[Defines.CS_SKY], rotate, axis);
    Com.Printf(" \\r");
    Globals.re.EndRegistration();
    Con.ClearNotify();
    SCR.UpdateScreen();
    Globals.cl.refresh_prepped = true;
    Globals.cl.force_refdef = true;
}
/// <summary>
/// First pass of mapping processing: collects the annotated entity/mapped
/// superclass types, binds defaults once, processes artifacts in the
/// configured precedence order, then runs the queued second passes
/// (foreign keys first, secondary tables before associations).
/// </summary>
protected void SecondPassCompile()
{
    log.Debug("Execute first pass mapping processing");
    // Build annotatedClassEntities; only keep MappedSuperclasses and
    // Entity-annotated types in the working list.
    var tempAnnotatedClasses = new List <System.Type>(annotatedClasses.Count);
    foreach (System.Type clazz in annotatedClasses)
    {
        if (AttributeHelper.IsAttributePresent <EntityAttribute>(clazz))
        {
            annotatedClassEntities.Add(clazz.Name, clazz);
            tempAnnotatedClasses.Add(clazz);
        }
        else if (AttributeHelper.IsAttributePresent <MappedSuperclassAttribute>(clazz))
        {
            tempAnnotatedClasses.Add(clazz);
        }
    }
    annotatedClasses = tempAnnotatedClasses;
    // Process default values first (exactly once).
    if (!isDefaultProcessed)
    {
        AnnotationBinder.BindDefaults(CreateExtendedMappings());
        isDefaultProcessed = true;
    }
    //TODO this should go to a helper code
    // Process entities: resolve precedence from the property or the default.
    if (precedence == null)
    {
        precedence = Properties[ARTEFACT];
    }
    if (precedence == null)
    {
        precedence = DEFAULT_PRECEDENCE;
    }
    var precedences = new StringTokenizer(precedence, ",; ", false).GetEnumerator();
    // BUGFIX: the check was inverted — it previously threw when the
    // precedence list HAD entries and silently skipped processing when it was
    // empty. Throw only when no artifact is configured, and process the first
    // token (already consumed by the emptiness probe) before looping.
    if (!precedences.MoveNext())
    {
        throw new MappingException(ARTEFACT + " cannot be empty: " + precedence);
    }
    do
    {
        string artifact = precedences.Current;
        RemoveConflictedArtifact(artifact);
        ProcessArtifactsOfType(artifact);
    } while (precedences.MoveNext());
    try
    {
        inSecondPass = true;
        // Foreign-key second passes must run before everything else.
        ProcessFkSecondPassInOrder();
        var iter = secondPasses.GetEnumerator();
        while (iter.MoveNext())
        {
            var sp = iter.Current;
            // do the second pass of fk before the others and remove them
            if (sp is CreateKeySecondPass)
            {
                sp.Invoke(classes);
            }
        }
        iter = secondPasses.GetEnumerator();
        while (iter.MoveNext())
        {
            var sp = iter.Current;
            // do the SecondaryTable second pass before any association
            // because associations can be built on joins
            if (sp is SecondaryTableSecondPass)
            {
                sp.Invoke(classes);
            }
        }
    }
    catch (RecoverableException ex)
    {
        //TODO: remove then RecoverableException
        // the exception was not recoverable after all
        throw ex.InnerException;
    }
}