/// <summary>
/// Validates all <c>RuleDependency</c> annotations found in the round, grouped
/// by the recognizer type each dependency targets.
/// </summary>
/// <param name="annotations">the annotation types requested to be processed (unused directly)</param>
/// <param name="roundEnv">environment for information about the current processing round</param>
/// <returns>always <c>true</c>, claiming the annotations so no later processor handles them</returns>
public override bool Process<_T0>(HashSet<_T0> annotations, IRoundEnvironment roundEnv)
{
    if (!CheckClassNameConstants())
    {
        return true;
    }
    IList<Tuple<RuleDependency, IElement>> dependencies = GetDependencies(roundEnv);
    // Group the dependencies by the recognizer type they reference so each
    // recognizer's rules are checked in a single pass.
    IDictionary<ITypeMirror, IList<Tuple<RuleDependency, IElement>>> recognizerDependencies =
        new Dictionary<ITypeMirror, IList<Tuple<RuleDependency, IElement>>>();
    foreach (Tuple<RuleDependency, IElement> dependency in dependencies)
    {
        ITypeMirror recognizerType = GetRecognizerType(dependency.Item1);
        IList<Tuple<RuleDependency, IElement>> list = recognizerDependencies.Get(recognizerType);
        if (list == null)
        {
            list = new List<Tuple<RuleDependency, IElement>>();
            recognizerDependencies.Put(recognizerType, list);
        }
        list.AddItem(dependency);
    }
    foreach (KeyValuePair<ITypeMirror, IList<Tuple<RuleDependency, IElement>>> entry in recognizerDependencies.EntrySet())
    {
        // BUG FIX: the original used Java-style "%d"/"%s" placeholders, which
        // .NET string.Format does not substitute — the literal "%d"/"%s" would
        // appear in the diagnostic. Use composite-format "{0}"/"{1}" instead.
        processingEnv.GetMessager().PrintMessage(Diagnostic.Kind.Note,
            string.Format("ANTLR 4: Validating {0} dependencies on rules in {1}.",
                entry.Value.Count, entry.Key.ToString()));
        CheckDependencies(entry.Value, entry.Key);
    }
    return true;
}
/// <summary>
/// Builds a <c>Tree</c> populated with the supplied path-to-content entries,
/// inserting every affected subtree object through a single inserter.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private Tree BuildTree(Dictionary<string, string> headEntries)
{
    Tree result = new Tree(db);
    if (headEntries == null)
    {
        return result;
    }
    ObjectInserter inserter = db.NewObjectInserter();
    try
    {
        foreach (KeyValuePair<string, string> entry in headEntries.EntrySet())
        {
            FileTreeEntry file = result.AddFile(entry.Key);
            file.SetId(GenSha1(entry.Value));
            // Re-hash every ancestor tree, since adding the file changed its content.
            for (Tree ancestor = file.GetParent(); ancestor != null; ancestor = ancestor.GetParent())
            {
                ancestor.SetId(inserter.Insert(Constants.OBJ_TREE, ancestor.Format()));
            }
        }
        inserter.Flush();
    }
    finally
    {
        inserter.Release();
    }
    return result;
}
/*
 * Reads in a Hashtable in which the values are Vectors of
 * UnresolvedPermissions and saves them in the perms field.
 */
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException
private void ReadObject(ObjectInputStream @in)
{
    // Don't call defaultReadObject()
    // Read in serialized fields
    ObjectInputStream.GetField gfields = @in.ReadFields();
    // Get permissions
    //JAVA TO C# CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
    //ORIGINAL LINE: @SuppressWarnings("unchecked") Hashtable<String, Vector<UnresolvedPermission>> permissions = (Hashtable<String, Vector<UnresolvedPermission>>) gfields.get("permissions", null);
    Dictionary<String, Vector<UnresolvedPermission>> permissions =
        (Dictionary<String, Vector<UnresolvedPermission>>)gfields.Get("permissions", null);
    // writeObject writes a Hashtable<String, Vector<UnresolvedPermission>>
    // for the permissions key, so this cast is safe, unless the data is corrupt.
    // Pre-size the map at twice the entry count to keep the load factor low.
    Perms = new HashMap<String, List<UnresolvedPermission>>(permissions.Size() * 2);
    // Convert each entry (Vector) into a List
    Set<Map_Entry<String, Vector<UnresolvedPermission>>> set = permissions.EntrySet();
    foreach (Map_Entry<String, Vector<UnresolvedPermission>> e in set)
    {
        // Convert Vector into ArrayList
        Vector<UnresolvedPermission> vec = e.Value;
        List<UnresolvedPermission> list = new List<UnresolvedPermission>(vec.Size());
        list.AddAll(vec);
        // Add to Hashtable being serialized
        Perms.Put(e.Key, list);
    }
}
/// <summary>
/// Estimates the heap memory consumed by all loaded fields: the field-name
/// characters plus each reader's own RAM estimate.
/// </summary>
public override long RamBytesUsed()
{
    long total = 0;
    foreach (KeyValuePair<string, TermsReader> field in _fields.EntrySet())
    {
        total += field.Key.Length * RamUsageEstimator.NUM_BYTES_CHAR;
        total += field.Value.ramBytesUsed();
    }
    return total;
}
/// <summary>
/// Decodes the Exif UserComment tag into a string, honouring the character-code
/// prefix (ASCII / UNICODE / JIS) stored in the first bytes of the tag value.
/// </summary>
/// <returns>
/// the decoded, trimmed comment; the empty string for an empty tag; or null if
/// the tag is absent or an unsupported encoding was encountered
/// </returns>
public virtual string GetUserCommentDescription()
{
    sbyte[] commentBytes = _directory.GetByteArray(ExifSubIFDDirectory.TagUserComment);
    if (commentBytes == null)
    {
        return(null);
    }
    if (commentBytes.Length == 0)
    {
        return(string.Empty);
    }
    // Maps the Exif encoding markers to charset names used for decoding.
    IDictionary<string, string> encodingMap = new Dictionary<string, string>();
    encodingMap.Put("ASCII", Runtime.GetProperty("file.encoding"));
    // Someone suggested "ISO-8859-1".
    encodingMap.Put("UNICODE", "UTF-16LE");
    encodingMap.Put("JIS", "Shift-JIS");
    // We assume this charset for now. Another suggestion is "JIS".
    try
    {
        if (commentBytes.Length >= 10)
        {
            string firstTenBytesString = Sharpen.Runtime.GetStringForBytes(commentBytes, 0, 10);
            // try each encoding name
            foreach (KeyValuePair<string, string> pair in encodingMap.EntrySet())
            {
                string encodingName = pair.Key;
                string charset = pair.Value;
                if (firstTenBytesString.StartsWith(encodingName))
                {
                    // skip any null or blank characters commonly present after the encoding name, up to a limit of 10 from the start
                    for (int j = encodingName.Length; j < 10; j++)
                    {
                        sbyte b = commentBytes[j];
                        if (b != '\0' && b != ' ')
                        {
                            // First non-padding byte found: decode from here onward.
                            return(Sharpen.Extensions.Trim(Sharpen.Runtime.GetStringForBytes(commentBytes, j, commentBytes.Length - j, charset)));
                        }
                    }
                    // The first 10 bytes were all marker/padding; decode the remainder.
                    return(Sharpen.Extensions.Trim(Sharpen.Runtime.GetStringForBytes(commentBytes, 10, commentBytes.Length - 10, charset)));
                }
            }
        }
        // special handling fell through, return a plain string representation
        return(Sharpen.Extensions.Trim(Sharpen.Runtime.GetStringForBytes(commentBytes, Runtime.GetProperty("file.encoding"))));
    }
    catch (UnsupportedEncodingException)
    {
        return(null);
    }
}
/// <summary>
/// Stages the given path/content pairs in a fresh GitIndex, writes it, and
/// reloads the repository's index.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private void BuildIndex(Dictionary<string, string> indexEntries)
{
    GitIndex gitIndex = new GitIndex(db);
    if (indexEntries != null)
    {
        foreach (KeyValuePair<string, string> pair in indexEntries.EntrySet())
        {
            // Write each file into the working tree, stage it, and force a stat recheck.
            var staged = gitIndex.Add(trash, WriteTrashFile(pair.Key, pair.Value));
            staged.ForceRecheck();
        }
    }
    gitIndex.Write();
    db.GetIndex().Read();
}
/// <summary>
/// Creates one RFC 2965 (Cookie2-style) cookie per header element, applying
/// default path/domain/port from the origin and parsing each attribute.
/// </summary>
/// <exception cref="Apache.Http.Cookie.MalformedCookieException"></exception>
private IList<Apache.Http.Cookie.Cookie> CreateCookies(HeaderElement[] elems, CookieOrigin origin)
{
    IList<Apache.Http.Cookie.Cookie> result = new AList<Apache.Http.Cookie.Cookie>(elems.Length);
    foreach (HeaderElement element in elems)
    {
        string cookieName = element.GetName();
        string cookieValue = element.GetValue();
        if (cookieName == null || cookieName.Length == 0)
        {
            throw new MalformedCookieException("Cookie name may not be empty");
        }
        BasicClientCookie2 cookie = new BasicClientCookie2(cookieName, cookieValue);
        cookie.SetPath(GetDefaultPath(origin));
        cookie.SetDomain(GetDefaultDomain(origin));
        cookie.SetPorts(new int[] { origin.GetPort() });
        NameValuePair[] attribs = element.GetParameters();
        // Eliminate duplicate attributes: iterate backwards so that the FIRST
        // occurrence ends up winning, per RFC 2965 3.2 (Origin Server Role).
        IDictionary<string, NameValuePair> uniqueAttribs =
            new Dictionary<string, NameValuePair>(attribs.Length);
        for (int i = attribs.Length - 1; i >= 0; i--)
        {
            NameValuePair candidate = attribs[i];
            uniqueAttribs.Put(candidate.GetName().ToLower(Sharpen.Extensions.GetEnglishCulture()), candidate);
        }
        foreach (KeyValuePair<string, NameValuePair> pair in uniqueAttribs.EntrySet())
        {
            NameValuePair attrib = pair.Value;
            string attribName = attrib.GetName().ToLower(Sharpen.Extensions.GetEnglishCulture());
            cookie.SetAttribute(attribName, attrib.GetValue());
            // Delegate attribute-specific parsing (path, domain, max-age, ...) when a handler exists.
            CookieAttributeHandler handler = FindAttribHandler(attribName);
            if (handler != null)
            {
                handler.Parse(cookie, attrib.GetValue());
            }
        }
        result.AddItem(cookie);
    }
    return result;
}
/// <summary>
/// Builds an in-core tree from the given path/content entries and returns the
/// id of the written root tree.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private ObjectId BuildTree(Dictionary<string, string> headEntries)
{
    DirCache cache = DirCache.NewInCore();
    // assertTrue(cache.lock());
    DirCacheEditor editor = cache.Editor();
    if (headEntries != null)
    {
        foreach (KeyValuePair<string, string> entry in headEntries.EntrySet())
        {
            editor.Add(new DirCacheCheckoutTest.AddEdit(entry.Key,
                GenSha1(entry.Value), entry.Value.Length));
        }
    }
    editor.Finish();
    return cache.WriteTree(db.NewObjectInserter());
}
/// <summary>
/// Populates the repository's DirCache with the given path/content entries,
/// writing each file to the working tree and its blob to the object database.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private void BuildIndex(Dictionary<string, string> indexEntries)
{
    dirCache = new DirCache(db.GetIndexFile(), db.FileSystem);
    if (indexEntries != null)
    {
        NUnit.Framework.Assert.IsTrue(dirCache.Lock());
        DirCacheEditor editor = dirCache.Editor();
        foreach (KeyValuePair<string, string> e in indexEntries.EntrySet())
        {
            // Materialize the entry in the working tree first.
            WriteTrashFile(e.Key, e.Value);
            // NOTE(review): a fresh ObjectInserter is created per entry and is
            // never flushed or released — looks like a resource leak; verify
            // whether Insert alone persists the blob here.
            ObjectInserter inserter = db.NewObjectInserter();
            ObjectId id = inserter.Insert(Constants.OBJ_BLOB, Constants.Encode(e.Value));
            // Remove any stale entry for the path before re-adding it.
            editor.Add(new DirCacheEditor.DeletePath(e.Key));
            editor.Add(new _PathEdit_287(id, e.Key));
        }
        NUnit.Framework.Assert.IsTrue(editor.Commit());
    }
}
/// <summary>
/// Flushes the delegate consumer, then persists every non-saturated bloom
/// filter to the segment's bloom file and releases the in-memory bitsets.
/// </summary>
/// <param name="disposing">true when called from Dispose(), false from a finalizer</param>
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        _delegateFieldsConsumer.Dispose();
        // Now we are done accumulating values for these fields
        // Keep only the filters that still have useful selectivity; a saturated
        // filter would reject nothing, so it is not worth persisting.
        var nonSaturatedBlooms = (from entry in _bloomFilters.EntrySet()
            let bloomFilter = entry.Value
            where !outerInstance._bloomFilterFactory.IsSaturated(bloomFilter, entry.Key)
            select entry).ToList();
        var bloomFileName = IndexFileNames.SegmentFileName(
            _state.SegmentInfo.Name, _state.SegmentSuffix, BLOOM_EXTENSION);
        IndexOutput bloomOutput = null;
        try
        {
            bloomOutput = _state.Directory.CreateOutput(bloomFileName, _state.Context);
            CodecUtil.WriteHeader(bloomOutput, /*BLOOM_CODEC_NAME*/ outerInstance.Name, VERSION_CURRENT);
            // remember the name of the postings format we will delegate to
            bloomOutput.WriteString(outerInstance._delegatePostingsFormat.Name);
            // First field in the output file is the number of fields+blooms saved
            bloomOutput.WriteInt32(nonSaturatedBlooms.Count);
            foreach (var entry in nonSaturatedBlooms)
            {
                var fieldInfo = entry.Key;
                var bloomFilter = entry.Value;
                bloomOutput.WriteInt32(fieldInfo.Number);
                SaveAppropriatelySizedBloomFilter(bloomOutput, bloomFilter, fieldInfo);
            }
            CodecUtil.WriteFooter(bloomOutput);
        }
        finally
        {
            // Release the output even if writing failed part-way through.
            IOUtils.Dispose(bloomOutput);
        }
        //We are done with large bitsets so no need to keep them hanging around
        _bloomFilters.Clear();
    }
}
/// <summary>Returns a set of all of the keys and values</summary>
/// <returns>a set of all of the keys and values</returns>
public ICollection<KeyValuePair<string, object>> ValueSet()
{
    // Delegate straight to the backing map's entry set.
    ICollection<KeyValuePair<string, object>> entries = mValues.EntrySet();
    return entries;
}
/// <summary>
/// Adds an HTTP Digest Authorization header to the connection, computing the
/// response hash from the previously parsed challenge parameters (RFC 2617).
/// </summary>
/// <param name="conn">the connection to decorate with the Authorization header</param>
/// <exception cref="System.IO.IOException"></exception>
internal override void ConfigureRequest(HttpURLConnection conn)
{
    IDictionary<string, string> p = new Dictionary<string, string>(@params);
    p.Put("username", user);
    string realm = p.Get("realm");
    string nonce = p.Get("nonce");
    string uri = p.Get("uri");
    string qop = p.Get("qop");
    string method = conn.GetRequestMethod();
    string A1 = user + ":" + realm + ":" + pass;
    string A2 = method + ":" + uri;
    string expect;
    if ("auth".Equals(qop))
    {
        string c = p.Get("cnonce");
        // BUG FIX: the original passed the Java format "%08x", which .NET
        // string.Format leaves untouched — every request would send the literal
        // "%08x" as the nonce count. "{0:x8}" yields the RFC 2617-required
        // 8-digit, zero-padded, lowercase hex value.
        string nc = string.Format("{0:x8}", ++requestCount);
        p.Put("nc", nc);
        expect = KD(H(A1), nonce + ":" + nc + ":" + c + ":" + qop + ":" + H(A2));
    }
    else
    {
        // No qop: legacy RFC 2069-style digest.
        expect = KD(H(A1), nonce + ":" + H(A2));
    }
    p.Put("response", expect);
    // Serialize all parameters as name="value" pairs.
    StringBuilder v = new StringBuilder();
    foreach (KeyValuePair<string, string> e in p.EntrySet())
    {
        if (v.Length > 0)
        {
            v.Append(", ");
        }
        v.Append(e.Key);
        v.Append('=');
        v.Append('"');
        v.Append(e.Value);
        v.Append('"');
    }
    conn.SetRequestProperty(HttpSupport.HDR_AUTHORIZATION, NAME + " " + v);
}
/// <summary>
/// Writes each path/content pair to the working tree, stages it in a new
/// GitIndex, persists the index, and re-reads the repository's index.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private void BuildIndex(Dictionary<string, string> indexEntries)
{
    GitIndex freshIndex = new GitIndex(db);
    if (indexEntries != null)
    {
        foreach (KeyValuePair<string, string> item in indexEntries.EntrySet())
        {
            // Stage the freshly written trash file and force a stat recheck.
            freshIndex.Add(trash, WriteTrashFile(item.Key, item.Value)).ForceRecheck();
        }
    }
    freshIndex.Write();
    db.GetIndex().Read();
}
/// <summary>
/// Constructs a Tree containing the given path/content entries, writing every
/// modified subtree object through one shared inserter.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private Tree BuildTree(Dictionary<string, string> headEntries)
{
    Tree root = new Tree(db);
    if (headEntries == null)
    {
        return root;
    }
    ObjectInserter writer = db.NewObjectInserter();
    try
    {
        foreach (KeyValuePair<string, string> item in headEntries.EntrySet())
        {
            FileTreeEntry added = root.AddFile(item.Key);
            added.SetId(GenSha1(item.Value));
            // Walk up and re-hash every ancestor tree affected by the new file.
            Tree node = added.GetParent();
            while (node != null)
            {
                node.SetId(writer.Insert(Constants.OBJ_TREE, node.Format()));
                node = node.GetParent();
            }
        }
        writer.Flush();
    }
    finally
    {
        writer.Release();
    }
    return root;
}
// [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
/// <summary>
/// Indexes random realistic unicode strings as SortedDocValues (plus some
/// documents without a value), then verifies that the merged doc-values ords
/// match a BytesRefHash sorted with the UTF8-as-unicode comparer, and that a
/// per-document primary-key lookup returns each document's original string.
/// </summary>
public virtual void TestRandomSortedBytes()
{
    Directory dir = NewDirectory();
    IndexWriterConfig cfg = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    if (!DefaultCodecSupportsDocsWithField())
    {
        // if the codec doesnt support missing, we expect missing to be mapped to byte[]
        // by the impersonator, but we have to give it a chance to merge them to this
        cfg.SetMergePolicy(NewLogMergePolicy());
    }
    RandomIndexWriter w = new RandomIndexWriter(Random(), dir, cfg);
    int numDocs = AtLeast(100);
    BytesRefHash hash = new BytesRefHash();
    // Maps the "id" field value back to the string indexed for that document.
    IDictionary<string, string> docToString = new Dictionary<string, string>();
    int maxLength = TestUtil.NextInt(Random(), 1, 50);
    for (int i = 0; i < numDocs; i++)
    {
        Document doc = new Document();
        doc.Add(NewTextField("id", "" + i, Field.Store.YES));
        string @string = TestUtil.RandomRealisticUnicodeString(Random(), 1, maxLength);
        BytesRef br = new BytesRef(@string);
        doc.Add(new SortedDocValuesField("field", br));
        hash.Add(br);
        docToString["" + i] = @string;
        w.AddDocument(doc);
    }
    if (Rarely())
    {
        w.Commit();
    }
    // A second batch of documents with no "field" value at all.
    int numDocsNoValue = AtLeast(10);
    for (int i = 0; i < numDocsNoValue; i++)
    {
        Document doc = new Document();
        doc.Add(NewTextField("id", "noValue", Field.Store.YES));
        w.AddDocument(doc);
    }
    if (!DefaultCodecSupportsDocsWithField())
    {
        BytesRef bytesRef = new BytesRef();
        hash.Add(bytesRef);
        // add empty value for the gaps
    }
    if (Rarely())
    {
        w.Commit();
    }
    if (!DefaultCodecSupportsDocsWithField())
    {
        // if the codec doesnt support missing, we expect missing to be mapped to byte[]
        // by the impersonator, but we have to give it a chance to merge them to this
        w.ForceMerge(1);
    }
    // A third batch with ids offset past the first batch ("" + i + numDocs).
    for (int i = 0; i < numDocs; i++)
    {
        Document doc = new Document();
        string id = "" + i + numDocs;
        doc.Add(NewTextField("id", id, Field.Store.YES));
        string @string = TestUtil.RandomRealisticUnicodeString(Random(), 1, maxLength);
        BytesRef br = new BytesRef(@string);
        hash.Add(br);
        docToString[id] = @string;
        doc.Add(new SortedDocValuesField("field", br));
        w.AddDocument(doc);
    }
    w.Commit();
    IndexReader reader = w.Reader;
    SortedDocValues docValues = MultiDocValues.GetSortedValues(reader, "field");
    int[] sort = hash.Sort(BytesRef.UTF8SortedAsUnicodeComparer);
    BytesRef expected = new BytesRef();
    BytesRef actual = new BytesRef();
    Assert.AreEqual(hash.Size(), docValues.ValueCount);
    // Every hashed term must appear at the matching ord, and looking the term
    // back up must return that same ord.
    for (int i = 0; i < hash.Size(); i++)
    {
        hash.Get(sort[i], expected);
        docValues.LookupOrd(i, actual);
        Assert.AreEqual(expected.Utf8ToString(), actual.Utf8ToString());
        int ord = docValues.LookupTerm(expected);
        Assert.AreEqual(i, ord);
    }
    AtomicReader slowR = SlowCompositeReaderWrapper.Wrap(reader);
    ISet<KeyValuePair<string, string>> entrySet = docToString.EntrySet();
    foreach (KeyValuePair<string, string> entry in entrySet)
    {
        // pk lookup
        DocsEnum termDocsEnum = slowR.TermDocsEnum(new Term("id", entry.Key));
        int docId = termDocsEnum.NextDoc();
        expected = new BytesRef(entry.Value);
        docValues.Get(docId, actual);
        Assert.AreEqual(expected, actual);
    }
    reader.Dispose();
    w.Dispose();
    dir.Dispose();
}
/// <summary>
/// Populates the repository's DirCache with the given path/content entries,
/// writing each file to the working tree and its blob to the object database.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private void BuildIndex(Dictionary<string, string> indexEntries)
{
    dirCache = new DirCache(db.GetIndexFile(), db.FileSystem);
    if (indexEntries != null)
    {
        NUnit.Framework.Assert.IsTrue(dirCache.Lock());
        DirCacheEditor editor = dirCache.Editor();
        foreach (KeyValuePair<string, string> e in indexEntries.EntrySet())
        {
            // Materialize the entry in the working tree first.
            WriteTrashFile(e.Key, e.Value);
            // NOTE(review): a fresh ObjectInserter is created per entry and is
            // never flushed or released — looks like a resource leak; verify
            // whether Insert alone persists the blob here.
            ObjectInserter inserter = db.NewObjectInserter();
            ObjectId id = inserter.Insert(Constants.OBJ_BLOB, Constants.Encode(e.Value));
            // Remove any stale entry for the path before re-adding it.
            editor.Add(new DirCacheEditor.DeletePath(e.Key));
            editor.Add(new _PathEdit_284(id, e.Key));
        }
        NUnit.Framework.Assert.IsTrue(editor.Commit());
    }
}
/// <summary>
/// Builds an in-core DirCache tree from the supplied path/content map and
/// returns the ObjectId of the resulting root tree.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private ObjectId BuildTree(Dictionary<string, string> headEntries)
{
    DirCache inCore = DirCache.NewInCore();
    // assertTrue(inCore.lock());
    DirCacheEditor treeEditor = inCore.Editor();
    if (headEntries != null)
    {
        foreach (KeyValuePair<string, string> item in headEntries.EntrySet())
        {
            DirCacheCheckoutTest.AddEdit edit = new DirCacheCheckoutTest.AddEdit(
                item.Key, GenSha1(item.Value), item.Value.Length);
            treeEditor.Add(edit);
        }
    }
    treeEditor.Finish();
    return inCore.WriteTree(db.NewObjectInserter());
}
/// <summary>
/// This method is used to acquire the name and value pairs that
/// have currently been collected by this session. This is used
/// to determine which mappings are available within the session.
/// </summary>
/// <returns>
/// the set of mappings that exist within the session
/// </returns>
public Set EntrySet()
{
    // Delegates directly to the backing map's entry set.
    return(map.EntrySet());
}