// Looks up a resolution pipeline for the given key, walking the container
// hierarchy from this container up to the root and returning the first
// exact-key match found.
// NOTE(review): this block appears truncated in the current view — the
// method's fall-through return (the "not found" path) and its closing brace
// lie outside the visible span.
private ResolveDelegate<BuilderContext>? TryGetPipeline(ref HashKey key)
{
    // Iterate through containers hierarchy
    for (UnityContainer? container = this; null != container; container = container._parent)
    {
        // Skip to parent if no registrations
        if (null == container._metadata)
        {
            continue;
        }

        Debug.Assert(null != container._registry);
        var registry = container._registry;

        // Check for exact match: hash into a bucket, then follow the
        // entry chain via Next links.
        for (var i = registry.Buckets[key.HashCode % registry.Buckets.Length]; i >= 0; i = registry.Entries[i].Next)
        {
            ref var candidate = ref registry.Entries[i];
            if (candidate.Key != key)
            {
                continue;
            }

            // Found it
            return(candidate.Pipeline);
        }
    }
/// <inheritdoc />
// Resolves a policy of type policyInterface registered for `type`, searching
// the container hierarchy from `Container` up to the root. Entries whose
// policies are an ImplicitRegistration are skipped — only explicit policy
// sets participate.
// NOTE(review): block appears truncated here — the "not found" return and
// the method's closing brace are outside this view.
public object? Get(Type type, Type policyInterface)
{
    var key = new HashKey(type);

    // Iterate through containers hierarchy
    for (UnityContainer? container = Container; null != container; container = container._parent)
    {
        // Skip to parent if no registrations
        if (null == container._registry)
        {
            continue;
        }

        var registry = container._registry;
        var targetBucket = key.HashCode % registry.Buckets.Length;

        for (var i = registry.Buckets[targetBucket]; i >= 0; i = registry.Entries[i].Next)
        {
            ref var entry = ref registry.Entries[i];
            if (entry.Key != key || entry.Policies is ImplicitRegistration)
            {
                continue;
            }

            return(entry.Policies.Get(policyInterface));
        }
    }
/// <summary>
/// Creates a branch pointing at <paramref name="commitKey"/> and registers it
/// in the tracked-branch table under <paramref name="name"/>.
/// </summary>
/// <param name="name">Branch name; must not already be tracked.</param>
/// <param name="commitKey">Commit the new branch initially points at.</param>
/// <returns>The newly created branch.</returns>
public static Branch CreateBranch(string name, HashKey commitKey)
{
    var created = new Branch(name, commitKey);
    _trackedBranches.Add(name, created);
    return created;
}
// Finds the registration for (type, name) anywhere in the container hierarchy,
// lazily upgrading a matching entry's policy set into a full IRegistration the
// first time it is requested.
// NOTE(review): block appears truncated here — the "not registered" fall-through
// and the method's closing brace are outside this view.
private IRegistration GetSimpleRegistration(Type type, string? name)
{
    var key = new HashKey(type, name);

    // Iterate through containers hierarchy
    for (UnityContainer? container = this; null != container; container = container._parent)
    {
        // Skip to parent if no registrations
        if (null == container._metadata)
        {
            continue;
        }

        Debug.Assert(null != container._registry);
        var registry = container._registry;

        // Check for exact match
        for (var i = registry.Buckets[key.HashCode % registry.Buckets.Length]; i >= 0; i = registry.Entries[i].Next)
        {
            ref var candidate = ref registry.Entries[i];
            if (candidate.Key != key)
            {
                continue;
            }

            // Found a registration: if the stored policies are not yet a full
            // IRegistration, wrap them into one in place (cached for next time).
            if (!(candidate.Policies is IRegistration))
            {
                candidate.Policies = container.CreateRegistration(type, name, candidate.Policies);
            }

            return((IRegistration)candidate.Policies);
        }
    }
/// <summary>
/// Probes the transposition table for an entry that matches the position key.
/// </summary>
/// <param name="key">The position key</param>
/// <returns>(true, entry) if one was found, (false, empty) if not found</returns>
public (bool, TranspositionTableEntry) Probe(HashKey key)
{
    var ttc = FindCluster(key);
    var g = _generation;

    // Probing the Table will automatically update the generation of the entry in case the
    // probing retrieves an element.
    TranspositionTableEntry e = default;
    var set = false;

    for (var i = 0; i < ttc.Cluster.Length; ++i)
    {
        // Skip occupied slots whose upper key bits don't match. The scan stops
        // at the first slot that is either empty (Key32 == 0) or a key match.
        // NOTE(review): an empty slot is also treated as a "hit" here (set=true,
        // Hits++) and its generation is refreshed — confirm this is intended;
        // callers presumably distinguish via the entry's key field.
        if (ttc.Cluster[i].Key32 != 0 && ttc.Cluster[i].Key32 != key.UpperKey)
        {
            continue;
        }

        // Refresh the entry's generation so the replacement policy keeps it.
        ttc.Cluster[i].Generation = g;
        e = ttc.Cluster[i];
        set = true;
        Hits++;
        break;
    }

    return(set, !set ? TTCluster.DefaultEntry : e);
}
/// <inheritdoc />
// Checks whether (type, name) has an explicit registration anywhere in the
// container hierarchy. Implicit (non-explicit) entries do not count.
// NOTE(review): block appears truncated here — the final "return false" and
// the method's closing brace are outside this view.
public bool IsRegistered(Type type, string? name)
{
    var key = new HashKey(type, name);

    // Iterate through hierarchy and check if exists
    for (UnityContainer? container = this; null != container; container = container._parent)
    {
        // Skip to parent if no registry
        if (null == container._metadata)
        {
            continue;
        }

        Debug.Assert(null != container._registry);
        var registry = container._registry;
        var targetBucket = key.HashCode % registry.Buckets.Length;

        // Look for exact match
        for (var i = registry.Buckets[targetBucket]; i >= 0; i = registry.Entries[i].Next)
        {
            ref var candidate = ref registry.Entries[i];
            if (candidate.Key != key || !candidate.IsExplicit)
            {
                continue;
            }

            return(true);
        }
    }
/// <summary>
/// Persists a commit (together with its tree hierarchy) and advances the
/// branch that HEAD points at to the newly stored commit.
/// </summary>
private void StoreCommitAndAdvanceHeadBranch(Commit commit, TreeBuilder treeBuilder)
{
    Branch head = ReferenceDatabase.GetHead();
    HashKey storedKey = ObjectDatabase.StoreCommitWithTreeHierarchy(commit, treeBuilder);
    head.SetCommitKey(storedKey);
}
/// <summary>
/// Captures everything needed to execute a query, then runs Initialize().
/// All parameters are stored verbatim on same-named properties.
/// </summary>
public void SetParameters(
    AmazonDynamoDBClient client,
    SelectQueryInfo queryInfo,
    ExpressionAttributeName[] expressionAttributeNames,
    ExpressionAttributeValue[] expressionAttributeValues,
    HashKey hashKey,
    SortKey sortKey,
    string hashKeyName,
    string sortKeyName,
    string tableName,
    string indexName,
    string filterExpression
    )
{
    // Connection and query shape.
    Client = client;
    QueryInfo = queryInfo;

    // Expression placeholders.
    ExpressionAttributeNames = expressionAttributeNames;
    ExpressionAttributeValues = expressionAttributeValues;

    // Key schema.
    HashKey = hashKey;
    SortKey = sortKey;
    HashKeyName = hashKeyName;
    SortKeyName = sortKeyName;

    // Target table/index and filtering.
    TableName = tableName;
    IndexName = indexName;
    FilterExpression = filterExpression;

    // Initialize only after every property is in place.
    Initialize();
}
public void Test()
{
    // Arrange: a filter sized for one million items at a 10% false-positive
    // rate, backed by two independent hash functions.
    const int size = 1000000;
    var filter = new BloomFilter(size, 0.1);
    filter.AddHashFn(new Hash(new Fnv1a32()));
    filter.AddHashFn(new Hash(new Murmur32()));

    // Act: insert every key, remembering each one so we can query it back.
    var inserted = new List<HashKey>(size);
    for (int i = 0; i < size; i++)
    {
        var key = new HashKey(i.ToString());
        inserted.Add(key);
        filter.Add(key);
    }

    // Assert: a Bloom filter must never report a false negative.
    foreach (var key in inserted)
    {
        Assert.True(filter.MayContains(key));
    }
}
// Snapshots the parent node's state before descending to a child move, so the
// move can later be undone/compared. Conditionally maintains the incremental
// path finder and incremental hash key when those features are compiled in.
public void PrepareToMove(Node node, ref CurrentState current)
{
    // Save the parent (theoretically could overflow).
    current.Parents[current.ParentIndex++] = node;

    // Save the parent's sokoban coordinate.
    OldSokobanRow = current.SokobanRow;
    OldSokobanColumn = current.SokobanColumn;

    // Record the parent's pushes.
    ParentPushes = node.Pushes;

#if USE_INCREMENTAL_PATH_FINDER
#if true
    // Force a full calculation when switching
    // from full to incremental.
    bool oldIncremental = current.Incremental;
    current.Incremental = node.HasChildren && !node.Child.Searched;
    if (!oldIncremental && current.Incremental)
    {
        current.PathFinder.ForceFullCalculation();
    }
#endif
#endif

#if USE_INCREMENTAL_HASH_KEY
    // Save the parent's hash key.
    OldHashKey = current.HashKey;
#endif
}
// Decrements the usage count of the page identified by pageId; once the count
// would drop to zero, physically deletes the page record, its data, and its
// hash-key index entry.
private void DeletePage(int pageId)
{
    var key = CreateKey(pageId);

    ushort version;
    var page = LoadJson(storage.Pages, key, writeBatch.Value, out version);

    var usageCount = page.Value<int>("usage_count");
    if (usageCount <= 1)
    {
        // Last reference: remove the record plus both secondary indexes.
        var pageData = storage.Pages.GetIndex(Tables.Pages.Indices.Data);
        var pagesByKey = storage.Pages.GetIndex(Tables.Pages.Indices.ByKey);

        // Rebuild the (strong, weak) content-hash key this page was indexed by.
        var strongHash = page.Value<byte[]>("page_strong_hash");
        var weakHash = page.Value<int>("page_weak_hash");

        var hashKey = new HashKey
        {
            Strong = strongHash,
            Weak = weakHash
        };

        storage.Pages.Delete(writeBatch.Value, key, version);
        pageData.Delete(writeBatch.Value, key);
        pagesByKey.Delete(writeBatch.Value, ConvertToKey(hashKey));
    }
    else
    {
        // Still referenced elsewhere: just decrement the count.
        page["usage_count"] = usageCount - 1;
        storage.Pages.Add(writeBatch.Value, key, page, version);
    }
}
// Inserts a file page deduplicated by its content hash. If a page with the
// same (strong, weak) hash key already exists, its usage count is incremented
// and its id returned; otherwise a new page record is created, its (possibly
// codec-encoded) data stored, and the new id returned.
public int InsertPage(byte[] buffer, int size)
{
    var hashKey = new HashKey(buffer, size);
    var key = (Slice)ConvertToKey(hashKey);

    var pageByKey = storage.Pages.GetIndex(Tables.Pages.Indices.ByKey);
    var pageData = storage.Pages.GetIndex(Tables.Pages.Indices.Data);

    var result = pageByKey.Read(Snapshot, key, writeBatch.Value);
    if (result != null)
    {
        // Existing page: the by-key index maps the hash to the page's id.
        var id = (Slice)result.Reader.ToStringValue();

        ushort version;
        var page = LoadJson(storage.Pages, id, writeBatch.Value, out version);
        if (page == null)
        {
            throw new InvalidOperationException(string.Format("Could not find page '{0}'. Probably data is corrupted.", id));
        }

        // Bump the reference count for the duplicate insert.
        var usageCount = page.Value<int>("usage_count");
        page["usage_count"] = usageCount + 1;

        storage.Pages.Add(writeBatch.Value, id, page, version);

        return(page.Value<int>("id"));
    }

    // New page: allocate an id and write the metadata record.
    var newPageId = IdGenerator.GetNextIdForTable(storage.Pages);
    var newPageKeyString = CreateKey(newPageId);
    var newPageKey = (Slice)newPageKeyString;

    var newPage = new RavenJObject
    {
        { "id", newPageId },
        { "page_strong_hash", hashKey.Strong },
        { "page_weak_hash", hashKey.Weak },
        { "usage_count", 1 }
    };

    storage.Pages.Add(writeBatch.Value, newPageKey, newPage, 0);

    // Pipe the raw buffer through every configured file codec before storing.
    var dataStream = CreateStream();

    using (var finalDataStream = fileCodecs.Aggregate((Stream) new UndisposableStream(dataStream), (current, codec) => codec.EncodePage(current)))
    {
        finalDataStream.Write(buffer, 0, size);
        finalDataStream.Flush();
    }

    dataStream.Position = 0;
    pageData.Add(writeBatch.Value, newPageKey, dataStream, 0);

    // Index the new page by its content hash for future deduplication.
    pageByKey.Add(writeBatch.Value, key, newPageKeyString);

    return(newPageId);
}
/// <summary>
/// Checks whether <paramref name="type"/> has a registration recorded in the
/// metadata of this container or any of its ancestors.
/// </summary>
/// <param name="type">The registered type to look up.</param>
/// <returns>true when a matching metadata entry exists anywhere in the hierarchy.</returns>
internal bool IsRegistered(Type type)
{
    var key = new HashKey(type);

    // Iterate through containers hierarchy
    for (UnityContainer? container = this; null != container; container = container._parent)
    {
        // Skip to parent if no registrations
        if (null == container._metadata)
        {
            continue;
        }

        var metadata = container._metadata;
        var targetBucket = key.HashCode % metadata.Buckets.Length;

        for (var i = metadata.Buckets[targetBucket]; i >= 0; i = metadata.Entries[i].Next)
        {
            if (metadata.Entries[i].HashKey != key)
            {
                continue;
            }

            return true;
        }

        // FIX: previously this returned false right here, which aborted the
        // search at the first ancestor that had metadata. The sibling lookups
        // (IsRegistered(Type, string), GetSimpleRegistration, TryGetPipeline)
        // all keep walking up to the root, so this one now does too.
    }

    return false;
}
/// <summary>
/// Returns the cached ListenerWrapper for a two-argument listener, creating
/// and caching one (keyed by the listener's target + method) if none exists.
/// </summary>
public static ListenerWrapper GetListenerWrapper <T1, T2>([DisallowNull] Action <T1, T2> listener)
{
    var key = new HashKey(listener.Target, listener.Method);
    return ListenerWrappers.GetOrAdd(
        key,
        _ => new ListenerWrapper(args => listener((T1)args[0], (T2)args[1]), key));
}
/// <summary>
/// Returns the cached ListenerWrapper for an eight-argument listener, creating
/// and caching one (keyed by the listener's target + method) if none exists.
/// </summary>
public static ListenerWrapper GetListenerWrapper <T1, T2, T3, T4, T5, T6, T7, T8>([DisallowNull] Action <T1, T2, T3, T4, T5, T6, T7, T8> listener)
{
    var key = new HashKey(listener.Target, listener.Method);
    return ListenerWrappers.GetOrAdd(
        key,
        _ => new ListenerWrapper(
            args => listener(
                (T1)args[0], (T2)args[1], (T3)args[2], (T4)args[3],
                (T5)args[4], (T6)args[5], (T7)args[6], (T8)args[7]),
            key));
}
/// <summary>
/// Builds a commit and eagerly computes its serialized content and checksum.
/// </summary>
/// <param name="parentKey"> null if this is the initial commit </param>
/// <param name="treeKey">Key of the tree snapshot this commit records.</param>
/// <param name="message">Commit message text.</param>
public Commit(HashKey parentKey, HashKey treeKey, string message)
{
    Message = message;
    TreeKey = treeKey;
    ParentKey = parentKey;

    // Serialize first — the checksum is taken over the serialized form.
    _commitFileContent = CreateCommitFileContent();
    _checksum = ContentHasher.HashContent(_commitFileContent);
}
/// <summary>
/// Adds a key to the Bloom filter by setting one bit per configured hash
/// function (each hash is reduced modulo the filter size M).
/// </summary>
public void Add(HashKey key)
{
    CheckAllHashFnInitialized();

    foreach (var fn in _hashFunctions)
    {
        _bitArray.Set((int)(fn.Compute(key) % M), true);
    }
}
/// <include file='doc\BindingContext.uex' path='docs/doc[@for="BindingContext.HashKey.Equals"]/*' />
/// <internalonly/>
/// <devdoc>
/// </devdoc>
public override bool Equals(object target)
{
    // Keys match when they wrap the same live data-source instance
    // (reference identity through the weak reference) and the same member.
    return target is HashKey other
        && wRef.Target == other.wRef.Target
        && dataMember == other.dataMember;
}
// Initializes solver state from a level: caches references, records the
// sokoban position, then snapshots the board's occupants hash key.
public void Initialize(Level level, PathFinder pathFinder)
{
    Level = level;
    PathFinder = pathFinder;
    SokobanRow = level.SokobanRow;
    SokobanColumn = level.SokobanColumn;
    // Order matters: the sokoban is removed BEFORE the occupants hash is
    // taken — presumably so the key excludes the sokoban square (TODO confirm).
    level.RemoveSokoban();
    HashKey = level.GetOccupantsHashKey();
}
/// <summary>
/// Internal constructor used when rehydrating a tree whose checksum is
/// already known (e.g. when parsing from storage).
/// </summary>
private Tree(HashKey checksum, string dirName, IDictionary <string, HashKey> blobs, IDictionary <string, HashKey> subTrees)
{
    // Populate the child maps first, then record identity fields.
    InitBlobs(blobs);
    InitSubTrees(subTrees);
    _checksum = checksum;
    DirName = dirName;
}
/// <summary>
/// Builds a blob from a file on disk: records its repo-relative path and name,
/// reads its full text, and computes the content checksum.
/// </summary>
public Blob(RelativePath filePath)
{
    FilePath = filePath.GetRelativeToGitRoot();
    FileName = filePath.GetFileName();

    // File.ReadAllText is equivalent to StreamReader.ReadToEnd with the
    // default (BOM-detecting UTF-8) encoding.
    FileContent = File.ReadAllText(filePath.GetAbsolutePath());

    _blobContent = CreateBlobFileContent();
    _checksum = ContentHasher.HashContent(_blobContent);
}
public void SimpleStoreRetrieveTest()
{
    // Arrange: a file on disk wrapped in a Blob.
    CreateFile("a.txt", "a content");
    Blob blob = new Blob(new RelativePath("a.txt"));

    // Act: store the blob, then retrieve it by the returned key.
    HashKey blobKey = ObjectDatabase.Store(blob);
    Blob retrievedBlob = ObjectDatabase.RetrieveBlob(blobKey);

    // Assert. FIX: xUnit's Assert.Equal takes (expected, actual); the original
    // call had the arguments swapped, which yields misleading failure output.
    Assert.Equal(blob, retrievedBlob);
}
/// <summary>
/// Two keys are equal when they carry the same ObjectId; anything that is
/// not a HashKey (including null) compares unequal.
/// </summary>
public override bool Equals(object obj)
{
    return obj is HashKey other && this.ObjectId == other.ObjectId;
}
/// <summary>
/// Keys are equal when both source and member match; non-HashKey or null
/// arguments compare unequal.
/// </summary>
public override bool Equals(object o)
{
    return o is HashKey other
        && other.source == source
        && other.member == member;
}
/// <summary>
/// Returns the cached ListenerWrapper for an arbitrary delegate, creating and
/// caching one that forwards via DynamicInvoke if none exists yet.
/// </summary>
/// <param name="listener">Any void-returning delegate.</param>
/// <exception cref="ArgumentException">Thrown when the delegate's return type is not void.</exception>
public static ListenerWrapper GetListenerWrapper([DisallowNull] Delegate listener)
{
    if (listener.Method.ReturnType != typeof(void))
    {
        // FIX: corrected the error message's grammar
        // ("listener must be return void." -> "Listener must return void.").
        throw new ArgumentException("Listener must return void.");
    }

    var hashKey = new HashKey(listener.Target, listener.Method);
    return ListenerWrappers.GetOrAdd(hashKey, k => new ListenerWrapper(args => listener.DynamicInvoke(args), hashKey));
}
/// <summary>
/// Builds a sample HashKey fixture with two scalar entries and one
/// multi-valued entry.
/// </summary>
private HashKey CreateHk()
{
    var key = new HashKey();
    key.Add("K", "entitykey1");
    key.Add("A", "abc");
    key.AddItems("ids", new object[] { "id1", "id2", "id3" });
    return key;
}
/// <summary>
/// Loads a commit from the object database by its hash key.
/// </summary>
/// <param name="key">Key identifying the stored commit object.</param>
/// <returns>The parsed commit, or null when no object with that key exists.</returns>
public static Commit RetrieveCommit(HashKey key)
{
    string content = ReadFileContent(key.ToString());
    return content == null ? null : Commit.ParseFromString(content);
}
/// <summary>
/// Retrieves tree object from the database.
/// </summary>
/// <param name="key">key to use for searching in database</param>
/// <returns>null when no tree was found</returns>
public static Tree RetrieveTree(HashKey key)
{
    string content = ReadFileContent(key.ToString());
    return content == null ? null : Tree.ParseFromString(content);
}
// Application entry point: digests a sample XLIFF file with every supported
// hash algorithm, exercises XLIFF/XML round-tripping, then boots WinForms.
// FIX: removed a large block of dead commented-out Enigma demo code and a
// stray empty statement (";") left between the live calls.
static void Main()
{
    // Digest the same file with every supported hash algorithm.
    string[] tbl = new string[] {
        HashKey.DigestFile(HashAlgorithmEnum.MD5, "XLIFF_2.xlf"),
        HashKey.DigestFile(HashAlgorithmEnum.SHA1, "XLIFF_2.xlf"),
        HashKey.DigestFile(HashAlgorithmEnum.SHA256, "XLIFF_2.xlf"),
        HashKey.DigestFile(HashAlgorithmEnum.SHA384, "XLIFF_2.xlf"),
        HashKey.DigestFile(HashAlgorithmEnum.SHA512, "XLIFF_2.xlf"),
        HashKey.DigestFile(HashAlgorithmEnum.KeyedHashAlgorithm, "XLIFF_2.xlf"),
        HashKey.DigestFile(HashAlgorithmEnum.RIPEMD160, "XLIFF_2.xlf"),
    };

    Chromatik.Unicode.Unicode.Load("en");
    tbl.ForEach(Incremente);

    // Exercise XLIFF loading and id manipulation.
    // NOTE(review): fI/fS look like leftover debugging probes, but the [1]
    // indexers would throw on a smaller document, so they are kept as-is.
    Xliff xliff = Xliff.LoadXliff("XLIFF_2.xlf");
    string fI = xliff.IDs[1];
    XliffFile fS = xliff.Identifieds[1];
    xliff[0].ID = "f3";
    xliff[0].ID = "f2";

    // Round-trip a small XML fragment to disk as HTML5.
    XmlDocument xml = XmlDocumentCreate.DocumentXML("<xml><span>kkkkkkk</span> <span de=\"\">yyyy</span><i> 65246541 </i><span>sdfwsfd</span></xml>");
    XmlDocumentWriter.Document("test.0.xml", xml, DocumentType.HTML5);

    // Standard WinForms bootstrap.
    Application.EnableVisualStyles();
    Application.SetCompatibleTextRenderingDefault(false);
    Application.Run(new Form1());
}
/// <summary>
/// Combines the hash codes of every identity component using the classic
/// 397-multiplier mixing scheme; null types contribute 0.
/// </summary>
public override int GetHashCode()
{
    unchecked
    {
        int hash = KeyType?.GetHashCode() ?? 0;
        hash = (hash * 397) ^ (ValueType?.GetHashCode() ?? 0);
        hash = (hash * 397) ^ HashKey.GetHashCode();
        hash = (hash * 397) ^ KeyTypeState.GetHashCode();
        hash = (hash * 397) ^ ValueTypeState.GetHashCode();
        return hash;
    }
}
/// <summary>
/// Removes every cached binding manager whose key refers to the given data
/// source.
/// </summary>
protected virtual void RemoveCore(object dataSource)
{
    // Snapshot the keys first: `managers` cannot be mutated while its key
    // collection is being enumerated.
    var snapshot = new HashKey[managers.Keys.Count];
    managers.Keys.CopyTo(snapshot, 0);

    foreach (var key in snapshot)
    {
        if (key.source == dataSource)
        {
            managers.Remove(key);
        }
    }
}
/// <summary>
/// Serializes a branch's target commit for its ref file; "0" is the sentinel
/// written for a branch that has no commit yet.
/// </summary>
private static string GetBranchFileContent(Branch branch)
{
    return branch.GetCommitKey()?.ToString() ?? "0";
}
/// <summary>
/// Resolves the full, dependency-ordered set of feature objects required to
/// satisfy <paramref name="needed"/>.
/// </summary>
/// <param name="needed">All features requested by the gadget.</param>
/// <param name="unsupported">Populated with any unsupported features.</param>
/// <returns>All objects necessary for needed in graph-dependent order</returns>
public HashSet<GadgetFeature> GetFeatures(HashKey<String> needed, HashSet<String> unsupported)
{
    graphComplete = true;

    // An empty request is interpreted as "everything in core".
    if (needed.Count == 0)
    {
        foreach (var item in core.Keys)
        {
            needed.Add(item);
        }
    }

    // We use the cache only for situations where all needed are available.
    // if any are missing, the result won't be cached.
    HashSet<GadgetFeature> libCache;
    if (cache.TryGetValue(needed, out libCache))
    {
        return libCache;
    }

    HashSet<GadgetFeature> ret = new HashSet<GadgetFeature>();
    PopulateDependencies(needed, ret);

    // Fill in anything that was optional but missing. These won't be cached.
    if (unsupported != null)
    {
        foreach (String feature in needed)
        {
            if (!features.ContainsKey(feature))
            {
                unsupported.Add(feature);
            }
        }
    }

    // Only fully-satisfied lookups are cached (see comment above).
    if (unsupported == null || unsupported.Count == 0)
    {
        cache[needed] = ret;
    }

    return ret;
}
/// <summary>
/// Convenience overload: resolves all GadgetFeature objects necessary for
/// <paramref name="needed"/> in graph-dependent order, without collecting
/// unsupported features.
/// </summary>
public HashSet<GadgetFeature> GetFeatures(HashKey<String> needed)
{
    return GetFeatures(needed, null);
}
/// <summary>
/// Substring search: one descent step down the suffix tree.
/// </summary>
// ToDo: this method should not take depth. That value should be internal state
// of the Pointer; nothing outside should have to track it. In particular,
// TextComparer should only hand the Pointer the next character.
public bool PointerDown(Pointer pointer, byte hash, int depth)
{
    // At a node boundary: try to follow the child edge selected by `hash`.
    if (pointer.Depth == depth)
    {
        var key = new HashKey(hash, pointer.Left, pointer.Right);
        LcpValue value;

        //pointer.LastLeft = pointer.Left;
        //pointer.LastRigth = pointer.Rigth;

        if (!_cldtab.TryGetValue(key, out value))
            return false;

        // ToDo: the suffix link is not updated when we are at a node and cannot
        // advance by the hash. TextComparer will therefore use a stale suffix
        // link, costing extra descent time to reach the right position. The
        // simplest fix is to move the next two lines a couple of positions up.
        pointer.LastLeft = pointer.Left;
        pointer.LastRight = pointer.Right;

        // Descend into the child interval.
        pointer.Left = value.Left;
        pointer.Right = value.Right;
        pointer.Depth = value.Lcp;
        return true;
    }

    // Mid-edge: just compare the next character hash along the edge.
    var id = _suftab[pointer.Left] + depth;
    var res = _hashes[id] == hash;
    return res;
}
// Inserts a file page keyed by its (weak, strong) content hash using Esent.
// If a page with the same hashes already exists, its usage count is
// escrow-incremented and its id returned; otherwise a new row is inserted
// and the generated id returned.
public int InsertPage(byte[] buffer, int size)
{
    var key = new HashKey(buffer, size);

    // Seek on the composite (weak, strong) hash index.
    Api.JetSetCurrentIndex(session, Pages, "by_keys");
    Api.MakeKey(session, Pages, key.Weak, MakeKeyGrbit.NewKey);
    Api.MakeKey(session, Pages, key.Strong, MakeKeyGrbit.None);

    if (Api.TrySeek(session, Pages, SeekGrbit.SeekEQ))
    {
        // Duplicate content: bump the reference count (escrow update is
        // concurrency-safe) and return the existing id.
        Api.EscrowUpdate(session, Pages, tableColumnsCache.PagesColumns["usage_count"], 1);
        return Api.RetrieveColumnAsInt32(session, Pages, tableColumnsCache.PagesColumns["id"]).Value;
    }

    var bookMarkBuffer = new byte[SystemParameters.BookmarkMost];
    var actualSize = 0;

    using (var update = new Update(session, Pages, JET_prep.Insert))
    {
        Api.SetColumn(session, Pages, tableColumnsCache.PagesColumns["page_strong_hash"], key.Strong);
        Api.SetColumn(session, Pages, tableColumnsCache.PagesColumns["page_weak_hash"], key.Weak);
        Api.JetSetColumn(session, Pages, tableColumnsCache.PagesColumns["data"], buffer, size, SetColumnGrbit.None, null);

        try
        {
            update.Save(bookMarkBuffer, bookMarkBuffer.Length, out actualSize);
        }
        catch (EsentKeyDuplicateException)
        {
            // it means that page is being inserted by another thread
            throw new ConcurrencyException("The same file page is being created");
        }
    }

    // Navigate back to the freshly inserted row to read its generated id.
    Api.JetGotoBookmark(session, Pages, bookMarkBuffer, actualSize);

    return Api.RetrieveColumnAsInt32(session, Pages, tableColumnsCache.PagesColumns["id"]).Value;
}
/// <summary>
/// Finds the point at which to resume the search after following a suffix
/// link. Analogous to PointerDown, but performs a fast node-to-node descent
/// without checking the individual characters along the edges.
/// </summary>
// ToDo: descent is always possible (see the theory), so this method should be
// void, not bool — the hash table always contains the needed value when it is
// built correctly. This method should also loop internally, swallowing edges
// until it reaches the target position, instead of leaving that duty to the
// comparer (see the pseudocode in the documentation). The hash and depth need
// not be passed in either: a proper PointerDownByLink should already know
// them. Really, UseSuffixLink and PointerDownByLink should be merged into one
// method.
public bool PointerDownByLink(Pointer pointer, byte hash, int depth)
{
    var key = new HashKey(hash, pointer.Left, pointer.Right);
    LcpValue value;
    if (!_cldtab.TryGetValue(key, out value))
        return false;

    // Remember the previous interval before descending.
    pointer.LastLeft = pointer.Left;
    pointer.LastRight = pointer.Right;

    pointer.Left = value.Left;
    pointer.Right = value.Right;
    pointer.Depth = value.Lcp;
    return true;
}
/// <summary>
/// Removes every cached binding manager keyed to the given data source.
/// </summary>
protected virtual void RemoveCore (object dataSource)
{
    // Copy the keys out first: Remove() would invalidate a live enumeration.
    HashKey[] copied = new HashKey [managers.Keys.Count];
    managers.Keys.CopyTo (copied, 0);

    foreach (HashKey k in copied) {
        if (k.source == dataSource)
            managers.Remove (k);
    }
}
/// <summary>
/// Injects javascript libraries needed to satisfy feature dependencies.
/// </summary>
/// <param name="gadget">Gadget whose spec declares the required features.</param>
/// <param name="headTag">Head element that receives the script tags.</param>
private void InjectFeatureLibraries(Gadget gadget, Node headTag)
{
    // TODO: If there isn't any js in the document, we can skip this. Unfortunately, that means
    // both script tags (easy to detect) and event handlers (much more complex).
    GadgetContext context = gadget.getContext();
    GadgetSpec spec = gadget.getSpec();

    // "libs" is a colon-separated list of libraries forced by the request.
    String forcedLibs = context.getParameter("libs");
    HashKey<String> forced;
    if (string.IsNullOrEmpty(forcedLibs))
    {
        forced = new HashKey<string>();
    }
    else
    {
        forced = new HashKey<string>();
        foreach (var item in forcedLibs.Split(':'))
        {
            forced.Add(item);
        }
    }

    // Forced libs are always done first.
    if (forced.Count != 0)
    {
        // Forced libraries are served as a single external bundled script.
        String jsUrl = urlGenerator.getBundledJsUrl(forced, context);
        Element libsTag = headTag.getOwnerDocument().createElement("script");
        libsTag.setAttribute("src", jsUrl);
        headTag.appendChild(libsTag);

        // Forced transitive deps need to be added as well so that they don't get pulled in twice.
        // TODO: Figure out a clean way to avoid having to call getFeatures twice.
        foreach (GadgetFeature dep in featureRegistry.GetFeatures(forced))
        {
            forced.Add(dep.getName());
        }
    }

    // Inline any libs that weren't forced. The ugly context switch between inline and external
    // Js is needed to allow both inline and external scripts declared in feature.xml.
    String container = context.getContainer();
    ICollection<GadgetFeature> features = GetFeatures(spec, forced);

    // Precalculate the maximum length in order to avoid excessive garbage generation.
    // NOTE(review): this sums the content of URL-type libraries, while the
    // builder below accumulates NON-URL content — so the estimate looks
    // mismatched; the original author already flagged it as "really inexact".
    int size = 0;
    foreach (GadgetFeature feature in features)
    {
        foreach (JsLibrary library in feature.getJsLibraries(RenderingContext.GADGET, container))
        {
            if (library._Type == JsLibrary.Type.URL)
            {
                size += library.Content.Length;
            }
        }
    }

    // Really inexact.
    StringBuilder inlineJs = new StringBuilder(size);

    foreach (GadgetFeature feature in features)
    {
        foreach (JsLibrary library in feature.getJsLibraries(RenderingContext.GADGET, container))
        {
            if (library._Type == JsLibrary.Type.URL)
            {
                // Flush any accumulated inline js before emitting an external
                // reference, to preserve script execution order.
                if (inlineJs.Length > 0)
                {
                    Element inlineTag = headTag.getOwnerDocument().createElement("script");
                    headTag.appendChild(inlineTag);
                    inlineTag.appendChild(headTag.getOwnerDocument().createTextNode(inlineJs.ToString()));
                    inlineJs.Length = 0;
                }
                Element referenceTag = headTag.getOwnerDocument().createElement("script");
                referenceTag.setAttribute("src", library.Content);
                headTag.appendChild(referenceTag);
            }
            else
            {
                if (!forced.Contains(feature.getName()))
                {
                    // already pulled this file in from the shared contents.
                    if (context.getDebug())
                    {
                        inlineJs.Append(library.DebugContent);
                    }
                    else
                    {
                        inlineJs.Append(library.Content);
                    }
                    inlineJs.Append(";\n");
                }
            }
        }
    }

    // Library configuration always goes at the end of the inline block.
    inlineJs.Append(GetLibraryConfig(gadget, features));

    if (inlineJs.Length > 0)
    {
        Element inlineTag = headTag.getOwnerDocument().createElement("script");
        headTag.appendChild(inlineTag);
        inlineTag.appendChild(headTag.getOwnerDocument().createTextNode(inlineJs.ToString()));
    }
}
/// <summary>
/// Caches a binding manager for a data source. Managers added through
/// AddCore are keyed by the data source alone (empty data member).
/// </summary>
protected virtual void AddCore (object dataSource, BindingManagerBase listManager)
{
    if (dataSource == null)
        throw new ArgumentNullException ("dataSource");
    if (listManager == null)
        throw new ArgumentNullException ("listManager");

    managers [new HashKey (dataSource, String.Empty)] = listManager;
}
/// <summary>
/// Get all features needed to satisfy this rendering request.
/// </summary>
/// <param name="spec">Gadget spec whose ModulePrefs declare required features.</param>
/// <param name="forced">Forced libraries; added in addition to those found in the spec. Defaults to "core"</param>
/// <returns>The resolved feature objects in dependency order.</returns>
/// <exception cref="UnsupportedFeatureException">
/// Thrown when a feature that is both required and not forced is unsupported.
/// </exception>
private ICollection<GadgetFeature> GetFeatures(GadgetSpec spec, ICollection<String> forced)
{
    Dictionary<String, Feature> features = spec.getModulePrefs().getFeatures();

    // Union of the spec-declared features and the forced ones.
    HashKey<String> libs = new HashKey<string>();
    foreach (var item in features.Keys)
    {
        libs.Add(item);
    }
    if (forced.Count != 0)
    {
        foreach (var item in forced)
        {
            libs.Add(item);
        }
    }

    HashSet<String> unsupported = new HashSet<String>();
    ICollection<GadgetFeature> feats = featureRegistry.GetFeatures(libs, unsupported);

    // Forced features are exempt from the unsupported check.
    foreach (var item in forced)
    {
        unsupported.Remove(item);
    }

    if (unsupported.Count != 0)
    {
        // Remove non-required libs
        unsupported.RemoveWhere(x => !features[x].getRequired());

        // Throw error with full list of unsupported libraries
        if (unsupported.Count != 0)
        {
            throw new UnsupportedFeatureException(String.Join(",", unsupported.ToArray()));
        }
    }
    return feats;
}
// HTTP endpoint serving the concatenated javascript for one or more features.
// A resource name like "a__b__c.js" requests several features at once; the
// response carries long-lived cache headers when a version parameter is given.
public void Index(string resourceName)
{
    HttpRequest req = System.Web.HttpContext.Current.Request;
    HttpResponse resp = System.Web.HttpContext.Current.Response;

    // If an If-Modified-Since header is ever provided, we always say
    // not modified. This is because when there actually is a change,
    // cache busting should occur.
    if (req.Params["If-Modified-Since"] != null && req.Params["v"] != null)
    {
        resp.StatusCode = 304;
        return;
    }

    if (resourceName.EndsWith(".js"))
    {
        // Lop off the suffix for lookup purposes
        resourceName = resourceName.Substring(0, resourceName.Length - ".js".Length);
    }

    // "__" separates multiple requested feature names in one resource name.
    HashKey<string> needed = new HashKey<string>();
    if (resourceName.Contains("__"))
    {
        foreach (string item in resourceName.Split(new[]{'_'}, StringSplitOptions.RemoveEmptyEntries))
        {
            needed.Add(item);
        }
    }
    else
    {
        needed.Add(resourceName);
    }

    String debugStr = req.Params["debug"];
    String container = req.Params["container"];
    String containerStr = req.Params["c"];
    bool debug = "1".Equals(debugStr);
    if (container == null)
    {
        container = ContainerConfig.DEFAULT_CONTAINER;
    }

    // "c=1" selects container-context javascript, otherwise gadget context.
    RenderingContext rcontext = "1".Equals(containerStr) ? RenderingContext.CONTAINER : RenderingContext.GADGET;

    ICollection<GadgetFeature> features = registry.GetFeatures(needed);
    StringBuilder jsData = new StringBuilder();
    foreach (GadgetFeature feature in features)
    {
        foreach (JsLibrary lib in feature.getJsLibraries(rcontext, container))
        {
            // URL-type libraries are referenced externally, never inlined here.
            if (!lib._Type.Equals(JsLibrary.Type.URL))
            {
                if (debug)
                {
                    jsData.Append(lib.DebugContent);
                }
                else
                {
                    jsData.Append(lib.Content);
                }
                jsData.Append(";\n");
            }
        }
    }

    if (jsData.Length == 0)
    {
        resp.StatusCode = (int)HttpStatusCode.NotFound;
        return;
    }

    if (req.Params["v"] != null)
    {
        // Versioned files get cached indefinitely
        HttpUtil.SetCachingHeaders(resp);
    }
    else
    {
        // Unversioned files get cached for 1 hour.
        HttpUtil.SetCachingHeaders(resp, 60 * 60);
    }

    resp.ContentType = "text/javascript; charset=utf-8";
    resp.ContentEncoding = Encoding.UTF8;
    resp.Output.Write(jsData.ToString());
}
// Returns (creating and caching on demand) the BindingManagerBase for the
// given data source / data member pair.
public BindingManagerBase this [object dataSource, string dataMember] {
    get {
        if (dataSource == null)
            throw new ArgumentNullException ("dataSource");
        if (dataMember == null)
            dataMember = String.Empty;

#if NET_2_0
        // 2.0 data sources that provide their own currency managers bypass
        // the managers cache entirely.
        ICurrencyManagerProvider cm_provider = dataSource as ICurrencyManagerProvider;
        if (cm_provider != null) {
            if (dataMember.Length == 0)
                return cm_provider.CurrencyManager;

            return cm_provider.GetRelatedCurrencyManager (dataMember);
        }
#endif

        HashKey key = new HashKey (dataSource, dataMember);
        BindingManagerBase res = managers [key] as BindingManagerBase;

        if (res != null)
            return res;

        res = CreateBindingManager (dataSource, dataMember);
        // CreateBindingManager may fail; a null result is not cached.
        if (res == null)
            return null;
        managers [key] = res;
        return res;
    }
}
// Constructs a search node, registering it with the collection (which assigns
// its id) and recording the move that produced it.
public Node(NodeCollection nodes, int row, int column, Direction direction, int pushes, int moves)
{
    // Allocate registers this node in the collection and returns its id.
    this.id = nodes.Allocate(this);
    this.row = row;
    this.column = column;
    this.direction = direction;
    this.pushes = pushes;
    this.moves = moves;
    this.score = 0;
#if DEBUG
    // Debug builds additionally track the node's hash key for diagnostics.
    this.hashKey = HashKey.Empty;
#endif
}
/// <summary>
/// Checks whether a binding manager is already cached for the given data
/// source / data member pair. A null member is treated as the empty member.
/// </summary>
public bool Contains (object dataSource, string dataMember)
{
    if (dataSource == null)
        throw new ArgumentNullException ("dataSource");

    HashKey lookup = new HashKey (dataSource, dataMember == null ? String.Empty : dataMember);
    return managers [lookup] != null;
}