/// <summary>
/// Builds the KNR fingerprint table for every object in <paramref name="original"/>,
/// computing fingerprints in parallel blocks.
/// </summary>
/// <param name="original">Database whose objects are fingerprinted.</param>
/// <param name="refs">Index over the reference objects used by GetFP.</param>
/// <param name="k">Number of nearest references kept per fingerprint.</param>
public virtual void Build(MetricDB original, Index refs, int k)
{
    this.K = k;
    this.IdxRefs = refs;
    var n = original.Count;
    // Pre-size the list and fill with placeholders so parallel blocks can
    // assign by position without resizing.
    this.Fingerprints = new List<int[]> (n);
    for (int i = 0; i < n; ++i) {
        this.Fingerprints.Add (null);
    }
    var tasks = Environment.ProcessorCount << 3;
    // FIX: blocksize was n / tasks, which is 0 whenever n < tasks and then
    // divides by zero in the LongParallel.For bound below. Clamp to >= 1.
    int blocksize = Math.Max (1, n / tasks);
    int advance = 0;
    long minElapsedTicks = 20000000; // control the print rate
    long prevTicks = DateTime.Now.Ticks;
    var create_block = new Action<int> (delegate(int blockID) {
        var sp = blockID * blocksize;
        var ep = Math.Min (n, sp + blocksize);
        // FIX: currTicks is now local to the delegate; it was previously a
        // single method-scoped variable shared (and raced on) by all blocks.
        long currTicks = DateTime.Now.Ticks;
        if (advance == 0 || currTicks - prevTicks > minElapsedTicks) {
            Console.WriteLine ("KnrFP {0} ({1}/{2}), db: {3}, num_refs: {4}, K: {5}, timestamp: {6}",
                this, advance, n, Path.GetFileName(original.Name), this.IdxRefs.DB.Count, this.K, DateTime.Now);
            prevTicks = currTicks;
        }
        for (; sp < ep; ++sp) {
            var fp = this.GetFP(original[sp]);
            this.Fingerprints[sp] = fp;
            // FIX: advance is shared across parallel blocks; a plain ++ loses
            // updates. It only drives progress reporting, but keep it exact.
            System.Threading.Interlocked.Increment (ref advance);
        }
    });
    LongParallel.For (0, 1 + n / blocksize, create_block);
    Console.WriteLine ("done");
}
/// <summary>
/// Creates a batch group that draws by default, with a null batch type and index.
/// </summary>
public BatchGroup(int toCreate, int willCreate)
    : base(toCreate, willCreate)
{
    this.willDraw = true;
    this.name = BatchType.Null;
    this.index = Index.Index_Null;
}
/*
** Return a pointer to the column affinity string associated with index
** pIdx. A column affinity string has one character for each column in
** the table, according to the affinity of the column:
**
**  Character      Column affinity
**  ------------------------------
**  'a'            TEXT
**  'b'            NONE
**  'c'            NUMERIC
**  'd'            INTEGER
**  'e'            REAL
**
** An extra 'b' is appended to the end of the string to cover the
** rowid that appears as the last column in every index.
**
** Memory for the buffer containing the column index affinity string
** is managed along with the rest of the Index structure. It will be
** released when sqlite3DeleteIndex() is called.
*/
static string sqlite3IndexAffinityStr( Vdbe v, Index pIdx )
{
  // Lazily build and cache the affinity string on first use; the cached
  // value lives on the Index itself for subsequent calls.
  if ( pIdx.zColAff == null || pIdx.zColAff[0] == '\0' )
  {
    /* The first time a column affinity string for a particular index is
    ** required, it is allocated and populated here. It is then stored as
    ** a member of the Index structure for subsequent use.
    **
    ** The column affinity string will eventually be deleted by
    ** sqliteDeleteIndex() when the Index structure itself is cleaned
    ** up.
    */
    int n;
    Table pTab = pIdx.pTable;
    sqlite3 db = sqlite3VdbeDb( v );
    // C-port note: the original C allocated nColumn+2 chars; the StringBuilder
    // capacity mirrors that. The malloc-failure path below is kept only as a
    // record of the original C behavior.
    StringBuilder pIdx_zColAff = new StringBuilder( pIdx.nColumn + 2 );// (char )sqlite3DbMallocRaw(0, pIdx->nColumn+2);
    // if ( pIdx_zColAff == null )
    // {
    //  db.mallocFailed = 1;
    //  return null;
    // }
    // One affinity character per indexed column, looked up via the table's
    // column metadata.
    for ( n = 0; n < pIdx.nColumn; n++ )
    {
      pIdx_zColAff.Append( pTab.aCol[pIdx.aiColumn[n]].affinity );
    }
    // Trailing NONE affinity covers the rowid column, plus a C-style terminator.
    pIdx_zColAff.Append( SQLITE_AFF_NONE );
    pIdx_zColAff.Append( '\0' );
    pIdx.zColAff = pIdx_zColAff.ToString();
  }
  return pIdx.zColAff;
}
/// <summary>
/// Base hierarchy node: a typed, indexed composite object whose visual is a
/// proxy sprite placeholder (no real sprite of its own).
/// </summary>
protected Hierarchy(Azul.Color colColor, GameObject.Name typeName, Index index = Index.Index_Null)
    : base(typeName, SpriteEnum.Null, index, new Azul.Color(0f, 0f, 0f), colColor, 0.0f, 0.0f)
{
    this.index = index;
    this.type = typeName;
    this.sprite = new ProxySprite(SpriteEnum.Null, index, 0.0f, 0.0f);
}
/// <summary>
/// Builds the binary fingerprint space for <paramref name="db"/> using
/// <paramref name="num_pairs"/> sampled pairs, then indexes it sequentially.
/// </summary>
/// <param name="db">Source metric database.</param>
/// <param name="num_pairs">Number of pairs; the sample holds twice this many objects.</param>
/// <param name="maxCandidates">Candidate cap for searches; -1 means unlimited.</param>
public virtual void Build(MetricDB db, int num_pairs, int maxCandidates = -1)
{
    this.DB = db;
    this.Fingerprints = new BinQ8HammingSpace (1);
    this.Sample = new SampleSpace("", this.DB, num_pairs * 2);
    this.MaxCandidates = maxCandidates;
    var n = this.DB.Count;
    var A = new byte[n][];
    int pc = this.DB.Count / 100 + 1; // report roughly every 1%
    int advance = 0;
    var create_one = new Action<int> (delegate(int i) {
        var fp = this.GetFP(this.DB[i]);
        A[i] = fp;
        // FIX: the delegate runs concurrently under Parallel.For; the old
        // read-then-increment of `advance` both raced and could skip or
        // duplicate progress lines. Increment atomically and test the result.
        var count = System.Threading.Interlocked.Increment (ref advance);
        if (count % pc == 0) {
            Console.WriteLine ("DEBUG {0} ({1}/{2}), db: {3}, num_pairs: {4}, timestamp: {5}",
                this, count, n, db.Name, num_pairs, DateTime.Now);
        }
    });
    ParallelOptions ops = new ParallelOptions();
    ops.MaxDegreeOfParallelism = -1;
    // FIX: `ops` was constructed but never passed to Parallel.For.
    Parallel.For (0, n, ops, create_one);
    foreach (var fp in A) {
        this.Fingerprints.Add( fp );
    }
    var s = new Sequential ();
    s.Build (this.Fingerprints);
    this.InternalIndex = s;
}
/// <summary>
/// Adds a new item to the collection held by <paramref name="content"/> at
/// <paramref name="index"/>. The item comes from <paramref name="parameter"/>
/// when provided, otherwise a default instance of the element type is built.
/// </summary>
protected override void ExecuteSync(IContent content, Index index, object parameter)
{
    var value = content.Retrieve(index);
    var collectionDescriptor = (CollectionDescriptor)TypeDescriptorFactory.Default.Find(value.GetType());
    var elementType = collectionDescriptor.ElementType;

    object itemToAdd = null;
    // TODO: Find a better solution for ContentSerializerAttribute that doesn't require to reference Core.Serialization (and unreference this assembly)
    if (elementType.IsAbstract || elementType.IsNullable() || elementType.GetCustomAttributes(typeof(ContentSerializerAttribute), true).Any())
    {
        // If the parameter is a type instead of an instance, try to construct an instance of this type
        var type = parameter as Type;
        if (type?.GetConstructor(Type.EmptyTypes) != null)
        {
            itemToAdd = Activator.CreateInstance(type);
        }
    }
    else if (elementType == typeof(string))
    {
        itemToAdd = parameter ?? "";
    }
    else
    {
        itemToAdd = parameter ?? ObjectFactory.NewInstance(elementType);
    }

    if (index.IsEmpty)
    {
        content.Add(itemToAdd);
    }
    else
    {
        // Handle collections in collections
        // TODO: this is not working on the observable node side
        var collectionNode = content.Reference.AsEnumerable[index].TargetNode;
        collectionNode.Content.Add(itemToAdd);
    }
}
/// <summary>
/// Drops the reference to the index so it becomes eligible for collection.
/// </summary>
public void Dispose()
{
    // The previous null-check before assignment was redundant: assigning
    // null is safe and idempotent either way.
    Index = null;
}
/// <summary>
/// True on the frame the given button was pressed on the given controller,
/// falling back to keyboard/PC axis mappings for the first three controllers.
/// </summary>
public static bool GetButtonDown(Button button, Index controlIndex)
{
    KeyCode code = GetKeycode(button, controlIndex);
    bool pressed = Input.GetKeyDown(code);

    // Check the PC axis bindings as a fallback (controllers 0-2 only).
    if (!pressed && (int)controlIndex < 3)
    {
        string axisName = "";
        switch (button)
        {
            case Button.A: axisName = "1_Action_" + (int)controlIndex; break;
            case Button.B: axisName = "2_Action_" + (int)controlIndex; break;
            case Button.Y: axisName = "3_Action_" + (int)controlIndex; break;
            case Button.X: axisName = "4_Action_" + (int)controlIndex; break;
            // NOTE(review): other buttons leave axisName empty and still query
            // Input.GetAxis("") — confirm that is intended.
        }
        pressed = Input.GetAxis(axisName) > 0.0f;
    }
    return pressed;
}
/// <summary>
/// Builds an Index pointing at the server's automatic node index endpoint.
/// </summary>
internal Index GetAutoIndex()
{
    var location = UriHelper.ConcatUri(GraphEnvironment.GetBaseUri(), "db/data/index/auto/node");
    var autoIndex = new Index();
    autoIndex.SetLocation(location);
    return autoIndex;
}
/// <summary>
/// A buildable spot spanning two tiles; starts empty (no building).
/// </summary>
public BuildingSpot(Tile firstTile, Tile secondTile)
{
    FirstTile = firstTile;
    SecondTile = secondTile;
    SpotIndex = new Index(FirstTile, SecondTile);
    building = Building.None;
}
// Walks executingElements starting at index.Value. "Before" elements run
// immediately; "surround" elements recurse into the rest of the pipeline;
// "after" elements are deferred onto afterElements and flushed only by the
// invocation that consumes the whole list.
async Task InnerInvoke(Context context, Index index)
{
    ElementInstance element;
    for (int i = index.Value; i < executingElements.Count; i++)
    {
        // Keep the shared cursor in sync with the local loop counter so that
        // nested (surround) invocations see and advance the same position.
        index.Value = i;
        element = executingElements[index.Value];
        if (element.IsBefore)
        {
            // Before-elements get a no-op continuation.
            await element.Invoke(context, ctx => Task.CompletedTask).ConfigureAwait(false);
            continue;
        }
        if (element.IsSurround)
        {
            // Advance past this element, recurse over the remainder, then
            // re-sync the loop counter from wherever the recursion stopped.
            index.Value += 1;
            await element.Invoke(context, ctx => InnerInvoke(ctx, index)).ConfigureAwait(false);
            i = index.Value++;
            continue;
        }
        if (element.IsAfter)
        {
            // Deferred: stack order means afters run in reverse registration order.
            afterElements.Push(Tuple.Create(context, element));
        }
    }
    // Only the invocation that reached the end of the list flushes the afters.
    if (index.Value == executingElements.Count)
    {
        foreach (var contextAndElement in afterElements)
        {
            await contextAndElement.Item2.Invoke(contextAndElement.Item1, ctx => Task.CompletedTask).ConfigureAwait(false);
        }
    }
}
/// <summary>
/// Copy/truncation constructor: keeps the first <paramref name="new_n"/>
/// fingerprints of <paramref name="inputDB"/>, each optionally truncated to
/// its first <paramref name="new_K"/> symbols.
/// </summary>
/// <param name="inputDB">Source transformation to copy from.</param>
/// <param name="new_n">Number of fingerprints to keep.</param>
/// <param name="new_K">New fingerprint length; &lt;= 0 copies sequences unmodified.</param>
/// <exception cref="ArgumentOutOfRangeException">If new_K exceeds the source fingerprint length.</exception>
public KnrFP(KnrFP inputDB, int new_n, int new_K = -1)
{
    // NOTE(review): K keeps the raw new_K value even in the copy-unmodified
    // path (new_K <= 0), so it may be -1 while the sequences keep their
    // original length — confirm callers expect that.
    this.K = new_K;
    this.Fingerprints = new StringSpace<int> ();
    this.Fingerprints.seqs.Capacity = new_n;
    this.IdxRefs = inputDB.IdxRefs;
    if (new_K <= 0) {
        // Share the source sequences unmodified.
        for (int i = 0; i < new_n; ++i) {
            this.Fingerprints.Add (inputDB.Fingerprints.seqs [i]);
        }
    } else {
        if (new_K > inputDB.Fingerprints.seqs [0].Length) {
            throw new ArgumentOutOfRangeException("new_K > old_K need a complete re-construction of the transformation");
        }
        for (int i = 0; i < new_n; ++i) {
            var u = inputDB.Fingerprints.seqs [i];
            var v = new int[new_K];
            // Array.Copy replaces the original element-by-element loop.
            Array.Copy (u, v, new_K);
            this.Fingerprints.Add (v);
        }
    }
}
/// <summary>
/// Builds a FLANN index over colored 3D model points. Each point becomes a
/// 6-float row: x, y, z, then R/G/B scaled by colorScale/255.
/// </summary>
/// <param name="modelPoints">Model points paired with their colors.</param>
/// <param name="indexParams">FLANN index construction parameters.</param>
/// <param name="searchParams">FLANN search parameters.</param>
/// <param name="colorScale">Weight applied to the color channels relative to geometry.</param>
public FlannColoredModelPoints(List<Tuple<CvPoint3D64f, CvColor>> modelPoints, IndexParams indexParams, SearchParams searchParams, double colorScale)
{
    _modelPoints = modelPoints;
    // One row per point, 6 single-precision channels (xyz + scaled rgb).
    _modelMat = new CvMat(_modelPoints.Count, 6, MatrixType.F32C1);
    unsafe
    {
        // Fill the matrix by walking its raw float buffer row by row; the
        // write order below defines the column layout used at query time.
        float* modelArr = _modelMat.DataSingle;
        foreach (var tuple in _modelPoints)
        {
            *(modelArr++) = (float)tuple.Item1.X;
            *(modelArr++) = (float)tuple.Item1.Y;
            *(modelArr++) = (float)tuple.Item1.Z;
            *(modelArr++) = (float)(tuple.Item2.R * colorScale / 255);
            *(modelArr++) = (float)(tuple.Item2.G * colorScale / 255);
            *(modelArr++) = (float)(tuple.Item2.B * colorScale / 255);
        }
    }
    _colorScale = colorScale;
    _modelDataMat = new Mat(_modelMat);
    _indexParam = indexParams;
    _searchParam = searchParams;
    // The params are owned by this object from here on; disable their own
    // dispose so the native handles are not freed out from under the index.
    _indexParam.IsEnabledDispose = false;
    _searchParam.IsEnabledDispose = false;
    _flannIndex = new Index(_modelDataMat, _indexParam);
}
/// <summary>
/// An index constructed with the allow-duplicates flag accepts repeated
/// identical key/value pairs.
/// </summary>
public void CanAddDuplicatesIfInitializedWithAllowDuplicateFlag()
{
    // Arrange
    SUT = new Index<string, double>(true);

    // Act
    SUT.Add("hello", 40);
    SUT.Add("hello", 40);

    // Assert
    Assert.AreEqual(2, SUT["hello"].Count);
}
/// <summary>
/// Retrieves the value itself or the value of one of its items, depending on the given <see cref="Index"/>.
/// </summary>
/// <param name="value">The value on which this method applies.</param>
/// <param name="index">The index of the item to retrieve. If <see cref="Index.Empty"/> is passed, this method will return the value itself.</param>
/// <param name="descriptor">The descriptor of the type of <paramref name="value"/>.</param>
/// <returns>The value itself or the value of one of its items.</returns>
public static object Retrieve(object value, Index index, ITypeDescriptor descriptor)
{
    // An empty index means "the value itself".
    if (index.IsEmpty)
        return value;

    if (value == null)
        throw new ArgumentNullException(nameof(value));

    var asCollection = descriptor as CollectionDescriptor;
    if (asCollection != null)
        return asCollection.GetValue(value, index.Int);

    var asDictionary = descriptor as DictionaryDescriptor;
    if (asDictionary != null)
        return asDictionary.GetValue(value, index.Value);

    // The given descriptor may be for a base type; retry once with the
    // descriptor of the value's concrete type.
    var concreteDescriptor = TypeDescriptorFactory.Default.Find(value.GetType());
    if (concreteDescriptor != descriptor)
        return Retrieve(value, index, concreteDescriptor);

    throw new NotSupportedException("Unable to retrieve the value at the given index, this collection is unsupported");
}
/// <summary>
/// Validates the optional precision field and builds the Hash or Range index
/// chosen in the dialog, then closes with OK. Invalid precision keeps the
/// dialog open.
/// </summary>
private void btnSave_Click(object sender, EventArgs e)
{
    short? precision = null;
    var precisionText = this.tbPrecision.Text;
    if (!string.IsNullOrEmpty(precisionText))
    {
        short parsed;
        if (!short.TryParse(precisionText, out parsed))
        {
            // Keep the dialog open so the user can correct the value.
            MessageBox.Show("Please enter a valid precision value.");
            this.DialogResult = DialogResult.None;
            return;
        }
        precision = parsed;
    }

    var dataType = this.rbNumber.Checked ? DataType.Number : DataType.String;
    if (this.rbHash.Checked)
    {
        this.index = new HashIndex(dataType) { Precision = precision };
    }
    else
    {
        this.index = new RangeIndex(dataType) { Precision = precision };
    }
    this.DialogResult = DialogResult.OK;
}
/// <summary>
/// Searches users and computers matching <paramref name="query"/> and streams
/// each result pairing through <paramref name="Target"/>.
/// </summary>
/// <param name="query">Search text.</param>
/// <param name="Target">Callback invoked once per response.</param>
public void SearchAll(string query, Index.Target Target)
{
    // Users matching the query, each paired with computers that mention them.
    List<UserObject> Users = SearchUsers(query);
    foreach (UserObject item in Users)
    {
        List<ComputerObject> CompsUser = SearchComputer(item.Name, "description");
        Response Rs = new Response(item, CompsUser);
        Target(Rs);
    }

    // FIX: the regex was re-compiled on every loop iteration although the
    // pattern is constant; build it once before the loop.
    // Pattern: three or more consecutive capitalized Cyrillic words — i.e. a
    // full personal name embedded in the computer description.
    string pattern = @"([А-ЯЁ][а-яё]+[\-\s]?){3,}";
    Regex regex = new Regex(pattern);

    // Computers matching by name: extract the owner's name from the
    // description and pair the machine with matching users.
    List<ComputerObject> Comps = SearchComputer(query, "name");
    foreach (ComputerObject item in Comps)
    {
        Match match = regex.Match(item.Description);
        if (match.Success)
        {
            List<UserObject> UsersComp = SearchUsers(match.Value.Trim());
            Response Rs = new Response(item, UsersComp);
            Target(Rs);
        }
    }
}
/// <summary>
/// Factory for a left-top shield block: picks the sprite variant for the given
/// index, inserts the block into the scene graph, and attaches it to the
/// render and collision batches.
/// </summary>
public static ShieldBlock create(ShieldBlock.LeftTopBlock type, Index index, float x, float y)
{
    // Select the sprite variant matching the index (falls back to variant 3).
    SpriteEnum spriteName;
    if (index.Equals(Index.Index_0))
    {
        spriteName = SpriteEnum.LeftTop_0;
    }
    else if (index.Equals(Index.Index_1))
    {
        spriteName = SpriteEnum.LeftTop_1;
    }
    else if (index.Equals(Index.Index_2))
    {
        spriteName = SpriteEnum.LeftTop_2;
    }
    else
    {
        spriteName = SpriteEnum.LeftTop_3;
    }

    ShieldBlock block = new ShieldBlock(GameObject.Name.Shields, index, spriteName, x, y);
    GameObjectManager.insert(block, Instance.root);
    Instance.batch.attach(block.Spr);
    SpriteBatchManager.attachToGroup(block.ColObj.Spr, BatchGroup.BatchType.Collisions);
    return block;
}
/// <summary>
/// Loads the given index into the dialog controls: index kind, data type, and
/// precision (blank when the index has none).
/// </summary>
public void SetIndex(Index index)
{
    this.index = index;
    if (index.Kind == IndexKind.Hash)
    {
        // Cast once instead of repeatedly on each property access.
        var hashIndex = (HashIndex)index;
        this.rbHash.Checked = true;
        if (hashIndex.DataType == DataType.Number)
        {
            this.rbNumber.Checked = true;
        }
        else
        {
            this.rbString.Checked = true;
        }
        this.tbPrecision.Text = hashIndex.Precision.HasValue
            ? hashIndex.Precision.Value.ToString(CultureInfo.InvariantCulture)
            : string.Empty;
    }
    else
    {
        var rangeIndex = (RangeIndex)index;
        this.rbRange.Checked = true;
        if (rangeIndex.DataType == DataType.Number)
        {
            this.rbNumber.Checked = true;
        }
        else
        {
            this.rbString.Checked = true;
        }
        this.tbPrecision.Text = rangeIndex.Precision.HasValue
            ? rangeIndex.Precision.Value.ToString(CultureInfo.InvariantCulture)
            : string.Empty;
    }
}
/// <summary>
/// Adds a new item to the collection held by <paramref name="content"/> at
/// <paramref name="index"/>, resolving the item from <paramref name="parameter"/>
/// or building a default instance of the element type.
/// </summary>
protected override void ExecuteSync(IContent content, Index index, object parameter)
{
    var value = content.Retrieve(index);
    var collectionDescriptor = (CollectionDescriptor)TypeDescriptorFactory.Default.Find(value.GetType());
    var elementType = collectionDescriptor.ElementType;

    object itemToAdd = null;
    if (CanAddNull(elementType) || IsReferenceType(elementType))
    {
        // If the parameter is a type instead of an instance, try to construct an instance of this type
        var type = parameter as Type;
        if (type?.GetConstructor(Type.EmptyTypes) != null)
        {
            itemToAdd = ObjectFactoryRegistry.NewInstance(type);
        }
    }
    else if (elementType == typeof(string))
    {
        itemToAdd = parameter ?? "";
    }
    else
    {
        itemToAdd = parameter ?? ObjectFactoryRegistry.NewInstance(elementType);
    }

    if (index.IsEmpty)
    {
        content.Add(itemToAdd);
    }
    else
    {
        // Handle collections in collections
        // TODO: this is not working on the observable node side
        var collectionNode = content.Reference.AsEnumerable[index].TargetNode;
        collectionNode.Content.Add(itemToAdd);
    }
}
/// <summary>
/// Creates the Algolia client and test index from environment-provided
/// credentials (never hard-coded in source).
/// </summary>
public void TestInitialize()
{
    var apiKey = Environment.GetEnvironmentVariable("ALGOLIA_API_KEY");
    var applicationId = Environment.GetEnvironmentVariable("ALGOLIA_APPLICATION_ID");
    _testApiKey = apiKey;
    _testApplicationID = applicationId;
    _client = new AlgoliaClient(applicationId, apiKey);
    _index = _client.InitIndex(safe_name("àlgol?à-csharp"));
}
/// <summary>
/// Binds this proxy to a concrete sprite, mirroring its name and index.
/// The sprite must not be null.
/// </summary>
public void set(BaseSprite spr)
{
    Debug.Assert(spr != null);
    this.sprite = spr;
    this.name = (SpriteEnum)spr.getName();
    this.index = spr.getIndex();
}
/// <summary>
/// Builds the field-value clause for this query and combines it with the base
/// query when one exists.
/// </summary>
/// <param name="condition">The occurrence condition.</param>
/// <param name="index">The index being queried.</param>
/// <returns>The combined <see cref="BooleanQuery"/> (or the field clause alone).</returns>
public override Query ProcessQuery(QueryOccurance condition, Index index)
{
    Assert.ArgumentNotNull(index, "Index");
    var baseQuery = base.ProcessQuery(condition, index);
    var translator = new QueryTranslator(index);
    Assert.IsNotNull(translator, "translator");

    // Partial matches use a prefix/substring clause; otherwise exact match.
    var fieldQuery = this.Partial
        ? QueryBuilder.BuildPartialFieldValueClause(index, this.FieldName, this.FieldValue)
        : QueryBuilder.BuildExactFieldValueClause(index, this.FieldName, this.FieldValue);

    if (baseQuery == null)
    {
        return fieldQuery;
    }

    var booleanQuery = baseQuery as BooleanQuery;
    if (booleanQuery != null)
    {
        booleanQuery.Add(fieldQuery, translator.GetOccur(condition));
    }
    // NOTE(review): when baseQuery is non-null but not a BooleanQuery the
    // field clause is silently dropped — confirm that is intended.
    return baseQuery;
}
/// <summary>
/// Initializes a new instance of the <see cref="ObservableModelNode"/> class.
/// </summary>
/// <param name="ownerViewModel">The <see cref="ObservableViewModel"/> that owns the new <see cref="ObservableModelNode"/>.</param>
/// <param name="baseName">The base name of this node. Can be null if <see cref="index"/> is not. If so a name will be automatically generated from the index.</param>
/// <param name="isPrimitive">Indicate whether this node should be considered as a primitive node.</param>
/// <param name="sourceNode">The model node bound to the new <see cref="ObservableModelNode"/>.</param>
/// <param name="graphNodePath">The <see cref="GraphNodePath"/> corresponding to the given <see cref="sourceNode"/>.</param>
/// <param name="index">The index of this content in the model node, when this node represent an item of a collection. <see cref="Index.Empty"/> must be passed otherwise</param>
/// <exception cref="ArgumentNullException">If <paramref name="sourceNode"/> is null.</exception>
/// <exception cref="ArgumentException">If both <paramref name="baseName"/> and <paramref name="index"/> are null.</exception>
protected ObservableModelNode(ObservableViewModel ownerViewModel, string baseName, bool isPrimitive, IGraphNode sourceNode, GraphNodePath graphNodePath, Index index)
    : base(ownerViewModel, baseName, index)
{
    if (sourceNode == null) throw new ArgumentNullException(nameof(sourceNode));
    // NOTE(review): `index == null` presumably relies on Index defining an
    // equality operator against null — confirm Index's semantics here.
    if (baseName == null && index == null) throw new ArgumentException("baseName and index can't be both null.");
    this.isPrimitive = isPrimitive;
    SourceNode = sourceNode;
    // By default we will always combine items of list of primitive items.
    CombineMode = !index.IsEmpty && isPrimitive ? CombineMode.AlwaysCombine : CombineMode.CombineOnlyForAll;
    SourceNodePath = graphNodePath;

    // Override display name if available
    var memberDescriptor = GetMemberDescriptor() as MemberDescriptorBase;
    if (memberDescriptor != null)
    {
        // Only non-item nodes (empty index) take their display name and
        // read-only flag from the member's metadata.
        if (index.IsEmpty)
        {
            var displayAttribute = TypeDescriptorFactory.Default.AttributeRegistry.GetAttribute<DisplayAttribute>(memberDescriptor.MemberInfo);
            if (!string.IsNullOrEmpty(displayAttribute?.Name))
            {
                DisplayName = displayAttribute.Name;
            }
            IsReadOnly = !memberDescriptor.HasSet;
        }
    }
}
/// <summary>
/// Rebinds this node to the named game sprite and stores the image that will
/// be swapped in.
/// </summary>
public void set(SpriteEnum sName, ImageEnum iName, Index index = Index.Index_Null)
{
    this.index = index;
    this.name = sName;
    this.sprite = GameSpriteManager.find(sName, index);
    this.imgToSwap = ImageManager.find(iName);
}
/// <summary>
/// Builds the KNR inverted index: computes each object's K nearest references
/// in parallel, then inverts the lists so each reference maps to the objects
/// that cite it.
/// </summary>
/// <param name="db">Database to index.</param>
/// <param name="k">Number of nearest references per object.</param>
/// <param name="ref_index">Index over the reference set.</param>
public void Build(MetricDB db, int k, Index ref_index)
{
    this.DB = db;
    this.K = k;
    this.R = ref_index;
    int sigma = this.R.DB.Count;
    this.INVINDEX = new List<List<int>> (sigma);
    for (int i = 0; i < sigma; ++i) {
        this.INVINDEX.Add(new List<int>());
    }
    var A = new int[this.DB.Count][];
    int count = 0;
    var compute_one = new Action<int>(delegate(int objID) {
        var u = this.GetKnr(this.DB[objID], this.K);
        A[objID] = u;
        // FIX: the delegate runs concurrently; a plain ++count races and
        // loses updates. Increment atomically and use the returned value.
        var c = System.Threading.Interlocked.Increment(ref count);
        if (c % 1000 == 0) {
            Console.WriteLine ("==== {0}/{1} db: {2}, k: {3}", c, this.DB.Count, this.DB.Name, k);
        }
    });
    ParallelOptions ops = new ParallelOptions();
    ops.MaxDegreeOfParallelism = -1;
    Parallel.ForEach(new ListGen<int>((int i) => i, this.DB.Count), ops, compute_one);
    // Sequential inversion: safe because A is fully populated by now.
    for (int objID = 0; objID < this.DB.Count; ++objID) {
        var u = A[objID];
        for (int i = 0; i < this.K; ++i) {
            this.INVINDEX[u[i]].Add (objID);
        }
    }
}
/// <summary>
/// Adds an "IX"-named index for every foreign key of the mapped tables
/// selected by <paramref name="includeTablePredicate"/>. Foreign keys whose
/// column set equals the table's primary key are skipped (already indexed).
/// </summary>
public static void CreateIndexesForForeignKeys(this Configuration configuration, Func<string, bool> includeTablePredicate)
{
    configuration.BuildMappings();
    var tables = (ICollection<Table>) tableMappingsProperty.GetValue(configuration, null);
    foreach (var table in tables)
    {
        if (!includeTablePredicate(table.Name))
        {
            continue;
        }

        var pkColumns = table.HasPrimaryKey
            ? table.PrimaryKey.Columns.Select(c => c.Name).ToArray()
            : new string[0];

        foreach (var foreignKey in table.ForeignKeyIterator)
        {
            if (table.HasPrimaryKey)
            {
                var fkColumns = foreignKey.Columns.Select(c => c.Name).ToArray();
                // Empty symmetric difference => FK columns == PK columns.
                var matchesPrimaryKey = !pkColumns.Except(fkColumns).Concat(fkColumns.Except(pkColumns)).Any();
                if (matchesPrimaryKey)
                {
                    continue;
                }
            }

            var idx = new Index();
            idx.AddColumns(foreignKey.Columns);
            // Derive "IX..." from the conventional "FK..." foreign key name.
            idx.Name = "IX" + foreignKey.Name.Substring(2);
            idx.Table = table;
            table.AddIndex(idx);
        }
    }
}
/// <summary>
/// Constructs an animated sprite in its uninitialized sentinel state; it must
/// be bound via set() before use.
/// </summary>
public AnimatedSprite()
{
    this.name = SpriteEnum.Not_Initialized;
    this.index = Index.Index_Null;
    this.sprite = null;
    this.imgToSwap = null;
}
// Return a dynamically allocated KeyInfo structure that can be used with OP_OpenRead or OP_OpenWrite to access database index pIdx.
//
// If successful, a pointer to the new structure is returned. In this case the caller is responsible for calling sqlite3DbFree(db, ) on the returned
// pointer. If an error occurs (out of memory or missing collation sequence), NULL is returned and the state of pParse updated to reflect
// the error.
internal static KeyInfo sqlite3IndexKeyinfo(Parse pParse, Index pIdx)
{
    var nCol = pIdx.nColumn;
    var db = pParse.db;
    // FIX: the original C checked the malloc result; in C# `new` never
    // returns null, so the `if (pKey != null)` guard was dead code.
    var pKey = new KeyInfo();
    pKey.db = pParse.db;
    pKey.aSortOrder = new byte[nCol];
    pKey.aColl = new CollSeq[nCol];
    // One collation sequence and sort order per indexed column. Locating a
    // missing collation records the error on pParse (checked below).
    for (var i = 0; i < nCol; i++)
    {
        var zColl = pIdx.azColl[i];
        Debug.Assert(zColl != null);
        pKey.aColl[i] = sqlite3LocateCollSeq(pParse, zColl);
        pKey.aSortOrder[i] = pIdx.aSortOrder[i];
    }
    pKey.nField = (ushort)nCol;
    if (pParse.nErr != 0)
    {
        // On error return null; mirror the C port's cleanup call.
        pKey = null;
        sqlite3DbFree(db, ref pKey);
    }
    return pKey;
}
/// <summary>
/// A grid cell node with fixed horizontal/vertical step deltas; starts in
/// horizontal-movement mode.
/// </summary>
public GridNode(Azul.Color colColor, GameObject.Name name, Index index = Index.Index_Null)
    : base(colColor, name, index)
{
    this.moveVert = false;
    this.deltaX = 10.0f;
    this.deltaY = -30.0f;
}
/// <summary>
///     This API supports the Entity Framework Core infrastructure and is not intended to be used
///     directly from your code. This API may change or be removed in future releases.
/// </summary>
public virtual void OnIndexRemoved([NotNull] InternalEntityTypeBuilder entityTypeBuilder, [NotNull] Index index)
{
    Check.NotNull(entityTypeBuilder, nameof(entityTypeBuilder));
    Check.NotNull(index, nameof(index));
    _scope.OnIndexRemoved(entityTypeBuilder, index);
}
// Starts (fire-and-forget) the block-synchronization loop: walks the chain
// from the last indexed height, maintains the bech32 UTXO set and its undo
// history, and emits one Golomb-Rice filter line per block.
public void Synchronize()
{
    // _running: 1 = running; 3 = stopped (set in the finally below).
    Interlocked.Exchange(ref _running, 1);
    Task.Run(async() =>
    {
        try
        {
            var blockCount = await RpcClient.GetBlockCountAsync();
            var isIIB = true; // Initial Index Building phase
            while (IsRunning)
            {
                try
                {
                    // If stop was requested return.
                    if (IsRunning == false)
                    {
                        return;
                    }

                    // Resume from the block after the last indexed one, if any.
                    Height height = StartingHeight;
                    uint256 prevHash = null;
                    using (await IndexLock.LockAsync())
                    {
                        if (Index.Count != 0)
                        {
                            var lastIndex = Index.Last();
                            height = lastIndex.BlockHeight + 1;
                            prevHash = lastIndex.BlockHash;
                        }
                    }

                    // Within 100 blocks of the tip we leave the IIB phase and
                    // start maintaining reorg-rollback history.
                    if (blockCount - height <= 100)
                    {
                        isIIB = false;
                    }

                    Block block = null;
                    try
                    {
                        block = await RpcClient.GetBlockAsync(height);
                    }
                    catch (RPCException) // if the block didn't come yet
                    {
                        await Task.Delay(1000);
                        continue;
                    }

                    // Near the tip, notify subscribers of each new block.
                    if (blockCount - height <= 2)
                    {
                        NewBlock?.Invoke(this, block);
                    }

                    if (!(prevHash is null))
                    {
                        // In case of reorg:
                        if (prevHash != block.Header.HashPrevBlock && !isIIB) // There is no reorg in IIB
                        {
                            Logger.LogInfo<IndexBuilderService>($"REORG Invalid Block: {prevHash}");
                            // 1. Rollback index
                            using (await IndexLock.LockAsync())
                            {
                                Index.RemoveLast();
                            }

                            // 2. Serialize Index. (Remove last line.)
                            var lines = File.ReadAllLines(IndexFilePath);
                            File.WriteAllLines(IndexFilePath, lines.Take(lines.Length - 1).ToArray());

                            // 3. Rollback Bech32UtxoSet
                            if (Bech32UtxoSetHistory.Count != 0)
                            {
                                Bech32UtxoSetHistory.Last().Rollback(Bech32UtxoSet); // The Bech32UtxoSet MUST be recovered to its previous state.
                                Bech32UtxoSetHistory.RemoveLast();

                                // 4. Serialize Bech32UtxoSet.
                                await File.WriteAllLinesAsync(Bech32UtxoSetFilePath, Bech32UtxoSet
                                    .Select(entry => entry.Key.Hash + ":" + entry.Key.N + ":" + ByteHelpers.ToHex(entry.Value.ToCompressedBytes())));
                            }

                            // 5. Skip the current block.
                            continue;
                        }
                    }

                    // Outside IIB, keep a bounded (100-deep) undo history so a
                    // reorg of up to 100 blocks can be rolled back above.
                    if (!isIIB)
                    {
                        if (Bech32UtxoSetHistory.Count >= 100)
                        {
                            Bech32UtxoSetHistory.RemoveFirst();
                        }
                        Bech32UtxoSetHistory.Add(new ActionHistoryHelper());
                    }

                    var scripts = new HashSet <Script>();
                    foreach (var tx in block.Transactions)
                    {
                        // If stop was requested return.
                        // Because this tx iteration can take even minutes
                        // It doesn't need to be accessed with a thread safe fasion with Interlocked through IsRunning, this may have some performance benefit
                        if (_running != 1)
                        {
                            return;
                        }

                        // Track newly created native-segwit outputs.
                        for (int i = 0; i < tx.Outputs.Count; i++)
                        {
                            var output = tx.Outputs[i];
                            if (!output.ScriptPubKey.IsPayToScriptHash && output.ScriptPubKey.IsWitness)
                            {
                                var outpoint = new OutPoint(tx.GetHash(), i);
                                Bech32UtxoSet.Add(outpoint, output.ScriptPubKey);
                                if (!isIIB)
                                {
                                    Bech32UtxoSetHistory.Last().StoreAction(ActionHistoryHelper.Operation.Add, outpoint, output.ScriptPubKey);
                                }
                                scripts.Add(output.ScriptPubKey);
                            }
                        }

                        // Remove tracked outputs spent by this transaction.
                        foreach (var input in tx.Inputs)
                        {
                            OutPoint prevOut = input.PrevOut;
                            if (Bech32UtxoSet.TryGetValue(prevOut, out Script foundScript))
                            {
                                Bech32UtxoSet.Remove(prevOut);
                                if (!isIIB)
                                {
                                    Bech32UtxoSetHistory.Last().StoreAction(ActionHistoryHelper.Operation.Remove, prevOut, foundScript);
                                }
                                scripts.Add(foundScript);
                            }
                        }
                    }

                    // Build the filter over every script touched in this block;
                    // a block touching no tracked scripts gets a null filter.
                    GolombRiceFilter filter = null;
                    if (scripts.Count != 0)
                    {
                        filter = new GolombRiceFilterBuilder()
                            .SetKey(block.GetHash())
                            .SetP(20)
                            .SetM(1 << 20)
                            .AddEntries(scripts.Select(x => x.ToCompressedBytes()))
                            .Build();
                    }

                    var filterModel = new FilterModel
                    {
                        BlockHash = block.GetHash(),
                        BlockHeight = height,
                        Filter = filter
                    };

                    // Persist the filter line, then publish it in memory.
                    await File.AppendAllLinesAsync(IndexFilePath, new[] { filterModel.ToLine() });
                    using (await IndexLock.LockAsync())
                    {
                        Index.Add(filterModel);
                    }

                    // Rewrite the full UTXO-set snapshot for this height.
                    if (File.Exists(Bech32UtxoSetFilePath))
                    {
                        File.Delete(Bech32UtxoSetFilePath);
                    }
                    await File.WriteAllLinesAsync(Bech32UtxoSetFilePath, Bech32UtxoSet
                        .Select(entry => entry.Key.Hash + ":" + entry.Key.N + ":" + ByteHelpers.ToHex(entry.Value.ToCompressedBytes())));

                    // If not close to the tip, just log debug.
                    // Use height.Value instead of simply height, because it cannot be negative height.
                    if (blockCount - height.Value <= 3 || height % 100 == 0)
                    {
                        Logger.LogInfo <IndexBuilderService>($"Created filter for block: {height}.");
                    }
                    else
                    {
                        Logger.LogDebug <IndexBuilderService>($"Created filter for block: {height}.");
                    }
                }
                catch (Exception ex)
                {
                    // Per-block failures are logged and the loop retries.
                    Logger.LogDebug <IndexBuilderService>(ex);
                }
            }
        }
        finally
        {
            if (IsStopping)
            {
                Interlocked.Exchange(ref _running, 3);
            }
        }
    });
}
/// <summary>
/// True while the given button is held on the given controller.
/// </summary>
public static bool GetButton(Button button, Index controlIndex)
{
    return Input.GetKey(GetKeycode(button, controlIndex));
}
// Maps a logical gamepad button + controller index to the corresponding Unity
// KeyCode. Each controller has its own KeyCode block; Index.Any maps to the
// controller-agnostic JoystickButton* codes. Unmapped combinations return
// KeyCode.None.
static KeyCode GetKeycode(Button button, Index controlIndex)
{
    switch (controlIndex)
    {
        // Controller 1
        case Index.One:
            switch (button)
            {
                case Button.A: return(KeyCode.Joystick1Button0);
                case Button.B: return(KeyCode.Joystick1Button1);
                case Button.X: return(KeyCode.Joystick1Button2);
                case Button.Y: return(KeyCode.Joystick1Button3);
                case Button.RightShoulder: return(KeyCode.Joystick1Button5);
                case Button.LeftShoulder: return(KeyCode.Joystick1Button4);
                case Button.Back: return(KeyCode.Joystick1Button6);
                case Button.Start: return(KeyCode.Joystick1Button7);
                case Button.LeftStick: return(KeyCode.Joystick1Button8);
                case Button.RightStick: return(KeyCode.Joystick1Button9);
            }
            break;
        // Controller 2
        case Index.Two:
            switch (button)
            {
                case Button.A: return(KeyCode.Joystick2Button0);
                case Button.B: return(KeyCode.Joystick2Button1);
                case Button.X: return(KeyCode.Joystick2Button2);
                case Button.Y: return(KeyCode.Joystick2Button3);
                case Button.RightShoulder: return(KeyCode.Joystick2Button5);
                case Button.LeftShoulder: return(KeyCode.Joystick2Button4);
                case Button.Back: return(KeyCode.Joystick2Button6);
                case Button.Start: return(KeyCode.Joystick2Button7);
                case Button.LeftStick: return(KeyCode.Joystick2Button8);
                case Button.RightStick: return(KeyCode.Joystick2Button9);
            }
            break;
        // Controller 3
        case Index.Three:
            switch (button)
            {
                case Button.A: return(KeyCode.Joystick3Button0);
                case Button.B: return(KeyCode.Joystick3Button1);
                case Button.X: return(KeyCode.Joystick3Button2);
                case Button.Y: return(KeyCode.Joystick3Button3);
                case Button.RightShoulder: return(KeyCode.Joystick3Button5);
                case Button.LeftShoulder: return(KeyCode.Joystick3Button4);
                case Button.Back: return(KeyCode.Joystick3Button6);
                case Button.Start: return(KeyCode.Joystick3Button7);
                case Button.LeftStick: return(KeyCode.Joystick3Button8);
                case Button.RightStick: return(KeyCode.Joystick3Button9);
            }
            break;
        // Controller 4
        case Index.Four:
            switch (button)
            {
                case Button.A: return(KeyCode.Joystick4Button0);
                case Button.B: return(KeyCode.Joystick4Button1);
                case Button.X: return(KeyCode.Joystick4Button2);
                case Button.Y: return(KeyCode.Joystick4Button3);
                case Button.RightShoulder: return(KeyCode.Joystick4Button5);
                case Button.LeftShoulder: return(KeyCode.Joystick4Button4);
                case Button.Back: return(KeyCode.Joystick4Button6);
                case Button.Start: return(KeyCode.Joystick4Button7);
                case Button.LeftStick: return(KeyCode.Joystick4Button8);
                case Button.RightStick: return(KeyCode.Joystick4Button9);
            }
            break;
        // Any controller (controller-agnostic codes)
        case Index.Any:
            switch (button)
            {
                case Button.A: return(KeyCode.JoystickButton0);
                case Button.B: return(KeyCode.JoystickButton1);
                case Button.X: return(KeyCode.JoystickButton2);
                case Button.Y: return(KeyCode.JoystickButton3);
                case Button.RightShoulder: return(KeyCode.JoystickButton5);
                case Button.LeftShoulder: return(KeyCode.JoystickButton4);
                case Button.Back: return(KeyCode.JoystickButton6);
                case Button.Start: return(KeyCode.JoystickButton7);
                case Button.LeftStick: return(KeyCode.JoystickButton8);
                case Button.RightStick: return(KeyCode.JoystickButton9);
            }
            break;
    }
    return(KeyCode.None);
}
// Chained constructor for subclasses that supply their own references storage;
// all arguments are forwarded to the base handler unchanged.
protected HandleDocumentReferences(Index index, Dictionary <string, HashSet <CollectionName> > referencedCollections, DocumentsStorage documentsStorage, IndexStorage indexStorage, IndexStorage.ReferencesBase referencesStorage, IndexingConfiguration configuration)
    : base(index, referencedCollections, documentsStorage, indexStorage, referencesStorage, configuration)
{
}
// Public convenience constructor: defaults the references storage to the
// index storage's document-references store.
public HandleDocumentReferences(Index index, Dictionary <string, HashSet <CollectionName> > referencedCollections, DocumentsStorage documentsStorage, IndexStorage indexStorage, IndexingConfiguration configuration)
    : this(index, referencedCollections, documentsStorage, indexStorage, indexStorage.ReferencesForDocuments, configuration)
{
}
// Returns the temp-storage size (in elements) that a radix sort on this
// accelerator requires for the given data length.
// NOTE(review): the result is a constant 2 regardless of dataLength —
// presumably two scratch counters/buckets; confirm against the radix-sort
// implementation before relying on it for other lengths.
public static Index ComputeRadixSortTempStorageSize(
    this Accelerator accelerator,
    Index dataLength) => 2;
// Recreates, in the DAO database, every relation listed in lstViewRelations:
// deletes all existing relations, then for each row builds a unique index on
// the left table and a relation (with its foreign field) between the two
// tables named in the row.
private void btnSaveRel_Click(object sender, EventArgs e)
{
    Relation myRel;
    Field myField, indexfield;
    Index myInd = null;
    TableDef mytdef;
    ListViewItem saveItem = null;
    try
    {
        // Delete all existing relations to avoid conflicts
        DeleteAllRelations(mydb);
        // Read listview and created all relations in DB
        foreach (ListViewItem tmp in lstViewRelations.Items)
        {
            // Save item in case to remove from list view in a catch exception
            saveItem = tmp;

            /////////////////////////////////////////////////////////////
            // Create an unique index to make the relation if is not a Primary Key Index
            /////////////////////////////////////
            mytdef = mydb.TableDefs[tmp.SubItems[0].Text];
            //Remove all non Primary Indexes
            DeleteIndexes(mytdef);
            myInd = mytdef.CreateIndex("IdxRel" + tmp.SubItems[0].Text + tmp.SubItems[2].Text);
            myInd.Primary = false;
            myInd.Unique = true;
            //////////////////////////////////
            //Created indexed field
            indexfield = myInd.CreateField(tmp.SubItems[1].Text);
            //Add Index field to index
            ((IndexFields)(myInd.Fields)).Append(indexfield);
            //Add Index to left table
            mytdef.Indexes.Append(myInd);

            /////////////////////////////////////////////////////////////
            // Create the Relation
            myRel = (Relation)mydb.CreateRelation(tmp.SubItems[0].Text + tmp.SubItems[2].Text, tmp.SubItems[0].Text, tmp.SubItems[2].Text);
            // Create the Relation's Field
            myField = myRel.CreateField(tmp.SubItems[1].Text);
            // Indicate the foreign field that has the relation
            myField.ForeignName = tmp.SubItems[3].Text;
            //Add field tu Relation
            myRel.Fields.Append(myField);
            //Add relation to DB
            mydb.Relations.Append(myRel);
        }
        MessageBox.Show("Relations created", "Relations", MessageBoxButtons.OK, MessageBoxIcon.Information);
    }
    catch (Exception ex)
    {
        // FIX: saveItem is null when the failure happens before the first
        // iteration (e.g. inside DeleteAllRelations); the old code then threw
        // a NullReferenceException from inside the catch, hiding the error.
        if (saveItem != null)
        {
            saveItem.Remove();
        }
        MessageBox.Show(ex.Message);
    }
}
// Runs the full problem-check sequence against the game files, streaming
// progress and results into the UI. async void is acceptable here only
// because this is a UI-driven entry point.
private async void RunChecks()
{
    var index = new Index(_gameDirectory);

    // Progress callback marshalled to the UI thread by Progress<T>.
    IProgress <(int current, int total)> progress = new Progress <(int current, int total)>((update) =>
    {
        ProgressBar.Value = (((float)update.current / (float)update.total) * 100);
        ProgressLabel.Content = $"{update.current} / {update.total}";
    });

    AddText($"{UIStrings.ProblemCheck_Initialize}\n\n", textColor);

    // 1. Index/dat count consistency; repair only if the index is not locked
    // by a running game client.
    AddText($"{UIStrings.ProblemCheck_IndexDat}\n", secondaryTextColor);
    if (await CheckIndexDatCounts())
    {
        AddText($"\n{UIStrings.ProblemCheck_ErrorsFound}\n", secondaryTextColor);
        if (!index.IsIndexLocked(XivDataFile._0A_Exd))
        {
            await FixIndexDatCounts();
            AddText($"{UIStrings.ProblemCheck_RepairComplete}\n", "Green");
            // Re-check to confirm the repair took effect.
            await CheckIndexDatCounts();
        }
        else
        {
            AddText($"\n{UIStrings.ProblemCheck_IndexLocked} \n", "Red");
        }
    }

    // 2. Index backups.
    AddText($"\n{UIStrings.ProblemCheck_IndexBackups}\n", secondaryTextColor);
    await CheckBackups();

    // 3. Dat file sizes.
    AddText($"\n{UIStrings.ProblemCheck_DatSize}\n", secondaryTextColor);
    await CheckDatSizes();

    // 4. Mod list (cancellable; cancellation is only logged).
    try
    {
        AddText($"\n{UIStrings.ProblemCheck_ModList}\n", secondaryTextColor);
        await CheckMods(progress);
    }
    catch (Exception ex)
    {
        Debug.WriteLine($"Loading Canceled\n\n{ex.Message}");
    }

    // 5. LoD settings (cancellable; cancellation is only logged).
    try
    {
        AddText($"\n{UIStrings.ProblemCheck_LoD}\n", secondaryTextColor);
        cfpTextBox.ScrollToEnd();
        await CheckLoD();
    }
    catch (Exception ex)
    {
        Debug.WriteLine($"Loading Canceled\n\n{ex.Message}");
    }

    ProgressBar.Value = 0;
    ProgressLabel.Content = UIStrings.Done;
}
/// <summary>
/// Opens the mod pack import wizard, or imports a mod pack silently.
/// </summary>
/// <param name="path">The path to the mod pack file (extension must contain "ttmp").</param>
/// <param name="modPackDirectory">The directory containing mod packs.</param>
/// <param name="silent">If true, the mod pack is imported without any user interaction instead of showing the wizard.</param>
/// <param name="messageInImport">If true, importer windows are centered on screen rather than owned by this window.</param>
/// <returns>The number of mods imported, or 0 on failure/cancellation.</returns>
private async Task<int> ImportModpack(DirectoryInfo path, DirectoryInfo modPackDirectory, bool silent = false, bool messageInImport = false)
{
    var importError = false;
    TextureView textureView = null;
    TextureViewModel textureViewModel = null;
    ModelView modelView = null;
    ModelViewModel modelViewModel = null;

    // Grab the texture/model view models (when both tabs exist) so importers can refresh them.
    if (TextureTabItem != null && ModelTabItem != null)
    {
        textureView = TextureTabItem.Content as TextureView;
        textureViewModel = textureView.DataContext as TextureViewModel;
        modelView = ModelTabItem.Content as ModelView;
        modelViewModel = modelView.DataContext as ModelViewModel;
    }

    // Only .ttmp-family mod packs are supported.
    if (!path.Extension.Contains("ttmp"))
    {
        FlexibleMessageBox.Show(string.Format(UIMessages.UnsupportedFileExtensionErrorMessage, path.Extension),
            UIMessages.UnsupportedFileExtensionErrorTitle, MessageBoxButtons.OK, MessageBoxIcon.Error);
        return (0);
    }

    try
    {
        var ttmp = new TTMP(modPackDirectory, XivStrings.TexTools);
        var ttmpData = await ttmp.GetModPackJsonData(path);

        // A "w" in the TTMP version marks a wizard-style mod pack.
        if (ttmpData.ModPackJson.TTMPVersion.Contains("w"))
        {
            var gameDirectory = new DirectoryInfo(Settings.Default.FFXIV_Directory);
            var index = new Index(gameDirectory);

            // Cannot write while the game holds the index files open.
            if (index.IsIndexLocked(XivDataFile._0A_Exd))
            {
                FlexibleMessageBox.Show(UIMessages.IndexLockedErrorMessage, UIMessages.IndexLockedErrorTitle,
                    MessageBoxButtons.OK, MessageBoxIcon.Error);
                return (0);
            }

            try
            {
                var importWizard = new ImportModPackWizard(ttmpData.ModPackJson, ttmpData.ImageDictionary,
                    path, textureViewModel, modelViewModel, messageInImport);
                if (messageInImport)
                {
                    importWizard.WindowStartupLocation = WindowStartupLocation.CenterScreen;
                }
                else
                {
                    importWizard.Owner = this;
                }
                var result = importWizard.ShowDialog();
                if (result == true)
                {
                    return (importWizard.TotalModsImported);
                }
            }
            catch
            {
                // Swallowed on purpose: flag the error so the outer handler's
                // legacy-import fallback is skipped and the error dialog is shown instead.
                importError = true;
            }
        }
        // An "s" in the TTMP version marks a simple mod pack.
        else if (ttmpData.ModPackJson.TTMPVersion.Contains("s"))
        {
            try
            {
                var simpleImport = new SimpleModPackImporter(path, ttmpData.ModPackJson, textureViewModel,
                    modelViewModel, silent, messageInImport);
                if (messageInImport)
                {
                    simpleImport.WindowStartupLocation = WindowStartupLocation.CenterScreen;
                }
                else
                {
                    simpleImport.Owner = this;
                }
                var result = simpleImport.ShowDialog();
                if (result == true)
                {
                    return (simpleImport.TotalModsImported);
                }
            }
            catch
            {
                importError = true;
            }
        }
    }
    catch (Exception ex)
    {
        if (!importError)
        {
            // JSON metadata could not be read (likely an old-format mod pack):
            // fall back to the simple importer with no ModPackJson.
            var simpleImport = new SimpleModPackImporter(path, null, textureViewModel,
                modelViewModel, silent, messageInImport);
            if (messageInImport)
            {
                simpleImport.WindowStartupLocation = WindowStartupLocation.CenterScreen;
            }
            else
            {
                simpleImport.Owner = this;
            }
            var result = simpleImport.ShowDialog();
            if (result == true)
            {
                return (simpleImport.TotalModsImported);
            }
        }
        else
        {
            FlexibleMessageBox.Show(string.Format(UIMessages.ModPackImportErrorMessage, path.FullName, ex.Message),
                UIMessages.ModPackImportErrorTitle, MessageBoxButtons.OK, MessageBoxIcon.Error);
            return (0);
        }
    }

    // Dialog cancelled or nothing imported.
    return (0);
}
/// <summary>
/// Azure Search demo: defines the "hotels" index schema, uploads/merges/deletes
/// a few sample documents, then runs a sorted query against the index.
/// </summary>
static void Main(string[] args)
{
    // Useful tool for working with search service: https://azsearch.azurewebsites.net/
    // NOTE(review): live service credentials are hard-coded in source. They should be
    // rotated and loaded from configuration or environment variables, not committed.
    string searchServiceName = "ebornorth";
    string indexname = "hotels";
    string adminApiKey = "AF04406350DA2FA54BD24674A1F8B411";
    string queryApiKey = "CEA041884F40DAC1F9B0D457067C9C7D";

    #region Create Schema
    Console.WriteLine("Defining the schema.....");
    // The admin key is required for schema and document management operations.
    SearchServiceClient serviceClient = new SearchServiceClient(searchServiceName, new SearchCredentials(adminApiKey));
    var definition = new Index()
    {
        Name = indexname,
        Fields = new[]
        {
            // Every entry needs a unique ID, just like a primary key. IsKey=true denotes this.
            new Field("hotelId", DataType.String) { IsKey = true, IsFilterable = true },
            new Field("baseRate", DataType.Double) { IsFilterable = true, IsSortable = true, IsFacetable = true },
            new Field("description", DataType.String) { IsSearchable = true },
            //new Field("description_fr", AnalyzerName.FrLucene), // needed for multi-language support
            new Field("hotelName", DataType.String) { IsSearchable = true, IsFilterable = true, IsSortable = true },
            new Field("category", DataType.String) { IsSearchable = true, IsFilterable = true, IsSortable = true, IsFacetable = true },
            new Field("tags", DataType.Collection(DataType.String)) { IsSearchable = true, IsFilterable = true, IsFacetable = true },
            new Field("parkingIncluded", DataType.Boolean) { IsFilterable = true, IsFacetable = true },
            new Field("smokingAllowed", DataType.Boolean) { IsFilterable = true, IsFacetable = true },
            new Field("lastRenovationDate", DataType.DateTimeOffset) { IsFilterable = true, IsSortable = true, IsFacetable = true },
            new Field("rating", DataType.Int32) { IsFilterable = true, IsSortable = true, IsFacetable = true },
            new Field("location", DataType.GeographyPoint) { IsFilterable = true, IsSortable = true }
        }
    };
    // Now create the index.
    // (Left commented out — presumably the index already exists on the service.)
    //serviceClient.Indexes.Create(definition);
    #endregion

    #region Upload data
    Console.WriteLine("Populating the index....");
    SearchIndexClient indexClient = serviceClient.Indexes.GetClient("hotels");

    // A single batch mixing uploads, a merge-or-upload, and a delete.
    var actions = new IndexAction<Hotel>[]
    {
        IndexAction.Upload(
            new Hotel
            {
                HotelId = "1",
                BaseRate = 199.0,
                Description = "Best hotel in town",
                //DescriptionFr = "Meilleur hôtel en ville",
                HotelName = "Fancy Stay",
                Category = "Luxury",
                Tags = new[] { "pool", "view", "wifi", "concierge" },
                ParkingIncluded = false,
                SmokingAllowed = false,
                LastRenovationDate = new DateTimeOffset(2010, 6, 27, 0, 0, 0, TimeSpan.Zero),
                Rating = 5,
                Location = GeographyPoint.Create(47.678581, -122.131577)
            }),
        IndexAction.Upload(
            new Hotel()
            {
                HotelId = "2",
                BaseRate = 79.99,
                Description = "Cheapest hotel in town",
                //DescriptionFr = "Hôtel le moins cher en ville",
                HotelName = "Roach Motel",
                Category = "Budget",
                Tags = new[] { "motel", "budget" },
                ParkingIncluded = true,
                SmokingAllowed = true,
                LastRenovationDate = new DateTimeOffset(1982, 4, 28, 0, 0, 0, TimeSpan.Zero),
                Rating = 1,
                Location = GeographyPoint.Create(49.678581, -122.131577)
            }),
        IndexAction.MergeOrUpload(
            new Hotel()
            {
                HotelId = "3",
                BaseRate = 129.99,
                Description = "Close to town hall and the river"
            }),
        IndexAction.Delete(new Hotel() { HotelId = "6" })
    };

    var batch = IndexBatch.New(actions);
    try
    {
        indexClient.Documents.Index(batch);
    }
    catch (IndexBatchException e)
    {
        // Sometimes when your Search service is under load, indexing will fail for some of the documents in
        // the batch. Depending on your application, you can take compensating actions like delaying and
        // retrying. For this simple demo, we just log the failed document keys and continue.
        Console.WriteLine(
            "Failed to index some of the documents: {0}",
            String.Join(", ", e.IndexingResults.Where(r => !r.Succeeded).Select(r => r.Key)));
    }

    Console.WriteLine("Waiting for documents to be indexed...\n");
    // Indexing is asynchronous on the service side; a short sleep is enough for a demo.
    Thread.Sleep(2000);
    #endregion

    #region Perform a query
    // Queries use the read-only query key, not the admin key.
    SearchIndexClient indexqueryClient = new SearchIndexClient(searchServiceName, indexname, new SearchCredentials(queryApiKey));

    //Perform a query.
    // See https://azure.microsoft.com/en-gb/documentation/articles/search-query-dotnet/ for more examples
    Console.Write("Search the entire index, order by a specific field (lastRenovationDate) ");
    Console.Write("in descending order, take the top two results, and show only hotelName and ");
    Console.WriteLine("lastRenovationDate:\n");

    SearchParameters parameters;
    DocumentSearchResult<Hotel> results;
    parameters = new SearchParameters()
    {
        OrderBy = new[] { "lastRenovationDate desc" },
        Select = new[] { "hotelName", "lastRenovationDate" },
        Top = 2
    };
    // NOTE(review): this queries through indexClient (admin credentials);
    // indexqueryClient created above is unused — presumably intended here. Verify.
    results = indexClient.Documents.Search<Hotel>("*", parameters);
    WriteDocuments(results);
    #endregion

    Console.ReadKey();
}
/// <summary>
/// Perst storage test: fills string/int/compound indexes with pseudo-random
/// records, exports the database to XML, re-imports it into a fresh database
/// file, and verifies every record is reachable through all three indexes.
/// </summary>
static public void Main(System.String[] args)
{
    int i;
    Storage db = StorageFactory.Instance.CreateStorage();
    db.Open("test1.dbs", pagePoolSize);
    Root root = (Root)db.Root;
    // First run: create the root object and its three indexes (all unique).
    if (root == null)
    {
        root = new Root();
        root.strIndex = db.CreateIndex(typeof(System.String), true);
        root.intIndex = db.CreateFieldIndex(typeof(Record), "intKey", true);
        root.compoundIndex = db.CreateFieldIndex(typeof(Record), new String[] { "strKey", "intKey" }, true);
        db.Root = root;
    }
    FieldIndex intIndex = root.intIndex;
    FieldIndex compoundIndex = root.compoundIndex;
    Index strIndex = root.strIndex;
    DateTime start = DateTime.Now;
    // Deterministic LCG so the verification pass can regenerate the same key sequence.
    long key = 1999;
    for (i = 0; i < nRecords; i++)
    {
        Record rec = new Record();
        key = (3141592621L * key + 2718281829L) % 1000000007L;
        rec.intKey = key;
        rec.strKey = System.Convert.ToString(key);
        rec.realKey = (double)key;
        intIndex.Put(rec);
        strIndex.Put(new Key(rec.strKey), rec);
        compoundIndex.Put(rec);
    }
    db.Commit();
    System.Console.WriteLine("Elapsed time for inserting " + nRecords + " records: " + (DateTime.Now - start));

    // Export the populated database to XML.
    // (NOTE(review): the "for for" duplication below is in a runtime string; left as-is.)
    start = DateTime.Now;
    System.IO.StreamWriter writer = new System.IO.StreamWriter("test.xml");
    db.ExportXML(writer);
    writer.Close();
    System.Console.WriteLine("Elapsed time for for XML export: " + (DateTime.Now - start));
    db.Close();

    // Import the XML into a brand-new database file.
    db.Open("test2.dbs", pagePoolSize);
    start = DateTime.Now;
    System.IO.StreamReader reader = new System.IO.StreamReader("test.xml");
    db.ImportXML(reader);
    reader.Close();
    System.Console.WriteLine("Elapsed time for for XML import: " + (DateTime.Now - start));

    // Re-fetch the indexes from the imported root and verify every record.
    root = (Root)db.Root;
    intIndex = root.intIndex;
    strIndex = root.strIndex;
    compoundIndex = root.compoundIndex;
    start = DateTime.Now;
    key = 1999;
    for (i = 0; i < nRecords; i++)
    {
        // Regenerate the same key sequence used during insertion.
        key = (3141592621L * key + 2718281829L) % 1000000007L;
        String strKey = System.Convert.ToString(key);
        Record rec1 = (Record)intIndex.Get(new Key(key));
        Record rec2 = (Record)strIndex.Get(new Key(strKey));
        Record rec3 = (Record)compoundIndex.Get(new Key(strKey, key));
        // All three lookups must resolve to the same record with the original values.
        Debug.Assert(rec1 != null);
        Debug.Assert(rec1 == rec2);
        Debug.Assert(rec1 == rec3);
        Debug.Assert(rec1.intKey == key);
        Debug.Assert(rec1.realKey == (double)key);
        Debug.Assert(strKey.Equals(rec1.strKey));
    }
    System.Console.WriteLine("Elapsed time for performing " + nRecords * 2 + " index searches: " + (DateTime.Now - start));
    db.Close();
}
/// <summary>
/// Starts (or queues) a background synchronization pass that walks the
/// blockchain from the last indexed height, maintains the bech32 UTXO set,
/// and appends a Golomb-Rice filter per block to the index file.
/// _runner gates concurrent starts (at most one waiter); _running is the
/// lifecycle flag (0=not started, 1=running, 2=stopping, 3=stopped).
/// </summary>
public void Synchronize()
{
    Task.Run(async () =>
    {
        try
        {
            // At most one extra synchronization attempt may queue up behind the running one.
            if (Interlocked.Read(ref _runner) >= 2)
            {
                return;
            }
            Interlocked.Increment(ref _runner);
            // Wait until we are the only runner.
            while (Interlocked.Read(ref _runner) != 1)
            {
                await Task.Delay(100);
            }
            if (Interlocked.Read(ref _running) >= 2)
            {
                return;
            }
            try
            {
                Interlocked.Exchange(ref _running, 1);
                var isImmature = false; // The last 100 blocks are reorgable. (Assume it is mature at first.)
                SyncInfo syncInfo = null;
                while (IsRunning)
                {
                    try
                    {
                        // If we didn't yet initialize syncInfo, do so.
                        if (syncInfo is null)
                        {
                            syncInfo = await GetSyncInfoAsync();
                        }

                        Height heightToRequest = StartingHeight;
                        uint256 currentHash = null;
                        using (await IndexLock.LockAsync())
                        {
                            if (Index.Count != 0)
                            {
                                var lastIndex = Index.Last();
                                heightToRequest = lastIndex.BlockHeight + 1;
                                currentHash = lastIndex.BlockHash;
                            }
                        }

                        // If not synchronized, or 5+ minutes passed since the last update,
                        // refresh the blockchain info.
                        // BUGFIX: was (syncInfo.BlockchainInfoUpdated - DateTimeOffset.UtcNow),
                        // which is negative for a timestamp in the past, so the 5-minute
                        // refresh never triggered. Operands reversed to match the intent.
                        if (!syncInfo.IsCoreSynchornized || (DateTimeOffset.UtcNow - syncInfo.BlockchainInfoUpdated) > TimeSpan.FromMinutes(5))
                        {
                            syncInfo = await GetSyncInfoAsync();
                        }

                        if (syncInfo.BlockCount - heightToRequest <= 100)
                        {
                            // Both Wasabi and our Core node are in sync. Start doing stuff through P2P from now on.
                            if (syncInfo.IsCoreSynchornized && syncInfo.BlockCount == heightToRequest - 1)
                            {
                                // Refresh once more to make sure not to accidentally miss any notification.
                                syncInfo = await GetSyncInfoAsync();
                                if (syncInfo.IsCoreSynchornized && syncInfo.BlockCount == heightToRequest - 1)
                                {
                                    // Mark the process not-started, so it can be started again
                                    // and the finally block can mark it stopped.
                                    Interlocked.Exchange(ref _running, 0);
                                    return;
                                }
                            }
                            // Within the reorgable window: work with immature blocks from now on.
                            isImmature = true;
                        }

                        Block block = await RpcClient.GetBlockAsync(heightToRequest);

                        // Reorg check, except when requesting the starting height,
                        // because then "currentHash" wouldn't exist.
                        if (heightToRequest != StartingHeight && currentHash != block.Header.HashPrevBlock)
                        {
                            // Reorg can happen only when immature. (Otherwise it is a huge issue.)
                            if (isImmature)
                            {
                                await ReorgOneAsync();
                            }
                            else
                            {
                                Logger.LogCritical<IndexBuilderService>("This is something serious! Over 100 block reorg is noticed! We cannot handle that!");
                            }
                            // Skip the current block.
                            continue;
                        }

                        if (isImmature)
                        {
                            PrepareBech32UtxoSetHistory();
                        }

                        var scripts = new HashSet<Script>();
                        foreach (var tx in block.Transactions)
                        {
                            // If stop was requested, return. This tx iteration can take minutes.
                            // Plain read (not Interlocked) is a deliberate performance trade-off.
                            if (_running != 1)
                            {
                                return;
                            }

                            // Track new native-segwit (non-P2SH witness) outputs.
                            for (int i = 0; i < tx.Outputs.Count; i++)
                            {
                                var output = tx.Outputs[i];
                                if (!output.ScriptPubKey.IsPayToScriptHash && output.ScriptPubKey.IsWitness)
                                {
                                    var outpoint = new OutPoint(tx.GetHash(), i);
                                    Bech32UtxoSet.Add(outpoint, output.ScriptPubKey);
                                    if (isImmature)
                                    {
                                        Bech32UtxoSetHistory.Last().StoreAction(ActionHistoryHelper.Operation.Add, outpoint, output.ScriptPubKey);
                                    }
                                    scripts.Add(output.ScriptPubKey);
                                }
                            }

                            // Remove spent tracked outputs.
                            foreach (var input in tx.Inputs)
                            {
                                OutPoint prevOut = input.PrevOut;
                                if (Bech32UtxoSet.TryGetValue(prevOut, out Script foundScript))
                                {
                                    Bech32UtxoSet.Remove(prevOut);
                                    if (isImmature)
                                    {
                                        Bech32UtxoSetHistory.Last().StoreAction(ActionHistoryHelper.Operation.Remove, prevOut, foundScript);
                                    }
                                    scripts.Add(foundScript);
                                }
                            }
                        }

                        // Build the block's Golomb-Rice filter over the touched scripts (if any).
                        GolombRiceFilter filter = null;
                        if (scripts.Count != 0)
                        {
                            filter = new GolombRiceFilterBuilder()
                                .SetKey(block.GetHash())
                                .SetP(20)
                                .SetM(1 << 20)
                                .AddEntries(scripts.Select(x => x.ToCompressedBytes()))
                                .Build();
                        }

                        var filterModel = new FilterModel
                        {
                            BlockHash = block.GetHash(),
                            BlockHeight = heightToRequest,
                            Filter = filter
                        };

                        await File.AppendAllLinesAsync(IndexFilePath, new[] { filterModel.ToHeightlessLine() });
                        using (await IndexLock.LockAsync())
                        {
                            Index.Add(filterModel);
                        }

                        // Persist a fresh snapshot of the UTXO set.
                        if (File.Exists(Bech32UtxoSetFilePath))
                        {
                            File.Delete(Bech32UtxoSetFilePath);
                        }
                        await File.WriteAllLinesAsync(Bech32UtxoSetFilePath, Bech32UtxoSet
                            .Select(entry => entry.Key.Hash + ":" + entry.Key.N + ":" + ByteHelpers.ToHex(entry.Value.ToCompressedBytes())));

                        // If not close to the tip, just log debug. Use height.Value to
                        // avoid Height's non-negative arithmetic semantics.
                        if (syncInfo.BlockCount - heightToRequest.Value <= 3 || heightToRequest % 100 == 0)
                        {
                            Logger.LogInfo<IndexBuilderService>($"Created filter for block: {heightToRequest}.");
                        }
                        else
                        {
                            Logger.LogDebug<IndexBuilderService>($"Created filter for block: {heightToRequest}.");
                        }
                    }
                    catch (Exception ex)
                    {
                        // Transient failures (RPC hiccups etc.): log and retry the loop.
                        Logger.LogDebug<IndexBuilderService>(ex);
                    }
                }
            }
            finally
            {
                Interlocked.CompareExchange(ref _running, 3, 2); // If IsStopping, make it stopped.
                Interlocked.Decrement(ref _runner);
            }
        }
        catch (Exception ex)
        {
            Logger.LogError<IndexBuilderService>($"Synchronization attempt failed to start: {ex}");
        }
    });
}
/// <summary>
/// Loads all definitions of type T from the js5 SQLite cache: parses the
/// cache_index blob to discover record keys and per-record id counts, then
/// reads each cache blob and deserializes one definition per id.
/// Results are memoized into LoadedDefinitions/Records as a side effect.
/// </summary>
/// <returns>Map from actual definition id to the deserialized definition.</returns>
public Dictionary<int, T> LoadDefinitions<T>() where T : Definition
{
    Type type = typeof(T);
    Index index = CACHE_INDEXES[typeof(T)];
    int idNumBits = index.IdNumBits;
    int recordKey = index.RecordKey; // -1 means "all records", ids are composed with the record key.
    Dictionary<int, T> definitions = new Dictionary<int, T>();

    // Per-type memoization caches, created on first use.
    if (!LoadedDefinitions.TryGetValue(type, out Dictionary<int, object> loadedDefinitions))
    {
        loadedDefinitions = new Dictionary<int, object>();
        LoadedDefinitions[type] = loadedDefinitions;
    }
    if (!Records.TryGetValue(type, out Dictionary<int, Record> records))
    {
        records = new Dictionary<int, Record>();
        Records[type] = records;
    }

    using (SQLiteConnection connection = new SQLiteConnection("Data Source=" + CacheDirectory + "js5-" + index.FileId + ".jcache;Version=3;"))
    {
        connection.Open();
        SQLiteCommand command = connection.CreateCommand();
        command.CommandText = "SELECT DATA FROM cache_index";
        SQLiteDataReader reader = command.ExecuteReader();
        if (reader.Read())
        {
            using (MemoryStream stream = new MemoryStream(DecompressBlob((byte[])reader[0])))
            {
                // Skip the 5-byte header before the flags byte.
                stream.Position = 5;
                byte flags = ReadByte(stream);
                int[] recordKeys = DecodeIds(stream, ReadSmart(stream, 2, 4, false));
                int recordKeyCount = recordKeys.Length;
                long position = stream.Position;
                // Skip optional per-record sections whose presence is signalled by flag bits.
                if ((flags & FLAG_IDENTIFIERS) == FLAG_IDENTIFIERS)
                {
                    position += recordKeyCount * 4;
                }
                if ((flags & FLAG_UNKNOWN_2) == FLAG_UNKNOWN_2)
                {
                    position += recordKeyCount * 4;
                }
                if ((flags & FLAG_WHIRLPOOL) == FLAG_WHIRLPOOL)
                {
                    position += recordKeyCount * WHIRLPOOL_SIZE;
                }
                if ((flags & FLAG_UNKNOWN_1) == FLAG_UNKNOWN_1)
                {
                    position += recordKeyCount * 8;
                }
                // Skip a further 8 bytes per record (presumably CRC/version pairs — unverified).
                stream.Position = position + recordKeyCount * 8;
                // First pass: per-record id counts (stored as List capacity for the second pass).
                foreach (int key in recordKeys)
                {
                    records[key] = new Record(new List<int>(ReadSmart(stream, 2, 4, false)));
                }
                // Second pass: decode that many ids for each record.
                foreach (int key in recordKeys)
                {
                    List<int> ids = records[key].Ids;
                    foreach (int id in DecodeIds(stream, ids.Capacity))
                    {
                        ids.Add(id);
                    }
                }
            }
        }
        reader.Close();

        // recordKey is an int, so this concatenation cannot inject SQL.
        command.CommandText = "SELECT KEY, DATA FROM cache" + (recordKey == -1 ? "" : " WHERE KEY = " + recordKey);
        reader = command.ExecuteReader();
        while (reader.Read())
        {
            int currentRecordKey = reader.GetInt32(0);
            if (records.TryGetValue(currentRecordKey, out Record record))
            {
                byte[] blobBytes = DecompressBlob((byte[])reader[1]);
                record.Bytes = blobBytes;
                using (MemoryStream blobStream = new MemoryStream(blobBytes))
                {
                    // Blob layout: 1 leading byte, then an offset table of ints;
                    // each entry's end offset is the next entry's start offset.
                    blobStream.Position = 1;
                    int nextPosition = -1;
                    foreach (int id in record.Ids)
                    {
                        // With recordKey == -1, ids are namespaced by the record key in the high bits.
                        int actualId = recordKey == -1 ? (currentRecordKey << idNumBits) + id : id;
                        int startPosition = nextPosition == -1 ? ReadInt(blobStream) : nextPosition;
                        nextPosition = ReadInt(blobStream);
                        long prevPosition = blobStream.Position;
                        blobStream.Position = startPosition;
                        byte[] bytes = new byte[nextPosition - startPosition];
                        blobStream.Read(bytes, 0, bytes.Length);
                        // Definitions must expose a (int id) constructor.
                        T definition = (T)Activator.CreateInstance(typeof(T), new object[] { actualId });
                        definition.Deserialize(this, bytes);
                        loadedDefinitions[actualId] = definition;
                        definitions[actualId] = definition;
                        // Restore the table cursor for the next offset pair.
                        blobStream.Position = prevPosition;
                    }
                }
            }
        }
    }
    return (definitions);
}
// Verifies that non-nullable value-type fields round-trip through the index:
// one fully-populated document and one holding default values for every field.
protected void TestCanRoundTripNonNullableValueTypes()
{
    SearchServiceClient serviceClient = Data.GetSearchServiceClient();

    // Throwaway index whose schema is derived from NonNullableModel.
    var index = new Index()
    {
        Name = SearchTestUtilities.GenerateName(),
        Fields = FieldBuilder.BuildForType<NonNullableModel>()
    };
    serviceClient.Indexes.Create(index);
    SearchIndexClient indexClient = Data.GetSearchIndexClient(index.Name);

    var startDate = new DateTimeOffset(2015, 11, 24, 14, 01, 00, TimeSpan.FromHours(-8));
    DateTime endDate = startDate.UtcDateTime + TimeSpan.FromDays(15);

    // Document with every field populated with a non-default value.
    var populatedDoc = new NonNullableModel()
    {
        Key = "123",
        Count = 3,
        EndDate = endDate,
        IsEnabled = true,
        Rating = 5,
        Ratio = 3.14,
        StartDate = startDate,
        TopLevelBucket = new Bucket() { BucketName = "A", Count = 12 },
        Buckets = new[]
        {
            new Bucket() { BucketName = "B", Count = 20 },
            new Bucket() { BucketName = "C", Count = 7 }
        }
    };

    // Document carrying the default value of every field type.
    var defaultsDoc = new NonNullableModel()
    {
        Key = "456",
        Count = default(long),
        EndDate = default(DateTime),
        IsEnabled = default(bool),
        Rating = default(int),
        Ratio = default(double),
        StartDate = default(DateTimeOffset),
        TopLevelBucket = default(Bucket),
        Buckets = new[] { default(Bucket) }
    };

    indexClient.Documents.Index(IndexBatch.Upload(new[] { populatedDoc, defaultsDoc }));
    SearchTestUtilities.WaitForIndexing();

    // Both documents must come back equal to what was uploaded.
    DocumentSearchResult<NonNullableModel> searchResult =
        indexClient.Documents.Search<NonNullableModel>("*");
    Assert.Equal(2, searchResult.Results.Count);
    Assert.Equal(populatedDoc, searchResult.Results[0].Document);
    Assert.Equal(defaultsDoc, searchResult.Results[1].Document);
}
/// <summary>Hash code delegates entirely to the Index value.</summary>
public override int GetHashCode()
{
    return Index.GetHashCode();
}
/// <summary>Combines the Kind and Index hash codes via XOR.</summary>
public override int GetHashCode() => Kind.GetHashCode() ^ Index.GetHashCode();
/// <summary>
/// Builds a Model.Table for the given table name from the cached schema
/// DataTables (Columns, Indexes, IndexColumns, IndexReferencedColumns):
/// columns first, then non-key indexes, then primary/foreign/unique keys.
/// NOTE(review): tableName is interpolated unescaped into DataTable.Select
/// filters — a name containing an apostrophe would break the filter. Verify
/// upstream sanitization.
/// </summary>
private Model.Table GetNewTable(string tableName)
{
    ArchAngel.Interfaces.ProjectHelper.RaiseObjectBeingProcessedEvent(tableName, "Table");
    Model.Table table = new Model.Table(tableName, false);

    #region Columns
    DataRow[] columnRows = Columns.Select(string.Format("TABLE_NAME = '{0}'", tableName));
    foreach (DataRow columnRow in columnRows)
    {
        // Map the schema row onto a Column; DBNull-able fields get explicit fallbacks.
        Column column = new Column(
            (string)columnRow["COLUMN_NAME"],
            false,
            table,
            (int)columnRow["ORDINAL_POSITION"],
            Slyce.Common.Utility.StringsAreEqual((string)columnRow["IS_NULLABLE"], "YES", false),
            (string)columnRow["DATA_TYPE"],
            columnRow.IsNull("CHARACTER_MAXIMUM_LENGTH") ? 0 : (int)columnRow["CHARACTER_MAXIMUM_LENGTH"],
            (int)columnRow["InPrimaryKey"] == 1,
            columnRow.IsNull("IsIdentity") ? false : (int)columnRow["IsIdentity"] == 1,
            columnRow.IsNull("COLUMN_DEFAULT") ? "" : (string)columnRow["COLUMN_DEFAULT"],
            columnRow.IsNull("IsComputed") ? false : (int)columnRow["IsComputed"] == 1);

        if (IsSupported(column))
        {
            table.AddColumn(column);
        }
    }
    #endregion

    #region Indexes
    DataRow[] indexRows = Indexes.Select(string.Format("TABLE_NAME = '{0}'", tableName));

    // First pass: constraint rows that become plain indexes.
    // PRIMARY KEY / FOREIGN KEY / UNIQUE are skipped here — they become keys below.
    foreach (DataRow indexRow in indexRows)
    {
        string indexType;
        string indexKeyType = indexRow["CONSTRAINT_TYPE"].ToString();
        if (indexKeyType == "PRIMARY KEY")
        {
            continue;
        }
        else if (indexKeyType == "FOREIGN KEY")
        {
            continue;
        }
        else if (indexKeyType == "UNIQUE")
        {
            continue; //indexType = DatabaseConstant.IndexType.Unique;
        }
        else if (indexKeyType == "CHECK")
        {
            indexType = DatabaseConstant.IndexType.Check;
        }
        else if (indexKeyType == "NONE") //TODO check is NONE
        {
            indexType = DatabaseConstant.IndexType.None;
        }
        else
        {
            // Unknown constraint type: fail loudly rather than silently dropping it.
            throw new Exception("IndexType " + indexKeyType + " Not Defined");
        }

        // "NONE" rows carry their column directly; other constraints look up IndexColumns.
        DataRow[] indexColumnRows;
        if (indexKeyType == "NONE")
        {
            indexColumnRows = Columns.Select(string.Format("TABLE_NAME = '{0}' AND COLUMN_NAME = '{1}'", tableName, indexRow["ColumnName"]));
        }
        else
        {
            indexColumnRows = IndexColumns.Select(string.Format("TABLE_NAME = '{0}' AND CONSTRAINT_NAME = '{1}'", tableName, indexRow["CONSTRAINT_NAME"]));
        }

        Index index = new Index(indexRow["CONSTRAINT_NAME"].ToString(), false, indexType, table);
        // Fill Columns
        foreach (DataRow indexColumnRow in indexColumnRows)
        {
            Column indexColumn = new Column(indexColumnRow["COLUMN_NAME"].ToString(), false);
            index.AddColumn(indexColumn);
        }
        index.ResetDefaults();
        table.AddIndex(index);
    }

    // Second pass over the same rows: constraint rows that become keys.
    foreach (DataRow keyRow in indexRows)
    {
        string keyType;
        string indexKeyType = keyRow["CONSTRAINT_TYPE"].ToString();
        if (indexKeyType == "PRIMARY KEY")
        {
            keyType = DatabaseConstant.KeyType.Primary;
        }
        else if (indexKeyType == "FOREIGN KEY")
        {
            keyType = DatabaseConstant.KeyType.Foreign;
        }
        else if (indexKeyType == "UNIQUE")
        {
            keyType = DatabaseConstant.KeyType.Unique;
        }
        else if (indexKeyType == "CHECK")
        {
            continue;
        }
        else if (indexKeyType == "NONE")
        {
            continue; //keyType = DatabaseConstant.KeyType.None;
        }
        else
        {
            throw new Exception("KeyType " + indexKeyType + " Not Defined");
        }

        // Create Alias
        // NOTE(review): keyAlias is built but never used below — presumably leftover. Verify.
        string keyAlias = keyType + "_";
        DataRow[] keyColumnRows = IndexColumns.Select(string.Format("TABLE_NAME = '{0}' AND CONSTRAINT_NAME = '{1}'", tableName, keyRow["CONSTRAINT_NAME"]));
        Key key = new Key(keyRow["CONSTRAINT_NAME"].ToString(), false, keyType, table, false);

        // Fill Columns
        foreach (DataRow keyColumnRow in keyColumnRows)
        {
            Column keyColumn = new Column(keyColumnRow["COLUMN_NAME"].ToString(), false);
            keyColumn.DataType = (string)keyColumnRow["DATA_TYPE"];
            key.AddColumn(keyColumn);
        }

        // Foreign keys additionally record the referenced table/key/columns.
        if (keyType == DatabaseConstant.KeyType.Foreign)
        {
            DataRow[] keyReferencedColumnRows = IndexReferencedColumns.Select(string.Format("ForeignKey = '{0}'", keyRow["CONSTRAINT_NAME"]));
            DataRow firstKeyReferencedColumnRow = keyReferencedColumnRows[0];
            // Fill References
            key.ReferencedTable = new Model.Table(firstKeyReferencedColumnRow["ReferencedTable"].ToString(), false);
            key.ReferencedKey = new Key(firstKeyReferencedColumnRow["ReferencedKey"].ToString(), false, true);
            // Fill Referenced Columns
            foreach (DataRow keyReferencedColumnRow in keyReferencedColumnRows)
            {
                Column keyReferencedColumn = new Column(keyReferencedColumnRow["ReferencedColumn"].ToString(), false);
                key.AddReferencedColumn(keyReferencedColumn);
            }
        }
        key.ResetDefaults();
        table.AddKey(key);
    }
    #endregion

    return (table);
}
/// <summary>
/// Instantiates the CombinedObservableNode&lt;T&gt; closed over the given content type.
/// Note: the 'parent' argument is accepted but not forwarded to the constructor.
/// </summary>
internal static CombinedObservableNode Create(ObservableViewModel ownerViewModel, string name, CombinedObservableNode parent, Type contentType, IEnumerable <SingleObservableNode> combinedNodes, Index index)
{
    var closedType = typeof(CombinedObservableNode<>).MakeGenericType(contentType);
    return (CombinedObservableNode)Activator.CreateInstance(closedType, ownerViewModel, name, combinedNodes, index);
}
/// <summary>
/// Migrates the Access database at mdbPath toward the schema of template.mdb:
/// creates missing tables, adds missing fields/properties/indexes, and adds
/// missing relations (retrying once with a "1"-suffixed name on collision).
/// Always returns true; individual failures are logged and skipped.
/// </summary>
public static bool CheckDB(string mdbPath)
{
    TableList = new List<string>();
    List<string> colList = new List<string>();
    var dbe = new DBEngine();
    var dbTemplate = dbe.OpenDatabase(global.ApplicationPath + "\\template.mdb");
    var dbData = dbe.OpenDatabase(mdbPath);
    var tdName = "";

    // Collect user tables, skipping system ("MSys") and temporary ("temp_") tables.
    foreach (TableDef td in dbData.TableDefs)
    {
        if (td.Name.Length > 4 && td.Name.Substring(0, 4) != "MSys" && td.Name.Substring(0, 5) != "temp_")
        {
            TableList.Add(td.Name);
        }
    }

    foreach (TableDef tdTemplate in dbTemplate.TableDefs)
    {
        if (tdTemplate.Name.Length > 4 && tdTemplate.Name.Substring(0, 4) != "MSys" && tdTemplate.Name.Substring(0, 5) != "temp_")
        {
            tdName = tdTemplate.Name;
            // Table missing from the data file: create it by copying the template table
            // (a SELECT INTO across databases also copies the rows of the template table).
            if (!TableList.Contains(tdTemplate.Name))
            {
                var sql = $@"SELECT [{tdTemplate.Name}].* INTO [{tdTemplate.Name}] IN '{dbData.Name}' FROM [{tdTemplate.Name}]";
                var qdf = dbTemplate.CreateQueryDef("", sql);
                qdf.Execute();
                dbData.TableDefs.Refresh();
                qdf.Close();
            }
            //enumerate all fields in the current data table
            foreach (Field f in dbData.TableDefs[tdTemplate.Name].Fields)
            {
                colList.Add(f.Name);
            }
            //enumerate all fields in the corresponding template table
            foreach (Field fldTemplate in tdTemplate.Fields)
            {
                //if a template field is not found in the data table field
                //we will add this field to the data table
                var fldData = new Field();
                if (!colList.Contains(fldTemplate.Name))
                {
                    dbData.TableDefs[tdTemplate.Name].With(o =>
                    {
                        fldData = o.CreateField(fldTemplate.Name, fldTemplate.Type, fldTemplate.Size);
                        o.Fields.Append(fldData);
                        o.Fields.Refresh();
                    });
                }
                // Re-fetch the field and force its attributes to match the template.
                fldData = dbData.TableDefs[tdTemplate.Name].Fields[fldTemplate.Name];
                fldData.AllowZeroLength = fldTemplate.AllowZeroLength;
                fldData.Required = fldTemplate.Required;
                if (fldData.Type != fldTemplate.Type)
                {
                    FixField(dbData.TableDefs[tdName], fldData, fldTemplate, mdbPath);
                }
                // Copy the Description property, creating it if it does not exist
                // (DAO throws when reading a property that was never set).
                foreach (Property pTemplate in fldTemplate.Properties)
                {
                    if (pTemplate.Name == "Description")
                    {
                        try
                        {
                            fldData.Properties["Description"].Value = pTemplate.Value;
                        }
                        catch
                        {
                            fldData.Properties.Append(fldData.CreateProperty(pTemplate.Name, pTemplate.Type, pTemplate.Value));
                        }
                        fldData.Properties.Refresh();
                    }
                }
            }
            colList.Clear();
            //enumerate all indexes in the current data table and put in the list
            foreach (Index i in dbData.TableDefs[tdTemplate.Name].Indexes)
            {
                var name = tdTemplate.Name;
                colList.Add(i.Name);
                // Known bad legacy index: remove it via a targeted fix-up.
                if (tdTemplate.Name == "tblGearInventoryBarangayData" && i.Name == "AltKey")
                {
                    RemoveGearInventoryBarangayDataAltKey(mdbPath);
                }
            }
            //enumerate all indexes in the current template table
            foreach (Index templateIndex in tdTemplate.Indexes)
            {
                //if the current index is not in the list,
                //we will make one and add it to the indexes of the data table
                var dataIndex = new Index();
                if (!colList.Contains(templateIndex.Name))
                {
                    dbData.TableDefs[tdTemplate.Name].With(o =>
                    {
                        dataIndex = o.CreateIndex(templateIndex.Name);
                        dataIndex.Fields = templateIndex.Fields;
                        dataIndex.Primary = templateIndex.Primary;
                        // NOTE(review): Required is copied from templateIndex.Primary, not
                        // templateIndex.Required — looks like a copy-paste slip; confirm intent.
                        dataIndex.Required = templateIndex.Primary;
                        dataIndex.IgnoreNulls = templateIndex.IgnoreNulls;
                        dataIndex.Unique = templateIndex.Unique;
                        try
                        {
                            o.Indexes.Append(dataIndex);
                        }
                        catch (Exception ex)
                        {
                            Logger.Log(ex.Message, "DBCheck.cs", $"CheckDB: Add index {templateIndex.Name} to table { tdTemplate.Name}");
                        }
                        o.Indexes.Refresh();
                    });
                }
                else
                {
                    // NOTE(review): this loop body is empty — presumably a stub for
                    // reconciling an existing index with the template. Verify/remove.
                    foreach (Index i in dbData.TableDefs[tdTemplate.Name].Indexes)
                    {
                        if (i.Name == templateIndex.Name)
                        {
                        }
                    }
                }
            }
        }
    }
    colList.Clear();

    // Collect existing relations keyed by "table|foreignTable".
    foreach (Relation rel in dbData.Relations)
    {
        colList.Add(rel.Table + "|" + rel.ForeignTable);
    }
    // Add any template relation missing from the data file.
    foreach (Relation templateRel in dbTemplate.Relations)
    {
        var dataRel = new Relation();
        if (!colList.Contains(templateRel.Table + "|" + templateRel.ForeignTable))
        {
            dataRel = dbData.CreateRelation(templateRel.Name, templateRel.Table, templateRel.ForeignTable);
            foreach (Field f in templateRel.Fields)
            {
                dataRel.Fields.Append(dataRel.CreateField(f.Name));
                dataRel.Fields[f.Name].ForeignName = templateRel.Fields[f.Name].ForeignName;
            }
            try
            {
                dbData.Relations.Append(dataRel);
            }
            catch
            {
                // Name collision: retry once with a "1" suffix.
                dataRel.Name += "1";
                try
                {
                    dbData.Relations.Append(dataRel);
                }
                catch (Exception ex)
                {
                    // Known problematic relation gets a dedicated fix-up; everything else is logged.
                    if (templateRel.ForeignTable == "tblSampling" && templateRel.Table == "tblFishingExpense")
                    {
                        FixSamplingExpenseRelation(mdbPath);
                    }
                    Logger.LogError(ex.Message, ex.StackTrace);
                }
                finally
                {
                    dbData.Relations.Refresh();
                }
            }
        }
    }
    dbData.Close();
    dbTemplate.Close();
    dbData = null;
    dbTemplate = null;
    return (true);
}
/// <summary>Adds a message to the queue slot associated with the given type (blocking add).</summary>
public void Enqueue(Type type, Message message) => Queue[Index.IndexOfValue(type)].Add(message);
/// <summary>
/// Builds a combined node over several single nodes: aggregates their
/// read-only/visible flags (OR semantics), snapshots their initial values,
/// and wires the reset-to-initial-values command.
/// </summary>
/// <exception cref="InvalidOperationException">If any combined node is already destroyed.</exception>
protected CombinedObservableNode(ObservableViewModel ownerViewModel, string name, IEnumerable <SingleObservableNode> combinedNodes, Index index) : base(ownerViewModel, index)
{
    // ReSharper disable once DoNotCallOverridableMethodsInConstructor
    // Value drives these derived properties; register the dependency up front.
    DependentProperties.Add(nameof(Value), new[] { nameof(HasMultipleValues), nameof(IsPrimitive), nameof(HasList), nameof(HasDictionary) });
    this.combinedNodes = new List <SingleObservableNode>(combinedNodes);
    Name = name;
    DisplayName = this.combinedNodes.First().DisplayName;

    combinedNodeInitialValues = new List <object>();
    distinctCombinedNodeInitialValues = new HashSet <object>();

    bool isReadOnly = false;
    bool isVisible = false;
    bool nullOrder = false;
    foreach (var node in this.combinedNodes)
    {
        if (node.IsDestroyed)
        {
            throw new InvalidOperationException("One of the combined node is already disposed.");
        }
        // Combined node is read-only/visible if ANY member is.
        if (node.IsReadOnly)
        {
            isReadOnly = true;
        }
        if (node.IsVisible)
        {
            isVisible = true;
        }
        if (node.Order == null)
        {
            nullOrder = true;
        }
        // NOTE(review): this keeps Order only while every member agrees and none was
        // null so far; the exact intent of the first disjunct (order == node.Order
        // before assignment) is unclear — confirm against SingleObservableNode.Order
        // semantics before touching it.
        if (order == node.Order || (!nullOrder && order == null))
        {
            order = node.Order;
        }
        // Snapshot initial values (the HashSet tracks distinct values for multi-value detection).
        combinedNodeInitialValues.Add(node.Value);
        distinctCombinedNodeInitialValues.Add(node.Value);
    }
    IsReadOnly = isReadOnly;
    IsVisible = isVisible;

    // Command: restore every member node to its captured initial value inside one combined action.
    ResetInitialValues = new AnonymousCommand(ServiceProvider, () =>
    {
        using (Owner.BeginCombinedAction(Owner.FormatCombinedUpdateMessage(this, null), Path))
        {
            CombinedNodes.Zip(combinedNodeInitialValues).ForEach(x => x.Item1.Value = x.Item2);
            Refresh();
        }
    });
}
/// <summary>Attempts a non-blocking add of the message to the queue slot for type T.</summary>
/// <returns>True if the message was accepted.</returns>
public bool Enqueue <T>(Message message)
{
    return Queue[Index.IndexOfValue(typeof(T))].TryAdd(message);
}
/// <summary>Blocks until a message is available in the queue slot for the given type (or the token cancels).</summary>
public Message Dequeue(Type type, CancellationToken token)
{
    return Queue[Index.IndexOfValue(type)].Take(token);
}
/// <summary>
/// Perst index-enumeration test: fills an int index and a string index with a
/// pseudo-random key sequence (including duplicate keys), then verifies that every
/// Range()/foreach enumeration matches the arrays returned by Get()/ToArray() in
/// both ascent and descent order, for all inclusive/exclusive/open bound combinations.
/// Optional first argument "altbtree" enables the alternative B-tree implementation.
/// </summary>
static public void Main(string[] args)
{
    Storage db = StorageFactory.Instance.CreateStorage();
    if (args.Length > 0)
    {
        if ("altbtree" == args[0])
        {
            db.SetProperty("perst.alternative.btree", true);
        }
        else
        {
            Console.WriteLine("Unrecognized option " + args[0]);
        }
    }
    db.Open("testenum.dbs", pagePoolSize);
    // Create the root object holding both non-unique indices on first run.
    Indices root = (Indices)db.Root;
    if (root == null)
    {
        root = new Indices();
#if USE_GENERICS
        root.strIndex = db.CreateIndex <string, Record>(false);
        root.intIndex = db.CreateIndex <long, Record>(false);
#else
        root.strIndex = db.CreateIndex(typeof(string), false);
        root.intIndex = db.CreateIndex(typeof(long), false);
#endif
        db.Root = root;
    }
#if USE_GENERICS
    Index <long, Record> intIndex = root.intIndex;
    Index <string, Record> strIndex = root.strIndex;
    Record[] records;
#else
    Index intIndex = root.intIndex;
    Index strIndex = root.strIndex;
    object[] records;
#endif
    DateTime start = DateTime.Now;
    long key = 1999;
    int i, j;
    // Populate: linear-congruential key sequence; each record is inserted
    // (key % 10) times to exercise duplicate keys in the non-unique indices.
    for (i = 0; i < nRecords; i++)
    {
        Record rec = new Record();
        key = (3141592621L * key + 2718281829L) % 1000000007L;
        rec.intKey = key;
        rec.strKey = Convert.ToString(key);
        for (j = (int)(key % 10); --j >= 0;)
        {
            intIndex[rec.intKey] = rec;
            strIndex[rec.strKey] = rec;
        }
    }
    db.Commit();
    Console.WriteLine("Elapsed time for inserting " + nRecords + " records: " + (DateTime.Now - start));
    start = DateTime.Now;
    // Replay the same key sequence to derive range bounds, then check every
    // inclusive/exclusive/open bound combination in both iteration orders.
    key = 1999;
    for (i = 0; i < nRecords; i++)
    {
        key = (3141592621L * key + 2718281829L) % 1000000007L;
        Key fromInclusive = new Key(key);
        Key fromInclusiveStr = new Key(Convert.ToString(key));
        Key fromExclusive = new Key(key, false);
        Key fromExclusiveStr = new Key(Convert.ToString(key), false);
        key = (3141592621L * key + 2718281829L) % 1000000007L;
        Key tillInclusive = new Key(key);
        Key tillInclusiveStr = new Key(Convert.ToString(key));
        Key tillExclusive = new Key(key, false);
        Key tillExclusiveStr = new Key(Convert.ToString(key), false);
        // int key ascent order
        records = intIndex.Get(fromInclusive, tillInclusive);
        j = 0;
        foreach (Record rec in intIndex.Range(fromInclusive, tillInclusive, IterationOrder.AscentOrder))
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        records = intIndex.Get(fromInclusive, tillExclusive);
        j = 0;
        foreach (Record rec in intIndex.Range(fromInclusive, tillExclusive, IterationOrder.AscentOrder))
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        records = intIndex.Get(fromExclusive, tillInclusive);
        j = 0;
        foreach (Record rec in intIndex.Range(fromExclusive, tillInclusive, IterationOrder.AscentOrder))
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        records = intIndex.Get(fromExclusive, tillExclusive);
        j = 0;
        foreach (Record rec in intIndex.Range(fromExclusive, tillExclusive, IterationOrder.AscentOrder))
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        // Open-ended ranges (null bound = unbounded on that side).
        records = intIndex.Get(fromInclusive, null);
        j = 0;
        foreach (Record rec in intIndex.Range(fromInclusive, null, IterationOrder.AscentOrder))
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        records = intIndex.Get(fromExclusive, null);
        j = 0;
        foreach (Record rec in intIndex.Range(fromExclusive, null, IterationOrder.AscentOrder))
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        records = intIndex.Get(null, tillInclusive);
        j = 0;
        foreach (Record rec in intIndex.Range(null, tillInclusive, IterationOrder.AscentOrder))
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        records = intIndex.Get(null, tillExclusive);
        j = 0;
        foreach (Record rec in intIndex.Range(null, tillExclusive, IterationOrder.AscentOrder))
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        // Whole-index enumeration must match ToArray().
        records = intIndex.ToArray();
        j = 0;
        foreach (Record rec in intIndex)
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        // int key descent order
        records = intIndex.Get(fromInclusive, tillInclusive);
        j = records.Length;
        foreach (Record rec in intIndex.Range(fromInclusive, tillInclusive, IterationOrder.DescentOrder))
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        records = intIndex.Get(fromInclusive, tillExclusive);
        j = records.Length;
        foreach (Record rec in intIndex.Range(fromInclusive, tillExclusive, IterationOrder.DescentOrder))
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        records = intIndex.Get(fromExclusive, tillInclusive);
        j = records.Length;
        foreach (Record rec in intIndex.Range(fromExclusive, tillInclusive, IterationOrder.DescentOrder))
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        records = intIndex.Get(fromExclusive, tillExclusive);
        j = records.Length;
        foreach (Record rec in intIndex.Range(fromExclusive, tillExclusive, IterationOrder.DescentOrder))
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        records = intIndex.Get(fromInclusive, null);
        j = records.Length;
        foreach (Record rec in intIndex.Range(fromInclusive, null, IterationOrder.DescentOrder))
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        records = intIndex.Get(fromExclusive, null);
        j = records.Length;
        foreach (Record rec in intIndex.Range(fromExclusive, null, IterationOrder.DescentOrder))
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        records = intIndex.Get(null, tillInclusive);
        j = records.Length;
        foreach (Record rec in intIndex.Range(null, tillInclusive, IterationOrder.DescentOrder))
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        records = intIndex.Get(null, tillExclusive);
        j = records.Length;
        foreach (Record rec in intIndex.Range(null, tillExclusive, IterationOrder.DescentOrder))
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        // Reverse() enumeration must match ToArray() backwards.
        records = intIndex.ToArray();
        j = records.Length;
        foreach (Record rec in intIndex.Reverse())
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        // str key ascent order
        records = strIndex.Get(fromInclusiveStr, tillInclusiveStr);
        j = 0;
        foreach (Record rec in strIndex.Range(fromInclusiveStr, tillInclusiveStr, IterationOrder.AscentOrder))
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        records = strIndex.Get(fromInclusiveStr, tillExclusiveStr);
        j = 0;
        foreach (Record rec in strIndex.Range(fromInclusiveStr, tillExclusiveStr, IterationOrder.AscentOrder))
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        records = strIndex.Get(fromExclusiveStr, tillInclusiveStr);
        j = 0;
        foreach (Record rec in strIndex.Range(fromExclusiveStr, tillInclusiveStr, IterationOrder.AscentOrder))
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        records = strIndex.Get(fromExclusiveStr, tillExclusiveStr);
        j = 0;
        foreach (Record rec in strIndex.Range(fromExclusiveStr, tillExclusiveStr, IterationOrder.AscentOrder))
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        records = strIndex.Get(fromInclusiveStr, null);
        j = 0;
        foreach (Record rec in strIndex.Range(fromInclusiveStr, null, IterationOrder.AscentOrder))
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        records = strIndex.Get(fromExclusiveStr, null);
        j = 0;
        foreach (Record rec in strIndex.Range(fromExclusiveStr, null, IterationOrder.AscentOrder))
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        records = strIndex.Get(null, tillInclusiveStr);
        j = 0;
        foreach (Record rec in strIndex.Range(null, tillInclusiveStr, IterationOrder.AscentOrder))
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        records = strIndex.Get(null, tillExclusiveStr);
        j = 0;
        foreach (Record rec in strIndex.Range(null, tillExclusiveStr, IterationOrder.AscentOrder))
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        records = strIndex.ToArray();
        j = 0;
        foreach (Record rec in strIndex)
        {
            Debug.Assert(rec == records[j++]);
        }
        Debug.Assert(j == records.Length);
        // str key descent order
        records = strIndex.Get(fromInclusiveStr, tillInclusiveStr);
        j = records.Length;
        foreach (Record rec in strIndex.Range(fromInclusiveStr, tillInclusiveStr, IterationOrder.DescentOrder))
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        records = strIndex.Get(fromInclusiveStr, tillExclusiveStr);
        j = records.Length;
        foreach (Record rec in strIndex.Range(fromInclusiveStr, tillExclusiveStr, IterationOrder.DescentOrder))
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        records = strIndex.Get(fromExclusiveStr, tillInclusiveStr);
        j = records.Length;
        foreach (Record rec in strIndex.Range(fromExclusiveStr, tillInclusiveStr, IterationOrder.DescentOrder))
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        records = strIndex.Get(fromExclusiveStr, tillExclusiveStr);
        j = records.Length;
        foreach (Record rec in strIndex.Range(fromExclusiveStr, tillExclusiveStr, IterationOrder.DescentOrder))
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        records = strIndex.Get(fromInclusiveStr, null);
        j = records.Length;
        foreach (Record rec in strIndex.Range(fromInclusiveStr, null, IterationOrder.DescentOrder))
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        records = strIndex.Get(fromExclusiveStr, null);
        j = records.Length;
        foreach (Record rec in strIndex.Range(fromExclusiveStr, null, IterationOrder.DescentOrder))
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        records = strIndex.Get(null, tillInclusiveStr);
        j = records.Length;
        foreach (Record rec in strIndex.Range(null, tillInclusiveStr, IterationOrder.DescentOrder))
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        records = strIndex.Get(null, tillExclusiveStr);
        j = records.Length;
        foreach (Record rec in strIndex.Range(null, tillExclusiveStr, IterationOrder.DescentOrder))
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        records = strIndex.ToArray();
        j = records.Length;
        foreach (Record rec in strIndex.Reverse())
        {
            Debug.Assert(rec == records[--j]);
        }
        Debug.Assert(j == 0);
        // Progress indicator every 100 iterations.
        if (i % 100 == 0)
        {
            Console.Write("Iteration " + i + "\n");
        }
    }
    Console.WriteLine("\nElapsed time for performing " + nRecords * 36 + " index range searches: " + (DateTime.Now - start));
    // Clearing both indices must leave nothing to enumerate in either direction.
    strIndex.Clear();
    intIndex.Clear();
    Debug.Assert(!strIndex.GetEnumerator().MoveNext());
    Debug.Assert(!intIndex.GetEnumerator().MoveNext());
    Debug.Assert(!strIndex.Reverse().GetEnumerator().MoveNext());
    Debug.Assert(!intIndex.Reverse().GetEnumerator().MoveNext());
    db.Commit();
    db.Gc();
    db.Close();
}
/// <summary>
/// Takes the next message from the queue registered for type <typeparamref name="T"/>,
/// blocking until one is available or <paramref name="token"/> is canceled.
/// </summary>
public Message Dequeue <T>(CancellationToken token)
{
    var queueForType = Queue[Index.IndexOfValue(typeof(T))];
    return queueForType.Take(token);
}
/// <summary>
/// Initializes the DAO with a default Index entry.
/// </summary>
public IndexDao()
{
    // Build the default entry step by step instead of via an object initializer.
    var defaultIndex = new Index();
    defaultIndex.IndexName = "目录名字";
    defaultIndex.IndexPage = "1";
    index = defaultIndex;
}
// NOTE(review): this method is truncated in the visible chunk — the allocation path for a
// positive element count and the closing braces lie outside this region; only the
// empty-request guard is shown. Confirm behavior against the full definition.
public ArrayView <T> Allocate <T>(Index numElements) where T : struct
{
    // Requesting fewer than one element yields the default (empty) view.
    if (numElements < Index.One)
    {
        return(default);
/// <summary>
/// Stores <paramref name="idx"/> as the current index.
/// </summary>
public void Set(Index idx) => index = idx;
/// <summary>
/// Builds (and optionally creates) a staging copy of partitioned index <paramref name="i"/>
/// on <paramref name="parent"/>: same columns and options, the filegroup backing the
/// partition identified by <c>partitionNumber</c>, the partitioning key added if missing,
/// and matching data compression on servers of version 10+ (SQL Server 2008+).
/// The generated script is appended to <c>scriptChunks</c>; the index is created only
/// when <c>executeCommands</c> is set.
/// </summary>
/// <param name="i">The source index; must be aligned to a partition scheme.</param>
/// <param name="parent">The staging table (or view) that receives the copied index.</param>
/// <exception cref="System.NotSupportedException">The source index is not partition-aligned.</exception>
private void createStgIndex(Index i, TableViewBase parent)
{
    if (i.PartitionScheme == "")
    {
        throw (new System.NotSupportedException(
                   String.Format("The index '{0}' is not aligned to a Partition Scheme", i.Name)));
    }
    // todo: differentiate between Base Table as source, and View as source
    // LZAV: Index stgIndex = new Index(parent, parent.Name + "_" + i.Name);
    // Staging index name is "<table>_<index>", with a fallback name when it would
    // exceed the 128-character identifier limit.
    String indexName = parent.Name + "_" + i.Name; // LZAV
    if (indexName.Length > 128) // LZAV
    {
        indexName = "IX_CL_" + parent.Name; // LZAV
    }
    Index stgIndex = new Index(parent, indexName); // LZAV
    // Copy key/included columns in their original order.
    foreach (IndexedColumn iCol in i.IndexedColumns)
    {
        IndexedColumn stgICol = new IndexedColumn(stgIndex, iCol.Name, iCol.Descending);
        stgICol.IsIncluded = iCol.IsIncluded;
        stgIndex.IndexedColumns.Add(stgICol);
    }
    // Mirror the source index's type and options.
    stgIndex.IndexType = i.IndexType;
    stgIndex.IndexKeyType = i.IndexKeyType;
    stgIndex.IsClustered = i.IsClustered;
    stgIndex.IsUnique = i.IsUnique;
    stgIndex.CompactLargeObjects = i.CompactLargeObjects;
    stgIndex.IgnoreDuplicateKeys = i.IgnoreDuplicateKeys;
    stgIndex.IsFullTextKey = i.IsFullTextKey;
    stgIndex.PadIndex = i.PadIndex;
    // Place the staging index on the filegroup backing this partition (partitionNumber is 1-based).
    stgIndex.FileGroup = db.PartitionSchemes[i.PartitionScheme].FileGroups[partitionNumber - 1];
    // add the partitioning column to the index if it is not already there
    String partitionKeyName = i.PartitionSchemeParameters[0].Name;
    if (stgIndex.IndexedColumns[partitionKeyName] == null)
    {
        IndexedColumn stgICol = new IndexedColumn(stgIndex, partitionKeyName);
        // It is added as a Key to the Clustered index and as an Include column to a Nonclustered
        stgICol.IsIncluded = !stgIndex.IsClustered;
        stgIndex.IndexedColumns.Add(stgICol);
    }
    if (srv.VersionMajor >= 10)
    {
        // Define compression property to match by creating a Physical Partition object (not applicable to Colstore)
        {
            PhysicalPartition stgPartition = new PhysicalPartition(stgIndex, 1);
            if (i.IndexType != IndexType.NonClusteredColumnStoreIndex)
            {
                stgPartition.DataCompression = i.PhysicalPartitions[partitionNumber - 1].DataCompression;
            }
            stgIndex.PhysicalPartitions.Add(stgPartition);
        }
        // Handle Filtered Index
        if (i.HasFilter)
        {
            stgIndex.FilterDefinition = i.FilterDefinition;
        }
    }
    scriptChunks.Add(stgIndex.Script());
    if (executeCommands)
    {
        stgIndex.Create();
    }
}