/// <summary>
/// Initializes the dictionary with the given capacity. The supplied comparer is
/// wrapped in a <see cref="WeakKeyComparer{TKey}"/> so weakly-held keys compare
/// like their targets inside the backing dictionary.
/// </summary>
public WeakKeyDictionary(int capacity, IEqualityComparer<TKey> comparer)
{
    _comparer = new WeakKeyComparer<TKey>(comparer);
    _dict = new Dictionary<object, TValue>(capacity, _comparer);

    // Lazy key/value views over this instance.
    _keyColl = new KeyCollection(this);
    _valueColl = new ValueCollection(this);
}
/// <summary>
/// Creates a named dictionary view over the inner assembly list, projecting
/// each entry's assembly version as the key and its assembly as the value.
/// </summary>
public VersionDictionary(string name)
{
    // Reuse the inner list's sync root when it exposes one; otherwise make our own.
    ICollection innerCollection = _innerList as ICollection;
    _syncRoot = (innerCollection != null)
        ? (innerCollection.SyncRoot ?? new object())
        : new object();

    _name = name;
    _keys = new KeyCollection<AssemblyEntry, Version, Assembly>(
        _innerList,
        entry => (entry == null) ? null : entry.Name.Version,
        entry => (entry == null) ? null : entry.Assembly);
    _values = new ValueCollection<AssemblyEntry, Assembly>(
        _innerList,
        entry => (entry == null) ? null : entry.Assembly);
}
/// <summary>
/// Initializes a new <see cref="GenericDictionary{TKey, TValue}"/> instance as a
/// wrapper around the specified non-generic dictionary.
/// </summary>
/// <param name="dictionary">The dictionary to wrap.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="dictionary"/> is <see langword="null"/>.</exception>
public GenericDictionary(IDictionary dictionary)
{
    if (dictionary == null)
    {
        throw new ArgumentNullException(nameof(dictionary));
    }

    this.Dictionary = dictionary;
    this.GenericKeys = new KeyCollection(this.Dictionary);
    this.GenericValues = new ValueCollection(this.Dictionary);
}
/// <summary>
/// Builds the key-collection test fixture: fresh driver, reset access counter,
/// and the collection under test wired to the lazy model resolver.
/// </summary>
public TestKeyCollection()
{
    _driver = GetDriver();
    _testAccessCount = 0;
    _target = new KeyCollection<TestModel, int>(_driver, GetTestModelByKey);
}
/// <summary>
/// Element-wise equality against another <see cref="KeyCollection{T}"/>: the
/// collections are equal only when they have the same length and every pair of
/// corresponding (non-null) elements compares equal.
/// </summary>
public override bool Equals(object obj)
{
    var other = obj as KeyCollection<T>;
    if (other == null)
    {
        return false;
    }

    IEnumerator<T> left = m_KeyCollection.GetEnumerator();
    IEnumerator right = other.GetEnumerator();
    bool hasLeft, hasRight;

    // Non-short-circuit '&' so both enumerators advance every iteration,
    // matching the original pairwise walk.
    while ((hasLeft = left.MoveNext()) & (hasRight = right.MoveNext()))
    {
        T a = left.Current;
        T b = right.Current as T;
        // A null on either side (or a mismatch) fails the comparison.
        if ((a == null) || (b == null) || !a.Equals(b))
        {
            return false;
        }
    }

    // Equal only if both sequences ended together.
    return hasLeft == hasRight;
}
/// <summary>
/// Creates an ordered dictionary with the given initial capacity: a hash map
/// for lookups plus a linked list that preserves insertion order.
/// </summary>
public OrderedDictionaryG(int capacity)
{
    mDictionary = new Dictionary<TKey, LinkedListNode<KeyValuePair<TKey, TValue>>>(capacity);
    mLinkedList = new LinkedList<KeyValuePair<TKey, TValue>>();

    keyCollection = new KeyCollection(this);
    valueCollection = new ValueCollection(this);
}
/// <summary>
/// Creates an ordered dictionary that hashes keys with the supplied comparer;
/// a linked list preserves insertion order alongside the hash map.
/// </summary>
public OrderedDictionaryG(IEqualityComparer<TKey> comparer)
{
    mDictionary = new Dictionary<TKey, LinkedListNode<KeyValuePair<TKey, TValue>>>(comparer);
    mLinkedList = new LinkedList<KeyValuePair<TKey, TValue>>();

    keyCollection = new KeyCollection(this);
    valueCollection = new ValueCollection(this);
}
void Awake() { Debug.Log("reset"); player = GameObject.Find("Ellen"); coordinates = GameObject.Find("scripts"); key = GameObject.Find("key"); keyHud = GameObject.Find("keyhud"); keyAnimator = keyHud.GetComponent <Animator>(); SpawnCoordinates spawnScript = coordinates.GetComponent <SpawnCoordinates>(); spawn = spawnScript.spawnCoord; rotation = spawnScript.spawnRotation; Character playerScript = player.GetComponent <Character>(); playerScript._controlRotation = new Vector2(0, rotation.y); player.transform.position = new Vector3(spawn.x, spawn.y, spawn.z); player.transform.eulerAngles = new Vector3(0, rotation.y, 0); player.GetComponent <CharacterController>().enabled = true; if (spawnScript.key == true) { KeyCollection keyScript = player.GetComponent <KeyCollection>(); keyScript.keyInventory = true; keyAnimator.SetBool("open", true); Destroy(key); } }
/// <summary>
/// Per-test setup: fresh driver, reset access counter, and a new key
/// collection under test wired to the lazy model resolver.
/// </summary>
public void TestInit()
{
    _driver = GetDriver();
    _testAccessCount = 0;
    _target = new KeyCollection<TestModel, int>(_driver, _GetTestModelByKey);
}
/// <summary>
/// Creates a new read-only wrapper around a dictionary.
/// </summary>
/// <param name="dictionary">The dictionary to wrap as read-only.</param>
/// <param name="conversion">A function that converts the input to the output.</param>
/// <param name="reverseConversion">An optional function to convert the output back to the input, if possible.</param>
public ReadOnlyDictionaryConverter(IReadOnlyDictionary<TKey, TInput> dictionary, Func<TInput, TOutput> conversion, Func<TOutput, TInput>? reverseConversion = null)
{
    Parent = dictionary;

    // All Keys properties must observe the same underlying collection.
    if (dictionary.Keys is ICollection<TKey> mutableKeys)
    {
        if (dictionary.Keys is IReadOnlyCollection<TKey> readOnlyKeys)
        {
            // Source keys already implement both interfaces we need.
            Keys = readOnlyKeys;
            _Keys = mutableKeys;
        }
        else
        {
            // ICollection only: wrap once so both fields share the wrapper.
            var wrapper = new ReadOnlyCollection<TKey>(mutableKeys);
            Keys = wrapper;
            _Keys = wrapper;
        }
    }
    else
    {
        // Neither interface: fall back to this type's own key view.
        var wrapper = new KeyCollection(this);
        Keys = wrapper;
        _Keys = wrapper;
    }

    _Conversion = conversion;
    _ReverseConversion = reverseConversion;
}
// Shows the NPC's dialogue prompt when the player enters the trigger, choosing
// the missing-eye variant when this NPC is missing an eye the player holds.
void OnTriggerEnter(Collider col)
{
    if (col.tag != "Player")
    {
        return;
    }

    KeyCollection keyCollection = player.GetComponent<KeyCollection>();
    Dialogue dialogueScript = player.GetComponent<Dialogue>();

    if (missingEye == true && keyCollection.eye == true)
    {
        npcName.text = missingEyeNameText;
        dialogue.text = missingEyeDialogue;
        dialogueScript.overMissingEye = true;
    }
    else
    {
        npcName.text = nameText;
        dialogue.text = dialogueText;
    }

    // Raise the prompt above the NPC and play its open animation.
    promptObject.transform.position = new Vector3(location.x, location.y + height, location.z);
    promptAnimator.SetBool("open", true);
}
/// <summary>
/// Copies the dictionary's keys into <paramref name="array"/> starting at
/// <paramref name="index"/>, skipping unused entry slots.
/// </summary>
public void CopyTo<TKey, TValue>(KeyCollection<TKey, TValue> collection, TKey[] array, int index)
{
    if (array == null)
    {
        throw XExceptions.Argument.IsNull(nameof(array));
    }
    if (index < 0 || index > array.Length)
    {
        throw XExceptions.Argument.NonNegativeNumberRequired();
    }
    if (array.Length - index < collection.Dictionary.Count)
    {
        throw XExceptions.Argument.ArrayPlusOffTooSmall();
    }

    var dictionary = collection.Dictionary;
    var entries = dictionary.Entries;
    var count = dictionary.Count;
    var target = index;
    for (var i = 0; i < count; i++)
    {
        // Entries with a negative HashCode are skipped — presumably free or
        // deleted slots in the entry pool; confirm against the dictionary type.
        if (entries[i].HashCode >= 0)
        {
            array[target++] = entries[i].Key;
        }
    }
}
// Loads a previously saved key file: always restores the working key pair and,
// when both halves are present, the master key pair as well.
private void buttonPickMasterKeys_Click(object sender, EventArgs e)
{
    string filename = FileDialogs.AskUserForFileNameToOpen();
    if (filename == null)
    {
        return;
    }

    KeyCollection keys = XmlFile.ReadFile<KeyCollection>(filename);

    this.keyPair = new KeyPair();
    this.keyPair.Public = Convert.FromBase64String(keys.PublicKey);
    this.keyPair.Private = Convert.FromBase64String(keys.PrivateKey);

    if (keys.MasterPrivateKey == null || keys.MasterPublicKey == null)
    {
        this.labelKeyStatus.Text = "Keys but NOT master keys loaded";
        return;
    }

    this.masterKeypair = new KeyPair();
    this.masterKeypair.Public = Convert.FromBase64String(keys.MasterPublicKey);
    this.masterKeypair.Private = Convert.FromBase64String(keys.MasterPrivateKey);
    this.labelKeyStatus.Text = "Keys including MASTER KEYS loaded";
}
// Dispatches global hotkeys: toggles fullscreen, or captures a timestamped
// screenshot and reports where it was written. (A disabled logo-toggle branch
// that used to live here was dead, commented-out code and has been removed.)
private void KeyMouseManager_SomeKeyPressd(KeyCollection keyCollection)
{
    if (keyCollection == KeyMapperManager.Instance.GetKeyCollection(GameKeyCode.FullScreen))
    {
        SwitchFullScreen();
    }
    else if (keyCollection == KeyMapperManager.Instance.GetKeyCollection(GameKeyCode.Screenshot))
    {
        renderWindow.WriteContentsToTimestampedFile("ScreenShot_", ".jpg");
        outputMgr.DisplayMessage(string.Format(
            locateMgr.GetLocalizedString(LocateFileType.GameString, "str_screenshots_saved_to_{0}"),
            Environment.CurrentDirectory));
    }
}
/// <summary>
/// Per-test setup: fresh driver, zeroed access counter, and a new key
/// collection under test bound to the lazy model resolver.
/// </summary>
public void TestInit()
{
    _driver = GetDriver();
    _testAccessCount = 0;
    _target = new KeyCollection<TestModel, int>(_driver, _GetTestModelByKey);
}
/// <summary>
/// Serialization round trip: flush two keys, reload them through a second
/// collection, lazily resolve one model, then verify a refresh picks up a key
/// added by the other collection.
/// </summary>
public void TestSerialization()
{
    // Persist two keys and confirm the dirty flag tracks the flush.
    _target.AddKeyAsync(_models[0].Key).Wait();
    _target.AddKeyAsync(_models[1].Key).Wait();
    Assert.IsTrue(_target.IsDirty, "Dirty flag not set.");
    _target.FlushAsync().Wait();
    Assert.IsFalse(_target.IsDirty, "Dirty flag not reset on flush.");

    // A brand-new collection over the same driver sees the flushed keys
    // without touching the lazy loader.
    var secondTarget = new KeyCollection<TestModel, int>(_driver, _GetTestModelByKey);
    Assert.AreEqual(2, secondTarget.Query.Count(), "Key count is incorrect.");
    Assert.AreEqual(0, _testAccessCount, "Lazy loader was accessed prematurely.");

    var testKey = secondTarget.Query
        .Where(k => k.Key.Equals(_models[1].Key))
        .FirstOrDefault();
    Assert.IsNotNull(testKey, "Test key not retrieved.");
    Assert.AreEqual(_models[1].Key, testKey.Key, "Key mismatch.");
    Assert.AreEqual(0, _testAccessCount, "Lazy loader was accessed prematurely.");

    // Resolving the value hits the loader exactly once.
    var testModel = testKey.LazyValue.Value;
    Assert.AreSame(_models[1], testModel, "Model does not match.");
    Assert.AreEqual(1, _testAccessCount, "Lazy loader access count is incorrect.");

    // A key flushed through the second collection appears in the first only
    // after a refresh.
    secondTarget.AddKeyAsync(_models[2].Key).Wait();
    secondTarget.FlushAsync().Wait();
    Assert.AreEqual(2, _target.Query.Count(), "Unexpected key count in original collection.");
    _target.RefreshAsync().Wait();
    Assert.AreEqual(3, _target.Query.Count(), "Refresh failed.");
}
// Click handler: generates a fresh master key pair, registers the Data Owner
// (user + role names, encrypted under the new master public key) with the
// gateway service, then offers to save all key material to an XML file.
private void buttonGenerateAndSaveMasterKeypair_Click(object sender, EventArgs e)
{
    try
    {
        // Both DO identifiers are required before anything is generated.
        if (string.IsNullOrEmpty(this.textBoxDOUsername.Text))
        {
            MessageBox.Show("You must enter a DO user name");
            return;
        }
        if (string.IsNullOrEmpty(this.textBoxDORoleName.Text))
        {
            MessageBox.Show("You must enter a DO role name");
            return;
        }

        IPreService proxy = GetPreProxy();
        this.masterKeypair = proxy.GenerateKeyPair();
        SignKeys doSignKeyPair = DataSigner.GenerateSignKeyPair();

        // NOTE(review): a fresh proxy is fetched before each call — presumably
        // the proxies are single-use; confirm against GetPreProxy().
        proxy = GetPreProxy();
        byte[] doUserName = proxy.Encrypt(this.masterKeypair.Public, this.textBoxDOUsername.Text.GetBytes());
        proxy = GetPreProxy();
        byte[] doRoleName = proxy.Encrypt(this.masterKeypair.Public, this.textBoxDORoleName.Text.GetBytes());

        IGatewayService gwProxy = GetServiceProxy();
        gwProxy.InitializeSystem(this.myId, doUserName, doRoleName, doSignKeyPair.PublicOnly);

        string filename = FileDialogs.AskUserForFileNameToSaveIn();
        if (!string.IsNullOrEmpty(filename))
        {
            if (!Path.HasExtension(filename))
            {
                filename = filename + ".xml";
            }

            // The master pair doubles as the working pair in the saved file.
            KeyCollection keys = new KeyCollection();
            keys.MasterPublicKey = Convert.ToBase64String(this.masterKeypair.Public);
            keys.MasterPrivateKey = Convert.ToBase64String(this.masterKeypair.Private);
            keys.PrivateKey = keys.MasterPrivateKey;
            keys.PublicKey = keys.MasterPublicKey;
            keys.SignKeys = Convert.ToBase64String(doSignKeyPair.PublicAndPrivate);
            XmlFile.WriteFile(keys, filename);

            this.labelKeyStatus.Text = "Keys including MASTER KEYS loaded";
            MessageBox.Show("Done");
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show("Error: " + ex.Message);
        Logger.LogError("Error generating master keypair", ex);
    }
}
/// <summary>
/// Create a new <see cref="Dictionary"/> instance.
/// </summary>
/// <param name="ARef">The <see cref="Unsafe.AVDictionary"/>.</param>
/// <param name="AFlags">The <see cref="AVDictFlags"/>.</param>
internal Dictionary(Ref<Unsafe.AVDictionary> ARef, AVDictFlags AFlags)
{
    Ref = ARef;
    Flags = AFlags;

    // Key/value views are thin wrappers over this dictionary.
    Keys = new KeyCollection(this);
    Values = new ValueCollection(this);
}
/// <summary>
/// Creates a sorted list backed by parallel key and value lists, both presized
/// to <paramref name="capacity"/>, with wrapper views over each.
/// </summary>
public MySortedList(int capacity)
{
    keys = new List<TKey>(capacity);
    values = new List<TValue>(capacity);

    this.keyCollection = new KeyCollection(keys);
    this.valueCollection = new ValueCollection(values);
}
/// <summary>
/// Creates an ordered dictionary with the given capacity and key comparer; a
/// linked list records insertion order alongside the backing dictionary.
/// </summary>
public OrderedDictionary(int capacity, IEqualityComparer<TKey>? comparer)
{
    _innerDictionary = new Dictionary<TKey, TValue>(capacity, comparer);
    _orderedKeys = new LinkedList<TKey>();

    _keyCollection = new KeyCollection(_orderedKeys, _innerDictionary);
    _valueCollection = new ValueCollection(_orderedKeys, _innerDictionary);
}
// Shared constructor body: builds the comparer-driven red-black tree and the
// key/value views over it.
private void init(IComparer<TKey> comparer)
{
    var keyComparer = new KeyValuePairComparer<TKey, TValue>(comparer);
    _tree = new RedBlackTree<TKey, TValue>(keyComparer, false);
    _keys = new KeyCollection(_tree);
    _values = new ValueCollection(_tree);
}
/// <summary>
/// Creates an AVL-backed dictionary ordered by <paramref name="comparer"/>,
/// falling back to the default comparer when <see langword="null"/>.
/// </summary>
public AvlDictionary(IComparer<TK> comparer)
{
    if (comparer == null)
    {
        comparer = Comparer<TK>.Default;
    }
    Comparer = comparer;
    _avl = new AvlTree<KeyValuePair<TK, TV>>(new KeyValuePairComparer(comparer));

    Keys = new KeyCollection(this);
    Values = new ValueCollection(this);
}
/// <summary>
/// Creates an empty AVL tree dictionary with fresh key and value views.
/// </summary>
public AVLTreeDictionary()
{
    // Empty tree: no root, zero entries.
    _root = null;
    Count = 0;

    Keys = new KeyCollection(this);
    Values = new ValueCollection(this);
}
/// <summary>
/// Merges any keys bound under <paramref name="section"/> into
/// <paramref name="keys"/>; a section that binds to nothing is ignored.
/// </summary>
public static void ConfigureFrom(this KeyCollection keys, IConfigurationSection section)
{
    var boundKeys = section.Get<KeyCollection>();
    if (boundKeys == null)
    {
        return;
    }
    keys.AddRange(boundKeys);
}
/// <summary>
/// Construct a table definition over the given driver.
/// </summary>
/// <param name="driver">Sterling driver.</param>
/// <param name="resolver">Resolves an instance from its key.</param>
/// <param name="key">Extracts the key from an instance.</param>
public TableDefinition(ISterlingDriver driver, Func<TKey, T> resolver, Func<T, TKey> key)
{
    _driver = driver;
    _resolver = resolver;
    FetchKey = key;

    // Until a dirty-check is registered, every instance counts as dirty.
    _isDirty = obj => true;

    KeyList = new KeyCollection<T, TKey>(driver, resolver);
    Indexes = new Dictionary<string, IIndexCollection>();
}
/// <summary>
/// Wraps an existing grouping list in a dictionary view. Only one dictionary
/// may be attached to a given owner at a time.
/// </summary>
/// <param name="owner">The grouping list this dictionary projects.</param>
/// <param name="values">NOTE(review): this parameter is never read here —
/// confirm whether it should seed the dictionary or can be removed.</param>
/// <exception cref="InvalidOperationException">
/// The owner already has a dictionary attached.</exception>
internal Dictionary(GroupingLinkedList<TKey, TValue> owner, IEnumerable<IGrouping<TKey, TValue>> values)
{
    if (owner._dictionary != null)
    {
        throw new InvalidOperationException();
    }
    _owner = owner;
    _keys = new KeyCollection(this);
}
/// <summary>
/// Creates a new empty observable sorted list with the specified comparer.
/// </summary>
/// <param name="capacity">The default capacity of the dictionary.</param>
/// <param name="comparer">The comparer to use for ordering keys.</param>
public ObservableSortedList(int capacity, IComparer<TKey> comparer)
{
    Comparer = comparer;

    // Parallel key/value arrays presized to the requested capacity.
    _Keys = new TKey[capacity];
    _Values = new TValue[capacity];

    Keys = new KeyCollection(this);
    Values = new ValueCollection(this);
}
/// <summary>
/// Creates an empty concurrent B-tree dictionary ordered by <paramref name="cmp"/>.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="cmp"/> is null.</exception>
public ConcurrentBTreeDictionary(IComparer<TKey> cmp)
{
    if (cmp == null)
    {
        throw new ArgumentNullException(nameof(cmp));
    }
    this.Comparer = cmp;
    this.version = 0;

    // Branching factors: internal nodes hold 16 children, leaves one less.
    this.InternalNodeChildren = 16;
    this.LeafNodeChildren = InternalNodeChildren - 1;

    this.keyCollection = new KeyCollection(this);
    this.valueCollection = new ValueCollection(this);
}
/// <summary>
/// Initializes a new BTreeDictionary instance.
/// </summary>
/// <param name="keyComparer">The comparer for ordering keys in the structure.</param>
/// <param name="nodeCapacity">The capacity in keys for each node in the tree structure.</param>
public BTreeDictionary(IComparer<TKey> keyComparer, int nodeCapacity)
{
    this.keyComparer = keyComparer;

    // A new tree is a single node that is both root and first leaf.
    this.first = new Node(nodeCapacity);
    this.root = this.first;

    this.keys = new KeyCollection(this);
    this.values = new ValueCollection(this);
}
/// <summary>
/// Creates a block writer over the cache manager. The <paramref name="keys"/>
/// out parameter aliases the internal key list, so keys produced during writing
/// become visible to the caller through it.
/// </summary>
public CacheManagerStreamWriter(out KeyCollection keys, int blockLength, HashAlgorithm hashAlgorithm, CacheManager cacheManager, BufferManager bufferManager)
{
    keys = _keyList;

    _hashAlgorithm = hashAlgorithm;
    _cacheManager = cacheManager;
    _bufferManager = bufferManager;

    // Rent a scratch buffer sized to one block.
    _blockBufferLength = blockLength;
    _blockBuffer = bufferManager.TakeBuffer(blockLength);
}
/// <summary>
/// Creates an empty observable dictionary using default key equality, with
/// room for four entries before the first resize.
/// </summary>
public ObservableDictionary()
{
    _helper = new BlockHelper();
    _version = 1;
    _entrys = new Entry[4];
    _keyEquals = EqualityComparer<TKey>.Default;

    _keys = new KeyCollection(this);
    _values = new ValueCollection(this);
}
/// <summary>
/// Creates a weak-key dictionary: keys are wrapped in <see cref="WeakNeedle{T}"/>
/// instances pooled by a reservoir, and compared through the supplied comparer
/// (default equality when <see langword="null"/>).
/// </summary>
public WeakDictionary(IEqualityComparer<TKey> comparer, int initialProbing)
{
    _keyComparer = comparer ?? EqualityComparer<TKey>.Default;

    // Compare weak needles by the keys they point to.
    var needleComparer = new NeedleConversionEqualityComparer<WeakNeedle<TKey>, TKey>(_keyComparer);
    _wrapped = new SafeDictionary<WeakNeedle<TKey>, TValue>(needleComparer, initialProbing);

    _keyCollection = new KeyCollection<TKey, TValue>(this);
    _valueCollection = new ValueCollection<TKey, TValue>(this);
    _reservoir = new NeedleReservoir<TKey, WeakNeedle<TKey>>(key => new WeakNeedle<TKey>(key));
}
/// <summary>
/// Creates a reader over the cached blocks identified by <paramref name="keys"/>;
/// the total length is the sum of the individual block lengths.
/// </summary>
public CacheManager_StreamReader(KeyCollection keys, CacheManager cacheManager, BufferManager bufferManager)
{
    _cacheManager = cacheManager;
    _keys = keys;
    _bufferManager = bufferManager;

    // Prime the first block and advance the key cursor past it.
    _blockBuffer = _cacheManager[_keys[_keysIndex]];
    _keysIndex++;

    _length = keys.Sum(n => (long)cacheManager.GetLength(n));
}
/// <summary>
/// Two key collections differing in a single component must not compare equal,
/// must hash differently, and must not collide as dictionary keys.
/// </summary>
public void KeyCollection_Equal_False()
{
    var left = new KeyCollection(new object[] { 100, "a", true, new DateTime(2015, 05, 12) });
    var right = new KeyCollection(new object[] { 100, "a", false, new DateTime(2015, 05, 12) });

    var lookup = new Dictionary<KeyCollection, object>();
    lookup.Add(left, null);

    Assert.That(left, Is.Not.EqualTo(right));
    Assert.That(left.GetHashCode(), Is.Not.EqualTo(right.GetHashCode()));
    Assert.That(lookup.ContainsKey(right), Is.Not.True);
}
// Registers the singleton, indexes the configured keys by color (disabling them
// all), then re-enables only the colors whose key objects are still active in
// the scene.
void Awake()
{
    KeyCollection.instance = this;

    dict = new Dictionary<KeyColor, Key>();
    foreach (var configuredKey in colorKeys)
    {
        dict.Add(configuredKey.keyColor, configuredKey);
        configuredKey.enabled = false;
    }

    var sceneKeys = GameObject.FindGameObjectsWithTag(Tags.Key);
    foreach (var sceneKey in sceneKeys)
    {
        if (sceneKey.activeSelf)
        {
            dict[sceneKey.GetComponent<DoorKey>().keyColor].enabled = true;
        }
    }
}
/// <summary>
/// .NET-specific: Used for DistributedRouting only.
/// </summary>
internal TaskCompletionSource<Message.Message> CloseNeighborsTcs(PeerAddress remotePeer, SearchValues searchValues, Message.Message.MessageType type, ChannelCreator channelCreator, IConnectionConfiguration configuration)
{
    var message = CreateRequestMessage(remotePeer, Rpc.Commands.Neighbor.GetNr(), type);
    if (!message.IsRequest())
    {
        throw new ArgumentException("The type must be a request.");
    }

    message.SetKey(searchValues.LocationKey);
    message.SetKey(searchValues.DomainKey ?? Number160.Zero);

    if (searchValues.From != null && searchValues.To != null)
    {
        // An explicit From/To range takes priority over content keys and
        // bloom filters.
        ICollection<Number640> range = new List<Number640> { searchValues.From, searchValues.To };
        message.SetKeyCollection(new KeyCollection(range));
    }
    else
    {
        if (searchValues.ContentKey != null)
        {
            message.SetKey(searchValues.ContentKey);
        }
        if (searchValues.KeyBloomFilter != null)
        {
            message.SetBloomFilter(searchValues.KeyBloomFilter);
        }
        if (searchValues.ContentBloomFilter != null)
        {
            message.SetBloomFilter(searchValues.ContentBloomFilter);
        }
    }

    return Send(message, configuration, channelCreator);
}
// Pretend-write XML data and produce the resulting key (access point) set,
// without actually writing the record to the database.
// parameters:
//      strXml          XML data
//      strRecordID     record ID, used when building the keys
//      strLang         language version
//      strStyle        style, controls the return value (currently disabled)
//      keys            out parameter, receives the key set
//      strError        out parameter, receives the error message
// return:
//      -1  error
//      0   success
// Thread-safety: safe (runs under the database read lock)
public int API_PretendWrite(string strXml,
    string strRecordID,
    string strLang,
    // string strStyle,
    out KeyCollection keys,
    out string strError)
{
    keys = null;
    strError = "";

    //********** acquire database read lock **************
    this.m_db_lock.AcquireReaderLock(m_nTimeOut);
#if DEBUG_LOCK
    this.container.WriteDebugInfo("PretendWrite(),对'" + this.GetCaption("zh-CN") + "'数据库加读锁。");
#endif
    try
    {
        // Load the data into a DOM.
        XmlDocument domData = new XmlDocument();
        domData.PreserveWhitespace = true;  // keep whitespace exactly as supplied
        try
        {
            domData.LoadXml(strXml);
        }
        catch (Exception ex)
        {
            strError = "PretendWrite()里,加载参数中的xml数据出错。原因:" + ex.Message;
            return -1;
        }

        KeysCfg keysCfg = null;
        int nRet = this.GetKeysCfg(out keysCfg, out strError);
        if (nRet == -1)
            return -1;
        if (keysCfg != null)
        {
            // Build the keys from the document.
            keys = new KeyCollection();
            nRet = keysCfg.BuildKeys(domData,
                strRecordID,
                strLang,
                // strStyle,
                this.KeySize,
                out keys,
                out strError);
            if (nRet == -1)
                return -1;

            // Sort and remove duplicates.
            keys.Sort();
            keys.RemoveDup();
        }
        return 0;
    }
    finally
    {
        //**************** release database read lock **************
        this.m_db_lock.ReleaseReaderLock();
#if DEBUG_LOCK
        this.container.WriteDebugInfo("PretendWrite(),对'" + this.GetCaption("zh-CN") + "'数据库解读锁。");
#endif
    }
}
// Writes a batch of keys into per-SQL-table delay tables, creating each
// table's backing file on first use.
// return:
//      -1  error
//      0   success
public int Write(
    string strDatabaseName,
    KeyCollection keys,
    delegate_getfilename getfilename,
    out string strError)
{
    strError = "";

    // Entries in 'keys' are expected to be sorted by table; the function still
    // works on unsorted input, just slightly less efficiently (more switches).
    DelayTable table = null;
    KeyCollection part_keys = new KeyCollection();
    foreach (KeyItem item in keys)
    {
        if (table == null)
        {
            // First item: open (and lazily create the file for) its table.
            table = GetTable(strDatabaseName, item.SqlTableName);
            if (string.IsNullOrEmpty(table.FileName) == true)
            {
                string strFilename = getfilename(strDatabaseName, item.SqlTableName);
                int nRet = table.Create(strFilename, out strError);
                if (nRet == -1)
                    return -1;
            }
        }
        else
        {
            if (table.TableName != item.SqlTableName)
            {
                // Table changed: flush keys buffered for the previous table
                // before switching to the new one.
                if (part_keys.Count > 0)
                {
                    table.Write(part_keys);
                    part_keys.Clear();
                }
                table = GetTable(strDatabaseName, item.SqlTableName);
                if (string.IsNullOrEmpty(table.FileName) == true)
                {
                    string strFilename = getfilename(strDatabaseName, item.SqlTableName);
                    int nRet = table.Create(strFilename, out strError);
                    if (nRet == -1)
                        return -1;
                }
            }
        }

        part_keys.Add(item);
    }

    // Flush the tail batch.
    if (part_keys.Count > 0)
    {
        Debug.Assert(table != null, "");
        table.Write(part_keys);
        part_keys.Clear();
    }
    return 0;
}
// Appends every key in the batch to the delay file as an <item> element,
// holding this table's write lock for the duration of the batch.
public void Write(KeyCollection keys)
{
    if (!this.m_lock.TryEnterWriteLock(this.m_nLockTimeout))
        throw new ApplicationException("为 DelayTable 加写锁时失败。Timeout=" + this.m_nLockTimeout.ToString());
    try
    {
        foreach (KeyItem item in keys)
        {
            writer.WriteStartElement("item");
            writer.WriteElementString("keystring", item.Key);
            writer.WriteElementString("keystringnum", item.Num);
            writer.WriteElementString("fromstring", item.FromValue);
            writer.WriteElementString("idstring", item.RecordID);
            writer.WriteEndElement();
        }
    }
    finally
    {
        this.m_lock.ExitWriteLock();
    }
}
// Background upload worker loop. Once per second it picks the first background
// item in the Encoding state and advances it one step through a state machine:
//   1. No groups, no keys: export the item's Link/Store value to a stream; an
//      empty stream completes the item immediately, otherwise the stream is
//      hashed (SHA-256 when selected), encoded into blocks via the cache
//      manager, and the resulting keys are recorded (and locked) on the item.
//   2. No groups, exactly one key: that key becomes the Seed key; the seed is
//      certified when a digital signature is present, all upload keys are
//      pushed to the connections manager, locked keys are released, and the
//      item moves to Uploading.
//   3. Keys remaining: parity-encode the next batch of up to 128 keys into a
//      Group, locking the generated redundancy keys.
//   4. Groups but no keys: export an Index over the groups and re-encode it,
//      incrementing the item's rank (builds the next level of the tree).
// Encoding is abortable via the ProgressStream isStop callback when the
// manager stops or the item is removed. Any exception marks the item as
// Error, logs it, and removes the item.
private void UploadManagerThread() { for (; ; ) { Thread.Sleep(1000 * 1); if (this.State == ManagerState.Stop) return; BackgroundUploadItem item = null; try { lock (this.ThisLock) { if (_settings.BackgroundUploadItems.Count > 0) { item = _settings.BackgroundUploadItems .Where(n => n.State == BackgroundUploadState.Encoding) .FirstOrDefault(); } } } catch (Exception) { return; } if (item == null) continue; try { if (item.Groups.Count == 0 && item.Keys.Count == 0) { Stream stream = null; try { if (item.Type == BackgroundItemType.Link) { var link = item.Value as Link; if (link == null) throw new FormatException(); stream = link.Export(_bufferManager); } else if (item.Type == BackgroundItemType.Store) { var store = item.Value as Store; if (store == null) throw new FormatException(); stream = store.Export(_bufferManager); } else { throw new FormatException(); } if (stream.Length == 0) { lock (this.ThisLock) { item.Seed.Rank = 0; if (item.DigitalSignature != null) { item.Seed.CreateCertificate(item.DigitalSignature); } _connectionsManager.Upload(item.Seed); item.State = BackgroundUploadState.Completed; } } else { KeyCollection keys = null; byte[] cryptoKey = null; try { using (ProgressStream encodingProgressStream = new ProgressStream(stream, (object sender, long readSize, long writeSize, out bool isStop) => { isStop = (this.State == ManagerState.Stop || !_settings.BackgroundUploadItems.Contains(item)); }, 1024 * 1024, true)) { item.Seed.Length = stream.Length; if (item.Seed.Length == 0) throw new InvalidOperationException("Stream Length"); if (item.HashAlgorithm == HashAlgorithm.Sha256) { cryptoKey = Sha256.ComputeHash(encodingProgressStream); } encodingProgressStream.Seek(0, SeekOrigin.Begin); item.State = BackgroundUploadState.Encoding; keys = _cacheManager.Encoding(encodingProgressStream, item.CompressionAlgorithm, item.CryptoAlgorithm, cryptoKey, item.BlockLength, item.HashAlgorithm); } } catch (StopIoException) { continue; } lock (this.ThisLock) { foreach (var key 
in keys) { item.UploadKeys.Add(key); item.LockedKeys.Add(key); } item.CryptoKey = cryptoKey; item.Keys.AddRange(keys); } } } finally { if (stream != null) stream.Dispose(); } } else if (item.Groups.Count == 0 && item.Keys.Count == 1) { lock (this.ThisLock) { item.Seed.Rank = item.Rank; item.Seed.Key = item.Keys[0]; item.Keys.Clear(); item.Seed.CompressionAlgorithm = item.CompressionAlgorithm; item.Seed.CryptoAlgorithm = item.CryptoAlgorithm; item.Seed.CryptoKey = item.CryptoKey; if (item.DigitalSignature != null) { item.Seed.CreateCertificate(item.DigitalSignature); } item.UploadKeys.Add(item.Seed.Key); foreach (var key in item.UploadKeys) { _connectionsManager.Upload(key); } this.SetKeyCount(item); foreach (var key in item.LockedKeys) { _cacheManager.Unlock(key); } item.LockedKeys.Clear(); item.State = BackgroundUploadState.Uploading; _connectionsManager.Upload(item.Seed); } } else if (item.Keys.Count > 0) { var length = Math.Min(item.Keys.Count, 128); var keys = new KeyCollection(item.Keys.Take(length)); Group group = null; try { group = _cacheManager.ParityEncoding(keys, item.HashAlgorithm, item.BlockLength, item.CorrectionAlgorithm, (object state2) => { return (this.State == ManagerState.Stop || !_settings.BackgroundUploadItems.Contains(item)); }); } catch (StopException) { continue; } lock (this.ThisLock) { foreach (var key in group.Keys.Skip(group.InformationLength)) { item.UploadKeys.Add(key); item.LockedKeys.Add(key); } item.Groups.Add(group); item.Keys.RemoveRange(0, length); } } else if (item.Groups.Count > 0 && item.Keys.Count == 0) { var index = new Index(); index.Groups.AddRange(item.Groups); index.CompressionAlgorithm = item.CompressionAlgorithm; index.CryptoAlgorithm = item.CryptoAlgorithm; index.CryptoKey = item.CryptoKey; byte[] cryptoKey = null; KeyCollection keys = null; try { using (var stream = index.Export(_bufferManager)) using (ProgressStream encodingProgressStream = new ProgressStream(stream, (object sender, long readSize, long writeSize, 
out bool isStop) => { isStop = (this.State == ManagerState.Stop || !_settings.BackgroundUploadItems.Contains(item)); }, 1024 * 1024, true)) { if (item.HashAlgorithm == HashAlgorithm.Sha256) { cryptoKey = Sha256.ComputeHash(encodingProgressStream); } encodingProgressStream.Seek(0, SeekOrigin.Begin); item.State = BackgroundUploadState.Encoding; keys = _cacheManager.Encoding(encodingProgressStream, item.CompressionAlgorithm, item.CryptoAlgorithm, cryptoKey, item.BlockLength, item.HashAlgorithm); } } catch (StopIoException) { continue; } lock (this.ThisLock) { foreach (var key in keys) { item.UploadKeys.Add(key); item.LockedKeys.Add(key); } item.CryptoKey = cryptoKey; item.Keys.AddRange(keys); item.Rank++; item.Groups.Clear(); } } } catch (Exception e) { item.State = BackgroundUploadState.Error; Log.Error(e); this.Remove(item); } } }
// Encode worker loop. Once per second it picks the highest-priority upload
// item that needs work (ComputeHash / Encoding / ParityEncoding, whose file
// path is not already being worked) and advances it one step:
//   1. No groups, no keys — Upload type: hash the file (SHA-256 when
//      selected) and encode it into blocks via the cache manager; Share type:
//      split the file into share keys, yielding either a single key or
//      256-key groups with no error correction.
//   2. No groups, exactly one key: build the Metadata and Seed, certify the
//      seed when a digital signature is present, push all upload keys to the
//      connections manager, persist the seed in the cache, unlock the locked
//      keys, and move the item to Uploading.
//   3. Keys remaining: parity-encode the next batch of up to 128 keys into a
//      Group on a cancellable task, tracking progress in
//      EncodeOffset/EncodeLength.
//   4. Groups but no keys: export an Index over the groups and re-encode it,
//      incrementing Depth (next level of the tree).
// Encoding is abortable via the ProgressStream isStop callbacks when the
// encoder stops or the item is removed; removal mid-step unlocks any keys
// produced. Exceptions mark the item as Error; the working path is always
// released in the finally block.
private void EncodeThread() { for (;;) { Thread.Sleep(1000 * 1); if (this.EncodeState == ManagerState.Stop) return; UploadItem item = null; try { lock (_thisLock) { if (_settings.UploadItems.Count > 0) { item = _settings.UploadItems .Where(n => n.State == UploadState.ComputeHash || n.State == UploadState.Encoding || n.State == UploadState.ParityEncoding) .Where(n => n.Priority != 0) .OrderBy(n => -n.Priority) .Where(n => !_workingPaths.Contains(n.FilePath)) .FirstOrDefault(); if (item != null) { _workingPaths.Add(item.FilePath); } } } } catch (Exception) { return; } if (item == null) continue; try { if (item.Groups.Count == 0 && item.Keys.Count == 0) { if (item.Type == UploadType.Upload) { item.State = UploadState.Encoding; KeyCollection keys = null; byte[] cryptoKey = null; try { using (var stream = new UnbufferedFileStream(item.FilePath, FileMode.Open, FileAccess.Read, FileShare.Read, FileOptions.None, _bufferManager)) using (ProgressStream hashProgressStream = new ProgressStream(stream, (object sender, long readSize, long writeSize, out bool isStop) => { isStop = (this.EncodeState == ManagerState.Stop || !_settings.UploadItems.Contains(item)); item.EncodeOffset = Math.Min(readSize, stream.Length); }, 1024 * 1024, true)) using (ProgressStream encodingProgressStream = new ProgressStream(stream, (object sender, long readSize, long writeSize, out bool isStop) => { isStop = (this.EncodeState == ManagerState.Stop || !_settings.UploadItems.Contains(item)); item.EncodeOffset = Math.Min(readSize, stream.Length); }, 1024 * 1024, true)) { if (stream.Length == 0) throw new InvalidOperationException("Stream Length"); item.Length = stream.Length; item.EncodeLength = stream.Length; item.State = UploadState.ComputeHash; if (item.HashAlgorithm == HashAlgorithm.Sha256) { cryptoKey = Sha256.ComputeHash(hashProgressStream); } stream.Seek(0, SeekOrigin.Begin); item.EncodeOffset = 0; item.State = UploadState.Encoding; keys = _cacheManager.Encoding(encodingProgressStream, 
item.CompressionAlgorithm, item.CryptoAlgorithm, cryptoKey, item.BlockLength, item.HashAlgorithm); } } catch (StopIoException) { continue; } lock (_thisLock) { if (!_settings.UploadItems.Contains(item)) { foreach (var key in keys) { _cacheManager.Unlock(key); } continue; } foreach (var key in keys) { item.UploadKeys.Add(key); item.LockedKeys.Add(key); } item.EncodeOffset = 0; item.EncodeLength = 0; item.CryptoKey = cryptoKey; item.Keys.AddRange(keys); } } else if (item.Type == UploadType.Share) { item.State = UploadState.ComputeHash; KeyCollection keys = null; try { using (var stream = new UnbufferedFileStream(item.FilePath, FileMode.Open, FileAccess.Read, FileShare.Read, FileOptions.None, _bufferManager)) using (ProgressStream hashProgressStream = new ProgressStream(stream, (object sender, long readSize, long writeSize, out bool isStop) => { isStop = (this.EncodeState == ManagerState.Stop || !_settings.UploadItems.Contains(item)); item.EncodeOffset = Math.Min(readSize, stream.Length); }, 1024 * 1024, true)) { if (stream.Length == 0) throw new InvalidOperationException("Stream Length"); item.Length = stream.Length; item.EncodeLength = stream.Length; keys = _cacheManager.Share(hashProgressStream, stream.Name, item.HashAlgorithm, item.BlockLength); } } catch (StopIoException) { continue; } if (keys.Count == 1) { lock (_thisLock) { if (!_settings.UploadItems.Contains(item)) { foreach (var key in keys) { _cacheManager.Unlock(key); } continue; } item.EncodeOffset = 0; item.EncodeLength = 0; item.UploadKeys.Add(keys[0]); item.Keys.Add(keys[0]); item.State = UploadState.Encoding; } } else { var groups = new List<Group>(); for (int i = 0, remain = keys.Count; 0 < remain; i++, remain -= 256) { var tempKeys = keys.GetRange(i * 256, Math.Min(remain, 256)); var group = new Group(); group.CorrectionAlgorithm = CorrectionAlgorithm.None; group.InformationLength = tempKeys.Count; group.BlockLength = item.BlockLength; group.Length = tempKeys.Sum(n => 
(long)_cacheManager.GetLength(n)); group.Keys.AddRange(tempKeys); groups.Add(group); } lock (_thisLock) { if (!_settings.UploadItems.Contains(item)) { foreach (var key in keys) { _cacheManager.Unlock(key); } continue; } item.EncodeOffset = 0; item.EncodeLength = 0; foreach (var key in keys) { item.UploadKeys.Add(key); } item.Groups.AddRange(groups); item.State = UploadState.Encoding; } } } } else if (item.Groups.Count == 0 && item.Keys.Count == 1) { lock (_thisLock) { if (!_settings.UploadItems.Contains(item)) continue; Metadata metadata = null; { if (item.Type == UploadType.Upload) { metadata = new Metadata(item.Depth, item.Keys[0], item.CompressionAlgorithm, item.CryptoAlgorithm, item.CryptoKey); } else if (item.Type == UploadType.Share) { if (item.Depth == 1) { metadata = new Metadata(item.Depth, item.Keys[0], CompressionAlgorithm.None, CryptoAlgorithm.None, null); } else { metadata = new Metadata(item.Depth, item.Keys[0], item.CompressionAlgorithm, item.CryptoAlgorithm, item.CryptoKey); } } item.Keys.Clear(); } item.Seed = new Seed(metadata); item.Seed.Name = item.Name; item.Seed.Length = item.Length; item.Seed.CreationTime = item.CreationTime; item.Seed.Keywords.AddRange(item.Keywords); if (item.DigitalSignature != null) { item.Seed.CreateCertificate(item.DigitalSignature); } foreach (var key in item.UploadKeys) { _connectionsManager.Upload(key); } { if (item.Type == UploadType.Upload) { _cacheManager.SetSeed(item.Seed.Clone(), item.RetainKeys.ToArray()); } else if (item.Type == UploadType.Share) { _cacheManager.SetSeed(item.Seed.Clone(), item.FilePath, item.RetainKeys.ToArray()); } item.RetainKeys.Clear(); } foreach (var key in item.LockedKeys) { _cacheManager.Unlock(key); } item.LockedKeys.Clear(); item.State = UploadState.Uploading; this.CheckState(item); } } else if (item.Keys.Count > 0) { item.State = UploadState.ParityEncoding; item.EncodeLength = item.Groups.Sum(n => { long sumLength = 0; for (int i = 0; i < n.InformationLength; i++) { if 
(_cacheManager.Contains(n.Keys[i])) { sumLength += (long)_cacheManager.GetLength(n.Keys[i]); } } return sumLength; }) + item.Keys.Sum(n => { if (_cacheManager.Contains(n)) { return (long)_cacheManager.GetLength(n); } return 0; }); var length = Math.Min(item.Keys.Count, 128); var keys = new KeyCollection(item.Keys.Take(length)); Group group = null; try { using (var tokenSource = new CancellationTokenSource()) { var task = _cacheManager.ParityEncoding(keys, item.HashAlgorithm, item.BlockLength, item.CorrectionAlgorithm, tokenSource.Token); while (!task.IsCompleted) { if ((this.EncodeState == ManagerState.Stop || !_settings.UploadItems.Contains(item))) tokenSource.Cancel(); Thread.Sleep(1000); } group = task.Result; } } catch (Exception) { continue; } lock (_thisLock) { if (!_settings.UploadItems.Contains(item)) { foreach (var key in group.Keys.Skip(group.InformationLength)) { _cacheManager.Unlock(key); } continue; } foreach (var key in group.Keys.Skip(group.InformationLength)) { item.UploadKeys.Add(key); item.LockedKeys.Add(key); } foreach (var key in group.Keys.Skip(group.Keys.Count - group.InformationLength)) { item.RetainKeys.Add(key); } item.Groups.Add(group); item.EncodeOffset = item.Groups.Sum(n => { long sumLength = 0; for (int i = 0; i < n.InformationLength; i++) { if (_cacheManager.Contains(n.Keys[i])) { sumLength += (long)_cacheManager.GetLength(n.Keys[i]); } } return sumLength; }); item.Keys.RemoveRange(0, length); } } else if (item.Groups.Count > 0 && item.Keys.Count == 0) { item.State = UploadState.Encoding; var index = new Index(); if (item.Type == UploadType.Upload) { index.Groups.AddRange(item.Groups); index.CompressionAlgorithm = item.CompressionAlgorithm; index.CryptoAlgorithm = item.CryptoAlgorithm; index.CryptoKey = item.CryptoKey; } else if (item.Type == UploadType.Share) { index.Groups.AddRange(item.Groups); if (item.Depth != 1) { index.CompressionAlgorithm = item.CompressionAlgorithm; index.CryptoAlgorithm = item.CryptoAlgorithm; 
index.CryptoKey = item.CryptoKey; } } byte[] cryptoKey = null; KeyCollection keys = null; try { using (var stream = index.Export(_bufferManager)) using (ProgressStream hashProgressStream = new ProgressStream(stream, (object sender, long readSize, long writeSize, out bool isStop) => { isStop = (this.EncodeState == ManagerState.Stop || !_settings.UploadItems.Contains(item)); item.EncodeOffset = Math.Min(readSize, stream.Length); }, 1024 * 1024, true)) using (ProgressStream encodingProgressStream = new ProgressStream(stream, (object sender, long readSize, long writeSize, out bool isStop) => { isStop = (this.EncodeState == ManagerState.Stop || !_settings.UploadItems.Contains(item)); item.EncodeOffset = Math.Min(readSize, stream.Length); }, 1024 * 1024, true)) { item.EncodeLength = stream.Length; item.State = UploadState.ComputeHash; if (item.HashAlgorithm == HashAlgorithm.Sha256) { cryptoKey = Sha256.ComputeHash(hashProgressStream); } stream.Seek(0, SeekOrigin.Begin); item.EncodeOffset = 0; item.State = UploadState.Encoding; keys = _cacheManager.Encoding(encodingProgressStream, item.CompressionAlgorithm, item.CryptoAlgorithm, cryptoKey, item.BlockLength, item.HashAlgorithm); } } catch (StopIoException) { continue; } lock (_thisLock) { if (!_settings.UploadItems.Contains(item)) { foreach (var key in keys) { _cacheManager.Unlock(key); } continue; } foreach (var key in keys) { item.UploadKeys.Add(key); item.LockedKeys.Add(key); } item.EncodeOffset = 0; item.EncodeLength = 0; item.CryptoKey = cryptoKey; item.Keys.AddRange(keys); item.Depth++; item.Groups.Clear(); } } } catch (Exception e) { item.State = UploadState.Error; Log.Error(e); } finally { _workingPaths.Remove(item.FilePath); } } }
// Background worker loop: repeatedly picks one DownloadItem that is ready to be
// decoded and advances it by one depth level — parity-decoding its groups,
// decoding an intermediate Index, or (at the final depth) decoding the file
// itself into the download directory. Runs until DecodeState == ManagerState.Stop.
// NOTE(review): `random` is never used in the visible body — possibly leftover.
private void DecodeThread() { var random = new Random(); for (;;) { Thread.Sleep(1000 * 3); if (this.DecodeState == ManagerState.Stop) return; DownloadItem item = null;
// Pick one decodable item not already held by another worker; items whose depth
// differs from the seed's target depth, and items already in Decoding state,
// are preferred. Register its seed in _workingSeeds while we process it.
try { lock (_thisLock) { if (_settings.DownloadItems.Count > 0) { item = _settings.DownloadItems .Where(n => !_workingSeeds.Contains(n.Seed)) .Where(n => n.State == DownloadState.Decoding || n.State == DownloadState.ParityDecoding) .Where(n => n.Priority != 0) .OrderBy(n => (n.Depth != n.Seed.Metadata.Depth) ? 0 : 1) .OrderBy(n => (n.State == DownloadState.Decoding) ? 0 : 1) .FirstOrDefault(); if (item != null) { _workingSeeds.Add(item.Seed); } } } } catch (Exception) { return; } if (item == null) continue;
// If required blocks are not all present yet, push the item back to Downloading.
try { { if ((item.Depth == 1 && !_cacheManager.Contains(item.Seed.Metadata.Key)) || (item.Depth > 1 && !item.Index.Groups.All(n => _existManager.GetCount(n) >= n.InformationLength))) { item.State = DownloadState.Downloading; } else { var keys = new KeyCollection(); var compressionAlgorithm = CompressionAlgorithm.None; var cryptoAlgorithm = CryptoAlgorithm.None; byte[] cryptoKey = null;
// Depth 1: the single metadata key is the payload. Deeper levels: parity-decode
// every group (cancellable; polled once a second) to recover the block keys.
if (item.Depth == 1) { keys.Add(item.Seed.Metadata.Key); compressionAlgorithm = item.Seed.Metadata.CompressionAlgorithm; cryptoAlgorithm = item.Seed.Metadata.CryptoAlgorithm; cryptoKey = item.Seed.Metadata.CryptoKey; } else {
item.State = DownloadState.ParityDecoding; item.DecodeOffset = 0; item.DecodeLength = item.Index.Groups.Sum(n => n.Length); try { foreach (var group in item.Index.Groups.ToArray()) { using (var tokenSource = new CancellationTokenSource()) { var task = _cacheManager.ParityDecoding(group, tokenSource.Token); while (!task.IsCompleted) { if (this.DecodeState == ManagerState.Stop || !_settings.DownloadItems.Contains(item)) tokenSource.Cancel(); Thread.Sleep(1000); } keys.AddRange(task.Result); } item.DecodeOffset += group.Length; } } catch (Exception) { continue; } compressionAlgorithm = item.Index.CompressionAlgorithm; cryptoAlgorithm = item.Index.CryptoAlgorithm; cryptoKey = item.Index.CryptoKey;
} item.State = DownloadState.Decoding;
// Intermediate depth: decode the recovered keys into a temporary "index" file,
// aborting (StopIoException) on shutdown, item removal, or output growing past
// the seed's declared length.
if (item.Depth < item.Seed.Metadata.Depth) { string fileName = null; bool largeFlag = false; try { item.DecodeOffset = 0; item.DecodeLength = keys.Sum(n => (long)_cacheManager.GetLength(n)); using (var stream = DownloadManager.GetUniqueFileStream(Path.Combine(_workDirectory, "index"))) using (ProgressStream decodingProgressStream = new ProgressStream(stream, (object sender, long readSize, long writeSize, out bool isStop) => { isStop = (this.DecodeState == ManagerState.Stop || !_settings.DownloadItems.Contains(item)); if (!isStop && (stream.Length > item.Seed.Length)) { isStop = true; largeFlag = true; } item.DecodeOffset = writeSize; }, 1024 * 1024, true)) { fileName = stream.Name; _cacheManager.Decoding(decodingProgressStream, compressionAlgorithm, cryptoAlgorithm, cryptoKey, keys); } } catch (StopIoException) { if (File.Exists(fileName)) { File.Delete(fileName); } if (largeFlag) { throw new Exception("size too large."); } continue; } catch (Exception) { if (File.Exists(fileName)) { File.Delete(fileName); } throw; }
// Parse the decoded bytes as the next-level Index, then delete the temp file.
Index index; using (var stream = new UnbufferedFileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read, FileOptions.None, _bufferManager)) { index = Index.Import(stream, _bufferManager); } File.Delete(fileName);
// Swap in the new index under the lock, lock all of its block keys in the
// cache, and send the item back to Downloading one depth deeper.
lock (_thisLock) { if (!_settings.DownloadItems.Contains(item)) continue; item.DecodeOffset = 0; item.DecodeLength = 0; this.UncheckState(item.Index); item.Index = index; this.CheckState(item.Index); foreach (var group in item.Index.Groups) { foreach (var key in group.Keys) { _cacheManager.Lock(key); } } item.Indexes.Add(index); item.Depth++; item.State = DownloadState.Downloading; } }
// Final depth: decode straight into the download directory (item.Path may be
// absolute, relative to BaseDirectory, or absent).
else { item.State = DownloadState.Decoding; string fileName = null; bool largeFlag = false; string downloadDirectory; if (item.Path == null) { downloadDirectory = this.BaseDirectory; } else { if (Path.IsPathRooted(item.Path)) { downloadDirectory = item.Path; } else { downloadDirectory = Path.Combine(this.BaseDirectory, item.Path); } } Directory.CreateDirectory(downloadDirectory);
try { item.DecodeOffset = 0; item.DecodeLength = keys.Sum(n => (long)_cacheManager.GetLength(n)); using (var stream = DownloadManager.GetUniqueFileStream(Path.Combine(downloadDirectory, string.Format("{0}.tmp", DownloadManager.GetNormalizedPath(item.Seed.Name))))) using (ProgressStream decodingProgressStream = new ProgressStream(stream, (object sender, long readSize, long writeSize, out bool isStop) => { isStop = (this.DecodeState == ManagerState.Stop || !_settings.DownloadItems.Contains(item)); if (!isStop && (stream.Length > item.Seed.Length)) { isStop = true; largeFlag = true; } item.DecodeOffset = writeSize; }, 1024 * 1024, true)) { fileName = stream.Name; _cacheManager.Decoding(decodingProgressStream, compressionAlgorithm, cryptoAlgorithm, cryptoKey, keys); if (stream.Length != item.Seed.Length) throw new Exception("Stream.Length != Seed.Length"); } } catch (StopIoException) { if (File.Exists(fileName)) { File.Delete(fileName); } if (largeFlag) { throw new Exception("size too large."); } continue; } catch (Exception) { if (File.Exists(fileName)) { File.Delete(fileName); } throw; }
// Rename the .tmp file to its final (unique) name.
File.Move(fileName, DownloadManager.GetUniqueFilePath(Path.Combine(downloadDirectory, DownloadManager.GetNormalizedPath(item.Seed.Name))));
// Completion bookkeeping: record which cached keys back the seed, publish the
// downloaded seed, and unlock every key this item had pinned in the cache.
lock (_thisLock) { if (!_settings.DownloadItems.Contains(item)) continue; item.DecodeOffset = 0; item.DecodeLength = 0; { var usingKeys = new HashSet<Key>(); foreach (var index in item.Indexes) { foreach (var group in index.Groups) { usingKeys.UnionWith(group.Keys .Where(n => _cacheManager.Contains(n)) .Reverse() .Take(group.InformationLength)); } } _cacheManager.SetSeed(item.Seed.Clone(), usingKeys.ToArray()); } _settings.DownloadedSeeds.Add(item.Seed.Clone()); _cacheManager.Unlock(item.Seed.Metadata.Key); foreach (var index in item.Indexes) { foreach (var group in index.Groups) { foreach (var key in group.Keys) { _cacheManager.Unlock(key); } } }
item.Indexes.Clear(); item.State = DownloadState.Completed; } } } } }
// On failure: touch every block of the item once (reading via the cache indexer
// and returning the buffer) — presumably to let the cache detect/evict corrupt
// blocks (TODO confirm) — then mark the item as errored.
catch (Exception e) { // Check
{ var list = new List<Key>(); list.Add(item.Seed.Metadata.Key); foreach (var index in item.Indexes) { foreach (var group in index.Groups) { foreach (var key in group.Keys) { list.Add(key); } } } foreach (var key in list) { if (this.DecodeState == ManagerState.Stop) return; if (!_cacheManager.Contains(key)) continue; var buffer = new ArraySegment<byte>(); try { buffer = _cacheManager[key]; } catch (Exception) { } finally { if (buffer.Array != null) { _bufferManager.ReturnBuffer(buffer.Array); } } } } item.State = DownloadState.Error; Log.Error(e); } finally { _workingSeeds.Remove(item.Seed); } } }
// 删除旧检索点 public void DeleteKeys(KeyCollection keys) { foreach (KeyItem oneKey in keys) { string strTablePath; strTablePath = this.TableName2TableFileName(oneKey.SqlTableName); XmlDocument domTable = new XmlDocument(); domTable.PreserveWhitespace = true; //设PreserveWhitespace为true domTable.Load(strTablePath); string strXpath = "/root/key[keystring='" + oneKey.Key + "' and fromstring='" + oneKey.FromValue + "' and idstring='" + oneKey.RecordID + "']"; XmlNode nodeKey = domTable.SelectSingleNode(strXpath); if (nodeKey != null) { domTable.DocumentElement.RemoveChild(nodeKey); } else { throw (new Exception("根据xpath'" + strXpath + "'没找到节点,不可能的情况!")); } domTable.Save(strTablePath); } }
// Incrementally decodes the payload of Message from the wire buffer, one queued
// content type at a time. Returns true once the whole payload (and the trailing
// signature, when Message.IsSign) has been consumed. Returns false when the
// buffer does not yet hold enough bytes; partially-decoded progress is kept in
// fields (_neighborSize, _keyCollection, _data, _buffer, ...) so a later call
// with more data can resume exactly where this one stopped.
private bool DecodePayload(AlternativeCompositeByteBuf buffer) { Logger.Debug("About to pass message {0} to {1}. Buffer to read: {2}.", Message, Message.SenderSocket, buffer.ReadableBytes); if (!Message.HasContent()) { return true; } int size; IPublicKey receivedPublicKey;
while (_contentTypes.Count > 0) { Message.Content content = _contentTypes.Peek(); Logger.Debug("Go for content: {0}.", content); switch (content) {
// Fixed-size scalars: only decoded when fully available.
case Message.Content.Integer: if (buffer.ReadableBytes < Utils.Utils.IntegerByteSize) { return false; } Message.SetIntValue(buffer.ReadInt()); LastContent = _contentTypes.Dequeue(); break;
case Message.Content.Long: if (buffer.ReadableBytes < Utils.Utils.LongByteSize) { return false; } Message.SetLongValue(buffer.ReadLong()); LastContent = _contentTypes.Dequeue(); break;
case Message.Content.Key: if (buffer.ReadableBytes < Number160.ByteArraySize) { return false; } var keyBytes = new sbyte[Number160.ByteArraySize]; buffer.ReadBytes(keyBytes); Message.SetKey(new Number160(keyBytes)); LastContent = _contentTypes.Dequeue(); break;
// Bloom filter: peek the length prefix first, then let the filter read itself.
case Message.Content.BloomFilter: if (buffer.ReadableBytes < Utils.Utils.ShortByteSize) { return false; } size = buffer.GetUShort(buffer.ReaderIndex); if (buffer.ReadableBytes < size) { return false; } Message.SetBloomFilter(new SimpleBloomFilter<Number160>(buffer)); LastContent = _contentTypes.Dequeue(); break;
// Neighbor set: byte count prefix, then that many PeerAddress entries; resumes
// from _neighborSet.Size when interrupted mid-list.
case Message.Content.SetNeighbors: if (_neighborSize == -1 && buffer.ReadableBytes < Utils.Utils.ByteByteSize) { return false; } if (_neighborSize == -1) { _neighborSize = buffer.ReadByte(); } if (_neighborSet == null) { _neighborSet = new NeighborSet(-1, new List<PeerAddress>(_neighborSize)); } for (int i = _neighborSet.Size; i < _neighborSize; i++) { if (buffer.ReadableBytes < Utils.Utils.ShortByteSize) { return false; } int header = buffer.GetUShort(buffer.ReaderIndex); size = PeerAddress.CalculateSize(header); if (buffer.ReadableBytes < size) { return false; } var pa = new PeerAddress(buffer); _neighborSet.Add(pa);
} Message.SetNeighborSet(_neighborSet); LastContent = _contentTypes.Dequeue(); _neighborSize = -1; // TODO why here? not in prepareFinish()?
_neighborSet = null; break;
// Peer socket addresses: byte count prefix, then IPv4/IPv6-tagged addresses.
case Message.Content.SetPeerSocket: if (_peerSocketAddressSize == -1 && buffer.ReadableBytes < Utils.Utils.ByteByteSize) { return false; } if (_peerSocketAddressSize == -1) { _peerSocketAddressSize = buffer.ReadUByte(); } if (_peerSocketAddresses == null) { _peerSocketAddresses = new List<PeerSocketAddress>(_peerSocketAddressSize); } for (int i = _peerSocketAddresses.Count; i < _peerSocketAddressSize; i++) { if (buffer.ReadableBytes < Utils.Utils.ByteByteSize) { return false; } int header = buffer.GetUByte(buffer.ReaderIndex); bool isIPv4 = header == 0; // TODO check if works
size = PeerSocketAddress.Size(isIPv4); if (buffer.ReadableBytes < size + Utils.Utils.ByteByteSize) { return false; }
// skip the ipv4/ipv6 header
buffer.SkipBytes(1); _peerSocketAddresses.Add(PeerSocketAddress.Create(buffer, isIPv4)); } Message.SetPeerSocketAddresses(_peerSocketAddresses); LastContent = _contentTypes.Dequeue(); _peerSocketAddressSize = -1; // TODO why here? not in prepareFinish()?
_peerSocketAddresses = null; break;
// Number640 key collection: int count prefix, then four Number160s per key.
case Message.Content.SetKey640: if (_keyCollectionSize == -1 && buffer.ReadableBytes < Utils.Utils.IntegerByteSize) { return false; } if (_keyCollectionSize == -1) { _keyCollectionSize = buffer.ReadInt(); } if (_keyCollection == null) { _keyCollection = new KeyCollection(new List<Number640>(_keyCollectionSize)); } for (int i = _keyCollection.Size; i < _keyCollectionSize; i++) { if (buffer.ReadableBytes < 4 * Number160.ByteArraySize) { return false; } var me = new sbyte[Number160.ByteArraySize]; buffer.ReadBytes(me); var locationKey = new Number160(me); buffer.ReadBytes(me); var domainKey = new Number160(me); buffer.ReadBytes(me); var contentKey = new Number160(me); buffer.ReadBytes(me); var versionKey = new Number160(me); _keyCollection.Add(new Number640(locationKey, domainKey, contentKey, versionKey)); } Message.SetKeyCollection(_keyCollection); LastContent = _contentTypes.Dequeue(); _keyCollectionSize = -1; // TODO why here? not in prepareFinish()?
_keyCollection = null; break;
// Key -> Data map: a partially-decoded _data entry is completed first, then the
// remaining entries are read. (NB: "DeocdeHeader" is the project API's spelling.)
case Message.Content.MapKey640Data: if (_mapSize == -1 && buffer.ReadableBytes < Utils.Utils.IntegerByteSize) { return false; } if (_mapSize == -1) { _mapSize = buffer.ReadInt(); } if (_dataMap == null) { _dataMap = new DataMap(new Dictionary<Number640, Data>(2 * _mapSize)); } if (_data != null) { if (!_data.DecodeBuffer(buffer)) { return false; } if (!_data.DecodeDone(buffer, Message.PublicKey(0), _signatureFactory)) { return false; } _data = null; // TODO why here? not in prepareFinish()?
_key = null; }
for (int i = _dataMap.Size; i < _mapSize; i++) { if (_key == null) { if (buffer.ReadableBytes < 4 * Number160.ByteArraySize) { return false; } var me = new sbyte[Number160.ByteArraySize]; buffer.ReadBytes(me); var locationKey = new Number160(me); buffer.ReadBytes(me); var domainKey = new Number160(me); buffer.ReadBytes(me); var contentKey = new Number160(me); buffer.ReadBytes(me); var versionKey = new Number160(me); _key = new Number640(locationKey, domainKey, contentKey, versionKey); } _data = Data.DeocdeHeader(buffer, _signatureFactory); if (_data == null) { return false; } _dataMap.BackingDataMap.Add(_key, _data); if (!_data.DecodeBuffer(buffer)) { return false; } if (!_data.DecodeDone(buffer, Message.PublicKey(0), _signatureFactory)) { return false; }
// if we have signed the message, set the public key anyway, but only if we indicated so
if (Message.IsSign && Message.PublicKey(0) != null && _data.HasPublicKey && (_data.PublicKey == null || _data.PublicKey == PeerBuilder.EmptyPublicKey)) // TODO check empty key condition
{ _data.SetPublicKey(Message.PublicKey(0)); } _data = null; // TODO why here? not in prepareFinish()?
_key = null; } Message.SetDataMap(_dataMap); LastContent = _contentTypes.Dequeue(); _mapSize = -1; // TODO why here? not in prepareFinish()?
_dataMap = null; break;
// Number640 -> set-of-Number160 map; each entry carries a "based on" key list.
case Message.Content.MapKey640Keys: if (_keyMap640KeysSize == -1 && buffer.ReadableBytes < Utils.Utils.IntegerByteSize) { return false; } if (_keyMap640KeysSize == -1) { _keyMap640KeysSize = buffer.ReadInt(); } if (_keyMap640Keys == null) { _keyMap640Keys = new KeyMap640Keys(new SortedDictionary<Number640, ICollection<Number160>>()); // TODO check TreeMap equivalent
} const int meta = 4 * Number160.ByteArraySize; for (int i = _keyMap640Keys.Size; i < _keyMap640KeysSize; i++) { if (buffer.ReadableBytes < meta + Utils.Utils.ByteByteSize) { return false; } size = buffer.GetUByte(buffer.ReaderIndex + meta); if (buffer.ReadableBytes < meta + Utils.Utils.ByteByteSize + (size * Number160.ByteArraySize)) { return false; } var me = new sbyte[Number160.ByteArraySize]; buffer.ReadBytes(me); var locationKey = new Number160(me); buffer.ReadBytes(me); var domainKey = new Number160(me); buffer.ReadBytes(me); var contentKey = new Number160(me); buffer.ReadBytes(me); var versionKey = new Number160(me); int numBasedOn = buffer.ReadByte(); var value = new HashSet<Number160>(); for (int j = 0; j < numBasedOn; j++) { buffer.ReadBytes(me); var basedOnKey = new Number160(me); value.Add(basedOnKey); } _keyMap640Keys.Put(new Number640(locationKey, domainKey, contentKey, versionKey), value); } Message.SetKeyMap640Keys(_keyMap640Keys); LastContent = _contentTypes.Dequeue(); _keyMap640KeysSize = -1; // TODO why here? not in prepareFinish()?
_keyMap640Keys = null; break;
// Number640 -> byte map.
case Message.Content.MapKey640Byte: if (_keyMapByteSize == -1 && buffer.ReadableBytes < Utils.Utils.IntegerByteSize) { return false; } if (_keyMapByteSize == -1) { _keyMapByteSize = buffer.ReadInt(); } if (_keyMapByte == null) { _keyMapByte = new KeyMapByte(new Dictionary<Number640, sbyte>(2 * _keyMapByteSize)); } for (int i = _keyMapByte.Size; i < _keyMapByteSize; i++) { if (buffer.ReadableBytes < 4 * Number160.ByteArraySize + 1) { return false; } var me = new sbyte[Number160.ByteArraySize]; buffer.ReadBytes(me); var locationKey = new Number160(me); buffer.ReadBytes(me); var domainKey = new Number160(me); buffer.ReadBytes(me); var contentKey = new Number160(me); buffer.ReadBytes(me); var versionKey = new Number160(me); sbyte value = buffer.ReadByte(); _keyMapByte.Put(new Number640(locationKey, domainKey, contentKey, versionKey), value); } Message.SetKeyMapByte(_keyMapByte); LastContent = _contentTypes.Dequeue(); _keyMapByteSize = -1; // TODO why here? not in prepareFinish()?
_keyMapByte = null; break;
// Raw byte buffer: int length prefix, transferred incrementally into _buffer.
case Message.Content.ByteBuffer: if (_bufferSize == -1 && buffer.ReadableBytes < Utils.Utils.IntegerByteSize) { return false; } if (_bufferSize == -1) { _bufferSize = buffer.ReadInt(); } if (_buffer == null) { _buffer = new DataBuffer(); } int already = _buffer.AlreadyTransferred; int remaining = _bufferSize - already; // already finished
if (remaining != 0) { int read = _buffer.TransferFrom(buffer, remaining); if (read != remaining) { Logger.Debug( "Still looking for data. Indicating that its not finished yet. Already Transferred = {0}, Size = {1}.", _buffer.AlreadyTransferred, _bufferSize); return false; } }
ByteBuf buf2 = AlternativeCompositeByteBuf.CompBuffer(_buffer.ToByteBufs()); Message.SetBuffer(new Buffer(buf2, _bufferSize)); LastContent = _contentTypes.Dequeue(); _bufferSize = -1; _buffer = null; break;
// Tracker data: PeerAddress + Data pairs, with the same resume-partial-Data
// handling as MapKey640Data.
case Message.Content.SetTrackerData: if (_trackerDataSize == -1 && buffer.ReadableBytes < Utils.Utils.ByteByteSize) { return false; } if (_trackerDataSize == -1) { _trackerDataSize = buffer.ReadUByte(); } if (_trackerData == null) { _trackerData = new TrackerData(new Dictionary<PeerAddress, Data>(2 * _trackerDataSize)); } if (_currentTrackerData != null) { if (!_currentTrackerData.DecodeBuffer(buffer)) { return false; } if (!_currentTrackerData.DecodeDone(buffer, Message.PublicKey(0), _signatureFactory)) { return false; } _currentTrackerData = null; } for (int i = _trackerData.Size; i < _trackerDataSize; i++) { if (buffer.ReadableBytes < Utils.Utils.ShortByteSize) { return false; } int header = buffer.GetUShort(buffer.ReaderIndex); size = PeerAddress.CalculateSize(header); if (buffer.ReadableBytes < Utils.Utils.ShortByteSize) { return false; } var pa = new PeerAddress(buffer); _currentTrackerData = Data.DeocdeHeader(buffer, _signatureFactory); if (_currentTrackerData == null) { return false; } _trackerData.PeerAddresses.Add(pa, _currentTrackerData); if (Message.IsSign) { _currentTrackerData.SetPublicKey(Message.PublicKey(0)); } if (!_currentTrackerData.DecodeBuffer(buffer)) { return false; } if (!_currentTrackerData.DecodeDone(buffer, Message.PublicKey(0), _signatureFactory)) { return false; } _currentTrackerData = null; // TODO why here?
} Message.SetTrackerData(_trackerData); LastContent = _contentTypes.Dequeue(); _trackerDataSize = -1; _trackerData = null; break;
// Public key (optionally with signature); an empty key is rejected when a
// signature was announced.
case Message.Content.PublicKey: // fall-through
case Message.Content.PublicKeySignature: receivedPublicKey = _signatureFactory.DecodePublicKey(buffer); if (content == Message.Content.PublicKeySignature) { if (receivedPublicKey == PeerBuilder.EmptyPublicKey) // TODO check if works
{ // TODO throw InvalidKeyException
throw new SystemException("The public key cannot be empty."); } } if (receivedPublicKey == null) { return false; } Message.SetPublicKey(receivedPublicKey); LastContent = _contentTypes.Dequeue(); break;
default: break; } }
// All content decoded; if the message is signed, the signature trails the payload.
if (Message.IsSign) { var signatureEncode = _signatureFactory.SignatureCodec; size = signatureEncode.SignatureSize; if (buffer.ReadableBytes < size) { return false; } signatureEncode.Read(buffer); Message.SetReceivedSignature(signatureEncode); } return true; }
void _loginStorage_OnAddItem(object sender, KeyCollection<UserIdentity>.KeyEventArgs e) { _store.Notify(e.Key, this, "LoginStatusChange", e.Key, LogOnStatus.LogOn); }
// 合并检索点 // parameters: // strNewXml 新记录的XML。可以为""或者null // strOldXml 旧记录的XML。可以为""或者null // bOutputDom 是否利用newDom/oldDom顺便输出DOM? // return: // -1 出错 // 0 成功 public int MergeKeys(string strID, string strNewXml, string strOldXml, bool bOutputDom, out KeyCollection newKeys, out KeyCollection oldKeys, out XmlDocument newDom, out XmlDocument oldDom, out string strError) { newKeys = null; oldKeys = null; newDom = null; oldDom = null; strError = ""; int nRet; KeysCfg keysCfg = null; nRet = this.GetKeysCfg(out keysCfg, out strError); if (nRet == -1) return -1; // 根据新xml创建检索点 newKeys = new KeyCollection(); if (String.IsNullOrEmpty(strNewXml) == false) { newDom = new XmlDocument(); newDom.PreserveWhitespace = true; //设PreserveWhitespace为true try { newDom.LoadXml(strNewXml); } catch (Exception ex) { strError = "加载新数据到dom时出错。" + ex.Message; return -1; } if (keysCfg != null) { nRet = keysCfg.BuildKeys(newDom, strID, "zh",//strLang, // "",//strStyle, this.KeySize, out newKeys, out strError); if (nRet == -1) return -1; newKeys.Sort(); newKeys.RemoveDup(); } } oldKeys = new KeyCollection(); if (String.IsNullOrEmpty(strOldXml) == false && strOldXml.Length > 1) // 2012/1/31 { oldDom = new XmlDocument(); oldDom.PreserveWhitespace = true; //设PreserveWhitespace为true try { oldDom.LoadXml(strOldXml); } catch (Exception ex) { strError = "加载旧数据到dom时出错。" + ex.Message; return -1; } if (keysCfg != null) { nRet = keysCfg.BuildKeys(oldDom, strID, "zh",//strLang, // "",//strStyle, this.KeySize, out oldKeys, out strError); if (nRet == -1) return -1; oldKeys.Sort(); oldKeys.RemoveDup(); } } // 新旧检索点碰 KeyCollection dupKeys = new KeyCollection(); dupKeys = KeyCollection.Merge(newKeys, oldKeys); if (bOutputDom == false) { newDom = null; oldDom = null; } return 0; }
public Message PrepareFinish() { Message ret = Message; Message.SetDone(); _contentTypes.Clear(); Message = null; _neighborSize = -1; _neighborSet = null; // TODO set peerSocketAddressSize/peerSocketAddresses -1/null? _keyCollectionSize = -1; _keyCollection = null; _mapSize = -1; _dataMap = null; _data = null; // TODO set _key to null? _keyMap640KeysSize = -1; _keyMap640Keys = null; // TODO set _keyMapBytesSize/list to -1/null? _bufferSize = -1; _buffer = null; // TODO set _trackerDataSize/list to -1/null? // TODO set _signatureFactory to null? return ret; }
public void TestSerialization() { _target.AddKeyAsync(_models[0].Key).Wait(); _target.AddKeyAsync(_models[1].Key).Wait(); Assert.IsTrue(_target.IsDirty, "Dirty flag not set."); _target.FlushAsync().Wait(); Assert.IsFalse(_target.IsDirty, "Dirty flag not reset on flush."); var secondTarget = new KeyCollection<TestModel, int>(_driver, _GetTestModelByKey); // are we able to grab things? Assert.AreEqual(2, secondTarget.Query.Count(), "Key count is incorrect."); Assert.AreEqual(0, _testAccessCount, "Lazy loader was accessed prematurely."); var testKey = (from k in secondTarget.Query where k.Key.Equals(_models[1].Key) select k).FirstOrDefault(); Assert.IsNotNull(testKey, "Test key not retrieved."); Assert.AreEqual(_models[1].Key, testKey.Key, "Key mismatch."); Assert.AreEqual(0, _testAccessCount, "Lazy loader was accessed prematurely."); var testModel = testKey.LazyValue.Value; Assert.AreSame(_models[1], testModel, "Model does not match."); Assert.AreEqual(1, _testAccessCount, "Lazy loader access count is incorrect."); // now let's test refresh secondTarget.AddKeyAsync(_models[2].Key).Wait(); secondTarget.FlushAsync().Wait(); Assert.AreEqual(2, _target.Query.Count(), "Unexpected key count in original collection."); _target.RefreshAsync().Wait(); Assert.AreEqual(3, _target.Query.Count(), "Refresh failed."); }
// Build the collection of index keys (access points) for the given record.
// parameters:
//   domData     record-data DOM; must not be null
//   strRecordID record id; must not be null or empty
//   strLang     language code used to pick localized "from" captions
//   strStyle    style (currently unused)
//   nKeySize    maximum key length; longer keys are truncated
//   keys        out: the generated key collection
//   strError    out: error message
// return:
//   -1  error
//   0   success
public int BuildKeys(XmlDocument domData,
    string strRecordID,
    string strLang,
    // string strStyle,
    int nKeySize,
    out KeyCollection keys,
    out string strError)
{
strError = ""; keys = new KeyCollection(); if (this.dom == null) return 0; if (domData == null) { strError = "BuildKeys()调用错误,domData参数不能为null。"; Debug.Assert(false, strError); return -1; }
// Debug.Assert(strRecordID != null && strRecordID != "", "BuildKeys()调用错误,strRecordID参数不能为null或为空。");
if (String.IsNullOrEmpty(strLang) == true) { strError = "BuildKeys()调用错误,strLang参数不能为null。"; Debug.Assert(false, strError); return -1; }
/* if (String.IsNullOrEmpty(strStyle) == true) { strError = "BuildKeys()调用错误,strStyle参数不能为null。"; Debug.Assert(false, strError); return -1; } * */
if (nKeySize < 0) { strError = "BuildKeys()调用错误,nKeySize参数不能小于0。"; Debug.Assert(false, strError); return -1; } int nRet = 0;
// Find every <key> definition node.
// TODO: if <key> had a fixed, known position the search could be avoided, or the list pre-cached.
XmlNodeList keyList = dom.SelectNodes("//key"); XPathNavigator nav = domData.CreateNavigator();
CREATE_CACHE:
// (Re)build the compiled-XPath expression cache for all non-scripted <xpath> nodes.
if (m_exprCache.Count == 0 && keyList.Count > 0) { for (int i = 0; i < keyList.Count; i++) { XmlNode nodeKey = keyList[i]; XmlElement nodeXPath = (XmlElement)nodeKey.SelectSingleNode("xpath"); if (nodeXPath == null) continue; string strScriptAttr = nodeXPath.GetAttribute("scripting"); if (String.Compare(strScriptAttr, "on", true) == 0) continue; string strXPath = nodeXPath.InnerText.Trim(); if (string.IsNullOrEmpty(strXPath) == true) continue;
// strNstableName == null means the 'nstable' attribute is absent.
string strNstableName = DomUtil.GetAttrDiff(nodeXPath, "nstable"); XmlNamespaceManager nsmgr = (XmlNamespaceManager)this.tableNsClient[nodeXPath];
#if DEBUG
if (nsmgr != null) { Debug.Assert(strNstableName != null, "如果具备名字空间对象,表明<xpath>元素应该有 'nstable' 属性。"); } else { Debug.Assert(strNstableName == null, "如果不具备名字空间对象,表明<xpath>元素必须没有定义 'nstable' 属性。"); }
#endif
XPathExpression expr = nav.Compile(strXPath); if (nsmgr != null) expr.SetContext(nsmgr); m_exprCache[nodeXPath] = expr; } }
string strKey = ""; string strKeyNoProcess = ""; string strFromName = ""; string strFromValue = ""; string strSqlTableName = ""; string strNum = "";
// Process every <key> definition against the record data.
for (int i = 0; i < keyList.Count; i++) { XmlElement nodeKey = (XmlElement)keyList[i]; strKey = ""; strKeyNoProcess = ""; strFromName = ""; strFromValue = ""; strSqlTableName = ""; strNum = "";
// TODO: optimize with GetElementsByTagName
XmlNode nodeFrom = nodeKey.SelectSingleNode("from"); if (nodeFrom != null) strFromValue = nodeFrom.InnerText.Trim();
// 2012/2/16: a <key> without a child <table> node is an error.
XmlNode nodeTable = nodeKey.SelectSingleNode("table"); if (nodeTable == null) { strError = "<key>下级未定义<table>节点。"; return -1; } TableInfo tableInfo = (TableInfo)this.tableTableInfoClient[nodeTable]; Debug.Assert(tableInfo != null, "从Hashtable里取出的tabInfo不可能为null。"); strSqlTableName = tableInfo.SqlTableName.Trim();
// Get the localized "from" caption for this language.
strFromName = tableInfo.GetCaption(strLang);
// All key strings extracted for this <key> definition.
List<string> aKey = new List<string>(); XmlNode nodeXpath = nodeKey.SelectSingleNode("xpath"); string strScriptAttr = ""; if (nodeXpath != null) strScriptAttr = DomUtil.GetAttr(nodeXpath, "scripting");
if (String.Compare(strScriptAttr, "on", true) == 0) {
// Scripted source: run the configured script function to produce the keys.
//aKey.Add("abc");
//string strOutputString = "";
List<String> OutputStrings = null; string strFunctionName = nodeXpath.InnerText.Trim();
// 2012/2/16
nRet = this.DoScriptFunction(domData,
    strFunctionName,
    "", //strInputString
    // out strOutputString,
    out OutputStrings,
    out strError);
if (nRet == -1) return -1;
// 2007/1/23
if (OutputStrings != null) { for (int j = 0; j < OutputStrings.Count; j++) { if (String.IsNullOrEmpty(OutputStrings[j]) == false) { aKey.Add(OutputStrings[j]);
// nCount++;
} } } } else { string strXpath = ""; if (nodeXpath != null) strXpath = nodeXpath.InnerText.Trim();
// 2012/2/16
string strNstableName = DomUtil.GetAttrDiff(nodeXpath, "nstable");
#if NO
XmlNamespaceManager nsmgr = (XmlNamespaceManager)this.tableNsClient[nodeXpath];
#if DEBUG
if (nsmgr != null) { Debug.Assert(strNstableName != null, "此时应该没有定义'nstable'属性。"); } else { Debug.Assert(strNstableName == null, "此时必须没有定义'nstable'属性。"); }
#endif
XPathExpression expr = nav.Compile(strXpath); // TODO 可以优化
if (nsmgr != null) expr.SetContext(nsmgr);
#endif
// Optimization 2012/7/20: use the pre-compiled expression cache; on a cache
// miss, clear and rebuild it.
XPathExpression expr = (XPathExpression)m_exprCache[nodeXpath]; if (expr == null) { this.m_exprCache.Clear(); goto CREATE_CACHE; // TODO: how to prevent an infinite loop here?
}
string strMyKey = ""; if (expr.ReturnType == XPathResultType.Number) { strMyKey = nav.Evaluate(expr).ToString();//Convert.ToString((int)(nav.Evaluate(expr)));
aKey.Add(strMyKey); } else if (expr.ReturnType == XPathResultType.Boolean) { strMyKey = Convert.ToString((bool)(nav.Evaluate(expr))); aKey.Add(strMyKey); } else if (expr.ReturnType == XPathResultType.String) { strMyKey = (string)(nav.Evaluate(expr)); aKey.Add(strMyKey); } else if (expr.ReturnType == XPathResultType.NodeSet) {
// ???????? when the xpath hits several nodes, should several keys be created?
XPathNodeIterator iterator = null; try { iterator = nav.Select(expr); } catch (Exception ex) { string strTempNstableName = ""; if (strNstableName == null) strTempNstableName = "null"; else strTempNstableName = "'" + strNstableName + "'"; strError = "用路径'" + strXpath + "'选节点时出错," + ex.Message + " \r\n使用的名字空间表名为" + strTempNstableName + "。"; return -1; }
if (iterator != null) { while (iterator.MoveNext()) { XPathNavigator navigator = iterator.Current; strMyKey = navigator.Value; if (strMyKey == "") continue; aKey.Add(strMyKey); } } } else { throw (new Exception("XPathExpression的ReturnType为'" + expr.ReturnType.ToString() + "'无效")); } }
// Turn each extracted string into one or more KeyItems, applying the table's
// string/number conversion configuration, then truncating to the size limits.
for (int j = 0; j < aKey.Count; j++) { strKey = aKey[j]; //??????? note: should an empty key still count as a key?
if (strKey == "") continue; strKeyNoProcess = strKey; strNum = "-1"; List<string> outputKeys = new List<string>(); if (tableInfo.nodeConvertKeyString != null) { nRet = ConvertKeyWithStringNode(domData, strKey, tableInfo.nodeConvertKeyString, out outputKeys, out strError); if (nRet == -1) return -1; } else { outputKeys = new List<string>(); outputKeys.Add(strKey); }
for (int k = 0; k < outputKeys.Count; k++) { string strOneKey = outputKeys[k];
// Apply the number-conversion configuration to obtain strNum.
if (tableInfo.nodeConvertKeyNumber != null) { nRet = ConvertKeyWithNumberNode( domData, strOneKey, tableInfo.nodeConvertKeyNumber, out strNum, out strError); if (nRet == -1) return -1; if (nRet == 1) {
// 2010/9/27
strOneKey = strError + " -- " + strOneKey; strNum = "-1"; }
// 2010/11/20
if (String.IsNullOrEmpty(strNum) == true) continue; }
if (strOneKey.Length > nKeySize) strOneKey = strOneKey.Substring(0, nKeySize); if (strNum.Length >= 20) strNum = strNum.Substring(0, 19); KeyItem keyItem = new KeyItem(strSqlTableName, strOneKey, strFromValue, strRecordID, strNum, strKeyNoProcess, strFromName); keys.Add(keyItem); } } } return 0; }
// Background worker loop that drives the upload pipeline for items in the
// Encoding state. Once per second it picks the first such item (under
// _thisLock) and advances it exactly one step, based on its Groups/Keys:
//   * no Groups, no Keys  -> serialize the item's content (Link/Profile/Store
//     for "Broadcast", Message for "Unicast"/"Multicast") to a stream; an
//     empty stream publishes metadata immediately and completes the item,
//     otherwise the stream is encoded into cache-block Keys;
//   * no Groups, one Key  -> build the final Metadata (for "Multicast",
//     mining a proof via Miner on a background task) and publish it together
//     with every accumulated UploadKey;
//   * Keys pending        -> parity-encode the next batch of up to 128 keys
//     into a Group, appending the redundancy keys to UploadKeys/LockedKeys;
//   * Groups, no Keys     -> export the Groups as an Index, re-encode that
//     stream into a new layer of Keys, and increment Depth.
// The loop returns when State == ManagerState.Stop. If the item disappears
// from _settings.UploadItems mid-step, freshly produced keys are Unlock()ed
// in the cache and the step is abandoned. Any unexpected exception marks the
// item as Error, logs it, and removes it.
// NOTE(review): cancellation is polled (Thread.Sleep(1000) between checks),
// and `catch (Exception) { return; }` around the item lookup silently ends
// the thread — presumably intentional shutdown behavior; confirm.
private void EncodeThread() { for (;;) { Thread.Sleep(1000 * 1); if (this.State == ManagerState.Stop) return; BackgroundUploadItem item = null; try { lock (_thisLock) { if (_settings.UploadItems.Count > 0) { item = _settings.UploadItems .Where(n => n.State == BackgroundUploadState.Encoding) .FirstOrDefault(); } } } catch (Exception) { return; } if (item == null) continue; try { if (item.Groups.Count == 0 && item.Keys.Count == 0) { Stream stream = null; try { if (item.Scheme == "Broadcast") { if (item.Type == "Link") { var value = item.Link; if (value == null) throw new FormatException(); stream = ContentConverter.ToStream(value); } else if (item.Type == "Profile") { var value = item.Profile; if (value == null) throw new FormatException(); stream = ContentConverter.ToStream(value); } else if (item.Type == "Store") { var value = item.Store; if (value == null) throw new FormatException(); stream = ContentConverter.ToStream(value); } } else if (item.Scheme == "Unicast") { if (item.Type == "Message") { var value = item.Message; if (value == null) throw new FormatException(); stream = ContentConverter.ToCryptoStream(value, item.ExchangePublicKey); } } else if (item.Scheme == "Multicast") { if (item.Type == "Message") { var value = item.Message; if (value == null) throw new FormatException(); stream = ContentConverter.ToStream(value); } } else { throw new FormatException(); } if (stream.Length == 0) { lock (_thisLock) { if (item.Scheme == "Broadcast") { _connectionsManager.Upload(new BroadcastMetadata(item.Type, item.CreationTime, null, item.DigitalSignature)); } else if (item.Scheme == "Unicast") { _connectionsManager.Upload(new UnicastMetadata(item.Type, item.Signature, item.CreationTime, null, item.DigitalSignature)); } else if (item.Scheme == "Multicast") { _connectionsManager.Upload(new MulticastMetadata(item.Type, item.Tag, item.CreationTime, null, null, item.DigitalSignature)); } item.State = BackgroundUploadState.Completed; } } else { KeyCollection keys = null; 
// Non-empty content: encode into cache blocks. The ProgressStream callback
// raises StopIoException on Stop/removal (caught below -> skip this pass);
// produced keys are tracked in UploadKeys/LockedKeys, or Unlock()ed if the
// item was removed while encoding.
try { using (ProgressStream encodingProgressStream = new ProgressStream(stream, (object sender, long readSize, long writeSize, out bool isStop) => { isStop = (this.State == ManagerState.Stop || !_settings.UploadItems.Contains(item)); }, 1024 * 1024, true)) { if (stream.Length == 0) throw new InvalidOperationException("Stream Length"); encodingProgressStream.Seek(0, SeekOrigin.Begin); item.State = BackgroundUploadState.Encoding; keys = _cacheManager.Encoding(encodingProgressStream, CompressionAlgorithm.None, CryptoAlgorithm.None, null, item.BlockLength, item.HashAlgorithm); } } catch (StopIoException) { continue; } lock (_thisLock) { if (!_settings.UploadItems.Contains(item)) { foreach (var key in keys) { _cacheManager.Unlock(key); } continue; } foreach (var key in keys) { item.UploadKeys.Add(key); item.LockedKeys.Add(key); } item.Keys.AddRange(keys); } } } finally { if (stream != null) stream.Dispose(); } } else if (item.Groups.Count == 0 && item.Keys.Count == 1) { BroadcastMetadata broadcastMetadata = null; UnicastMetadata unicastMetadata = null; MulticastMetadata multicastMetadata = null; { var metadata = new Metadata(item.Depth, item.Keys[0], CompressionAlgorithm.None, CryptoAlgorithm.None, null); if (item.Scheme == "Broadcast") { broadcastMetadata = new BroadcastMetadata(item.Type, item.CreationTime, metadata, item.DigitalSignature); } else if (item.Scheme == "Unicast") { unicastMetadata = new UnicastMetadata(item.Type, item.Signature, item.CreationTime, metadata, item.DigitalSignature); } else if (item.Scheme == "Multicast") { var miner = new Miner(CashAlgorithm.Version1, item.MiningLimit, item.MiningTime); try { var task = Task.Run(() => { return new MulticastMetadata(item.Type, item.Tag, item.CreationTime, metadata, miner, item.DigitalSignature); }); while (!task.IsCompleted) { if (this.State == ManagerState.Stop) miner.Cancel(); lock (_thisLock) { if (!_settings.UploadItems.Contains(item)) { miner.Cancel(); } } Thread.Sleep(1000); } multicastMetadata = 
// task.Result rethrows (wrapped) if mining was cancelled or failed; the
// catch below abandons this pass and the item is retried next iteration.
task.Result; } catch (Exception) { continue; } } } lock (_thisLock) { if (!_settings.UploadItems.Contains(item)) continue; if (item.Scheme == "Broadcast") { _connectionsManager.Upload(broadcastMetadata); } else if (item.Scheme == "Unicast") { _connectionsManager.Upload(unicastMetadata); } else if (item.Scheme == "Multicast") { _connectionsManager.Upload(multicastMetadata); } item.Keys.Clear(); foreach (var key in item.UploadKeys) { _connectionsManager.Upload(key); } item.State = BackgroundUploadState.Uploading; this.CheckState(item); } } else if (item.Keys.Count > 0) { var length = Math.Min(item.Keys.Count, 128); var keys = new KeyCollection(item.Keys.Take(length)); Group group = null; try { using (var tokenSource = new CancellationTokenSource()) { var task = _cacheManager.ParityEncoding(keys, item.HashAlgorithm, item.BlockLength, item.CorrectionAlgorithm, tokenSource.Token); while (!task.IsCompleted) { if (this.State == ManagerState.Stop || !_settings.UploadItems.Contains(item)) tokenSource.Cancel(); Thread.Sleep(1000); } group = task.Result; } } catch (Exception) { continue; } lock (_thisLock) { if (!_settings.UploadItems.Contains(item)) { foreach (var key in group.Keys.Skip(group.InformationLength)) { _cacheManager.Unlock(key); } continue; } foreach (var key in group.Keys.Skip(group.InformationLength)) { item.UploadKeys.Add(key); item.LockedKeys.Add(key); } item.Groups.Add(group); item.Keys.RemoveRange(0, length); } } else if (item.Groups.Count > 0 && item.Keys.Count == 0) { var index = new Index(); index.Groups.AddRange(item.Groups); KeyCollection keys = null; try { using (var stream = index.Export(_bufferManager)) using (ProgressStream encodingProgressStream = new ProgressStream(stream, (object sender, long readSize, long writeSize, out bool isStop) => { isStop = (this.State == ManagerState.Stop || !_settings.UploadItems.Contains(item)); }, 1024 * 1024, true)) { encodingProgressStream.Seek(0, SeekOrigin.Begin); item.State = BackgroundUploadState.Encoding; keys 
// Re-encode the exported Index into the next (deeper) layer of keys;
// Depth++ below records the extra layer so download can unwrap it.
= _cacheManager.Encoding(encodingProgressStream, CompressionAlgorithm.None, CryptoAlgorithm.None, null, item.BlockLength, item.HashAlgorithm); } } catch (StopIoException) { continue; } lock (_thisLock) { if (!_settings.UploadItems.Contains(item)) { foreach (var key in keys) { _cacheManager.Unlock(key); } continue; } foreach (var key in keys) { item.UploadKeys.Add(key); item.LockedKeys.Add(key); } item.Keys.AddRange(keys); item.Depth++; item.Groups.Clear(); } } } catch (Exception e) { item.State = BackgroundUploadState.Error; Log.Error(e); this.Remove(item); } } }
// Applies keyword-index changes for one record as a single parameterized SQL
// batch: rows in keysDelete are removed first, then rows in keysAdd are
// inserted (deletion was moved ahead of insertion 2006/12/8 so a key that is
// both deleted and re-added survives).
//
// connection:   an open SqlConnection the batch executes on.
// keysAdd:      keys to insert; may be null or empty.
// keysDelete:   keys to delete; may be null or empty.
// strError:     receives the failure reason on error.
// return:
//      -1  error
//      0   success (also when both collections are null/empty: no-op)
public int ModifyKeys(SqlConnection connection,
    KeyCollection keysAdd,
    KeyCollection keysDelete,
    out string strError)
{
    strError = "";
    string strCommand = "";

    // The record id is only needed for the error message; take it from
    // whichever collection has content.
    string strRecordID = "";
    if (keysAdd != null && keysAdd.Count > 0)
        strRecordID = ((KeyItem)keysAdd[0]).RecordID;
    else if (keysDelete != null && keysDelete.Count > 0)
        strRecordID = ((KeyItem)keysDelete[0]).RecordID;

    SqlCommand command = new SqlCommand("", connection);

    int i = 0;
    // Parameter names (@key0, @from0, ...) must be unique across the whole
    // batch, so one running index covers both the delete and insert loops.
    int nNameIndex = 0;

    // 2006/12/8 deletions run before additions
    if (keysDelete != null)
    {
        for (i = 0; i < keysDelete.Count; i++)
        {
            KeyItem oneKey = (KeyItem)keysDelete[i];
            // NOTE(review): the table name is concatenated into the SQL text.
            // Key values are parameterized, but SqlTableName is assumed to be
            // an internally generated, trusted identifier — confirm.
            string strKeysTableName = oneKey.SqlTableName;

            string strIndex = Convert.ToString(nNameIndex++);
            string strKeyParamName = "@key" + strIndex;
            string strFromParamName = "@from" + strIndex;
            string strIdParamName = "@id" + strIndex;
            string strKeynumParamName = "@keynum" + strIndex;

            strCommand += " DELETE FROM " + strKeysTableName
                + " WHERE keystring = " + strKeyParamName
                + " AND fromstring= " + strFromParamName
                + " AND idstring= " + strIdParamName
                + " AND keystringnum= " + strKeynumParamName;

            SqlParameter keyParam = command.Parameters.Add(strKeyParamName, SqlDbType.NVarChar);
            keyParam.Value = oneKey.Key;
            SqlParameter fromParam = command.Parameters.Add(strFromParamName, SqlDbType.NVarChar);
            fromParam.Value = oneKey.FromValue;
            SqlParameter idParam = command.Parameters.Add(strIdParamName, SqlDbType.NVarChar);
            idParam.Value = oneKey.RecordID;
            SqlParameter keynumParam = command.Parameters.Add(strKeynumParamName, SqlDbType.NVarChar);
            keynumParam.Value = oneKey.Num;
        }
    }

    if (keysAdd != null)
    {
        for (i = 0; i < keysAdd.Count; i++)
        {
            KeyItem oneKey = (KeyItem)keysAdd[i];
            string strKeysTableName = oneKey.SqlTableName;

            string strIndex = Convert.ToString(nNameIndex++);
            string strKeyParamName = "@key" + strIndex;
            string strFromParamName = "@from" + strIndex;
            string strIdParamName = "@id" + strIndex;
            string strKeynumParamName = "@keynum" + strIndex;

            strCommand += " INSERT INTO " + strKeysTableName
                + " (keystring,fromstring,idstring,keystringnum) "
                + " VALUES(" + strKeyParamName + ","
                + strFromParamName + ","
                + strIdParamName + ","
                + strKeynumParamName + ")";

            SqlParameter keyParam = command.Parameters.Add(strKeyParamName, SqlDbType.NVarChar);
            keyParam.Value = oneKey.Key;
            SqlParameter fromParam = command.Parameters.Add(strFromParamName, SqlDbType.NVarChar);
            fromParam.Value = oneKey.FromValue;
            SqlParameter idParam = command.Parameters.Add(strIdParamName, SqlDbType.NVarChar);
            idParam.Value = oneKey.RecordID;
            SqlParameter keynumParam = command.Parameters.Add(strKeynumParamName, SqlDbType.NVarChar);
            keynumParam.Value = oneKey.Num;
        }
    }

    if (strCommand != "")
    {
        // Switch to the record database for the batch, then back to master.
        strCommand = "use " + this.m_strSqlDbName + " \n"
            + strCommand
            + " use master " + "\n";
        command.CommandText = strCommand;
        try
        {
            command.ExecuteNonQuery();
        }
        catch (Exception ex)
        {
            // FIX: the record-path quote opened before GetCaption() was never
            // closed, producing an unbalanced error message.
            strError = "创建检索点出错,记录路径'" + this.GetCaption("zh-cn") + "/" + strRecordID + "',原因:" + ex.Message;
            return -1;
        }
    }

    return 0;
}
public Message SetKeyCollection(KeyCollection keyCollection) { if (!_presetContentTypes) { SetContentType(Content.SetKey640); } if (_keyCollectionList == null) { _keyCollectionList = new List<KeyCollection>(); } _keyCollectionList.Add(keyCollection); return this; }
// 增加新检索点 public void AddKeys(KeyCollection keys) { foreach (KeyItem oneKey in keys) { string strTablePath; strTablePath = this.TableName2TableFileName(oneKey.SqlTableName); XmlDocument domTable = new XmlDocument(); domTable.PreserveWhitespace = true; //设PreserveWhitespace为true domTable.Load(strTablePath); //新建key节点 XmlNode nodeKey = domTable.CreateElement("key"); XmlNode nodeKeystring = domTable.CreateElement("keystring"); nodeKeystring.InnerText = oneKey.Key; // 2012/2/16 nodeKey.AppendChild(nodeKeystring); XmlNode nodeFromstring = domTable.CreateElement("fromstring"); nodeFromstring.InnerText = oneKey.FromValue; // 2012/2/16 nodeKey.AppendChild(nodeFromstring); XmlNode nodeIdstring = domTable.CreateElement("idstring"); nodeIdstring.InnerText = oneKey.RecordID; // 2012/2/16 nodeKey.AppendChild(nodeIdstring); XmlNode nodeKeystringnum = domTable.CreateElement("keystringnum"); nodeKeystringnum.InnerText = oneKey.Num; // 2012/2/16 nodeKey.AppendChild(nodeKeystringnum); domTable.DocumentElement.AppendChild(nodeKey); domTable.Save(strTablePath); } }