/// <summary>
/// Per-frame driver for active merges. While a pair's normalized timer is
/// below 1 the two objects are animated toward each other; once it completes,
/// the second object is network-destroyed, the survivor is made selectable
/// again, and the pair is queued for removal from the active list.
/// </summary>
public void Update()
{
    // Phase 1: advance every active merge.
    for (int i = 0; i < this.pairs.Count; i++)
    {
        if (this.pairs[i].elapsedTime < 1f)
        {
            // Still animating: pull the two objects together and scale them.
            MoveTo(i);
            Scale(i, 0f, 1f);
            MergeUpdate(i);
            continue;
        }

        // Animation finished: destroy the merged-away object and restore
        // interactivity on the surviving one.
        MergePair pair = this.pairs[i];
        if (pair.second != null)
        {
            Network.Destroy(pair.second);
        }
        if (pair.first != null)
        {
            Selectable select = pair.first.GetComponent<Selectable>();
            if (select != null)
            {
                select.EnableSelection();
                select.Deselect();
            }
            Divisible div = pair.first.GetComponent<Divisible>();
            if (div != null)
            {
                div.SetDivisible(false);
            }
        }
        // Only queue the pair for removal once the networked destroy has
        // taken effect (or either side has gone away). At most one pair is
        // queued per frame, matching the original `break`.
        if (pair.first == null || pair.second == null || pair.confirmedDestroyed)
        {
            this.pendingToRemove.Add(pair);
            break;
        }
    }

    // Phase 2: drop completed pairs from the active list.
    if (this.pendingToRemove.Count > 0)
    {
        foreach (MergePair pair in this.pendingToRemove)
        {
            if (!this.pairs.Contains(pair))
            {
                continue;
            }
            // BUGFIX: pairs reach this queue precisely when first and/or
            // second is null, so pair.first must be guarded before use —
            // the original dereferenced it unconditionally (NRE).
            if (pair.first != null)
            {
                Selectable select = pair.first.GetComponent<Selectable>();
                if (select != null)
                {
                    select.EnableSelection();
                    select.Deselect();
                }
            }
            this.pairs.Remove(pair);
        }
        this.pendingToRemove.Clear();
    }
}
/// <summary>
/// Advances the normalized merge timer of pair <paramref name="i"/> by the
/// frame delta scaled to the configured merge duration, so elapsedTime runs
/// from 0 to 1 over <c>mergeCooldown</c> seconds.
/// </summary>
private void MergeUpdate(int i)
{
    // Read-modify-writeback: MergePair appears to be a value type
    // (the sibling methods use the same pattern) — TODO confirm.
    var updated = this.pairs[i];
    updated.elapsedTime += Time.deltaTime / this.mergeCooldown;
    this.pairs[i] = updated;
}
/// <summary>
/// RPC entry point: resolves the two networked view IDs to their game objects
/// and registers them with the merge manager as a new merge pair.
/// </summary>
public void RPC_AddPair(NetworkViewID firstViewID, NetworkViewID secondViewID)
{
    var firstView = NetworkView.Find(firstViewID);
    var secondView = NetworkView.Find(secondViewID);
    // Keep a local reference to the pair that was just created, then hand it
    // to the manager's active-pair list.
    this.pairReference = new MergePair(firstView.gameObject, secondView.gameObject);
    this.mergeManager.pairs.Add(this.pairReference);
}
/// <summary>
/// Merges the accumulated candidate builders with the search results at
/// <paramref name="currentIndex"/>, producing up to <c>maxCount</c> new
/// builders in ascending cost order. Uses a best-first search over the
/// (builder, result) cost matrix: a min-heap of candidate pairs plus a
/// visited set to avoid re-expanding the same (i, j) position.
/// </summary>
/// <param name="builders">Partial merge candidates so far, sorted by cost (index 0 cheapest).</param>
/// <param name="results">Per-step search results; only results[currentIndex] is consumed here.</param>
/// <param name="currentIndex">Which result column to merge into the builders.</param>
/// <returns>New builders, cheapest first; empty when either input is empty or all candidates exceed the cost slack.</returns>
private List <MergeBuilder> MergeStep(List <MergeBuilder> builders, List <MultiSearchResult> results, int currentIndex)
{
    MultiSearchResult nextResult = results[currentIndex];
    PriorityQueue <MergePair> pairHeap = new PriorityQueue <MergePair>();
    List <MergeBuilder> ret = new List <MultiSearchMerger.MergeBuilder>();
    if ((builders.Count == 0) || (nextResult.Count == 0))
    {
        return(ret);
    }
    // Seed the frontier with the cheapest combination (cheapest builder + cheapest result).
    pairHeap.Add(new MergePair(0, 0, builders[0].Cost + nextResult.GetCost(0)));
    HashSet <int> visited = new HashSet <int>();
    while (ret.Count < maxCount && pairHeap.Count > 0)
    {
        MergePair top = pairHeap.Poll();
        // Prune: once even the lower bound of the remaining cost exceeds the
        // allowed slack over the base cost, no later candidate can qualify.
        if (GetCostLowerBound(top.Cost, currentIndex) - baseCost > costSlack)
        {
            break;
        }
        int i = top.LeftIndex;
        int j = top.RightIndex;
        // Extend builder i with result index j and emit it.
        MergeBuilder nextBuilder = new MergeBuilder(results, builders[i].Indices);
        nextBuilder.Add(j);
        ret.Add(nextBuilder);
        // Expand the two neighbors of (i, j) in the cost matrix, skipping
        // positions already on the frontier or emitted.
        if (i + 1 < builders.Count)
        {
            MergePair newMergePair = new MergePair(i + 1, j, builders[i + 1].Cost + nextResult.GetCost(j));
            int positionValue = GetPositionValue(i + 1, j);
            if (!visited.Contains(positionValue))
            {
                pairHeap.Add(newMergePair);
                visited.Add(positionValue);
            }
        }
        if (j + 1 < nextResult.Count)
        {
            MergePair newMergePair = new MergePair(i, j + 1, builders[i].Cost + nextResult.GetCost(j + 1));
            int positionValue = GetPositionValue(i, j + 1);
            if (!visited.Contains(positionValue))
            {
                pairHeap.Add(newMergePair);
                visited.Add(positionValue);
            }
        }
    }
    return(ret);
}
/// <summary>
/// Merges the accumulated candidate builders with the search results at
/// <paramref name="currentIndex"/>, producing up to <c>MaxCount</c> new
/// builders in ascending cost order via best-first search over the
/// (builder, result) cost matrix.
/// </summary>
/// <param name="builders">Partial merge candidates so far, sorted by cost (index 0 cheapest).</param>
/// <param name="results">Per-step search results; only results[currentIndex] is consumed here.</param>
/// <param name="currentIndex">Which result column to merge into the builders.</param>
/// <returns>New builders, cheapest first; empty when either input is empty or all candidates exceed the cost slack.</returns>
List<MergeBuilder> MergeStep(List<MergeBuilder> builders, List<MultiSearchResult> results, int currentIndex)
{
    var nextResult = results[currentIndex];
    var pairHeap = new PriorityQueue<MergePair>();
    var ret = new List<MergeBuilder>();
    if (builders.Count == 0 || nextResult.Count == 0)
    {
        return ret;
    }
    // CONSISTENCY FIX: this method mixed two heap APIs — Enqueue/Dequeue here
    // but Add(...) in the expansion steps below. The sibling overload uses
    // Add/Poll on the same PriorityQueue<MergePair>, so that API is used
    // uniformly throughout.
    pairHeap.Add(new MergePair(0, 0, builders[0].Cost + nextResult.GetCost(0)));
    var visited = new HashSet<int>();
    while (ret.Count < MaxCount && pairHeap.Count > 0)
    {
        var top = pairHeap.Poll();
        // Prune: once even the lower bound of the remaining cost exceeds the
        // allowed slack over the base cost, no later candidate can qualify.
        if (GetCostLowerBound(top.Cost, currentIndex) - BaseCost > CostSlack)
        {
            break;
        }
        var i = top.LeftIndex;
        var j = top.RightIndex;
        // Extend builder i with result index j and emit it.
        var nextBuilder = new MergeBuilder(results, builders[i].Indices);
        nextBuilder.Add(j);
        ret.Add(nextBuilder);
        // Expand the two neighbors of (i, j), skipping positions already seen.
        if (i + 1 < builders.Count)
        {
            var newMergePair = new MergePair(i + 1, j, builders[i + 1].Cost + nextResult.GetCost(j));
            var positionValue = GetPositionValue(i + 1, j);
            if (!visited.Contains(positionValue))
            {
                pairHeap.Add(newMergePair);
                visited.Add(positionValue);
            }
        }
        if (j + 1 < nextResult.Count)
        {
            var newMergePair = new MergePair(i, j + 1, builders[i].Cost + nextResult.GetCost(j + 1));
            var positionValue = GetPositionValue(i, j + 1);
            if (!visited.Contains(positionValue))
            {
                pairHeap.Add(newMergePair);
                visited.Add(positionValue);
            }
        }
    }
    return ret;
}
/// <summary>
/// Lerps both objects of pair <paramref name="i"/> from their initial
/// positions toward the pair's average point, parameterized by the pair's
/// elapsed merge time (0..1). No-op if either object has been destroyed.
/// </summary>
private void MoveTo(int i)
{
    var pair = this.pairs[i];
    if (pair.first == null || pair.second == null)
    {
        return;
    }
    pair.first.transform.position = Vector3.Lerp(pair.firstInitialPosition, pair.average, pair.elapsedTime);
    pair.second.transform.position = Vector3.Lerp(pair.secondInitialPosition, pair.average, pair.elapsedTime);
    // Write the (value-type) pair back into the list, mirroring the
    // read-modify-writeback pattern used by the other pair methods.
    this.pairs[i] = pair;
}
/// <summary>
/// Adjusts both objects' local scale for pair <paramref name="i"/> by adding
/// a uniform offset interpolated between <paramref name="multiplierFrom"/>
/// and <paramref name="multiplierTo"/> over the pair's elapsed merge time.
/// No-op if either object has been destroyed.
/// </summary>
private void Scale(int i, float multiplierFrom, float multiplierTo)
{
    var pair = this.pairs[i];
    if (pair.first == null || pair.second == null)
    {
        return;
    }
    // Note: the interpolated value is ADDED to the initial scale, not
    // multiplied — matching the original behavior.
    var offset = Mathf.Lerp(multiplierFrom, multiplierTo, pair.elapsedTime);
    var delta = new Vector3(offset, offset, offset);
    pair.first.transform.localScale = pair.firstInitialScale + delta;
    pair.second.transform.localScale = pair.secondInitialScale + delta;
    this.pairs[i] = pair;
}
/// <summary>
/// Compacts all archive files (everything except the current head) into a
/// fresh set of data + hint files, rebuilds the in-memory index from them,
/// swaps the new state in, and finally rotates the old files out on disk.
/// Serialized against concurrent merges via _mergeSyncRoot; index/file-list
/// reads and the final swap are guarded by _indexSyncRoot.
/// </summary>
public void Merge()
{
    lock (_mergeSyncRoot)
    {
        IFirkinFile[] oldFiles;
        IFirkinFile head;
        int recordCount;
        // Snapshot head + archive files under the index lock so the set we
        // merge is consistent; the head keeps receiving writes meanwhile.
        lock (_indexSyncRoot)
        {
            head = _head;
            oldFiles = _files.Values.Where(x => x != head).OrderBy(x => x.FileId).ToArray();
            recordCount = Count;
        }
        _log.DebugFormat("starting merge of {0} files, {1} records (with head at id {2}) in '{3}' ", oldFiles.Length, recordCount, head.FileId, _storeDirectory);
        if (oldFiles.Length == 0)
        {
            // not merging if there is only one archive file
            return;
        }

        // merge current data into new data files and write out accompanying hint files
        ushort fileId = 0;
        var mergePairs = new List <MergePair>();
        MergePair current = null;  // data/hint pair currently being filled; null => open a new one
        uint serial = 0;
        foreach (var file in oldFiles)
        {
            var deleted = 0;
            var outofdate = 0;
            var active = 0;
            foreach (var record in file.GetRecords())
            {
                if (current == null)
                {
                    // Start the next merge output file (serials restart per file).
                    fileId++;
                    serial = 0;
                    current = new MergePair()
                    {
                        Data = FirkinFile.CreateActive(GetMergeDataFilename(fileId), fileId),
                        Hint = new FirkinHintFile(GetMergeHintFilename(fileId))
                    };
                    mergePairs.Add(current);
                }
                if (record.ValueSize == 0)
                {
                    // not including deletes on merge
                    deleted++;
                    continue;
                }
                var key = _serializer.Deserialize(record.Key);
                // TODO: do i need a lock on _index here?
                KeyInfo info;
                if (!_index.TryGetValue(key, out info))
                {
                    // not including record that's no longer in index
                    outofdate++;
                    continue;
                }
                if (info.FileId != file.FileId || info.Serial != record.Serial)
                {
                    // not including out-of-date record (the index points at a newer copy)
                    outofdate++;
                    continue;
                }
                // Record is live: rewrite it with a fresh serial into the
                // current merge file, plus a hint entry for fast reopening.
                var newRecord = record;
                newRecord.Serial = ++serial;
                var valuePosition = current.Data.Write(newRecord);
                current.Hint.WriteHint(newRecord, valuePosition);
                // if our current file is over the maxsize and not about to collide with the head's id ...
                if (current.Data.Size > _maxFileSize && fileId < head.FileId)
                {
                    // ... set it to null, so we can create the next file
                    current = null;
                }
                active++;
            }
            _log.DebugFormat("read {0} records, skipped {1} deleted and {2} outofdate", active, deleted, outofdate);
        }
        _log.DebugFormat("merged {0} file(s) into {1} file(s)", oldFiles.Length, mergePairs.Count);

        // rebuild the index based on new files (from the hint files just written)
        var newIndex = new Dictionary <TKey, KeyInfo>();
        var newFiles = new Dictionary <ushort, IFirkinFile>();
        var mergeFiles = new List <IFirkinFile>();
        var mergedRecords = 0;
        foreach (var pair in mergePairs)
        {
            var file = FirkinFile.OpenArchiveFromActive(pair.Data);
            newFiles.Add(file.FileId, file);
            mergeFiles.Add(file);
            foreach (var hint in pair.Hint)
            {
                var keyInfo = new KeyInfo(pair.Data.FileId, hint);
                var key = _serializer.Deserialize(hint.Key);
                newIndex[key] = keyInfo;
                mergedRecords++;
            }
            pair.Hint.Dispose();
        }
        _log.DebugFormat("read {0} records from hint files", mergedRecords);

        // add records and files not part of merge (head and anything newer),
        // replaying them in file-id order so later writes win and deletes
        // (ValueSize == 0) remove merged entries.
        lock (_indexSyncRoot)
        {
            foreach (var file in _files.Values.Where(x => x.FileId >= head.FileId).OrderBy(x => x.FileId))
            {
                newFiles[file.FileId] = file;
                foreach (var pair in file)
                {
                    var key = _serializer.Deserialize(pair.Key);
                    if (pair.Value.ValueSize == 0)
                    {
                        newIndex.Remove(key);
                    }
                    else
                    {
                        newIndex[key] = pair.Value;
                    }
                }
                _log.DebugFormat("added entries from file {0}: {1}", file.FileId, newIndex.Count);
            }
            _log.DebugFormat("total records in merged index: {0}", newIndex.Count);
            // swap out index and file list
            _index = newIndex;
            _files = newFiles;
        }

        try
        {
            // move old files out of the way
            foreach (var file in oldFiles)
            {
                file.Dispose();
                var oldFile = GetOldDataFilename(file.FileId);
#if DEBUG
                _log.DebugFormat("moving old from {0} to {1}", Path.GetFileName(file.Filename), Path.GetFileName(oldFile));
#endif
                File.Move(file.Filename, oldFile);
                var hintfile = GetHintFilename(file.FileId);
                if (File.Exists(hintfile))
                {
                    var oldHintFile = GetOldHintFilename(file.FileId);
#if DEBUG
                    _log.DebugFormat("moving old hint from {0} to {1}", Path.GetFileName(hintfile), Path.GetFileName(oldHintFile));
#endif
                    File.Move(hintfile, oldHintFile);
                }
            }
            // move new files into place
            foreach (var file in mergeFiles)
            {
#if DEBUG
                _log.DebugFormat("creating file and hint for id {0}", file.FileId);
#endif
                file.Rename(GetDataFilename(file.FileId));
                File.Move(GetMergeHintFilename(file.FileId), GetHintFilename(file.FileId));
            }
            // delete old files
            foreach (var file in oldFiles)
            {
                var oldFile = GetOldDataFilename(file.FileId);
#if DEBUG
                _log.DebugFormat("deleting old file {0}", Path.GetFileName(oldFile));
#endif
                File.Delete(oldFile);
                var hintfile = GetOldHintFilename(file.FileId);
                if (File.Exists(hintfile))
                {
#if DEBUG
                    _log.DebugFormat("deleting old hint file {0}", Path.GetFileName(hintfile));
#endif
                    File.Delete(hintfile);
                }
            }
        }
        catch (Exception e)
        {
            // something went wrong, try to recover to pre-merge state
            // TODO: go back to pre-merge state
            // NOTE(review): the in-memory index has already been swapped at
            // this point, so a failed file rotation leaves disk and memory
            // out of sync — confirm the intended recovery story.
            _log.Warn("Unable to complete merge", e);
        }
    }
    _log.DebugFormat("completed merge in '{0}'", _storeDirectory);
}