/// <summary>
/// Runs a garbage collector on a
/// <see cref="FileRepository">FileRepository</see>. It will
/// <ul>
/// <li>pack loose references into packed-refs</li>
/// <li>repack all reachable objects into new pack files and delete the old
/// pack files</li>
/// <li>prune all loose objects which are now reachable by packs</li>
/// </ul>
/// </summary>
/// <returns>
/// the collection of
/// <see cref="PackFile">PackFile</see>
/// 's which are newly created
/// </returns>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
/// <exception cref="Sharpen.ParseException">
/// If the configuration parameter "gc.pruneexpire" couldn't be
/// parsed
/// </exception>
public virtual ICollection<PackFile> Gc()
{
    // Six logical phases overall; two of them (reflog expire, rerere gc)
    // are still unimplemented below.
    pm.Start(6);
    PackRefs();
    // TODO: implement reflog_expire(pm, repo);
    var createdPacks = Repack();
    // Nothing is explicitly kept alive beyond the packs, hence the empty set.
    Prune(Sharpen.Collections.EmptySet<ObjectId>());
    // TODO: implement rerere_gc(pm);
    return createdPacks;
}
/// <summary>
/// Applies the given stash by merging the stashed work-in-progress commit
/// with the commit it was originally created from.
/// </summary>
/// <param name="monitor">progress feedback for the merge</param>
/// <param name="stash">the stash entry to apply</param>
/// <returns>the result of the tree merge</returns>
internal MergeCommandResult Apply(ProgressMonitor monitor, Stash stash)
{
    monitor.Start(1);
    monitor.BeginTask("Applying stash", 100);
    // Resolve the stash's WIP commit; its first parent is the HEAD the
    // stash was taken from.
    ObjectId stashCommitId = _repo.Resolve(stash.CommitId);
    RevWalk walk = new RevWalk(_repo);
    RevCommit wipCommit = walk.ParseCommit(stashCommitId);
    RevCommit baseCommit = wipCommit.Parents.First();
    walk.ParseHeaders(baseCommit);
    // Merge the stashed tree back onto the current working tree.
    MergeCommandResult mergeResult = GitUtil.MergeTrees(monitor, _repo, baseCommit, wipCommit, "Stash", false);
    monitor.EndTask();
    return mergeResult;
}
/// <summary>
/// Load profiling data from the given path, driving the explorer window's
/// progress display while the data is read.
/// </summary>
/// <param name="path">location of the profiling data to load</param>
private void Load(string path)
{
    Initialize(path);
    var loadStartedAt = DateTime.Now;
    var lastProgressUpdate = loadStartedAt;
    ulong tickCount = 0;
    var progressMonitor = new ProgressMonitor() {
        Start = delegate {
            ProfilerPlugin.Instance.SaveExplorerWindowCaption();
            ProfilerPlugin.Instance.UpdateExplorerWindowProgress(0);
        },
        Stop = delegate {
            ProfilerPlugin.Instance.RestoreExplorerWindowCaption();
        },
        Tick = delegate {
            // Throttle UI work: only react every 1000 ticks, and at most
            // once every half second.
            if (++tickCount % 1000 != 0) {
                return;
            }
            var now = DateTime.Now;
            if ((now - lastProgressUpdate).TotalSeconds < 0.5) {
                return;
            }
            // Synthetic progress: 5% per elapsed second, capped at 99
            // until loading actually completes.
            ProfilerPlugin.Instance.UpdateExplorerWindowProgress((long)Math.Min((now - loadStartedAt).TotalSeconds * 5, 99));
            lastProgressUpdate = now;
        }
    };
    try {
        progressMonitor.Start();
        LoadData(progressMonitor);
    } finally {
        // Always restore the window caption, even if loading throws.
        progressMonitor.Stop();
    }
}
/// <summary>
/// Rebuilds the search index from the inactive cache buffer on a
/// background thread.
/// </summary>
/// <param name="progressMonitor">reports rebuild progress to the caller</param>
/// <returns>a task that completes once the index has been rebuilt and refreshed</returns>
public Task RebuildIndexAsync(ProgressMonitor progressMonitor)
{
    return Task.Run(() => {
        using (var progress = progressMonitor.Start("Rebuilding index..."))
        {
            cache.SwitchBuffer();
            // One converter instance per worker thread. ThreadLocal<T> is
            // IDisposable, so release it once indexing is done (the original
            // code leaked it).
            using (var converter = new ThreadLocal<LogEntryToDocumentConverter>(() => new LogEntryToDocumentConverter()))
            {
                var options = new ParallelOptions { MaxDegreeOfParallelism = 4 };
                // NOTE(review): assumes writer.AddDocument tolerates
                // concurrent calls from multiple worker threads — confirm.
                Parallel.ForEach(cache.ReadDataFromInactiveBuffer(progressMonitor), options, buffered => {
                    writer.AddDocument(converter.Value.ToDocument(buffered));
                });
            }
            writer.Commit();
            searcher.Refresh();
        }
    });
}
/// <summary>
/// Loads one more page of filtered log entries in the given direction and
/// splices it into the list view, then invokes the optional callback.
/// </summary>
/// <param name="progressMonitor">progress feedback while filtering</param>
/// <param name="direction">whether to page backward or forward from the visible rows</param>
/// <param name="afterCallback">optional action run after the page has been requested</param>
private async Task LoadEntriesAsync(ProgressMonitor progressMonitor, Direction direction, Action afterCallback = null)
{
    // Snapshot the current filter settings from the UI.
    var viewFilter = new ViewFilter {
        LogLevels = levels.ToList(),
        CustomFilter = filterTextEntry.Text
    };
    using (var progress = progressMonitor.Start("Filtering log...")) {
        // Anchor paging on the first visible row when going backward,
        // on the last visible row when going forward.
        var anchorId = (direction == Direction.Backward) ? listViewLog.FirstItemId : listViewLog.LastItemId;
        var page = await luceneStore.FilterHistoryViewAsync(filterTextEntry.Text, viewFilter, pageSize, direction, anchorId);
        if (page.Entries.Any()) {
            if (direction == Direction.Backward) {
                ApplicationExtensions.InvokeInUIThread(() => {
                    // Older entries are inserted above, oldest first.
                    foreach (var logEntry in page.Entries.Where(e => e.Id < anchorId).Reverse()) {
                        listViewLog.InsertItem(logEntry, false, maxPageCount * pageSize);
                    }
                });
            } else {
                ApplicationExtensions.InvokeInUIThread(() => {
                    foreach (var logEntry in page.Entries.Where(e => e.Id > anchorId)) {
                        listViewLog.AddItem(logEntry, false, maxPageCount * pageSize);
                    }
                });
            }
        }
    }
    if (afterCallback != null) {
        afterCallback();
    }
}
/// <summary>
/// Runs a full-text search over the log, replaces the list view contents
/// with the result page, and updates the search navigation UI.
/// </summary>
/// <param name="progressMonitor">progress feedback while searching</param>
/// <param name="direction">direction to move within the search results</param>
private async Task SearchEntriesAsync(ProgressMonitor progressMonitor, Direction direction)
{
    using (var progress = progressMonitor.Start("Searching in log...")) {
        // Current filter settings restrict which entries are searched.
        var view = new ViewFilter {
            LogLevels = levels.ToList(),
            CustomFilter = filterTextEntry.Text
        };
        // Request two pages of context around the previous hit; a null
        // reference id starts a fresh search session.
        var entries = await luceneStore.FindAsync(searchTextEntry.Text, view, 2 *pageSize, direction, searchContext != null?searchContext.PreviousResultId : null);
        if (entries.Entries == null) {
            // No hits at all: notify the user and drop any ongoing session.
            ApplicationExtensions.InvokeInUIThread(() => MessageDialog.ShowWarning("No results found!"));
            ResetSearch(false);
            return;
        }
        // First hit starts a session; subsequent calls step through it.
        if (searchContext == null) {
            searchContext = new SearchContext(entries.TotalHits);
        } else {
            searchContext.Advance(direction);
        }
        searchContext.PreviousResultId = entries.FoundId;
        ApplicationExtensions.InvokeInUIThread(() => {
            listViewLog.ClearItems();
            foreach (var entry in entries.Entries) {
                // Highlight the entry that matched the search.
                listViewLog.AddItem(entry, entries.FoundId == entry.Id);
            }
            searchLabel.Text = string.Format("Showing result: {0} / {1}", searchContext.CurrentResult, searchContext.ResultsCount);
            // NOTE(review): "next" is enabled while CurrentResult > 1 and
            // "prev" while CurrentResult < ResultsCount — this looks swapped
            // relative to the counter shown above. The intended navigation
            // direction cannot be confirmed from this file; verify against
            // the button handlers before changing it.
            nextSearchResultButton.Sensitive = (searchContext.CurrentResult > 1);
            prevSearchResultButton.Sensitive = (searchContext.CurrentResult < searchContext.ResultsCount);
        });
    }
}
/// <summary>
/// Re-runs the current filter over the whole log and replaces the list
/// view contents with the newest page of matching entries.
/// </summary>
/// <param name="progressMonitor">progress feedback while filtering</param>
/// <param name="selectLastAddedItem">whether the last inserted row should be selected</param>
private async Task FilterEntriesAsync(ProgressMonitor progressMonitor, bool selectLastAddedItem = false)
{
    using (var progress = progressMonitor.Start("Filtering log...")) {
        // Snapshot the current filter settings from the UI.
        var viewFilter = new ViewFilter {
            LogLevels = levels.ToList(),
            CustomFilter = filterTextEntry.Text
        };
        // The view keeps at most maxPageCount pages of entries.
        var viewCapacity = maxPageCount * pageSize;
        var result = await luceneStore.FilterHistoryViewAsync(filterTextEntry.Text, viewFilter, viewCapacity, Direction.Backward);
        ApplicationExtensions.InvokeInUIThread(() => {
            listViewLog.ClearItems();
            foreach (var logEntry in result.Entries) {
                listViewLog.AddItem(logEntry, selectLastAddedItem, viewCapacity);
            }
            RefreshPaging(result.TotalHits);
        });
    }
}
/// <summary>
/// Lazily streams all log entries stored in the currently inactive cache
/// buffer file, reporting read progress through the monitor. Entries are
/// deserialized on demand as the caller enumerates the sequence.
/// </summary>
/// <param name="progressMonitor">receives periodic progress updates</param>
/// <returns>the deserialized log entries, in file order</returns>
public IEnumerable <LogEntry> ReadDataFromInactiveBuffer(ProgressMonitor progressMonitor)
{
    var monitoredAction = progressMonitor.Start("Reading log entries...", progressable: true);
    // NOTE(review): lastUpdate is assigned but never read in this method —
    // confirm whether it is leftover from an earlier throttling scheme.
    var lastUpdate = CustomDateTime.Now;
    // Serialize access against buffer switching; released in the finally
    // block once enumeration finishes (or is abandoned).
    switchSync.WaitOne();
    try {
        // The inactive buffer is the one NOT currently being written to.
        var bufferPath = Path.Combine(TemporaryFilesManager.Instance.EmulatorTemporaryPath, string.Format("{0}.{1}", BUFFER_FILE, (currentBuffer + 1) % 2));
        using (var inactiveBuffer = File.OpenRead(bufferPath)) {
            var primitiveReader = new PrimitiveReader(inactiveBuffer, false);
            var stopwatch = new Stopwatch();
            stopwatch.Start();
            var entriesCount = 0;
            LogEntry entry;
            while (TryDeserializeEntry(primitiveReader, out entry)) {
                // Report progress at most every 50 ms; progress is the
                // fraction of the buffer file consumed so far.
                if (stopwatch.Elapsed > TimeSpan.FromMilliseconds(50)) {
                    // Interlocked.Add(ref x, 0) performs an atomic read of
                    // the current counter value.
                    monitoredAction.UpdateProgress((int)(100.0 * inactiveBuffer.Position / inactiveBuffer.Length), string.Format("Reading log entries ({0})...", Interlocked.Add(ref entriesCount, 0)));
                    stopwatch.Restart();
                }
                yield return(entry);
                Interlocked.Increment(ref entriesCount);
            }
            monitoredAction.UpdateProgress(100);
        }
    } finally {
        switchSync.Release();
    }
}
/// <summary>
/// Consumes the pack stream, indexing every object, resolving deltas, and
/// finally writing the pack index file. On any IOException the partially
/// written pack and index files are deleted before the error is rethrown.
/// </summary>
/// <param name="progress">receives per-object progress updates</param>
public void index(ProgressMonitor progress)
{
    progress.Start(2 /* tasks */);
    try {
        try {
            ReadPackHeader();
            _entries = new PackedObjectInfo[(int)_objectCount];
            _baseById = new ObjectIdSubclassMap<DeltaChain>();
            _baseByPos = new LongMap<UnresolvedDelta>();
            // Task 1: download/index each object in the pack stream.
            progress.BeginTask(PROGRESS_DOWNLOAD, (int)_objectCount);
            for (int done = 0; done < _objectCount; done++) {
                IndexOneObject();
                progress.Update(1);
                if (progress.IsCancelled) {
                    throw new IOException("Download cancelled");
                }
            }
            ReadPackFooter();
            EndInput();
            progress.EndTask();
            if (_deltaCount > 0) {
                // Delta resolution needs the pack on disk to seek into.
                if (_packOut == null) {
                    throw new IOException("need packOut");
                }
                ResolveDeltas(progress);
                if (_entryCount < _objectCount) {
                    // A thin pack references bases outside the pack; only
                    // repair it when the caller asked for fixing.
                    if (!_fixThin) {
                        throw new IOException("pack has " + (_objectCount - _entryCount) + " unresolved deltas");
                    }
                    FixThinPack(progress);
                }
            }
            if (_packOut != null && (_keepEmpty || _entryCount > 0)) {
                _packOut.Flush();
            }
            // Release parsing state no longer needed before writing the index.
            _packDigest = null;
            _baseById = null;
            _baseByPos = null;
            if (_dstIdx != null && (_keepEmpty || _entryCount > 0)) {
                WriteIdx();
            }
        } finally {
            // Return the inflater to the cache even if releasing throws.
            try {
                InflaterCache.Instance.release(_inflater);
            } finally {
                _inflater = null;
            }
            _windowCursor = WindowCursor.Release(_windowCursor);
            // NOTE(review): EndTask is also called on the success path
            // above, so this may run twice — confirm the ProgressMonitor
            // implementation tolerates that.
            progress.EndTask();
            if (_packOut != null) {
                _packOut.Close();
            }
        }
        // Mark the finished files read-only so they are not modified later.
        if (_keepEmpty || _entryCount > 0) {
            if (_dstPack != null) {
                _dstPack.IsReadOnly = true;
            }
            if (_dstIdx != null) {
                _dstIdx.IsReadOnly = true;
            }
        }
    } catch (IOException) {
        // Indexing failed: remove the partial output files, then rethrow.
        if (_dstPack != null) _dstPack.Delete();
        if (_dstIdx != null) _dstIdx.Delete();
        throw;
    }
}
/// <summary>
/// Creates a new stash from the current index and working directory state,
/// records it in the stash log, and then hard-resets the repository to HEAD.
/// </summary>
/// <param name="monitor">optional progress feedback; may be null</param>
/// <param name="message">stash message; when null or empty the HEAD commit summary is used</param>
/// <returns>the newly created stash entry</returns>
public Stash Create(ProgressMonitor monitor, string message)
{
    if (monitor != null) {
        monitor.Start(1);
        monitor.BeginTask("Stashing changes", 100);
    }
    UserConfig config = _repo.GetConfig().Get(UserConfig.KEY);
    RevWalk rw = new RevWalk(_repo);
    ObjectId headId = _repo.Resolve(Constants.HEAD);
    var parent = rw.ParseCommit(headId);
    PersonIdent author = new PersonIdent(config.GetAuthorName() ?? "unknown", config.GetAuthorEmail() ?? "unknown@(none).");
    if (string.IsNullOrEmpty(message)) {
        // Use the commit summary as message
        message = parent.Abbreviate(7) + " " + parent.GetShortMessage();
        int i = message.IndexOfAny(new char[] { '\r', '\n' });
        if (i != -1) {
            message = message.Substring(0, i);
        }
    }
    // Create the index tree commit
    ObjectInserter inserter = _repo.NewObjectInserter();
    DirCache dc = _repo.ReadDirCache();
    if (monitor != null) {
        monitor.Update(10);
    }
    var tree_id = dc.WriteTree(inserter);
    inserter.Release();
    if (monitor != null) {
        monitor.Update(10);
    }
    string commitMsg = "index on " + _repo.GetBranch() + ": " + message;
    ObjectId indexCommit = GitUtil.CreateCommit(_repo, commitMsg + "\n", new ObjectId[] { headId }, tree_id, author, author);
    if (monitor != null) {
        monitor.Update(20);
    }
    // Create the working dir commit; it has both HEAD and the index
    // commit as parents, the layout "git stash" expects.
    tree_id = WriteWorkingDirectoryTree(parent.Tree, dc);
    commitMsg = "WIP on " + _repo.GetBranch() + ": " + message;
    var wipCommit = GitUtil.CreateCommit(_repo, commitMsg + "\n", new ObjectId[] { headId, indexCommit }, tree_id, author, author);
    if (monitor != null) {
        monitor.Update(20);
    }
    // Chain the new stash to the previous tip of the stash ref, if any.
    string prevCommit = null;
    FileInfo sf = StashRefFile;
    if (sf.Exists) {
        prevCommit = File.ReadAllText(sf.FullName).Trim(' ', '\t', '\r', '\n');
    }
    Stash s = new Stash(prevCommit, wipCommit.Name, author, commitMsg);
    FileInfo stashLog = StashLogFile;
    File.AppendAllText(stashLog.FullName, s.FullLine + "\n");
    File.WriteAllText(sf.FullName, s.CommitId + "\n");
    if (monitor != null) {
        monitor.Update(5);
    }
    // Wipe all local changes
    GitUtil.HardReset(_repo, Constants.HEAD);
    // Bug fix: monitor is optional (nullable) — every other use above is
    // guarded, but EndTask was called unconditionally and threw a
    // NullReferenceException when no monitor was supplied.
    if (monitor != null) {
        monitor.EndTask();
    }
    s.StashCollection = this;
    return s;
}
/// <summary>
/// Consume data from the input stream until the packfile is indexed.
/// </summary>
/// <param name="progress">progress feedback</param>
public void index(ProgressMonitor progress)
{
    progress.Start(2 /* tasks */);
    try {
        try {
            ReadPackHeader();
            _entries = new PackedObjectInfo[(int)_objectCount];
            _baseById = new ObjectIdSubclassMap <DeltaChain>();
            _baseByPos = new LongMap <UnresolvedDelta>();
            // Task 1: download/index each object in the pack stream.
            progress.BeginTask(PROGRESS_DOWNLOAD, (int)_objectCount);
            for (int done = 0; done < _objectCount; done++) {
                IndexOneObject();
                progress.Update(1);
                if (progress.IsCancelled) {
                    throw new IOException("Download cancelled");
                }
            }
            ReadPackFooter();
            EndInput();
            progress.EndTask();
            if (_deltaCount > 0) {
                // Delta resolution needs the pack on disk to seek into.
                if (_packOut == null) {
                    throw new IOException("need packOut");
                }
                ResolveDeltas(progress);
                if (_needBaseObjectIds) {
                    // Preserve the external delta-base ids for the caller
                    // before _baseById is discarded below.
                    _baseIds = new HashSet <ObjectId>();
                    foreach (var c in _baseById) {
                        _baseIds.Add(c);
                    }
                }
                if (_entryCount < _objectCount) {
                    // A thin pack references bases outside the pack; only
                    // repair it when the caller asked for fixing.
                    if (!_fixThin) {
                        throw new IOException("pack has " + (_objectCount - _entryCount) + " unresolved deltas");
                    }
                    FixThinPack(progress);
                }
            }
            if (_packOut != null && (_keepEmpty || _entryCount > 0)) {
                _packOut.Flush();
            }
            // Release parsing state no longer needed before writing the index.
            _packDigest = null;
            _baseById = null;
            _baseByPos = null;
            if (_dstIdx != null && (_keepEmpty || _entryCount > 0)) {
                WriteIdx();
            }
        } finally {
            // Return the inflater to the cache even if releasing throws.
            try {
                InflaterCache.Instance.release(_inflater);
            } finally {
                _inflater = null;
                _objectDatabase.close();
            }
            _windowCursor = WindowCursor.Release(_windowCursor);
            // NOTE(review): EndTask is also called on the success path
            // above, so this may run twice — confirm the ProgressMonitor
            // implementation tolerates that.
            progress.EndTask();
            if (_packOut != null) {
                _packOut.Dispose();
            }
        }
        // Mark the finished files read-only so they are not modified later.
        if (_keepEmpty || _entryCount > 0) {
            if (_dstPack != null) {
                _dstPack.IsReadOnly = true;
            }
            if (_dstIdx != null) {
                _dstIdx.IsReadOnly = true;
            }
        }
    } catch (IOException) {
        // Indexing failed: remove the partial output files, then rethrow.
        if (_dstPack != null) {
            _dstPack.DeleteFile();
        }
        if (_dstIdx != null) {
            _dstIdx.DeleteFile();
        }
        throw;
    }
}