/// <summary>
/// Status entry for a single file: stores the repository and filesystem working
/// directories it was observed under, its path, and its git status, all verbatim.
/// </summary>
public GitFileSystemStatusEntry(String repositoryWorkingDirectory, String filesystemWorkingDirectory, String filePath, FileStatus status)
{
    this.repositoryWorkingDirectory = repositoryWorkingDirectory;
    this.filesystemWorkingDirectory = filesystemWorkingDirectory;
    this.filePath = filePath;
    this.status = status;
}
// NOTE(review): decompiled/obfuscated code. h9kFNwRSkp presumably updates a display text plus a
// numeric state code, and RNaihRhYEl0wUmAftnB.aYu7exFQKN presumably decodes an embedded string
// table entry by id — compare against the readable SetFileStatus variant before relying on this.
public void SetFileStatus(FileStatus status)
{
    switch (status)
    {
        case FileStatus.Waiting:
            this.h9kFNwRSkp(RNaihRhYEl0wUmAftnB.aYu7exFQKN(12286), 0);
            break;
        case FileStatus.Importing:
            this.h9kFNwRSkp(RNaihRhYEl0wUmAftnB.aYu7exFQKN(12304), 1);
            break;
        case FileStatus.Testing:
            this.h9kFNwRSkp(RNaihRhYEl0wUmAftnB.aYu7exFQKN(12326), 4);
            break;
        case FileStatus.DoneOk:
            this.h9kFNwRSkp(RNaihRhYEl0wUmAftnB.aYu7exFQKN(12344), 2);
            break;
        case FileStatus.DoneError:
            this.h9kFNwRSkp(RNaihRhYEl0wUmAftnB.aYu7exFQKN(12356), 3);
            break;
        case FileStatus.Aborted:
            this.h9kFNwRSkp(RNaihRhYEl0wUmAftnB.aYu7exFQKN(12392), 5);
            break;
        default:
            // Unknown enum values are rejected explicitly rather than silently ignored.
            throw new NotSupportedException(RNaihRhYEl0wUmAftnB.aYu7exFQKN(12410) + ((object) status).ToString());
    }
}
/// <summary>
/// Translates a <c>FileStatus</c> into its display text and numeric state code and
/// applies both via <c>SetStatus</c>. Unknown values are rejected with
/// <see cref="NotSupportedException"/>.
/// </summary>
/// <param name="status">Status to display.</param>
/// <exception cref="NotSupportedException">Thrown for any unrecognized status value.</exception>
public void SetFileStatus(FileStatus status)
{
    string text;
    int code;
    switch (status)
    {
        case FileStatus.Waiting:   text = "Waiting";          code = 0; break;
        case FileStatus.Importing: text = "Importing";        code = 1; break;
        case FileStatus.Testing:   text = "Testing";          code = 4; break;
        case FileStatus.DoneOk:    text = "Done";             code = 2; break;
        case FileStatus.DoneError: text = "Done with errors"; code = 3; break;
        case FileStatus.Aborted:   text = "Aborted";          code = 5; break;
        default:
            throw new NotSupportedException("Unknown status - " + status.ToString());
    }
    // Single exit point: every known status funnels through the same call.
    this.SetStatus(text, code);
}
/// <summary>
/// Tree node for a directory: delegates common state to the base and initializes
/// empty child collections for subdirectories, files and commits.
/// </summary>
public TreeDirectory(Repository repository, string relativePath, TreeDirectory parent, FileStatus status, string name)
    : base(repository, relativePath, parent, status, name)
{
    _directories = new List<TreeDirectory>();
    _files = new List<TreeFile>();
    _commits = new List<TreeCommit>();
}
/// <summary>
/// Removing a conflicted path from the index resolves the conflict: the conflict entry
/// disappears, the index shrinks by the expected number of entries, the workdir file is
/// kept or deleted per <paramref name="removeFromWorkdir"/>, and the path appears in
/// ResolvedConflicts afterwards.
/// </summary>
public void CanResolveConflictsByRemovingFromTheIndex( bool removeFromWorkdir, string filename, bool existsBeforeRemove, bool existsAfterRemove, FileStatus lastStatus, int removedIndexEntries)
{
    var path = CloneMergedTestRepo();
    using (var repo = new Repository(path))
    {
        int count = repo.Index.Count;
        string fullpath = Path.Combine(repo.Info.WorkingDirectory, filename);

        // Pre-conditions: workdir presence matches expectation, a conflict exists, nothing resolved yet.
        Assert.Equal(existsBeforeRemove, File.Exists(fullpath));
        Assert.NotNull(repo.Index.Conflicts[filename]);
        Assert.Equal(0, repo.Index.Conflicts.ResolvedConflicts.Count());

        repo.Index.Remove(filename, removeFromWorkdir);

        // Post-conditions: conflict gone, index shrank, file/status/resolved list as expected.
        Assert.Null(repo.Index.Conflicts[filename]);
        Assert.Equal(count - removedIndexEntries, repo.Index.Count);
        Assert.Equal(existsAfterRemove, File.Exists(fullpath));
        Assert.Equal(lastStatus, repo.Index.RetrieveStatus(filename));
        Assert.Equal(1, repo.Index.Conflicts.ResolvedConflicts.Count());
        Assert.NotNull(repo.Index.Conflicts.ResolvedConflicts[filename]);
    }
}
// Executes GET requests for all files: builds a status item per stored file row
// and returns it to the client plugin as an ActionResult.
private static ActionResult GetHandler(IFileHandler handler)
{
    IFileStatus status = new FileStatus();
    using (FilesModel db = new FilesModel())
    {
        foreach (var row in db.Files)
        {
            // The same URL (keyed by the row id) serves both the GET download and the DELETE.
            string url = handler.Context.Request.Url.OriginalString + "?fileName=" + row.Id.ToString();
            IFileStatusItem file = new FileStatusItem()
            {
                ContentType = row.Type,
                DeleteType = "DELETE",
                FileName = row.Name,
                FileSize = row.Size,
                OriginalName = row.Original,
                Progress = "100",  // file is already stored, so progress is always 100
                Success = true,
                ThumbnailUrl = row.Preview,
                // Set an identifier for GET and DELETE requests
                DeleteUrl = url,
                FileUrl = url
            };
            status.Files.Add(file);
        }
    }
    handler.FileStatus = status;
    // Create client plugin specific result and return an ActionResult
    IBackloadResult result = handler.Services.Core.CreatePluginResult();
    return ResultCreator.Create((IFileStatusResult)result);
}
/// <summary>
/// Removing an unaltered file deletes it from the index; whether the working-directory
/// copy is also deleted depends on <paramref name="removeFromWorkdir"/>. Some parameter
/// combinations are expected to throw RemoveFromIndexException and leave the index untouched.
/// </summary>
public void CanRemoveAnUnalteredFileFromTheIndexWithoutRemovingItFromTheWorkingDirectory( bool removeFromWorkdir, string filename, bool throws, FileStatus initialStatus, bool existsBeforeRemove, bool existsAfterRemove, FileStatus lastStatus)
{
    string path = SandboxStandardTestRepo();
    using (var repo = new Repository(path))
    {
        int count = repo.Index.Count;
        string fullpath = Path.Combine(repo.Info.WorkingDirectory, filename);

        // Pre-conditions for the scenario under test.
        Assert.Equal(initialStatus, repo.RetrieveStatus(filename));
        Assert.Equal(existsBeforeRemove, File.Exists(fullpath));

        if (throws)
        {
            // A failed removal must not change the index entry count.
            Assert.Throws<RemoveFromIndexException>(() => repo.Remove(filename, removeFromWorkdir));
            Assert.Equal(count, repo.Index.Count);
        }
        else
        {
            repo.Remove(filename, removeFromWorkdir);
            Assert.Equal(count - 1, repo.Index.Count);
            Assert.Equal(existsAfterRemove, File.Exists(fullpath));
            Assert.Equal(lastStatus, repo.RetrieveStatus(filename));
        }
    }
}
/// <summary>
/// Directory node: stores its short display name and parent link, delegates common
/// state to the base, and initializes empty child file/directory lists.
/// </summary>
public TreeDirectoryData(string name, string shortName, TreeDirectoryData parent, FileStatus fileStatus, StagedStatus stagedStatus)
    : base(name, fileStatus, stagedStatus)
{
    _parent = parent;
    ShortName = shortName;
    _files = new List<TreeFileData>();
    _directories = new List<TreeDirectoryData>();
}
/// <summary>
/// With strict (default) <c>ExplicitPathsOptions</c>, comparing the working directory
/// against the index using a pathspec that matches nothing throws
/// <c>UnmatchedPathException</c>.
/// </summary>
public void ComparingTheWorkDirAgainstTheIndexWithStrictUnmatchedExplicitPathsValidationAndANonExistentPathspecThrows(string relativePath, FileStatus currentStatus)
{
    using (var repository = new Repository(StandardTestRepoPath))
    {
        // The path must report the expected status before the comparison is attempted.
        Assert.Equal(currentStatus, repository.Index.RetrieveStatus(relativePath));

        var pathspecs = new[] { relativePath };
        Assert.Throws<UnmatchedPathException>(
            () => repository.Diff.Compare<TreeChanges>(pathspecs, false, new ExplicitPathsOptions()));
    }
}
/// <summary>
/// Creates a result record with empty strings, a <c>None</c> status, a zero status-info
/// code, and the current UTC time as the last-open timestamp.
/// </summary>
public TaskResultData()
{
    sKey = string.Empty;
    sFormat = string.Empty;
    sTitle = string.Empty;
    eStatus = FileStatus.None;
    nStatusInfo = 0;
    // UTC keeps the timestamp timezone-independent.
    oLastOpenDate = DateTime.UtcNow;
}
public FileEventArgs(FileInfo file, FileStatus status, int objectCount) { eX4XcIhHpDXt70u2x3N.k8isAcYzkUOGF(); // ISSUE: explicit constructor call base.\u002Ector(); this.ATYdgQ6h0H = file; this.Q7OdM6TVN3 = status; this.awIdJYrTF0 = objectCount; }
/// <summary>
/// Staging an unknown path with strict (default) ExplicitPathsOptions throws
/// UnmatchedPathException.
/// </summary>
public void StagingAnUnknownFileThrowsIfExplicitPath(string relativePath, FileStatus status)
{
    using (var repo = new Repository(StandardTestRepoPath))
    {
        // The path is genuinely unknown to the index.
        Assert.Null(repo.Index[relativePath]);
        Assert.Equal(status, repo.Index.RetrieveStatus(relativePath));
        Assert.Throws<UnmatchedPathException>(() => repo.Index.Stage(relativePath, new StageOptions { ExplicitPathsOptions = new ExplicitPathsOptions() }));
    }
}
/// <summary>
/// Feeds a raw status output line through the Status command parser and checks the
/// resulting (file, status) pairs against the expected arrays, element-wise.
/// </summary>
public void ParsingIndividualLines_WorksCorrectly(string statusLine, string[] parsedFiles, FileStatus[] parsedStatuses)
{
    var app = new ApplicationDefinition(null, string.Empty);
    var command = new Commands.Status();
    // Tuple: Item1 = exit code (0 = success), Item2 = the raw line to parse.
    var status = command.ProcessResult(app, new Tuple<int, string>(0, statusLine));
    // Zip file names with their statuses to build the expected result set.
    var expected = parsedFiles.Zip(parsedStatuses, (name, fileStatus) => new Status(name, fileStatus)).ToArray();
    Assert.That(status.ToArray(), Is.EquivalentTo(expected));
}
/// <summary>
/// Add and stage specified file.
/// </summary>
/// <param name="fileName">Name of the file to be added to LibGit2Sharp, then staged ready for commit.</param>
/// <param name="status">Current status of the file; Removed/Missing files are not re-added to the index.</param>
public void Add(string fileName, FileStatus status)
{
    using (var repo = new Repository(_LocalRepo))
    {
        // Don't re-add files that are gone from the working directory. Stage below still
        // runs for them — presumably to record the deletion in the index; confirm.
        if (status != FileStatus.Removed && status != FileStatus.Missing)
        {
            repo.Index.Add(fileName);
        }
        repo.Stage(fileName);
    }
}
/// <summary>
/// Plain data holder describing a file or directory entry; every argument is stored verbatim.
/// (Note: parameter names are PascalCase, mirroring the fields they populate.)
/// </summary>
public MyFile(string Name, string Path, long Size, FileStatus status, DateTime LastWriteTime, bool IsDirectory, int FileId, int ParentDirectoryId)
{
    this.Name = Name;
    this.Path = Path;
    this.Size = Size;
    this.status = status;
    this.LastWriteTime = LastWriteTime;
    this.IsDirectory = IsDirectory;
    this.FileId = FileId;
    this.ParentDirectoryId = ParentDirectoryId;
}
/// <summary>
/// RetrieveStatus honours StatusOptions.Show: a freshly touched workdir file reports
/// the state expected for the given show option.
/// </summary>
public void CanLimitStatusToWorkDirOnly(StatusShowOption show, FileStatus expected)
{
    var clone = SandboxStandardTestRepo();
    using (var repo = new Repository(clone))
    {
        // Create a new untracked file in the working directory.
        Touch(repo.Info.WorkingDirectory, "file.txt", "content");
        RepositoryStatus status = repo.RetrieveStatus(new StatusOptions() { Show = show });
        Assert.Equal(expected, status["file.txt"].State);
    }
}
/// <summary>
/// Base tree node: records its parent, relative path and status, and inherits the
/// parent's staged status when a parent is supplied.
/// </summary>
protected TreeItem(Repository repository, string relativePath, TreeDirectory parent, FileStatus status, string name)
    : base(repository, name)
{
    _parent = parent;
    if(parent != null)
    {
        // Child nodes share the staged status of their parent directory.
        _stagedStatus = parent._stagedStatus;
    }
    _status = status;
    _relativePath = relativePath;
}
/// <summary>
/// With lax validation (ShouldFailOnUnmatchedPath = false) staging an unknown path is a
/// no-op rather than an error, and the path's status stays unchanged.
/// </summary>
public void CanStageAnUnknownFileWithLaxUnmatchedExplicitPathsValidation(string relativePath, FileStatus status)
{
    using (var repo = new Repository(StandardTestRepoPath))
    {
        Assert.Null(repo.Index[relativePath]);
        Assert.Equal(status, repo.Index.RetrieveStatus(relativePath));
        // Both the default overload and the explicitly lax options must not throw.
        Assert.DoesNotThrow(() => repo.Index.Stage(relativePath));
        Assert.DoesNotThrow(() => repo.Index.Stage(relativePath, new StageOptions { ExplicitPathsOptions = new ExplicitPathsOptions { ShouldFailOnUnmatchedPath = false } }));
        // Status is untouched after both attempts.
        Assert.Equal(status, repo.Index.RetrieveStatus(relativePath));
    }
}
/// <summary>
/// Comparing workdir vs index with a lax unmatched pathspec yields an empty change set,
/// both with explicit lax options and with the option-less overload.
/// </summary>
public void CanCompareTheWorkDirAgainstTheIndexWithLaxUnmatchedExplicitPathsValidation(string relativePath, FileStatus currentStatus)
{
    using (var repo = new Repository(StandardTestRepoPath))
    {
        Assert.Equal(currentStatus, repo.Index.RetrieveStatus(relativePath));
        var changes = repo.Diff.Compare<TreeChanges>(new[] { relativePath }, false, new ExplicitPathsOptions { ShouldFailOnUnmatchedPath = false });
        Assert.Equal(0, changes.Count());
        // The overload without options behaves the same (no strict validation).
        changes = repo.Diff.Compare<TreeChanges>(new[] { relativePath });
        Assert.Equal(0, changes.Count());
    }
}
/// <summary>
/// Index.Move relocates an entry: source and destination report the expected statuses
/// both before and after the move.
/// </summary>
public void CanMoveAnExistingFileOverANonExistingFile(string sourcePath, FileStatus sourceStatus, string destPath, FileStatus destStatus, FileStatus sourcePostStatus, FileStatus destPostStatus)
{
    string path = CloneStandardTestRepo();
    using (var repo = new Repository(path))
    {
        Assert.Equal(sourceStatus, repo.Index.RetrieveStatus(sourcePath));
        Assert.Equal(destStatus, repo.Index.RetrieveStatus(destPath));
        repo.Index.Move(sourcePath, destPath);
        Assert.Equal(sourcePostStatus, repo.Index.RetrieveStatus(sourcePath));
        Assert.Equal(destPostStatus, repo.Index.RetrieveStatus(destPath));
    }
}
/// <summary>
/// Index.Add transitions a workdir file from its expected pre-add status to the
/// expected post-add status.
/// </summary>
public void CanAddAnEntryToTheIndexFromAFileInTheWorkdir(string pathInTheWorkdir, FileStatus expectedBeforeStatus, FileStatus expectedAfterStatus)
{
    var path = SandboxStandardTestRepoGitDir();
    using (var repo = new Repository(path))
    {
        var before = repo.RetrieveStatus(pathInTheWorkdir);
        Assert.Equal(expectedBeforeStatus, before);
        repo.Index.Add(pathInTheWorkdir);
        var after = repo.RetrieveStatus(pathInTheWorkdir);
        Assert.Equal(expectedAfterStatus, after);
    }
}
/// <summary>
/// Index.Move relocates an entry — older test-fixture variant using
/// TemporaryCloneOfTestRepo; statuses are checked before and after the move.
/// </summary>
public void CanMoveAnExistingFileOverANonExistingFile(string sourcePath, FileStatus sourceStatus, string destPath, FileStatus destStatus, FileStatus sourcePostStatus, FileStatus destPostStatus)
{
    TemporaryCloneOfTestRepo path = BuildTemporaryCloneOfTestRepo(StandardTestRepoWorkingDirPath);
    using (var repo = new Repository(path.RepositoryPath))
    {
        Assert.Equal(sourceStatus, repo.Index.RetrieveStatus(sourcePath));
        Assert.Equal(destStatus, repo.Index.RetrieveStatus(destPath));
        repo.Index.Move(sourcePath, destPath);
        Assert.Equal(sourcePostStatus, repo.Index.RetrieveStatus(sourcePath));
        Assert.Equal(destPostStatus, repo.Index.RetrieveStatus(destPath));
    }
}
/// <summary>
/// When OnUnmatchedPath is supplied together with lax validation, the callback fires for
/// a pathspec that matches nothing, instead of an exception being thrown.
/// </summary>
public void CallbackForUnmatchedExplicitPathsIsCalledWhenSet(string relativePath, FileStatus currentStatus)
{
    var callback = new AssertUnmatchedPathspecsCallbackIsCalled();
    using (var repo = new Repository(StandardTestRepoPath))
    {
        Assert.Equal(currentStatus, repo.Index.RetrieveStatus(relativePath));
        repo.Diff.Compare(new[] { relativePath }, false, new ExplicitPathsOptions { ShouldFailOnUnmatchedPath = false, OnUnmatchedPath = callback.OnUnmatchedPath });
        Assert.True(callback.WasCalled);
    }
}
/// <summary>
/// Commands.Stage moves a path to the expected staged status and changes the index
/// entry count by the expected delta (e.g. +1 for a new file, 0 for a modification).
/// </summary>
public void CanStage(string relativePath, FileStatus currentStatus, bool doesCurrentlyExistInTheIndex, FileStatus expectedStatusOnceStaged, bool doesExistInTheIndexOnceStaged, int expectedIndexCountVariation)
{
    string path = SandboxStandardTestRepo();
    using (var repo = new Repository(path))
    {
        int count = repo.Index.Count;

        // Pre-conditions: index membership and status before staging.
        Assert.Equal(doesCurrentlyExistInTheIndex, (repo.Index[relativePath] != null));
        Assert.Equal(currentStatus, repo.RetrieveStatus(relativePath));

        Commands.Stage(repo, relativePath);

        // Post-conditions: count delta, index membership, and new status.
        Assert.Equal(count + expectedIndexCountVariation, repo.Index.Count);
        Assert.Equal(doesExistInTheIndexOnceStaged, (repo.Index[relativePath] != null));
        Assert.Equal(expectedStatusOnceStaged, repo.RetrieveStatus(relativePath));
    }
}
/// <summary>
/// Index.Stage moves a path to the expected staged status and changes the index entry
/// count by the expected delta — older fixture/API variant using
/// TemporaryCloneOfTestRepo and repo.Index.Stage.
/// </summary>
public void CanStage(string relativePath, FileStatus currentStatus, bool doesCurrentlyExistInTheIndex, FileStatus expectedStatusOnceStaged, bool doesExistInTheIndexOnceStaged, int expectedIndexCountVariation)
{
    TemporaryCloneOfTestRepo path = BuildTemporaryCloneOfTestRepo(StandardTestRepoWorkingDirPath);
    using (var repo = new Repository(path.RepositoryPath))
    {
        int count = repo.Index.Count;

        // Pre-conditions: index membership and status before staging.
        Assert.Equal(doesCurrentlyExistInTheIndex, (repo.Index[relativePath] != null));
        Assert.Equal(currentStatus, repo.Index.RetrieveStatus(relativePath));

        repo.Index.Stage(relativePath);

        // Post-conditions: count delta, index membership, and new status.
        Assert.Equal(count + expectedIndexCountVariation, repo.Index.Count);
        Assert.Equal(doesExistInTheIndexOnceStaged, (repo.Index[relativePath] != null));
        Assert.Equal(expectedStatusOnceStaged, repo.Index.RetrieveStatus(relativePath));
    }
}
/// <summary>
/// Dialog showing the two sides of a merge conflict (ours/theirs) with colour-coded
/// status labels; stores the two resolution choices offered to the user.
/// </summary>
public ConflictResolutionDialog(string fileName, FileStatus oursStatus, FileStatus theirsStatus, ConflictResolution resolution1, ConflictResolution resolution2)
{
    InitializeComponent();
    Text = Resources.StrConflictResolution;
    _lblFileName.Text = fileName;
    // Render each side's status as text plus a status-specific background colour.
    _lblOursStatus.Text = StatusToString(oursStatus);
    _lblTheirsStatus.Text = StatusToString(theirsStatus);
    _lblOursStatus.BackColor = StatusToColor(oursStatus);
    _lblTheirsStatus.BackColor = StatusToColor(theirsStatus);
    _resolution1 = resolution1;
    _resolution2 = resolution2;
}
/// <summary>
/// Applies the non-null members of <paramref name="oUpdate"/> to this record; members
/// that are null are left unchanged. A null <paramref name="oUpdate"/> is a no-op.
/// </summary>
public void Update(TaskResultDataToUpdate oUpdate)
{
    if (oUpdate == null)
    {
        return; // nothing to apply
    }
    if (oUpdate.eStatus != null)
    {
        this.eStatus = (FileStatus)oUpdate.eStatus;
    }
    if (oUpdate.oLastOpenDate.HasValue)
    {
        this.oLastOpenDate = oUpdate.oLastOpenDate.Value;
    }
    if (oUpdate.nStatusInfo != null)
    {
        this.nStatusInfo = (int)oUpdate.nStatusInfo;
    }
    if (oUpdate.sFormat != null)
    {
        this.sFormat = oUpdate.sFormat;
    }
    if (oUpdate.sTitle != null)
    {
        this.sTitle = oUpdate.sTitle;
    }
}
/// <summary>
/// Verifies that RefreshJobRetentionSettings picks up a new retention/cleaner
/// configuration: with a 24h retention only yesterday's jhist file is deleted; after the
/// retention is lowered to 10s and the settings refreshed, today's file is deleted too.
/// The whole scenario is driven through Mockito spies — no real filesystem I/O.
/// </summary>
public virtual void TestRefreshJobRetentionSettings()
{
    string root = "mockfs://foo/";
    string historyDoneDir = root + "mapred/history/done";
    long now = Runtime.CurrentTimeMillis();
    long someTimeYesterday = now - (25l * 3600 * 1000);
    long timeBefore200Secs = now - (200l * 1000);
    // Get yesterday's date in YY/MM/DD format
    string timestampComponent = JobHistoryUtils.TimestampDirectoryComponent(someTimeYesterday);
    // Create a folder under yesterday's done dir
    Path donePathYesterday = new Path(historyDoneDir, timestampComponent + "/" + "000000");
    FileStatus dirCreatedYesterdayStatus = new FileStatus(0, true, 0, 0, someTimeYesterday, donePathYesterday);
    // Get today's date in YY/MM/DD format
    timestampComponent = JobHistoryUtils.TimestampDirectoryComponent(timeBefore200Secs);
    // Create a folder under today's done dir
    Path donePathToday = new Path(historyDoneDir, timestampComponent + "/" + "000000");
    FileStatus dirCreatedTodayStatus = new FileStatus(0, true, 0, 0, timeBefore200Secs, donePathToday);
    // Create a jhist file with yesterday's timestamp under yesterday's done dir
    Path fileUnderYesterdayDir = new Path(donePathYesterday.ToString(), "job_1372363578825_0015-" + someTimeYesterday + "-user-Sleep+job-" + someTimeYesterday + "-1-1-SUCCEEDED-default.jhist");
    FileStatus fileUnderYesterdayDirStatus = new FileStatus(10, false, 0, 0, someTimeYesterday, fileUnderYesterdayDir);
    // Create a jhist file with today's timestamp under today's done dir
    // NOTE(review): this path is built from donePathYesterday, not donePathToday — probably a
    // copy/paste artifact; harmless here because the directory scans are stubbed per path below.
    Path fileUnderTodayDir = new Path(donePathYesterday.ToString(), "job_1372363578825_0016-" + timeBefore200Secs + "-user-Sleep+job-" + timeBefore200Secs + "-1-1-SUCCEEDED-default.jhist");
    FileStatus fileUnderTodayDirStatus = new FileStatus(10, false, 0, 0, timeBefore200Secs, fileUnderTodayDir);
    HistoryFileManager historyManager = Org.Mockito.Mockito.Spy(new HistoryFileManager());
    jobHistory = Org.Mockito.Mockito.Spy(new JobHistory());
    IList<FileStatus> fileStatusList = new List<FileStatus>();
    fileStatusList.AddItem(dirCreatedYesterdayStatus);
    fileStatusList.AddItem(dirCreatedTodayStatus);
    // Make the initial delay of history job cleaner as 4 secs
    Org.Mockito.Mockito.DoReturn(4).When(jobHistory).GetInitDelaySecs();
    Org.Mockito.Mockito.DoReturn(historyManager).When(jobHistory).CreateHistoryFileManager();
    // Stub the directory scans: each done dir yields exactly its one jhist file.
    IList<FileStatus> list1 = new List<FileStatus>();
    list1.AddItem(fileUnderYesterdayDirStatus);
    Org.Mockito.Mockito.DoReturn(list1).When(historyManager).ScanDirectoryForHistoryFiles(Eq(donePathYesterday), Any<FileContext>());
    IList<FileStatus> list2 = new List<FileStatus>();
    list2.AddItem(fileUnderTodayDirStatus);
    Org.Mockito.Mockito.DoReturn(list2).When(historyManager).ScanDirectoryForHistoryFiles(Eq(donePathToday), Any<FileContext>());
    Org.Mockito.Mockito.DoReturn(fileStatusList).When(historyManager).GetHistoryDirsForCleaning(Org.Mockito.Mockito.AnyLong());
    Org.Mockito.Mockito.DoReturn(true).When(historyManager).DeleteDir(Any<FileStatus>());
    HistoryFileManager.JobListCache jobListCache = Org.Mockito.Mockito.Mock<HistoryFileManager.JobListCache>();
    HistoryFileManager.HistoryFileInfo fileInfo = Org.Mockito.Mockito.Mock<HistoryFileManager.HistoryFileInfo>();
    Org.Mockito.Mockito.DoReturn(jobListCache).When(historyManager).CreateJobListCache();
    Org.Mockito.Mockito.When(jobListCache.Get(Any<JobId>())).ThenReturn(fileInfo);
    Org.Mockito.Mockito.DoNothing().When(fileInfo).Delete();
    // Set job retention time to 24 hrs and cleaner interval to 2 secs
    Configuration conf = new Configuration();
    conf.SetLong(JHAdminConfig.MrHistoryMaxAgeMs, 24l * 3600 * 1000);
    conf.SetLong(JHAdminConfig.MrHistoryCleanerIntervalMs, 2 * 1000);
    jobHistory.Init(conf);
    jobHistory.Start();
    NUnit.Framework.Assert.AreEqual(2 * 1000l, jobHistory.GetCleanerInterval());
    // Only yesterday's jhist file should get deleted
    Org.Mockito.Mockito.Verify(fileInfo, Org.Mockito.Mockito.Timeout(20000).Times(1)).Delete();
    fileStatusList.Remove(dirCreatedYesterdayStatus);
    // Now reset job retention time to 10 secs
    conf.SetLong(JHAdminConfig.MrHistoryMaxAgeMs, 10 * 1000);
    // Set cleaner interval to 1 sec
    conf.SetLong(JHAdminConfig.MrHistoryCleanerIntervalMs, 1 * 1000);
    Org.Mockito.Mockito.DoReturn(conf).When(jobHistory).CreateConf();
    // Do refresh job retention settings
    jobHistory.RefreshJobRetentionSettings();
    // Cleaner interval should be updated
    NUnit.Framework.Assert.AreEqual(1 * 1000l, jobHistory.GetCleanerInterval());
    // Today's jhist file will also be deleted now since it falls below the
    // retention threshold
    Org.Mockito.Mockito.Verify(fileInfo, Org.Mockito.Mockito.Timeout(20000).Times(2)).Delete();
}
/// <summary>
/// Filters the file-registration data set by the given status.
/// Stub: currently always returns null — filtering is not yet implemented.
/// </summary>
/// <param name="data">Data set to filter.</param>
/// <param name="treeEnum">Status used as the filter criterion.</param>
/// <returns>Always null (unimplemented).</returns>
private DataSet FilterData(DataSet data, FileStatus treeEnum)
{
    return(null);
}
/// <summary>
/// Removing an unknown path with lax explicit-path validation does not throw, for both
/// values of the removeFromWorkdir flag.
/// </summary>
public void RemovingAnUnknownFileWithLaxExplicitPathsValidationDoesntThrow(string relativePath, FileStatus status)
{
    // Iterate twice so removeFromWorkdir (i % 2 == 0) is exercised as both true and false.
    for (int i = 0; i < 2; i++)
    {
        using (var repo = new Repository(StandardTestRepoPath))
        {
            Assert.Null(repo.Index[relativePath]);
            Assert.Equal(status, repo.Index.RetrieveStatus(relativePath));
            repo.Index.Remove(relativePath, i % 2 == 0);
            repo.Index.Remove(relativePath, i % 2 == 0, new ExplicitPathsOptions { ShouldFailOnUnmatchedPath = false });
        }
    }
}
/// <summary>
/// Reports whether the given status represents a change, by translating it to a git
/// file status and deferring to the overload that takes the translated value.
/// </summary>
public static bool IsChangedStatus(FileStatus state)
{
    var gitStatus = GetGitFileStatus(state);
    return IsChangedStatus(gitStatus);
}
/// <summary>
/// Even against an orphaned HEAD, unstaging unknown paths with lax validation is a
/// no-op that leaves the path's status unchanged.
/// </summary>
public void CanUnstageUnknownPathsAgainstAnOrphanedHeadWithLaxUnmatchedExplicitPathsValidation(string relativePath, FileStatus currentStatus)
{
    using (var repo = new Repository(CloneStandardTestRepo()))
    {
        // Point HEAD at an unborn branch so the repository reports an orphaned HEAD.
        repo.Refs.UpdateTarget("HEAD", "refs/heads/orphaned");
        Assert.True(repo.Info.IsHeadOrphaned);

        Assert.Equal(currentStatus, repo.Index.RetrieveStatus(relativePath));
        Assert.DoesNotThrow(() => repo.Index.Unstage(relativePath));
        Assert.DoesNotThrow(() => repo.Index.Unstage(relativePath, new ExplicitPathsOptions { ShouldFailOnUnmatchedPath = false }));
        Assert.Equal(currentStatus, repo.Index.RetrieveStatus(relativePath));
    }
}
/// <summary>
/// Unstaging an unknown path with lax validation (ShouldFailOnUnmatchedPath = false)
/// does not throw and leaves the path's status unchanged.
/// </summary>
public void CanUnstageUnknownPathsWithLaxUnmatchedExplicitPathsValidation(string relativePath, FileStatus currentStatus)
{
    using (var repository = new Repository(CloneStandardTestRepo()))
    {
        Assert.Equal(currentStatus, repository.Index.RetrieveStatus(relativePath));

        var laxOptions = new ExplicitPathsOptions { ShouldFailOnUnmatchedPath = false };
        Assert.DoesNotThrow(() => repository.Index.Unstage(relativePath, laxOptions));

        // The no-op unstage must leave the status untouched.
        Assert.Equal(currentStatus, repository.Index.RetrieveStatus(relativePath));
    }
}
/// <summary>
/// Draws the scrollable diff list for the current IMGUI event: sizes the scroll content,
/// renders group headers (when grouping is on) and one row per visible status entry, and
/// handles list-level context clicks, Delete-key deletion of selected entries, and
/// middle-button drag scrolling.
/// </summary>
private void DoDiffScroll(Event current)
{
    float elementHeight = diffElementRenderer.ElementHeight;
    // Content height: visible rows, plus one header row per distinct merged status when grouping.
    if (IsGrouping())
    {
        float totalTypesCount = diffWindowStatusList.Select(i => GetMergedStatus(i.State)).Distinct().Count();
        float elementsTotalHeight = (diffWindowStatusList.Count(IsVisible) + totalTypesCount) * elementHeight;
        diffScrollContentRect = new Rect(0, 0, Mathf.Max(DiffRect.width - 16, 420), elementsTotalHeight);
    }
    else
    {
        diffScrollContentRect = new Rect(0, 0, Mathf.Max(DiffRect.width - 16, 420), diffWindowStatusList.Count(IsVisible) * elementHeight);
    }
    diffScroll = GUI.BeginScrollView(DiffRect, diffScroll, diffScrollContentRect);
    int index = 0;
    FileStatus? lastFileStatus = null;  // last group header drawn; null until the first one
    float infoX = 0;                    // running y-offset of the next row (name kept from original)
    for (int i = 0; i < diffWindowStatusList.Count; i++)
    {
        var info = diffWindowStatusList[i];
        bool isVisible = IsVisible(info);
        Rect elementRect;
        if (IsGrouping())
        {
            FileStatus mergedStatus = GetMergedStatus(info.State);
            // Emit a header row whenever the merged status changes (list is assumed
            // grouped by merged status — confirm ordering upstream).
            if (!lastFileStatus.HasValue || lastFileStatus != mergedStatus)
            {
                elementRect = new Rect(0, infoX, diffScrollContentRect.width + 16, elementHeight);
                lastFileStatus = mergedStatus;
                FileStatus newState = lastFileStatus.Value;
                if (current.type == EventType.Repaint)
                {
                    styles.diffScrollHeader.Draw(elementRect, GitGUI.GetTempContent(mergedStatus.ToString()), false, false, false, false);
                    GUIStyle.none.Draw(new Rect(elementRect.x + 12, elementRect.y + 14, elementRect.width - 12, elementRect.height - 24), GitGUI.GetTempContent(gitOverlay.GetDiffTypeIcon(info.State, false).image), false, false, false, false);
                }
                if (elementRect.Contains(current.mousePosition))
                {
                    if (current.type == EventType.ContextClick)
                    {
                        // Right-click on a header: context menu scoped to this whole status group.
                        GenericMenu selectAllMenu = new GenericMenu();
                        elementContextFactory.Build(newState, selectAllMenu, this);
                        selectAllMenu.ShowAsContext();
                        current.Use();
                    }
                    else if (current.type == EventType.MouseDown && current.button == 0)
                    {
                        // Left-click on a header toggles the group's collapsed state;
                        // collapsing also drops any selection inside the group.
                        settings.MinimizedFileStatus = settings.MinimizedFileStatus.SetFlags(mergedStatus, !isVisible);
                        if (!isVisible)
                        {
                            ClearSelected(e => e.State == newState);
                        }
                        Repaint();
                        current.Use();
                    }
                }
                infoX += elementRect.height;
            }
        }
        if (!isVisible)
        {
            continue;
        }
        elementRect = new Rect(0, infoX, diffScrollContentRect.width + 16, elementHeight);
        //check visibility
        if (elementRect.y <= DiffRect.height + diffScroll.y && elementRect.y + elementRect.height >= diffScroll.y)
        {
            // A row is disabled while its asset or .meta file is being updated, staged, or dirty.
            bool isUpdating = (info.MetaChange.IsFlagSet(MetaChangeEnum.Object) && gitManager.IsFileUpdating(info.LocalPath)) || (info.MetaChange.IsFlagSet(MetaChangeEnum.Meta) && gitManager.IsFileUpdating(GitManager.MetaPathFromAsset(info.LocalPath))) || updatingPaths.Contains(info.LocalPath) || pathsToBeUpdated.Contains(info.LocalPath);
            bool isStaging = (info.MetaChange.IsFlagSet(MetaChangeEnum.Object) && gitManager.IsFileStaging(info.LocalPath)) || (info.MetaChange.IsFlagSet(MetaChangeEnum.Meta) && gitManager.IsFileStaging(GitManager.MetaPathFromAsset(info.LocalPath)));
            bool isDirty = (info.MetaChange.IsFlagSet(MetaChangeEnum.Object) && gitManager.IsFileDirty(info.LocalPath)) || (info.MetaChange.IsFlagSet(MetaChangeEnum.Meta) && gitManager.IsFileDirty(GitManager.MetaPathFromAsset(info.LocalPath)));
            bool selected = IsSelected(info);
            bool enabled = !isUpdating && !isDirty && !isStaging;
            diffElementRenderer.DoFileDiff(elementRect, info, enabled, selected, this);
            DoFileDiffSelection(elementRect, info, index, enabled, selected);
        }
        infoX += elementRect.height;
        index++;
    }
    GUI.EndScrollView();
    if (DiffRect.Contains(current.mousePosition))
    {
        if (current.type == EventType.ContextClick)
        {
            // List-level context menu: simple GenericMenu or the richer popup, per settings.
            if (gitSettings.UseSimpleContextMenus)
            {
                GenericMenuWrapper genericMenuWrapper = new GenericMenuWrapper(new GenericMenu());
                elementContextFactory.Build(genericMenuWrapper, this);
                genericMenuWrapper.GenericMenu.ShowAsContext();
            }
            else
            {
                ContextGenericMenuPopup popup = injectionHelper.CreateInstance<ContextGenericMenuPopup>();
                elementContextFactory.Build(popup, this);
                PopupWindow.Show(new Rect(Event.current.mousePosition, Vector2.zero), popup);
            }
            current.Use();
        }
        else if (current.type == EventType.KeyUp && current.keyCode == KeyCode.Delete)
        {
            // Delete key: remove the asset behind every current selection.
            foreach (var id in selections)
            {
                var entry = diffWindowStatusList.FirstOrDefault(e => SelectionPredicate(id, e));
                if (!string.IsNullOrEmpty(entry.LocalPath))
                {
                    DeleteAsset(entry.LocalPath);
                    current.Use();
                }
            }
        }
        if (current.type == EventType.MouseDrag && current.button == 2)
        {
            // Middle-button drag pans the scroll view.
            diffScroll.y -= current.delta.y;
            Repaint();
        }
    }
}
// Native interop: stat the file at <paramref name="path"/> and write the result into
// <paramref name="output"/>. The name implies lstat semantics (a trailing symlink is not
// followed) and a 0-on-success return — confirm both against the native shim.
internal static extern int LStat(string path, out FileStatus output);
/// <summary>
/// Strict (default) ExplicitPathsOptions make workdir/index comparison throw
/// UnmatchedPathException for a pathspec that matches nothing. (Sandbox-fixture variant.)
/// </summary>
public void ComparingTheWorkDirAgainstTheIndexWithStrictUnmatchedExplicitPathsValidationAndANonExistentPathspecThrows(string relativePath, FileStatus currentStatus)
{
    var path = SandboxStandardTestRepoGitDir();
    using (var repo = new Repository(path))
    {
        Assert.Equal(currentStatus, repo.RetrieveStatus(relativePath));
        Assert.Throws<UnmatchedPathException>(() => repo.Diff.Compare<TreeChanges>(new[] { relativePath }, false, new ExplicitPathsOptions()));
    }
}
/// <summary>
/// The OnUnmatchedPath callback fires for an unmatched pathspec when lax validation is
/// enabled. (Sandbox-fixture variant using the generic Compare overload.)
/// </summary>
public void CallbackForUnmatchedExplicitPathsIsCalledWhenSet(string relativePath, FileStatus currentStatus)
{
    var callback = new AssertUnmatchedPathspecsCallbackIsCalled();
    var path = SandboxStandardTestRepoGitDir();
    using (var repo = new Repository(path))
    {
        Assert.Equal(currentStatus, repo.RetrieveStatus(relativePath));
        repo.Diff.Compare<TreeChanges>(new[] { relativePath }, false, new ExplicitPathsOptions { ShouldFailOnUnmatchedPath = false, OnUnmatchedPath = callback.OnUnmatchedPath });
        Assert.True(callback.WasCalled);
    }
}
/// <summary>
/// Lax unmatched-pathspec validation yields an empty change set for workdir-vs-index
/// comparison; the option-less overload behaves the same. (Sandbox-fixture variant.)
/// </summary>
public void CanCompareTheWorkDirAgainstTheIndexWithLaxUnmatchedExplicitPathsValidation(string relativePath, FileStatus currentStatus)
{
    var path = SandboxStandardTestRepoGitDir();
    using (var repo = new Repository(path))
    {
        Assert.Equal(currentStatus, repo.RetrieveStatus(relativePath));
        var changes = repo.Diff.Compare<TreeChanges>(new[] { relativePath }, false, new ExplicitPathsOptions { ShouldFailOnUnmatchedPath = false });
        Assert.Equal(0, changes.Count());
        // The overload without options also applies no strict validation.
        changes = repo.Diff.Compare<TreeChanges>(new[] { relativePath });
        Assert.Equal(0, changes.Count());
    }
}
/// <summary>
/// Commands.Remove on a conflicted path resolves the conflict — modern-API variant of
/// the same scenario, using Commands.* and Assert.Empty/Single.
/// </summary>
public void CanResolveConflictsByRemovingFromTheIndex( bool removeFromWorkdir, string filename, bool existsBeforeRemove, bool existsAfterRemove, FileStatus lastStatus, int removedIndexEntries)
{
    var path = SandboxMergedTestRepo();
    using (var repo = new Repository(path))
    {
        int count = repo.Index.Count;
        string fullpath = Path.Combine(repo.Info.WorkingDirectory, filename);

        // Pre-conditions: file presence matches expectation, conflict exists, nothing resolved.
        Assert.Equal(existsBeforeRemove, File.Exists(fullpath));
        Assert.NotNull(repo.Index.Conflicts[filename]);
        Assert.Empty(repo.Index.Conflicts.ResolvedConflicts);

        Commands.Remove(repo, filename, removeFromWorkdir);

        // Post-conditions: conflict resolved, index shrank by the expected entry count.
        Assert.Null(repo.Index.Conflicts[filename]);
        Assert.Equal(count - removedIndexEntries, repo.Index.Count);
        Assert.Equal(existsAfterRemove, File.Exists(fullpath));
        Assert.Equal(lastStatus, repo.RetrieveStatus(filename));
        Assert.Single(repo.Index.Conflicts.ResolvedConflicts);
        Assert.NotNull(repo.Index.Conflicts.ResolvedConflicts[filename]);
    }
}
// Native interop: fstat-style query on an open file descriptor; writes the result into
// <paramref name="output"/> and presumably returns 0 on success, non-zero on error —
// confirm against the native shim.
internal static extern int FStat(int fd, out FileStatus output);
/// <summary>
/// Appends a flag suffix to a cell's text: one letter per status the entry carries —
/// "D" (dat), "F" (file), "V" (verified) — wrapped in " (…)", or nothing when no flag applies.
/// </summary>
private static string SetCell(string txt, RvFile tRomTable, FileStatus dat, FileStatus file, FileStatus verified)
{
    // Collect one letter per status flag the entry carries, in fixed D/F/V order.
    var letters = string.Concat(
        tRomTable.FileStatusIs(dat) ? "D" : "",
        tRomTable.FileStatusIs(file) ? "F" : "",
        tRomTable.FileStatusIs(verified) ? "V" : "");
    return letters.Length == 0 ? txt : txt + " (" + letters + ")";
}
/// <summary>
/// Handles mouse selection on one diff row: Ctrl-click toggles the row, Shift-click
/// extends a range from the last-clicked row across expanded groups, plain click selects
/// just the row (double-click pings the asset in the Project view), and right-click on an
/// unselected row reselects it before the context menu appears.
/// </summary>
private void DoFileDiffSelection(Rect elementRect, StatusListEntry info, int index, bool enabled, bool selected)
{
    Event current = Event.current;
    // Disabled rows (updating/staging/dirty) ignore all selection input.
    if (elementRect.Contains(current.mousePosition) && enabled)
    {
        if (current.type == EventType.MouseDown)
        {
            if (current.button == 0)
            {
                if (current.modifiers == EventModifiers.Control)
                {
                    // Ctrl-click: toggle this row's membership in the selection.
                    lastSelectedIndex = index;
                    if (selected)
                    {
                        RemoveSelected(info);
                    }
                    else
                    {
                        AddSelected(info);
                    }
                    GUI.FocusControl(info.LocalPath);
                }
                else if (current.shift)
                {
                    // Shift-click: select the range between the anchor and this row.
                    // Without Ctrl the previous selection is dropped first.
                    if (!current.control)
                    {
                        ClearSelection();
                    }
                    // tmpIndex counts only rows in expanded groups so the range matches
                    // the on-screen row order.
                    int tmpIndex = 0;
                    foreach (var selectInfo in diffWindowStatusList)
                    {
                        FileStatus mergedStatus = GetMergedStatus(selectInfo.State);
                        bool isExpanded = settings.MinimizedFileStatus.IsFlagSet(mergedStatus);
                        if (!isExpanded)
                        {
                            continue;
                        }
                        if (tmpIndex >= Mathf.Min(lastSelectedIndex, index) && tmpIndex <= Mathf.Max(lastSelectedIndex, index))
                        {
                            AddSelected(selectInfo);
                        }
                        tmpIndex++;
                    }
                    // With Ctrl held, move the range anchor to this row.
                    if (current.control)
                    {
                        lastSelectedIndex = index;
                    }
                    GUI.FocusControl(info.LocalPath);
                }
                else
                {
                    if (current.clickCount == 2)
                    {
                        // Double-click: select the underlying asset in the editor.
                        Selection.activeObject = AssetDatabase.LoadAssetAtPath(gitManager.ToProjectPath(info.LocalPath), typeof(Object));
                    }
                    else
                    {
                        // Plain click: make this row the sole selection and the new anchor.
                        lastSelectedIndex = index;
                        ClearSelection();
                        AddSelected(info);
                        GUI.FocusControl(info.LocalPath);
                    }
                }
                current.Use();
                Repaint();
            }
            else if (current.button == 1)
            {
                // Right-click on an unselected row: reselect just that row.
                if (!selected)
                {
                    ClearSelection();
                    AddSelected(info);
                    current.Use();
                    Repaint();
                }
            }
        }
    }
}
// Native interop: fstat-style query on an open file descriptor; writes the result into
// <paramref name="output"/> and presumably returns 0 on success, non-zero on error —
// confirm against the native shim.
internal static extern int FStat(int fileDescriptor, out FileStatus output);
/// <summary>
/// Tree node representing a commit entry; all construction work is delegated to the base.
/// </summary>
public TreeCommit(Repository repository, string relativePath, TreeDirectory parent, FileStatus status, string name)
    : base(repository, relativePath, parent, status, name)
{
}
/// <summary>
/// Unstaging an unknown path with strict (default) <c>ExplicitPathsOptions</c> throws
/// <c>UnmatchedPathException</c>.
/// </summary>
public void UnstagingUnknownPathsWithStrictUnmatchedExplicitPathsValidationThrows(string relativePath, FileStatus currentStatus)
{
    using (var repository = new Repository(CloneStandardTestRepo()))
    {
        // Sanity-check the path's reported status first.
        Assert.Equal(currentStatus, repository.Index.RetrieveStatus(relativePath));

        // Default options are strict: an unmatched pathspec must throw.
        var strictOptions = new ExplicitPathsOptions();
        Assert.Throws<UnmatchedPathException>(() => repository.Index.Unstage(relativePath, strictOptions));
    }
}
/// <summary>
/// Implements the NT CreateFile semantics on top of the wrapped file system:
/// resolves <paramref name="path"/>, then creates / opens / truncates /
/// supersedes the entry according to <paramref name="createDisposition"/>,
/// honoring FILE_DIRECTORY_FILE / FILE_NON_DIRECTORY_FILE constraints and the
/// requested access mask. On success a <c>FileHandle</c> (with an open stream
/// for non-directory entries that requested data access) is returned via
/// <paramref name="handle"/>, and <paramref name="fileStatus"/> reports
/// FILE_CREATED / FILE_OVERWRITTEN / FILE_SUPERSEDED / FILE_OPENED.
/// </summary>
/// <remarks>
/// Notable behaviors visible in this body:
/// - Paths containing ':' are rejected with STATUS_NO_SUCH_FILE when the
///   backing file system does not support named streams (mirrors the
///   Windows Server 2003 STATUS_OBJECT_NAME_NOT_FOUND behavior per the
///   inline comment).
/// - Any create/truncate/delete that needs write access is refused with
///   STATUS_ACCESS_DENIED unless the desired access implies write.
/// - IOException / UnauthorizedAccessException from the backing store are
///   translated through ToNTStatus and logged; anything else is rethrown.
/// NOTE(review): the Log format strings below contain literal line breaks
/// inside the string ("Error retrieving '{0}'. \n {1}.") — presumably an
/// artifact of decompilation/formatting; confirm against the original source
/// before reformatting, as this block is kept byte-identical.
/// </remarks>
public NTStatus CreateFile(out object handle, out FileStatus fileStatus, string path, AccessMask desiredAccess, FileAttributes fileAttributes, ShareAccess shareAccess, CreateDisposition createDisposition, CreateOptions createOptions, SecurityContext securityContext) { handle = null; fileStatus = FileStatus.FILE_DOES_NOT_EXIST; FileAccess createAccess = NTFileStoreHelper.ToCreateFileAccess(desiredAccess, createDisposition); bool requestedWriteAccess = (createAccess & FileAccess.Write) > 0; bool forceDirectory = (createOptions & CreateOptions.FILE_DIRECTORY_FILE) > 0; bool forceFile = (createOptions & CreateOptions.FILE_NON_DIRECTORY_FILE) > 0; if (forceDirectory & (createDisposition != CreateDisposition.FILE_CREATE && createDisposition != CreateDisposition.FILE_OPEN && createDisposition != CreateDisposition.FILE_OPEN_IF && createDisposition != CreateDisposition.FILE_SUPERSEDE)) { return(NTStatus.STATUS_INVALID_PARAMETER); } // Windows will try to access named streams (alternate data streams) regardless of the FILE_NAMED_STREAMS flag, we need to prevent this behaviour. if (!m_fileSystem.SupportsNamedStreams && path.Contains(":")) { // Windows Server 2003 will return STATUS_OBJECT_NAME_NOT_FOUND return(NTStatus.STATUS_NO_SUCH_FILE); } FileSystemEntry entry = null; try { entry = m_fileSystem.GetEntry(path); } catch (FileNotFoundException) { } catch (DirectoryNotFoundException) { } catch (Exception ex) { if (ex is IOException || ex is UnauthorizedAccessException) { NTStatus status = ToNTStatus(ex); Log(Severity.Verbose, "CreateFile: Error retrieving '{0}'. 
{1}.", path, status); return(status); } else { throw; } } if (createDisposition == CreateDisposition.FILE_OPEN) { if (entry == null) { return(NTStatus.STATUS_NO_SUCH_FILE); } fileStatus = FileStatus.FILE_EXISTS; if (entry.IsDirectory && forceFile) { return(NTStatus.STATUS_FILE_IS_A_DIRECTORY); } if (!entry.IsDirectory && forceDirectory) { return(NTStatus.STATUS_OBJECT_PATH_INVALID); } } else if (createDisposition == CreateDisposition.FILE_CREATE) { if (entry != null) { // File already exists, fail the request Log(Severity.Verbose, "CreateFile: File '{0}' already exists.", path); fileStatus = FileStatus.FILE_EXISTS; return(NTStatus.STATUS_OBJECT_NAME_COLLISION); } if (!requestedWriteAccess) { return(NTStatus.STATUS_ACCESS_DENIED); } try { if (forceDirectory) { Log(Severity.Information, "CreateFile: Creating directory '{0}'", path); entry = m_fileSystem.CreateDirectory(path); } else { Log(Severity.Information, "CreateFile: Creating file '{0}'", path); entry = m_fileSystem.CreateFile(path); } } catch (Exception ex) { if (ex is IOException || ex is UnauthorizedAccessException) { NTStatus status = ToNTStatus(ex); Log(Severity.Verbose, "CreateFile: Error creating '{0}'. 
{1}.", path, status); return(status); } else { throw; } } fileStatus = FileStatus.FILE_CREATED; } else if (createDisposition == CreateDisposition.FILE_OPEN_IF || createDisposition == CreateDisposition.FILE_OVERWRITE || createDisposition == CreateDisposition.FILE_OVERWRITE_IF || createDisposition == CreateDisposition.FILE_SUPERSEDE) { if (entry == null) { if (createDisposition == CreateDisposition.FILE_OVERWRITE) { return(NTStatus.STATUS_OBJECT_PATH_NOT_FOUND); } if (!requestedWriteAccess) { return(NTStatus.STATUS_ACCESS_DENIED); } try { if (forceDirectory) { Log(Severity.Information, "CreateFile: Creating directory '{0}'", path); entry = m_fileSystem.CreateDirectory(path); } else { Log(Severity.Information, "CreateFile: Creating file '{0}'", path); entry = m_fileSystem.CreateFile(path); } } catch (Exception ex) { if (ex is IOException || ex is UnauthorizedAccessException) { NTStatus status = ToNTStatus(ex); Log(Severity.Verbose, "CreateFile: Error creating '{0}'. {1}.", path, status); return(status); } else { throw; } } fileStatus = FileStatus.FILE_CREATED; } else { fileStatus = FileStatus.FILE_EXISTS; if (createDisposition == CreateDisposition.FILE_OPEN_IF) { if (entry.IsDirectory && forceFile) { return(NTStatus.STATUS_FILE_IS_A_DIRECTORY); } if (!entry.IsDirectory && forceDirectory) { return(NTStatus.STATUS_OBJECT_PATH_INVALID); } } else { if (!requestedWriteAccess) { return(NTStatus.STATUS_ACCESS_DENIED); } if (createDisposition == CreateDisposition.FILE_OVERWRITE || createDisposition == CreateDisposition.FILE_OVERWRITE_IF) { // Truncate the file try { Stream temp = m_fileSystem.OpenFile(path, FileMode.Truncate, FileAccess.ReadWrite, FileShare.ReadWrite, FileOptions.None); temp.Close(); } catch (Exception ex) { if (ex is IOException || ex is UnauthorizedAccessException) { NTStatus status = ToNTStatus(ex); Log(Severity.Verbose, "CreateFile: Error truncating '{0}'. 
{1}.", path, status); return(status); } else { throw; } } fileStatus = FileStatus.FILE_OVERWRITTEN; } else if (createDisposition == CreateDisposition.FILE_SUPERSEDE) { // Delete the old file try { m_fileSystem.Delete(path); } catch (Exception ex) { if (ex is IOException || ex is UnauthorizedAccessException) { NTStatus status = ToNTStatus(ex); Log(Severity.Verbose, "CreateFile: Error deleting '{0}'. {1}.", path, status); return(status); } else { throw; } } try { if (forceDirectory) { Log(Severity.Information, "CreateFile: Creating directory '{0}'", path); entry = m_fileSystem.CreateDirectory(path); } else { Log(Severity.Information, "CreateFile: Creating file '{0}'", path); entry = m_fileSystem.CreateFile(path); } } catch (Exception ex) { if (ex is IOException || ex is UnauthorizedAccessException) { NTStatus status = ToNTStatus(ex); Log(Severity.Verbose, "CreateFile: Error creating '{0}'. {1}.", path, status); return(status); } else { throw; } } fileStatus = FileStatus.FILE_SUPERSEDED; } } } } else { return(NTStatus.STATUS_INVALID_PARAMETER); } FileAccess fileAccess = NTFileStoreHelper.ToFileAccess(desiredAccess); Stream stream; if (fileAccess == (FileAccess)0 || entry.IsDirectory) { stream = null; } else { // Note that SetFileInformationByHandle/FILE_DISPOSITION_INFO has no effect if the handle was opened with FILE_DELETE_ON_CLOSE. NTStatus openStatus = OpenFileStream(out stream, path, fileAccess, shareAccess, createOptions); if (openStatus != NTStatus.STATUS_SUCCESS) { return(openStatus); } } bool deleteOnClose = (createOptions & CreateOptions.FILE_DELETE_ON_CLOSE) > 0; handle = new FileHandle(path, entry.IsDirectory, stream, deleteOnClose); if (fileStatus != FileStatus.FILE_CREATED && fileStatus != FileStatus.FILE_OVERWRITTEN && fileStatus != FileStatus.FILE_SUPERSEDED) { fileStatus = FileStatus.FILE_OPENED; } return(NTStatus.STATUS_SUCCESS); }
// Same contract as the non-orphaned variant, but exercised against an
// orphaned HEAD: strict explicit-path validation must still reject paths
// git knows nothing about.
public void UnstagingUnknownPathsAgainstAnOrphanedHeadWithStrictUnmatchedExplicitPathsValidationThrows(string relativePath, FileStatus currentStatus)
{
    using (var repository = new Repository(CloneStandardTestRepo()))
    {
        // Point HEAD at an unborn branch so it becomes orphaned.
        repository.Refs.UpdateTarget("HEAD", "refs/heads/orphaned");
        Assert.True(repository.Info.IsHeadOrphaned);

        Assert.Equal(currentStatus, repository.Index.RetrieveStatus(relativePath));

        Assert.Throws<UnmatchedPathException>(
            () => repository.Index.Unstage(relativePath, new ExplicitPathsOptions()));
    }
}
// P/Invoke binding for the native fstat(2) call taking a SafePipeHandle
// instead of a raw descriptor; fills 'output' with the handle's file status.
// NOTE(review): return convention (0 on success, -1 on failure) assumed from
// the POSIX fstat contract — the [DllImport] attribute is outside this view.
internal static extern int FStat(SafePipeHandle fd, out FileStatus output);
// Removing an entry from the index either succeeds (index count drops by one
// and the workdir file existence/status match expectations) or, for the
// failing cases, throws RemoveFromIndexException and leaves the index intact.
public void CanRemoveAnUnalteredFileFromTheIndexWithoutRemovingItFromTheWorkingDirectory( bool removeFromWorkdir, string filename, bool throws, FileStatus initialStatus, bool existsBeforeRemove, bool existsAfterRemove, FileStatus lastStatus)
{
    string repoPath = CloneStandardTestRepo();

    using (var repository = new Repository(repoPath))
    {
        int initialCount = repository.Index.Count;
        string absolutePath = Path.Combine(repository.Info.WorkingDirectory, filename);

        // Preconditions for this test case.
        Assert.Equal(initialStatus, repository.Index.RetrieveStatus(filename));
        Assert.Equal(existsBeforeRemove, File.Exists(absolutePath));

        if (!throws)
        {
            repository.Index.Remove(filename, removeFromWorkdir);

            Assert.Equal(initialCount - 1, repository.Index.Count);
            Assert.Equal(existsAfterRemove, File.Exists(absolutePath));
            Assert.Equal(lastStatus, repository.Index.RetrieveStatus(filename));
        }
        else
        {
            Assert.Throws<RemoveFromIndexException>(
                () => repository.Index.Remove(filename, removeFromWorkdir));

            // A failed removal must not disturb the index.
            Assert.Equal(initialCount, repository.Index.Count);
        }
    }
}
/// <summary>
/// Maps a (possibly combined) LibGit2Sharp <c>FileStatus</c> flag value to the
/// plugin's simplified <c>GitFileStatus</c>.
/// </summary>
/// <remarks>
/// Cleanup of the original: <c>state == X || state.HasFlag(X)</c> was
/// redundant — <c>Enum.HasFlag(X)</c> is true whenever <c>state == X</c>
/// (it tests <c>(state &amp; X) == X</c>) — and the
/// <c>case FileStatus.ModifiedInIndex</c> switch arm was unreachable because
/// the first guard already returns for that value. Behavior is unchanged;
/// the guard order is preserved so combined flags resolve the same way
/// (index-staged wins over workdir-modified).
/// </remarks>
private static GitFileStatus GetGitFileStatus(FileStatus state)
{
    // Flag checks first: a combined status containing one of these bits maps
    // the same way regardless of what other bits are set.
    if (state.HasFlag(FileStatus.ModifiedInIndex))
    {
        return(GitFileStatus.Staged);
    }
    if (state.HasFlag(FileStatus.ModifiedInWorkdir) ||
        state.HasFlag(FileStatus.TypeChangeInWorkdir) ||
        state.HasFlag(FileStatus.TypeChangeInIndex))
    {
        return(GitFileStatus.Modified);
    }
    // Exact-value mapping for the remaining single-flag statuses.
    switch (state)
    {
        case FileStatus.Nonexistent: return(GitFileStatus.Nonexistent);
        case FileStatus.Unaltered: return(GitFileStatus.Unaltered);
        case FileStatus.NewInIndex: return(GitFileStatus.Added);
        case FileStatus.DeletedFromIndex: return(GitFileStatus.Removed);
        case FileStatus.RenamedInIndex: return(GitFileStatus.Renamed);
        case FileStatus.NewInWorkdir: return(GitFileStatus.New);
        case FileStatus.DeletedFromWorkdir: return(GitFileStatus.Deleted);
        case FileStatus.RenamedInWorkdir: return(GitFileStatus.Renamed);
        case FileStatus.Unreadable: return(GitFileStatus.Unreadable);
        case FileStatus.Ignored: return(GitFileStatus.Ignored);
        case FileStatus.Conflicted: return(GitFileStatus.Conflict);
        // NOTE(review): unknown combinations fall back to Ignored, matching
        // the original default — presumably intentional; verify with callers.
        default: return(GitFileStatus.Ignored);
    }
}
// Checking out a single path from another committish should change exactly
// that one entry's status and nothing else in the working directory.
public void CanCheckoutPath(string originalBranch, string checkoutFrom, string path, FileStatus expectedStatus)
{
    string repositoryPath = CloneStandardTestRepo();

    using (var repository = new Repository(repositoryPath))
    {
        // Start from a clean working directory on the requested branch.
        ResetAndCleanWorkingDirectory(repository);
        repository.Checkout(originalBranch);
        Assert.False(repository.Index.RetrieveStatus().IsDirty);

        repository.CheckoutPaths(checkoutFrom, new[] { path });

        Assert.Equal(expectedStatus, repository.Index.RetrieveStatus(path));
        Assert.Equal(1, repository.Index.RetrieveStatus().Count());
    }
}
// Staging a path that neither the index nor the working directory knows
// about must fail with LibGit2SharpException.
public void StagingAnUnknownFileThrows(string relativePath, FileStatus status)
{
    using (var repository = new Repository(StandardTestRepoPath))
    {
        // Precondition: the path is absent from the index and carries the
        // expected (unknown) status.
        Assert.Null(repository.Index[relativePath]);
        Assert.Equal(status, repository.Index.RetrieveStatus(relativePath));

        Assert.Throws<LibGit2SharpException>(() => repository.Index.Stage(relativePath));
    }
}
/// <summary>
/// Restarts a MiniDFSCluster without reformatting and verifies that the
/// namespace survives: file contents check out, root/dir owner and group
/// changes made before shutdown are still present, and the fsimage checksum
/// changes after the namespace is modified and saveNamespace is invoked.
/// (Sharpen-converted Hadoop test; cluster lifecycle and assertion order are
/// deliberately sequential — do not reorder.)
/// </summary>
public virtual void TestRestartDFS() { Configuration conf = new HdfsConfiguration(); MiniDFSCluster cluster = null; FSNamesystem fsn = null; int numNamenodeDirs; DFSTestUtil files = new DFSTestUtil.Builder().SetName("TestRestartDFS").SetNumFiles (200).Build(); string dir = "/srcdat"; Path rootpath = new Path("/"); Path dirpath = new Path(dir); long rootmtime; FileStatus rootstatus; FileStatus dirstatus; try { cluster = new MiniDFSCluster.Builder(conf).Format(true).NumDataNodes(NumDatanodes ).Build(); string[] nameNodeDirs = conf.GetStrings(DFSConfigKeys.DfsNamenodeNameDirKey, new string[] { }); numNamenodeDirs = nameNodeDirs.Length; NUnit.Framework.Assert.IsTrue("failed to get number of Namenode StorageDirs", numNamenodeDirs != 0); FileSystem fs = cluster.GetFileSystem(); files.CreateFiles(fs, dir); rootmtime = fs.GetFileStatus(rootpath).GetModificationTime(); rootstatus = fs.GetFileStatus(dirpath); dirstatus = fs.GetFileStatus(dirpath); fs.SetOwner(rootpath, rootstatus.GetOwner() + "_XXX", null); fs.SetOwner(dirpath, null, dirstatus.GetGroup() + "_XXX"); } finally { if (cluster != null) { cluster.Shutdown(); } } try { // Force the NN to save its images on startup so long as // there are any uncheckpointed txns conf.SetInt(DFSConfigKeys.DfsNamenodeCheckpointTxnsKey, 1); // Here we restart the MiniDFScluster without formatting namenode cluster = new MiniDFSCluster.Builder(conf).Format(false).NumDataNodes(NumDatanodes ).Build(); fsn = cluster.GetNamesystem(); FileSystem fs = cluster.GetFileSystem(); NUnit.Framework.Assert.IsTrue("Filesystem corrupted after restart.", files.CheckFiles (fs, dir)); FileStatus newrootstatus = fs.GetFileStatus(rootpath); NUnit.Framework.Assert.AreEqual(rootmtime, newrootstatus.GetModificationTime()); NUnit.Framework.Assert.AreEqual(rootstatus.GetOwner() + "_XXX", newrootstatus.GetOwner ()); NUnit.Framework.Assert.AreEqual(rootstatus.GetGroup(), newrootstatus.GetGroup()); FileStatus newdirstatus = fs.GetFileStatus(dirpath); 
NUnit.Framework.Assert.AreEqual(dirstatus.GetOwner(), newdirstatus.GetOwner()); NUnit.Framework.Assert.AreEqual(dirstatus.GetGroup() + "_XXX", newdirstatus.GetGroup ()); rootmtime = fs.GetFileStatus(rootpath).GetModificationTime(); string checkAfterRestart = CheckImages(fsn, numNamenodeDirs); // Modify the system and then perform saveNamespace files.Cleanup(fs, dir); files.CreateFiles(fs, dir); fsn.SetSafeMode(HdfsConstants.SafeModeAction.SafemodeEnter); cluster.GetNameNodeRpc().SaveNamespace(); string checkAfterModify = CheckImages(fsn, numNamenodeDirs); NUnit.Framework.Assert.IsFalse("Modified namespace should change fsimage contents. " + "was: " + checkAfterRestart + " now: " + checkAfterModify, checkAfterRestart. Equals(checkAfterModify)); fsn.SetSafeMode(HdfsConstants.SafeModeAction.SafemodeLeave); files.Cleanup(fs, dir); } finally { if (cluster != null) { cluster.Shutdown(); } } }
/// <summary>
/// Captures one file's git status along with the repository and filesystem
/// working directories it was observed under.
/// </summary>
public GitFileSystemStatusEntry(String repositoryWorkingDirectory, String filesystemWorkingDirectory, String filePath, FileStatus status)
{
    this.status = status;
    this.filePath = filePath;
    this.filesystemWorkingDirectory = filesystemWorkingDirectory;
    this.repositoryWorkingDirectory = repositoryWorkingDirectory;
}
/// <summary>Pairs a repository-local path with its current git status.</summary>
public GitStatusEntry(string localPath, FileStatus status)
{
    this.status = status;
    this.localPath = localPath;
}
/// <summary>
/// Verifies getBlockLocations behavior around safe mode. Phase 1: with
/// datanodes running, manually entering safe mode must not break
/// getBlockLocations (block locations exist). Phase 2: after shutting down
/// all datanodes and restarting only the NameNode, the cluster stays in
/// safe mode and getFileBlockLocations must fail with SafeModeException
/// (directly or wrapped in a RemoteException); leaving safe mode restores it.
/// (Sharpen-converted Hadoop test; the cluster shutdown/restart ordering is
/// the point of the test — keep sequential.)
/// </summary>
public virtual void TestSafeModeWhenZeroBlockLocations() { try { Path file1 = new Path("/tmp/testManualSafeMode/file1"); Path file2 = new Path("/tmp/testManualSafeMode/file2"); System.Console.Out.WriteLine("Created file1 and file2."); // create two files with one block each. DFSTestUtil.CreateFile(fs, file1, 1000, (short)1, 0); DFSTestUtil.CreateFile(fs, file2, 2000, (short)1, 0); CheckGetBlockLocationsWorks(fs, file1); NameNode namenode = cluster.GetNameNode(); // manually set safemode. dfs.SetSafeMode(HdfsConstants.SafeModeAction.SafemodeEnter); NUnit.Framework.Assert.IsTrue("should still be in SafeMode", namenode.IsInSafeMode ()); // getBlock locations should still work since block locations exists CheckGetBlockLocationsWorks(fs, file1); dfs.SetSafeMode(HdfsConstants.SafeModeAction.SafemodeLeave); NUnit.Framework.Assert.IsFalse("should not be in SafeMode", namenode.IsInSafeMode ()); // Now 2nd part of the tests where there aren't block locations cluster.ShutdownDataNodes(); cluster.ShutdownNameNode(0); // now bring up just the NameNode. cluster.RestartNameNode(); cluster.WaitActive(); System.Console.Out.WriteLine("Restarted cluster with just the NameNode"); namenode = cluster.GetNameNode(); NUnit.Framework.Assert.IsTrue("No datanode is started. Should be in SafeMode", namenode .IsInSafeMode()); FileStatus stat = fs.GetFileStatus(file1); try { fs.GetFileBlockLocations(stat, 0, 1000); NUnit.Framework.Assert.IsTrue("Should have got safemode exception", false); } catch (SafeModeException) { } catch (RemoteException re) { // as expected if (!re.GetClassName().Equals(typeof(SafeModeException).FullName)) { NUnit.Framework.Assert.IsTrue("Should have got safemode exception", false); } } dfs.SetSafeMode(HdfsConstants.SafeModeAction.SafemodeLeave); NUnit.Framework.Assert.IsFalse("Should not be in safemode", namenode.IsInSafeMode ()); CheckGetBlockLocationsWorks(fs, file1); } finally { if (fs != null) { fs.Close(); } if (cluster != null) { cluster.Shutdown(); } } }
/// <summary>
/// Downloads ten randomly-sized local resources (alternating Private and
/// Application visibility) through FSDownload on a single-thread executor,
/// then asserts each localized file's size matches the resource metadata,
/// the cache directory has 0755 permissions, and private files carry
/// FSDownload.PrivateFilePerms. Umask is forced to 077 up front so the
/// permission assertions are deterministic.
/// (Sharpen-converted Hadoop test.)
/// </summary>
/// <exception cref="System.IO.IOException"/> /// <exception cref="Sharpen.URISyntaxException"/> /// <exception cref="System.Exception"/> public virtual void TestDownload() { Configuration conf = new Configuration(); conf.Set(CommonConfigurationKeys.FsPermissionsUmaskKey, "077"); FileContext files = FileContext.GetLocalFSFileContext(conf); Path basedir = files.MakeQualified(new Path("target", typeof(TestFSDownload).Name )); files.Mkdir(basedir, null, true); conf.SetStrings(typeof(TestFSDownload).FullName, basedir.ToString()); IDictionary <LocalResource, LocalResourceVisibility> rsrcVis = new Dictionary <LocalResource , LocalResourceVisibility>(); Random rand = new Random(); long sharedSeed = rand.NextLong(); rand.SetSeed(sharedSeed); System.Console.Out.WriteLine("SEED: " + sharedSeed); IDictionary <LocalResource, Future <Path> > pending = new Dictionary <LocalResource, Future <Path> >(); ExecutorService exec = Executors.NewSingleThreadExecutor(); LocalDirAllocator dirs = new LocalDirAllocator(typeof(TestFSDownload).FullName); int[] sizes = new int[10]; for (int i = 0; i < 10; ++i) { sizes[i] = rand.Next(512) + 512; LocalResourceVisibility vis = LocalResourceVisibility.Private; if (i % 2 == 1) { vis = LocalResourceVisibility.Application; } Path p = new Path(basedir, string.Empty + i); LocalResource rsrc = CreateFile(files, p, sizes[i], rand, vis); rsrcVis[rsrc] = vis; Path destPath = dirs.GetLocalPathForWrite(basedir.ToString(), sizes[i], conf); destPath = new Path(destPath, System.Convert.ToString(uniqueNumberGenerator.IncrementAndGet ())); FSDownload fsd = new FSDownload(files, UserGroupInformation.GetCurrentUser(), conf , destPath, rsrc); pending[rsrc] = exec.Submit(fsd); } exec.Shutdown(); while (!exec.AwaitTermination(1000, TimeUnit.Milliseconds)) { } foreach (Future <Path> path in pending.Values) { NUnit.Framework.Assert.IsTrue(path.IsDone()); } try { foreach (KeyValuePair <LocalResource, Future <Path> > p in pending) { Path localized = p.Value.Get(); 
NUnit.Framework.Assert.AreEqual(sizes[Sharpen.Extensions.ValueOf(localized.GetName ())], p.Key.GetSize()); FileStatus status = files.GetFileStatus(localized.GetParent()); FsPermission perm = status.GetPermission(); NUnit.Framework.Assert.AreEqual("Cache directory permissions are incorrect", new FsPermission((short)0x1ed), perm); status = files.GetFileStatus(localized); perm = status.GetPermission(); System.Console.Out.WriteLine("File permission " + perm + " for rsrc vis " + p.Key .GetVisibility().ToString()); System.Diagnostics.Debug.Assert((rsrcVis.Contains(p.Key))); NUnit.Framework.Assert.IsTrue("Private file should be 500", perm.ToShort() == FSDownload .PrivateFilePerms.ToShort()); } } catch (ExecutionException e) { throw new IOException("Failed exec", e); } }
// Generated (Sharpen) anonymous PrivilegedExceptionAction wrapper: captures
// the file system and the FileStatus entry that the action body operates on.
public _PrivilegedExceptionAction_368(FileSystem remoteFS, FileStatus remove)
{
    this.remove = remove;
    this.remoteFS = remoteFS;
}
// P/Invoke binding for the native lstat(2) call: fills 'output' with the
// file status of 'path'. NOTE(review): per POSIX, lstat (unlike stat) does
// not follow a final symlink — assumed from the native contract; the
// [DllImport] attribute is outside this view.
internal static extern int LStat(string path, out FileStatus output);
/// <summary>
/// Downloads five jar resources (alternating Private and Application
/// visibility) through FSDownload on a single-thread executor, then asserts
/// each localized result is a directory and recursively verifies its
/// permissions against the resource's visibility via VerifyPermsRecursively.
/// (Sharpen-converted Hadoop test.)
/// </summary>
/// <exception cref="System.IO.IOException"/> /// <exception cref="System.Exception"/> public virtual void TestDirDownload() { Configuration conf = new Configuration(); FileContext files = FileContext.GetLocalFSFileContext(conf); Path basedir = files.MakeQualified(new Path("target", typeof(TestFSDownload).Name )); files.Mkdir(basedir, null, true); conf.SetStrings(typeof(TestFSDownload).FullName, basedir.ToString()); IDictionary <LocalResource, LocalResourceVisibility> rsrcVis = new Dictionary <LocalResource , LocalResourceVisibility>(); Random rand = new Random(); long sharedSeed = rand.NextLong(); rand.SetSeed(sharedSeed); System.Console.Out.WriteLine("SEED: " + sharedSeed); IDictionary <LocalResource, Future <Path> > pending = new Dictionary <LocalResource, Future <Path> >(); ExecutorService exec = Executors.NewSingleThreadExecutor(); LocalDirAllocator dirs = new LocalDirAllocator(typeof(TestFSDownload).FullName); for (int i = 0; i < 5; ++i) { LocalResourceVisibility vis = LocalResourceVisibility.Private; if (i % 2 == 1) { vis = LocalResourceVisibility.Application; } Path p = new Path(basedir, "dir" + i + ".jar"); LocalResource rsrc = CreateJar(files, p, vis); rsrcVis[rsrc] = vis; Path destPath = dirs.GetLocalPathForWrite(basedir.ToString(), conf); destPath = new Path(destPath, System.Convert.ToString(uniqueNumberGenerator.IncrementAndGet ())); FSDownload fsd = new FSDownload(files, UserGroupInformation.GetCurrentUser(), conf , destPath, rsrc); pending[rsrc] = exec.Submit(fsd); } exec.Shutdown(); while (!exec.AwaitTermination(1000, TimeUnit.Milliseconds)) { } foreach (Future <Path> path in pending.Values) { NUnit.Framework.Assert.IsTrue(path.IsDone()); } try { foreach (KeyValuePair <LocalResource, Future <Path> > p in pending) { Path localized = p.Value.Get(); FileStatus status = files.GetFileStatus(localized); System.Console.Out.WriteLine("Testing path " + localized); System.Diagnostics.Debug.Assert((status.IsDirectory())); 
System.Diagnostics.Debug.Assert((rsrcVis.Contains(p.Key))); VerifyPermsRecursively(localized.GetFileSystem(conf), files, localized, rsrcVis[p .Key]); } } catch (ExecutionException e) { throw new IOException("Failed exec", e); } }