/// <summary>Scans the root directory for bot folders and rebuilds the location map.</summary>
/// <returns>
/// Return true if at least two bots are active.
/// </returns>
private bool ScanDirectory()
{
    int active = 0;
    BotLocations.Clear();

    // Disable all bots before re-scanning; bots still present on disk are re-registered below.
    // FIX(review): the original passed true here, contradicting the "Disable all." comment —
    // confirm SetIsActive semantics before shipping.
    foreach (var bot in Bots)
    {
        bot.Info = bot.Info.SetIsActive(false);
    }

    // "bin" and "zips" are infrastructure folders, not bot directories.
    foreach (var dir in RootDirectory.GetDirectories().Where(d => d.Name != "bin" && d.Name != "zips"))
    {
        BotInfo info;
        if (BotInfo.TryCreate(dir, out info))
        {
            var bot = Bots.GetOrCreate(info);
            if (!info.Inactive)
            {
                BotLocations[bot.Info] = dir;
                active++;
            }
        }
    }
    return active > 1;
}
/// <summary>Handles the login submit: validates the username/password and redirects on success.</summary>
protected void submit_Click(object sender, EventArgs e)
{
    // Empty input gets a prompt instead of a lookup.
    if (username.Text.Length == 0)
    {
        Response.Write("Please enter some sort of value");
        return;
    }

    MongoDataContext mongoContext = new MongoDataContext();
    RootDirectory user = mongoContext.GetFileStructure(username.Text).FirstOrDefault();
    if (user == null)
    {
        Response.Write("The username or password you entered is invalid<br>");
        return;
    }

    if (validatePassword(user.pw))
    {
        Response.Redirect("Default.aspx");
    }
    else
    {
        // FIX: use the same generic message as the unknown-username case.
        // The original revealed the guest password and echoed the username,
        // which both leaks a credential and confirms account existence.
        Response.Write("The username or password you entered is invalid<br>");
    }
}
/// <summary>Determines the installed service-pack level by probing for *.spN marker files.</summary>
/// <returns>The highest service-pack number found (4 down to 1), or 0 when none exist.</returns>
private Int32 GetSPLevel()
{
    // SP4, never can be too sure! Probe from the highest level down; first match wins.
    for (int level = 4; level >= 1; level--)
    {
        if (RootDirectory.GetFiles("*.sp" + level).Length >= 1)
        {
            return level;
        }
    }
    return 0;
}
//---------------------------------------------------------------------------------------------------------------------
/// <summary>Resolves the service root directories for file-system and web access (idempotent).</summary>
protected void GetRootDirectories()
{
    // Already resolved on a previous call.
    if (fileRootDir != null || relativeUrl != null)
    {
        return;
    }

    if (RootDirectory != null)
    {
        if (RootDirectory.StartsWith("$(SERVICEROOT)"))
        {
            // Expand the token against the file root for disk access,
            // and against the web root for URL generation.
            fileRootDir = RootDirectory.Replace("$(SERVICEROOT)", context.ServiceFileRoot).Replace('/', System.IO.Path.DirectorySeparatorChar);
            relativeUrl = RootDirectory.Replace("$(SERVICEROOT)", context.ServiceWebRoot);
        }
        else
        {
            // Relative path: anchor it under the site root folder.
            fileRootDir = (context.SiteRootFolder + "/" + RootDirectory).Replace('/', System.IO.Path.DirectorySeparatorChar);
            relativeUrl = RootDirectory;
        }
        // Strip any trailing slashes from the URL.
        relativeUrl = Regex.Replace(relativeUrl, "/+$", String.Empty);
    }

    if (IconUrl != null)
    {
        IconUrl = IconUrl.Replace("$(SERVICEROOT)", context.ServiceWebRoot);
    }
    if (ViewUrl != null)
    {
        ViewUrl = ViewUrl.Replace("$(SERVICEROOT)", context.ServiceWebRoot);
    }
}
/// <summary>Sanity-checks the imaging cache layout; throws when loose files exist at the cache root.</summary>
public void ValidateLayout()
{
    // The cache root must contain only directories, never files.
    bool rootHasFiles = RootDirectory.EnumerateFiles("*", SearchOption.TopDirectoryOnly).Any();
    if (rootHasFiles)
    {
        throw new Exception("The imaging cache directory should not contain any files at its root.");
    }

    // Ensure all directories are named after valid dates by filtering on those that don't parse exactly
    // todo: reinstate correct version after an actual layout is decided on!
    // Currently (30/7/15) the layout contains monthly folders in the root and day-hour zips inside (needs to be changed as is pretty crappy)
    /*
     * const string rootSubdirectoryFormat = "yyyyMM";
     * var invalidDirs = RootDirectory.EnumerateDirectories().Where(info =>
     * {
     *     DateTime dt;
     *     return !DateTime.TryParseExact(info.Name, rootSubdirectoryFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out dt);
     * }).ToList();
     *
     * if (invalidDirs.Any())
     *     throw new Exception("The following directories could not be parsed as valid " + rootSubdirectoryFormat +
     *                         " dates: " + string.Join(", ", invalidDirs.Select(info => info.Name)));
     */
    // todo: could evaluate contents of subdirectories, but this layout will likely change so no point implementing for now
}
/// <summary>Initializes the loader: resolves root/framework/module directories and the game module path.</summary>
public LiteLoader(string gameAssembly)
{
    LogLevel = LogLevel.Development;

    // Per-instance scratch directory: %TEMP%\LiteLoader\{B-formatted guid}.
    // Two-argument Path.Combine calls keep .NET 3.5 compatibility.
    TemporaryDirectory = Path.Combine(Path.GetTempPath(), "LiteLoader");
    TemporaryDirectory = Path.Combine(TemporaryDirectory, Guid.NewGuid().ToString("B"));

#if !NET35
    CancellationSource = new System.Threading.CancellationTokenSource();
#endif
    ServiceProvider = new DynamicServiceProvider();

    // Setup Directories: prefer the process working directory, but fall back to the
    // AppDomain base when the process appears to have been launched from AppData.
    RootDirectory = Environment.CurrentDirectory;
    string appDataRoot = Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData);
    if (RootDirectory.StartsWith(appDataRoot))
    {
        RootDirectory = AppDomain.CurrentDomain.BaseDirectory;
    }
    if (RootDirectory == null)
    {
        throw new InvalidProgramException("Unable to identify root directory");
    }

    FrameworkDirectory = Path.Combine(RootDirectory, "LiteLoader");
    ModuleDirectory = Path.Combine(FrameworkDirectory, "Modules");

    string gameModule = Path.Combine(ModuleDirectory, gameAssembly + ".dll");
    //if (!File.Exists(gameModule))
    //{
    //    throw new IOException($"Game module not found | {gameModule}");
    //}
    FileInfo moduleInfo = new FileInfo(gameModule);
    GameModule = moduleInfo.FullName;
}
/// <summary>
/// Creates a hard-disk partition image file with the DehaxFS file system, using the given parameters.
/// </summary>
/// <param name="diskPartitionSize">Partition size in bytes.</param>
/// <param name="diskClusterFactor">Cluster size, as a power-of-two multiplier of disk sectors.</param>
public DFSImage(long diskPartitionSize, byte diskClusterFactor)
{
    // Convert the 1-based exponent into the actual sectors-per-cluster multiplier (2^(n-1)).
    diskClusterFactor = (byte)Math.Pow(2, diskClusterFactor - 1);
    _diskPartitionSize = diskPartitionSize;
    _diskClusterFactor = diskClusterFactor;
    _diskClusterSize = DISK_BYTES_PER_SECTOR * diskClusterFactor;
    // One bitmap entry and one inode per cluster.
    _bitMap = new BitMap((int)(_diskPartitionSize / _diskClusterSize));
    _inodes = new Inodes((int)(_diskPartitionSize / _diskClusterSize));
    _rootDirectory = new RootDirectory(_diskClusterSize);
    // Data area = partition minus superblock, bitmap, inode table and root directory.
    _emptyData = new byte[_diskPartitionSize - Marshal.SizeOf<Superblock>() - _bitMap.GetLength() - _inodes.GetLength() - _rootDirectory.GetLength()];
    _superblock = new Superblock()
    {
        filesystemType = 0x28, // DehaxFS magic/type marker
        numClusters = (int)(_diskPartitionSize / _diskClusterSize),
        clusterFactor = diskClusterFactor,
        inodeArraySize = _inodes.GetLength(),
        bitMapSize = _bitMap.GetLength(),
        numFreeClusters = _emptyData.Length / _diskClusterSize,
        // All inodes but the root's start out free — TODO confirm that is the intent.
        numFreeInode = (int)(_diskPartitionSize / _diskClusterSize - 1)
    };
    InitializeFileSystem();
}
/// <summary>Verifies that GetFileStructure returns the file structure owned by "harageth".</summary>
public void GetFileStructureHasHaragethFileStructure()
{
    var context = new MongoDataContext();

    RootDirectory structure = context.GetFileStructure("harageth").FirstOrDefault();

    Assert.AreEqual <string>("harageth", structure.un);
}
/// <summary>Removes a file from the volume and releases the clusters it occupied.</summary>
public override void RemoveFile(File _file)
{
    if (_file == null)
    {
        return;
    }

    // Drop any current selection before mutating state.
    ClearSelection();

    // Replace every cluster fragment owned by the file with empty content.
    foreach (var clusterIndex in GetIndexFragmentClusterFileOfFile(_file))
    {
        Clusters.ElementAt(clusterIndex).Content = new EmptyContentCluster();
    }

    Files.Remove(_file);
    SelectedFile = null;

    // The root directory may need rebuilding once the file is gone.
    if (RootDirectory.NeedUpdateRootDirectory())
    {
        DeleteRootDirectory();
    }
}
/// <summary>Resolves a virtual path to a file, refreshing its cached state when found.</summary>
public override IVirtualFile GetFile(string virtualPath)
{
    var file = RootDirectory.GetFile(CleanPath(virtualPath));
    if (file != null)
    {
        file.Refresh();
    }
    return file;
}
/// <summary>
/// Generates all website files.
/// </summary>
/// <param name="token">Allows the generation to be aborted between phases.</param>
public void GenerateWebsite(CancellationToken token)
{
    if (!IsImageTreePopulated)
    {
        throw new Exception("ImageTree is not populated. Call PopulateImageTree() first.");
    }

    // Optionally wipe any previous output first.
    if (DeleteWebFolder)
    {
        var existing = new DirectoryInfo(Path.Combine(RootDirectory.FullName, WebFolderName));
        if (existing.Exists)
        {
            existing.Delete(true);
        }
    }

    if (token.IsCancellationRequested)
    {
        return;
    }

    // (Re)create the output folder and copy the fixed assets into it.
    DirectoryInfo wwwDirectory = RootDirectory.CreateSubdirectory(WebFolderName);
    WriteStaticResources(wwwDirectory);

    if (token.IsCancellationRequested)
    {
        return;
    }

    CreateGalleryPage(RootImageTree, wwwDirectory, token);
}
/// <summary>Verifies that folder filtering returns the same matches via both query paths.</summary>
public void Requesting_Filtered_Folders_Should_Work()
{
    // Seed every top-level directory with two folders whose names contain "XXX".
    var topLevel = RootDirectory.GetDirectories();
    for (int i = 0; i < topLevel.Length; i++)
    {
        topLevel[i].CreateSubdirectory("TestXXX" + i);
        topLevel[i].CreateSubdirectory("XXXTest" + i);
    }

    int matches = 0;
    Traverse((clientFolder, serviceFolder) =>
    {
        var viaGetFolders = serviceFolder.GetFolders("*XXX*").ToArray();
        var viaContents = serviceFolder.GetFolderContents("*XXX*").Folders.ToArray();

        // Both query paths must agree and only return matching names.
        Assert.AreEqual(viaGetFolders.Count(), viaContents.Count());
        viaGetFolders.Do(f => Assert.IsTrue(f.MetaData.Name.Contains("XXX")));
        viaContents.Do(f => Assert.IsTrue(f.MetaData.Name.Contains("XXX")));

        if (serviceFolder.MetaData.IsRootFolder)
        {
            //root folder doesn't have matches
            Assert.IsEmpty(viaGetFolders);
            Assert.IsEmpty(viaContents);
        }

        matches += viaGetFolders.Count();
    });

    // Total matches across the tree (2 per seeded top-level directory).
    Assert.AreEqual(6, matches);
}
//protected Projects Projects { get; set; }

/// <summary>Collects the project files and runs version discovery before the build executes.</summary>
protected override void BeforeBuildExecution(ITaskContext context)
{
    ProjectFiles = context.GetFiles(RootDirectory, "**/*.csproj");

    // FIX: the result was previously captured in an unused local ("abs");
    // the chain is kept for its side effects only.
    WorkingCopy
        .Discover(RootDirectory.ToFileFullPath(), context)
        .Versionize();
}
/// <summary>Resolves a virtual path to a file, refreshing its cached state when found.</summary>
public virtual IVirtualFile GetFile(string virtualPath)
{
    var file = RootDirectory.GetFile(SanitizePath(virtualPath));
    if (file != null)
    {
        file.Refresh();
    }
    return file;
}
/// <summary>
/// Creates a hard-disk partition image file with the DehaxFS file system, using the given parameters.
/// </summary>
/// <param name="diskPartitionSize">Partition size in bytes.</param>
/// <param name="diskClusterFactor">Cluster size, as a power-of-two multiplier of disk sectors.</param>
public DFSImage(long diskPartitionSize, byte diskClusterFactor)
{
    // Convert the 1-based exponent into the actual sectors-per-cluster multiplier (2^(n-1)).
    diskClusterFactor = (byte)Math.Pow(2, diskClusterFactor - 1);
    _diskPartitionSize = diskPartitionSize;
    _diskClusterFactor = diskClusterFactor;
    _diskClusterSize = DISK_BYTES_PER_SECTOR * diskClusterFactor;
    // One bitmap entry and one inode per cluster.
    _bitMap = new BitMap((int)(_diskPartitionSize / _diskClusterSize));
    _inodes = new Inodes((int)(_diskPartitionSize / _diskClusterSize));
    _rootDirectory = new RootDirectory(_diskClusterSize);
    // Data area = partition minus superblock, bitmap, inode table and root directory.
    _emptyData = new byte[_diskPartitionSize - Marshal.SizeOf <Superblock>() - _bitMap.GetLength() - _inodes.GetLength() - _rootDirectory.GetLength()];
    _superblock = new Superblock()
    {
        filesystemType = 0x28, // DehaxFS magic/type marker
        numClusters = (int)(_diskPartitionSize / _diskClusterSize),
        clusterFactor = diskClusterFactor,
        inodeArraySize = _inodes.GetLength(),
        bitMapSize = _bitMap.GetLength(),
        numFreeClusters = _emptyData.Length / _diskClusterSize,
        // All inodes but the root's start out free — TODO confirm that is the intent.
        numFreeInode = (int)(_diskPartitionSize / _diskClusterSize - 1)
    };
    InitializeFileSystem();
}
/// <summary>
/// Gets the root of the file system. This is a dummy folder, which
/// represents the file system as a whole, and provides the top level contents
/// of the underlying file system as files and folders.<br/>
/// In case of this provider, the root either corresponds to
/// the <see cref="RootDirectory"/>, if set, or the machine itself.
/// </summary>
public override VirtualFolderInfo GetFileSystemRoot()
{
    if (RootDirectory == null)
    {
        // No custom root configured - expose an artificial machine-level root.
        return new VirtualFolderInfo
        {
            IsRootFolder = true,
            Name = RootName,
            FullName = String.Empty
        };
    }

    var root = RootDirectory.CreateFolderResourceInfo();
    root.Name = RootName;
    root.IsRootFolder = true;

    //hide the path
    if (UseRelativePaths)
    {
        root.FullName = PathUtil.RelativeRootPrefix;
    }

    return root;
}
/// <summary>
/// Write a manifest to a stream
/// </summary>
/// <param name="stream">
/// The stream
/// </param>
public void WriteManifestStream(Stream stream)
{
    if (UseJSON)
    {
        // Parent references form cycles, so they are detached for JSON
        // serialization and restored afterwards.
        RootDirectory.ClearParentReferences();
        StreamWriter writer = new StreamWriter(stream);
        // Fixed ISO-8601 UTC format so manifests are portable across locales.
        IsoDateTimeConverter dateTimeConverter = new IsoDateTimeConverter();
        dateTimeConverter.DateTimeFormat = "yyyy-MM-ddTHH:mm:ss.fffZ";
        writer.Write(JsonConvert.SerializeObject(this, dateTimeConverter));
        writer.Flush();
        RootDirectory.RestoreParentReferences();
    }
    else
    {
        // SECURITY NOTE(review): BinaryFormatter is unsafe for untrusted data and was
        // removed in .NET 9 — consider migrating this branch to the JSON path.
        BinaryFormatter formatter = new BinaryFormatter();
        RootDirectory.SaveToStore();
        formatter.Serialize(stream, this);
        RootDirectory.RestoreFromStore();
    }
}
/// <summary>Looks up a named property: document value first, then declared default, then built-ins.</summary>
/// <returns>The property value, or null when the name is unknown.</returns>
string TryGetProperty(string name)
{
    // 1) Explicit value from the document.
    SourceValue val;
    if (_doc.Properties.TryGetValue(name, out val))
    {
        return val.String;
    }

    // 2) Declared default from the property definitions.
    Tuple <PropertyType, string> def;
    if (PropertyDefinitions.Items.TryGetValue(name, out def))
    {
        return def.Item2;
    }

    // 3) Built-in derived properties.
    switch (name)
    {
        case "Name":
            return Name;
        case "Identifier":
            return TryGetProperty("Name").ToIdentifier();
        case "QIdentifier":
            return TryGetProperty("Name").ToIdentifier(true);
        case "ProjectDirectory":
            return RootDirectory.NativeToUnix();
        default:
            return null;
    }
}
/// <summary>Reads the version from a VERSION marker file under the root directory, flooring at the initial version.</summary>
private static void ParseVersion()
{
    bool needsParse = (Version == null || Version < _INITVersion) && RootDirectory != null && RootDirectory.Exists;
    if (needsParse)
    {
        // Only accept files named exactly "VERSION" with no extension.
        var candidates = RootDirectory.GetFiles("VERSION", SearchOption.TopDirectoryOnly)
            .Where(f => String.Equals("VERSION", f.Name) && String.IsNullOrWhiteSpace(f.Extension));
        foreach (var file in candidates)
        {
            using (var stream = file.OpenText())
            {
                VersionInfo parsed;
                if (VersionInfo.TryParse(stream.ReadToEnd().Trim(), out parsed))
                {
                    Version = parsed;
                    break;
                }
            }
        }
    }

    // Guarantee a sane floor even when no usable VERSION file was found.
    if (Version == null || Version < _INITVersion)
    {
        Version = _INITVersion;
    }
}
/// <summary>Serializes this volume descriptor into the buffer at the given offset, following the ISO 9660 field layout.</summary>
internal override void WriteTo(byte[] buffer, int offset)
{
    base.WriteTo(buffer, offset);
    // Field offsets below match the ISO 9660 volume descriptor layout; "Both" writers
    // emit the value in little- and big-endian form as the format requires.
    IsoUtilities.WriteA1Chars(buffer, offset + 8, 32, SystemIdentifier, CharacterEncoding);
    IsoUtilities.WriteString(buffer, offset + 40, 32, true, VolumeIdentifier, CharacterEncoding, true);
    IsoUtilities.ToBothFromUInt32(buffer, offset + 80, VolumeSpaceSize);
    IsoUtilities.EncodingToBytes(CharacterEncoding, buffer, offset + 88);
    IsoUtilities.ToBothFromUInt16(buffer, offset + 120, VolumeSetSize);
    IsoUtilities.ToBothFromUInt16(buffer, offset + 124, VolumeSequenceNumber);
    IsoUtilities.ToBothFromUInt16(buffer, offset + 128, LogicalBlockSize);
    IsoUtilities.ToBothFromUInt32(buffer, offset + 132, PathTableSize);
    IsoUtilities.ToBytesFromUInt32(buffer, offset + 140, TypeLPathTableLocation);
    IsoUtilities.ToBytesFromUInt32(buffer, offset + 144, OptionalTypeLPathTableLocation);
    // Type-M path table locations are stored big-endian, hence the bit swap.
    IsoUtilities.ToBytesFromUInt32(buffer, offset + 148, Utilities.BitSwap(TypeMPathTableLocation));
    IsoUtilities.ToBytesFromUInt32(buffer, offset + 152, Utilities.BitSwap(OptionalTypeMPathTableLocation));
    RootDirectory.WriteTo(buffer, offset + 156, CharacterEncoding);
    IsoUtilities.WriteD1Chars(buffer, offset + 190, 129, VolumeSetIdentifier, CharacterEncoding);
    IsoUtilities.WriteA1Chars(buffer, offset + 318, 129, PublisherIdentifier, CharacterEncoding);
    IsoUtilities.WriteA1Chars(buffer, offset + 446, 129, DataPreparerIdentifier, CharacterEncoding);
    IsoUtilities.WriteA1Chars(buffer, offset + 574, 129, ApplicationIdentifier, CharacterEncoding);
    IsoUtilities.WriteD1Chars(buffer, offset + 702, 37, CopyrightFileIdentifier, CharacterEncoding);       // FIXME!!
    IsoUtilities.WriteD1Chars(buffer, offset + 739, 37, AbstractFileIdentifier, CharacterEncoding);        // FIXME!!
    IsoUtilities.WriteD1Chars(buffer, offset + 776, 37, BibliographicFileIdentifier, CharacterEncoding);   // FIXME!!
    IsoUtilities.ToVolumeDescriptorTimeFromUTC(buffer, offset + 813, CreationDateAndTime);
    IsoUtilities.ToVolumeDescriptorTimeFromUTC(buffer, offset + 830, ModificationDateAndTime);
    IsoUtilities.ToVolumeDescriptorTimeFromUTC(buffer, offset + 847, ExpirationDateAndTime);
    IsoUtilities.ToVolumeDescriptorTimeFromUTC(buffer, offset + 864, EffectiveDateAndTime);
    buffer[offset + 881] = FileStructureVersion;
}
/// <summary>End-to-end check of developer-expertise aggregation across nested review paths.</summary>
public void ComplexTest()
{
    RootDirectory root = new RootDirectory();
    // Seed reviews: developer id plus the set of files touched in that review.
    root.AddReview(1, new string[] { "fileA.txt", "a/b/c/fileA.txt", "a/b/b/fileX.txt" });
    root.AddReview(1, new string[] { "fileA.txt", "a/b/b/fileX.txt" });
    root.AddReview(2, new string[] { "fileA.txt", "a/b/c/fileA.txt", "a/b/b/fileX.txt" });
    root.AddReview(3, new string[] { "c/b/a/fileX.txt", "c/b/a/fileY.txt" });
    root.AddReview(2, new string[] { "c/b/a/fileX.txt" });

    // Expertise for a root-level file reviewed directly.
    // The expected fractions presumably derive from per-review file-count weighting — TODO confirm against the algorithm.
    IDictionary <int, double> dictExpertises = root.CalculateDeveloperExpertisesForFile("fileA.txt")
        .ToDictionary(dev => dev.DeveloperId, dev => dev.Expertise);
    Assert.AreEqual(2, dictExpertises.Count); // 1 and 2
    Assert.IsTrue(aboutEqual(1d / 3d + 1d / 2d, dictExpertises[1]));
    Assert.IsTrue(aboutEqual(1d / 3d, dictExpertises[2]));

    // Expertise for a nested file with direct reviews by developers 3 and 2.
    dictExpertises = root.CalculateDeveloperExpertisesForFile("c/b/a/fileX.txt")
        .ToDictionary(dev => dev.DeveloperId, dev => dev.Expertise);
    Assert.AreEqual(2, dictExpertises.Count); // 3 and 2
    Assert.IsTrue(aboutEqual(1d / 2d + 1d / 2d * 3d / 4d, dictExpertises[3])); // some indirection already
    Assert.IsTrue(aboutEqual(1d, dictExpertises[2]));

    // A path never reviewed directly: expertise flows down from ancestor directories.
    dictExpertises = root.CalculateDeveloperExpertisesForFile("a/b/c/d/e/f/g/fileX.txt")
        .ToDictionary(dev => dev.DeveloperId, dev => dev.Expertise);
    Assert.AreEqual(2, dictExpertises.Count); // 1 and 2
    Assert.IsTrue(aboutEqual(5d / 24d + 1d / 8d, dictExpertises[1]));
    Assert.IsTrue(aboutEqual(5d / 24d, dictExpertises[2]));
}
/// <summary>
/// Creates a virtual file system rooted at <paramref name="rootDirectory"/>, optionally chained to a
/// fallback file system and seeded with real-to-virtual path mappings.
/// NOTE(review): this constructor is truncated in the visible source chunk.
/// </summary>
public FileSystem(string rootDirectory, FileSystem nextFileSystem = null, IEnumerable <KeyValuePair <string, string> > realPathsToVirtualPaths = null)
{
    RootDirectory = NormalizeFilePath(rootDirectory);
    // Cache a with-trailing-separator variant for prefix stripping below.
    RootDirectoryWithSlash = RootDirectory.EndsWith(Path.DirectorySeparatorChar) ? RootDirectory : RootDirectory + Path.DirectorySeparatorChar;
    NextFileSystem = nextFileSystem;
    _fileTable = new Dictionary <string, FileSystemEntry>();
    _bigArchives = new List <BigArchive>();
    _realPathsToVirtualPaths = new Dictionary <string, string>();

    // Create mapping for virtual paths
    // e.g. C:\Users\lanyi\AppData\Red Alert 3\Maps -> data\maps\internal
    if (realPathsToVirtualPaths is not null)
    {
        foreach (var(real, @virtual) in realPathsToVirtualPaths)
        {
            _realPathsToVirtualPaths[NormalizeFilePath(real)] = NormalizeFilePath(@virtual);
        }
    }

    // First create entries for all non-.big files
    if (Directory.Exists(rootDirectory))
    {
        foreach (var file in Directory.GetFiles(rootDirectory, "*.*", SearchOption.AllDirectories))
        {
            var ext = Path.GetExtension(file).ToLowerInvariant();
            if (ext != ".big")
            {
                // Strip the root prefix to obtain the file's path relative to the root.
                var relativePath = NormalizeFilePath(file[RootDirectoryWithSlash.Length..]);
/// <summary>Deletes the store's directory tree and, when safely possible, its parent identity folders.</summary>
public override void Remove()
{
    // Deletes the current IsoFile's directory and the identity folder if possible.
    // (e.g. @"C:\Users\jerem\AppData\Local\IsolatedStorage\10v31ho4.bo2\eeolfu22.f2w\Url.qgeirsoc3cznuklvq5xlalurh1m0unxl\AssemFiles\")

    // This matches .NET Framework logic. We want to try and clean as well as possible without being more aggressive with the identity folders.
    // (e.g. Url.qgeirsoc3cznuklvq5xlalurh1m0unxl, etc.) We don't want to inadvertently yank folders for a different scope under the same
    // identity (at least no more so than NetFX).

    try
    {
        Directory.Delete(RootDirectory, recursive: true);
    }
    catch
    {
        // Failure to remove the store itself is an error the caller must see.
        throw new IsolatedStorageException(SR.IsolatedStorage_DeleteDirectories);
    }

    Close();

    string? parentDirectory = Path.GetDirectoryName(RootDirectory.TrimEnd(Path.DirectorySeparatorChar));
    Debug.Assert(parentDirectory != null);

    // Parent cleanup is best-effort only: bail out silently if anything unexpected
    // is present or the delete fails (another scope may share the identity folder).
    if (ContainsUnknownFiles(parentDirectory))
    {
        return;
    }

    try
    {
        Directory.Delete(parentDirectory, recursive: true);
    }
    catch
    {
        return;
    }

    // Domain paths are doubly nested
    // @"C:\Users\jerem\AppData\Local\IsolatedStorage\10v31ho4.bo2\eeolfu22.f2w\Url.qgeirsoc3cznuklvq5xlalurh1m0unxl\Url.qgeirsoc3cznuklvq5xlalurh1m0unxl\Files\"
    if (Helper.IsDomain(Scope))
    {
        parentDirectory = Path.GetDirectoryName(parentDirectory);
        Debug.Assert(parentDirectory != null);

        if (ContainsUnknownFiles(parentDirectory))
        {
            return;
        }

        try
        {
            Directory.Delete(parentDirectory, recursive: true);
        }
        catch
        {
            return;
        }
    }
}
/// <summary>Streams the clicked file from the Redis store to the client as an attachment download.</summary>
protected void DownloadFile(object sender, EventArgs e)
{
    RedisDataContext redisContext = new RedisDataContext();
    LinkButton value = (LinkButton)sender;

    Response.ContentType = "application/octet-stream";
    Response.AppendHeader("Content-Disposition", "attachment; filename=" + value.Text);

    RootDirectory directory = (RootDirectory)Session["directory"];

    // FIX: the original if/else on virtualPath contained two byte-identical branches,
    // and computed an unused fileName/extension split; both removed.
    // NOTE(review): when virtualPath is "/" this key contains a doubled slash
    // ("un//file") — confirm whether the store expects that.
    Byte[] buffer = redisContext.ReadFile(directory.un + virtualPath.Text + "/" + value.Text);

    Response.BinaryWrite(buffer);
    Response.End();
}
/// <summary>Parses the VERSION marker file (if any), defaulting to the initial version on any failure.</summary>
private static void ParseVersion()
{
    TryCatch(
        () =>
    {
        // Nothing to do when a valid version is already set or there is no root to scan.
        if ((Version != null && Version >= _INITVersion) || RootDirectory == null || !RootDirectory.Exists)
        {
            return;
        }

        var files = RootDirectory.EnumerateFiles("VERSION", SearchOption.TopDirectoryOnly);
        var file = files.FirstOrDefault(f => String.Equals("VERSION", f.Name) && String.IsNullOrWhiteSpace(f.Extension));

        // FIX: the original dereferenced "file" unconditionally, relying on the
        // surrounding TryCatch to swallow the NullReferenceException when no
        // VERSION file exists. Guard explicitly instead (same final state:
        // Version is floored to _INITVersion below).
        if (file == null)
        {
            return;
        }

        var ver = file.ReadAllText().Trim();
        VersionInfo v;
        if (VersionInfo.TryParse(ver, out v))
        {
            Version = v;
        }
    },
        e => Version = _INITVersion);

    // Guarantee a sane floor even when parsing failed or found nothing.
    if (Version == null || Version < _INITVersion)
    {
        Version = _INITVersion;
    }
}
/// <summary>Loads the user's file structure on first load, caches it in session, and binds it to the page.</summary>
protected void Page_Load(object sender, EventArgs e)
{
    // NOTE(review): the username is hard-coded; it presumably should come from the
    // authenticated user — confirm before shipping.
    //DatabaseInitialize.DatabaseInitializeFactory("Mongo");
    //DatabaseInitialize.DatabaseInitializeFactory("Redis");
    if (!IsPostBack)
    {
        MongoDataContext mongoContext = new MongoDataContext();
        RootDirectory directory = mongoContext.GetFileStructure("harageth").FirstOrDefault();

        // Cache the whole structure so postbacks can traverse it without re-querying.
        Session["directory"] = directory;

        folders.DataSource = directory.folders;
        folders.DataBind();

        IEnumerable <string> dataBindFiles = directory.files;
        files.DataSource = dataBindFiles;
        files.DataBind();
    }
    else
    {
        // FIX: this branch runs ON postbacks, but the original debug message said
        // "Not a postback" — inverted. (Consider removing this debug output entirely.)
        Response.Write("Postback");
    }
}
/// <summary>Navigates into the clicked subfolder of the cached directory structure and rebinds the lists.</summary>
protected void ChangeCWD_Click(object sender, EventArgs e)
{
    LinkButton val = (LinkButton)sender;
    string newCWDName = val.Text;

    RootDirectory directory = (RootDirectory)Session["directory"];

    // FIX: FindIndex returns -1 when the folder is missing (e.g. a stale session),
    // which previously threw ArgumentOutOfRangeException on the indexer.
    int index = directory.folders.FindIndex(x => x.folderName == newCWDName);
    if (index < 0)
    {
        Response.Write("Folder not found: " + newCWDName);
        return;
    }

    Folder newCWD = directory.folders[index];
    folders.DataSource = newCWD.folders;
    folders.DataBind();

    IEnumerable <string> dataBindFiles = newCWD.files;
    files.DataSource = dataBindFiles;
    files.DataBind();

    // NOTE(review): no "/" separator is appended here, while DownloadFile joins the
    // path with "/" — confirm the intended virtual-path format.
    virtualPath.Text = virtualPath.Text + newCWDName;
}
/// <summary>
/// Creates a new document in the project, and adds it to unoproj file.
/// Also creates any missing directories in path.
/// </summary>
public async Task CreateDocument(RelativeFilePath relativePath, SourceFragment contents = null)
{
    var fragment = contents ?? SourceFragment.Empty;

    // Resolve the absolute location and make sure its directory chain exists.
    var rootDir = await RootDirectory.FirstAsync();
    var newFilePath = rootDir.Combine(relativePath);
    _shell.Create(newFilePath.ContainingDirectory);

    // Write the initial contents to the new file.
    using (var stream = _shell.CreateNew(newFilePath))
    {
        var bytes = fragment.ToBytes();
        stream.Write(bytes, 0, bytes.Length);
    }

    // Register the document in the .unoproj file unless it is already listed.
    var projectFilePath = await FilePath.FirstAsync();
    var project = Uno.ProjectFormat.Project.Load(projectFilePath.NativePath);
    if (project.AllFiles.None(item => item.UnixPath == relativePath.NativeRelativePath))
    {
        project.MutableIncludeItems.Add(new Uno.ProjectFormat.IncludeItem(relativePath.NativeRelativePath));
        project.Save();
    }
}
/// <summary>Builds the root folder item: an artificial machine-level root, or the configured root directory.</summary>
protected FolderItem GetFileSystemRootImplementation2()
{
    VirtualFolderInfo rootInfo = RootDirectory == null
        ? new VirtualFolderInfo { FullName = String.Empty, IsEmpty = false } // artificial root
        : RootDirectory.CreateFolderResourceInfo();                          // from the configured directory

    if (RootDirectory != null && UseRelativePaths)
    {
        // Hide the physical location from clients.
        rootInfo.FullName = PathUtil.RelativeRootPrefix;
        rootInfo.ParentFolderPath = null;
    }

    rootInfo.Name = RootName;
    rootInfo.IsRootFolder = true;
    return new FolderItem(RootDirectory, rootInfo);
}
/// <summary>
/// Validates whether a <see cref="LocalFileSystemProvider"/> was configured
/// with access restricted to a given <see cref="LocalFileSystemProvider.RootDirectory"/>,
/// and makes sure that the requested <paramref name="file"/> is indeed contained
/// within that folder.
/// </summary>
/// <param name="file">The requested file resource.</param>
/// <param name="submittedFilePath">The path that was submitted in the original request.</param>
/// <param name="context">The currently performed file system operation.</param>
/// <exception cref="ResourceAccessException">If the requested resource is not
/// a descendant of a configured <see cref="LocalFileSystemProvider.RootDirectory"/>.</exception>
/// <exception cref="ArgumentNullException">If <paramref name="file"/>
/// is a null reference.</exception>
private void ValidateFileRequestAccess(FileItem file, string submittedFilePath, FileSystemTask context)
{
    if (file == null)
    {
        throw new ArgumentNullException("file");
    }

    //if there isn't a restricted custom root, every file resource can be accessed
    //(if the path is invalid, this will fail otherwise, depending on the action)
    if (RootDirectory == null)
    {
        return;
    }

    try
    {
        //if we have a custom root, make sure the resource is indeed a descendant of the root
        if (RootDirectory.IsParentOf(file.LocalFile.FullName))
        {
            return;
        }
    }
    catch (ResourceAccessException e)
    {
        //just bubble a resource access exception
        if (e.Resource == null)
        {
            e.Resource = file.ResourceInfo;
        }
        throw;
    }
    catch (Exception e)
    {
        //exceptions can happen in case of invalid file paths
        //log detailed info
        string error = "Resource request for file [{0}] caused exception when validating against root directory [{1}].";
        error = String.Format(error, submittedFilePath, RootDirectory.FullName);
        AuditHelper.AuditException(Auditor, e, AuditLevel.Warning, context, AuditEvent.InvalidFilePathFormat, error);

        //do not expose too much path information (e.g. absolute paths if disabled)
        error = String.Format("Invalid file path: [{0}].", submittedFilePath);
        throw new ResourceAccessException(error, e) { Resource = file.ResourceInfo, IsAudited = true };
    }

    //if none of the above is true, the request is invalid
    //log detailed info
    string msg = "Resource request for file [{0}] was blocked. The resource is outside the root directory [{1}].";
    msg = String.Format(msg, file.ResourceInfo.FullName, RootDirectory.FullName);
    Auditor.Audit(AuditLevel.Warning, context, AuditEvent.InvalidResourceLocationRequested, msg);

    //do not expose too much path information (e.g. absolute paths if disabled)
    msg = String.Format("Invalid file path: [{0}].", submittedFilePath);
    throw new ResourceAccessException(msg) { Resource = file.ResourceInfo, IsAudited = true };
}
/// <summary>Creates (or opens) the standard subdirectories under the root directory.</summary>
private static void initDirectories()
{
    // CreateSubdirectory returns the existing directory when it is already present,
    // so repeated calls are harmless.
    Binaries = RootDirectory.CreateSubdirectory("Binaries");
    Test = RootDirectory.CreateSubdirectory("Test");
    Cache = RootDirectory.CreateSubdirectory("Cache");
    GameData = RootDirectory.CreateSubdirectory("GameData");
    Scripts = RootDirectory.CreateSubdirectory("Scripts");
}
/// <summary>Builds the test fixture: three top-level folders (the first nested once) plus two root files.</summary>
public void init()
{
    var firstFolder = new Folder()
    {
        folderName = "firstFolder",
        files = new List<string>() { "temp1.txt", "file1.txt" },
        // The only nested folder in the fixture.
        folders = new List<Folder> { new Folder() { folderName = "firstFolder2" } }
    };
    var secondFolder = new Folder()
    {
        folderName = "secondFolder",
        files = new List<string>() { "temp2.txt", "file2.txt" }
    };
    var thirdFolder = new Folder()
    {
        folderName = "thirdFolder",
        files = new List<string>() { "temp3.txt", "file3.txt" }
    };

    testRoot = new RootDirectory()
    {
        un = "harageth",
        folders = new List<Folder>() { firstFolder, secondFolder, thirdFolder },
        files = new List<string>() { "temp.txt", "file.txt" }
    };
}