/// <include file='.\ISyncService.xml' path='/SyncService/Push/*'/>
public static SyncResult Push(this ISyncService syncService, IEnumerable<String> local, FileEntry remote, ISyncProgressMonitor monitor)
{
    if (monitor == null)
    {
        throw new ArgumentNullException("monitor", "Monitor cannot be null");
    }

    // The destination must be a directory; pushing onto a plain file is an error.
    if (!remote.IsDirectory)
    {
        return new SyncResult(ErrorCodeHelper.RESULT_REMOTE_IS_FILE);
    }

    // Resolve each local path into its file-system object.
    List<FileSystemInfo> sources = new List<FileSystemInfo>();
    foreach (String localPath in local)
    {
        sources.Add(localPath.GetFileSystemInfo());
    }

    // Announce the total byte count up front, run the transfer, then signal completion.
    monitor.Start(syncService.GetTotalLocalFileSize(sources));
    SyncResult result = syncService.DoPush(sources, remote.FullPath, monitor);
    monitor.Stop();

    return result;
}
/// <summary>
/// Load the specified file, preferring the in-memory cache over disk.
/// </summary>
public byte[] LoadFile(string fileName)
{
    // Serve from the cache when this file has been loaded before.
    for (int index = 0; index < mSavedFiles.size; ++index)
    {
        FileEntry cached = mSavedFiles[index];
        if (cached.fileName == fileName) return cached.data;
    }
#if !UNITY_WEBPLAYER
    string path = CleanupFilename(fileName);

    if (File.Exists(path))
    {
        try
        {
            byte[] contents = File.ReadAllBytes(path);

            if (contents != null)
            {
                // Remember the file so subsequent loads skip the disk.
                FileEntry loaded = new FileEntry();
                loaded.fileName = fileName;
                loaded.data = contents;
                mSavedFiles.Add(loaded);
                return contents;
            }
        }
        catch (System.Exception ex)
        {
            Error(fileName + ": " + ex.Message);
        }
    }
#endif
    return null;
}
/// <summary>
/// Create an ls receiver/parser.
/// </summary>
/// <param name="parent">The list of current children. To prevent collapse during update, reusing the same
/// FileEntry objects for files that were already there is paramount.</param>
/// <param name="entries">The list of new children to be filled by the receiver.</param>
/// <param name="links">The list of link paths to compute post-ls, to figure out whether the link
/// points to a file or to a directory.</param>
public ListingServiceReceiver(FileEntry parent, List<FileEntry> entries, List<String> links)
{
    Parent = parent;

    // Null collections are replaced with fresh empty lists.
    Entries = entries ?? new List<FileEntry>();
    Links = links ?? new List<String>();

    // Snapshot the current children so existing FileEntry instances can be reused.
    CurrentChildren = Parent.Children.ToArray();
}
/// <summary>
/// Moves a file entry out of the excluded/new collections into the included
/// dictionary, notifies listeners, and registers the file with the installer
/// project management service.
/// </summary>
/// <param name="file">The file entry to include.</param>
public void IncludeFile(FileEntry file)
{
    // TODO: check for directory
    ItsExcludedFileEntries.Remove(file);
    // NOTE(review): dictionary is keyed by GetHashCode() — this assumes hash codes
    // are unique per entry; Add will throw on a collision. Verify this invariant.
    ItsIncludedFileEntriesDict.Add(file.GetHashCode(), file);
    ItsNewFileEntries.Remove(file);
    // TODO: update xml
    RaiseRegistryUpdateEvent();
    ItsInstallerProjectManagementService.AddNewFile(file.FullPath);
}
/// <summary>
/// Opens the named compound file in the given directory and reads its table of
/// contents. Each entry's length is derived from the next entry's offset; the
/// last entry extends to the end of the stream.
/// </summary>
public CompoundFileReader(Directory dir, System.String name, int readBufferSize)
{
    directory = dir;
    fileName = name;
    this.readBufferSize = readBufferSize;

    bool opened = false;
    try
    {
        stream = dir.OpenInput(name, readBufferSize);

        // The compound file begins with a count followed by (offset, id) pairs.
        int count = stream.ReadVInt();
        FileEntry previous = null;
        for (int i = 0; i < count; i++)
        {
            long offset = stream.ReadLong();
            System.String id = stream.ReadString();

            // A pair's offset fixes the length of the entry before it.
            if (previous != null)
            {
                previous.length = offset - previous.offset;
            }

            previous = new FileEntry();
            previous.offset = offset;
            entries[id] = previous;
        }

        // The final entry runs to the end of the stream.
        if (previous != null)
        {
            previous.length = stream.Length() - previous.offset;
        }

        opened = true;
    }
    finally
    {
        // Don't leak the stream if initialization failed part-way through.
        if (!opened && (stream != null))
        {
            try
            {
                stream.Close();
            }
            catch (System.IO.IOException)
            {
            }
        }
    }
}
/// <summary>
/// Builds a TorrentFile snapshot from the native entry, disposing the entry once
/// its fields have been copied out.
/// </summary>
public static ITorrentFile CreateFromEntry(FileEntry entry, long progress, int priority)
{
    using (entry)
    {
        // Progress is expressed as a percentage of the entry's total size.
        float percent = (progress / (float)entry.Size) * 100f;

        return new TorrentFile
        {
            Path = entry.Path,
            Priority = priority,
            Progress = percent,
            Size = entry.Size
        };
    }
}
/// <include file='.\ISyncService.xml' path='/SyncService/PullFile/*'/>
public static SyncResult PullFile(this ISyncService syncService, FileEntry remote, String localFilename, ISyncProgressMonitor monitor)
{
    if (monitor == null)
    {
        throw new ArgumentNullException("monitor", "Monitor cannot be null");
    }

    // The remote entry's size drives progress reporting for the transfer.
    monitor.Start(remote.Size);
    SyncResult result = syncService.DoPullFile(remote.FullPath, localFilename, monitor);
    monitor.Stop();

    return result;
}
/// <summary>
/// Returns the existing entry for the path, or allocates one in the first free
/// slot of the fixed-size table. Returns null when the table is full.
/// </summary>
public FileEntry ensureFileEntry(string path, byte[] data)
{
    // Reuse an entry that is already registered for this path.
    FileEntry existing = findFileEntry(path);
    if (existing != null)
    {
        Debug.Log("ensureFileEntry: found entry:" + path);
        return existing;
    }

    // Otherwise claim the first empty slot.
    for (int slot = 0; slot < m_fents.Length; slot++)
    {
        if (m_fents[slot] != null) continue;

        Debug.Log("allocated new fileentry:" + path + " len:" + data.Length + " at:" + slot);
        FileEntry created = new FileEntry(path, data);
        m_fents[slot] = created;
        return created;
    }

    Debug.Log("ensureFileEntry: full!");
    return null;
}
/// <summary>
/// Parses a Garry's Mod addon (GMA) archive stream: validates the magic header,
/// reads the file table, then extracts each file's contents in table order.
/// Returns null when the header does not match or the table is empty.
/// </summary>
public static List<WorkShopAddonFile> Extract(Stream gmaFile)
{
    // 1145130311 == 0x44414D47, the bytes "GMAD" read as a little-endian UInt32.
    const UInt32 HEADER = 1145130311;
    var binaryReader = new BinaryReader(gmaFile, Encoding.GetEncoding(1252));
    binaryReader.BaseStream.Position = 0;
    if (binaryReader.ReadUInt32() == HEADER)
    {
        // Skip 18 bytes of fixed header fields before the string section.
        gmaFile.Seek(18, SeekOrigin.Current);
        string workshopFileName = ReadString(binaryReader);
        string metadata = ReadString(binaryReader);
        string authorname = ReadString(binaryReader);
        gmaFile.Seek(4, SeekOrigin.Current);

        // Read the file table until its zero terminator.
        var Files = new List<FileEntry>();
        while (binaryReader.BaseStream.CanRead)
        {
            if (binaryReader.ReadUInt32() == 0)
            {
                break;
            }
            var file = new FileEntry { Filename = ReadString(binaryReader), Size = binaryReader.ReadUInt32() };
            Files.Add(file);
            // Skip the remaining 8 bytes of each table record.
            gmaFile.Seek(8, SeekOrigin.Current);
        }

        if (Files.Count >= 1)
        {
            // NOTE: the `let` ordering is load-bearing — fileData is read
            // sequentially from the stream, in table order, as the query is
            // enumerated. Do not reorder these clauses.
            return (from fileEntry in Files
                    let fileName = Path.GetFileName(fileEntry.Filename)
                    let fileData = binaryReader.ReadBytes(Convert.ToInt32(fileEntry.Size))
                    let filePath = Path.GetDirectoryName(fileEntry.Filename)
                    select new WorkShopAddonFile
                    {
                        FileName = fileName,
                        Path = filePath,
                        Contents = fileData,
                        Size = fileData.Length
                    }).ToList();
        }
    }
    return null;
}
/// <summary>
/// Builds a file entry from a split listing record: fparts[0] is the full path
/// (a trailing '/' marks a directory) and fparts[1], when present, is the size.
/// Directories — and any missing parent folder — are also registered in the
/// global SDCard tables, each with a synthetic ".." child entry for navigation.
/// </summary>
public FileEntry(string[] fparts)
{
    // Normalize away a leading slash.
    if (fparts[0].StartsWith("/"))
        fparts[0] = fparts[0].Substring(1);
    string fullname = fparts[0];
    if (fparts.Length > 1)
        size = fparts[1];
    string[] parts = fullname.Split('/');
    isDirectory = fullname.EndsWith("/");
    if (isDirectory)
    {
        // For "a/b/" the trailing slash leaves an empty final element,
        // so the directory's own name is the second-to-last part.
        name = parts[parts.Length - 2];
        folder = string.Join("/", parts, 0, parts.Length - 2);
        if (parts.Length > 2)
            folder += "/";
        if (name != "..")
        {
            // Register this directory plus a synthetic ".." child.
            SDCard.f.allDirs.Add(fullname, this);
            SDCard.f.allFiles.AddLast(new FileEntry(new string[] { fullname.ToLower() + "../", "" }));
        }
    }
    else
    {
        name = parts[parts.Length - 1];
        if (parts.Length == 1)
            folder = "";
        else
        {
            folder = string.Join("/", parts, 0, parts.Length - 1) + "/";
            // Create the parent folder entry on demand when it hasn't been seen yet.
            if (name != ".." && !SDCard.f.allDirs.Keys.Contains(folder))
            {
                FileEntry ent = new FileEntry(folder);
                SDCard.f.allDirs.Add(folder, ent);
                SDCard.f.allFiles.AddLast(ent);
                SDCard.f.allFiles.AddLast(new FileEntry(new string[] { folder.ToLower() + "../", "" }));
            }
        }
    }
}
/// <summary>
/// Parses the container header, the per-file table, and the per-file HMAC-SHA1
/// hash table from the start of the stream. Returns without reading further if
/// the magic value does not match.
/// </summary>
public void Load(X360IO io)
{
    IO = io;
    IO.Stream.Position = 0x0;

    // Bail out early on an unrecognized magic value.
    Magic = IO.Reader.ReadInt64();
    if (Magic != 0x5343455546000000)
        return;

    Version = IO.Reader.ReadInt64();
    ImageVersion = IO.Reader.ReadInt64();
    FileCount = IO.Reader.ReadInt64();
    HeaderSize = IO.Reader.ReadInt64();
    DataSize = IO.Reader.ReadInt64();

    // File table: one fixed-size record per file.
    Files = new List<FileEntry>();
    for (int i = 0; i < FileCount; i++)
    {
        Files.Add(new FileEntry
        {
            ID = IO.Reader.ReadInt64(),
            Offset = IO.Reader.ReadInt64(),
            Size = IO.Reader.ReadInt64(),
            Padding = IO.Reader.ReadInt64()
        });
    }

    // Hash table: an HMAC-SHA1 per file, linked back to its file by ID.
    Hashes = new List<HashEntry>();
    for (int i = 0; i < FileCount; i++)
    {
        HashEntry hash = new HashEntry();
        hash.FileID = IO.Reader.ReadInt64();
        hash.HMACSHA1 = IO.Reader.ReadBytes(0x14);
        hash.Padding = IO.Reader.ReadInt32();
        Hashes.Add(hash);
        Files[(int)hash.FileID].Hash = hash;
    }

    HeaderHash = IO.Reader.ReadBytes(0x14);
    Padding = IO.Reader.ReadBytes(0xC);
}
/// <summary>
/// Reads the archive's table of contents into a TreeNode: a header of
/// (unused, file count, base data offset) followed by one record per file —
/// a NUL-terminated name, a length, and an unused dword. File data is packed
/// back to back starting at the base offset.
/// </summary>
public static System.Windows.Forms.TreeNode ParseFile(string path)
{
    System.Windows.Forms.TreeNode root = new System.Windows.Forms.TreeNode();
    System.IO.BinaryReader stream = new System.IO.BinaryReader(System.IO.File.OpenRead(path));

    stream.ReadUInt32();
    uint nFiles = stream.ReadUInt32();
    uint baseOffset = stream.ReadUInt32();

    for (uint i = 0; i < nFiles; i++)
    {
        FileEntry f = new FileEntry();

        // The name is stored as a NUL-terminated byte string.
        char b;
        while ((b = (char)stream.ReadByte()) != 0)
        {
            f.name += b;
        }

        f.length = stream.ReadUInt32();
        stream.ReadUInt32();

        // Offsets are implicit: each file follows the previous one.
        f.offset = baseOffset;
        baseOffset += f.length;
        f.idx = i;

        System.Windows.Forms.TreeNode child = new System.Windows.Forms.TreeNode(f.name);
        child.Tag = f;
        root.Nodes.Add(child);
    }

    return root;
}
/// <summary>
/// Loads a TOC file and rebuilds the entry list from its lines, extracting the
/// "name" and "payload" fields of each record, then refreshes the UI list.
/// </summary>
public void LoadFile(string path)
{
    initfs = new TOCFile(path);
    list = new List<FileEntry>();

    foreach (BJSON.Entry e in initfs.lines)
    {
        // Only lines that actually carry fields describe a file record.
        if (e.fields == null || e.fields.Count == 0)
            continue;

        BJSON.Field file = e.fields[0];
        List<BJSON.Field> data = (List<BJSON.Field>)file.data;
        FileEntry entry = new FileEntry();

        // Pull out the name and raw payload; any other fields are ignored.
        foreach (BJSON.Field f in data)
        {
            if (f.fieldname == "name")
            {
                entry.name = (string)f.data;
            }
            else if (f.fieldname == "payload")
            {
                entry.data = (byte[])f.data;
            }
        }

        list.Add(entry);
    }

    RefreshList();
}
/// <summary>
/// Replaces the common leading portion of the selected entries' remote paths
/// with a new path chosen by the user, then reloads the list.
/// </summary>
private void cmdChgRDir_Click(object sender, EventArgs e)
{
    if (lstFiles.SelectedItems.Count == 0)
    {
        return;
    }

    // Collect the remote paths of the selection to find their shared prefix.
    List<string> s = new List<string>();
    foreach (ListViewItem l in lstFiles.SelectedItems)
    {
        FileEntry fe = (FileEntry)l.Tag;
        s.Add(fe.RemoteFile);
    }

    string Same = CommonUtilities.GetCommonBeginning(s);
    if (string.IsNullOrWhiteSpace(Same))
    {
        MessageBox.Show(this, "Paths are different.", Program.Title, MessageBoxButtons.OK, MessageBoxIcon.Information);
        return;
    }

    frmChangePaths frm = new frmChangePaths("Change paths", Same, MID, frmChangePaths.BrowseWhere.Local);
    if (frm.ShowDialog(this) != DialogResult.OK)
    {
        return;
    }

    string New = frm.ReturnedPath;
    if (New.Trim() == "")
    {
        return;
    }

    // Normalize the replacement's trailing backslash to match the prefix's.
    if (Same.EndsWith("\\"))
    {
        if (!New.EndsWith("\\"))
        {
            New += "\\";
        }
    }
    else
    {
        if (New.EndsWith("\\"))
        {
            New = New.Substring(0, New.Length - 1);
        }
    }

    // Swap the common prefix for the new path on every selected entry.
    foreach (ListViewItem l in lstFiles.SelectedItems)
    {
        FileEntry fe = (FileEntry)l.Tag;
        // Ordinal, case-insensitive comparison: file paths must not be matched
        // with culture-sensitive rules (the old ToLower() round-trip was).
        if (!fe.RemoteFile.StartsWith(Same, StringComparison.OrdinalIgnoreCase))
        {
            continue;
        }
        fe.RemoteFile = New + fe.RemoteFile.Substring(Same.Length);
    }

    LoadList();
}
/// <summary>
/// Parses the archive from the stream: fixed header, directory table, then the
/// per-directory node records (names resolved through the string table),
/// populating Directories, files, and the root node list.
/// </summary>
public void Load(System.IO.Stream stream)
{
    CanSave = true;
    CanRenameFiles = true;
    CanReplaceFiles = true;

    files.Clear();

    using (var reader = new FileReader(stream))
    {
        _savedDirectories.Clear();

        reader.ByteOrder = Syroot.BinaryData.ByteOrder.BigEndian;
        string signature = reader.ReadString(4, Encoding.ASCII);
        // A byte-swapped magic marks a little-endian archive.
        if (signature == "CRAR")
        {
            IsLittleEndian = true;
            reader.ByteOrder = Syroot.BinaryData.ByteOrder.LittleEndian;
        }

        uint FileSize = reader.ReadUInt32();
        HeaderSize = reader.ReadUInt32();
        uint DataOffset = reader.ReadUInt32();
        uint FileDataSize = reader.ReadUInt32();
        uint MRamSize = reader.ReadUInt32();
        uint ARamSize = reader.ReadUInt32();
        byte[] Padding = reader.ReadBytes(4);

        // A non-zero size selects which RAM pool the archive targets.
        if (MRamSize != 0)
        {
            RamType |= RamAllocation.MRAM;
        }
        else if (ARamSize != 0)
        {
            RamType |= RamAllocation.ARAM;
        }

        //Info Block
        // Offsets read below are relative to this position.
        long pos = reader.Position;

        uint DirectoryCount = reader.ReadUInt32();
        uint DirectoryOffset = reader.ReadUInt32() + (uint)pos;
        uint TotalNodeCount = reader.ReadUInt32();
        uint NodeOffset = reader.ReadUInt32() + (uint)pos;
        uint StringTableSize = reader.ReadUInt32();
        uint StringTablOffset = reader.ReadUInt32() + (uint)pos;
        ushort NodeCount = reader.ReadUInt16();
        Unknown = reader.ReadUInt16();
        byte[] Padding2 = reader.ReadBytes(4);

        Directories = new DirectoryEntry[DirectoryCount];
        for (int dir = 0; dir < DirectoryCount; dir++)
        {
            Directories[dir] = new DirectoryEntry(this);
        }

        Console.WriteLine($"DirectoryCount {DirectoryCount}");
        Console.WriteLine($"StringTablOffset {StringTablOffset}");

        reader.SeekBegin(DirectoryOffset);
        for (int dir = 0; dir < DirectoryCount; dir++)
        {
            Directories[dir].Read(reader);
        }

        for (int dir = 0; dir < DirectoryCount; dir++)
        {
            uint NamePointer = StringTablOffset + Directories[dir].NameOffset;
            Directories[dir].Name = ReadStringAtTable(reader, NamePointer);

            Console.WriteLine($"{ Directories[dir].Name } {dir}");

            for (int n = 0; n < Directories[dir].NodeCount; n++)
            {
                // Node records are 0x14 bytes each, indexed from FirstNodeIndex.
                reader.SeekBegin(NodeOffset + ((n + Directories[dir].FirstNodeIndex) * 0x14));
                FileEntry entry = new FileEntry();
                entry.Read(reader, IsLittleEndian);
                NamePointer = StringTablOffset + entry.NameOffset;
                entry.Name = ReadStringAtTable(reader, NamePointer);

                //Root and parent strings. Skip these unnecessary nodes.
                if (entry.Name == "." || entry.Name == "..")
                {
                    continue;
                }

                if (entry.IsDirectory)
                {
                    // For directory nodes, Offset indexes into the directory table.
                    // Directories[entry.Offset].ID = entry.FileId; //0xFFFF or 0
                    Directories[dir].AddNode(Directories[entry.Offset]);
                    _savedDirectories.Add(entry);
                }
                else
                {
                    // For file nodes, Offset is relative to the file data block.
                    using (reader.TemporarySeek(pos + DataOffset + entry.Offset, System.IO.SeekOrigin.Begin))
                    {
                        entry.FileData = reader.ReadBytes((int)entry.Size);
                    }

                    entry.FileName = entry.Name;

                    files.Add(entry);
                    Directories[dir].AddNode(entry);
                }
            }
        }

        // The first directory is the archive root.
        this.Name = Directories[0].Name;
        nodes.AddRange(Directories[0].Nodes);
    }
}
/// <summary>
/// Reads the ZIP central directory from <c>this.stream</c> and returns its file
/// entries. If any entry uses the streaming format (general-purpose flag bit 3
/// set — sizes written after the compressed data), the whole archive is rebuilt
/// into a new MemoryStream with sizes patched into the local headers and the
/// flag bit cleared, and <c>this.stream</c> is replaced with the rebuilt copy.
/// </summary>
private List<FileEntry> ParseCentralDirectory()
{
    BinaryReader reader = new BinaryReader(this.stream);
    List<FileEntry> entries = new List<FileEntry>();

    // Scan backwards from the end for the end-of-central-directory signature
    // (101010256 == 0x06054B50).
    reader.BaseStream.Seek(-4, SeekOrigin.End);
    while (reader.ReadInt32() != 101010256)
    {
        reader.BaseStream.Seek(-5, SeekOrigin.Current);
    }

    reader.BaseStream.Seek(6, SeekOrigin.Current);
    short entryCount = reader.ReadInt16();
    int directorySize = reader.ReadInt32();
    int directoryStart = reader.ReadInt32();
    reader.BaseStream.Seek(directoryStart, SeekOrigin.Begin);

    bool needsFixing = false;
    for (int i = 0; i < entryCount; i++)
    {
        // 33639248 == 0x02014B50.
        int headerSignature = reader.ReadInt32();
        if (headerSignature == 33639248) //Central directory file header signature
        {
            reader.BaseStream.Seek(4, SeekOrigin.Current);
            byte flag = reader.ReadByte();
            if ((flag & 8) > 0) //Silverlight doesn't like this format. We'll "fix it" further below
            {
                needsFixing = true;
            }
            reader.BaseStream.Seek(7, SeekOrigin.Current);
            int crc32 = reader.ReadInt32();
            int compressedSize = reader.ReadInt32();
            int unCompressedSize = reader.ReadInt32();
            short fileNameLenght = reader.ReadInt16();
            short extraFieldLength = reader.ReadInt16();
            short fileCommentLength = reader.ReadInt16();
            reader.BaseStream.Seek(8, SeekOrigin.Current);
            int fileStart = reader.ReadInt32();
            string filename = new string(reader.ReadChars(fileNameLenght));
            entries.Add(new FileEntry()
            {
                Filename = filename,
                FileStart = fileStart,
                CRC32 = crc32,
                CompressedSize = compressedSize,
                UncompressedSize = unCompressedSize
            });
            // Skip past the variable-length tail of the directory record.
            reader.BaseStream.Seek(extraFieldLength + fileCommentLength, SeekOrigin.Current);
        }
    }

    if (needsFixing)
    {
        //We are using a zipformat that Silverlight doesn't like.
        //Zipfiles where the file size is reported after the compressed data
        //is a no-go, so we rebuild the header and report the information there.
        MemoryStream newZip = new MemoryStream();
        BinaryWriter writer = new BinaryWriter(newZip);

        //Rebuild file entries
        foreach (FileEntry entry in entries)
        {
            FileEntry e = entry;
            reader.BaseStream.Seek(entry.FileStart, SeekOrigin.Begin);
            // Record where this entry now starts in the rebuilt stream.
            e.FileStart = (int)writer.BaseStream.Position;
            CopyBytes(reader, writer, 6);
            byte flag = reader.ReadByte();
            writer.Write((byte)(247 & flag)); //set 3rd bit to 0 to indicate the new format
            CopyBytes(reader, writer, 7);
            writer.Write(entry.CRC32); //Update CRC
            writer.Write(entry.CompressedSize); //Update Compressed size
            writer.Write(entry.UncompressedSize); //Update Uncompressed size
            writer.Write((short)entry.Filename.Length);
            reader.BaseStream.Seek(14, SeekOrigin.Current);
            short fieldLength = reader.ReadInt16();
            writer.Write(fieldLength);
            // Copy the name, extra field and compressed payload verbatim.
            CopyBytes(reader, writer, entry.Filename.Length + fieldLength + entry.CompressedSize);
        }

        //Rebuild directory
        reader.BaseStream.Seek(directoryStart, SeekOrigin.Begin);
        for (int i = 0; i < entryCount; i++)
        {
            CopyBytes(reader, writer, 8);
            byte flag = reader.ReadByte();
            writer.Write((byte)(247 & flag)); //set 3rd bit to 0 to indicate the new format
            CopyBytes(reader, writer, 19);
            short filenamelength = reader.ReadInt16();
            writer.Write(filenamelength);
            short extrafieldlength = reader.ReadInt16();
            writer.Write(extrafieldlength);
            short filecommentlength = reader.ReadInt16();
            writer.Write(filecommentlength);
            CopyBytes(reader, writer, 8);
            // Point the directory record at the entry's new position.
            writer.Write(entries[i].FileStart);
            reader.BaseStream.Seek(4, SeekOrigin.Current);
            CopyBytes(reader, writer, filenamelength + extrafieldlength + filecommentlength);
        }

        // Copy the end-of-central-directory remainder unchanged.
        CopyBytes(reader, writer, (int)(reader.BaseStream.Length - reader.BaseStream.Position));
        this.stream = newZip; //Replace stream with new stream
    }

    return (entries);
}
/// <summary>
/// Compiles a set of in-memory XML coalesce documents into a serialized
/// coalesced file, returning the result as a MemoryStream.
/// </summary>
/// <param name="fileMapping">Map of include names to raw XML content.</param>
public static MemoryStream CompileFromMemory(Dictionary<string, string> fileMapping)
{
    var virtualizedXmlHeader = new XmlCoalesceFile();

    //Virtual load assets: keep only those that parsed and carry a source name.
    var assets = new List<CoalesceAsset>();
    foreach (var include in fileMapping)
    {
        var asset = XmlCoalesceAsset.LoadFromMemory(include.Value);
        if (asset == null || string.IsNullOrEmpty(asset.Source))
        {
            continue;
        }
        assets.Add(asset);
    }

    virtualizedXmlHeader.Assets = assets;

    var coal = new CoalescedFileXml { Version = 1 };

    // Translate every asset into a file entry, section by section.
    foreach (var asset in assets)
    {
        var entry = new FileEntry(asset.Source)
        {
            Sections = new Dictionary<string, Dictionary<string, List<PropertyValue>>>()
        };

        foreach (var section in asset.Sections)
        {
            var eSection = new Dictionary<string, List<PropertyValue>>();

            foreach (var property in section.Value)
            {
                var eProperty = new List<PropertyValue>();

                foreach (var value in property.Value)
                {
                    // Substitute special characters with their escaped forms
                    // before the value is written out.
                    var valueValue = value.Value;
                    if (!string.IsNullOrEmpty(valueValue))
                    {
                        valueValue = SpecialCharacters.Aggregate(valueValue, (current, c) => current.Replace(c.Value, c.Key));
                    }
                    eProperty.Add(new PropertyValue(value.ValueType, valueValue));
                }

                eSection.Add(property.Key, eProperty);
            }

            entry.Sections.Add(section.Key, eSection);
        }

        coal.Files.Add(entry);
    }

    // Serialize the rebuilt coalesced file into an in-memory stream.
    MemoryStream outputStream = new MemoryStream();
    coal.Serialize(outputStream);
    return outputStream;
}
/// <summary>
/// Creates class instance
/// </summary>
/// <param name="name">Display name of this file system.</param>
/// <param name="rootEntry">Root directory; may be omitted (null).</param>
public FileSystemInfo(string name, FileEntry rootEntry = null)
{
    Root = rootEntry;
    Name = name;
}
/// <summary>
/// Marks the given file entry as new for the subject.
/// Intentionally a no-op in this implementation.
/// </summary>
public void MarkAsNew(Guid subject, FileEntry fileEntry)
{
}
/// <summary>
/// Returns the "new" tags for the subject and entry. This implementation tracks
/// nothing and always returns null.
/// </summary>
public IEnumerable<Tag> GetNewTags(Guid subject, FileEntry fileEntry)
{
    return (null);
}
/// <summary>
/// Parses the archive from the stream: reads the file-section table, loads the
/// chunk table from the first section, then walks the chunk sub-entries in
/// order to build textures, models, skeletons and material names. Chunk order
/// is significant — each chunk mutates the "current" texture/model instance.
/// </summary>
public void Load(System.IO.Stream stream)
{
    modelFolder = new LM2_ModelFolder(this);
    DrawableContainer.Name = FileName;
    Renderer = new LM2_Renderer();
    DrawableContainer.Drawables.Add(Renderer);

    Text = FileName;

    using (var reader = new FileReader(stream))
    {
        reader.ByteOrder = Syroot.BinaryData.ByteOrder.LittleEndian;
        uint Identifier = reader.ReadUInt32();
        ushort Unknown = reader.ReadUInt16(); //Could also be 2 bytes, not sure. Always 0x0401
        IsCompressed = reader.ReadByte() == 1;
        reader.ReadByte(); //Padding
        uint FileCount = reader.ReadUInt32();
        uint LargestCompressedFile = reader.ReadUInt32();

        reader.SeekBegin(0x2C);
        byte[] Unknowns = reader.ReadBytes((int)FileCount);

        TreeNode tableNodes = new TreeNode("File Section Entries");

        long FileTablePos = reader.Position;
        for (int i = 0; i < FileCount; i++)
        {
            var file = new FileEntry(this);
            file.Text = $"entry {i}";
            file.Read(reader);
            fileEntries.Add(file);
            tableNodes.Nodes.Add(file);

            //The first file stores a chunk layout
            //The second one seems to be a duplicate?
            if (i == 0)
            {
                using (var tableReader = new FileReader(file.GetData()))
                {
                    ChunkTable = new LM2_ChunkTable();
                    ChunkTable.Read(tableReader);

                    // Debug tree exposing the raw chunk table contents.
                    TreeNode debugFolder = new TreeNode("DEBUG TABLE INFO");
                    Nodes.Add(debugFolder);

                    TreeNode list1 = new TreeNode("Entry List 1");
                    TreeNode list2 = new TreeNode("Entry List 2 ");
                    debugFolder.Nodes.Add(tableNodes);
                    debugFolder.Nodes.Add(list1);
                    debugFolder.Nodes.Add(list2);
                    debugFolder.Nodes.Add(chunkFolder);

                    foreach (var chunk in ChunkTable.ChunkEntries)
                    {
                        list1.Nodes.Add($"ChunkType {chunk.ChunkType} ChunkOffset {chunk.ChunkOffset} Unknown1 {chunk.Unknown1} ChunkSubCount {chunk.ChunkSubCount} Unknown3 {chunk.Unknown3}");
                    }
                    foreach (var chunk in ChunkTable.ChunkSubEntries)
                    {
                        list2.Nodes.Add($"ChunkType {chunk.ChunkType} ChunkSize {chunk.ChunkSize} ChunkOffset {chunk.ChunkOffset}");
                    }
                }
            }
        }

        //Set an instance of our current data
        //Chunks are in order, so you build off of when an instance gets loaded
        TexturePOWE currentTexture = new TexturePOWE();
        LM2_Model currentModel = new LM2_Model(this);

        //Each part of the file is divided into multiple file/section entries
        //The first entry being the chunk table parsed before this
        //The second file being a duplicate (sometimes slightly larger than the first)
        //The third file stores texture headers, while the fourth one usually has the rest of the main data
        //Any additional ones are currently unknown how they work; some of them have
        //unknown compression as well.
        byte[] File002Data = fileEntries[2].GetData(); //Get the third file
        byte[] File003Data = fileEntries[3].GetData(); //Get the fourth file

        LuigisMansion3.LM3_DICT.LoadHashes();

        int chunkId = 0;
        uint ImageHeaderIndex = 0;
        uint modelIndex = 0;
        foreach (var chunk in ChunkTable.ChunkSubEntries)
        {
            var chunkEntry = new ChunkDataEntry(this, chunk);
            chunkEntry.DataFile = File003Data;
            chunkEntry.Text = $"Chunk {chunk.ChunkType.ToString("X")} {chunk.ChunkType} {chunkId++}";
            chunkEntries.Add(chunkEntry);
            chunkFolder.Nodes.Add(chunkEntry);

            switch (chunk.ChunkType)
            {
                case SubDataType.TextureHeader:
                    // Texture headers live in the third file, not the fourth.
                    chunkEntry.DataFile = File002Data;

                    //Read the info
                    using (var textureReader = new FileReader(chunkEntry.FileData))
                    {
                        currentTexture = new TexturePOWE();
                        currentTexture.ImageKey = "texture";
                        currentTexture.SelectedImageKey = currentTexture.ImageKey;
                        currentTexture.Index = ImageHeaderIndex;
                        currentTexture.Read(textureReader);
                        currentTexture.Text = $"Texture {ImageHeaderIndex}";
                        textureFolder.Nodes.Add(currentTexture);
                        Renderer.TextureList.Add(currentTexture);
                        ImageHeaderIndex++;
                    }
                    break;
                case SubDataType.TextureData:
                    // Attaches raw pixels to the texture created by the
                    // preceding TextureHeader chunk.
                    currentTexture.ImageData = chunkEntry.FileData;
                    break;
                case SubDataType.ModelStart:
                    currentModel = new LM2_Model(this);
                    currentModel.ModelInfo = new LM2_ModelInfo();
                    currentModel.Text = $"Model {modelIndex}";
                    currentModel.ModelInfo.Data = chunkEntry.FileData;
                    modelFolder.Nodes.Add(currentModel);
                    modelIndex++;
                    break;
                case SubDataType.MeshBuffers:
                    currentModel.BufferStart = chunkEntry.Entry.ChunkOffset;
                    currentModel.BufferSize = chunkEntry.Entry.ChunkSize;
                    break;
                case SubDataType.BoneData:
                    // Bone records are 68 (0x44) bytes each; only the first
                    // skeleton encountered per model is kept.
                    if (chunk.ChunkSize > 0x40 && currentModel.Skeleton == null)
                    {
                        using (var boneReader = new FileReader(chunkEntry.FileData))
                        {
                            currentModel.Skeleton = new STSkeleton();
                            DrawableContainer.Drawables.Add(currentModel.Skeleton);

                            uint numBones = chunk.ChunkSize / 68;
                            for (int i = 0; i < numBones; i++)
                            {
                                boneReader.SeekBegin(i * 68);
                                uint hash = boneReader.ReadUInt32();
                                STBone bone = new STBone(currentModel.Skeleton);
                                // Bone names are hashes, resolved via the hash
                                // dictionary when possible.
                                bone.Text = hash.ToString("X");
                                if (LuigisMansion3.LM3_DICT.HashNames.ContainsKey(hash))
                                {
                                    bone.Text = LuigisMansion3.LM3_DICT.HashNames[hash];
                                }
                                bone.position = new float[3] { 0, 0, 0 };
                                bone.rotation = new float[4] { 0, 0, 0, 1 };
                                bone.scale = new float[3] { 0.2f, 0.2f, 0.2f };
                                // Position floats start at byte 52 of the record.
                                boneReader.SeekBegin(52 + (i * 68));
                                var Position = new OpenTK.Vector3(boneReader.ReadSingle(), boneReader.ReadSingle(), boneReader.ReadSingle());
                                // Rotated 90° around X — presumably an up-axis
                                // conversion for the viewer; verify.
                                Position = OpenTK.Vector3.TransformPosition(Position, OpenTK.Matrix4.CreateRotationX(OpenTK.MathHelper.DegreesToRadians(90)));
                                bone.position[0] = Position.X;
                                bone.position[2] = Position.Y;
                                bone.position[1] = Position.Z;
                                bone.RotationType = STBone.BoneRotationType.Euler;
                                currentModel.Skeleton.bones.Add(bone);
                            }

                            currentModel.Skeleton.reset();
                            currentModel.Skeleton.update();
                        }
                    }
                    break;
                case SubDataType.VertexStartPointers:
                    using (var vtxPtrReader = new FileReader(chunkEntry.FileData))
                    {
                        while (!vtxPtrReader.EndOfStream)
                        {
                            currentModel.VertexBufferPointers.Add(vtxPtrReader.ReadUInt32());
                        }
                    }
                    break;
                case SubDataType.SubmeshInfo:
                    // Mesh records are 0x28 bytes each.
                    int MeshCount = chunkEntry.FileData.Length / 0x28;
                    using (var meshReader = new FileReader(chunkEntry.FileData))
                    {
                        for (uint i = 0; i < MeshCount; i++)
                        {
                            LM2_Mesh mesh = new LM2_Mesh();
                            mesh.Read(meshReader);
                            currentModel.Meshes.Add(mesh);
                        }
                    }
                    currentModel.ModelInfo.Read(new FileReader(currentModel.ModelInfo.Data), currentModel.Meshes);
                    break;
                case SubDataType.ModelTransform:
                    using (var transformReader = new FileReader(chunkEntry.FileData))
                    {
                        //This is possibly very wrong
                        //The data isn't always per mesh, but sometimes is
                        if (transformReader.BaseStream.Length / 0x40 == currentModel.Meshes.Count)
                        {
                            for (int i = 0; i < currentModel.Meshes.Count; i++)
                            {
                                currentModel.Meshes[i].Transform = transformReader.ReadMatrix4();
                            }
                        }
                    }
                    break;
                case SubDataType.BoneHashes:
                    using (var chunkReader = new FileReader(chunkEntry.FileData))
                    {
                        while (chunkReader.Position <= chunkReader.BaseStream.Length - 4)
                        {
                            uint hash = chunkReader.ReadUInt32();
                            string strHash = hash.ToString("X");
                            if (LuigisMansion3.LM3_DICT.HashNames.ContainsKey(hash))
                            {
                                strHash = LuigisMansion3.LM3_DICT.HashNames[hash];
                            }
                            Console.WriteLine("Hash! T " + strHash);
                        }
                    }
                    break;
                case (SubDataType)0x12017105:
                    // NOTE(review): reads (hash, unknown) pairs and matches them
                    // against bone names, but the match body is currently empty —
                    // looks unfinished; confirm intent before relying on it.
                    using (var chunkReader = new FileReader(chunkEntry.FileData))
                    {
                        while (chunkReader.Position <= chunkReader.BaseStream.Length - 8)
                        {
                            uint hash = chunkReader.ReadUInt32();
                            uint unk = chunkReader.ReadUInt32();
                            string strHash = hash.ToString("X");
                            if (LuigisMansion3.LM3_DICT.HashNames.ContainsKey(hash))
                            {
                                strHash = LuigisMansion3.LM3_DICT.HashNames[hash];
                            }
                            foreach (var bone in currentModel.Skeleton.bones)
                            {
                                if (bone.Text == strHash)
                                {
                                }
                            }
                        }
                    }
                    currentModel.Skeleton.reset();
                    currentModel.Skeleton.update();
                    break;
                case SubDataType.MaterialName:
                    using (var matReader = new FileReader(chunkEntry.FileData))
                    {
                        materialNamesFolder.Nodes.Add(matReader.ReadZeroTerminatedString());
                    }
                    break;
                default:
                    break;
            }
        }

        // Buffers can only be decoded once all pointer/size chunks are in.
        foreach (LM2_Model model in modelFolder.Nodes)
        {
            model.ReadVertexBuffers();
        }

        // Only attach folders that actually received content.
        if (modelFolder.Nodes.Count > 0)
        {
            Nodes.Add(modelFolder);
        }
        if (textureFolder.Nodes.Count > 0)
        {
            Nodes.Add(textureFolder);
        }
        if (materialNamesFolder.Nodes.Count > 0)
        {
            Nodes.Add(materialNamesFolder);
        }
    }
}
/// <summary>
/// Registers a new file entry with the given name under the current directory.
/// </summary>
public void AddFile(string name)
{
    FileEntry created = new FileEntry(name, DirEntry, true);
    DirEntry.files.Add(created);
}
/// <summary>
/// Stream reader for remote file entries that transfers data in messages of at
/// most <paramref name="maxMessageSize"/> bytes; all other collaborators are
/// passed through to the base reader.
/// </summary>
public RemoteFileEntryStreamReader(IJSRunner jsRunner, ElementReference elementRef, FileEntry fileEntry, FileEdit fileEdit, int maxMessageSize)
    : base(jsRunner, elementRef, fileEntry, fileEdit)
{
    this.maxMessageSize = maxMessageSize;
}
/// <summary>
/// Returns the pure share records for the entry. This implementation stores no
/// share records and always returns null.
/// </summary>
public IEnumerable<FileShareRecord> GetPureShareRecords(FileEntry entry)
{
    return (null);
}
/// <summary>
/// Validates the file on disk against the checksum and size recorded in its
/// entry, delegating to the overload that takes the raw values.
/// </summary>
public static bool IsFileValid(string fullPath, FileEntry fileEntry)
{
    return IsFileValid(fullPath, fileEntry.checksum, fileEntry.size);
}
/// <summary>
/// Processes the new lines: parses each line of `ls -l`-style output into a
/// FileEntry (reusing an existing entry for the name when one exists) and
/// appends the result to Entries. Unparsable lines are logged and skipped.
/// </summary>
/// <param name="lines">The lines.</param>
protected override void ProcessNewLines(IEnumerable<string> lines)
{
    foreach (String line in lines)
    {
        // no need to handle empty lines.
        if (line.Length == 0)
        {
            continue;
        }

        // run the line through the regexp
        var m = line.Trim().Match(FileListingService.LS_PATTERN_EX, RegexOptions.Compiled);
        if (!m.Success)
        {
            Log.v("madb", "no match on file pattern: {0}", line);
            continue;
        }

        // get the name
        String name = m.Groups[9].Value;

        if (String.Compare(name, ".", true) == 0 || String.Compare(name, "..", true) == 0)
        {
            // we don't care if the entry is a "." or ".."
            continue;
        }

        // get the rest of the groups
        String permissions = m.Groups[1].Value;
        String owner = m.Groups[2].Value;
        String group = m.Groups[3].Value;
        // a trailing "*" marker flags the entry as executable
        bool isExec = String.Compare(m.Groups[10].Value, "*", true) == 0;
        long size = 0;
        String sizeData = m.Groups[4].Value.Trim();
        // missing size column falls back to 0
        long.TryParse(String.IsNullOrEmpty(sizeData) ? "0" : sizeData, out size);
        String date1 = m.Groups[5].Value.Trim();
        String date2 = m.Groups[6].Value.Trim();
        String date3 = m.Groups[7].Value.Trim();
        DateTime date = DateTimeHelper.Epoch;
        String time = m.Groups[8].Value.Trim();
        // when no time column is present, use the epoch's time-of-day
        if (String.IsNullOrEmpty(time))
        {
            time = date.ToString("HH:mm");
        }
        if (date1.Length == 3)
        {
            // three letters means a month name ("MMM dd ..." layout);
            // check if we don't have a year and use current if we don't
            String tyear = String.IsNullOrEmpty(date3) ? DateTime.Now.Year.ToString() : date3;
            date = DateTime.ParseExact(String.Format("{0}-{1}-{2} {3}", date1, date2.PadLeft(2, '0'), tyear, time), "MMM-dd-yyyy HH:mm", CultureInfo.CreateSpecificCulture("en-US"));
        }
        else if (date1.Length == 4)
        {
            // four digits means an ISO-style "yyyy-MM-dd HH:mm" layout
            date = DateTime.ParseExact(String.Format("{0}-{1}-{2} {3}", date1, date2.PadLeft(2, '0'), date3, time), "yyyy-MM-dd HH:mm", CultureInfo.CreateSpecificCulture("en-US"));
        }

        String info = null;
        String linkName = null;

        // and the type: derived from the first permissions character
        FileListingService.FileTypes objectType = FileListingService.FileTypes.Other;
        switch (permissions[0])
        {
            case '-':
                objectType = FileListingService.FileTypes.File;
                break;
            case 'b':
                objectType = FileListingService.FileTypes.Block;
                break;
            case 'c':
                objectType = FileListingService.FileTypes.Character;
                break;
            case 'd':
                objectType = FileListingService.FileTypes.Directory;
                break;
            case 'l':
                objectType = FileListingService.FileTypes.Link;
                break;
            case 's':
                objectType = FileListingService.FileTypes.Socket;
                break;
            case 'p':
                objectType = FileListingService.FileTypes.FIFO;
                break;
        }

        // now check what we may be linking to
        if (objectType == FileListingService.FileTypes.Link)
        {
            String[] segments = name.Split(new string[] { " -> " }, StringSplitOptions.RemoveEmptyEntries);
            // we should have 2 segments
            if (segments.Length == 2)
            {
                // update the entry name to not contain the link
                name = segments[0];
                // and the link name
                info = segments[1];
                // Links to directories don't have any size information.
                if (string.IsNullOrEmpty(sizeData))
                {
                    objectType = FileListingService.FileTypes.DirectoryLink;
                }
            }
            else
            {
            }
            linkName = info;
            // add an arrow in front to specify it's a link.
            info = String.Format(LINK_FORMAT, info);
        }

        // get the entry, either from an existing one, or a new one
        FileEntry entry = GetExistingEntry(name);
        if (entry == null)
        {
            entry = new FileEntry(new FileSystem(Parent.Device), Parent, name, objectType, false /* isRoot */);
        }

        // add some misc info
        entry.Permissions = new FilePermissions(permissions);
        entry.Size = size;
        entry.Date = date;
        entry.Owner = owner;
        entry.Group = group;
        entry.IsExecutable = isExec;
        entry.LinkName = linkName;
        if (objectType == FileListingService.FileTypes.Link || objectType == FileListingService.FileTypes.DirectoryLink)
        {
            entry.Info = info;
        }

        Entries.Add(entry);
    }
}
// When the manifest's checksum and size are already known, this entry point can be
// used directly, skipping retrieval of the separate checksum file.
// Tries the locally cached manifest first; otherwise schedules an urgent download
// job and fires the callback once the downloaded manifest has been parsed.
public static void GetManifestDirect(string localPathRoot, DownloadWorker worker, FileEntry fileEntry, string password, Action<Manifest, FileEntry> callback)
{
    var manifestPath = Path.Combine(localPathRoot, Manifest.ManifestFileName);
    // Attempt to parse an existing local manifest (validated against fileEntry).
    var manifest_t = ParseManifestFile(manifestPath, fileEntry, password);
    if (manifest_t != null)
    {
        callback(manifest_t, fileEntry);
    }
    else
    {
        // Not available locally: queue a download flagged as emergency and parse
        // the file once the job's completion callback runs.
        var manifestJob = new DownloadWorker.JobInfo(Manifest.ManifestFileName, fileEntry.checksum, "Manifest", 0, fileEntry.size, manifestPath)
        {
            emergency = true,
            callback = () =>
            {
                var manifest = ParseManifestStream(File.OpenRead(manifestPath), fileEntry, password);
                callback(manifest, fileEntry);
            }
        };
        worker.AddJob(manifestJob);
    }
}
/// <summary>
/// Lists the remote FTP directory for the given path, lazily yielding one
/// parsed entry per listing line. Lines that fail to parse are skipped.
/// </summary>
/// <param name="filename">The remote path to list.</param>
public IEnumerable<IFileEntry> List(string filename)
{
    var req = CreateRequest(filename);
    req.Method = System.Net.WebRequestMethods.Ftp.ListDirectoryDetails;
    req.UseBinary = false;

    System.Net.WebResponse resp = null;
    System.IO.Stream rs = null;
    System.IO.StreamReader sr = null;
    try
    {
        // Open the response and wrap it for line-by-line reading; errors are
        // routed through HandleListExceptions so FTP failures are normalized.
        HandleListExceptions(
            () =>
        {
            var areq = new Utility.AsyncHttpRequest(req);
            resp = areq.GetResponse();
            rs = areq.GetResponseStream();
            sr = new System.IO.StreamReader(new StreamReadHelper(rs));
        }, req);

        string line;
        while ((line = HandleListExceptions(() => sr.ReadLine(), req)) != null)
        {
            FileEntry f = ParseLine(line);
            if (f != null)
            {
                yield return (f);
            }
        }
    }
    finally
    {
        // Nested try/finally blocks guarantee every resource is disposed even
        // when an earlier Dispose call throws.
        try
        {
            if (sr != null)
            {
                sr.Dispose();
            }
        }
        finally
        {
            try
            {
                if (rs != null)
                {
                    rs.Dispose();
                }
            }
            finally
            {
                if (resp != null)
                {
                    resp.Dispose();
                }
            }
        }
    }
}
/// <summary>Add a source stream. <c>file</c> is the string by which the
/// sub-stream will be known in the compound stream.
/// </summary>
/// <throws> IllegalStateException if this writer is closed </throws>
/// <throws> NullPointerException if <c>file</c> is null </throws>
/// <throws> IllegalArgumentException if a file with the same name has been added already </throws>
public void AddFile(System.String file)
{
    // No additions are allowed once the streams have been merged.
    if (merged)
        throw new System.SystemException("Can't add extensions after merge has been called");

    if (file == null)
        throw new System.NullReferenceException("file cannot be null");

    // The ids table rejects duplicate keys; surface that as an argument error.
    try
    {
        ids.Add(file, file);
    }
    catch (Exception)
    {
        throw new System.ArgumentException("File " + file + " already added");
    }

    FileEntry entry = new FileEntry();
    entry.file = file;
    entries.Add(entry);
}
/// <summary>Creation is unconditionally permitted by this implementation, for any file and any user.</summary>
public bool CanCreate(FileEntry file, Guid userId) => true;
/// <summary>Delegates the creation check to the generic permission resolver with <see cref="SecurityAction.Create"/>.</summary>
public bool CanCreate(FileEntry entry, Guid userId) => Can(entry, userId, SecurityAction.Create);
/// <summary>Review rights coincide with edit rights in this implementation.</summary>
public bool CanReview(FileEntry file, Guid userId) => CanEdit(file, userId);
/// <summary>
/// Thread-pool job body for one parallel download worker. Args is an object[] of
/// (List&lt;FileEntry&gt; files, int offset); 'offset' selects this worker's base URL/decryptor slot.
/// Workers cooperatively walk the shared (FileIndex, ChunkIndex) cursor under ProgressLock,
/// verify each on-disk chunk via Adler hash, and re-download/decrypt/decompress mismatched
/// chunks with up to 5 + ThreadsCount attempts before failing the whole download.
/// </summary>
// NOTE(review): 'async void' means exceptions thrown after an await would be unobservable if they
// escaped — here everything is caught and stored in CaughtException, and WaitHandles[Offset] is
// always signalled in 'finally', so the pattern is safe but fragile; confirm before refactoring.
// NOTE(review): Writer.Read(Data, ...) assumes the full chunk is read in one call — TODO confirm
// chunk sizes always fit a single FileStream.Read for the files being validated.
private async void DownloadJob(object Args) { object[] ArgsArray = (object[])Args; List <FileEntry> Files = (List <FileEntry>)ArgsArray[0]; int Offset = (int)ArgsArray[1], Total = Files.Count; try { CurrentUICulture = Instance.OSCulture; for (;;) { int CurrentChunk, CurrentFile; lock (ProgressLock) { if (FileIndex == Total) { return; } if (Files[FileIndex].Chunks.Count == 0) { ChunkIndex = 0; FileIndex++; continue; } CurrentChunk = ChunkIndex++; if (ChunkIndex > Files[CurrentFile = FileIndex].Chunks.Count) { ChunkIndex = 0; if (++FileIndex == Total) { return; } continue; } } if (DownloadFailed || Token.IsCancellationRequested) { return; } FileEntry File = Files[CurrentFile]; ChunkEntry Chunk = File.Chunks[CurrentChunk]; using (FileStream Writer = new FileStream($@"{BaseDownloadPath}\{File.Name}", FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite)) { long Position; if (File.IsSliced) { Position = 0L; for (int Iterator = 0; Iterator < CurrentChunk; Iterator++) { Position += File.Chunks[Iterator].UncompressedSize; } Writer.Position = Position; } else { Writer.Position = Position = Chunk.Offset; } byte[] Data = new byte[Chunk.UncompressedSize]; Writer.Read(Data, 0, Chunk.UncompressedSize); bool HashMatch = ComputeAdlerHash(Data) == Chunk.Checksum; if (!HashMatch) { Data = null; string GID = BitConverter.ToString(Chunk.GID).Replace("-", string.Empty), Message = null; for (int AttemptsCount = 0; AttemptsCount < 5 + ThreadsCount; AttemptsCount++) { try { Data = await DownloadSteamChunk(BaseURLs[AttemptsCount < 5 ? Offset : AttemptsCount - 5], GID, Chunk.CompressedSize, Token); } catch (Exception Exception) { while (Exception is AggregateException) { Exception = Exception.InnerException; } Message = Exception.Message; } if (Token.IsCancellationRequested) { return; } if (Data is null) { continue; } int ErrorIndex = 0; try { Data = AESDecrypt(Data, DepotKey, Decryptors[Offset]); ErrorIndex++; Data = Data[0] == 'V' && Data[1] == 'Z' ? 
Decompressor.Decompress(Data, Offset) : Decompress(Data); } catch { Data = null; Message = LocString(LocCode.DecryptionFailure + ErrorIndex); continue; } if (ComputeAdlerHash(Data) != Chunk.Checksum) { Data = null; Message = LocString(LocCode.AdlerHashMismatch); continue; } break; } if (Data is null) { Log($"({BaseURLs[Offset]}) Failed to download chunk {GID}: {Message}"); throw new ValidatorException(Message); } Writer.Position = Position; Writer.Write(Data, 0, Chunk.UncompressedSize); } lock (ProgressLock) { if (HashMatch) { Progress.IncreaseNoETA(Chunk.CompressedSize); } else { Progress.Increase(Chunk.CompressedSize); } } } } } catch (Exception Exception) { DownloadFailed = true; CaughtException = Exception; } finally { WaitHandles[Offset].Set(); } }
/// <summary>
/// Resolves <paramref name="path"/> through the device's link resolver, then walks it
/// segment by segment beneath <paramref name="parent"/> using cached child listings.
/// </summary>
/// <param name="parent">The entry the walk starts from.</param>
/// <param name="path">The device path to locate.</param>
/// <returns>The located <see cref="FileEntry"/>.</returns>
/// <exception cref="FileNotFoundException">Thrown when the walked-to entry's full or resolved
/// path matches neither the requested path nor its link-resolved form.</exception>
// NOTE(review): when a segment matches no child, 'current' silently stays on the previous entry;
// only the final path comparison below catches the miss — confirm this fall-through is intended.
public FileEntry FindFileEntry( FileEntry parent, String path ) { var rpath = Device.FileSystem.ResolveLink ( path ); var entriesString = rpath.Split ( new char[] { LinuxPath.DirectorySeparatorChar }, StringSplitOptions.RemoveEmptyEntries ); FileEntry current = parent; foreach ( var pathItem in entriesString ) { FileEntry[] entries = GetChildren ( current, true, null ); foreach ( var e in entries ) { if ( String.Compare ( e.Name, pathItem, false ) == 0 ) { current = e; break; } } } // better checking if the file is the "same" based on the link or the reference if ( ( String.Compare ( current.FullPath, path, false ) == 0 || String.Compare ( current.FullResolvedPath, path, false ) == 0 || String.Compare ( current.FullPath, rpath, false ) == 0 || String.Compare ( current.FullResolvedPath, rpath, false ) == 0 ) ) { return current; } else { throw new FileNotFoundException ( String.Format ( "Unable to locate {0}", path ) ); } }
/// <summary>Share records are not tracked by this provider; always yields null.</summary>
public IEnumerable<FileShareRecord> GetShares(FileEntry<string> entry) => null;
/// <summary>
/// Runs "ls" on the device for <paramref name="entry"/> and replaces the entry's
/// children with the parsed, sorted listing.
/// </summary>
/// <param name="entry">The directory entry to list.</param>
private void DoLS( FileEntry entry )
{
    // Receives the parsed child entries.
    var children = new List<FileEntry> ( );
    // Receives link paths to classify (file vs directory) once the listing is done.
    var pendingLinks = new List<String> ( );

    try
    {
        // Build the ls command, preferring busybox when forced.
        String lsCommand = String.Format ( ForceBusyBox ? BUSYBOX_LS : TOOLBOX_LS, entry.FullPath );

        // The receiver parses raw ls output into 'children' / 'pendingLinks'.
        var receiver = new ListingServiceReceiver ( entry, children, pendingLinks );
        Device.ExecuteShellCommand ( lsCommand, receiver );

        // Post-process the collected links now that the listing is complete.
        receiver.FinishLinks ( );
    }
    catch ( IOException e )
    {
        Log.e ( "ddms", e );
        throw;
    }

    // Stamp the fetch time so cache-freshness checks work.
    entry.FetchTime = DateTime.Now.CurrentTimeMillis ( );

    // Publish the sorted listing as the entry's new children.
    children.Sort ( new FileEntry.FileEntryComparer ( ) );
    entry.Children = children;
}
/// <summary>True when the tracked user has at least one watch on <paramref name="entry"/> matching the configured watch date.</summary>
private bool ContainsUserWithWatchedDate(FileEntry entry)
{
    var matchingWatches = entry.GetWatchesByUserAndWatchDate(_userName, WatchContainsDate);
    return matchingWatches.Any();
}
/// <summary>
/// Initializes a property editor bound to a <see cref="FileEntry"/> value; both
/// arguments are forwarded unchanged to the base editor.
/// </summary>
/// <param name="key">The property key handled by this editor.</param>
/// <param name="entry">The file entry value to edit.</param>
public PropertyEditorFileEntry (string key, FileEntry entry) : base (key, entry) { }
/// <summary>Appends <paramref name="entry"/> to the collected entries.</summary>
public void Add(FileEntry entry) => Entries.Add(entry);
/// <summary>
/// Recursively builds file entries for <paramref name="rootDir"/>: files first, then
/// directory entries (matching the "files first, directories second" layout of native
/// archives), appends them plus the "."/".." operator entries to exportFileEntries,
/// and recurses into each child directory. Returns the entries created for this level.
/// </summary>
/// <param name="rootDir">Directory whose children are being serialized.</param>
/// <param name="parentDir">Parent of <paramref name="rootDir"/>, or null at the archive root.</param>
// NOTE(review): directory entries store the node index as a single byte
// ((byte)exportNodes.IndexOf(...)), which silently truncates past 255 nodes — TODO confirm
// archives can never exceed that; also node lookup is by name, so duplicate directory names
// across the tree would resolve to the first match.
private FileEntry[] GetDirDataRecursive(VirtualFilesystemDirectory rootDir, VirtualFilesystemDirectory parentDir) { List<FileEntry> dirFileEntries = new List<FileEntry>(); FileEntry file; Node dirNode; // I'll admit this isn't ideal. If I'm looking at the native archives right, they tend // to follow the rule of "files first, directories second" when it comes to file entries. // Therefore, I'm going to set it up right now so that it will get files first, *then* directories. foreach (VirtualFilesystemNode node in rootDir.Children) { // We just need a file entry here if (node.Type == NodeType.File) { VirtualFilesystemFile virtFile = node as VirtualFilesystemFile; file = new FileEntry { ID = (ushort)exportFileEntries.Count, NameHashcode = HashName(virtFile.Name + virtFile.Extension), Type = 0x11, Name = virtFile.Name + virtFile.Extension, Data = virtFile.File.GetData(), }; dirFileEntries.Add(file); } } foreach (VirtualFilesystemNode node in rootDir.Children) { // We need a node and a file entry here if (node.Type == NodeType.Directory) { VirtualFilesystemDirectory virtDir = node as VirtualFilesystemDirectory; dirNode = new Node { Type = virtDir.Name.Substring(0, 3).ToUpper() + " ", Name = virtDir.Name, NameHashcode = HashName(virtDir.Name), FirstFileOffset = (uint)exportFileEntries.Count }; exportNodes.Add(dirNode); file = new FileEntry { ID = ushort.MaxValue, NameHashcode = HashName(virtDir.Name), Type = 0x02, Name = virtDir.Name, Data = new byte[] { (byte)(exportNodes.IndexOf(exportNodes.Find(i => i.Name == virtDir.Name))) }, }; dirFileEntries.Add(file); } } exportFileEntries.AddRange(dirFileEntries.ToArray()); InsertDirOperatorEntries(rootDir, parentDir); // The recursive part. One more foreach! 
foreach (VirtualFilesystemNode node in rootDir.Children) { if (node.Type == NodeType.Directory) { VirtualFilesystemDirectory dir = node as VirtualFilesystemDirectory; Node tempNode = exportNodes.Find(i => i.Name == node.Name); tempNode.Entries = GetDirDataRecursive(dir, rootDir); } } return dirFileEntries.ToArray(); }
/// <summary>
/// Builds a <see cref="FileEntry"/> for <paramref name="filePath"/>, or null when
/// construction fails for any reason (missing file, bad path, access denied, ...).
/// </summary>
public static FileEntry GetEntry(string filePath)
{
    FileEntry result;
    try
    {
        result = new FileEntry(filePath);
    }
    catch (Exception)
    {
        // Best-effort lookup: any failure is reported as "no entry".
        result = null;
    }
    return result;
}
/// <summary>
/// Converts an XML coalesced file to its binary form. <paramref name="source"/> is made
/// absolute against the exe path when relative; when <paramref name="destination"/> is
/// empty the output is the input path with a ".bin" extension. Only property values whose
/// type is in the file's CompileTypes set are emitted, with special characters unescaped
/// via the SpecialCharacters map. Silently returns when the input does not exist.
/// </summary>
/// <param name="source">Input XML path, absolute or relative to the exe directory.</param>
/// <param name="destination">Output path; optional (derived from the input when empty).</param>
public static void ConvertToBin(string source, string destination) { var inputPath = Path.IsPathRooted(source) ? source : Path.Combine(GetExePath(), source); var outputPath = !string.IsNullOrEmpty(destination) ? destination : Path.ChangeExtension(inputPath, ".bin"); if (!Path.IsPathRooted(outputPath)) { outputPath = Path.Combine(GetExePath(), outputPath); } if (!File.Exists(inputPath)) { return; } var file = XmlCoalesceFile.Load(inputPath); var coal = new CoalescedFileXml { Version = 1 }; foreach (var asset in file.Assets) { var entry = new FileEntry(asset.Source) { Sections = new Dictionary <string, Dictionary <string, List <PropertyValue> > >() }; foreach (var section in asset.Sections) { var eSection = new Dictionary <string, List <PropertyValue> >(); foreach (var property in section.Value) { var eProperty = new List <PropertyValue>(); foreach (var value in property.Value) { if (!file.Settings.CompileTypes.Contains(value.ValueType)) { continue; } var valueValue = value.Value; if (!string.IsNullOrEmpty(valueValue)) { valueValue = SpecialCharacters.Aggregate(valueValue, (current, c) => current.Replace(c.Value, c.Key)); } eProperty.Add(new PropertyValue(value.ValueType, valueValue)); } eSection.Add(property.Key, eProperty); } entry.Sections.Add(section.Key, eSection); } coal.Files.Add(entry); } using (var output = File.Create(outputPath)) { if (file.Settings != null) { coal.OverrideCompileValueTypes = file.Settings.OverrideCompileValueTypes; coal.CompileTypes = file.Settings.CompileTypes; } coal.Serialize(output); } }
/// <summary>A file can be edited by its creator, its last modifier, or a CRM administrator.</summary>
public bool CanEdit(FileEntry file, Guid userId)
{
    if (file.CreateBy == userId)
    {
        return true;
    }
    if (file.ModifiedBy == userId)
    {
        return true;
    }
    return CRMSecurity.IsAdmin;
}
/// <summary>
/// Issues a WebDAV PROPFIND (Depth: 1) against the backend root and parses the
/// multistatus response into <see cref="IFileEntry"/> items, also populating
/// m_filenamelist. Entry names are matched against several known URL/path prefixes
/// (IIS vs Apache encoding quirks) before being trimmed to a relative name.
/// </summary>
/// <returns>The parsed entries; size/time/collection flags default when props are absent.</returns>
// NOTE(review): method name is missing a 't' ("ListWithou[t]ExceptionCatch") — renaming would
// break callers, so it is only flagged here.
// NOTE(review): non-2xx detection is done manually because Mono does not always throw on
// protocol errors (per the inline comment); keep that check when touching this code.
private IEnumerable <IFileEntry> ListWithouExceptionCatch() { var req = CreateRequest(""); req.Method = "PROPFIND"; req.Headers.Add("Depth", "1"); req.ContentType = "text/xml"; req.ContentLength = PROPFIND_BODY.Length; var areq = new Utility.AsyncHttpRequest(req); using (System.IO.Stream s = areq.GetRequestStream()) s.Write(PROPFIND_BODY, 0, PROPFIND_BODY.Length); var doc = new System.Xml.XmlDocument(); using (var resp = (System.Net.HttpWebResponse)areq.GetResponse()) { int code = (int)resp.StatusCode; if (code < 200 || code >= 300) //For some reason Mono does not throw this automatically { throw new System.Net.WebException(resp.StatusDescription, null, System.Net.WebExceptionStatus.ProtocolError, resp); } if (!string.IsNullOrEmpty(m_debugPropfindFile)) { using (var rs = areq.GetResponseStream()) using (var fs = new System.IO.FileStream(m_debugPropfindFile, System.IO.FileMode.Create, System.IO.FileAccess.Write, System.IO.FileShare.None)) Utility.Utility.CopyStream(rs, fs, false, m_copybuffer); doc.Load(m_debugPropfindFile); } else { using (var rs = areq.GetResponseStream()) doc.Load(rs); } } System.Xml.XmlNamespaceManager nm = new System.Xml.XmlNamespaceManager(doc.NameTable); nm.AddNamespace("D", "DAV:"); List <IFileEntry> files = new List <IFileEntry>(); m_filenamelist = new List <string>(); foreach (System.Xml.XmlNode n in doc.SelectNodes("D:multistatus/D:response/D:href", nm)) { //IIS uses %20 for spaces and %2B for + //Apache uses %20 for spaces and + for + string name = Library.Utility.Uri.UrlDecode(n.InnerText.Replace("+", "%2B")); string cmp_path; //TODO: This list is getting ridiculous, should change to regexps if (name.StartsWith(m_url, StringComparison.Ordinal)) { cmp_path = m_url; } else if (name.StartsWith(m_rawurl, StringComparison.Ordinal)) { cmp_path = m_rawurl; } else if (name.StartsWith(m_rawurlPort, StringComparison.Ordinal)) { cmp_path = m_rawurlPort; } else if (name.StartsWith(m_path, StringComparison.Ordinal)) { cmp_path = m_path; } else if 
(name.StartsWith("/" + m_path, StringComparison.Ordinal)) { cmp_path = "/" + m_path; } else if (name.StartsWith(m_sanitizedUrl, StringComparison.Ordinal)) { cmp_path = m_sanitizedUrl; } else if (name.StartsWith(m_reverseProtocolUrl, StringComparison.Ordinal)) { cmp_path = m_reverseProtocolUrl; } else { continue; } if (name.Length <= cmp_path.Length) { continue; } name = name.Substring(cmp_path.Length); long size = -1; DateTime lastAccess = new DateTime(); DateTime lastModified = new DateTime(); bool isCollection = false; System.Xml.XmlNode stat = n.ParentNode.SelectSingleNode("D:propstat/D:prop", nm); if (stat != null) { System.Xml.XmlNode s = stat.SelectSingleNode("D:getcontentlength", nm); if (s != null) { size = long.Parse(s.InnerText); } s = stat.SelectSingleNode("D:getlastmodified", nm); if (s != null) { try { //Not important if this succeeds lastAccess = lastModified = DateTime.Parse(s.InnerText, System.Globalization.CultureInfo.InvariantCulture); } catch { } } s = stat.SelectSingleNode("D:iscollection", nm); if (s != null) { isCollection = s.InnerText.Trim() == "1"; } else { isCollection = (stat.SelectSingleNode("D:resourcetype/D:collection", nm) != null); } } FileEntry fe = new FileEntry(name, size, lastAccess, lastModified); fe.IsFolder = isCollection; files.Add(fe); m_filenamelist.Add(name); } return(files); }
/// <summary>
/// Gets the children of a file entry, optionally from cache, optionally asynchronously.
/// </summary>
/// <param name="entry">The entry whose children are requested.</param>
/// <param name="useCache">if set to <c>true</c>, return the cached children when still fresh.</param>
/// <param name="receiver">Optional receiver. When non-null the listing runs on a queued
/// background thread, results go to the receiver, and this method returns null.</param>
/// <returns>The children for the synchronous paths; null for the asynchronous path.</returns>
public FileEntry[] GetChildren( FileEntry entry, bool useCache, IListingReceiver receiver )
{
    // Serve from cache when allowed and the listing is recent enough.
    if ( useCache && !entry.NeedFetch )
    {
        return entry.Children.ToArray ( );
    }

    // No receiver: synchronous call — run ls inline and return the result.
    if ( receiver == null )
    {
        DoLS ( entry );
        return entry.Children.ToArray ( );
    }

    // Asynchronous call: launch a thread that runs ls and hands the listing to the receiver.
    Thread t = new Thread ( new ParameterizedThreadStart ( delegate ( object stateData )
    {
        var state = stateData as ThreadState;
        DoLS ( entry );

        // Fix: the original read state.Entry here, which is null for the first queued
        // thread (started below with only { Thread = t }) and stale for chained threads
        // (started with the previous thread's entry). The closed-over 'entry' is always
        // the entry this ls actually ran on, so use it throughout.
        receiver.SetChildren ( entry, entry.Children.ToArray ( ) );

        FileEntry[] children = entry.Children.ToArray ( );
        if ( children.Length > 0 && children[0].IsApplicationPackage )
        {
            var map = new Dictionary<String, FileEntry> ( );
            children.ForEach ( child => { map.Add ( child.FullPath, child ); } );

            // call pm to resolve package information for application files.
            String command = PM_FULL_LISTING;
            try
            {
                this.Device.ExecuteShellCommand ( command, new PackageManagerListingReceiver ( map, receiver ) );
            }
            catch ( IOException e )
            {
                // adb failed somehow, we do nothing.
                Log.e ( "FileListingService", e );
            }
        }

        // if another thread is pending, launch it
        lock ( Threads )
        {
            // first remove ourselves from the list
            Threads.Remove ( state.Thread );

            // then launch the next one if applicable.
            if ( Threads.Count > 0 )
            {
                Thread ct = Threads[0];
                ct.Start ( new ThreadState { Thread = ct, Entry = entry } );
            }
        }
    } ) );
    t.Name = "ls " + entry.FullPath;

    // we don't want to run multiple ls on the device at the same time, so we
    // store the thread in a list and launch it only if there's no other thread running.
    // the thread will launch the next one once it's done.
    lock ( Threads )
    {
        // add to the list
        Threads.Add ( t );

        // if it's the only one, launch it.
        if ( Threads.Count == 1 )
        {
            t.Start ( new ThreadState { Thread = t } );
        }
    }

    // and we return null.
    return null;
}
/// <summary>Enumerating readers is not supported by this security provider.</summary>
public IEnumerable<Guid> WhoCanRead(FileEntry fileEntry)
{
    // Callers must not rely on this path; it always fails fast.
    throw new NotImplementedException();
}
/// <summary>
/// Feeds a canned "ls" listing (including one unparseable "lstat ... failed" line) through
/// ListingServiceReceiver and verifies the error line is skipped: 5 input lines yield 4
/// entries, whose every parsed property (dates, paths, permissions, link targets, ...) is
/// asserted field-by-field below.
/// </summary>
public void ParseListingWithErrorTest() { DummyDevice device = new DummyDevice(); FileEntry root = new FileEntry(device, "/"); List<FileEntry> entries = new List<FileEntry>(); List<string> links = new List<string>(); ListingServiceReceiver receiver = new ListingServiceReceiver(root, entries, links); string output = @"drwxr-xr-x root root 2015-06-01 10:17 acct drwxrwx--- system cache 2015-05-13 02:03 cache -rw-r--r-- root root 297 1970-01-01 01:00 default.prop lstat '//factory' failed: Permission denied lrwxrwxrwx root root 2015-06-01 10:17 etc -> /system/etc"; string[] lines = output.Split(new char[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries); foreach (var line in lines) { byte[] data = Encoding.ASCII.GetBytes(line + "\r\n"); receiver.AddOutput(data, 0, data.Length); } receiver.Flush(); receiver.FinishLinks(); Assert.AreEqual<int>(4, entries.Count); // Validate the first entry (/acct/) // drwxr-xr-x root root 2015-06-01 10:17 acct Assert.AreEqual(new DateTime(2015, 6, 1, 10, 17, 00), entries[0].Date); Assert.AreEqual(device, entries[0].Device); Assert.IsTrue(entries[0].Exists); Assert.AreEqual(0, entries[0].FetchTime); Assert.AreEqual("/acct", entries[0].FullEscapedPath); Assert.AreEqual("/acct/", entries[0].FullPath); Assert.AreEqual("root", entries[0].Group); Assert.IsNull(entries[0].Info); Assert.IsFalse(entries[0].IsApplicationFileName); Assert.IsFalse(entries[0].IsApplicationPackage); Assert.IsTrue(entries[0].IsDirectory); Assert.IsFalse(entries[0].IsExecutable); Assert.IsFalse(entries[0].IsLink); Assert.IsFalse(entries[0].IsRoot); Assert.IsNull(entries[0].LinkName); Assert.AreEqual("acct", entries[0].Name); Assert.IsTrue(entries[0].NeedFetch); Assert.AreEqual("root", entries[0].Owner); Assert.AreEqual(root, entries[0].Parent); Assert.AreEqual(1, entries[0].PathSegments.Length); Assert.AreEqual("acct", entries[0].PathSegments[0]); Assert.AreEqual("rwxr-tr-t", entries[0].Permissions.ToString()); Assert.AreEqual(0, entries[0].Size); 
Assert.AreEqual(FileListingService.FileTypes.Directory, entries[0].Type); // Validate the second entry (/cache) // drwxrwx--- system cache 2015-05-13 02:03 cache Assert.AreEqual(new DateTime(2015, 5, 13, 2, 3, 00), entries[1].Date); Assert.AreEqual(device, entries[1].Device); Assert.IsTrue(entries[1].Exists); Assert.AreEqual(0, entries[1].FetchTime); Assert.AreEqual("/cache", entries[1].FullEscapedPath); Assert.AreEqual("/cache/", entries[1].FullPath); Assert.AreEqual("cache", entries[1].Group); Assert.IsNull(entries[1].Info); Assert.IsFalse(entries[1].IsApplicationFileName); Assert.IsFalse(entries[1].IsApplicationPackage); Assert.IsTrue(entries[1].IsDirectory); Assert.IsFalse(entries[1].IsExecutable); Assert.IsFalse(entries[1].IsLink); Assert.IsFalse(entries[1].IsRoot); Assert.IsNull(entries[1].LinkName); Assert.AreEqual("cache", entries[1].Name); Assert.IsTrue(entries[1].NeedFetch); Assert.AreEqual("system", entries[1].Owner); Assert.AreEqual(root, entries[1].Parent); Assert.AreEqual(1, entries[1].PathSegments.Length); Assert.AreEqual("cache", entries[1].PathSegments[0]); Assert.AreEqual("rwxrwx---", entries[1].Permissions.ToString()); Assert.AreEqual(0, entries[1].Size); Assert.AreEqual(FileListingService.FileTypes.Directory, entries[1].Type); // Validate the third entry (/default.prop) // -rw-r--r-- root root 297 1970-01-01 01:00 default.prop Assert.AreEqual(new DateTime(1970, 1, 1, 1, 0, 0), entries[2].Date); Assert.AreEqual(device, entries[2].Device); Assert.IsTrue(entries[2].Exists); Assert.AreEqual(0, entries[2].FetchTime); Assert.AreEqual("/default.prop", entries[2].FullEscapedPath); Assert.AreEqual("/default.prop", entries[2].FullPath); Assert.AreEqual("root", entries[2].Group); Assert.IsNull(entries[2].Info); Assert.IsFalse(entries[2].IsApplicationFileName); Assert.IsFalse(entries[2].IsApplicationPackage); Assert.IsFalse(entries[2].IsDirectory); Assert.IsFalse(entries[2].IsExecutable); Assert.IsFalse(entries[2].IsLink); Assert.IsFalse(entries[2].IsRoot); 
Assert.IsNull(entries[2].LinkName); Assert.AreEqual("default.prop", entries[2].Name); Assert.IsTrue(entries[2].NeedFetch); Assert.AreEqual("root", entries[2].Owner); Assert.AreEqual(root, entries[2].Parent); Assert.AreEqual(1, entries[2].PathSegments.Length); Assert.AreEqual("default.prop", entries[2].PathSegments[0]); Assert.AreEqual("rw-r--r--", entries[2].Permissions.ToString()); Assert.AreEqual(297, entries[2].Size); Assert.AreEqual(FileListingService.FileTypes.File, entries[2].Type); // Validate the fourth and final entry (/etc) // lrwxrwxrwx root root 2015-06-01 10:17 etc -> /system/etc Assert.AreEqual(new DateTime(2015, 6, 1, 10, 17, 0), entries[3].Date); Assert.AreEqual(device, entries[3].Device); Assert.IsTrue(entries[3].Exists); Assert.AreEqual(0, entries[3].FetchTime); Assert.AreEqual("/system/etc", entries[3].FullEscapedPath); Assert.AreEqual("/etc/", entries[3].FullPath); Assert.AreEqual("root", entries[3].Group); Assert.AreEqual("-> /system/etc", entries[3].Info); Assert.IsFalse(entries[3].IsApplicationFileName); Assert.IsFalse(entries[3].IsApplicationPackage); Assert.IsTrue(entries[3].IsDirectory); Assert.IsFalse(entries[3].IsExecutable); Assert.IsTrue(entries[3].IsLink); Assert.IsFalse(entries[3].IsRoot); Assert.AreEqual("/system/etc", entries[3].LinkName); Assert.AreEqual("etc", entries[3].Name); Assert.IsTrue(entries[3].NeedFetch); Assert.AreEqual("root", entries[3].Owner); Assert.AreEqual(root, entries[3].Parent); Assert.AreEqual(1, entries[3].PathSegments.Length); Assert.AreEqual("etc", entries[3].PathSegments[0]); Assert.AreEqual("rwxrwxrwx", entries[3].Permissions.ToString()); Assert.AreEqual(0, entries[3].Size); Assert.AreEqual(FileListingService.FileTypes.DirectoryLink, entries[3].Type); }
/// <inheritdoc/>
public Task WriteToStreamAsync(FileEntry fileEntry, Stream stream)
{
    // Build the remote reader, then hand off the copy as a single awaitable task.
    var reader = new RemoteFileEntryStreamReader(JSFileModule, ElementRef, fileEntry, this, MaxUploadImageMessageSize);
    return reader.WriteToStreamAsync(stream, CancellationToken.None);
}
/// <summary>Snapshots the directory listing as Entry objects; EOF when no listing is available.</summary>
private Entry[] entries()
{
    FileInfo[] files = directory.GetFiles();
    if (files == null)
        return EOF;

    Entry[] result = new Entry[files.Length];
    int index = 0;
    foreach (FileInfo info in files)
    {
        result[index++] = new FileEntry(info);
    }
    return result;
}
/// <inheritdoc/>
public Stream OpenReadStream(FileEntry fileEntry, CancellationToken cancellationToken = default)
{
    // The remote stream fetches segments on demand over the JS interop module.
    var remoteStream = new RemoteFileEntryStream(JSFileModule, ElementRef, fileEntry, this, MaxUploadImageMessageSize, SegmentFetchTimeout, cancellationToken);
    return remoteStream;
}
/// <summary>
/// Builds a directory entry from a '/'-separated path ending in '/'. The entry's name is the
/// last path component; its parent folder path is registered (recursively, via this same
/// constructor) in the SDCard's global allDirs/allFiles collections if not seen before,
/// together with a "../" pseudo-entry for the new folder.
/// </summary>
/// <param name="directory">Directory path; expected to end with '/' (the name is taken from
/// the second-to-last split segment).</param>
// NOTE(review): mutates SDCard.f global state from a constructor, and recursion terminates when
// parts.Length == 2 (top-level folder) — confirm inputs are always normalized with a trailing '/'.
public FileEntry(string directory) { isDirectory = true; string[] parts = directory.Split('/'); name = parts[parts.Length - 2]; if (parts.Length == 2) folder = ""; else { folder = string.Join("/", parts, 0, parts.Length - 2) + "/"; if (!SDCard.f.allDirs.Keys.Contains(folder)) { FileEntry ent = new FileEntry(folder); SDCard.f.allDirs.Add(folder, ent); SDCard.f.allFiles.AddLast(ent); SDCard.f.allFiles.AddLast(new FileEntry(new string[] { folder.ToLower() + "../", "" })); } } }
/// <summary>
/// Extracts a RAR archive, lazily yielding each contained (complete, non-directory) entry as a
/// <see cref="FileEntry"/>. In parallel mode entries are processed in governor-checked batches;
/// otherwise they are streamed one at a time. Quine archives (archives containing themselves)
/// are detected and skipped (parallel) or abort extraction via OverflowException (serial).
/// When the archive cannot be opened and ExtractSelfOnFail is set, the input entry itself is yielded.
/// </summary>
/// <param name="fileEntry">The archive to extract.</param>
/// <param name="options">Extraction options (parallelism, batch size, self-on-fail).</param>
/// <param name="governor">Resource governor consulted before each batch/entry to bound work.</param>
/// <returns>A deferred sequence of extracted entries (recursively expanded via Context.ExtractFile).</returns>
public IEnumerable <FileEntry> Extract(FileEntry fileEntry, ExtractorOptions options, ResourceGovernor governor) { var rarArchive = GetRarArchive(fileEntry, options); if (rarArchive != null) { var entries = rarArchive.Entries.Where(x => x.IsComplete && !x.IsDirectory); if (options.Parallel) { var files = new ConcurrentStack <FileEntry>(); while (entries.Any()) { var batchSize = Math.Min(options.BatchSize, entries.Count()); var streams = entries.Take(batchSize).Select(entry => (entry, entry.OpenEntryStream())).ToList(); governor.CheckResourceGovernor(streams.Sum(x => x.Item2.Length)); streams.AsParallel().ForAll(streampair => { try { var newFileEntry = new FileEntry(streampair.entry.Key, streampair.Item2, fileEntry); if (Extractor.IsQuine(newFileEntry)) { Logger.Info(Extractor.IS_QUINE_STRING, fileEntry.Name, fileEntry.FullPath); governor.CurrentOperationProcessedBytesLeft = -1; } else { files.PushRange(Context.ExtractFile(newFileEntry, options, governor).ToArray()); } } catch (Exception e) { Logger.Debug(Extractor.DEBUG_STRING, ArchiveFileType.RAR, fileEntry.FullPath, streampair.entry.Key, e.GetType()); } }); governor.CheckResourceGovernor(0); entries = entries.Skip(streams.Count); while (files.TryPop(out var result)) { if (result != null) { yield return(result); } } } } else { foreach (var entry in entries) { governor.CheckResourceGovernor(entry.Size); FileEntry?newFileEntry = null; try { var name = entry.Key.Replace('/', Path.DirectorySeparatorChar); newFileEntry = new FileEntry(name, entry.OpenEntryStream(), fileEntry); } catch (Exception e) { Logger.Debug(Extractor.DEBUG_STRING, ArchiveFileType.RAR, fileEntry.FullPath, entry.Key, e.GetType()); } if (newFileEntry != null) { if (Extractor.IsQuine(newFileEntry)) { Logger.Info(Extractor.IS_QUINE_STRING, fileEntry.Name, fileEntry.FullPath); throw new OverflowException(); } foreach (var 
extractedFile in Context.ExtractFile(newFileEntry, options, governor)) { yield return(extractedFile); } } } } } else { if (options.ExtractSelfOnFail) { yield return(fileEntry); } } }
/// <summary>
/// Appends the "." (current directory) and ".." (parent directory) operator entries
/// to the export list for the directory currently being serialized.
/// </summary>
/// <param name="currentDir">Directory whose node index backs the "." entry.</param>
/// <param name="parentDir">Parent directory backing the ".." entry; null at the archive root.</param>
private void InsertDirOperatorEntries(VirtualFilesystemDirectory currentDir, VirtualFilesystemDirectory parentDir)
{
    // "." references the node of the current directory (by its index in exportNodes).
    FileEntry selfEntry = new FileEntry
    {
        ID = ushort.MaxValue,
        NameHashcode = HashName("."),
        Type = 0x02,
        Name = ".",
        Data = new byte[] { (byte)(exportNodes.IndexOf(exportNodes.Find(i => i.Name == currentDir.Name))) },
    };

    FileEntry parentEntry;
    if (parentDir != null)
    {
        // Not the root: ".." references the parent directory's node index.
        parentEntry = new FileEntry
        {
            ID = ushort.MaxValue,
            NameHashcode = HashName(".."),
            Type = 0x02,
            Name = "..",
            Data = new byte[] { (byte)(exportNodes.IndexOf(exportNodes.Find(i => i.Name == parentDir.Name))) },
        };
    }
    else
    {
        // Root directory: 255 marks the "no parent" sentinel.
        parentEntry = new FileEntry
        {
            ID = ushort.MaxValue,
            NameHashcode = HashName(".."),
            Type = 0x02,
            Name = "..",
            Data = new byte[] { (byte)(255) },
        };
    }

    exportFileEntries.Add(selfEntry);
    exportFileEntries.Add(parentEntry);
}
/// <summary>Checks the entry's content against the oversize limit without throwing; delegates to the content overload.</summary>
public static bool AssertNotOversizeSafe(FileEntry fileEntry)
{
    return AssertNotOversizeSafe(fileEntry.Content);
}
/// <summary>
/// Builds a <see cref="FileEntry"/> for <paramref name="filePath"/>; logs the failure
/// and returns null when construction throws.
/// </summary>
public static FileEntry GetEntry(string filePath)
{
    try
    {
        return new FileEntry(filePath);
    }
    catch (Exception ex)
    {
        // Diagnostic trail only — callers handle the null result.
        Debug.WriteLine("Exception in GetEntry for filePath :: " + filePath + " " + ex.Message);
        return null;
    }
}
/// <summary>Delegates the read check to the generic permission resolver with <see cref="SecurityAction.Read"/>.</summary>
public bool CanRead(FileEntry entry, Guid userId) => Can(entry, userId, SecurityAction.Read);
/// <summary>Add a source stream. <c>file</c> is the string by which the
/// sub-stream will be known in the compound stream.
/// </summary>
/// <exception cref="InvalidOperationException">if this writer has already merged</exception>
/// <exception cref="ArgumentNullException">if <c>file</c> is null</exception>
/// <exception cref="ArgumentException">if a file with the same name has been added already</exception>
// NOTE(review): duplicate detection relies on ids.Add(file) throwing. If 'ids' is a
// HashSet<string>, Add returns false on duplicates instead of throwing, and this check
// never fires — confirm the declared collection type of 'ids'.
public void AddFile(String file) { if (merged) throw new InvalidOperationException("Can't add extensions after merge has been called"); if (file == null) throw new ArgumentNullException("file"); try { ids.Add(file); } catch (Exception) { throw new ArgumentException("File " + file + " already added"); } var entry = new FileEntry {file = file}; entries.AddLast(entry); }
/// <summary>Custom-filter editing requires the same rights as ordinary editing.</summary>
public bool CanCustomFilterEdit(FileEntry file, Guid userId) => CanEdit(file, userId);
/// <summary>Copy the contents of the file named by <c>source.file</c> into the provided
/// output stream, using <paramref name="buffer"/> as the transfer scratch space to avoid
/// per-chunk allocation. Honors the abort checker roughly every 2 MB of copied data, then
/// verifies both that the whole input was consumed and that the output advanced by exactly
/// the input length.
/// </summary>
/// <param name="source">Entry naming the directory input to copy.</param>
/// <param name="os">Destination index output.</param>
/// <param name="buffer">Reusable transfer buffer; its length sets the chunk size.</param>
/// <exception cref="System.IO.IOException">on a residual-length or output-offset mismatch.</exception>
private void CopyFile(FileEntry source, IndexOutput os, byte[] buffer) { IndexInput is_Renamed = null; try { long startPtr = os.GetFilePointer(); is_Renamed = directory.OpenInput(source.file); long length = is_Renamed.Length(); long remainder = length; int chunk = buffer.Length; while (remainder > 0) { int len = (int) System.Math.Min(chunk, remainder); is_Renamed.ReadBytes(buffer, 0, len, false); os.WriteBytes(buffer, len); remainder -= len; if (checkAbort != null) // Roughly every 2 MB we will check if // it's time to abort checkAbort.Work(80); } // Verify that remainder is 0 if (remainder != 0) throw new System.IO.IOException("Non-zero remainder length after copying: " + remainder + " (id: " + source.file + ", length: " + length + ", buffer size: " + chunk + ")"); // Verify that the output length diff is equal to original file long endPtr = os.GetFilePointer(); long diff = endPtr - startPtr; if (diff != length) throw new System.IO.IOException("Difference in the output file offsets " + diff + " does not match the original file length " + length); } finally { if (is_Renamed != null) is_Renamed.Close(); } }
/// <summary>Review permission is granted whenever the generic resolver allows <see cref="SecurityAction.Edit"/>.</summary>
public bool CanReview(FileEntry entry, Guid userId) => Can(entry, userId, SecurityAction.Edit);