/// <summary>
/// Writes one file entry into the open zip archive: the entry key is the
/// normalized (lower-case) form of the file name and the entry carries the
/// stored length and update timestamp.
/// </summary>
/// <param name="file">Metadata record (name + update time) for the entry.</param>
/// <param name="data">Raw file content to compress into the archive.</param>
public void AddFile(FileEntryInfo file, byte[] data) {
    this.Open();

    // The archive takes ownership of the stream (closeStream == true below).
    MemoryStream entryStream = new MemoryStream(data);

    // First path level is the owning directory; key is normalized/lower-cased.
    string entryKey = file.GetDirPathToLowerNorm(file.FileName);

    this.ZipTarget.AddEntry(entryKey, entryStream, true, entryStream.Length,
                            file.DateTimeFromStr(file.FileUpdateTime));
}
/// <summary>
/// Two entries are equal when both Number and Path match.
/// </summary>
/// <param name="obj">Object to compare against; may be null or another type.</param>
/// <returns>True only for a FileEntryInfo with the same Number and Path.</returns>
public override bool Equals(object obj) {
    // BUGFIX: the old code cast with `as` but then null-checked `obj`, so a
    // non-null argument of a different type crashed with a
    // NullReferenceException on `fei.Number`. Pattern-match instead.
    if (!(obj is FileEntryInfo fei)) {
        return false;
    }
    return Number == fei.Number && Path == fei.Path;
}
/// <summary>
/// Returns the normalized internal directory name: no leading or trailing
/// '/' or '\', lower-cased. Thin wrapper over
/// FileEntryInfo.GetDirPathToLowerNorm_STC, kept for backward compatibility.
/// </summary>
/// <param name="path">Raw directory path.</param>
/// <returns>Normalized lower-case path, or "" for null/empty input.</returns>
internal static string _getFileLegalLowerDir(string path) {
    // The previous hand-rolled normalization was left here commented out;
    // it duplicated GetDirPathToLowerNorm_STC and has been removed.
    return FileEntryInfo.GetDirPathToLowerNorm_STC(path);
}
/// <summary>
/// Resolves a path to a file id and dispatches to the directory or file
/// stat routine accordingly.
/// </summary>
/// <param name="path">Path to look up.</param>
/// <param name="stat">Receives the stat record, or null on error.</param>
/// <returns>Errno.NoError on success, otherwise the lookup/stat error.</returns>
public Errno Stat(string path, out FileEntryInfo stat) {
    stat = null;

    Errno error = LookupFileId(path, out short fileId, out bool isDir);
    if (error != Errno.NoError) {
        return error;
    }

    if (isDir) {
        return StatDir(fileId, out stat);
    }

    return Stat(fileId, out stat);
}
/// <summary>
/// Overwrites an existing file inside its backing zip: removes the old entry
/// (matched case-insensitively, if present) and re-adds the new content via
/// AddFile. Saves immediately unless an update batch is active.
/// </summary>
/// <param name="strFileName">File path relative to the table root.</param>
/// <param name="fileData">Replacement content.</param>
/// <param name="date">New modification time recorded with the entry.</param>
public void UpdateFile(string strFileName, byte[] fileData, DateTime date) {
    strFileName = FileEntryInfo.GetDirPathToLowerNorm_STC(strFileName);
    if (string.IsNullOrEmpty(strFileName)) {
        // BUGFIX: ArgumentNullException takes the parameter NAME; the old
        // code passed the (empty) value, producing a useless message.
        throw new ArgumentNullException(nameof(strFileName));
    }
    string firstDir = "";
    string childDirFileName = "";
    firstDir = FileEntryInfo.GetFirstDir_STC(strFileName, out childDirFileName);
    DiskZip_ConnectInfo zipConn = _checkFileDataZip(strFileName); // storage location

    FileEntryInfo file = new FileEntryInfo();
    file.FileDir = firstDir;
    file.FileName = childDirFileName;
    file.FileLen = fileData.Length;
    file.FileUpdateTime = file.DateTimeToStr(date);

    zipConn.Open();
    SharpCompress.Archive.Zip.ZipArchiveEntry findFile = null;
    foreach (var ze in zipConn.ZipTarget.Entries) {
        if (ze.IsDirectory) {
            continue;
        }
        // Entry keys are stored relative to the first-level directory.
        if (string.Compare(ze.Key, file.FileName, true) == 0) {
            findFile = ze;
            break;
        }
    }
    if (findFile != null) {
        zipConn.ZipTarget.RemoveEntry(findFile); // drop the stale entry
    }
    // Re-add with the new content; AddFile performs its own save handling.
    AddFile(strFileName, fileData, date);
    if (m_InUpdateState) {
        m_InUpdateConnZips.Add(zipConn); // defer the save until EndUpdate
    } else {
        zipConn.Save();
    }
}
/// <summary>
/// Lists the file entries stored under the given directory (must not be
/// empty) and totals their uncompressed sizes.
/// </summary>
/// <param name="strDir">Directory relative to the table root; must be non-empty.</param>
/// <param name="fileNames">Receives the matching entry keys.</param>
/// <param name="totalSize">Receives the summed entry sizes in bytes.</param>
/// <returns>Number of matching file entries.</returns>
public int GetFiles(string strDir, out List <string> fileNames, out int totalSize) {
    strDir = FileEntryInfo.GetDirPathToLowerNorm_STC(strDir);
    if (string.IsNullOrEmpty(strDir)) {
        // BUGFIX: pass the parameter NAME to ArgumentNullException, not its value.
        throw new ArgumentNullException(nameof(strDir));
    }
    strDir = strDir + "/"; // make the directory prefix unambiguous
    fileNames = new List <string>();
    totalSize = 0;
    string firstDir = "";
    string childDirFileName = "";
    firstDir = FileEntryInfo.GetFirstDir_STC(strDir, out childDirFileName);
    DiskZip_ConnectInfo zipConn = _checkFileDataZip(strDir);
    zipConn.Open();

    List <SharpCompress.Archive.Zip.ZipArchiveEntry> filesInDir = new List <SharpCompress.Archive.Zip.ZipArchiveEntry>();
    // An empty child path means "everything in this top-level dir's zip".
    bool all = string.IsNullOrEmpty(childDirFileName);
    foreach (var ze in zipConn.ZipTarget.Entries) {
        if (ze.IsDirectory) {
            continue;
        }
        // NOTE(review): sibling methods (DelFile/OpenFile/FileExists) match
        // entry keys against the path BELOW the first-level dir
        // (childDirFileName); filtering on firstDir here looks inconsistent
        // with that layout — confirm against how entries are keyed.
        if (all || ze.Key.ToLower().StartsWith(firstDir)) {
            filesInDir.Add(ze);
        }
    }

    foreach (var v in filesInDir) {
        fileNames.Add(v.Key);
        totalSize += (int)v.Size;
    }
    return filesInDir.Count;
}
/// <summary>
/// [Key routine] Maps a path relative to the table root onto the .z archive
/// that stores its data: the first path segment selects a per-directory zip
/// DB; an empty first segment falls back to the root-level zip DB.
/// </summary>
/// <param name="relativeTableDirFilePath">Path relative to the table root.</param>
/// <returns>Full filesystem path of the backing zip database.</returns>
string _getFileDataZipPath(string relativeTableDirFilePath) {
    string remainder;
    string topDir = FileEntryInfo.GetFirstDir_STC(relativeTableDirFilePath, out remainder);

    if (string.IsNullOrEmpty(topDir)) {
        // No directory component: data lives in the root zip database.
        return this.RootDir_FileTableZipDBPath;
    }

    string dirPath = Path.Combine(this.RootDir_FileTable, topDir);
    return Path.Combine(dirPath, FileDataDBName);
}
/// <summary>
/// Yields the Number of every ancestor from the outermost root down to (and
/// including) this entry.
/// </summary>
/// <returns>Numbers ordered root-first, this entry last.</returns>
public IEnumerable <int> GetNumberTree() {
    // Collect the Root chain; the stack reverses it so the outermost
    // ancestor comes out first.
    Stack <FileEntryInfo> ancestors = new Stack <FileEntryInfo>();
    for (FileEntryInfo cur = this; cur.Root != null; cur = cur.Root) {
        ancestors.Push(cur.Root);
    }

    foreach (FileEntryInfo ancestor in ancestors) {
        yield return ancestor.Number;
    }

    yield return this.Number;
}
/// <summary>
/// Renames a TOP-LEVEL directory only. NOTE: ends any active update batch
/// (m_InUpdateState) via EndUpdate, because the backing zips must be
/// released before the directory can be moved on disk.
/// </summary>
/// <param name="strFirstDirName">Existing top-level directory name.</param>
/// <param name="strNewFirstDirName">New top-level directory name.</param>
public void RenameDir(string strFirstDirName, string strNewFirstDirName) {
    strFirstDirName = FileEntryInfo.GetDirPathToLowerNorm_STC(strFirstDirName);
    strNewFirstDirName = FileEntryInfo.GetDirPathToLowerNorm_STC(strNewFirstDirName);
    if (string.IsNullOrEmpty(strFirstDirName) || string.IsNullOrEmpty(strNewFirstDirName)) {
        return; // the root itself cannot be renamed
    }
    if (string.Compare(strFirstDirName, strNewFirstDirName) == 0) {
        return; // same name: nothing to do
    }
    strFirstDirName = strFirstDirName + "/";
    strNewFirstDirName = strNewFirstDirName + "/";
    string remain1 = "";
    string remain2 = "";
    string firstTopDir = FileEntryInfo.GetFirstDir_STC(strFirstDirName, out remain1);
    // BUGFIX: the second split must inspect the NEW name. The old code
    // parsed strFirstDirName twice, so a nested new name (e.g. "a/b") was
    // never rejected by the top-level-only check below.
    string firstTopDir2 = FileEntryInfo.GetFirstDir_STC(strNewFirstDirName, out remain2);
    string realDir1 = Path.Combine(RootDir_FileTable, strFirstDirName);
    string realDir2 = Path.Combine(RootDir_FileTable, strNewFirstDirName);
    if (!string.IsNullOrEmpty(remain1) || !string.IsNullOrEmpty(remain2)) {
        // Either path has more than one level.
        throw new DiskZip_AccessPackerException("目录必须都是最顶层目录", null);
    }
    if (Directory.Exists(realDir2)) {
        return; // target already exists; refuse to overwrite
    }
    // Flush and release the zip handles first, otherwise the move fails
    // because the directory is in use.
    this.EndUpdate(true);
    if (Directory.Exists(realDir1)) {
        Directory.Move(realDir1, realDir2);
    } else {
        throw new DiskZip_AccessPackerException("目录必须都是最顶层目录", null);
    }
}
/// <summary>
/// Stats a file by path: fills size, block and link counts from the on-disk
/// directory entry and maps its flag field to generic attributes.
/// </summary>
/// <param name="path">Path to look up.</param>
/// <param name="stat">Receives the stat record, or null on error.</param>
/// <returns>Errno.NoError on success; AccessDenied when not mounted.</returns>
public Errno Stat(string path, out FileEntryInfo stat) {
    stat = null;

    if (!_mounted) {
        return Errno.AccessDenied;
    }

    Errno err = GetFileEntry(path, out DirectoryEntryWithPointers entryWithPointers);
    if (err != Errno.NoError) {
        return err;
    }

    DirectoryEntry entry = entryWithPointers.entry;

    stat = new FileEntryInfo {
        Attributes = new FileAttributes(),
        Blocks     = entry.block_count,
        BlockSize  = entry.block_size,
        Length     = entry.byte_count,
        Inode      = entry.id,
        Links      = (ulong)entryWithPointers.pointers.Length
    };

    // Map the masked on-disk flag field to generic attributes.
    switch ((FileFlags)(entry.flags & FLAGS_MASK)) {
        case FileFlags.Directory:
            stat.Attributes |= FileAttributes.Directory;
            break;
        case FileFlags.Special:
            stat.Attributes |= FileAttributes.Device;
            break;
    }

    return Errno.NoError;
}
/// <summary>
/// Removes a single file entry from its backing zip archive. No-op when the
/// normalized name is empty or no matching entry exists.
/// </summary>
/// <param name="strFileName">File path relative to the table root.</param>
public void DelFile(string strFileName) {
    strFileName = FileEntryInfo.GetDirPathToLowerNorm_STC(strFileName);
    if (string.IsNullOrEmpty(strFileName)) {
        return;
    }

    string remainder = "";
    string topDir = FileEntryInfo.GetFirstDir_STC(strFileName, out remainder);
    DiskZip_ConnectInfo zipConn = _checkFileDataZip(strFileName);
    zipConn.Open();

    SharpCompress.Archive.Zip.ZipArchiveEntry victim = null;
    foreach (var entry in zipConn.ZipTarget.Entries) {
        if (entry.IsDirectory) {
            continue;
        }
        // Entry keys are relative to the first-level dir; match case-insensitively.
        if (string.Compare(entry.Key, remainder, true) == 0) {
            victim = entry;
            break;
        }
    }

    if (victim != null) {
        zipConn.ZipTarget.RemoveEntry(victim);
    }

    if (m_InUpdateState) {
        m_InUpdateConnZips.Add(zipConn); // batched: saved at EndUpdate
    } else {
        zipConn.Save();
    }
}
/// <summary>
/// Returns (creating and caching if necessary) the zip connection that
/// stores the given file's data. Ensures a valid zip archive exists on disk
/// first: a missing file is created empty, and an existing file that is not
/// actually a zip is overwritten with an empty archive.
/// </summary>
/// <param name="strFileName">Path relative to the table root.</param>
/// <returns>The cached connection for the file's backing zip.</returns>
DiskZip_ConnectInfo _checkFileDataZip(string strFileName) {
    string zipDataPath = _getFileDataZipPath(strFileName);

    // Writes a brand-new empty zip archive at zipDataPath, creating the
    // parent directory if needed. (Extracted local function: this block was
    // duplicated verbatim twice in the original.)
    void CreateEmptyZip() {
        FileEntryInfo.CheckDirectory_STC(Path.GetDirectoryName(zipDataPath));
        using (SharpCompress.Archive.Zip.ZipArchive con = SharpCompress.Archive.Zip.ZipArchive.Create()) {
            using (FileStream fst = new FileStream(zipDataPath, FileMode.Create, FileAccess.Write)) {
                con.SaveTo(fst, new SharpCompress.Common.CompressionInfo()); // saved
            }
        }
    }

    bool fileExist = File.Exists(zipDataPath);
    if (!fileExist) {
        CreateEmptyZip();
    }

    if (!m_FileDataZips.ContainsKey(zipDataPath)) {
        if (fileExist) {
            // File pre-existed on disk: verify it really is a zip archive.
            using (FileStream fst = new FileStream(zipDataPath, FileMode.Open, FileAccess.Read)) {
                fileExist = SharpCompress.Archive.Zip.ZipArchive.IsZipFile(fst);
            }
        }
        if (!fileExist) {
            // Corrupt or non-zip content: replace it with an empty archive.
            CreateEmptyZip();
        }
        DiskZip_ConnectInfo dc = new DiskZip_ConnectInfo();
        dc.ConnString = zipDataPath;
        m_FileDataZips.Add(zipDataPath, dc);
    }
    return m_FileDataZips[zipDataPath];
}
/// <summary>
/// Builds a synthetic stat for a directory: fixed mode 0x16D (octal 0555),
/// zero length/blocks, creation time taken from the directory dtc cache
/// (default(DateTime) when the id is not cached).
/// </summary>
/// <param name="dirId">Directory id used as the cache key.</param>
/// <param name="stat">Receives the stat record, or null when not mounted.</param>
/// <returns>Errno.NoError, or AccessDenied when not mounted.</returns>
Errno StatDir(short dirId, out FileEntryInfo stat) {
    stat = null;

    if (!mounted) {
        return Errno.AccessDenied;
    }

    var info = new FileEntryInfo {
        Attributes = new FileAttributes(),
        Inode      = FILEID_CATALOG,
        Mode       = 0x16D,
        Links      = 0,
        UID        = 0,
        GID        = 0,
        DeviceNo   = 0,
        Length     = 0,
        BlockSize  = mddf.datasize,
        Blocks     = 0
    };

    // A miss leaves `creation` at default(DateTime), matching the original.
    directoryDtcCache.TryGetValue(dirId, out DateTime creation);
    info.CreationTime = creation;

    stat = info;
    return Errno.NoError;
}
/// <summary>
/// True when a file entry with the given name exists in its backing zip
/// archive (name normalized, compared case-insensitively).
/// </summary>
/// <param name="strFileName">File path relative to the table root.</param>
/// <returns>True when a matching non-directory entry is found.</returns>
public bool FileExists(string strFileName) {
    strFileName = FileEntryInfo.GetDirPathToLowerNorm_STC(strFileName);
    if (string.IsNullOrEmpty(strFileName)) {
        return false;
    }

    string remainder = "";
    string topDir = FileEntryInfo.GetFirstDir_STC(strFileName, out remainder);
    DiskZip_ConnectInfo zipConn = _checkFileDataZip(strFileName);
    zipConn.Open();

    foreach (var entry in zipConn.ZipTarget.Entries) {
        // Entry keys are relative to the first-level dir.
        if (!entry.IsDirectory && string.Compare(entry.Key, remainder, true) == 0) {
            return true;
        }
    }
    return false;
}
/// <summary>
/// Reads a file's uncompressed content from its backing zip archive.
/// </summary>
/// <param name="strFileName">File path relative to the table root.</param>
/// <returns>The file bytes, or null when no matching entry exists.</returns>
public byte[] OpenFile(string strFileName) {
    strFileName = FileEntryInfo.GetDirPathToLowerNorm_STC(strFileName);
    if (string.IsNullOrEmpty(strFileName)) {
        // BUGFIX: ArgumentNullException expects the parameter NAME; the old
        // code passed the (empty) value.
        throw new ArgumentNullException(nameof(strFileName));
    }
    string firstDir = "";
    string childDirFileName = "";
    firstDir = FileEntryInfo.GetFirstDir_STC(strFileName, out childDirFileName);
    DiskZip_ConnectInfo zipConn = _checkFileDataZip(strFileName); // storage location
    zipConn.Open();

    SharpCompress.Archive.Zip.ZipArchiveEntry findFile = null;
    foreach (var v in zipConn.ZipTarget.Entries) {
        if (v.IsDirectory) {
            continue;
        }
        // Entry keys are relative to the first-level dir; case-insensitive match.
        if (string.Compare(v.Key, childDirFileName, true) == 0) {
            findFile = v;
            break;
        }
    }

    byte[] buf = null;
    if (findFile != null) {
        // `using` disposes the stream; the former explicit Close() was redundant.
        using (MemoryStream ms = zipConn.GetUnCompressStream(findFile)) {
            buf = ms.ToArray();
        }
    }
    return buf;
}
// Stats a file by its catalog id. Ids <= 4 are reserved system files, only
// visible in debug mode; negative reserved ids index the extents file with
// their absolute value. Regular files (> 4) are stated from their extents
// record, with the length taken from the size cache (or the S-records when
// the cache misses).
Errno Stat(short fileId, out FileEntryInfo stat)
{
    stat = null;

    if (!mounted) { return(Errno.AccessDenied); }

    Errno error;
    ExtentFile file;

    if (fileId <= 4)
    {
        // Reserved/system file ids: hidden unless debug is enabled; id 0 never exists.
        if (!debug || fileId == 0) { return(Errno.NoSuchFile); }
        else
        {
            stat = new FileEntryInfo { Attributes = new FileAttributes() };

            error = GetAttributes(fileId, out stat.Attributes);
            if (error != Errno.NoError) { return(error); }

            // Negative ids (other than the signed boot/loader ids) have an
            // extents record stored under their absolute value.
            if (fileId < 0 && fileId != FILEID_BOOT_SIGNED && fileId != FILEID_LOADER_SIGNED)
            {
                error = ReadExtentsFile((short)(fileId * -1), out file);
                if (error != Errno.NoError) { return(error); }

                stat.CreationTime  = DateHandlers.LisaToDateTime(file.dtc);
                stat.AccessTime    = DateHandlers.LisaToDateTime(file.dta);
                stat.BackupTime    = DateHandlers.LisaToDateTime(file.dtb);
                stat.LastWriteTime = DateHandlers.LisaToDateTime(file.dtm);

                stat.Inode     = (ulong)fileId;
                stat.Mode      = 0x124;   // r--r--r-- (octal 0444): read-only system file
                stat.Links     = 0;
                stat.UID       = 0;
                stat.GID       = 0;
                stat.DeviceNo  = 0;
                stat.Length    = mddf.datasize;
                stat.BlockSize = mddf.datasize;
                stat.Blocks    = 1;
            }
            else
            {
                // Boot/loader and other positive system ids: size comes from
                // reading the whole system file.
                error = ReadSystemFile(fileId, out byte[] buf);
                if (error != Errno.NoError) { return(error); }

                // Timestamps come from the volume header (mddf) for these files.
                stat.CreationTime = fileId != 4 ? mddf.dtvc : mddf.dtcc;
                stat.BackupTime   = mddf.dtvb;

                stat.Inode     = (ulong)fileId;
                stat.Mode      = 0x124;   // r--r--r-- (octal 0444)
                stat.Links     = 0;
                stat.UID       = 0;
                stat.GID       = 0;
                stat.DeviceNo  = 0;
                stat.Length    = buf.Length;
                stat.BlockSize = mddf.datasize;
                stat.Blocks    = buf.Length / mddf.datasize;
            }

            return(Errno.NoError);
        }
    }

    // Regular file (id > 4).
    stat = new FileEntryInfo { Attributes = new FileAttributes() };

    error = GetAttributes(fileId, out stat.Attributes);
    if (error != Errno.NoError) { return(error); }

    error = ReadExtentsFile(fileId, out file);
    if (error != Errno.NoError) { return(error); }

    stat.CreationTime  = DateHandlers.LisaToDateTime(file.dtc);
    stat.AccessTime    = DateHandlers.LisaToDateTime(file.dta);
    stat.BackupTime    = DateHandlers.LisaToDateTime(file.dtb);
    stat.LastWriteTime = DateHandlers.LisaToDateTime(file.dtm);

    stat.Inode    = (ulong)fileId;
    stat.Mode     = 0x1B6;   // rw-rw-rw- (octal 0666): regular writable file
    stat.Links    = 1;
    stat.UID      = 0;
    stat.GID      = 0;
    stat.DeviceNo = 0;

    // Prefer the cached size; fall back to the S-record's filesize field.
    if (!fileSizeCache.TryGetValue(fileId, out int len)) { stat.Length = srecords[fileId].filesize; }
    else { stat.Length = len; }

    stat.BlockSize = mddf.datasize;
    stat.Blocks    = file.length;

    return(Errno.NoError);
}
// Stats a file or directory on a FAT volume: sizes and cluster counts come
// from the directory entry; DOS attribute flags are mapped to generic ones.
public Errno Stat(string path, out FileEntryInfo stat)
{
    stat = null;

    if (!mounted) { return(Errno.AccessDenied); }

    Errno err = GetFileEntry(path, out CompleteDirectoryEntry completeEntry);
    if (err != Errno.NoError) { return(err); }

    DirectoryEntry entry = completeEntry.Dirent;

    stat = new FileEntryInfo
    {
        Attributes = new FileAttributes(),
        Blocks = entry.size / bytesPerCluster,
        BlockSize = bytesPerCluster,
        Length = entry.size,
        // On FAT32 the high 16 bits of the start cluster are stored in the
        // field that FAT16 used for the EA handle.
        Inode = (ulong)(fat32 ? (entry.ea_handle << 16) + entry.start_cluster : entry.start_cluster),
        Links = 1,
        CreationTime = DateHandlers.DosToDateTime(entry.cdate, entry.ctime)
    };

    if (@namespace != Namespace.Human)
    {
        stat.LastWriteTime = DateHandlers.DosToDateTime(entry.mdate, entry.mtime);
        // ctime_ms stores extra creation-time resolution in 10 ms units.
        stat.CreationTime = stat.CreationTime?.AddMilliseconds(entry.ctime_ms * 10);
    }

    // Round the block count up when size is not a whole number of clusters.
    if (entry.size % bytesPerCluster > 0) { stat.Blocks++; }

    if (entry.attributes.HasFlag(FatAttributes.Subdirectory))
    {
        stat.Attributes |= FileAttributes.Directory;

        // Directory entries carry size 0, so walk the cluster chain instead.
        stat.Blocks = fat32 ? GetClusters((uint)((entry.ea_handle << 16) + entry.start_cluster)).Length
                            : GetClusters(entry.start_cluster).Length;

        stat.Length = stat.Blocks * stat.BlockSize;
    }

    if (entry.attributes.HasFlag(FatAttributes.ReadOnly)) { stat.Attributes |= FileAttributes.ReadOnly; }

    if (entry.attributes.HasFlag(FatAttributes.Hidden)) { stat.Attributes |= FileAttributes.Hidden; }

    if (entry.attributes.HasFlag(FatAttributes.System)) { stat.Attributes |= FileAttributes.System; }

    if (entry.attributes.HasFlag(FatAttributes.Archive)) { stat.Attributes |= FileAttributes.Archive; }

    if (entry.attributes.HasFlag(FatAttributes.Device)) { stat.Attributes |= FileAttributes.Device; }

    return(Errno.NoError);
}
/// <summary>
/// Adds a new file entry (content + metadata) to the zip archive that backs
/// its first-level directory. Saves immediately unless an update batch is
/// active.
/// </summary>
/// <param name="strFileName">File path relative to the table root.</param>
/// <param name="fileData">File content.</param>
/// <param name="date">Modification time recorded with the entry.</param>
public void AddFile(string strFileName, byte[] fileData, DateTime date) {
    strFileName = FileEntryInfo.GetDirPathToLowerNorm_STC(strFileName);
    if (string.IsNullOrEmpty(strFileName)) {
        // BUGFIX: ArgumentNullException takes the parameter NAME; the old
        // code passed the (empty) value.
        throw new ArgumentNullException(nameof(strFileName));
    }
    string firstDir = "";
    string childDirFileName = "";
    firstDir = FileEntryInfo.GetFirstDir_STC(strFileName, out childDirFileName);
    DiskZip_ConnectInfo zipConn = _checkFileDataZip(strFileName); // storage location
    zipConn.Open();

    FileEntryInfo file = new FileEntryInfo();
    file.FileDir = firstDir;
    file.FileName = childDirFileName;
    file.FileLen = fileData.Length;
    file.FileUpdateTime = file.DateTimeToStr(date);

    zipConn.AddFile(file, fileData);
    if (m_InUpdateState) {
        m_InUpdateConnZips.Add(zipConn); // batched: saved at EndUpdate
    } else {
        zipConn.Save();
    }
}
/// <summary>
/// Mounts a CP/M filesystem: deinterleaves the volume, rebuilds CP/M
/// allocation blocks, parses the directory (file, password, label and
/// timestamp entries) and caches every file's data and stat in memory.
/// </summary>
public Errno Mount(IMediaImage imagePlugin, Partition partition, Encoding encoding,
                   Dictionary <string, string> options, string @namespace)
{
    _device  = imagePlugin;
    Encoding = encoding ?? Encoding.GetEncoding("IBM437");
    options ??= GetDefaultOptions();

    if (options.TryGetValue("debug", out string debugString))
    {
        bool.TryParse(debugString, out _debug);
    }

    // As the identification is so complex, just call Identify() and rely on its findings
    if (!Identify(_device, partition) || !_cpmFound || _workingDefinition == null || _dpb == null)
    {
        return Errno.InvalidArgument;
    }

    // Build the software interleaving sector mask
    if (_workingDefinition.sides == 1)
    {
        _sectorMask = new int[_workingDefinition.side1.sectorIds.Length];

        for (int m = 0; m < _sectorMask.Length; m++)
        {
            _sectorMask[m] = _workingDefinition.side1.sectorIds[m] - _workingDefinition.side1.sectorIds[0];
        }
    }
    else
    {
        // Head changes after every track
        if (string.Compare(_workingDefinition.order, "SIDES", StringComparison.InvariantCultureIgnoreCase) == 0)
        {
            _sectorMask = new int[_workingDefinition.side1.sectorIds.Length +
                                  _workingDefinition.side2.sectorIds.Length];

            for (int m = 0; m < _workingDefinition.side1.sectorIds.Length; m++)
            {
                _sectorMask[m] = _workingDefinition.side1.sectorIds[m] - _workingDefinition.side1.sectorIds[0];
            }

            // Skip first track (first side)
            for (int m = 0; m < _workingDefinition.side2.sectorIds.Length; m++)
            {
                _sectorMask[m + _workingDefinition.side1.sectorIds.Length] =
                    (_workingDefinition.side2.sectorIds[m] - _workingDefinition.side2.sectorIds[0]) +
                    _workingDefinition.side1.sectorIds.Length;
            }
        }

        // Head changes after whole side
        else if (string.Compare(_workingDefinition.order, "CYLINDERS",
                                StringComparison.InvariantCultureIgnoreCase) == 0)
        {
            // BUGFIX: _sectorMask was never allocated on this path, so the
            // loops below threw NullReferenceException instead of reaching
            // the NotImplemented return.
            _sectorMask = new int[_workingDefinition.side1.sectorIds.Length +
                                  _workingDefinition.side2.sectorIds.Length];

            for (int m = 0; m < _workingDefinition.side1.sectorIds.Length; m++)
            {
                _sectorMask[m] = _workingDefinition.side1.sectorIds[m] - _workingDefinition.side1.sectorIds[0];
            }

            // Skip first track (first side) and first track (second side)
            for (int m = 0; m < _workingDefinition.side1.sectorIds.Length; m++)
            {
                _sectorMask[m + _workingDefinition.side1.sectorIds.Length] =
                    (_workingDefinition.side1.sectorIds[m] - _workingDefinition.side1.sectorIds[0]) +
                    _workingDefinition.side1.sectorIds.Length + _workingDefinition.side2.sectorIds.Length;
            }

            // TODO: Implement CYLINDERS ordering
            AaruConsole.DebugWriteLine("CP/M Plugin", "CYLINDERS ordering not yet implemented.");
            return Errno.NotImplemented;
        }

        // TODO: Implement COLUMBIA ordering
        else if (string.Compare(_workingDefinition.order, "COLUMBIA",
                                StringComparison.InvariantCultureIgnoreCase) == 0)
        {
            AaruConsole.DebugWriteLine("CP/M Plugin",
                                       "Don't know how to handle COLUMBIA ordering, not proceeding with this definition.");
            return Errno.NotImplemented;
        }

        // TODO: Implement EAGLE ordering
        else if (string.Compare(_workingDefinition.order, "EAGLE",
                                StringComparison.InvariantCultureIgnoreCase) == 0)
        {
            AaruConsole.DebugWriteLine("CP/M Plugin",
                                       "Don't know how to handle EAGLE ordering, not proceeding with this definition.");
            return Errno.NotImplemented;
        }
        else
        {
            AaruConsole.DebugWriteLine("CP/M Plugin",
                                       "Unknown order type \"{0}\", not proceeding with this definition.",
                                       _workingDefinition.order);
            return Errno.NotSupported;
        }
    }

    // Deinterleave whole volume
    Dictionary <ulong, byte[]> deinterleavedSectors = new Dictionary <ulong, byte[]>();

    if (_workingDefinition.sides == 1 ||
        string.Compare(_workingDefinition.order, "SIDES", StringComparison.InvariantCultureIgnoreCase) == 0)
    {
        AaruConsole.DebugWriteLine("CP/M Plugin", "Deinterleaving whole volume.");

        for (int p = 0; p <= (int)(partition.End - partition.Start); p++)
        {
            byte[] readSector =
                _device.ReadSector((ulong)((int)partition.Start +
                                           ((p / _sectorMask.Length) * _sectorMask.Length) +
                                           _sectorMask[p % _sectorMask.Length]));

            if (_workingDefinition.complement)
            {
                for (int b = 0; b < readSector.Length; b++)
                {
                    readSector[b] = (byte)(~readSector[b] & 0xFF);
                }
            }

            deinterleavedSectors.Add((ulong)p, readSector);
        }
    }

    int blockSize = 128 << _dpb.bsh;
    var blockMs = new MemoryStream();
    ulong blockNo = 0;
    int sectorsPerBlock = 0;
    Dictionary <ulong, byte[]> allocationBlocks = new Dictionary <ulong, byte[]>();

    AaruConsole.DebugWriteLine("CP/M Plugin", "Creating allocation blocks.");

    // For each volume sector
    for (ulong a = 0; a < (ulong)deinterleavedSectors.Count; a++)
    {
        deinterleavedSectors.TryGetValue(a, out byte[] sector);

        // May it happen? Just in case, CP/M blocks are smaller than physical sectors
        if (sector.Length > blockSize)
        {
            for (int i = 0; i < sector.Length / blockSize; i++)
            {
                byte[] tmp = new byte[blockSize];
                Array.Copy(sector, blockSize * i, tmp, 0, blockSize);
                allocationBlocks.Add(blockNo++, tmp);
            }
        }

        // CP/M blocks are larger than physical sectors
        else if (sector.Length < blockSize)
        {
            blockMs.Write(sector, 0, sector.Length);
            sectorsPerBlock++;

            if (sectorsPerBlock != blockSize / sector.Length)
            {
                continue;
            }

            allocationBlocks.Add(blockNo++, blockMs.ToArray());
            sectorsPerBlock = 0;
            blockMs = new MemoryStream();
        }

        // CP/M blocks are same size than physical sectors
        else
        {
            allocationBlocks.Add(blockNo++, sector);
        }
    }

    AaruConsole.DebugWriteLine("CP/M Plugin", "Reading directory.");

    int dirOff;
    int dirSectors = ((_dpb.drm + 1) * 32) / _workingDefinition.bytesPerSector;

    if (_workingDefinition.sofs > 0)
    {
        dirOff = _workingDefinition.sofs;
    }
    else
    {
        dirOff = _workingDefinition.ofs * _workingDefinition.sectorsPerTrack;
    }

    // Read the whole directory blocks
    var dirMs = new MemoryStream();

    for (int d = 0; d < dirSectors; d++)
    {
        deinterleavedSectors.TryGetValue((ulong)(d + dirOff), out byte[] sector);
        dirMs.Write(sector, 0, sector.Length);
    }

    byte[] directory = dirMs.ToArray();

    if (directory == null)
    {
        return Errno.InvalidArgument;
    }

    int dirCnt = 0;
    string file1 = null;
    string file2 = null;
    string file3 = null;

    Dictionary <string, Dictionary <int, List <ushort> > > fileExtents =
        new Dictionary <string, Dictionary <int, List <ushort> > >();

    _statCache = new Dictionary <string, FileEntryInfo>();
    _cpmStat = new FileSystemInfo();
    bool atime = false;
    _dirList = new List <string>();
    _labelCreationDate = null;
    _labelUpdateDate = null;
    _passwordCache = new Dictionary <string, byte[]>();

    AaruConsole.DebugWriteLine("CP/M Plugin", "Traversing directory.");

    // For each directory entry
    for (int dOff = 0; dOff < directory.Length; dOff += 32)
    {
        // Describes a file (does not support PDOS entries with user >= 16, because they're identical to password entries
        if ((directory[dOff] & 0x7F) < 0x10)
        {
            if (allocationBlocks.Count > 256)
            {
                // 16-bit allocation pointers (disks with more than 256 blocks).
                DirectoryEntry16 entry =
                    Marshal.ByteArrayToStructureLittleEndian <DirectoryEntry16>(directory, dOff, 32);

                bool hidden = (entry.statusUser & 0x80) == 0x80;
                bool rdOnly = (entry.filename[0] & 0x80) == 0x80 || (entry.extension[0] & 0x80) == 0x80;
                bool system = (entry.filename[1] & 0x80) == 0x80 || (entry.extension[2] & 0x80) == 0x80;
                //bool backed = (entry.filename[3] & 0x80) == 0x80 || (entry.extension[3] & 0x80) == 0x80;
                int user = entry.statusUser & 0x0F;

                bool validEntry = true;

                for (int i = 0; i < 8; i++)
                {
                    entry.filename[i] &= 0x7F;
                    validEntry &= entry.filename[i] >= 0x20;
                }

                for (int i = 0; i < 3; i++)
                {
                    entry.extension[i] &= 0x7F;
                    validEntry &= entry.extension[i] >= 0x20;
                }

                if (!validEntry)
                {
                    continue;
                }

                string filename = Encoding.ASCII.GetString(entry.filename).Trim();
                string extension = Encoding.ASCII.GetString(entry.extension).Trim();

                // If user is != 0, prefix the user area so names from
                // different user areas stay distinct.
                // BUGFIX: the previous text discarded the filename entirely,
                // collapsing every user>0 file onto one cache key.
                if (user > 0)
                {
                    filename = $"{user:X1}:{filename}";
                }

                if (!string.IsNullOrEmpty(extension))
                {
                    filename = filename + "." + extension;
                }

                // NOTE(review): operand order differs from the 8-bit branch
                // below (extentCounter vs extentCounterHigh) — confirm
                // against the CP/M 3 directory layout.
                int entryNo = ((32 * entry.extentCounter) + entry.extentCounterHigh) / (_dpb.exm + 1);

                // Do we have a stat for the file already?
                if (_statCache.TryGetValue(filename, out FileEntryInfo fInfo))
                {
                    _statCache.Remove(filename);
                }
                else
                {
                    fInfo = new FileEntryInfo { Attributes = new FileAttributes() };
                }

                // And any extent?
                if (fileExtents.TryGetValue(filename, out Dictionary <int, List <ushort> > extentBlocks))
                {
                    fileExtents.Remove(filename);
                }
                else
                {
                    extentBlocks = new Dictionary <int, List <ushort> >();
                }

                // Do we already have this extent? Should never happen
                if (extentBlocks.TryGetValue(entryNo, out List <ushort> blocks))
                {
                    extentBlocks.Remove(entryNo);
                }
                else
                {
                    blocks = new List <ushort>();
                }

                // Attributes
                if (hidden)
                {
                    fInfo.Attributes |= FileAttributes.Hidden;
                }

                if (rdOnly)
                {
                    fInfo.Attributes |= FileAttributes.ReadOnly;
                }

                if (system)
                {
                    fInfo.Attributes |= FileAttributes.System;
                }

                // Supposedly there is a value in the directory entry telling how many blocks are designated in
                // this entry. However some implementations tend to do whatever they wish, but none will ever
                // allocate block 0 for a file because that's where the directory resides.
                // There is also a field telling how many bytes are used in the last block, but its meaning is
                // non-standard so we must ignore it.
                foreach (ushort blk in entry.allocations.Where(blk => !blocks.Contains(blk) && blk != 0))
                {
                    blocks.Add(blk);
                }

                // Save the file
                fInfo.UID = (ulong)user;
                extentBlocks.Add(entryNo, blocks);
                fileExtents.Add(filename, extentBlocks);
                _statCache.Add(filename, fInfo);

                // Add the file to the directory listing
                if (!_dirList.Contains(filename))
                {
                    _dirList.Add(filename);
                }

                // Count entries 3 by 3 for timestamps
                switch (dirCnt % 3)
                {
                    case 0:
                        file1 = filename;
                        break;
                    case 1:
                        file2 = filename;
                        break;
                    case 2:
                        file3 = filename;
                        break;
                }

                dirCnt++;
            }
            else
            {
                // 8-bit allocation pointers (disks with <= 256 blocks);
                // same handling as above with the narrower structure.
                DirectoryEntry entry =
                    Marshal.ByteArrayToStructureLittleEndian <DirectoryEntry>(directory, dOff, 32);

                bool hidden = (entry.statusUser & 0x80) == 0x80;
                bool rdOnly = (entry.filename[0] & 0x80) == 0x80 || (entry.extension[0] & 0x80) == 0x80;
                bool system = (entry.filename[1] & 0x80) == 0x80 || (entry.extension[2] & 0x80) == 0x80;
                //bool backed = (entry.filename[3] & 0x80) == 0x80 || (entry.extension[3] & 0x80) == 0x80;
                int user = entry.statusUser & 0x0F;

                bool validEntry = true;

                for (int i = 0; i < 8; i++)
                {
                    entry.filename[i] &= 0x7F;
                    validEntry &= entry.filename[i] >= 0x20;
                }

                for (int i = 0; i < 3; i++)
                {
                    entry.extension[i] &= 0x7F;
                    validEntry &= entry.extension[i] >= 0x20;
                }

                if (!validEntry)
                {
                    continue;
                }

                string filename = Encoding.ASCII.GetString(entry.filename).Trim();
                string extension = Encoding.ASCII.GetString(entry.extension).Trim();

                // If user is != 0, prefix the user area so names from
                // different user areas stay distinct (BUGFIX, see above).
                if (user > 0)
                {
                    filename = $"{user:X1}:{filename}";
                }

                if (!string.IsNullOrEmpty(extension))
                {
                    filename = filename + "." + extension;
                }

                int entryNo = ((32 * entry.extentCounterHigh) + entry.extentCounter) / (_dpb.exm + 1);

                // Do we have a stat for the file already?
                if (_statCache.TryGetValue(filename, out FileEntryInfo fInfo))
                {
                    _statCache.Remove(filename);
                }
                else
                {
                    fInfo = new FileEntryInfo { Attributes = new FileAttributes() };
                }

                // And any extent?
                if (fileExtents.TryGetValue(filename, out Dictionary <int, List <ushort> > extentBlocks))
                {
                    fileExtents.Remove(filename);
                }
                else
                {
                    extentBlocks = new Dictionary <int, List <ushort> >();
                }

                // Do we already have this extent? Should never happen
                if (extentBlocks.TryGetValue(entryNo, out List <ushort> blocks))
                {
                    extentBlocks.Remove(entryNo);
                }
                else
                {
                    blocks = new List <ushort>();
                }

                // Attributes
                if (hidden)
                {
                    fInfo.Attributes |= FileAttributes.Hidden;
                }

                if (rdOnly)
                {
                    fInfo.Attributes |= FileAttributes.ReadOnly;
                }

                if (system)
                {
                    fInfo.Attributes |= FileAttributes.System;
                }

                // Supposedly there is a value in the directory entry telling how many blocks are designated in
                // this entry. However some implementations tend to do whatever they wish, but none will ever
                // allocate block 0 for a file because that's where the directory resides.
                // There is also a field telling how many bytes are used in the last block, but its meaning is
                // non-standard so we must ignore it.
                foreach (ushort blk in entry.allocations.Where(blk => !blocks.Contains(blk) && blk != 0))
                {
                    blocks.Add(blk);
                }

                // Save the file
                fInfo.UID = (ulong)user;
                extentBlocks.Add(entryNo, blocks);
                fileExtents.Add(filename, extentBlocks);
                _statCache.Add(filename, fInfo);

                // Add the file to the directory listing
                if (!_dirList.Contains(filename))
                {
                    _dirList.Add(filename);
                }

                // Count entries 3 by 3 for timestamps
                switch (dirCnt % 3)
                {
                    case 0:
                        file1 = filename;
                        break;
                    case 1:
                        file2 = filename;
                        break;
                    case 2:
                        file3 = filename;
                        break;
                }

                dirCnt++;
            }
        }

        // A password entry (or a file entry in PDOS, but this does not handle that case)
        else if ((directory[dOff] & 0x7F) >= 0x10 && (directory[dOff] & 0x7F) < 0x20)
        {
            PasswordEntry entry = Marshal.ByteArrayToStructureLittleEndian <PasswordEntry>(directory, dOff, 32);

            int user = entry.userNumber & 0x0F;

            for (int i = 0; i < 8; i++)
            {
                entry.filename[i] &= 0x7F;
            }

            for (int i = 0; i < 3; i++)
            {
                entry.extension[i] &= 0x7F;
            }

            string filename = Encoding.ASCII.GetString(entry.filename).Trim();
            string extension = Encoding.ASCII.GetString(entry.extension).Trim();

            // If user is != 0, prefix the user area so the key matches the
            // file entry's key (BUGFIX, see above).
            if (user > 0)
            {
                filename = $"{user:X1}:{filename}";
            }

            if (!string.IsNullOrEmpty(extension))
            {
                filename = filename + "." + extension;
            }

            // Do not repeat passwords
            if (_passwordCache.ContainsKey(filename))
            {
                _passwordCache.Remove(filename);
            }

            // Copy whole password entry
            byte[] tmp = new byte[32];
            Array.Copy(directory, dOff, tmp, 0, 32);
            _passwordCache.Add(filename, tmp);

            // Count entries 3 by 3 for timestamps
            switch (dirCnt % 3)
            {
                case 0:
                    file1 = filename;
                    break;
                case 1:
                    file2 = filename;
                    break;
                case 2:
                    file3 = filename;
                    break;
            }

            dirCnt++;
        }

        // Volume label and password entry. Volume password is ignored.
        else
        {
            switch (directory[dOff] & 0x7F)
            {
                case 0x20:
                    LabelEntry labelEntry =
                        Marshal.ByteArrayToStructureLittleEndian <LabelEntry>(directory, dOff, 32);

                    // The volume label defines if one of the fields in CP/M 3 timestamp is a creation or an
                    // access time
                    atime |= (labelEntry.flags & 0x40) == 0x40;

                    _label = Encoding.ASCII.GetString(directory, dOff + 1, 11).Trim();
                    _labelCreationDate = new byte[4];
                    _labelUpdateDate = new byte[4];
                    Array.Copy(directory, dOff + 24, _labelCreationDate, 0, 4);
                    Array.Copy(directory, dOff + 28, _labelUpdateDate, 0, 4);

                    // Count entries 3 by 3 for timestamps
                    switch (dirCnt % 3)
                    {
                        case 0:
                            file1 = null;
                            break;
                        case 1:
                            file2 = null;
                            break;
                        case 2:
                            file3 = null;
                            break;
                    }

                    dirCnt++;
                    break;
                case 0x21:
                    if (directory[dOff + 10] == 0x00 && directory[dOff + 20] == 0x00 &&
                        directory[dOff + 30] == 0x00 && directory[dOff + 31] == 0x00)
                    {
                        // CP/M 3 style timestamp entry.
                        DateEntry dateEntry =
                            Marshal.ByteArrayToStructureLittleEndian <DateEntry>(directory, dOff, 32);

                        FileEntryInfo fInfo;

                        // Entry contains timestamps for last 3 entries, whatever the kind they are.
                        if (!string.IsNullOrEmpty(file1))
                        {
                            if (_statCache.TryGetValue(file1, out fInfo))
                            {
                                _statCache.Remove(file1);
                            }
                            else
                            {
                                fInfo = new FileEntryInfo();
                            }

                            if (atime)
                            {
                                fInfo.AccessTime = DateHandlers.CpmToDateTime(dateEntry.date1);
                            }
                            else
                            {
                                fInfo.CreationTime = DateHandlers.CpmToDateTime(dateEntry.date1);
                            }

                            fInfo.LastWriteTime = DateHandlers.CpmToDateTime(dateEntry.date2);
                            _statCache.Add(file1, fInfo);
                        }

                        if (!string.IsNullOrEmpty(file2))
                        {
                            if (_statCache.TryGetValue(file2, out fInfo))
                            {
                                _statCache.Remove(file2);
                            }
                            else
                            {
                                fInfo = new FileEntryInfo();
                            }

                            if (atime)
                            {
                                fInfo.AccessTime = DateHandlers.CpmToDateTime(dateEntry.date3);
                            }
                            else
                            {
                                fInfo.CreationTime = DateHandlers.CpmToDateTime(dateEntry.date3);
                            }

                            fInfo.LastWriteTime = DateHandlers.CpmToDateTime(dateEntry.date4);
                            _statCache.Add(file2, fInfo);
                        }

                        if (!string.IsNullOrEmpty(file3))
                        {
                            if (_statCache.TryGetValue(file3, out fInfo))
                            {
                                _statCache.Remove(file3);
                            }
                            else
                            {
                                fInfo = new FileEntryInfo();
                            }

                            if (atime)
                            {
                                fInfo.AccessTime = DateHandlers.CpmToDateTime(dateEntry.date5);
                            }
                            else
                            {
                                fInfo.CreationTime = DateHandlers.CpmToDateTime(dateEntry.date5);
                            }

                            fInfo.LastWriteTime = DateHandlers.CpmToDateTime(dateEntry.date6);
                            _statCache.Add(file3, fInfo);
                        }

                        file1 = null;
                        file2 = null;
                        file3 = null;
                        dirCnt = 0;
                    }

                    // However, if this byte is 0, timestamp is in Z80DOS or DOS+ format
                    else if (directory[dOff + 1] == 0x00)
                    {
                        TrdPartyDateEntry trdPartyDateEntry =
                            Marshal.ByteArrayToStructureLittleEndian <TrdPartyDateEntry>(directory, dOff, 32);

                        FileEntryInfo fInfo;

                        // Entry contains timestamps for last 3 entries, whatever the kind they are.
                        if (!string.IsNullOrEmpty(file1))
                        {
                            if (_statCache.TryGetValue(file1, out fInfo))
                            {
                                _statCache.Remove(file1);
                            }
                            else
                            {
                                fInfo = new FileEntryInfo();
                            }

                            byte[] ctime = new byte[4];
                            ctime[0] = trdPartyDateEntry.create1[0];
                            ctime[1] = trdPartyDateEntry.create1[1];

                            fInfo.AccessTime = DateHandlers.CpmToDateTime(trdPartyDateEntry.access1);
                            fInfo.CreationTime = DateHandlers.CpmToDateTime(ctime);
                            fInfo.LastWriteTime = DateHandlers.CpmToDateTime(trdPartyDateEntry.modify1);
                            _statCache.Add(file1, fInfo);
                        }

                        if (!string.IsNullOrEmpty(file2))
                        {
                            if (_statCache.TryGetValue(file2, out fInfo))
                            {
                                _statCache.Remove(file2);
                            }
                            else
                            {
                                fInfo = new FileEntryInfo();
                            }

                            byte[] ctime = new byte[4];
                            ctime[0] = trdPartyDateEntry.create2[0];
                            ctime[1] = trdPartyDateEntry.create2[1];

                            fInfo.AccessTime = DateHandlers.CpmToDateTime(trdPartyDateEntry.access2);
                            fInfo.CreationTime = DateHandlers.CpmToDateTime(ctime);
                            fInfo.LastWriteTime = DateHandlers.CpmToDateTime(trdPartyDateEntry.modify2);
                            _statCache.Add(file2, fInfo);
                        }

                        if (!string.IsNullOrEmpty(file3))
                        {
                            // BUGFIX: this branch handles file3 but looked up
                            // (and removed) file1's stat, corrupting file1's
                            // cached timestamps and duplicating file3's key.
                            if (_statCache.TryGetValue(file3, out fInfo))
                            {
                                _statCache.Remove(file3);
                            }
                            else
                            {
                                fInfo = new FileEntryInfo();
                            }

                            byte[] ctime = new byte[4];
                            ctime[0] = trdPartyDateEntry.create3[0];
                            ctime[1] = trdPartyDateEntry.create3[1];

                            fInfo.AccessTime = DateHandlers.CpmToDateTime(trdPartyDateEntry.access3);
                            fInfo.CreationTime = DateHandlers.CpmToDateTime(ctime);
                            fInfo.LastWriteTime = DateHandlers.CpmToDateTime(trdPartyDateEntry.modify3);
                            _statCache.Add(file3, fInfo);
                        }

                        file1 = null;
                        file2 = null;
                        file3 = null;
                        dirCnt = 0;
                    }

                    break;
            }
        }
    }

    // Cache all files. As CP/M maximum volume size is 8 MiB this should not be a problem
    AaruConsole.DebugWriteLine("CP/M Plugin", "Reading files.");
    long usedBlocks = 0;
    _fileCache = new Dictionary <string, byte[]>();

    foreach (string filename in _dirList)
    {
        var fileMs = new MemoryStream();

        // Every name in _dirList was added together with a stat entry above.
        if (_statCache.TryGetValue(filename, out FileEntryInfo fInfo))
        {
            _statCache.Remove(filename);
        }

        fInfo.Blocks = 0;

        if (fileExtents.TryGetValue(filename, out Dictionary <int, List <ushort> > extents))
        {
            for (int ex = 0; ex < extents.Count; ex++)
            {
                if (!extents.TryGetValue(ex, out List <ushort> alBlks))
                {
                    continue;
                }

                foreach (ushort alBlk in alBlks)
                {
                    allocationBlocks.TryGetValue(alBlk, out byte[] blk);
                    fileMs.Write(blk, 0, blk.Length);
                    fInfo.Blocks++;
                }
            }
        }

        // If you insist to call CP/M "extent based"
        fInfo.Attributes |= FileAttributes.Extents;
        fInfo.BlockSize = blockSize;
        fInfo.Length = fileMs.Length;
        _cpmStat.Files++;
        usedBlocks += fInfo.Blocks;

        _statCache.Add(filename, fInfo);
        _fileCache.Add(filename, fileMs.ToArray());
    }

    _decodedPasswordCache = new Dictionary <string, byte[]>();

    // For each stored password, store a decoded version of it
    if (_passwordCache.Count > 0)
    {
        foreach (KeyValuePair <string, byte[]> kvp in _passwordCache)
        {
            byte[] tmp = new byte[8];
            Array.Copy(kvp.Value, 16, tmp, 0, 8);

            // Password bytes are XORed with the decoder byte at offset 13.
            for (int t = 0; t < 8; t++)
            {
                tmp[t] ^= kvp.Value[13];
            }

            _decodedPasswordCache.Add(kvp.Key, tmp);
        }
    }

    // Generate statfs.
    _cpmStat.Blocks = (ulong)(_dpb.dsm + 1);
    _cpmStat.FilenameLength = 11;
    _cpmStat.Files = (ulong)_fileCache.Count;
    _cpmStat.FreeBlocks = _cpmStat.Blocks - (ulong)usedBlocks;
    _cpmStat.PluginId = Id;
    _cpmStat.Type = "CP/M filesystem";

    // Generate XML info
    XmlFsType = new FileSystemType
    {
        Clusters = _cpmStat.Blocks,
        ClusterSize = (uint)blockSize,
        Files = (ulong)_fileCache.Count,
        FilesSpecified = true,
        FreeClusters = _cpmStat.FreeBlocks,
        FreeClustersSpecified = true,
        Type = "CP/M filesystem"
    };

    if (_labelCreationDate != null)
    {
        XmlFsType.CreationDate = DateHandlers.CpmToDateTime(_labelCreationDate);
        XmlFsType.CreationDateSpecified = true;
    }

    if (_labelUpdateDate != null)
    {
        XmlFsType.ModificationDate = DateHandlers.CpmToDateTime(_labelUpdateDate);
        XmlFsType.ModificationDateSpecified = true;
    }

    if (!string.IsNullOrEmpty(_label))
    {
        XmlFsType.VolumeName = _label;
    }

    _mounted = true;
    return Errno.NoError;
}
/// <summary>
/// Replaces the content of an existing packed file (re-adding it with new data) and stamps
/// it with <paramref name="date"/>. When a batch update is in progress (m_InUpdateState)
/// the zip connection is queued in m_InUpdateConnZips instead of being saved immediately.
/// </summary>
/// <param name="strFileName">Path of the file inside the pack; normalized to the internal lower-case form.</param>
/// <param name="fileData">New file content.</param>
/// <param name="date">New update timestamp stored with the entry.</param>
/// <exception cref="ArgumentNullException">Thrown when the normalized path is null or empty.</exception>
public void UpdateFile(string strFileName, byte[] fileData, DateTime date) {
    strFileName = FileEntryInfo.GetDirPathToLowerNorm_STC(strFileName);
    if (string.IsNullOrEmpty(strFileName)) {
        // BUG FIX: the original passed the (null/empty) value itself as the exception's
        // paramName argument; use nameof() so the exception reports the parameter name.
        throw new ArgumentNullException(nameof(strFileName));
    }
    string firstDir = "";
    string childDirFileName = "";
    firstDir = FileEntryInfo.GetFirstDir_STC(strFileName, out childDirFileName);
    DiskZip_ConnectInfo zipConn = _checkFileDataZip(strFileName); // resolve the zip that stores this path
    //_checkTopChildDirZipPath(firstDir); // (disabled) verify/create the zip for the top-level dir
    FileEntryInfo file = new FileEntryInfo();
    file.FileDir = firstDir;
    file.FileName = childDirFileName;
    file.FileLen = fileData.Length;
    file.FileUpdateTime = file.DateTimeToStr(date);
    //
    zipConn.Open();
    // Locate the existing entry, if any.
    // NOTE(review): entries are matched against the child file name only — assumes keys in
    // the per-directory zip carry no leading directory component; confirm against AddFile.
    SharpCompress.Archive.Zip.ZipArchiveEntry findFile = null;
    foreach (var ze in zipConn.ZipTarget.Entries) {
        if (ze.IsDirectory) {
            continue;
        }
        // FIX: ordinal case-insensitive comparison instead of the culture-sensitive
        // string.Compare(a, b, true); entry keys are file paths, not linguistic text.
        if (string.Compare(ze.Key, file.FileName, StringComparison.OrdinalIgnoreCase) == 0) {
            findFile = ze;
            break;
        }
    }
    //
    if (findFile != null) {
        zipConn.ZipTarget.RemoveEntry(findFile); // drop the old entry
    }
    // Re-add the file with the new data and timestamp.
    AddFile(strFileName, fileData, date);
    if (m_InUpdateState) {
        m_InUpdateConnZips.Add(zipConn); // deferred: saved when the batch update ends
    } else {
        zipConn.Save();
    }
}
/// <summary>
/// Deletes a directory from the pack. The argument must be a directory path.
/// Note: in the top-level case this bypasses the m_InUpdateState batching and deletes
/// the backing zip/directory on disk immediately.
/// </summary>
/// <param name="strDir">Directory to delete (normalized to the internal lower-case form).</param>
public void DelDir(string strDir) {
    // Normalize; an empty result would mean "delete everything", which is refused.
    strDir = FileEntryInfo.GetDirPathToLowerNorm_STC(strDir);
    if (string.IsNullOrEmpty(strDir)) {
        return; // refuse to delete the whole root
    }
    strDir = strDir + "/"; // ensure trailing slash for prefix matching below
    string firstDir = "";
    string childFirstDirFileName = "";
    string secendDir = "";
    //string childSecendDirFileName = "";
    firstDir = FileEntryInfo.GetFirstDir_STC(strDir, out childFirstDirFileName);
    if (!string.IsNullOrEmpty(childFirstDirFileName)) {
        // There is a sub-directory component; extract the second-level name.
        int index = childFirstDirFileName.IndexOf('/');
        if (index != -1) {
            secendDir = childFirstDirFileName.Substring(0, index);
        } else {
            secendDir = childFirstDirFileName;
        }
    }
    // Empty second-level directory means a top-level dir is being deleted.
    if (string.IsNullOrEmpty(secendDir)) {
        // Top-level delete: removes the backing zip file and its directory from disk.
        // CAUTION: this takes effect immediately (not deferred by m_InUpdateState).
        string realPath = Path.Combine(this.RootDir_FileTable, firstDir);
        string zip = Path.Combine(realPath, FileDataDBName);
        if (File.Exists(zip)) {
            // The dummy "/checkdir.zip" suffix only serves to resolve the connection
            // for firstDir so it can be closed before the file is deleted.
            DiskZip_ConnectInfo zipConn = _checkFileDataZip(firstDir + "/checkdir.zip");
            zipConn.Close();
            File.Delete(zip);
        }
        if (Directory.Exists(realPath)) {
            Directory.Delete(realPath, true);
        }
        if (m_FileDataZips.ContainsKey(zip)) {
            m_FileDataZips.Remove(zip);
        }
    } else {
        // Second-level delete (e.g. a robot/VPL file list inside a scene):
        // remove matching entries from inside the zip archive.
        secendDir = secendDir + "/";
        // NOTE(review): strDir already ends with '/', so this builds "dir//checkfiledata.zip";
        // presumably _checkFileDataZip normalizes double slashes — confirm.
        DiskZip_ConnectInfo zipConn = _checkFileDataZip(strDir + "/checkfiledata.zip");
        zipConn.Open();
        // Collect every entry whose key starts with the second-level dir prefix.
        List <SharpCompress.Archive.Zip.ZipArchiveEntry> olddirs = new List <SharpCompress.Archive.Zip.ZipArchiveEntry>();
        foreach (var ze in zipConn.ZipTarget.Entries) {
            //if (ze.IsDirectory) {
            if (ze.Key.ToLower().StartsWith(secendDir)) {
                olddirs.Add(ze);
            }
            //}
        }
        // Remove the directory entries and the files they contain (reverse order).
        for (int i = olddirs.Count - 1; i >= 0; i--) {
            zipConn.ZipTarget.RemoveEntry(olddirs[i]);
            //olddirs[i].Close();
        }
        if (m_InUpdateState) {
            m_InUpdateConnZips.Add(zipConn); // deferred save at batch end
        } else {
            zipConn.Save();
        }
    }
}
/// <summary>
/// Manual test harness: exercises JSON (de)serialization of IRQ_VPLDocInfo_Json,
/// dumps the set of invalid filename/path characters, drives the file-pack API
/// (add/update/rename/delete), then returns. The code after the early return is a
/// collection of disabled SharpCompress zip experiments kept for reference.
/// </summary>
public static void Main(string[] args) {
    // --- Serialization round-trip test ---
    List <IRQ_VPLDocInfo_Json> vpldocjsons = new List <IRQ_VPLDocInfo_Json>();
    for (int i = 0; i < 1; i++) {
        IRQ_VPLDocInfo_Json docjs = new IRQ_VPLDocInfo_Json();
        docjs.Author = "me_" + i.ToString();
        docjs.CodeType = VPL_CodeType.CSharp;
        docjs.CreateDate = DateTime.Now;
        docjs.Description = "";
        docjs.IsCodeMode = false;
        docjs.IsNoRobotMode = true;
        docjs.LastUpdate = DateTime.Now;
        docjs.NoRobotMode_LastPlatId = "";
        docjs.OnCloudSvr = false;
        docjs.PackageUniqueId = "";
        docjs.PackageVersion = "2.0";
        docjs.Source = IRQ_FileDocSource.User;
        docjs.Ver = "2.0";
        vpldocjsons.Add(docjs);
    }
    string docJsonStr = SimpleJsonEx.SimpleJson.SerializeObject(vpldocjsons);
    using (FileStream fs = new FileStream("test_array.json", FileMode.OpenOrCreate, FileAccess.Write)) {
        using (StreamWriter sw = new StreamWriter(fs, Encoding.UTF8)) {
            sw.Write(docJsonStr);
        }
    }
    // Deserialize the file back and check the round trip (inspected in the debugger).
    string docJosnStr2 = "";
    using (StreamReader sr = new StreamReader("test_array.json", Encoding.UTF8)) {
        docJosnStr2 = sr.ReadToEnd();
    }
    List <IRQ_VPLDocInfo_Json> redocJsons = SimpleJsonEx.SimpleJson.DeserializeObject <List <IRQ_VPLDocInfo_Json> >(docJosnStr2);
    // --- Dump the union of invalid filename and path characters as code points ---
    char[] invalid1 = Path.GetInvalidFileNameChars();
    char[] invalid2 = Path.GetInvalidPathChars();
    List <char> invalidChars = new List <char>();
    invalidChars.AddRange(invalid1);
    for (int i = 0; i < invalid2.Length; i++) {
        if (invalidChars.Contains(invalid2[i]) == false) {
            invalidChars.Add(invalid2[i]);
        }
    }
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < invalidChars.Count; i++) {
        sb.Append(string.Format("{0}", (int)invalidChars[i]));
        if (i != invalidChars.Count - 1) {
            sb.Append(",");
        }
    }
    string chars = sb.ToString();
    Console.WriteLine(chars);
    // Sample output (Windows): control chars plus '"','<','>','|',':','*','?','\\','/'.
    // --- File-pack API smoke test ---
    int timestart = Environment.TickCount;
    ResourceService.Init();
    // Stopwatch swRead = new Stopwatch();
    // swRead.Start();
    // IFilePackerStrategy urobot = ResourceService.GetLib(IRQ_FileType.Robot);
    //byte[] strInfo= urobot.OpenFile("kaka/1.png");
    //urobot.DelDir("kaka");
    //swRead.Stop();
    //Console.WriteLine("end:"+swRead.ElapsedMilliseconds.ToString()+" ms");
    //return;
    //
    // Add files to the pack, then update/rename/delete them to exercise every API path.
    IFileSysPackerStrategy fps = ResourceService.GetLib(IRQ_FileType.TempLeadInRes);
    FileStream fsadd = new FileStream("mypack.data.zip", FileMode.Open, FileAccess.Read);
    byte[] buf_add = new byte[fsadd.Length];
    // NOTE(review): single Read call assumes the whole file is returned at once — TODO confirm.
    fsadd.Read(buf_add, 0, buf_add.Length);
    fsadd.Close();
    fps.Packer.BeginUpdate(fps); // start batched update; saves are deferred until EndUpdate
    if (fps.FileExists("/test/mypack.data.zip")) {
        byte[] testReadBuf = fps.OpenFile("/test/mypack.data.zip");
        fps.DelFile("/test/mypack.data.zip");
    }
    fps.AddFile("/test/mypack.data.zip", buf_add, DateTime.Now); //addfile
    fps.AddFile("/test/dir2/mypack2.data.zip", buf_add, DateTime.Now); //addfile
    fps.UpdateFile("/test/dir2/mypack2.data.zip", new byte[] { 1, 2, 3, 4 }, DateTime.Now);
    fps.AddFile("/test2/mypack.data.zip", buf_add, DateTime.Now); //addfile
    fps.RenameFile("/test2/mypack.data.zip", "/test2/mypack2.data.zip");
    fps.RenameDir("test", "test3");
    List <string> getDirs = new List <string>();
    fps.GetDirs(out getDirs);
    fps.Clean();
    fps.AddFile("/test3/mypack.data.zip", buf_add, FileEntryInfo.DateTimeFromStr_STC("2016-01-01 12:12:12")); //addfile
    fps.AddFile("/test4/mypack.data.zip", buf_add, DateTime.Now); //addfile
    getDirs = new List <string>();
    fps.GetDirs(out getDirs);
    DateTime dtupdate = fps.GetUpdateDate("test3/mypack.data.zip");
    List <string> filenames = new List <string>();
    int totalSize = 0;
    fps.GetFiles("test3", out filenames, out totalSize);
    fps.DelDir("test3");
    fps.RenameDir("test4", "test");
    fps.Packer.EndUpdate(fps, true);
    Console.WriteLine("耗时:" + (Environment.TickCount - timestart).ToString() + " ms");
    Console.Read();
    return;
    // ============================================================================
    // DEAD CODE below: intentionally unreachable SharpCompress experiments kept
    // for reference (compression-type timing, zip-with-header-prefix read/write).
    // ============================================================================
    string SCRATCH_FILES_PATH = "ziptest";
    //
    {
        // Compression timing results (10 iterations):
        //CompressionType.LZMA 10次 34175ms 242k
        //CompressionType.PPMd 10次 68678ms 319k
        //CompressionType.Deflate 10次 3006ms 428k
        //CompressionType.BZip2 10次 10103ms 335k
        //CompressionType.GZip not support
        //CompressionType.Rar not support
        //CompressionType.BCJ2 not support
        //CompressionType.BCJ not support
        Stopwatch sw = new Stopwatch();
        sw.Start();
        //for (int i = 0; i < 10; i++) {
        using (var archive = ZipArchive.Create()) {
            DirectoryInfo di = new DirectoryInfo(SCRATCH_FILES_PATH);
            foreach (var fi in di.GetFiles()) {
                archive.AddEntry(fi.Name, fi.OpenRead(), true);
            }
            FileStream fs_scratchPath = new FileStream("compresstimetest.zip", FileMode.OpenOrCreate, FileAccess.Write);
            archive.SaveTo(fs_scratchPath, CompressionType.Deflate);
            fs_scratchPath.Close();
        }
        //break;
        //}
        sw.Stop();
        Console.WriteLine("10time (ms):" + sw.ElapsedMilliseconds.ToString());
    }
    string scratchPath = "ziptest.zip";
    using (var archive = ZipArchive.Create()) {
        DirectoryInfo di = new DirectoryInfo(SCRATCH_FILES_PATH);
        foreach (var fi in di.GetFiles()) {
            archive.AddEntry(fi.Name, fi.OpenRead(), true);
        }
        FileStream fs_scratchPath = new FileStream(scratchPath, FileMode.OpenOrCreate, FileAccess.Write);
        archive.SaveTo(fs_scratchPath, CompressionType.LZMA);
        fs_scratchPath.Close();
        //archive.AddAllFromDirectory(SCRATCH_FILES_PATH);
        //archive.SaveTo(scratchPath, CompressionType.Deflate);
        // Write a custom header, then append the zip archive after it via OffsetStream.
        using (FileStream fs = new FileStream("ziphead.zip", FileMode.OpenOrCreate, FileAccess.ReadWrite)) {
            MyHead mh = new MyHead();
            byte[] headData = mh.Create();
            fs.Write(headData, 0, headData.Length);
            //
            SharpCompress.IO.OffsetStream ofs = new IO.OffsetStream(fs, fs.Position);
            archive.SaveTo(ofs, CompressionType.Deflate);
        }
    }
    // Write a zip file prefixed with the custom head data.
    using (FileStream fs = new FileStream("mypack.data.zip", FileMode.Create, FileAccess.ReadWrite, FileShare.Read)) {
        MyHead mh = new MyHead();
        byte[] headData = mh.Create();
        fs.Write(headData, 0, headData.Length);
        using (FileStream fs2 = new FileStream(scratchPath, FileMode.Open, FileAccess.Read)) {
            byte[] buf = new byte[1024];
            int rc = 0;
            while ((rc = fs2.Read(buf, 0, buf.Length)) > 0) {
                fs.Write(buf, 0, rc);
            }
        }
    }
    //
    // Read the zip file back, skipping the head via OffsetStream.
    //
    using (FileStream fs = new FileStream("mypack.data.zip", FileMode.Open, FileAccess.Read, FileShare.Read)) {
        byte[] buf = new byte[1024];
        int offset = fs.Read(buf, 0, buf.Length);
        System.Diagnostics.Debug.Assert(offset == 1024); // head is assumed to be exactly 1 KiB
        //fs.Position = 0L;
        SharpCompress.IO.OffsetStream substream = new SharpCompress.IO.OffsetStream(fs, offset);
        ZipArchive zip = ZipArchive.Open(substream, Options.KeepStreamsOpen);//cann't read
        //ZipArchive zip = ZipArchive.Open(fs, Options.None); //will throw exption
        //ZipArchive zip = ZipArchive.Open(fs, Options.KeepStreamsOpen);//cann't read
        foreach (ZipArchiveEntry zf in zip.Entries) {
            Console.WriteLine(zf.Key);
            //bug:the will not none in zipfile
        }
        int jjj = 0;
        jjj++;
    }
}
/// <summary>
/// Returns POSIX-like stat information for a file or directory on a FATX volume.
/// In debug mode, an empty path, "$" or "/" yields a synthetic entry for the root directory.
/// </summary>
/// <param name="path">Path to the file or directory.</param>
/// <param name="stat">Receives the entry information; null on error.</param>
/// <returns>Errno.NoError on success, otherwise the error code.</returns>
public Errno Stat(string path, out FileEntryInfo stat) {
    stat = null;
    if (!mounted) {
        return(Errno.AccessDenied);
    }
    // Debug-only synthetic root directory entry.
    if (debug && (string.IsNullOrEmpty(path) || path == "$" || path == "/")) {
        stat = new FileEntryInfo {
            Attributes = FileAttributes.Directory | FileAttributes.System | FileAttributes.Hidden,
            Blocks     = GetClusters(superblock.rootDirectoryCluster).Length,
            BlockSize  = bytesPerCluster,
            Length     = GetClusters(superblock.rootDirectoryCluster).Length *bytesPerCluster,
            Inode      = superblock.rootDirectoryCluster,
            Links      = 1
        };
        return(Errno.NoError);
    }
    Errno err = GetFileEntry(path, out DirectoryEntry entry);
    if (err != Errno.NoError) {
        return(err);
    }
    stat = new FileEntryInfo {
        Attributes = new FileAttributes(),
        Blocks     = entry.length / bytesPerCluster,
        BlockSize  = bytesPerCluster,
        Length     = entry.length,
        Inode      = entry.firstCluster,
        Links      = 1,
        // Timestamp fields: the little-endian variant swaps the date/time argument order
        // and adds 20 years relative to the big-endian decoding.
        // NOTE(review): presumably an epoch-base difference between FATX variants — confirm.
        CreationTime = littleEndian
                           ? DateHandlers.DosToDateTime(entry.creationDate, entry.creationTime).AddYears(20)
                           : DateHandlers.DosToDateTime(entry.creationTime, entry.creationDate),
        AccessTime = littleEndian
                         ? DateHandlers.DosToDateTime(entry.lastAccessDate, entry.lastAccessTime).AddYears(20)
                         : DateHandlers.DosToDateTime(entry.lastAccessTime, entry.lastAccessDate),
        LastWriteTime = littleEndian
                            ? DateHandlers.DosToDateTime(entry.lastWrittenDate, entry.lastWrittenTime).AddYears(20)
                            : DateHandlers.DosToDateTime(entry.lastWrittenTime, entry.lastWrittenDate)
    };
    // Round the block count up for any partial trailing cluster.
    if (entry.length % bytesPerCluster > 0) {
        stat.Blocks++;
    }
    if (entry.attributes.HasFlag(Attributes.Directory)) {
        stat.Attributes |= FileAttributes.Directory;
        // For directories, size comes from the cluster chain, not the entry length.
        stat.Blocks      = GetClusters(entry.firstCluster).Length;
        stat.Length      = stat.Blocks * stat.BlockSize;
    }
    if (entry.attributes.HasFlag(Attributes.ReadOnly)) {
        stat.Attributes |= FileAttributes.ReadOnly;
    }
    if (entry.attributes.HasFlag(Attributes.Hidden)) {
        stat.Attributes |= FileAttributes.Hidden;
    }
    if (entry.attributes.HasFlag(Attributes.System)) {
        stat.Attributes |= FileAttributes.System;
    }
    if (entry.attributes.HasFlag(Attributes.Archive)) {
        stat.Attributes |= FileAttributes.Archive;
    }
    return(Errno.NoError);
}
/// <summary>
/// Implements the "ls" command: opens a disc image, identifies partitions and filesystems,
/// mounts each identified filesystem (read-only) and lists its root directory. With
/// --long, also prints stat information and extended attributes per entry.
/// NOTE(review): the partition-loop and whole-device branches are near-duplicates of each
/// other; consider extracting a ListFilesystem(fs, partition) helper in a future refactor.
/// </summary>
/// <param name="options">Parsed command-line options (input file, debug/verbose flags, encoding, --long).</param>
internal static void DoLs(LsOptions options) {
    DicConsole.DebugWriteLine("Ls command", "--debug={0}", options.Debug);
    DicConsole.DebugWriteLine("Ls command", "--verbose={0}", options.Verbose);
    DicConsole.DebugWriteLine("Ls command", "--input={0}", options.InputFile);
    FiltersList filtersList = new FiltersList();
    IFilter inputFilter = filtersList.GetFilter(options.InputFile);
    Dictionary <string, string> parsedOptions = Options.Parse(options.Options);
    DicConsole.DebugWriteLine("Ls command", "Parsed options:");
    foreach (KeyValuePair <string, string> parsedOption in parsedOptions) {
        DicConsole.DebugWriteLine("Ls command", "{0} = {1}", parsedOption.Key, parsedOption.Value);
    }
    // Propagate the debug flag to the filesystem plugins via mount options.
    parsedOptions.Add("debug", options.Debug.ToString());
    if (inputFilter == null) {
        DicConsole.ErrorWriteLine("Cannot open specified file.");
        return;
    }
    Encoding encoding = null;
    if (options.EncodingName != null) {
        try {
            encoding = Claunia.Encoding.Encoding.GetEncoding(options.EncodingName);
            if (options.Verbose) {
                DicConsole.VerboseWriteLine("Using encoding for {0}.", encoding.EncodingName);
            }
        } catch (ArgumentException) {
            DicConsole.ErrorWriteLine("Specified encoding is not supported.");
            return;
        }
    }
    PluginBase plugins = new PluginBase();
    try {
        IMediaImage imageFormat = ImageFormat.Detect(inputFilter);
        if (imageFormat == null) {
            DicConsole.WriteLine("Image format not identified, not proceeding with analysis.");
            return;
        }
        if (options.Verbose) {
            DicConsole.VerboseWriteLine("Image format identified by {0} ({1}).", imageFormat.Name, imageFormat.Id);
        } else {
            DicConsole.WriteLine("Image format identified by {0}.", imageFormat.Name);
        }
        try {
            if (!imageFormat.Open(inputFilter)) {
                DicConsole.WriteLine("Unable to open image format");
                DicConsole.WriteLine("No error given");
                return;
            }
            DicConsole.DebugWriteLine("Ls command", "Correctly opened image file.");
            DicConsole.DebugWriteLine("Ls command", "Image without headers is {0} bytes.", imageFormat.Info.ImageSize);
            DicConsole.DebugWriteLine("Ls command", "Image has {0} sectors.", imageFormat.Info.Sectors);
            DicConsole.DebugWriteLine("Ls command", "Image identifies disk type as {0}.", imageFormat.Info.MediaType);
            Core.Statistics.AddMediaFormat(imageFormat.Format);
            Core.Statistics.AddMedia(imageFormat.Info.MediaType, false);
            Core.Statistics.AddFilter(inputFilter.Name);
        } catch (Exception ex) {
            DicConsole.ErrorWriteLine("Unable to open image format");
            DicConsole.ErrorWriteLine("Error: {0}", ex.Message);
            return;
        }
        List <Partition> partitions = Core.Partitions.GetAll(imageFormat);
        Core.Partitions.AddSchemesToStats(partitions);
        List <string> idPlugins;
        IReadOnlyFilesystem plugin;
        Errno error;
        if (partitions.Count == 0) {
            DicConsole.DebugWriteLine("Ls command", "No partitions found");
        } else {
            DicConsole.WriteLine("{0} partitions found.", partitions.Count);
            // Per-partition pass: identify, mount and list each filesystem.
            for (int i = 0; i < partitions.Count; i++) {
                DicConsole.WriteLine();
                DicConsole.WriteLine("Partition {0}:", partitions[i].Sequence);
                DicConsole.WriteLine("Identifying filesystem on partition");
                Core.Filesystems.Identify(imageFormat, out idPlugins, partitions[i]);
                if (idPlugins.Count == 0) {
                    DicConsole.WriteLine("Filesystem not identified");
                } else if (idPlugins.Count > 1) {
                    // Ambiguous identification: list the root dir under every candidate plugin.
                    DicConsole.WriteLine($"Identified by {idPlugins.Count} plugins");
                    foreach (string pluginName in idPlugins) {
                        if (plugins.ReadOnlyFilesystems.TryGetValue(pluginName, out plugin)) {
                            DicConsole.WriteLine($"As identified by {plugin.Name}.");
                            // Instantiate a fresh plugin instance via its parameterless constructor.
                            IReadOnlyFilesystem fs = (IReadOnlyFilesystem)plugin
                                                     .GetType()
                                                     .GetConstructor(Type.EmptyTypes)
                                                     ?.Invoke(new object[] { });
                            if (fs == null) {
                                continue;
                            }
                            error = fs.Mount(imageFormat, partitions[i], encoding, parsedOptions);
                            if (error == Errno.NoError) {
                                List <string> rootDir = new List <string>();
                                error = fs.ReadDir("/", out rootDir);
                                if (error == Errno.NoError) {
                                    foreach (string entry in rootDir) {
                                        DicConsole.WriteLine("{0}", entry);
                                    }
                                } else {
                                    // NOTE(review): format string reuses {0} twice but only one arg is
                                    // passed, so the directory is never printed — likely meant
                                    // "Error {0} reading root directory {1}" with the path as 2nd arg.
                                    DicConsole.ErrorWriteLine("Error {0} reading root directory {0}", error.ToString());
                                }
                                Core.Statistics.AddFilesystem(fs.XmlFsType.Type);
                            } else {
                                DicConsole.ErrorWriteLine("Unable to mount device, error {0}", error.ToString());
                            }
                        }
                    }
                } else {
                    // Exactly one candidate plugin.
                    plugins.ReadOnlyFilesystems.TryGetValue(idPlugins[0], out plugin);
                    if (plugin == null) {
                        continue;
                    }
                    DicConsole.WriteLine($"Identified by {plugin.Name}.");
                    IReadOnlyFilesystem fs = (IReadOnlyFilesystem)plugin
                                             .GetType().GetConstructor(Type.EmptyTypes)
                                             ?.Invoke(new object[] { });
                    if (fs == null) {
                        continue;
                    }
                    error = fs.Mount(imageFormat, partitions[i], encoding, parsedOptions);
                    if (error == Errno.NoError) {
                        List <string> rootDir = new List <string>();
                        error = fs.ReadDir("/", out rootDir);
                        if (error == Errno.NoError) {
                            foreach (string entry in rootDir) {
                                DicConsole.WriteLine("{0}", entry);
                            }
                        } else {
                            // NOTE(review): same duplicated-{0} format-string issue as above.
                            DicConsole.ErrorWriteLine("Error {0} reading root directory {0}", error.ToString());
                        }
                        Core.Statistics.AddFilesystem(fs.XmlFsType.Type);
                    } else {
                        DicConsole.ErrorWriteLine("Unable to mount device, error {0}", error.ToString());
                    }
                }
            }
        }
        // Whole-device pass: treat the full image as one partition and repeat.
        Partition wholePart = new Partition {
            Name = "Whole device", Length = imageFormat.Info.Sectors,
            Size = imageFormat.Info.Sectors * imageFormat.Info.SectorSize
        };
        Core.Filesystems.Identify(imageFormat, out idPlugins, wholePart);
        if (idPlugins.Count == 0) {
            DicConsole.WriteLine("Filesystem not identified");
        } else if (idPlugins.Count > 1) {
            DicConsole.WriteLine($"Identified by {idPlugins.Count} plugins");
            foreach (string pluginName in idPlugins) {
                if (plugins.ReadOnlyFilesystems.TryGetValue(pluginName, out plugin)) {
                    DicConsole.WriteLine($"As identified by {plugin.Name}.");
                    IReadOnlyFilesystem fs = (IReadOnlyFilesystem)plugin
                                             .GetType().GetConstructor(Type.EmptyTypes)
                                             ?.Invoke(new object[] { });
                    if (fs == null) {
                        continue;
                    }
                    error = fs.Mount(imageFormat, wholePart, encoding, parsedOptions);
                    if (error == Errno.NoError) {
                        List <string> rootDir = new List <string>();
                        error = fs.ReadDir("/", out rootDir);
                        if (error == Errno.NoError) {
                            foreach (string entry in rootDir) {
                                DicConsole.WriteLine("{0}", entry);
                            }
                        } else {
                            // NOTE(review): same duplicated-{0} format-string issue as above.
                            DicConsole.ErrorWriteLine("Error {0} reading root directory {0}", error.ToString());
                        }
                        Core.Statistics.AddFilesystem(fs.XmlFsType.Type);
                    } else {
                        DicConsole.ErrorWriteLine("Unable to mount device, error {0}", error.ToString());
                    }
                }
            }
        } else {
            plugins.ReadOnlyFilesystems.TryGetValue(idPlugins[0], out plugin);
            if (plugin != null) {
                DicConsole.WriteLine($"Identified by {plugin.Name}.");
                IReadOnlyFilesystem fs = (IReadOnlyFilesystem)plugin
                                         .GetType().GetConstructor(Type.EmptyTypes)?.Invoke(new object[] { });
                if (fs != null) {
                    error = fs.Mount(imageFormat, wholePart, encoding, parsedOptions);
                    if (error == Errno.NoError) {
                        List <string> rootDir = new List <string>();
                        error = fs.ReadDir("/", out rootDir);
                        if (error == Errno.NoError) {
                            foreach (string entry in rootDir) {
                                if (options.Long) {
                                    // Long listing: stat each entry and enumerate its xattrs.
                                    FileEntryInfo stat = new FileEntryInfo();
                                    List <string> xattrs = new List <string>();
                                    error = fs.Stat(entry, out stat);
                                    if (error == Errno.NoError) {
                                        DicConsole.WriteLine("{0}\t{1}\t{2} bytes\t{3}", stat.CreationTimeUtc, stat.Inode, stat.Length, entry);
                                        error = fs.ListXAttr(entry, out xattrs);
                                        if (error != Errno.NoError) {
                                            continue;
                                        }
                                        foreach (string xattr in xattrs) {
                                            byte[] xattrBuf = new byte[0];
                                            error = fs.GetXattr(entry, xattr, ref xattrBuf);
                                            if (error == Errno.NoError) {
                                                DicConsole.WriteLine("\t\t{0}\t{1} bytes", xattr, xattrBuf.Length);
                                            }
                                        }
                                    } else {
                                        // Stat failed: fall back to a plain name listing.
                                        DicConsole.WriteLine("{0}", entry);
                                    }
                                } else {
                                    DicConsole.WriteLine("{0}", entry);
                                }
                            }
                        } else {
                            // NOTE(review): same duplicated-{0} format-string issue as above.
                            DicConsole.ErrorWriteLine("Error {0} reading root directory {0}", error.ToString());
                        }
                        Core.Statistics.AddFilesystem(fs.XmlFsType.Type);
                    } else {
                        DicConsole.ErrorWriteLine("Unable to mount device, error {0}", error.ToString());
                    }
                }
            }
        }
    } catch (Exception ex) {
        DicConsole.ErrorWriteLine($"Error reading file: {ex.Message}");
        DicConsole.DebugWriteLine("Ls command", ex.StackTrace);
    }
    Core.Statistics.AddCommand("ls");
}
/// <summary>
/// Builds a sidecar XML node for a directory: copies the stat metadata into a
/// DirectoryType, then recurses into the directory's entries, producing child
/// DirectoryType/ContentsFileType nodes sorted by name.
/// </summary>
/// <param name="filesystem">Mounted read-only filesystem to read from.</param>
/// <param name="path">Parent path of the directory.</param>
/// <param name="filename">Name of the directory itself.</param>
/// <param name="stat">Pre-fetched stat information for the directory.</param>
/// <returns>The populated DirectoryType, or null if the directory cannot be read.</returns>
DirectoryType SidecarDirectory(IReadOnlyFilesystem filesystem, string path, string filename, FileEntryInfo stat) {
    var directory = new DirectoryType();
    // Copy each optional stat field only when present; the paired *Specified flag
    // controls whether the XML serializer emits the attribute.
    if (stat.AccessTimeUtc.HasValue) {
        directory.accessTime          = stat.AccessTimeUtc.Value;
        directory.accessTimeSpecified = true;
    }
    directory.attributes = (ulong)stat.Attributes;
    if (stat.BackupTimeUtc.HasValue) {
        directory.backupTime          = stat.BackupTimeUtc.Value;
        directory.backupTimeSpecified = true;
    }
    if (stat.CreationTimeUtc.HasValue) {
        directory.creationTime          = stat.CreationTimeUtc.Value;
        directory.creationTimeSpecified = true;
    }
    if (stat.DeviceNo.HasValue) {
        directory.deviceNumber          = stat.DeviceNo.Value;
        directory.deviceNumberSpecified = true;
    }
    directory.inode = stat.Inode;
    if (stat.LastWriteTimeUtc.HasValue) {
        directory.lastWriteTime          = stat.LastWriteTimeUtc.Value;
        directory.lastWriteTimeSpecified = true;
    }
    directory.links = stat.Links;
    directory.name  = filename;
    if (stat.GID.HasValue) {
        directory.posixGroupId          = stat.GID.Value;
        directory.posixGroupIdSpecified = true;
    }
    if (stat.Mode.HasValue) {
        directory.posixMode          = stat.Mode.Value;
        directory.posixModeSpecified = true;
    }
    if (stat.UID.HasValue) {
        directory.posixUserId          = stat.UID.Value;
        directory.posixUserIdSpecified = true;
    }
    if (stat.StatusChangeTimeUtc.HasValue) {
        directory.statusChangeTime          = stat.StatusChangeTimeUtc.Value;
        directory.statusChangeTimeSpecified = true;
    }
    // Enumerate the directory; bail out entirely if it cannot be read.
    Errno ret = filesystem.ReadDir(path + "/" + filename, out List <string> dirents);
    if (ret != Errno.NoError) {
        return(null);
    }
    List <DirectoryType>     directories = new List <DirectoryType>();
    List <ContentsFileType>  files       = new List <ContentsFileType>();
    foreach (string dirent in dirents) {
        ret = filesystem.Stat(path + "/" + filename + "/" + dirent, out FileEntryInfo entryStat);
        if (ret != Errno.NoError) {
            // Unreadable entry: log and skip rather than abort the whole directory.
            AaruConsole.DebugWriteLine("Create-Sidecar command", "Cannot stat {0}", dirent);
            continue;
        }
        if (entryStat.Attributes.HasFlag(FileAttributes.Directory)) {
            // Recurse into subdirectories.
            directories.Add(SidecarDirectory(filesystem, path + "/" + filename, dirent, entryStat));
            continue;
        }
        files.Add(SidecarFile(filesystem, path + "/" + filename, dirent, entryStat));
    }
    // Emit children sorted by name for stable, diff-friendly sidecar output.
    if (files.Count > 0) {
        directory.File = files.OrderBy(f => f.name).ToArray();
    }
    if (directories.Count > 0) {
        directory.Directory = directories.OrderBy(d => d.name).ToArray();
    }
    return(directory);
}
/// <summary>
/// Builds a sidecar XML node for a file: copies the stat metadata into a ContentsFileType,
/// hashes the file contents in 1 MiB chunks, and records checksums for every extended
/// attribute found on the file.
/// </summary>
/// <param name="filesystem">Mounted read-only filesystem to read from.</param>
/// <param name="path">Parent path of the file.</param>
/// <param name="filename">Name of the file itself.</param>
/// <param name="stat">Pre-fetched stat information for the file.</param>
/// <returns>The populated ContentsFileType (possibly partial if aborted or xattrs fail).</returns>
ContentsFileType SidecarFile(IReadOnlyFilesystem filesystem, string path, string filename, FileEntryInfo stat) {
    var file          = new ContentsFileType();
    var fileChkWorker = new Checksum();
    // Copy each optional stat field only when present; the paired *Specified flag
    // controls whether the XML serializer emits the attribute.
    if (stat.AccessTimeUtc.HasValue) {
        file.accessTime          = stat.AccessTimeUtc.Value;
        file.accessTimeSpecified = true;
    }
    file.attributes = (ulong)stat.Attributes;
    if (stat.BackupTimeUtc.HasValue) {
        file.backupTime          = stat.BackupTimeUtc.Value;
        file.backupTimeSpecified = true;
    }
    if (stat.CreationTimeUtc.HasValue) {
        file.creationTime          = stat.CreationTimeUtc.Value;
        file.creationTimeSpecified = true;
    }
    if (stat.DeviceNo.HasValue) {
        file.deviceNumber          = stat.DeviceNo.Value;
        file.deviceNumberSpecified = true;
    }
    file.inode = stat.Inode;
    if (stat.LastWriteTimeUtc.HasValue) {
        file.lastWriteTime          = stat.LastWriteTimeUtc.Value;
        file.lastWriteTimeSpecified = true;
    }
    file.length = (ulong)stat.Length;
    file.links  = stat.Links;
    file.name   = filename;
    if (stat.GID.HasValue) {
        file.posixGroupId          = stat.GID.Value;
        file.posixGroupIdSpecified = true;
    }
    if (stat.Mode.HasValue) {
        file.posixMode          = stat.Mode.Value;
        file.posixModeSpecified = true;
    }
    if (stat.UID.HasValue) {
        file.posixUserId          = stat.UID.Value;
        file.posixUserIdSpecified = true;
    }
    if (stat.StatusChangeTimeUtc.HasValue) {
        file.statusChangeTime          = stat.StatusChangeTimeUtc.Value;
        file.statusChangeTimeSpecified = true;
    }
    byte[] data = new byte[0];
    if (stat.Length > 0) {
        // Hash the file in 1 MiB (1048576-byte) chunks, then a final partial chunk.
        long position = 0;
        // NOTE(review): the status message prints "(unknown)" instead of the filename —
        // presumably intended to be {filename}; confirm against the original source.
        UpdateStatus($"Hashing file {path}/(unknown)...");
        InitProgress2();
        while (position < stat.Length - 1048576) {
            if (_aborted) {
                return(file); // user abort: return the partially-filled node
            }
            data = new byte[1048576];
            filesystem.Read(path + "/" + filename, position, 1048576, ref data);
            UpdateProgress2("Hashing file byte {0} of {1}", position, stat.Length);
            fileChkWorker.Update(data);
            position += 1048576;
        }
        // Final chunk: whatever remains past the last full 1 MiB boundary.
        data = new byte[stat.Length - position];
        filesystem.Read(path + "/" + filename, position, stat.Length - position, ref data);
        UpdateProgress2("Hashing file byte {0} of {1}", position, stat.Length);
        fileChkWorker.Update(data);
        EndProgress();
        file.Checksums = fileChkWorker.End().ToArray();
    } else {
        // Zero-length file: use the precomputed checksums of empty input.
        file.Checksums = _emptyChecksums;
    }
    // Hash each extended attribute as well; a listing failure just omits them.
    Errno ret = filesystem.ListXAttr(path + "/" + filename, out List <string> xattrs);
    if (ret != Errno.NoError) {
        return(file);
    }
    List <ExtendedAttributeType> xattrTypes = new List <ExtendedAttributeType>();
    foreach (string xattr in xattrs) {
        ret = filesystem.GetXattr(path + "/" + filename, xattr, ref data);
        if (ret != Errno.NoError) {
            continue;
        }
        var xattrChkWorker = new Checksum();
        xattrChkWorker.Update(data);
        xattrTypes.Add(new ExtendedAttributeType {
            Checksums = xattrChkWorker.End().ToArray(),
            length    = (ulong)data.Length,
            name      = xattr
        });
    }
    if (xattrTypes.Count > 0) {
        file.ExtendedAttributes = xattrTypes.OrderBy(x => x.name).ToArray();
    }
    return(file);
}
/// <summary>
/// Returns stat information for a file on an Apple MFS volume. In debug mode the
/// synthetic entries "$" (directory), "$Boot", "$Bitmap" and "$MDB" expose on-disk
/// structures as pseudo-files.
/// </summary>
/// <param name="path">Path to the file; MFS is flat, so only a single path element is supported.</param>
/// <param name="stat">Receives the entry information; null on error.</param>
/// <returns>Errno.NoError on success, otherwise the error code.</returns>
public Errno Stat(string path, out FileEntryInfo stat) {
    stat = null;
    if (!mounted) {
        return(Errno.AccessDenied);
    }
    // MFS has no directories: reject any multi-component path.
    string[] pathElements = path.Split(new[] {
        '/'
    }, StringSplitOptions.RemoveEmptyEntries);
    if (pathElements.Length != 1) {
        return(Errno.NotSupported);
    }
    path = pathElements[0];
    if (debug) {
        if (string.Compare(path, "$", StringComparison.InvariantCulture)       == 0 ||
            string.Compare(path, "$Boot", StringComparison.InvariantCulture)   == 0 ||
            string.Compare(path, "$Bitmap", StringComparison.InvariantCulture) == 0 ||
            string.Compare(path, "$MDB", StringComparison.InvariantCulture)    == 0) {
            stat = new FileEntryInfo {
                BlockSize  = device.Info.SectorSize,
                Inode      = 0,
                Links      = 1,
                Attributes = FileAttributes.System
            };
            // NOTE(review): "len / bs + len % bs" adds the remainder itself rather than
            // rounding up by one; presumably intended "+ (len % bs > 0 ? 1 : 0)" — confirm.
            if (string.Compare(path, "$", StringComparison.InvariantCulture) == 0) {
                stat.Blocks = directoryBlocks.Length / stat.BlockSize + directoryBlocks.Length % stat.BlockSize;
                stat.Length = directoryBlocks.Length;
            } else if (string.Compare(path, "$Bitmap", StringComparison.InvariantCulture) == 0) {
                stat.Blocks = blockMapBytes.Length / stat.BlockSize + blockMapBytes.Length % stat.BlockSize;
                stat.Length = blockMapBytes.Length;
            } else if (string.Compare(path, "$Boot", StringComparison.InvariantCulture) == 0 && bootBlocks != null) {
                stat.Blocks = bootBlocks.Length / stat.BlockSize + bootBlocks.Length % stat.BlockSize;
                stat.Length = bootBlocks.Length;
            } else if (string.Compare(path, "$MDB", StringComparison.InvariantCulture) == 0) {
                stat.Blocks = mdbBlocks.Length / stat.BlockSize + mdbBlocks.Length % stat.BlockSize;
                stat.Length = mdbBlocks.Length;
            } else {
                // "$Boot" with no boot blocks loaded falls through to here.
                return(Errno.InvalidArgument);
            }
            return(Errno.NoError);
        }
    }
    // Regular files: resolve the name (case-insensitively) to a file id, then its entry.
    if (!filenameToId.TryGetValue(path.ToLowerInvariant(), out uint fileId)) {
        return(Errno.NoSuchFile);
    }
    if (!idToEntry.TryGetValue(fileId, out MFS_FileEntry entry)) {
        return(Errno.NoSuchFile);
    }
    Errno error = GetAttributes(path, out FileAttributes attr);
    if (error != Errno.NoError) {
        return(error);
    }
    stat = new FileEntryInfo {
        Attributes = attr,
        Blocks     = entry.flLgLen / volMDB.drAlBlkSiz,   // logical length in allocation blocks
        BlockSize  = volMDB.drAlBlkSiz,
        CreationTime  = DateHandlers.MacToDateTime(entry.flCrDat),
        Inode         = entry.flFlNum,
        LastWriteTime = DateHandlers.MacToDateTime(entry.flMdDat),
        Length        = entry.flPyLen,                     // physical (allocated) length
        Links         = 1
    };
    return(Errno.NoError);
}
/// <summary>
/// Downloads the given remote entry to a local file, delegating to the
/// path-based <c>DownloadFile</c> overload via the entry's <c>FullName</c>.
/// </summary>
/// <param name="localFile">Destination path on the local machine.</param>
/// <param name="remoteFile">Remote entry identifying the file to download.</param>
/// <returns>The stream returned by the path-based overload.</returns>
public FileStream DownloadFile(string localFile, FileEntryInfo remoteFile) =>
    this.DownloadFile(localFile, remoteFile.FullName);
/// <summary>
/// Deletes the given remote entry, delegating to the path-based
/// <c>DeleteFile</c> overload via the entry's <c>FullName</c>.
/// </summary>
/// <param name="remoteFile">Remote entry identifying the file to delete.</param>
public void DeleteFile(FileEntryInfo remoteFile) => this.DeleteFile(remoteFile.FullName);
/// <summary>
/// Opens a download stream for the given remote entry, delegating to the
/// path-based <c>DownloadStream</c> overload via the entry's <c>FullName</c>.
/// </summary>
/// <param name="remoteFile">Remote entry identifying the file to stream.</param>
/// <returns>The stream returned by the path-based overload.</returns>
public Stream DownloadStream(FileEntryInfo remoteFile) => this.DownloadStream(remoteFile.FullName);
/// <summary>
/// Returns stat information for a file on this (flat) filesystem. In debug mode the
/// synthetic entries "$" (catalog), "$Boot" and "$Vtoc" expose on-disk structures.
/// </summary>
/// <param name="path">Path to the file; only a single path element is supported.</param>
/// <param name="stat">Receives the entry information; null on error.</param>
/// <returns>Errno.NoError on success, otherwise the error code.</returns>
public Errno Stat(string path, out FileEntryInfo stat) {
    stat = null;
    if (!mounted) {
        return(Errno.AccessDenied);
    }
    // Flat filesystem: reject multi-component paths.
    string[] pathElements = path.Split(new[] {
        '/'
    }, StringSplitOptions.RemoveEmptyEntries);
    if (pathElements.Length != 1) {
        return(Errno.NotSupported);
    }
    string filename = pathElements[0].ToUpperInvariant();
    if (filename.Length > 30) {
        return(Errno.NameTooLong);
    }
    // NOTE(review): this lookup runs BEFORE the debug "$..." branch below, so the
    // synthetic entries are only reachable if they are also present in fileCache
    // (presumably inserted at mount time when debug is set) — confirm.
    if (!fileCache.ContainsKey(filename)) {
        return(Errno.NoSuchFile);
    }
    stat = new FileEntryInfo();
    fileSizeCache.TryGetValue(filename, out int filesize);
    // NOTE(review): the Errno returned by GetAttributes is discarded here — confirm intent.
    GetAttributes(path, out FileAttributes attrs);
    if (debug && (string.Compare(path, "$", StringComparison.InvariantCulture)     == 0 ||
                  string.Compare(path, "$Boot", StringComparison.InvariantCulture) == 0 ||
                  string.Compare(path, "$Vtoc", StringComparison.InvariantCulture) == 0)) {
        // Debug pseudo-files: sizes come from the raw on-disk structures.
        if (string.Compare(path, "$", StringComparison.InvariantCulture) == 0) {
            stat.Length = catalogBlocks.Length;
        } else if (string.Compare(path, "$Boot", StringComparison.InvariantCulture) == 0) {
            stat.Length = bootBlocks.Length;
        } else if (string.Compare(path, "$Vtoc", StringComparison.InvariantCulture) == 0) {
            stat.Length = vtocBlocks.Length;
        }
        stat.Blocks = stat.Length / vtoc.bytesPerSector;
    } else {
        stat.Length = filesize;
        stat.Blocks = stat.Length / vtoc.bytesPerSector;
    }
    stat.Attributes = attrs;
    stat.BlockSize  = vtoc.bytesPerSector;
    stat.Links      = 1;
    return(Errno.NoError);
}
/// <summary>
/// Renames the given remote entry, delegating to the path-based
/// <c>RenameFile</c> overload via the entry's <c>FullName</c>.
/// </summary>
/// <param name="remoteFile">Remote entry identifying the file to rename.</param>
/// <param name="newName">New name for the remote file.</param>
public void RenameFile(FileEntryInfo remoteFile, string newName) =>
    this.RenameFile(remoteFile.FullName, newName);
/// <summary>
/// Resolves <paramref name="path"/> to a directory entry and fills in a
/// <see cref="FileEntryInfo"/> from every metadata source present on the entry:
/// base ISO fields, then (each overriding the previous where they overlap)
/// CD-ROM XA, POSIX (RRIP) attributes, Amiga protection bits, CD-i system area,
/// and finally the ISO9660 extended attribute record.
/// </summary>
/// <param name="path">Path of the file to stat.</param>
/// <param name="stat">Receives the stat information, or <c>null</c> on error.</param>
/// <returns><see cref="Errno.NoError"/> on success, otherwise the failure code.</returns>
public Errno Stat(string path, out FileEntryInfo stat)
{
    stat = null;

    if (!_mounted)
    {
        return(Errno.AccessDenied);
    }

    Errno err = GetFileEntry(path, out DecodedDirectoryEntry entry);

    if (err != Errno.NoError)
    {
        return(err);
    }

    // Base metadata from the directory record; 2048 is the logical block size used here.
    stat = new FileEntryInfo
    {
        Attributes       = new FileAttributes(),
        Blocks           = (long)(entry.Size / 2048), // TODO: XA
        BlockSize        = 2048,
        Length           = (long)entry.Size,
        Links            = 1,
        LastWriteTimeUtc = entry.Timestamp
    };

    // Use the first extent's start block as a provisional inode number (may be
    // overridden by the XA or POSIX file number below).
    if (entry.Extents?.Count > 0)
    {
        stat.Inode = entry.Extents[0].extent;
    }

    // Round the block count up for a partial final block.
    if (entry.Size % 2048 > 0)
    {
        stat.Blocks++;
    }

    if (entry.Flags.HasFlag(FileFlags.Directory))
    {
        stat.Attributes |= FileAttributes.Directory;
    }

    if (entry.Flags.HasFlag(FileFlags.Hidden))
    {
        stat.Attributes |= FileAttributes.Hidden;
    }

    // Map Apple Finder flags (when Apple extensions are present) onto attributes.
    if (entry.FinderInfo?.fdFlags.HasFlag(AppleCommon.FinderFlags.kIsAlias) == true)
    {
        stat.Attributes |= FileAttributes.Alias;
    }

    if (entry.FinderInfo?.fdFlags.HasFlag(AppleCommon.FinderFlags.kIsInvisible) == true)
    {
        stat.Attributes |= FileAttributes.Hidden;
    }

    if (entry.FinderInfo?.fdFlags.HasFlag(AppleCommon.FinderFlags.kHasBeenInited) == true)
    {
        stat.Attributes |= FileAttributes.HasBeenInited;
    }

    if (entry.FinderInfo?.fdFlags.HasFlag(AppleCommon.FinderFlags.kHasCustomIcon) == true)
    {
        stat.Attributes |= FileAttributes.HasCustomIcon;
    }

    if (entry.FinderInfo?.fdFlags.HasFlag(AppleCommon.FinderFlags.kHasNoINITs) == true)
    {
        stat.Attributes |= FileAttributes.HasNoINITs;
    }

    if (entry.FinderInfo?.fdFlags.HasFlag(AppleCommon.FinderFlags.kIsOnDesk) == true)
    {
        stat.Attributes |= FileAttributes.IsOnDesk;
    }

    if (entry.FinderInfo?.fdFlags.HasFlag(AppleCommon.FinderFlags.kIsShared) == true)
    {
        stat.Attributes |= FileAttributes.Shared;
    }

    if (entry.FinderInfo?.fdFlags.HasFlag(AppleCommon.FinderFlags.kIsStationery) == true)
    {
        stat.Attributes |= FileAttributes.Stationery;
    }

    if (entry.FinderInfo?.fdFlags.HasFlag(AppleCommon.FinderFlags.kHasBundle) == true)
    {
        stat.Attributes |= FileAttributes.Bundle;
    }

    if (entry.AppleIcon != null)
    {
        stat.Attributes |= FileAttributes.HasCustomIcon;
    }

    // CD-ROM XA attributes: permission bits OR'd in POSIX octal positions
    // (8 = group x, 32 = group r, 64 = owner x, 256 = owner r, 1/4 = other/system x,r).
    if (entry.XA != null)
    {
        if (entry.XA.Value.attributes.HasFlag(XaAttributes.GroupExecute))
        {
            stat.Mode |= 8;
        }

        if (entry.XA.Value.attributes.HasFlag(XaAttributes.GroupRead))
        {
            stat.Mode |= 32;
        }

        if (entry.XA.Value.attributes.HasFlag(XaAttributes.OwnerExecute))
        {
            stat.Mode |= 64;
        }

        if (entry.XA.Value.attributes.HasFlag(XaAttributes.OwnerRead))
        {
            stat.Mode |= 256;
        }

        if (entry.XA.Value.attributes.HasFlag(XaAttributes.SystemExecute))
        {
            stat.Mode |= 1;
        }

        if (entry.XA.Value.attributes.HasFlag(XaAttributes.SystemRead))
        {
            stat.Mode |= 4;
        }

        stat.UID   = entry.XA.Value.user;
        stat.GID   = entry.XA.Value.group;
        stat.Inode = entry.XA.Value.filenumber;
    }

    // POSIX (Rock Ridge) attributes replace the mode entirely (masked to the
    // permission bits) and override UID/GID/links/inode set above.
    if (entry.PosixAttributes != null)
    {
        stat.Mode = (uint?)entry.PosixAttributes.Value.st_mode & 0x0FFF;

        if (entry.PosixAttributes.Value.st_mode.HasFlag(PosixMode.Block))
        {
            stat.Attributes |= FileAttributes.BlockDevice;
        }

        if (entry.PosixAttributes.Value.st_mode.HasFlag(PosixMode.Character))
        {
            stat.Attributes |= FileAttributes.CharDevice;
        }

        if (entry.PosixAttributes.Value.st_mode.HasFlag(PosixMode.Pipe))
        {
            stat.Attributes |= FileAttributes.Pipe;
        }

        if (entry.PosixAttributes.Value.st_mode.HasFlag(PosixMode.Socket))
        {
            stat.Attributes |= FileAttributes.Socket;
        }

        if (entry.PosixAttributes.Value.st_mode.HasFlag(PosixMode.Symlink))
        {
            stat.Attributes |= FileAttributes.Symlink;
        }

        stat.Links = entry.PosixAttributes.Value.st_nlink;
        stat.UID   = entry.PosixAttributes.Value.st_uid;
        stat.GID   = entry.PosixAttributes.Value.st_gid;
        stat.Inode = entry.PosixAttributes.Value.st_ino;
    }
    else if (entry.PosixAttributesOld != null)
    {
        // Older RRIP form: same fields except it carries no inode number.
        stat.Mode = (uint?)entry.PosixAttributesOld.Value.st_mode & 0x0FFF;

        if (entry.PosixAttributesOld.Value.st_mode.HasFlag(PosixMode.Block))
        {
            stat.Attributes |= FileAttributes.BlockDevice;
        }

        if (entry.PosixAttributesOld.Value.st_mode.HasFlag(PosixMode.Character))
        {
            stat.Attributes |= FileAttributes.CharDevice;
        }

        if (entry.PosixAttributesOld.Value.st_mode.HasFlag(PosixMode.Pipe))
        {
            stat.Attributes |= FileAttributes.Pipe;
        }

        if (entry.PosixAttributesOld.Value.st_mode.HasFlag(PosixMode.Socket))
        {
            stat.Attributes |= FileAttributes.Socket;
        }

        if (entry.PosixAttributesOld.Value.st_mode.HasFlag(PosixMode.Symlink))
        {
            stat.Attributes |= FileAttributes.Symlink;
        }

        stat.Links = entry.PosixAttributesOld.Value.st_nlink;
        stat.UID   = entry.PosixAttributesOld.Value.st_uid;
        stat.GID   = entry.PosixAttributesOld.Value.st_gid;
    }

    // Amiga protection bits are OR'd into whatever mode is present at this point.
    if (entry.AmigaProtection != null)
    {
        if (entry.AmigaProtection.Value.Multiuser.HasFlag(AmigaMultiuser.GroupExec))
        {
            stat.Mode |= 8;
        }

        if (entry.AmigaProtection.Value.Multiuser.HasFlag(AmigaMultiuser.GroupRead))
        {
            stat.Mode |= 32;
        }

        if (entry.AmigaProtection.Value.Multiuser.HasFlag(AmigaMultiuser.GroupWrite))
        {
            stat.Mode |= 16;
        }

        if (entry.AmigaProtection.Value.Multiuser.HasFlag(AmigaMultiuser.OtherExec))
        {
            stat.Mode |= 1;
        }

        if (entry.AmigaProtection.Value.Multiuser.HasFlag(AmigaMultiuser.OtherRead))
        {
            stat.Mode |= 4;
        }

        if (entry.AmigaProtection.Value.Multiuser.HasFlag(AmigaMultiuser.OtherWrite))
        {
            stat.Mode |= 2;
        }

        if (entry.AmigaProtection.Value.Protection.HasFlag(AmigaAttributes.OwnerExec))
        {
            stat.Mode |= 64;
        }

        if (entry.AmigaProtection.Value.Protection.HasFlag(AmigaAttributes.OwnerRead))
        {
            stat.Mode |= 256;
        }

        if (entry.AmigaProtection.Value.Protection.HasFlag(AmigaAttributes.OwnerWrite))
        {
            stat.Mode |= 128;
        }

        if (entry.AmigaProtection.Value.Protection.HasFlag(AmigaAttributes.Archive))
        {
            stat.Attributes |= FileAttributes.Archive;
        }
    }

    // RRIP device number: high/low 32-bit halves combined into a 64-bit dev_t.
    if (entry.PosixDeviceNumber != null)
    {
        stat.DeviceNo = ((ulong)entry.PosixDeviceNumber.Value.dev_t_high << 32) +
                        entry.PosixDeviceNumber.Value.dev_t_low;
    }

    // RRIP timestamps override the directory-record timestamp where present.
    if (entry.RripModify != null)
    {
        stat.LastWriteTimeUtc = DecodeIsoDateTime(entry.RripModify);
    }

    if (entry.RripAccess != null)
    {
        stat.AccessTimeUtc = DecodeIsoDateTime(entry.RripAccess);
    }

    if (entry.RripAttributeChange != null)
    {
        stat.StatusChangeTimeUtc = DecodeIsoDateTime(entry.RripAttributeChange);
    }

    if (entry.RripBackup != null)
    {
        stat.BackupTimeUtc = DecodeIsoDateTime(entry.RripBackup);
    }

    if (entry.SymbolicLink != null)
    {
        stat.Attributes |= FileAttributes.Symlink;
    }

    // Extended attribute records are not read for CD-i or High Sierra volumes,
    // or when the entry has none.
    if (entry.XattrLength == 0 || _cdi || _highSierra)
    {
        return(Errno.NoError);
    }

    // CD-i system area: ownership plus permission bits in POSIX octal positions.
    if (entry.CdiSystemArea != null)
    {
        stat.UID = entry.CdiSystemArea.Value.owner;
        stat.GID = entry.CdiSystemArea.Value.group;

        if (entry.CdiSystemArea.Value.attributes.HasFlag(CdiAttributes.GroupExecute))
        {
            stat.Mode |= 8;
        }

        if (entry.CdiSystemArea.Value.attributes.HasFlag(CdiAttributes.GroupRead))
        {
            stat.Mode |= 32;
        }

        if (entry.CdiSystemArea.Value.attributes.HasFlag(CdiAttributes.OtherExecute))
        {
            stat.Mode |= 1;
        }

        if (entry.CdiSystemArea.Value.attributes.HasFlag(CdiAttributes.OtherRead))
        {
            stat.Mode |= 4;
        }

        if (entry.CdiSystemArea.Value.attributes.HasFlag(CdiAttributes.OwnerExecute))
        {
            stat.Mode |= 64;
        }

        if (entry.CdiSystemArea.Value.attributes.HasFlag(CdiAttributes.OwnerRead))
        {
            stat.Mode |= 256;
        }
    }

    // NOTE(review): unlike the Inode assignment earlier, this access to
    // entry.Extents[0] is not guarded by entry.Extents?.Count > 0 — looks like
    // it would throw if XattrLength > 0 but Extents is null/empty; confirm
    // whether GetFileEntry guarantees extents here.
    byte[] ea = ReadSingleExtent(entry.XattrLength * _blockSize, entry.Extents[0].extent);

    ExtendedAttributeRecord ear = Marshal.ByteArrayToStructureLittleEndian <ExtendedAttributeRecord>(ea);

    // The extended attribute record is authoritative: it replaces ownership and
    // rebuilds the mode from scratch.
    stat.UID = ear.owner;
    stat.GID = ear.group;

    stat.Mode = 0;

    if (ear.permissions.HasFlag(Permissions.GroupExecute))
    {
        stat.Mode |= 8;
    }

    if (ear.permissions.HasFlag(Permissions.GroupRead))
    {
        stat.Mode |= 32;
    }

    if (ear.permissions.HasFlag(Permissions.OwnerExecute))
    {
        stat.Mode |= 64;
    }

    if (ear.permissions.HasFlag(Permissions.OwnerRead))
    {
        stat.Mode |= 256;
    }

    if (ear.permissions.HasFlag(Permissions.OtherExecute))
    {
        stat.Mode |= 1;
    }

    if (ear.permissions.HasFlag(Permissions.OtherRead))
    {
        stat.Mode |= 4;
    }

    stat.CreationTimeUtc  = DateHandlers.Iso9660ToDateTime(ear.creation_date);
    stat.LastWriteTimeUtc = DateHandlers.Iso9660ToDateTime(ear.modification_date);

    return(Errno.NoError);
}