// Entry point: pushes a sample file to the configured GitHub repository.
static void Main(string[] args)
{
    // See README.md for setting up GHPL_TOKEN environment variable before running...
    var token = Environment.GetEnvironmentVariable(TOKEN_KEY, EnvironmentVariableTarget.User);
    // Owner and repo come from App.config rather than the environment.
    var owner = ConfigurationManager.AppSettings[OWNER_KEY];
    var repo = ConfigurationManager.AppSettings[REPO_KEY];

    PrintHeader(token, owner, repo);
    Console.WriteLine();
    Console.WriteLine("Ensuring current version of file is in the repo...");

    var service = new ContentService(token);
    var file = new DiskFile("Files/content_file.gif");
    var target = new FileTarget(owner, repo, file.Name);

    service.PushFile(file, target, "pushing file via GitHubPushLib");

    Console.WriteLine("Finished!");
    // Keep the console window open until a key is pressed.
    Console.Read();
}
/// <summary>
/// PushFile must reject an empty commit message with ArgumentNullException.
/// </summary>
public void RequiresMessage()
{
    Assert.Throws<ArgumentNullException>(() =>
    {
        var file = new DiskFile("Resources/content_file.gif");
        var target = new FileTarget("owner", "repo", "path");

        this._subject.PushFile(file, target, "");
    });
}
/// <summary>
/// Records an occurrence of <paramref name="df"/> at <paramref name="position"/>,
/// creating the position list on first sight of the file.
/// </summary>
/// <param name="df">The file the word occurrence belongs to.</param>
/// <param name="position">Position of the occurrence within that file.</param>
public void Add(DiskFile df, int position)
{
    // Single dictionary lookup via TryGetValue instead of ContainsKey + indexer.
    if (this.WordLocationMap.TryGetValue(df, out List<int> positions))
    {
        positions.Add(position);
    }
    else
    {
        this.WordLocationMap.Add(df, new List<int> { position });
    }
}
/// <summary>
/// Adds a file to the file collection of the folder node identified by
/// <paramref name="folderDBPath"/>.
/// </summary>
/// <param name="folderDBPath">Database path key of the target folder node.</param>
/// <param name="file">The file entity to attach.</param>
/// <returns>
/// Number of rows affected by SaveChanges, or 0 when the arguments are
/// invalid or no folder matches the path.
/// </returns>
public int AddFileOfFolderDB(String folderDBPath, DiskFile file)
{
    // Guard: nothing to do without a path and a file.
    if (string.IsNullOrEmpty(folderDBPath) || file == null)
    {
        return 0;
    }

    using (InfocenterEntities context = new InfocenterEntities(DALConfig.ConnectString))
    {
        FolderDB folder = context.FolderDBs.FirstOrDefault(p => p.Path == folderDBPath);
        if (folder == null)
        {
            return 0;
        }

        folder.DiskFiles.Add(file);
        return context.SaveChanges();
    }
}
/// <summary>
/// Serializes the packet header followed by the supplied payload objects into
/// a memory buffer, then writes the buffer to <paramref name="diskfile"/> at
/// <paramref name="offset"/>.
/// </summary>
/// <param name="diskfile">Destination file.</param>
/// <param name="offset">Byte offset at which to write the packet.</param>
/// <param name="objects">Payload values; see payload handling note below.</param>
/// <returns>Result of the underlying DiskFile.Write call.</returns>
protected bool WritePacket(DiskFile diskfile, ulong offset, params object[] objects)
{
    using (MemoryStream ms = new MemoryStream())
    using (BinaryWriter bw = new BinaryWriter(ms))
    {
        // PacketHeader section
        bw.Write(header.magic);
        bw.Write(header.length);
        bw.Write(header.hash);
        bw.Write(header.setid);
        bw.Write(header.type);

        // Packet section. A byte[] is written whole as one payload; any other
        // array is unpacked and its elements written individually. The type
        // test uses an "is" pattern instead of the original culture-sensitive
        // ToLower() comparison of the type name string.
        foreach (object obj in objects)
        {
            if (!obj.GetType().IsArray || obj is byte[])
            {
                WriteObject(bw, obj);
            }
            else
            {
                foreach (object innerObj in (object[])obj)
                {
                    WriteObject(bw, innerObj);
                }
            }
        }

        byte[] buffer = ms.ToArray();
        return diskfile.Write(offset, buffer, (uint)buffer.Length);
    }
}
/// <summary>
/// DiskFile.WriteAllAsync must replace (not append to) existing file content.
/// </summary>
public async Task OverwriteSuccess()
{
    string tempFileName = Path.GetTempFileName();
    try
    {
        // First write: longer content.
        string written = "edge hub content";
        await DiskFile.WriteAllAsync(tempFileName, written);
        string content = await DiskFile.ReadAllAsync(tempFileName);
        // Assert.Equal replaces the original Assert.True(a == b) pairs:
        // it subsumes the length check and reports both values on failure.
        Assert.Equal(written, content);

        // Second write: shorter content must fully overwrite the first.
        written = "edge hub";
        await DiskFile.WriteAllAsync(tempFileName, written);
        content = await DiskFile.ReadAllAsync(tempFileName);
        Assert.Equal(written, content);
    }
    finally
    {
        // Clean up even when an assertion fails (the original leaked the
        // temp file in that case).
        File.Delete(tempFileName);
    }
}
/// <summary>
/// Interactively prompts for GitHub credentials and pushes the named file.
/// </summary>
/// <param name="fileName">Path of the file to push.</param>
public static void uploadFileToGit(string fileName)
{
    // NOTE(review): the credential-prompt/push section of this method was
    // redacted in source control (literal "******" residue made the original
    // uncompilable). Reconstructed below — verify against repository history.
    Console.Write("Github Username:");
    string username = Console.ReadLine();
    Console.Write("Github Password:");
    string password = Console.ReadLine();

    try
    {
        var service = new ContentService(username, password);
        var file = new DiskFile(fileName);
        var target = new FileTarget(username, username, file.Name);

        service.PushFile(file, target, "pushing file via GitHubPushLib");
    }
    catch (Exception e)
    {
        Console.WriteLine(e.ToString());
    }
}
// Reads every metrics file in this.directory (in filename order), deserializes
// each into Metric[], and flattens the results into one sequence. Files that
// were read successfully are queued in filesToDelete for later cleanup.
public Task<IEnumerable<Metric>> GetAllMetricsAsync()
{
    return (Directory.GetFiles(this.directory)
        .OrderBy(filename => filename)
        .SelectManyAsync<string, Metric>(async filename =>
        {
            Metric[] fileMetrics;
            try
            {
                string rawMetrics = await DiskFile.ReadAllAsync(filename);
                // Null result (e.g. empty file) degrades to an empty array.
                fileMetrics = JsonConvert.DeserializeObject<Metric[]>(rawMetrics) ?? new Metric[0];
                this.filesToDelete.Add(filename);
            }
            catch
            {
                // Best-effort: an unreadable or corrupt file contributes no
                // metrics and is NOT queued for deletion.
                // NOTE(review): this bare catch also hides unexpected I/O
                // errors — consider logging before ignoring.
                fileMetrics = new Metric[0];
            }

            return fileMetrics;
        }));
}
/// <summary>
/// When the underlying source returns an empty config, the backup file must
/// keep the last good (non-empty) config's JSON.
/// </summary>
// Changed "async void" to "async Task": async-void test methods make failures
// unobservable to the xUnit runner.
public async Task FileBackupDoesNotHappenIfConfigSourceEmpty()
{
    if (File.Exists(this.tempFileName))
    {
        File.Delete(this.tempFileName);
    }

    // Arrange
    var underlying = new Mock<IConfigSource>();
    underlying.SetupSequence(cs => cs.GetDeploymentConfigInfoAsync())
        .ReturnsAsync(ValidConfigInfo1)
        .ReturnsAsync(DeploymentConfigInfo.Empty);

    ISerde<DeploymentConfigInfo> serde = this.GetSerde();

    // Act
    using (IConfigSource configSource = new FileBackupConfigSource(this.tempFileName, underlying.Object, serde, NullEncryptionProvider.Instance))
    {
        // this call should fetch the config properly
        DeploymentConfigInfo config1 = await configSource.GetDeploymentConfigInfoAsync();
        Assert.NotNull(config1);
        Assert.Equal(0, config1.Version);

        // this should cause the version with the exception to be returned
        DeploymentConfigInfo config2 = await configSource.GetDeploymentConfigInfoAsync();

        // Assert
        Assert.NotNull(config2);
        Assert.Equal(0, config2.Version);
        Assert.Equal(config2.DeploymentConfig.Modules, config1.DeploymentConfig.Modules);

        // this should still be the JSON from the first config - config1
        string backupJson = await DiskFile.ReadAllAsync(this.tempFileName);
        string returnedJson = serde.Serialize(config1);
        Assert.True(string.Equals(backupJson, returnedJson, StringComparison.OrdinalIgnoreCase));
    }
}
/// <summary>
/// Loads the passive skill graph from disk, parses it with the stats/modifier
/// tables, and prints timing plus group/node counts.
/// </summary>
private static void PsgExtract()
{
    var timer = Stopwatch.StartNew();
    Console.WriteLine("Reading PassiveSkillGraph.psg...");

    // Input files and the dat-file index they are resolved against.
    var psgFile = new DiskFile(@"C:\ggpk3\Metadata\PassiveSkillGraph.psg");
    var diskDirectory = new DiskDirectory(@"C:\ggpk3\Data\");
    var datIndex = new DatFileIndex(diskDirectory, DetSpecificationIndex.Default);

    // Stats must be loaded first; modifiers and the tree loader depend on them.
    var stats = new StatsDatLoader().Load(datIndex);
    var mods = new ModifiersDatLoader(stats).Load(datIndex);
    var passiveTree = new SkillTreeLoader(stats, datIndex);
    var psg = new PsgFile(psgFile);
    var result = passiveTree.Load(psg);

    timer.Stop();
    Console.WriteLine($"Parsed Passive Skill Graph in {timer.ElapsedMilliseconds}ms.\r\n");
    Console.WriteLine($"{psg.Groups.Count} groups, with {psg.Groups.Sum(c => c.Count)} nodes in total");
}
/// <summary>
/// Opens the named zip file and feeds each archive entry whose name matches a
/// registered stream into that stream's Load method.
/// </summary>
/// <param name="fileName">Path of the zip file to load.</param>
/// <returns>False when the file does not exist; true otherwise.</returns>
public bool LoadData(string fileName)
{
    DiskFile zipFile = new DiskFile(fileName);
    if (!zipFile.Exists)
    {
        return false;
    }

    ZipArchive zip = new ZipArchive(zipFile);
    AbstractFile[] files = zip.GetFiles(false, null);
    foreach (AbstractFile file in files)
    {
        // Single lookup (TryGetValue) instead of ContainsKey + indexer.
        if (mStreams.TryGetValue(file.Name, out var dataStream))
        {
            // using guarantees the entry stream is closed even if Load throws
            // (the original leaked the stream on exception).
            using (Stream s = file.OpenRead())
            {
                dataStream.Load(s);
            }
        }
    }

    return true;
}
/// <summary>
/// Loads temp terrain (required) and model (optional) data from the named zip
/// file, reporting failures on the console in red.
/// </summary>
/// <param name="mFileName">Path of the source zip file.</param>
/// <returns>True on success; false when the file or terrain data is missing or an error occurs.</returns>
public bool loadTempDataFile(string mFileName)
{
    try
    {
        DiskFile zipFile = new DiskFile(mFileName);
        if (!zipFile.Exists)
        {
            Console.ForegroundColor = ConsoleColor.Red;
            Console.WriteLine("..Could not load source Disk file " + mFileName);
            Console.ForegroundColor = ConsoleColor.White;
            return false;
        }

        ZipArchive zip = new ZipArchive(zipFile);

        // Terrain data is mandatory.
        if (!loadTerrainDat(zip))
        {
            Console.ForegroundColor = ConsoleColor.Red;
            Console.WriteLine("..Could not load terrain data from source file " + mFileName);
            Console.ForegroundColor = ConsoleColor.White;
            return false;
        }

        // Model data is optional: warn but continue.
        if (!loadModelsSet(zip))
        {
            Console.ForegroundColor = ConsoleColor.Red;
            Console.WriteLine("..Could not load model data (or it does not exist) in source file " + mFileName);
            Console.ForegroundColor = ConsoleColor.White;
        }
    }
    catch (Exception e)
    {
        // Report the failure instead of discarding the exception silently,
        // matching the style of the other error paths.
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine("..Error loading source file " + mFileName + ": " + e.Message);
        Console.ForegroundColor = ConsoleColor.White;
        return false;
    }

    return true;
}
// Serves a disk-stored attachment identified by the DiskFileID query-string
// parameter: either streams it as media or outputs it as a download, with a
// localized error page when the attachment is missing.
private void ProcessDiskFile(HttpContext context)
{
    int diskFileID;
    if (int.TryParse(context.Request.QueryString["DiskFileID"], out diskFileID))
    {
        // Lookup is scoped to the current user's files.
        DiskFile diskFile = DiskBO.Instance.GetDiskFile(userID, diskFileID);
        if (diskFile == null)
        {
            // Attachment not found (may have been moved or deleted).
            Context.ThrowError<CustomError>(new CustomError("error", "该附件不存在,可能被移动或被删除!"));
            return;
        }
        else if (isMedia)
        {
            ProcessMedia(context, diskFile.ExtensionName);
        }
        else if (false == OutputFileByID(context, diskFile.FileID, diskFile.FileName, diskFile.ExtensionName, outputMode))
        {
            // Output failed: show the "file missing" error page.
            ShowErrorMessage(context, "该附件不存在,可能被移动或被删除!", "文件不存在.gif");
        }
    }
}
/// <summary>
/// Wraps <paramref name="destFile"/> as a ZipArchive. When
/// <paramref name="createNew"/> is true any existing file is deleted first;
/// otherwise the file must already exist.
/// </summary>
/// <param name="destFile">Path of the zip file.</param>
/// <param name="createNew">True to start a fresh archive, false to open an existing one.</param>
/// <exception cref="ArgumentException">The file does not exist and createNew is false.</exception>
private static ZipArchive CreateZipArchive(string destFile, bool createNew)
{
    AbstractFile zipFile = new DiskFile(destFile);
    if (createNew)
    {
        // In order to create a new zip file, all we have to do is make sure
        // that file does not exist before creating our ZipArchive.
        if (zipFile.Exists)
        {
            zipFile.Delete();
        }
    }
    else if (!zipFile.Exists)
    {
        // Message grammar fixed ("is not exist" -> "does not exist");
        // exception type kept for caller compatibility.
        throw new ArgumentException(destFile + " does not exist!");
    }

    return CreateZipArchive(zipFile);
}
// Returns the last known-good deployment config: the in-memory cached copy if
// present, otherwise the encrypted backup file on disk (decrypting and caching
// it). Falls back to DeploymentConfigInfo.Empty when the file is missing or
// any step fails; failures are logged, never thrown.
async Task<DeploymentConfigInfo> ReadFromBackup()
{
    DeploymentConfigInfo backedUpDeploymentConfigInfo = DeploymentConfigInfo.Empty;
    try
    {
        backedUpDeploymentConfigInfo = await this.lastBackedUpConfig
            .Map(v => Task.FromResult(v))
            .GetOrElse(
                async () =>
                {
                    if (!File.Exists(this.configFilePath))
                    {
                        Events.BackupFileDoesNotExist(this.configFilePath);
                        return (DeploymentConfigInfo.Empty);
                    }
                    else
                    {
                        // Serialize file access against concurrent backup writes.
                        using (await this.sync.LockAsync())
                        {
                            string encryptedJson = await DiskFile.ReadAllAsync(this.configFilePath);
                            string json = await this.encryptionProvider.DecryptAsync(encryptedJson);
                            DeploymentConfigInfo deploymentConfigInfo = this.serde.Deserialize(json);
                            Events.ObtainedDeploymentFromBackup(this.configFilePath);
                            // Cache so subsequent calls skip the disk read.
                            this.lastBackedUpConfig = Option.Some(deploymentConfigInfo);
                            return (deploymentConfigInfo);
                        }
                    }
                });
    }
    catch (Exception e)
    {
        // Best-effort: log and return Empty rather than propagate.
        Events.GetBackupFailed(e, this.configFilePath);
    }

    return (backedUpDeploymentConfigInfo);
}
/// <summary>
/// Replaces any existing temp .ZIP alongside the given filename with a fresh
/// deflate-compressed archive containing the temp terrain and model data.
/// </summary>
void writeTempData(string filename)
{
    string zipPath = Path.ChangeExtension(filename, ".ZIP");

    // If we've already got the source file written, delete it.
    if (File.Exists(zipPath))
    {
        File.Delete(zipPath);
    }

    DiskFile zipFile = new DiskFile(zipPath);
    if (!zipFile.Exists)
    {
        zipFile.Create();
    }

    ZipArchive zip = new ZipArchive(zipFile);
    zip.DefaultCompressionMethod = CompressionMethod.Deflated;
    zip.AllowSpanning = true;

    writeTempTerrainToZip(zip);
    writeTempModelsToZip(zip);
}
// Encrypts and writes the deployment config to the backup file, but only when
// the config carries no error and differs from the last backed-up copy.
// Failures are logged and swallowed so backup problems never break the caller.
async Task BackupDeploymentConfig(DeploymentConfigInfo deploymentConfigInfo)
{
    try
    {
        // backup the config info only if there isn't an error in it
        // (and only when it actually changed since the last backup).
        if (!deploymentConfigInfo.Exception.HasValue
            && !this.lastBackedUpConfig.Filter(c => deploymentConfigInfo.Equals(c)).HasValue)
        {
            string json = this.serde.Serialize(deploymentConfigInfo);
            string encrypted = await this.encryptionProvider.EncryptAsync(json);
            // Serialize file access against concurrent backup reads.
            using (await this.sync.LockAsync())
            {
                await DiskFile.WriteAllAsync(this.configFilePath, encrypted);
                this.lastBackedUpConfig = Option.Some(deploymentConfigInfo);
            }
        }
    }
    catch (Exception e)
    {
        // Best-effort: log and continue.
        Events.SetBackupFailed(e, this.configFilePath);
    }
}
/// <summary>
/// Writes this packet to disk by delegating to the base serializer with the
/// client payload.
/// </summary>
public override bool WritePacket(DiskFile diskfile, ulong offset)
{
    return base.WritePacket(diskfile, offset, this.client);
}
// Clear the location of the block: detaches it from any disk file and
// resets the offset so the block no longer points at on-disk data.
internal void ClearLocation()
{
    diskfile = null;
    offset = 0;
}
// Checks FTP write access for every row in the [place] table: uploads a
// GUID-named probe file to each site's path, deletes it again, and records
// "ok" or the error per site in the grid.
private void btnStart_Click(object sender, EventArgs e)
{
    btnStart.Enabled = false;
    try
    {
        // NOTE(review): license key hardcoded in source — move to secure config.
        Xceed.Ftp.Licenser.LicenseKey = "FTN42-K40Z3-DXCGS-PYGA";

        // Reset the result grid.
        data.Columns.Clear();
        data.Rows.Clear();
        data.Columns.Add("name", "Точка");
        data.Columns.Add("rezult", "Результат");
        data.Columns[0].Width = 200;
        data.Columns[1].Width = 350;

        // Load all sites to probe.
        cn = new SqlConnection(setting.Connection_string);
        cn.Open();
        cmd = new SqlCommand("SELECT * FROM [place];", cn);
        DataTable tbl = new DataTable();
        SqlDataAdapter da = new SqlDataAdapter(cmd);
        da.Fill(tbl);

        pb.Minimum = 0;
        pb.Maximum = tbl.Rows.Count;
        pb.Value = 0;

        foreach (DataRow r in tbl.Rows)
        {
            try
            {
                using (FtpConnection connection = new FtpConnection(
                    r["server"].ToString().Trim(),
                    r["username"].ToString().Trim(),
                    r["password"].ToString().Trim()))
                {
                    connection.Timeout = 10;

                    // Write a uniquely named probe file to the temp folder.
                    string tmp = Guid.NewGuid().ToString();
                    StreamWriter w = new StreamWriter(System.IO.Path.GetTempPath() + tmp);
                    w.Write(tmp);
                    w.Close();

                    connection.Encoding = Encoding.GetEncoding(1251);
                    DiskFile source = new DiskFile(System.IO.Path.GetTempPath() + tmp);

                    // Normalize the remote path (no leading slash).
                    string ftp_to = r["path"].ToString().Trim();
                    if (ftp_to.Substring(0, 1) == "/")
                        ftp_to = ftp_to.Substring(1);

                    FtpFolder destination = new FtpFolder(connection, ftp_to);
                    source.CopyTo(destination, true);

                    // NOTE(review): fixed 2s pause before delete — presumably to
                    // let the server settle; confirm it is actually needed.
                    Thread.Sleep(2000);
                    FtpFile remote = new FtpFile(connection, ftp_to + tmp);
                    remote.Delete();
                }

                data.Rows.Add(new string[] { r["name"].ToString().Trim(), "ok" });
            }
            catch (Exception ex)
            {
                // Per-site failure: record the message and continue.
                data.Rows.Add(new string[] { r["name"].ToString().Trim(), "ошибка: " + ex.Message });
            }
            finally
            {
                pb.Value++;
                Application.DoEvents();
            }
        }
    }
    catch
    {
        // NOTE(review): empty catch hides setup/SQL failures entirely —
        // consider at least logging before ignoring.
    }
    finally
    {
        btnStart.Enabled = true;
        MessageBox.Show("ok");
    }
}
// Verifies FileBackupConfigSource backup semantics across restarts:
// a fresh instance writes the backup once, serves cached config while the
// underlying source fails or is empty, reuses the on-disk backup after
// "restart" (new instance), and only returns Empty when both the source
// fails and no backup file exists.
public async Task FileBackupReadOnlyWhenUninitialized()
{
    // Start from a clean slate: no pre-existing backup file.
    if (File.Exists(this.tempFileName))
    {
        File.Delete(this.tempFileName);
    }

    // The underlying source yields: good, empty, throw — twice over,
    // covering two "process lifetimes".
    var underlying = new Mock<IConfigSource>();
    underlying.SetupSequence(t => t.GetDeploymentConfigInfoAsync())
        .ReturnsAsync(ValidConfigInfo1)
        .ReturnsAsync(DeploymentConfigInfo.Empty)
        .ThrowsAsync(new InvalidOperationException())
        .ReturnsAsync(ValidConfigInfo1)
        .ReturnsAsync(DeploymentConfigInfo.Empty)
        .ThrowsAsync(new InvalidOperationException());

    ISerde<DeploymentConfigInfo> serde = this.GetSerde();

    DeploymentConfigInfo config1;
    using (IConfigSource configSource = new FileBackupConfigSource(this.tempFileName, underlying.Object, serde, NullEncryptionProvider.Instance))
    {
        // First good fetch writes the backup file with config1's JSON.
        config1 = await configSource.GetDeploymentConfigInfoAsync();
        Assert.NotNull(config1);
        Assert.True(File.Exists(this.tempFileName));
        string backupJson = await DiskFile.ReadAllAsync(this.tempFileName);
        string returnedJson = serde.Serialize(config1);
        Assert.True(string.Equals(backupJson, returnedJson, StringComparison.OrdinalIgnoreCase));

        // Even with the backup file deleted, the in-memory cache serves
        // config1 when the source returns Empty...
        File.Delete(this.tempFileName);
        DeploymentConfigInfo config2 = await configSource.GetDeploymentConfigInfoAsync();
        Assert.NotNull(config2);
        Assert.Equal(serde.Serialize(config1), serde.Serialize(config2));

        // ...and when the source throws.
        DeploymentConfigInfo config3 = await configSource.GetDeploymentConfigInfoAsync();
        Assert.NotNull(config3);
        Assert.Equal(serde.Serialize(config1), serde.Serialize(config3));
    }

    // "Restart": a new instance re-fetches and re-writes the backup.
    using (IConfigSource configSource = new FileBackupConfigSource(this.tempFileName, underlying.Object, serde, NullEncryptionProvider.Instance))
    {
        config1 = await configSource.GetDeploymentConfigInfoAsync();
        Assert.NotNull(config1);
        Assert.True(File.Exists(this.tempFileName));
        string backupJson = await DiskFile.ReadAllAsync(this.tempFileName);
        string returnedJson = serde.Serialize(config1);
        Assert.True(string.Equals(backupJson, returnedJson, StringComparison.OrdinalIgnoreCase));
    }

    // Source returns Empty: new instance falls back to the backup file.
    using (IConfigSource configSource = new FileBackupConfigSource(this.tempFileName, underlying.Object, serde, NullEncryptionProvider.Instance))
    {
        DeploymentConfigInfo config5 = await configSource.GetDeploymentConfigInfoAsync();
        Assert.NotNull(config5);
        Assert.Equal(serde.Serialize(config1), serde.Serialize(config5));
    }

    // Source throws: new instance still falls back to the backup file.
    using (IConfigSource configSource = new FileBackupConfigSource(this.tempFileName, underlying.Object, serde, NullEncryptionProvider.Instance))
    {
        DeploymentConfigInfo config5 = await configSource.GetDeploymentConfigInfoAsync();
        Assert.NotNull(config5);
        Assert.Equal(serde.Serialize(config1), serde.Serialize(config5));
    }

    // No backup file AND an exhausted/failing source: only then Empty.
    File.Delete(this.tempFileName);
    using (IConfigSource configSource = new FileBackupConfigSource(this.tempFileName, underlying.Object, serde, NullEncryptionProvider.Instance))
    {
        DeploymentConfigInfo config6 = await configSource.GetDeploymentConfigInfoAsync();
        Assert.NotNull(config6);
        Assert.Equal(config6, DeploymentConfigInfo.Empty);
    }
}
/// <summary>
/// Serializes this packet to <paramref name="diskfile"/> at the given byte
/// <paramref name="offset"/>. Implemented by each concrete packet type.
/// </summary>
public abstract bool WritePacket(DiskFile diskfile, ulong offset);
// Writes the given sessions into a new SAZ (zip) archive at sFilename,
// optionally password-protected (AES-256 when configured). Any pre-existing
// file at that path is replaced. Returns false on empty input or failure.
private static bool WriteSessionArchive(string sFilename, Session[] arrSessions, string sPassword, bool bDisplayErrorMessages)
{
    // Nothing to archive.
    if ((null == arrSessions || (arrSessions.Length < 1)))
    {
        if (bDisplayErrorMessages)
        {
            FiddlerApplication.Log.LogString("WriteSessionArchive - No Input. No sessions were provided to save to the archive.");
        }

        return (false);
    }

    try
    {
        // Replace any pre-existing archive.
        if (File.Exists(sFilename))
        {
            File.Delete(sFilename);
        }

        DiskFile odfZip = new DiskFile(sFilename);
        ZipArchive oZip = new ZipArchive(odfZip);
        oZip.TempFolder = new MemoryFolder();
        oZip.BeginUpdate();
        ZippedFolder oZipRawFolder = (ZippedFolder)oZip.CreateFolder("raw");

        #region PasswordProtectIfNeeded
        if (!String.IsNullOrEmpty(sPassword))
        {
            if (CONFIG.bUseAESForSAZ)
            {
                oZip.DefaultEncryptionMethod = EncryptionMethod.WinZipAes; // Use 256bit AES
            }

            oZip.DefaultEncryptionPassword = sPassword;
        }
        #endregion PasswordProtectIfNeeded

        oZip.Comment = Fiddler.CONFIG.FiddlerVersionInfo + " " + GetZipLibraryInfo() + " Session Archive. See http://www.fiddler2.com";

        #region ProcessEachSession
        int iFileNumber = 1;

        // Our format string must pad all session ids with leading 0s for proper sorting.
        string sFileNumberFormatter = ("D" + arrSessions.Length.ToString().Length);
        foreach (Session oSession in arrSessions)
        {
            WriteSessionToSAZ(oSession, odfZip, iFileNumber, sFileNumberFormatter, null, bDisplayErrorMessages);
            iFileNumber++;
        }
        #endregion ProcessEachSession

        oZip.EndUpdate();
        return (true);
    }
    catch (Exception eX)
    {
        // TODO: Should close any open handles here.
        if (bDisplayErrorMessages)
        {
            FiddlerApplication.Log.LogString("Failed to save Session Archive.\n\n" + eX.Message);
        }

        return (false);
    }
}
/// <summary>
/// Writes this packet to disk by delegating to the base serializer with the
/// client payload.
/// </summary>
public override bool WritePacket(DiskFile diskfile, ulong offset)
{
    return (base.WritePacket(diskfile, offset, this.client));
}
/// <summary>
/// Writes this packet to disk by delegating to the base serializer with the
/// block size, recoverable file count, and file id list as payload.
/// </summary>
public override bool WritePacket(DiskFile diskfile, ulong offset)
{
    return base.WritePacket(diskfile, offset, this.blocksize, this.recoverablefilecount, this.fileids);
}
/// <summary>
/// Writes this packet to disk by delegating to the base serializer with the
/// file id, both hashes, length, and name as payload.
/// </summary>
public override bool WritePacket(DiskFile diskfile, ulong offset)
{
    return (base.WritePacket(diskfile, offset, this.fileid, this.hashfull, this.hash16k, this.length, this.name));
}
/// <summary>
/// Writes this packet to disk by delegating to the base serializer with the
/// block size, recoverable file count, and file id list as payload.
/// </summary>
public override bool WritePacket(DiskFile diskfile, ulong offset)
{
    return (base.WritePacket(diskfile, offset, this.blocksize, this.recoverablefilecount, this.fileids));
}
// Writes the given sessions into a new SAZ (zip) archive at sFilename,
// optionally password-protected (AES-256 when configured). Any pre-existing
// file at that path is replaced. Returns false on empty input or failure.
private static bool WriteSessionArchive(string sFilename, Session[] arrSessions, string sPassword, bool bDisplayErrorMessages)
{
    // Nothing to archive.
    if ((null == arrSessions || (arrSessions.Length < 1)))
    {
        if (bDisplayErrorMessages)
        {
            FiddlerApplication.Log.LogString("WriteSessionArchive - No Input. No sessions were provided to save to the archive.");
        }

        return false;
    }

    try
    {
        // Replace any pre-existing archive.
        if (File.Exists(sFilename))
        {
            File.Delete(sFilename);
        }

        DiskFile odfZip = new DiskFile(sFilename);
        ZipArchive oZip = new ZipArchive(odfZip);
        oZip.TempFolder = new MemoryFolder();
        oZip.BeginUpdate();
        ZippedFolder oZipRawFolder = (ZippedFolder)oZip.CreateFolder("raw");

        #region PasswordProtectIfNeeded
        if (!String.IsNullOrEmpty(sPassword))
        {
            if (CONFIG.bUseAESForSAZ)
            {
                oZip.DefaultEncryptionMethod = EncryptionMethod.WinZipAes; // Use 256bit AES
            }

            oZip.DefaultEncryptionPassword = sPassword;
        }
        #endregion PasswordProtectIfNeeded

        oZip.Comment = Fiddler.CONFIG.FiddlerVersionInfo + " " + GetZipLibraryInfo() + " Session Archive. See http://www.fiddler2.com";

        #region ProcessEachSession
        int iFileNumber = 1;

        // Our format string must pad all session ids with leading 0s for proper sorting.
        string sFileNumberFormatter = ("D" + arrSessions.Length.ToString().Length);
        foreach (Session oSession in arrSessions)
        {
            WriteSessionToSAZ(oSession, odfZip, iFileNumber, sFileNumberFormatter, null, bDisplayErrorMessages);
            iFileNumber++;
        }
        #endregion ProcessEachSession

        oZip.EndUpdate();
        return true;
    }
    catch (Exception eX)
    {
        // TODO: Should close any open handles here.
        if (bDisplayErrorMessages)
        {
            FiddlerApplication.Log.LogString("Failed to save Session Archive.\n\n" + eX.Message);
        }

        return false;
    }
}
/// <summary>
/// Converts a DBFileInfo plus its raw content bytes into a DiskFile entity
/// that EF can persist directly.
/// </summary>
/// <param name="fileInfo">Source file metadata; null yields null.</param>
/// <param name="fileContent">Raw file bytes to embed.</param>
/// <returns>A populated DiskFile, or null when fileInfo is null.</returns>
public static DiskFile toDiskFile(DBFileInfo fileInfo, byte[] fileContent)
{
    // Guard clause instead of wrapping the whole body in an if.
    if (fileInfo == null)
    {
        return null;
    }

    return new DiskFile()
    {
        ID = fileInfo.ID,
        FileSize = fileInfo.FileSize,
        FilePath = fileInfo.FilePath,
        AddTime = fileInfo.AddTime,
        FileContent = fileContent
    };
}
/// <summary>
/// Writes this packet to disk by delegating to the base serializer with the
/// file id and entry list as payload.
/// </summary>
public override bool WritePacket(DiskFile diskfile, ulong offset)
{
    return base.WritePacket(diskfile, offset, this.fileid, this.entries);
}
// Builds a RecoveryPacket shell for one recovery block: fills in the header
// fields known up front, seeds the incremental MD5 packet hash with the
// setid/type/exponent bytes, and attaches a DataBlock pointing at where the
// recovery data will live on disk (immediately after the packet header).
internal static RecoveryPacket Create(DiskFile diskFile, ulong offset, ulong blocksize, uint exponent, byte[] setid)
{
    RecoveryPacket tmpPacket = new RecoveryPacket();

    // Fill in the details the we know
    tmpPacket.header = new PacketHeader();
    tmpPacket.header.magic = Par2FileReader.packet_magic;
    tmpPacket.header.hash = new byte[16]; // Compute later
    tmpPacket.header.setid = setid;
    tmpPacket.header.type = Par2FileReader.recoveryblockpacket_type;
    tmpPacket.diskfile = diskFile;
    // NOTE(review): ulong offset is narrowed to int here; offsets beyond
    // int.MaxValue would truncate — confirm upstream guarantees small offsets.
    tmpPacket.offset = (int)offset;
    tmpPacket.exponent = exponent;
    //tmpPacket.length = 0;
    // Header length = header bytes + 4-byte exponent + recovery data.
    tmpPacket.header.length = (ulong)(tmpPacket.header.GetSize() + sizeof(UInt32) + (int)blocksize);
    tmpPacket.length = (int)tmpPacket.header.length;

    // Start computation of the packet hash
    tmpPacket.packetcontext = MD5.Create();
    using (MemoryStream ms = new MemoryStream())
    {
        using (BinaryWriter bw = new BinaryWriter(ms))
        {
            // PacketHeader section
            bw.Write(tmpPacket.header.setid);
            bw.Write(tmpPacket.header.type);

            //Packet section
            bw.Write(tmpPacket.exponent);

            byte[] buffer = ms.ToArray();
            // Hash is finalized later; here we only fold in the known prefix.
            tmpPacket.packetcontext.TransformBlock(buffer, 0, buffer.Length, null, 0);
        }
    }

    // Set the data block to immediatly follow the header on disk
    tmpPacket.datablock = new DataBlock();
    tmpPacket.datablock.SetLocation(tmpPacket.diskfile, (ulong)(tmpPacket.offset + tmpPacket.GetSize()));
    tmpPacket.datablock.SetLength(blocksize);

    return tmpPacket;
}
/// <summary>
/// Wraps <paramref name="destFile"/> as a ZipArchive. When
/// <paramref name="createNew"/> is true any existing file is deleted first;
/// otherwise the file must already exist.
/// </summary>
/// <param name="destFile">Path of the zip file.</param>
/// <param name="createNew">True to start a fresh archive, false to open an existing one.</param>
/// <exception cref="ArgumentException">The file does not exist and createNew is false.</exception>
private static ZipArchive CreateZipArchive(string destFile, bool createNew)
{
    AbstractFile zipFile = new DiskFile(destFile);
    if (createNew)
    {
        // In order to create a new zip file, all we have to do is make sure
        // that file does not exist before creating our ZipArchive.
        if (zipFile.Exists)
        {
            zipFile.Delete();
        }
    }
    else if (!zipFile.Exists)
    {
        // Message grammar fixed ("is not exist" -> "does not exist");
        // exception type kept for caller compatibility.
        throw new ArgumentException(destFile + " does not exist!");
    }

    return CreateZipArchive(zipFile);
}
/// <summary>
/// The backup file must only be rewritten when the configuration actually
/// changes, never on repeated identical fetches.
/// </summary>
// Changed "async void" to "async Task": async-void test methods make failures
// unobservable to the xUnit runner.
public async Task FileBackupWriteOnlyWhenConfigurationChanges()
{
    if (File.Exists(this.tempFileName))
    {
        File.Delete(this.tempFileName);
    }

    var underlying = new Mock<IConfigSource>();
    underlying.SetupSequence(t => t.GetDeploymentConfigInfoAsync())
        .ReturnsAsync(ValidConfigInfo1)
        .ReturnsAsync(ValidConfigInfo1)
        .ReturnsAsync(ValidConfigInfo2)
        .ReturnsAsync(ValidConfigInfo2);

    ISerde<DeploymentConfigInfo> serde = this.GetSerde();

    using (IConfigSource configSource = new FileBackupConfigSource(this.tempFileName, underlying.Object, serde, NullEncryptionProvider.Instance))
    {
        // First fetch writes the backup file.
        DeploymentConfigInfo config1 = await configSource.GetDeploymentConfigInfoAsync();
        Assert.NotNull(config1);
        Assert.True(File.Exists(this.tempFileName));
        string backupJson = await DiskFile.ReadAllAsync(this.tempFileName);
        string returnedJson = serde.Serialize(config1);
        Assert.True(string.Equals(backupJson, returnedJson, StringComparison.OrdinalIgnoreCase));
        DateTime modifiedTime1 = File.GetLastWriteTimeUtc(this.tempFileName);
        Assert.True(DateTime.UtcNow - modifiedTime1 < TimeSpan.FromSeconds(5));

        // Same config again: the backup must NOT be rewritten.
        DeploymentConfigInfo config2 = await configSource.GetDeploymentConfigInfoAsync();
        Assert.NotNull(config2);
        Assert.Equal(serde.Serialize(config1), serde.Serialize(config2));
        DateTime modifiedTime2 = File.GetLastWriteTimeUtc(this.tempFileName);
        Assert.Equal(modifiedTime2, modifiedTime1);

        // Changed config: the backup is rewritten.
        DeploymentConfigInfo config3 = await configSource.GetDeploymentConfigInfoAsync();
        Assert.NotNull(config3);
        Assert.True(File.Exists(this.tempFileName));
        backupJson = await DiskFile.ReadAllAsync(this.tempFileName);
        returnedJson = serde.Serialize(config3);
        Assert.True(string.Equals(backupJson, returnedJson, StringComparison.OrdinalIgnoreCase));
        DateTime modifiedTime3 = File.GetLastWriteTimeUtc(this.tempFileName);
        // Check the rewrite's own timestamp (the original re-checked
        // modifiedTime1 here).
        Assert.True(DateTime.UtcNow - modifiedTime3 < TimeSpan.FromSeconds(5));
        Assert.NotEqual(modifiedTime1, modifiedTime3);

        // Same changed config again: no rewrite.
        DeploymentConfigInfo config4 = await configSource.GetDeploymentConfigInfoAsync();
        Assert.NotNull(config4);
        // The original compared config4's serialization with itself (a
        // tautology); compare against config3 instead.
        Assert.Equal(serde.Serialize(config3), serde.Serialize(config4));
        DateTime modifiedTime4 = File.GetLastWriteTimeUtc(this.tempFileName);
        Assert.Equal(modifiedTime4, modifiedTime3);
    }
}
/// <summary>
/// Saves all registered data streams into a zip archive at
/// <paramref name="fileName"/>, optionally refusing to overwrite or to touch
/// read-only files. Rethrows any exception after flagging a fatal save error.
/// </summary>
/// <param name="fileName">Destination zip path.</param>
/// <param name="overwrite">False aborts when the file already exists.</param>
/// <param name="checkReadOnly">True aborts when the existing file is read-only.</param>
/// <returns>True on success; false when an overwrite/read-only guard aborted the save.</returns>
public bool SaveData(string fileName, bool overwrite, bool checkReadOnly)
{
    try
    {
        using (PerfSection p = new PerfSection("SaveData() " + Path.GetFileName(fileName)))
        {
            if (File.Exists(fileName))
            {
                if (!overwrite)
                {
                    return false;
                }

                // Flags test: the original equality comparison
                // (GetAttributes == FileAttributes.ReadOnly) missed read-only
                // files that also carried other attribute bits (e.g. Archive).
                if (checkReadOnly && (File.GetAttributes(fileName) & FileAttributes.ReadOnly) != 0)
                {
                    return false;
                }

                File.SetAttributes(fileName, FileAttributes.Normal);
                File.Delete(fileName);
            }

            DiskFile zipFile = new DiskFile(fileName);
            zipFile.Create();
            if (!zipFile.Exists)
            {
                return false;
            }

            ZipArchive zip = new ZipArchive(zipFile);
            Dictionary<string, IDataStream>.Enumerator it = mStreams.GetEnumerator();
            while (it.MoveNext() != false)
            {
                AbstractFile md = zip.CreateFile(it.Current.Key, true);
                Stream s = md.OpenWrite(true);
                BufferedStream bs = null;
                if (CoreGlobals.OutOfMemory == false)
                {
                    bs = new BufferedStream(s, 10000000); // ~10mb buffer
                    it.Current.Value.Save(bs);
                }
                else
                {
                    // Low-memory mode: write straight through, no buffer.
                    it.Current.Value.Save(s);
                }

                if (bs != null)
                {
                    bs.Flush();
                    bs.Close();
                }
                else
                {
                    s.Close();
                }
            }
        }

        return true;
    }
    catch (System.Exception)
    {
        CoreGlobals.FatalEdDataSaveError = true;
        // "throw;" preserves the original stack trace; the original
        // "throw ex;" reset it. (Also removed the unreachable trailing
        // "return false;" the compiler could never reach.)
        throw;
    }
}
// Points this packet's data block at the given file/offset
// (offset is widened to ulong for the DataBlock API).
internal void SetBlock(DiskFile diskfile, int offset)
{
    datablock.SetLocation(diskfile, (ulong)offset);
}
/// <summary>
/// Writes this packet to disk by delegating to the base serializer with the
/// file id, both hashes, length, and name as payload.
/// </summary>
public override bool WritePacket(DiskFile diskfile, ulong offset)
{
    return base.WritePacket(diskfile, offset, this.fileid, this.hashfull, this.hash16k, this.length, this.name);
}
/// <summary>
/// Downloads a remote FTP file, optionally resuming a partial local copy.
/// </summary>
/// <param name="ftpAddress">Server address in the form userName:Password@hostName:port (e.g. ftp://softer:[email protected]:21).</param>
/// <param name="remoteFilename">Path of the file on the server.</param>
/// <param name="localFilename">Destination path on disk.</param>
/// <param name="resumeOperation">True to append the missing tail to an existing partial download.</param>
/// <returns>True when the local file is complete.</returns>
/// <exception cref="ArgumentException">The ftp address cannot be parsed.</exception>
public bool DownloadFile(string ftpAddress, string remoteFilename, string localFilename, bool resumeOperation)
{
    FtpSiteData siteData = ParseFtpAddress(ftpAddress);
    if (siteData == null)
    {
        throw new ArgumentException("Invalid ftp address format!");
    }

    using (FtpConnection connection = new FtpConnection(siteData.Host, siteData.Port, siteData.UserName, siteData.Password))
    {
        SetConnection(connection);
        // Resolve the remote file via the folder (a direct FtpFile did not work).
        AbstractFolder remoteFolder = new FtpFolder(connection);
        AbstractFile remoteFile = remoteFolder.GetFile(remoteFilename);
        AbstractFile localFile = new DiskFile(localFilename);

        if (!resumeOperation || !localFile.Exists || remoteFile.Size < localFile.Size)
        {
            // Fresh download (or the local copy is longer than the remote file).
            remoteFile.CopyTo(localFile, true);
        }
        else if (remoteFile.Size > localFile.Size)
        {
            // Resume: append the missing tail of the remote file.
            byte[] buf = new byte[1024];
            using (System.IO.Stream localStream = localFile.OpenWrite(false))
            using (System.IO.Stream remoteStream = remoteFile.OpenRead())
            {
                remoteStream.Seek(localFile.Size, System.IO.SeekOrigin.Begin);
                localStream.Seek(0, System.IO.SeekOrigin.End);

                // Read until EOF (Read returning 0). The original looped
                // "while (cnt == buf.Length)", which stops as soon as one
                // Read returns fewer bytes than the buffer — legal for
                // network streams mid-transfer — truncating the download.
                int cnt;
                while ((cnt = remoteStream.Read(buf, 0, buf.Length)) > 0)
                {
                    localStream.Write(buf, 0, cnt);
                }
            }
        }
        // remoteFile.Size == localFile.Size: already complete.

        return true;
    }
}
/// <summary>
/// Writes this packet to disk by delegating to the base serializer with the
/// file id and entry list as payload.
/// </summary>
public override bool WritePacket(DiskFile diskfile, ulong offset)
{
    return (base.WritePacket(diskfile, offset, this.fileid, this.entries));
}
// Set the location of the block: records which disk file holds the block's
// data and the byte offset where it starts.
internal void SetLocation(DiskFile _diskfile, ulong _offset)
{
    diskfile = _diskfile;
    offset = _offset;
}
/// <summary>
/// PushFile must delegate to the repository's UpdateFile with the auth token.
/// </summary>
public void CallsUpdateFileOnRepo()
{
    // Arrange: expect UpdateFile with our token and any file/target/message.
    this._mockRepo
        .Setup(m => m.UpdateFile(this._authToken, It.IsAny<File>(), It.IsAny<FileTarget>(), It.IsAny<string>()))
        .Verifiable();

    var file = new DiskFile("Resources/content_file.gif");
    var target = new FileTarget("pseudomuto", "reponame", "content_file.gif");

    // Act.
    this._subject.PushFile(file, target, "updating file");

    // Assert.
    this._mockRepo.Verify();
}
// This is a refactored helper function which writes a single session to an
// open SAZ file: one request, one response, and one metadata entry under
// raw\NNNN_*, plus (optionally) an index.html table row. Each entry is
// written best-effort; failures are logged (when enabled) and skipped.
internal static void WriteSessionToSAZ(Session oSession, DiskFile odfZip, int iFileNumber, StringBuilder sbHTML, bool bDisplayErrorMessages)
{
    string sBaseFilename = @"raw\" + iFileNumber.ToString("0000");
    string sRequestFilename = sBaseFilename + "_c.txt";
    string sResponseFilename = sBaseFilename + "_s.txt";
    string sMetadataFilename = sBaseFilename + "_m.xml";

    // Write the Request to the Archive. Streams are now wrapped in "using"
    // so they are closed even when the Write*ToStream call throws (the
    // original leaked the stream on exception).
    try
    {
        ZippedFile o = new ZippedFile(odfZip, sRequestFilename);
        using (Stream oS = o.CreateWrite(FileShare.None))
        {
            oSession.WriteRequestToStream(false, true, oS);
        }
    }
    catch (Exception eX)
    {
        if (bDisplayErrorMessages)
        {
            FiddlerApplication.Log.LogString("Archive Failure: Unable to add " + sRequestFilename + "\n\n" + eX.Message);
        }
    }

    // Write the Response to the Archive
    try
    {
        ZippedFile o = new ZippedFile(odfZip, sResponseFilename);
        using (Stream oS = o.CreateWrite(FileShare.None))
        {
            oSession.WriteResponseToStream(oS, false);
        }
    }
    catch (Exception eX)
    {
        if (bDisplayErrorMessages)
        {
            FiddlerApplication.Log.LogString("Archive Failure: Unable to add " + sResponseFilename + "\n\n" + eX.Message);
        }
    }

    // Write the MetaData to the Archive
    try
    {
        ZippedFile o = new ZippedFile(odfZip, sMetadataFilename);
        using (Stream oS = o.CreateWrite(FileShare.None))
        {
            oSession.WriteMetadataToStream(oS);
        }
    }
    catch (Exception eX)
    {
        if (bDisplayErrorMessages)
        {
            FiddlerApplication.Log.LogString("Archive Failure: Unable to add " + sMetadataFilename + "\n\n" + eX.Message);
        }
    }

    #region AddIndexHTMLEntry
    if (null != sbHTML)
    {
        sbHTML.Append("<tr>");
        sbHTML.Append("<TD><a href='" + sRequestFilename + "'>C</a> ");
        sbHTML.Append("<a href='" + sResponseFilename + "'>S</a> ");
        sbHTML.Append("<a href='" + sMetadataFilename + "'>M</a></TD>");
        sbHTML.Append("</tr>");
    }
    #endregion AddIndexHTMLEntry
}
/// <summary>
/// PushFile must reject a null target with ArgumentNullException.
/// </summary>
public void RequiresTarget()
{
    Assert.Throws<ArgumentNullException>(() =>
    {
        var file = new DiskFile("Resources/content_file.gif");

        this._subject.PushFile(file, null, "msg");
    });
}
// This is a refactored helper function which writes a single session to an
// open SAZ file: one request, one response, and one metadata entry under
// raw\<padded-id>_*, plus (optionally) an index.html table row. Each entry
// is written best-effort; failures are logged (when enabled) and skipped.
internal static void WriteSessionToSAZ(Session oSession, DiskFile odfZip, int iFileNumber, string sFileNumberFormatter, StringBuilder sbHTML, bool bDisplayErrorMessages)
{
    string sBaseFilename = @"raw\" + iFileNumber.ToString(sFileNumberFormatter);
    string sRequestFilename = sBaseFilename + "_c.txt";
    string sResponseFilename = sBaseFilename + "_s.txt";
    string sMetadataFilename = sBaseFilename + "_m.xml";

    // Write the Request to the Archive. Streams are now wrapped in "using"
    // so they are closed even when the Write*ToStream call throws (the
    // original leaked the stream on exception).
    try
    {
        ZippedFile o = new ZippedFile(odfZip, sRequestFilename);
        using (Stream oS = o.CreateWrite(FileShare.None))
        {
            oSession.WriteRequestToStream(false, true, oS);
        }
    }
    catch (Exception eX)
    {
        if (bDisplayErrorMessages)
        {
            FiddlerApplication.Log.LogString("Archive Failure: Unable to add " + sRequestFilename + "\n\n" + eX.Message);
        }
    }

    // Write the Response to the Archive
    try
    {
        ZippedFile o = new ZippedFile(odfZip, sResponseFilename);
        using (Stream oS = o.CreateWrite(FileShare.None))
        {
            oSession.WriteResponseToStream(oS, false);
        }
    }
    catch (Exception eX)
    {
        if (bDisplayErrorMessages)
        {
            FiddlerApplication.Log.LogString("Archive Failure: Unable to add " + sResponseFilename + "\n\n" + eX.Message);
        }
    }

    // Write the MetaData to the Archive
    try
    {
        ZippedFile o = new ZippedFile(odfZip, sMetadataFilename);
        using (Stream oS = o.CreateWrite(FileShare.None))
        {
            oSession.WriteMetadataToStream(oS);
        }
    }
    catch (Exception eX)
    {
        if (bDisplayErrorMessages)
        {
            FiddlerApplication.Log.LogString("Archive Failure: Unable to add " + sMetadataFilename + "\n\n" + eX.Message);
        }
    }

    #region AddIndexHTMLEntry
    if (null != sbHTML)
    {
        sbHTML.Append("<tr>");
        sbHTML.Append("<TD><a href='" + sRequestFilename + "'>C</a> ");
        sbHTML.Append("<a href='" + sResponseFilename + "'>S</a> ");
        sbHTML.Append("<a href='" + sMetadataFilename + "'>M</a></TD>");
        sbHTML.Append("</tr>");
    }
    #endregion AddIndexHTMLEntry
}
/// <summary>
/// Zip-compresses a single file into a (new) archive file.
/// </summary>
/// <param name="srcFile">Path of the file to compress.</param>
/// <param name="destFile">Path of the zip archive to create.</param>
/// <returns>The destination archive path.</returns>
public static string ZipFromFile(string srcFile, string destFile)
{
    // createNew = true: any existing archive at destFile is replaced.
    ZipArchive archive = CreateZipArchive(destFile, true);
    AbstractFile source = new DiskFile(srcFile);
    source.CopyTo(archive, true);
    return destFile;
}