// Test fixture setup: builds a two-entry file collection ("abcd" and "abdc")
// and a fresh history stub so each test starts from a known state.
// NOTE(review): File(mocks, ...) appears to be a mock-backed factory helper — confirm.
public void Setup() {
    files = new Files();
    abcdFile = File(mocks, "abcd");
    files.Add(abcdFile);
    abdcFile = File(mocks, "abdc");
    files.Add(abdcFile);
    history = new HistoryTest.HistoryStub();
}
/// <summary>
/// Builds a ProtoManifest from a <see cref="DepotManifest"/>, copying every
/// file entry and the source manifest's creation timestamp.
/// </summary>
/// <param name="sourceManifest">Manifest whose file entries are copied.</param>
/// <param name="id">Identifier assigned to this manifest.</param>
public ProtoManifest(DepotManifest sourceManifest, ulong id) : this()
{
    foreach (var sourceFile in sourceManifest.Files)
    {
        Files.Add(new FileData(sourceFile));
    }

    ID = id;
    CreationTime = sourceManifest.CreationTime;
}
// Registers a new packfile entry under the given name and keeps the backing
// stream so its contents can be written out later.
// NOTE(review): m_Streams.Add throws ArgumentException if the same filename is
// added twice — confirm callers guarantee uniqueness.
public void AddFile(Stream stream, string filename) {
    Files.Add(new PackfileEntry(this, new PackfileEntryFileData(), filename, true));
    m_Streams.Add(filename, stream);
}
/// <summary>
/// Extracts every selected ZIP archive into the destination directory and
/// registers all extracted files as output of this task.
/// </summary>
/// <returns>Success, Warning (partial failure) or Error task status.</returns>
public override TaskStatus Run()
{
    Info("Extracting ZIP archives...");

    var allSucceeded = true;
    var anySucceeded = false;

    foreach (FileInf archive in SelectFiles())
    {
        try
        {
            // Optionally isolate each archive in its own timestamped sub-folder.
            var targetDir = CreateSubDirectoryWithDateTime
                ? Path.Combine(DestDir, Path.GetFileNameWithoutExtension(archive.Path) + "_" + string.Format("{0:yyyy-MM-dd-HH-mm-ss-fff}", DateTime.Now))
                : DestDir;

            if (!Directory.Exists(targetDir))
            {
                Directory.CreateDirectory(targetDir);
            }

            ExtractZipFile(archive.Path, Password, targetDir);

            // Everything found under targetDir becomes output of this task.
            foreach (var extracted in Directory.GetFiles(targetDir, "*.*", SearchOption.AllDirectories))
            {
                Files.Add(new FileInf(extracted, Id));
            }

            InfoFormat("ZIP {0} extracted to {1}", archive.Path, targetDir);
            anySucceeded = true;
        }
        catch (ThreadAbortException)
        {
            throw;
        }
        catch (Exception e)
        {
            ErrorFormat("An error occured while extracting of the ZIP {0}", e, archive.Path);
            allSucceeded = false;
        }
    }

    // Warning when only some archives extracted, Error when none did.
    var status = allSucceeded
        ? Status.Success
        : anySucceeded ? Status.Warning : Status.Error;

    Info("Task finished.");
    return new TaskStatus(status, false);
}
// Streams the result set(s) of the given command into a timestamped CSV file
// in the workflow temp folder, then registers the file as task output unless
// it is empty and DoNotGenerateFilesIfEmpty is set.
// NOTE(review): neither reader nor conn is disposed here — presumably the
// caller owns the connection lifetime; confirm.
// NOTE(review): QuoteString wraps the whole line, not each field, and Separator
// is written unquoted between fields — confirm this is the intended CSV dialect.
private void ConvertToCsv(DbConnection conn, DbCommand comm) {
    conn.Open();
    var reader = comm.ExecuteReader();
    string destPath = Path.Combine(Workflow.WorkflowTempFolder, string.Format("SqlToCsv_{0:yyyy-MM-dd-HH-mm-ss-fff}.csv", DateTime.Now));
    using (var sw = new StreamWriter(destPath)) {
        bool hasRows = reader.HasRows;
        // One outer iteration per result set; NextResult() advances to the next set.
        while (hasRows) {
            List<string> columns = new List<string>();
            List<string> values = new List<string>();
            bool readColumns = false;
            bool headerDone = false;
            bool readRecord = false;
            while (reader.Read()) {
                // Rows are flushed one iteration late: the previous row's values
                // are written at the start of the next Read(), and the final row
                // is written after this loop ends.
                if (readRecord) {
                    if (!headerDone && Headers) {
                        headerDone = true;
                        if (Headers) {
                            sw.Write(QuoteString + string.Join(Separator, columns) + QuoteString);
                            sw.Write(EndOfLine);
                        }
                    }
                    sw.Write(QuoteString + string.Join(Separator, values) + QuoteString);
                    sw.Write(EndOfLine);
                    values.Clear();
                }
                int i;
                // Column names are captured once per result set, on the first row.
                if (!readColumns) {
                    for (i = 0; i < reader.FieldCount; i++) {
                        columns.Add(reader.GetName(i));
                    }
                    readColumns = true;
                }
                for (i = 0; i < reader.FieldCount; i++) {
                    values.Add(reader[i].ToString());
                }
                readRecord = true;
            }
            // Single-row result sets never reach the header branch above;
            // this writes their header when SingleRecordHeaders is enabled.
            if (!headerDone && SingleRecordHeaders && Headers) {
                sw.Write(QuoteString + string.Join(Separator, columns) + QuoteString);
                sw.Write(EndOfLine);
            }
            // Flush the last buffered row of this result set.
            // NOTE(review): for an empty result set this still emits one
            // quote-only line — confirm that is acceptable downstream.
            sw.Write(QuoteString + string.Join(Separator, values) + QuoteString);
            sw.Write(EndOfLine);
            values.Clear();
            columns.Clear();
            hasRows = reader.NextResult();
        }
    }
    // NOTE(review): reader.HasRows is queried after all result sets were
    // consumed; its value at this point may be provider-dependent — verify.
    if (!reader.HasRows && DoNotGenerateFilesIfEmpty) {
        InfoFormat("No file was generated because the result set is empty.");
    } else {
        Files.Add(new FileInf(destPath, Id));
        InfoFormat("CSV file generated: {0}", destPath);
    }
}
// Reads one Blowfish-encrypted 20-entry block of the Pk2 archive at the given
// stream position, registering folders and files under _mCurrentFolder, then
// follows the block chain (entry 19's NextChain) and recurses depth-first into
// each discovered sub-folder.
// NOTE(review): the BinaryReader is deliberately not disposed — disposing it
// would close the shared _mFileStream; confirm.
// NOTE(review): _mCurrentFolder is reassigned during recursion and never
// restored afterwards — callers appear to rely on setting it beforehand; verify.
private void Read(long position) {
    var reader = new BinaryReader(_mFileStream);
    reader.BaseStream.Position = position;
    var folders = new List<Folder>();
    // Decrypt exactly one entry block (fixed struct size) at the current position.
    var entryBlock = (SPk2EntryBlock)BufferToStruct(_mBlowfish.Decode(reader.ReadBytes(Marshal.SizeOf(typeof(SPk2EntryBlock)))), typeof(SPk2EntryBlock));
    for (var i = 0; i < 20; i++) {
        var entry = entryBlock.Entries[i]; //.....
        switch (entry.Type) {
        case 0: //Null Entry
            break;
        case 1: //Folder
            // "." and ".." are navigation entries, not real sub-folders.
            if (entry.Name != "." && entry.Name != "..") {
                var folder = new Folder {
                    Name = entry.Name,
                    Position = BitConverter.ToInt64(entry.g_Position, 0)
                };
                folders.Add(folder);
                Folders.Add(folder);
                _mCurrentFolder.SubFolders.Add(folder);
            }
            break;
        case 2: //File
            var file = new File {
                Position = entry.Position,
                Name = entry.Name,
                Size = entry.Size,
                ParentFolder = _mCurrentFolder
            };
            Files.Add(file);
            _mCurrentFolder.Files.Add(file);
            break;
        }
    }
    // Entry 19 chains to the next block of the same directory, if any.
    if (entryBlock.Entries[19].NextChain != 0) {
        Read(entryBlock.Entries[19].NextChain);
    }
    // Recurse into sub-folders found in this block.
    foreach (var folder in folders) {
        _mCurrentFolder = folder;
        if (folder.Files == null) {
            folder.Files = new List<File>();
        }
        if (folder.SubFolders == null) {
            folder.SubFolders = new List<Folder>();
        }
        Read(folder.Position);
    }
}
// Populates the project's file list from the module being decompiled:
// AssemblyInfo, application manifest, resources (with satellites), one source
// file per decompilable type, app.config, plus splash screen / XAML / ResX
// initialization — then creates every output directory up front.
public void CreateProjectFiles(DecompileContext ctx) {
    var filenameCreator = new FilenameCreator(Directory, DefaultNamespace);
    var resourceNameCreator = new ResourceNameCreator(Options.Module, filenameCreator);
    AllowUnsafeBlocks = DotNetUtils.IsUnsafe(Options.Module);
    InitializeSplashScreen();
    if (Options.Decompiler.CanDecompile(DecompilationType.AssemblyInfo)) {
        var filename = filenameCreator.CreateFromRelativePath(Path.Combine(PropertiesFolder, "AssemblyInfo"), Options.Decompiler.FileExtension);
        Files.Add(new AssemblyInfoProjectFile(Options.Module, filename, Options.DecompilationContext, Options.Decompiler, createDecompilerOutput));
    }
    var ep = Options.Module.EntryPoint;
    if (!(ep is null) && !(ep.DeclaringType is null)) {
        StartupObject = ep.DeclaringType.ReflectionFullName;
    }
    // NOTE(review): the applicationManifest field is assigned here and the
    // ApplicationManifest property is read below — presumably the property
    // wraps the field; confirm.
    applicationManifest = ApplicationManifest.TryCreate(Options.Module.Win32Resources, filenameCreator);
    if (!(ApplicationManifest is null)) {
        Files.Add(new ApplicationManifestProjectFile(ApplicationManifest.Filename));
    }
    foreach (var rsrc in Options.Module.Resources) {
        ctx.CancellationToken.ThrowIfCancellationRequested();
        switch (rsrc.ResourceType) {
        case ResourceType.Embedded:
            // Each embedded resource may carry satellite (localized) files.
            foreach (var file in CreateEmbeddedResourceFiles(Options.Module, resourceNameCreator, (EmbeddedResource)rsrc)) {
                Files.Add(file);
                Files.AddRange(CreateSatelliteFiles(rsrc.Name, filenameCreator, file));
            }
            break;
        case ResourceType.AssemblyLinked:
            //TODO: What should be created here?
            break;
        case ResourceType.Linked:
            //TODO: What should be created here?
            break;
        default:
            break;
        }
    }
    InitializeXaml();
    InitializeResX();
    foreach (var type in Options.Module.Types) {
        ctx.CancellationToken.ThrowIfCancellationRequested();
        if (!DecompileType(type)) {
            continue;
        }
        Files.Add(CreateTypeProjectFile(type, filenameCreator));
    }
    CreateEmptyAppXamlFile();
    // Reuse an existing <assembly>.config next to the module, if present.
    var existingAppConfig = Options.Module.Location + ".config";
    if (File.Exists(existingAppConfig)) {
        Files.Add(new AppConfigProjectFile(filenameCreator.CreateName("app.config"), existingAppConfig));
    }
    applicationIcon = ApplicationIcon.TryCreate(Options.Module.Win32Resources, Path.GetFileName(Directory), filenameCreator);
    // Create all target directories now; log at most 20 failures.
    var dirs = new HashSet<string>(Files.Select(a => GetDirectoryName(a.Filename)).OfType<string>(), StringComparer.OrdinalIgnoreCase);
    int errors = 0;
    foreach (var dir in dirs) {
        ctx.CancellationToken.ThrowIfCancellationRequested();
        try {
            System.IO.Directory.CreateDirectory(dir);
        } catch (Exception ex) {
            if (errors++ < 20) {
                ctx.Logger.Error(string.Format(dnSpy_Decompiler_Resources.MSBuild_CouldNotCreateDirectory2, dir, ex.Message));
            }
        }
    }
}
// Transforms every selected file with the stylesheet at XsltPath, dispatching
// on the stylesheet's declared version (1.0 via XslCompiledTransform, 2.0/3.0
// via Saxon), then post-processes embedded <WexflowProcessing>/<File> nodes
// (renameTo + custom tags) and optionally strips them from the output.
public override TaskStatus Run() {
    Info("Transforming files...");
    bool success = true;
    bool atLeastOneSucceed = false;
    foreach (FileInf file in SelectFiles()) {
        var destPath = Path.Combine(Workflow.WorkflowTempFolder, string.Format(OutputFormat, Path.GetFileNameWithoutExtension(file.FileName), DateTime.Now, Extension));
        try {
            // The XSLT version is read from the stylesheet's root "version" attribute.
            Version = XDocument.Load(XsltPath).Root.Attribute("version").Value;
            switch (Version) {
            case "1.0":
                var xslt = new XslCompiledTransform();
                xslt.Load(XsltPath);
                xslt.Transform(file.Path, destPath);
                InfoFormat("File transformed (XSLT 1.0): {0} -> {1}", file.Path, destPath);
                Files.Add(new FileInf(destPath, Id));
                break;
            case "2.0":
                var xsl = new FileInfo(XsltPath);
                var input = new FileInfo(file.Path);
                var output = new FileInfo(destPath);
                // Compile stylesheet
                var processor = new Processor();
                var compiler = processor.NewXsltCompiler();
                var executable = compiler.Compile(new Uri(xsl.FullName));
                // Do transformation to a destination
                var destination = new DomDestination();
                using (var inputStream = input.OpenRead()) {
                    var transformer = executable.Load();
                    transformer.SetInputStream(inputStream, new Uri(input.DirectoryName));
                    transformer.Run(destination);
                }
                // Save result to a file (or whatever else you wanna do)
                destination.XmlDocument.Save(output.FullName);
                InfoFormat("File transformed (XSLT 2.0): {0} -> {1}", file.Path, destPath);
                Files.Add(new FileInf(destPath, Id));
                break;
            case "3.0":
                var xsl3 = new FileInfo(XsltPath);
                var input3 = new FileInfo(file.Path);
                var output3 = new FileInfo(destPath);
                var processor3 = new Processor(false);
                var compiler3 = processor3.NewXsltCompiler();
                var stylesheet = compiler3.Compile(new Uri(xsl3.FullName));
                var serializer = processor3.NewSerializer();
                serializer.SetOutputFile(output3.FullName);
                using (var inputStream = input3.OpenRead()) {
                    var transformer3 = stylesheet.Load30();
                    transformer3.Transform(inputStream, serializer);
                    serializer.Close();
                }
                InfoFormat("File transformed (XSLT 3.0): {0} -> {1}", file.Path, destPath);
                Files.Add(new FileInf(destPath, Id));
                break;
            default:
                Error("Error in version option. Available options: 1.0, 2.0 or 3.0");
                return (new TaskStatus(Status.Error, false));
            }
            // Set renameTo and tags from /*//<WexflowProcessing>//<File> nodes
            // Remove /*//<WexflowProcessing> nodes if necessary
            var xdoc = XDocument.Load(destPath);
            var xWexflowProcessings = xdoc.Descendants("WexflowProcessing").ToArray();
            foreach (var xWexflowProcessing in xWexflowProcessings) {
                var xFiles = xWexflowProcessing.Descendants("File");
                foreach (var xFile in xFiles) {
                    try {
                        var taskId = int.Parse(xFile.Attribute("taskId").Value);
                        string fileName = xFile.Attribute("name").Value;
                        var xRenameTo = xFile.Attribute("renameTo");
                        string renameTo = xRenameTo != null ? xRenameTo.Value : string.Empty;
                        // Every attribute except the reserved ones becomes a custom tag.
                        var tags = (from xTag in xFile.Attributes() where xTag.Name != "taskId" && xTag.Name != "name" && xTag.Name != "renameTo" && xTag.Name != "path" && xTag.Name != "renameToOrName" select new Tag(xTag.Name.ToString(), xTag.Value)).ToList();
                        var fileToEdit = (from f in Workflow.FilesPerTask[taskId] where f.FileName.Equals(fileName) select f).FirstOrDefault();
                        if (fileToEdit != null) {
                            fileToEdit.RenameTo = renameTo;
                            fileToEdit.Tags.AddRange(tags);
                            InfoFormat("File edited: {0}", fileToEdit.ToString());
                        } else {
                            ErrorFormat("Cannot find the File: {{fileName: {0}, taskId:{1}}}", fileName, taskId);
                        }
                    } catch (ThreadAbortException) {
                        throw;
                    } catch (Exception e) {
                        // NOTE(review): this string literal spans a physical line
                        // break in the source as received — likely a mangled "\n";
                        // left byte-identical here.
                        ErrorFormat("An error occured while editing the file: {0}. 
Error: {1}", xFile.ToString(), e.Message);
                    }
                }
            }
            if (RemoveWexflowProcessingNodes) {
                xWexflowProcessings.Remove();
                xdoc.Save(destPath);
            }
            if (!atLeastOneSucceed) {
                atLeastOneSucceed = true;
            }
        } catch (ThreadAbortException) {
            throw;
        } catch (Exception e) {
            ErrorFormat("An error occured while transforming the file {0}", e, file.Path);
            success = false;
        }
    }
    // Warning when only some files transformed, Error when none did.
    var status = Status.Success;
    if (!success && atLeastOneSucceed) {
        status = Status.Warning;
    } else if (!success) {
        status = Status.Error;
    }
    Info("Task finished.");
    return (new TaskStatus(status, false));
}
/// <summary>Appends <paramref name="file"/> to the file collection.</summary>
/// <param name="file">The file model to register.</param>
public void AddFile(FileModel file)
{
    Files.Add(file);
}
/// <summary>
/// Add the grf file to the internal lists
/// <para>Note: File will be deleted and a clone added if the file already exists</para>
/// </summary>
/// <param name="item">The grf file</param>
/// <returns>The new item state (Updated when an existing entry was replaced or refreshed, otherwise Added)</returns>
public ERoGrfFileItemState AddFile(RoGrfFileItem item) {
    RoGrfFileItem existingItem;
    bool replaceExistingItem = false;
    if ((existingItem = GetFileByHash(item.NameHash)) != null) {
        if (existingItem.IsAdded) {
            // A newly added item should be replaced:
            // remove it and add the incoming one as a fresh item below.
            DeleteFile(existingItem);
            replaceExistingItem = true;
        } else {
            // Update the existing (on-disk) item with the new uncompressed data.
            existingItem.State |= ERoGrfFileItemState.Updated;
            // Mark as not deleted
            existingItem.State &= ~ERoGrfFileItemState.Deleted;
            // Prefer a file on disk; fall back to in-memory data.
            if (item.IsAdded && item.NewFilepath != null && File.Exists(item.NewFilepath)) {
                existingItem.NewFilepath = item.NewFilepath;
            } else if (item.FileData != null) {
                // FIX: Path.GetTempPath() returns the temp *directory* and
                // File.WriteAllBytes on a directory path always fails.
                // Path.GetTempFileName() creates and returns a unique temp file.
                string tmpFilepath = Path.GetTempFileName();
                File.WriteAllBytes(tmpFilepath, item.FileData);
                existingItem.NewFilepath = tmpFilepath;
            } else {
                throw new Exception("Unable to fetch item data.");
            }
            // Update compressed length from the backing file and drop the buffer.
            existingItem.LengthCompressed = (uint)new FileInfo(existingItem.NewFilepath).Length;
            existingItem.FileData = new byte[0];
            // Inform the client about the update of an existing item.
            return ERoGrfFileItemState.Updated;
        }
    }
    var newItem = item.Clone() as RoGrfFileItem;
    if (newItem == null) {
        throw new Exception("Failed to clone item.");
    }
    if (replaceExistingItem) {
        // Just replace the reference; the name hash is already registered.
        newItem.State = ERoGrfFileItemState.Updated;
        Files[newItem.NameHash] = newItem;
    } else {
        // Genuinely new item.
        newItem.State = ERoGrfFileItemState.Added;
        Files.Add(newItem.NameHash, newItem);
        mStringlist.Add(newItem.NameHash);
    }
    // If the file was previously found AND the existing one was a NEW file,
    // the old one was deleted and this clone replaces it — so report Updated
    // rather than Added, even though we technically inserted a new object.
    if (replaceExistingItem) {
        return ERoGrfFileItemState.Updated;
    }
    return ERoGrfFileItemState.Added;
}
/// <summary>
/// Reads the uncompressed body of versions equal or above 0x200.
/// The body is ZIP (deflate) compressed.
/// </summary>
/// <param name="binReader">Reader positioned at the compressed file table.</param>
/// <param name="fileCount">Number of entries expected in the table.</param>
/// <param name="skipFiles">When true, only the table is decompressed; no items are created.</param>
/// <returns>Always true.</returns>
private bool ReadFilesVersion2(BinaryReader binReader, int fileCount, bool skipFiles) {
    int lengthCompressed = binReader.ReadInt32();
    int lengthUnCompressed = binReader.ReadInt32();
    mFileTableLength = (ulong)lengthUnCompressed;
    // FIX: the previous code called binReader.Read(buf, 0, n) and ignored the
    // return value — a partial read would silently corrupt the table.
    // ReadBytes loops internally until n bytes or EOF, and we fail loudly on
    // a truncated stream instead of parsing garbage.
    var bufCompressed = binReader.ReadBytes(lengthCompressed);
    if (bufCompressed.Length != lengthCompressed) {
        throw new EndOfStreamException("Unexpected end of stream while reading the GRF file table.");
    }
    mFiletableUncompressed = Deflate.Decompress(bufCompressed);
    // Only read body?
    if (skipFiles == false) {
        for (int i = 0, offset = 0; i < fileCount; i++) {
            // Each entry starts with a NUL-terminated file path.
            var filepath = string.Empty;
            char c;
            var itemTableOffset = (uint)offset;
            while ((c = (char)mFiletableUncompressed[offset++]) != '\0') {
                filepath += c;
            }
            filepath = Tools.UnifyPath(filepath);
            var item = new RoGrfFileItem {
                TableOffset = itemTableOffset,
                Index = Files.Count,
                Filepath = filepath,
                Flags = mFiletableUncompressed[offset + 12]
            };
            // File or directory?
            if (item.IsFile) {
                item.LengthCompressed = BitConverter.ToUInt32(mFiletableUncompressed, offset);
                item.LengthCompressedAlign = BitConverter.ToUInt32(mFiletableUncompressed, offset + 4);
                item.LengthUnCompressed = BitConverter.ToUInt32(mFiletableUncompressed, offset + 8);
                // Offset is base offset + grf header
                item.DataOffset = BitConverter.ToUInt32(mFiletableUncompressed, offset + 13) + GrfHeaderLen;
                // from eAthena, DES encryption
                item.Cycle = 1;
                switch (item.Flags) {
                case 3:
                    // Cycle grows with the number of decimal digits of the compressed length.
                    for (var lop = 10; item.LengthCompressed >= lop; lop = lop * 10, item.Cycle++) {
                    }
                    break;
                case 5:
                    item.Cycle = 0;
                    break;
                default:
                    item.Cycle = -1;
                    break;
                }
            } else {
                // skip dirs
                offset += (int)GrfFileLen;
                continue;
            }
            // FIX: Some files in a tested grf are duplicated?
            // I cant remember grf version or something else..
            if (GetFileByHash(item.NameHash) != null) {
                // Duplicate file, just skip it
                offset += (int)GrfFileLen;
                continue;
            }
            Files.Add(item.NameHash, item);
            mStringlist.Add(item.NameHash);
            mFileDataLength += item.LengthCompressedAlign;
            offset += (int)GrfFileLen;
#if !DISABLE_GRF_EVENTS
            OnItemAdded(item, i, fileCount);
#endif
        }
    }
    return true;
}
/// <summary>
/// Reads the uncompressed body of versions between 0x100 and 0x103.
/// No compression of the body but a mess on filenames.
/// </summary>
/// <param name="binReader">Reader positioned at the start of the v1 file table.</param>
/// <param name="fileCount">Number of entries expected in the table.</param>
/// <param name="skipFiles">When true, only the raw table is read; no items are created.</param>
/// <returns>Always true.</returns>
private bool ReadFilesVersion1(BinaryReader binReader, int fileCount, bool skipFiles) {
    // The v1 table occupies the remainder of the stream.
    mFileTableLength = (ulong)(binReader.BaseStream.Length - binReader.BaseStream.Position);
    mFiletableUncompressed = binReader.ReadBytes((int)mFileTableLength);
    // Read only body?
    if (skipFiles == false) {
        for (int i = 0, offset = 0; i < fileCount; i++) {
            var itemTableOffset = (uint)offset;
            var entryType = mFiletableUncompressed[offset + 12];
            // offset2 points at the entry's numeric fields, past the name area.
            var offset2 = offset + BitConverter.ToInt32(mFiletableUncompressed, offset) + 4;
            // Type 0 entries (directories/empty slots) are skipped entirely.
            if (entryType == 0) {
                offset = offset2 + 17;
                continue;
            }
            var nameLen = mFiletableUncompressed[offset] - 6;
            // These are client limits
            if (nameLen >= GrfMaxFilenameLength) {
                throw new Exception("Filename on index " + i + " is " + nameLen + " bytes long, max length is " + GrfMaxFilenameLength + ".");
            }
            var nameBuf = new byte[nameLen];
            Buffer.BlockCopy(mFiletableUncompressed, offset + 6, nameBuf, 0, nameLen);
            // v1 filenames are stored obfuscated and must be decoded.
            var name = RoGrfHelper.DecodeFileName(nameBuf);
            // Check and fix the filename
            if (name.Contains('\0')) {
                name = name.Substring(0, name.IndexOf('\0'));
            }
            // NOTE(review): 37579 and 715 look like the known v1 table
            // de-obfuscation offsets — confirm against the GRF v1 spec.
            var compressedLenAligned = (uint)(BitConverter.ToInt32(mFiletableUncompressed, offset2 + 4) - 37579);
            var realLen = (uint)BitConverter.ToInt32(mFiletableUncompressed, offset2 + 8);
            var pos = (uint)BitConverter.ToInt32(mFiletableUncompressed, offset2 + 13);
            var cycle = 0;
            var compressedLen = 0;
            if (name.Contains(".")) {
                var ext = "." + name.Split('.').Last().ToLower();
                compressedLen = BitConverter.ToInt32(mFiletableUncompressed, offset2) - BitConverter.ToInt32(mFiletableUncompressed, offset2 + 8) - 715;
                // These extensions are stored without the per-digit DES cycle.
                if (ext != ".gnd" && ext != ".gat" && ext != ".act" && ext != ".str") {
                    cycle = 1;
                    for (int j = 10; compressedLen >= j; j *= 10) {
                        cycle++;
                    }
                }
            }
            // NOTE(review): cycle is computed above but never stored on the
            // item — it looks like item.Cycle should be assigned; confirm.
            name = Tools.UnifyPath(name);
            var item = new RoGrfFileItem {
                TableOffset = itemTableOffset,
                Index = Files.Count,
                Filepath = name,
                LengthCompressed = (uint)compressedLen,
                LengthCompressedAlign = compressedLenAligned,
                LengthUnCompressed = realLen,
                Flags = entryType,
                // base offset + header length
                DataOffset = pos + GrfHeaderLen
            };
            Files.Add(item.NameHash, item);
            mStringlist.Add(item.NameHash);
            mFileDataLength += item.LengthCompressedAlign;
            offset += (int)GrfFileLen;
#if !DISABLE_GRF_EVENTS
            OnItemAdded(item, i, fileCount);
#endif
        }
    }
    return (true);
}
/// <summary>Registers an already-existing file path in the project's file list.</summary>
/// <param name="path">Path of the file to register.</param>
public void AddExistingFile(string path) => Files.Add(path);
/// <summary>
/// Moves every selected file to the destination folder (optionally overwriting
/// existing files) and reports Success, Warning (partial) or Error.
/// </summary>
public override TaskStatus Run()
{
    Info("Moving files...");

    var allSucceeded = true;
    var anySucceeded = false;
    var selected = SelectFiles();

    // Iterate backwards: each moved file is removed from its task's list.
    for (var idx = selected.Length - 1; idx >= 0; idx--)
    {
        var current = selected[idx];
        var name = Path.GetFileName(current.Path);
        string target;

        if (string.IsNullOrEmpty(name))
        {
            // Kept as in the original: logged but not counted as a failure.
            ErrorFormat("File name of {0} is empty.", current);
            continue;
        }

        target = Path.Combine(DestFolder, name);

        try
        {
            if (File.Exists(target))
            {
                if (!Overwrite)
                {
                    ErrorFormat("Destination file {0} already exists.", target);
                    allSucceeded = false;
                    continue;
                }

                File.Delete(target);
            }

            File.Move(current.Path, target);
            Files.Add(new FileInf(target, Id));
            Workflow.FilesPerTask[current.TaskId].Remove(current);
            InfoFormat("File moved: {0} -> {1}", current.Path, target);
            anySucceeded = true;
        }
        catch (ThreadAbortException)
        {
            throw;
        }
        catch (Exception e)
        {
            ErrorFormat("An error occured while moving the file {0} to {1}", e, current.Path, target);
            allSucceeded = false;
        }
    }

    // Warning when only some files moved, Error when none did.
    var status = allSucceeded
        ? Status.Success
        : anySucceeded ? Status.Warning : Status.Error;

    Info("Task finished.");
    return new TaskStatus(status, false);
}
/// <summary>
/// Runs the approval process on the record identified by RecordId: notifies and
/// assigns the record to the AssignedTo user, then polls once per second until
/// the record is deleted, approved (trigger file appears), rejected, or the
/// workflow is stopped — running the matching OnDeleted/OnApproved/OnRejected/
/// OnStopped tasks before returning.
/// </summary>
// NOTE(review): the typos "reocrd" and "occured" below are inside runtime log
// strings and are left untouched — fixing them changes observable output.
// NOTE(review): two string literals below span a physical line break in the
// source as received (likely mangled "\n" escapes); left byte-identical.
public override TaskStatus Run() {
    Info($"Approval process starting on the reocrd {RecordId} ...");
    var status = Core.Status.Success;
    try {
        if (Workflow.IsApproval) {
            // Marker file whose presence signals that this task was approved.
            var trigger = Path.Combine(Workflow.ApprovalFolder, Workflow.Id.ToString(), Workflow.InstanceId.ToString(), Id.ToString(), "task.approved");
            if (string.IsNullOrEmpty(RecordId)) {
                Error("The record id setting is empty.");
                status = Core.Status.Error;
            } else if (string.IsNullOrEmpty(AssignedTo)) {
                Error("The assignedTo id setting is empty.");
                status = Core.Status.Error;
            } else {
                var record = Workflow.Database.GetRecord(RecordId);
                // Kept so the deletion notification can still name the record.
                var recordName = string.Empty;
                if (record == null) {
                    Error($"Record {RecordId} does not exist in the database.");
                    status = Core.Status.Error;
                } else {
                    recordName = record.Name;
                    var assignedTo = Workflow.Database.GetUser(AssignedTo);
                    if (assignedTo == null) {
                        Error($"The user {AssignedTo} does not exist in the database.");
                        status = Core.Status.Error;
                    } else {
                        // notification onStart
                        var assignedBy = Workflow.Database.GetUser(Workflow.StartedBy);
                        var notificationMessage = $"An approval process on the record {record.Name} has started. You must update that record by adding new file versions. 
You can also add comments on that record.";
                        var notification = new Notification { Message = notificationMessage, AssignedBy = assignedBy.GetDbId(), AssignedTo = assignedTo.GetDbId(), AssignedOn = DateTime.Now, IsRead = false };
                        Workflow.Database.InsertNotification(notification);
                        if (Workflow.WexflowEngine.EnableEmailNotifications) {
                            string subject = "Wexflow notification from " + assignedBy.Username;
                            string body = notificationMessage;
                            string host = Workflow.WexflowEngine.SmptHost;
                            int port = Workflow.WexflowEngine.SmtpPort;
                            bool enableSsl = Workflow.WexflowEngine.SmtpEnableSsl;
                            string smtpUser = Workflow.WexflowEngine.SmtpUser;
                            string smtpPassword = Workflow.WexflowEngine.SmtpPassword;
                            string from = Workflow.WexflowEngine.SmtpFrom;
                            Send(host, port, enableSsl, smtpUser, smtpPassword, assignedTo.Email, from, subject, body);
                        }
                        Info($"ApproveRecord.OnStart: User {assignedTo.Username} notified for the start of approval process on the record {record.GetDbId()} - {record.Name}.");
                        // assign the record
                        record.ModifiedBy = assignedBy.GetDbId();
                        record.AssignedTo = assignedTo.GetDbId();
                        record.AssignedOn = DateTime.Now;
                        Workflow.Database.UpdateRecord(record.GetDbId(), record);
                        Info($"Record {record.GetDbId()} - {record.Name} assigned to {assignedTo.Username}.");
                        IsWaitingForApproval = true;
                        Workflow.IsWaitingForApproval = true;
                        // Poll every second until one of the four exit conditions fires.
                        while (true) {
                            // notification onRecordDeleted
                            record = Workflow.Database.GetRecord(RecordId);
                            if (record == null) {
                                notificationMessage = $"The approval process on the record {recordName} was stopped because the record was deleted.";
                                notification = new Notification { Message = notificationMessage, AssignedBy = assignedBy.GetDbId(), AssignedTo = assignedTo.GetDbId(), AssignedOn = DateTime.Now, IsRead = false };
                                Workflow.Database.InsertNotification(notification);
                                if (Workflow.WexflowEngine.EnableEmailNotifications) {
                                    string subject = "Wexflow notification from " + assignedBy.Username;
                                    string body = notificationMessage;
                                    string host = Workflow.WexflowEngine.SmptHost;
                                    int port = Workflow.WexflowEngine.SmtpPort;
                                    bool enableSsl = Workflow.WexflowEngine.SmtpEnableSsl;
                                    string smtpUser = Workflow.WexflowEngine.SmtpUser;
                                    string smtpPassword = Workflow.WexflowEngine.SmtpPassword;
                                    string from = Workflow.WexflowEngine.SmtpFrom;
                                    Send(host, port, enableSsl, smtpUser, smtpPassword, assignedTo.Email, from, subject, body);
                                }
                                Info($"ApproveRecord.OnRecordDeleted: User {assignedTo.Username} notified for the removal of the record {RecordId}.");
                                var tasks = GetTasks(OnDeleted);
                                ClearFiles();
                                foreach (var task in tasks) {
                                    task.Run();
                                }
                                break;
                            }
                            // notification onApproved
                            if (File.Exists(trigger)) {
                                notificationMessage = $"The record {record.Name} was approved by the user {Workflow.ApprovedBy}.";
                                notification = new Notification { Message = notificationMessage, AssignedBy = assignedBy.GetDbId(), AssignedTo = assignedTo.GetDbId(), AssignedOn = DateTime.Now, IsRead = false };
                                Workflow.Database.InsertNotification(notification);
                                if (Workflow.WexflowEngine.EnableEmailNotifications) {
                                    string subject = "Wexflow notification from " + assignedBy.Username;
                                    string body = notificationMessage;
                                    string host = Workflow.WexflowEngine.SmptHost;
                                    int port = Workflow.WexflowEngine.SmtpPort;
                                    bool enableSsl = Workflow.WexflowEngine.SmtpEnableSsl;
                                    string smtpUser = Workflow.WexflowEngine.SmtpUser;
                                    string smtpPassword = Workflow.WexflowEngine.SmtpPassword;
                                    string from = Workflow.WexflowEngine.SmtpFrom;
                                    Send(host, port, enableSsl, smtpUser, smtpPassword, assignedTo.Email, from, subject, body);
                                }
                                Info($"ApproveRecord.OnApproved: User {assignedTo.Username} notified for the approval of the record {record.GetDbId()} - {record.Name}.");
                                // update the record
                                record.Approved = true;
                                Workflow.Database.UpdateRecord(record.GetDbId(), record);
                                Info($"Record {record.GetDbId()} - {record.Name} updated.");
                                var tasks = GetTasks(OnApproved);
                                // The latest record version is exposed to the
                                // OnApproved tasks as this task's only file,
                                // then removed again afterwards.
                                var latestVersion = Workflow.Database.GetLatestVersion(RecordId);
                                if (latestVersion != null) {
                                    ClearFiles();
                                    Files.Add(new FileInf(latestVersion.FilePath, Id));
                                }
                                foreach (var task in tasks) {
                                    task.Run();
                                }
                                if (latestVersion != null) {
                                    Files.RemoveAll(f => f.Path == latestVersion.FilePath);
                                }
                                break;
                            }
                            // notification onRejected
                            if (Workflow.IsRejected) {
                                notificationMessage = $"The record {record.Name} was rejected by the user {Workflow.RejectedBy}.";
                                notification = new Notification { Message = notificationMessage, AssignedBy = assignedBy.GetDbId(), AssignedTo = assignedTo.GetDbId(), AssignedOn = DateTime.Now, IsRead = false };
                                Workflow.Database.InsertNotification(notification);
                                if (Workflow.WexflowEngine.EnableEmailNotifications) {
                                    string subject = "Wexflow notification from " + assignedBy.Username;
                                    string body = notificationMessage;
                                    string host = Workflow.WexflowEngine.SmptHost;
                                    int port = Workflow.WexflowEngine.SmtpPort;
                                    bool enableSsl = Workflow.WexflowEngine.SmtpEnableSsl;
                                    string smtpUser = Workflow.WexflowEngine.SmtpUser;
                                    string smtpPassword = Workflow.WexflowEngine.SmtpPassword;
                                    string from = Workflow.WexflowEngine.SmtpFrom;
                                    Send(host, port, enableSsl, smtpUser, smtpPassword, assignedTo.Email, from, subject, body);
                                }
                                Info($"ApproveRecord.OnRejected: User {assignedTo.Username} notified for the rejection of the record {record.GetDbId()} - {record.Name}.");
                                // update the record
                                record.Approved = false;
                                Workflow.Database.UpdateRecord(record.GetDbId(), record);
                                Info($"Record {record.GetDbId()} - {record.Name} updated.");
                                var tasks = GetTasks(OnRejected);
                                var latestVersion = Workflow.Database.GetLatestVersion(RecordId);
                                if (latestVersion != null) {
                                    ClearFiles();
                                    Files.Add(new FileInf(latestVersion.FilePath, Id));
                                }
                                foreach (var task in tasks) {
                                    task.Run();
                                }
                                if (latestVersion != null) {
                                    Files.RemoveAll(f => f.Path == latestVersion.FilePath);
                                }
                                break;
                            }
                            // notification onStopped
                            // NOTE(review): unlike the other branches, no email
                            // is sent here — confirm whether that is intended.
                            if (IsStopped) {
                                notificationMessage = $"The approval process on the record {record.Name} was stopped by the user {Workflow.StoppedBy}.";
                                notification = new Notification { Message = notificationMessage, AssignedBy = assignedBy.GetDbId(), AssignedTo = assignedTo.GetDbId(), AssignedOn = DateTime.Now, IsRead = false };
                                Workflow.Database.InsertNotification(notification);
                                Info($"ApproveRecord.OnStopped: User {assignedTo.Username} notified for the stop of the approval process of the record {record.GetDbId()} - {record.Name}.");
                                var tasks = GetTasks(OnStopped);
                                var latestVersion = Workflow.Database.GetLatestVersion(RecordId);
                                if (latestVersion != null) {
                                    ClearFiles();
                                    Files.Add(new FileInf(latestVersion.FilePath, Id));
                                }
                                foreach (var task in tasks) {
                                    task.Run();
                                }
                                if (latestVersion != null) {
                                    Files.RemoveAll(f => f.Path == latestVersion.FilePath);
                                }
                                break;
                            }
                            Thread.Sleep(1000);
                        }
                        IsWaitingForApproval = false;
                        Workflow.IsWaitingForApproval = false;
                        if (!Workflow.IsRejected && !IsStopped) {
                            InfoFormat("Task approved: {0}", trigger);
                        } else if (!IsStopped) {
                            Info("This workflow has been rejected.");
                        }
                        // Clean up the approval marker for the next run.
                        if (File.Exists(trigger)) {
                            File.Delete(trigger);
                        }
                    }
                }
            }
        } else {
            Error("This workflow is not an approval workflow. 
Mark this workflow as an approval workflow to use this task.");
            status = Core.Status.Error;
        }
    } catch (ThreadAbortException) {
        // Workflow aborted mid-wait: notify, run the OnStopped tasks, rethrow.
        var record = Workflow.Database.GetRecord(RecordId);
        if (record != null) {
            var assignedBy = Workflow.Database.GetUser(Workflow.StartedBy);
            var assignedTo = Workflow.Database.GetUser(AssignedTo);
            if (assignedBy != null && assignedTo != null) {
                var notificationMessage = $"The approval process on the record {record.Name} was stopped by the user {Workflow.StoppedBy}.";
                var notification = new Notification { Message = notificationMessage, AssignedBy = assignedBy.GetDbId(), AssignedTo = assignedTo.GetDbId(), AssignedOn = DateTime.Now, IsRead = false };
                Workflow.Database.InsertNotification(notification);
                if (Workflow.WexflowEngine.EnableEmailNotifications) {
                    string subject = "Wexflow notification from " + assignedBy.Username;
                    string body = notificationMessage;
                    string host = Workflow.WexflowEngine.SmptHost;
                    int port = Workflow.WexflowEngine.SmtpPort;
                    bool enableSsl = Workflow.WexflowEngine.SmtpEnableSsl;
                    string smtpUser = Workflow.WexflowEngine.SmtpUser;
                    string smtpPassword = Workflow.WexflowEngine.SmtpPassword;
                    string from = Workflow.WexflowEngine.SmtpFrom;
                    Send(host, port, enableSsl, smtpUser, smtpPassword, assignedTo.Email, from, subject, body);
                }
                Info($"ApproveRecord.OnStopped: User {assignedTo.Username} notified for the stop of the approval process of the record {record.GetDbId()} - {record.Name}.");
                var tasks = GetTasks(OnStopped);
                var latestVersion = Workflow.Database.GetLatestVersion(RecordId);
                if (latestVersion != null) {
                    ClearFiles();
                    Files.Add(new FileInf(latestVersion.FilePath, Id));
                }
                foreach (var task in tasks) {
                    task.Run();
                }
                if (latestVersion != null) {
                    Files.RemoveAll(f => f.Path == latestVersion.FilePath);
                }
            }
        }
        throw;
    } catch (Exception e) {
        Error("An error occured during approval process.", e);
        status = Core.Status.Error;
    }
    Info("Approval process finished.");
    return (new TaskStatus(status));
}
/// <summary>
/// Parses compiler-style command-line arguments and response files (@file),
/// populating the corresponding options on this instance.
/// </summary>
/// <param name="args">The raw argument strings to parse.</param>
/// <param name="respFiles">Response files already parsed; used to break @file recursion. May be null on the initial call.</param>
public void ParseArgs(IEnumerable <string> args, List <string> respFiles = null)
{
    // Arguments that request the usage/help screen.
    var helpStrings = new string[] { "/?", "-?", "/h", "-h" };
    foreach (var a in args)
    {
        var arg = a.Trim();
        // '#' marks a comment line (relevant inside response files).
        if (arg.StartsWith("#"))
        {
            continue;
        }
        if (arg.StartsWith("/main:"))
        {
            MainName = Main = arg.Substring(6).Trim('"');
            // only override the target kind if it's currently a DLL
            if (Target == PEFileKinds.Dll)
            {
                Target = PEFileKinds.ConsoleApplication;
            }
        }
        else if (arg.StartsWith("/out:"))
        {
            Output = arg.Substring(5).Trim('"');
        }
        else if (arg.StartsWith("/target:"))
        {
            string tgt = arg.Substring(8).Trim('"');
            switch (tgt)
            {
                case "exe":
                    Target = PEFileKinds.ConsoleApplication;
                    break;
                case "winexe":
                    Target = PEFileKinds.WindowApplication;
                    break;
                default:
                    Target = PEFileKinds.Dll;
                    break;
            }
        }
        else if (arg.StartsWith("/platform:"))
        {
            string plat = arg.Substring(10).Trim('"');
            switch (plat)
            {
                case "x86":
                    Platform = IKVM.Reflection.PortableExecutableKinds.ILOnly | IKVM.Reflection.PortableExecutableKinds.Required32Bit;
                    Machine = IKVM.Reflection.ImageFileMachine.I386;
                    break;
                case "x64":
                    Platform = IKVM.Reflection.PortableExecutableKinds.ILOnly | IKVM.Reflection.PortableExecutableKinds.PE32Plus;
                    Machine = IKVM.Reflection.ImageFileMachine.AMD64;
                    break;
                default:
                    // Unrecognized platform: fall back to ILOnly with an I386 image.
                    Platform = IKVM.Reflection.PortableExecutableKinds.ILOnly;
                    Machine = IKVM.Reflection.ImageFileMachine.I386;
                    break;
            }
        }
        else if (arg.StartsWith("/win32icon:"))
        {
            Win32Icon = arg.Substring(11).Trim('"');
        }
        else if (arg.StartsWith("/version:"))
        {
            Version = arg.Substring(9).Trim('"');
        }
        else if (arg.StartsWith("/errfmt:"))
        {
            ErrorMessageFormat = arg.Substring(8);
        }
        else if (arg.StartsWith("/embed"))
        {
            Embed = true;
        }
        else if (arg.StartsWith("/standalone"))
        {
            Standalone = true;
        }
        else if (arg.StartsWith("/mta"))
        {
            UseMta = true;
        }
        else if (Array.IndexOf(helpStrings, arg) >= 0)
        {
            ConsoleOps.Usage(true);
        }
        else
        {
            if (arg.StartsWith("@"))
            {
                // '@file' pulls in more arguments from a response file, one per line.
                var respFile = Path.GetFullPath(arg.Substring(1));
                if (respFiles == null)
                {
                    respFiles = new List <string>();
                }
                // Track parsed response files so a file that references itself
                // (directly or indirectly) cannot recurse forever.
                if (!respFiles.Contains(respFile))
                {
                    respFiles.Add(respFile);
                    ParseArgs(File.ReadAllLines(respFile), respFiles);
                }
                else
                {
                    ConsoleOps.Warning("Already parsed response file '{0}'", arg.Substring(1));
                }
            }
            else
            {
                // Anything that is not a switch is treated as an input file.
                Files.Add(arg);
            }
        }
    }
}
/// <summary>
/// Registers an uploaded file under the given form field name.
/// </summary>
/// <param name="name">The form field name the file was posted under.</param>
/// <param name="value">The uploaded file payload.</param>
public void Add(string name, HttpFile value)
{
    var entry = new ValueFile
    {
        Name = name,
        Value = value
    };
    Files.Add(entry);
}
/// <summary>
/// Computes the SHA-1 hash of every selected file, writes the hashes to a timestamped
/// XML report in the workflow temp folder, and loads the report into the task's files.
/// </summary>
/// <returns>Success, Warning (partial success), or Error.</returns>
public override TaskStatus Run()
{
    Info("Generating SHA-1 hashes...");
    var allSucceeded = true;
    var anySucceeded = false;
    var selectedFiles = SelectFiles();
    if (selectedFiles.Length > 0)
    {
        var hashReportPath = Path.Combine(Workflow.WorkflowTempFolder, string.Format("SHA1_{0:yyyy-MM-dd-HH-mm-ss-fff}.xml", DateTime.Now));
        var report = new XDocument(new XElement("Files"));
        foreach (FileInf file in selectedFiles)
        {
            try
            {
                var hash = GetSha1(file.Path);
                if (report.Root != null)
                {
                    report.Root.Add(new XElement("File",
                        new XAttribute("path", file.Path),
                        new XAttribute("name", file.FileName),
                        new XAttribute("sha1", hash)));
                }
                InfoFormat("SHA-1 hash of the file {0} is {1}", file.Path, hash);
                anySucceeded = true;
            }
            catch (ThreadAbortException)
            {
                throw;
            }
            catch (Exception e)
            {
                ErrorFormat("An error occured while generating the SHA-1 hash of the file {0}", e, file.Path);
                allSucceeded = false;
            }
        }
        report.Save(hashReportPath);
        Files.Add(new FileInf(hashReportPath, Id));
    }
    // Success if everything worked, Warning on partial success, Error otherwise.
    var status = WorkflowStatus.Success;
    if (!allSucceeded)
    {
        status = anySucceeded ? WorkflowStatus.Warning : WorkflowStatus.Error;
    }
    Info("Task finished.");
    return new TaskStatus(status, false);
}
/// <summary>
/// Scans <paramref name="dir"/> (top level only) for ROM/patch files whose filename starts
/// with <paramref name="prefix"/> and registers them as comparison candidates, then sorts
/// the candidate list newest-first.
/// </summary>
/// <param name="dir">Directory to scan (non-recursive).</param>
/// <param name="prefix">Required filename prefix.</param>
/// <returns>Always true.</returns>
public bool Scan(string dir, string prefix)
{
    // Minimum plausible ROM size, derived from the current ROM's extended address.
    uint smallest_rom_size = U.toOffset(Program.ROM.RomInfo.extends_address);
    string[] files = U.Directory_GetFiles_Safe(dir, "*", SearchOption.TopDirectoryOnly);
    foreach (string filepath in files)
    {
        string filename = Path.GetFileNameWithoutExtension(filepath);
        if (filename.IndexOf(prefix) != 0)
        {//filename does not start with the prefix.
            continue;
        }
        string ext = U.GetFilenameExt(filepath);
        if (ext == ".GBA" || ext == ".BIN")
        {
            long filesize = U.GetFileSize(filepath);
            if (filesize < smallest_rom_size)
            {//file is too small to be a ROM.
                continue;
            }
            if (filesize > 32 * 1024 * 1024)
            {//file is too large (> 32 MB).
                continue;
            }
        }
        else if (ext == ".7Z" || ext == ".UPS")
        {
            long filesize = U.GetFileSize(filepath);
            if (filesize <= 10)
            {//file is too small to be a valid archive/patch.
                continue;
            }
        }
        else
        {//unknown file type.
            continue;
        }
        if (IsAlreadyRegist(filename))
        {
            continue;
        }
        if (IsTempFile(filename, prefix))
        {
            continue;
        }
        // NOTE: FileInfo here is the project's own type (FilePath/Date/Undo), not System.IO.FileInfo.
        FileInfo fi = new FileInfo();
        fi.FilePath = filepath;
        fi.Date = GetFileDate(filepath);
        fi.Undo = new Undo();
        Files.Add(fi);
    }
    if (Program.ROM.Modified == true)
    {//if the current ROM has unsaved changes, the in-memory ROM itself is offered as a comparison target.
        FileInfo fi = new FileInfo();
        fi.FilePath = Program.ROM.Filename;
        fi.Date = File.GetLastWriteTime(Program.ROM.Filename);
        fi.Undo = Program.Undo;
        Files.Add(fi);
    }
    //sort by date, newest first.
    this.Files.Sort((a, b) => { return(b.Date.CompareTo(a.Date)); });
    return(true);
}
/// <summary>
/// Creates the project file node for <paramref name="type"/>, wiring up any related
/// designer artifacts (BAML/XAML, WinForms designer, .resx, .settings) as dependent files.
/// </summary>
/// <param name="type">The type to create a project file for.</param>
/// <param name="filenameCreator">Produces unique filenames for the generated files.</param>
ProjectFile CreateTypeProjectFile(TypeDef type, FilenameCreator filenameCreator)
{
    // XAML-backed type: pair the code-behind file with its BAML file.
    var bamlFile = TryGetBamlFile(type);
    if (!(bamlFile is null))
    {
        var filename = filenameCreator.Create(GetTypeExtension(type), type.FullName);
        TypeProjectFile newFile;
        var isAppType = DotNetUtils.IsSystemWindowsApplication(type);
        if (!Options.Decompiler.CanDecompile(DecompilationType.PartialType))
        {
            newFile = new TypeProjectFile(type, filename, Options.DecompilationContext, Options.Decompiler, createDecompilerOutput);
        }
        else
        {
            newFile = new XamlTypeProjectFile(type, filename, Options.DecompilationContext, Options.Decompiler, createDecompilerOutput);
        }
        newFile.DependentUpon = bamlFile;
        if (isAppType && DotNetUtils.IsStartUpClass(type))
        {
            // The application class is defined by its XAML; clear the explicit StartupObject.
            bamlFile.IsAppDef = true;
            StartupObject = null;
        }
        if (isAppType)
        {
            appTypeProjFile = newFile;
        }
        return(newFile);
    }
    const string DESIGNER = ".Designer";
    var resxFile = TryGetResXFile(type);
    if (DotNetUtils.IsWinForm(type))
    {
        // WinForms type: emit the main file plus a ".Designer" companion,
        // both named after the .resx file when one exists.
        var fname = !(resxFile is null) ? Path.GetFileNameWithoutExtension(resxFile.Filename) : type.Name.String;
        var filename = filenameCreator.CreateFromNamespaceName(GetTypeExtension(type), type.ReflectionNamespace, fname);
        var dname = filenameCreator.CreateFromNamespaceName(GetTypeExtension(type), type.ReflectionNamespace, fname + DESIGNER);
        var newFile = new WinFormsProjectFile(type, filename, Options.DecompilationContext, Options.Decompiler, createDecompilerOutput);
        if (!(resxFile is null))
        {
            resxFile.DependentUpon = newFile;
        }
        var winFormsDesignerFile = new WinFormsDesignerProjectFile(newFile, dname, createDecompilerOutput);
        winFormsDesignerFile.DependentUpon = newFile;
        Files.Add(winFormsDesignerFile);
        return(newFile);
    }
    else if (!(resxFile is null))
    {
        // Non-WinForms type with resources: a generated ".Designer" file backed by the .resx.
        var filename = filenameCreator.CreateFromNamespaceName(GetTypeExtension(type), type.ReflectionNamespace, Path.GetFileNameWithoutExtension(resxFile.Filename) + DESIGNER);
        var newFile = new TypeProjectFile(type, filename, Options.DecompilationContext, Options.Decompiler, createDecompilerOutput);
        newFile.DependentUpon = resxFile;
        newFile.AutoGen = true;
        newFile.DesignTime = true;
        resxFile.Generator = type.IsPublic ? "PublicResXFileCodeGenerator" : "ResXFileCodeGenerator";
        resxFile.LastGenOutput = newFile;
        return(newFile);
    }
    var bt = type.BaseType;
    if (!(bt is null) && bt.FullName == "System.Configuration.ApplicationSettingsBase")
    {
        // Settings class: emit a ".Designer" + ".settings" pair (plus the partial type
        // file when the decompiler supports partial types).
        var designerFilename = filenameCreator.Create(DESIGNER + GetTypeExtension(type), type.FullName);
        var settingsFilename = filenameCreator.Create(".settings", type.FullName);
        ProjectFile designerTypeFile;
        if (Options.Decompiler.CanDecompile(DecompilationType.PartialType))
        {
            var typeFilename = filenameCreator.Create(GetTypeExtension(type), type.FullName);
            var settingsTypeFile = new SettingsTypeProjectFile(type, typeFilename, Options.DecompilationContext, Options.Decompiler, createDecompilerOutput);
            designerTypeFile = new SettingsDesignerTypeProjectFile(settingsTypeFile, designerFilename, createDecompilerOutput);
            Files.Add(settingsTypeFile);
        }
        else
        {
            designerTypeFile = new TypeProjectFile(type, designerFilename, Options.DecompilationContext, Options.Decompiler, createDecompilerOutput);
        }
        var settingsFile = new SettingsProjectFile(type, settingsFilename);
        designerTypeFile.DependentUpon = settingsFile;
        designerTypeFile.AutoGen = true;
        designerTypeFile.DesignTimeSharedInput = true;
        settingsFile.Generator = type.IsPublic ? "PublicSettingsSingleFileGenerator" : "SettingsSingleFileGenerator";
        settingsFile.LastGenOutput = designerTypeFile;
        Files.Add(settingsFile);
        return(designerTypeFile);
    }
    // Plain type: a single project file with no companions.
    var newFilename = filenameCreator.Create(GetTypeExtension(type), type.FullName);
    return(new TypeProjectFile(type, newFilename, Options.DecompilationContext, Options.Decompiler, createDecompilerOutput));
}
/// <summary>
/// Packs all selected files into a single ZIP archive in the workflow temp folder
/// and loads the archive into the task's files.
/// </summary>
/// <returns>Success when the archive was created (or no files were selected); Error otherwise.</returns>
public override TaskStatus Run()
{
    Info("Zipping files...");
    var succeeded = true;
    var selectedFiles = SelectFiles();
    if (selectedFiles.Length > 0)
    {
        var archivePath = Path.Combine(Workflow.WorkflowTempFolder, ZipFileName);
        try
        {
            using (var zipStream = new ZipOutputStream(File.Create(archivePath)))
            {
                // 0 = store only ... 9 = best compression.
                zipStream.SetLevel(9);
                var copyBuffer = new byte[4096];
                foreach (FileInf file in selectedFiles)
                {
                    // GetFileName keeps the entry path relative, which keeps the archive XP-compatible.
                    var entry = new ZipEntry(Path.GetFileName(file.Path)) { DateTime = DateTime.Now };
                    // CRC and size are computed by the library for seekable streams.
                    zipStream.PutNextEntry(entry);
                    using (FileStream source = File.OpenRead(file.Path))
                    {
                        int read;
                        while ((read = source.Read(copyBuffer, 0, copyBuffer.Length)) > 0)
                        {
                            zipStream.Write(copyBuffer, 0, read);
                        }
                    }
                }
                // Finish appends the ZIP trailing records (required for a valid archive);
                // Close unlocks the file. Both would also run via the using block.
                zipStream.Finish();
                zipStream.Close();
                InfoFormat("Zip {0} created.", archivePath);
                Files.Add(new FileInf(archivePath, Id));
            }
        }
        catch (ThreadAbortException)
        {
            throw;
        }
        catch (Exception e)
        {
            ErrorFormat("An error occured while creating the Zip {0}", e, archivePath);
            succeeded = false;
        }
    }
    var status = succeeded ? Status.Success : Status.Error;
    Info("Task finished.");
    return new TaskStatus(status, false);
}
/// <summary>
/// Processes files already present in the watched folder (running the OnFileFound tasks
/// for each), then starts a FileSystemWatcher and blocks this thread forever while the
/// watcher raises events.
/// </summary>
private void InitFileSystemWatcher()
{
    Info("Checking existing files...");
    var files = GetFiles();
    foreach (var file in files)
    {
        InfoFormat("FileSystemWatcher.OnFound started for {0}", file);
        try
        {
            // Stage exactly this one file for the OnFileFound tasks, then unstage it.
            ClearFiles();
            Files.Add(new FileInf(file, Id));
            var tasks = GetTasks(OnFileFound);
            foreach (var task in tasks)
            {
                task.Logs.Clear();
                task.Run();
                CurrentLogs.AddRange(task.Logs);
            }
            Files.RemoveAll(f => f.Path == file);
        }
        catch (IOException ex) when((ex.HResult & 0x0000FFFF) == 32)
        {
            // Win32 error 32: the file is locked by another process; skip it quietly.
            Logger.InfoFormat("There is a sharing violation for the file {0}.", file);
        }
        catch (Exception ex)
        {
            ErrorFormat("An error while triggering FileSystemWatcher.OnFound on the file {0}. Message: {1}", file, ex.Message);
        }
        Info("FileSystemWatcher.OnFound finished.");
    }
    try
    {
        // Persist the logs accumulated so far into this workflow instance's database entry.
        var entry = Workflow.Database.GetEntry(Workflow.Id, Workflow.InstanceId);
        entry.Logs = string.Join("\r\n", CurrentLogs);
        Workflow.Database.UpdateEntry(entry.GetDbId(), entry);
    }
    catch (Exception ex)
    {
        ErrorFormat("An error while updating FileSystemWatcher.OnCreated database entry.", ex);
    }
    Info("Checking existing files finished.");
    Info("Initializing FileSystemWatcher...");
    Watcher = new IO.FileSystemWatcher
    {
        Path = FolderToWatch,
        Filter = Filter,
        IncludeSubdirectories = IncludeSubFolders
    };
    // Add event handlers.
    Watcher.Created += OnCreated;
    Watcher.Changed += OnChanged;
    Watcher.Deleted += OnDeleted;
    // Begin watching.
    Watcher.EnableRaisingEvents = true;
    InfoFormat("FileSystemWatcher.Path={0}", Watcher.Path);
    InfoFormat("FileSystemWatcher.Filter={0}", Watcher.Filter);
    InfoFormat("FileSystemWatcher.EnableRaisingEvents={0}", Watcher.EnableRaisingEvents);
    Info("FileSystemWatcher Initialized.");
    Info("Begin watching ...");
    CurrentLogs.AddRange(Logs);
    // Keep this thread alive; the watcher raises its events on other threads.
    while (true)
    {
        Thread.Sleep(1);
    }
}
/// <summary>
/// Adds a file supplied by a COM caller to the file list.
/// </summary>
/// <param name="file">The file object, marshaled as IDispatch; must be a CloudSignFile.</param>
public void AddFile([MarshalAs(UnmanagedType.IDispatch)] object file)
{
    // COM hands the file in as a plain object; downcast to the concrete type.
    var signFile = (CloudSignFile)file;
    Files.Add(signFile);
}
/// <summary>
/// Launches a process, either once (when the task generates no files) or once per selected
/// file with placeholder variables substituted into the argument string, collecting the
/// produced output files.
/// </summary>
public override TaskStatus Run()
{
    Info("Launching process...");
    // NOTE(review): VarFileName is tested twice in this validation; the first occurrence
    // may have been intended as VarFilePath — confirm against the task documentation.
    if (GeneratesFiles && !(ProcessArguments.Contains(VarFileName) && (ProcessArguments.Contains(VarOutput) && (ProcessArguments.Contains(VarFileName) || ProcessArguments.Contains(VarFileNameWithoutExtension)))))
    {
        Error("Error in process command. Please read the documentation.");
        return(TaskStatus.Failed);
    }
    if (!GeneratesFiles)
    {
        // Single launch, no per-file substitution and no output collection.
        var startSuccessful = StartProcess(ProcessPath, ProcessArguments, HideGui);
        return(startSuccessful ? TaskStatus.Completed : TaskStatus.Failed);
    }
    foreach (FileInf file in SelectFiles())
    {
        string cmd;
        string outputFilePath;
        try
        {
            // Substitute the (quoted) input file path into the command line.
            cmd = ProcessArguments.Replace(string.Format("{{{0}}}", VarFilePath), string.Format("\"{0}\"", file.Path));
            // Matches the {$output:...} placeholder that names the produced file.
            const string outputRegexPattern = @"{\$output:(?:\$fileNameWithoutExtension|\$fileName)(?:[a-zA-Z0-9._-]*})";
            var outputRegex = new Regex(outputRegexPattern);
            var m = outputRegex.Match(cmd);
            if (m.Success)
            {
                string val = m.Value;
                outputFilePath = val;
                if (outputFilePath.Contains(VarFileNameWithoutExtension))
                {
                    outputFilePath = outputFilePath.Replace(VarFileNameWithoutExtension, Path.GetFileNameWithoutExtension(file.FileName));
                }
                else if (outputFilePath.Contains(VarFileName))
                {
                    outputFilePath = outputFilePath.Replace(VarFileName, file.FileName);
                }
                // Resolve the output into the workflow temp folder and strip the placeholder syntax.
                outputFilePath = outputFilePath.Replace("{" + VarOutput + ":", Workflow.WorkflowTempFolder.Trim('\\') + "\\");
                outputFilePath = outputFilePath.Trim('}');
                cmd = cmd.Replace(val, "\"" + outputFilePath + "\"");
            }
            else
            {
                Error("Error in process command. Please read the documentation.");
                return(TaskStatus.Failed);
            }
        }
        catch (ThreadAbortException)
        {
            throw;
        }
        catch (Exception e)
        {
            ErrorFormat("Error in process command. Please read the documentation. 
Error: {0}", e.Message);
            return(TaskStatus.Failed);
        }
        if (StartProcess(ProcessPath, cmd, HideGui))
        {
            Files.Add(new FileInf(outputFilePath, Id));
            if (LoadAllFiles)
            {
                // Optionally pick up every other file the process left in the temp folder.
                var files = Directory.GetFiles(Workflow.WorkflowTempFolder, "*.*", SearchOption.AllDirectories);
                foreach (var f in files)
                {
                    if (f != outputFilePath)
                    {
                        Files.Add(new FileInf(f, Id));
                    }
                }
            }
        }
    }
    Info("Task finished.");
    return(TaskStatus.Completed);
}
/// <summary>
/// Appends the given sound file to this folder's file list.
/// </summary>
/// <param name="file">The sound pack file to append.</param>
public void AddFile(SoundPackFile file) => Files.Add(file);
/// <summary>
/// Copies every selected file to the destination folder, optionally preserving the
/// directory structure below PreserveFolderStructFrom and creating missing directories.
/// </summary>
/// <returns>Success, Warning (partial success), or Error.</returns>
public override TaskStatus Run()
{
    Info("Copying files...");
    var allCopied = true;
    var anyCopied = false;
    foreach (FileInf file in SelectFiles())
    {
        // Work out where this file should land.
        string targetPath;
        var keepStructure = !string.IsNullOrWhiteSpace(PreserveFolderStructFrom)
            && file.Path.StartsWith(PreserveFolderStructFrom, StringComparison.InvariantCultureIgnoreCase);
        if (keepStructure)
        {
            // Keep the directory tree below PreserveFolderStructFrom intact.
            var relativeDir = Path.GetDirectoryName(file.Path);
            relativeDir = relativeDir.Length > PreserveFolderStructFrom.Length
                ? relativeDir.Remove(0, PreserveFolderStructFrom.Length)
                : string.Empty;
            if (relativeDir.StartsWith(Path.DirectorySeparatorChar))
            {
                relativeDir = relativeDir.Substring(1);
            }
            targetPath = Path.Combine(DestFolder, relativeDir, file.FileName);
        }
        else
        {
            targetPath = Path.Combine(DestFolder, file.FileName);
        }
        try
        {
            var targetDir = Path.GetDirectoryName(targetPath);
            if (AllowCreateDirectory && !Directory.Exists(targetDir))
            {
                InfoFormat("Creating directory: {0}", targetDir);
                Directory.CreateDirectory(targetDir);
            }
            File.Copy(file.Path, targetPath, Overwrite);
            Files.Add(new FileInf(targetPath, Id));
            InfoFormat("File copied: {0} -> {1}", file.Path, targetPath);
            anyCopied = true;
        }
        catch (ThreadAbortException)
        {
            throw;
        }
        catch (Exception e)
        {
            ErrorFormat("An error occured while copying the file {0} to {1}.", e, file.Path, targetPath);
            allCopied = false;
        }
    }
    // Success if everything worked, Warning on partial success, Error otherwise.
    var status = Status.Success;
    if (!allCopied)
    {
        status = anyCopied ? Status.Warning : Status.Error;
    }
    Info("Task finished.");
    return new TaskStatus(status, false);
}
/// <summary>
/// Packs all selected files into a gzipped tar (.tgz) archive in the workflow temp folder
/// and loads the archive into the task's files.
/// </summary>
public override TaskStatus Run()
{
    Info("Creating tgz archive...");
    bool success = true;
    var files = SelectFiles();
    if (files.Length > 0)
    {
        var tgzPath = Path.Combine(Workflow.WorkflowTempFolder, TgzFileName);
        try
        {
            // Tar stream layered over the gzip stream; disposal order matters, hence the nesting.
            using (var gz = new GZipOutputStream(File.Create(tgzPath)))
                using (var tar = new TarOutputStream(gz))
                {
                    foreach (FileInf file in files)
                    {
                        using (Stream inputStream = File.OpenRead(file.Path))
                        {
                            string tarName = file.FileName;
                            long fileSize = inputStream.Length;
                            // Create a tar entry named as appropriate. You can set the name to anything,
                            // but avoid names starting with drive or UNC.
                            var entry = TarEntry.CreateTarEntry(tarName);
                            // Must set size, otherwise TarOutputStream will fail when output exceeds.
                            entry.Size = fileSize;
                            // Add the entry to the tar stream, before writing the data.
                            tar.PutNextEntry(entry);
                            var localBuffer = new byte[32 * 1024];
                            while (true)
                            {
                                var numRead = inputStream.Read(localBuffer, 0, localBuffer.Length);
                                if (numRead <= 0)
                                {
                                    break;
                                }
                                tar.Write(localBuffer, 0, numRead);
                            }
                        }
                        tar.CloseEntry();
                    }
                    // Finish/Close aren't needed strictly as the using statement does this automatically.
                    tar.Close();
                    // Finish is important to ensure trailing information for a Zip file is appended. Without this
                    // the created file would be invalid.
                    gz.Finish();
                    // Close is important to wrap things up and unlock the file.
                    gz.Close();
                    InfoFormat("Tgz {0} created.", tgzPath);
                    Files.Add(new FileInf(tgzPath, Id));
                }
        }
        catch (ThreadAbortException)
        {
            throw;
        }
        catch (Exception e)
        {
            ErrorFormat("An error occured while creating the Tar {0}", e, tgzPath);
            success = false;
        }
    }
    var status = Status.Success;
    if (!success)
    {
        status = Status.Error;
    }
    Info("Task finished.");
    return(new TaskStatus(status, false));
}
/// <summary>
/// Registers a file parameter on this request.
/// </summary>
/// <param name="file">The file parameter to attach.</param>
/// <returns>This request, enabling fluent chaining.</returns>
private IRestRequest AddFile(FileParameter file)
{
    Files.Add(file);
    return this;
}
/// <summary>
/// Method creates the predefined CodeDOM class "IDClass" containing the cim_ID field,
/// its public ID property, and a constant mandatory-flag with its read-only property,
/// then adds the resulting compile unit to Files.
/// </summary>
private void CreateParentClass()
{
    CodeCompileUnit unit = new CodeCompileUnit();
    //namespace
    CodeNamespace nameSpace = new CodeNamespace(defaultNS);
    unit.Namespaces.Add(nameSpace);
    //namespace imports
    nameSpace.Imports.Add(new CodeNamespaceImport("System"));
    //class
    CodeTypeDeclaration file = new CodeTypeDeclaration();
    file.IsClass = true;
    file.Name = "IDClass";
    file.TypeAttributes = TypeAttributes.Public;
    //create the private backing field for the ID
    string fieldName = "cim_ID";
    CodeMemberField att = new CodeMemberField(typeof(string), fieldName);
    att.Attributes = MemberAttributes.Private;
    att.Comments.Add(new CodeCommentStatement("ID used for reference purposes", true));
    file.Members.Add(att);
    //create the public ID property with get/set snippets over the backing field
    CodeMemberProperty prop = new CodeMemberProperty();
    prop.Attributes = MemberAttributes.Public;
    prop.Type = new CodeTypeReference(typeof(string));
    prop.Name = "ID";
    prop.HasGet = true;
    prop.GetStatements.Add(new CodeSnippetExpression("return this." + fieldName));
    prop.HasSet = true;
    prop.SetStatements.Add(new CodeSnippetExpression("this." + fieldName + " = value"));
    file.Members.Add(prop);
    //private constant flag telling consumers whether the ID is mandatory
    CodeMemberField fieldIsMandatory = new CodeMemberField();
    fieldIsMandatory.Attributes = MemberAttributes.Private | MemberAttributes.Const;
    fieldIsMandatory.Type = new CodeTypeReference(typeof(bool));
    fieldIsMandatory.Name = "is" + StringManipulationManager.CreateHungarianNotation(att.Name.Substring(4)) + "Mandatory";
    //switch case and set true or false
    fieldIsMandatory.InitExpression = new CodePrimitiveExpression(true);
    file.Members.Add(fieldIsMandatory);
    //public read-only property exposing the mandatory flag
    CodeMemberProperty propIsMandatory = new CodeMemberProperty();
    propIsMandatory.Attributes = MemberAttributes.Public;
    propIsMandatory.Type = new CodeTypeReference(typeof(bool));
    propIsMandatory.Name = "Is" + StringManipulationManager.CreateHungarianNotation(att.Name.Substring(4)) + "Mandatory";
    propIsMandatory.HasGet = true;
    propIsMandatory.HasSet = false;
    propIsMandatory.GetStatements.Add(new CodeSnippetExpression("return " + fieldIsMandatory.Name));
    file.Members.Add(propIsMandatory);
    nameSpace.Types.Add(file);
    Files.Add(unit);
}
/// <summary>
/// Transforms each selected XML file with the configured XSLT stylesheet (1.0 via
/// XslCompiledTransform, 2.0 via Saxon), then applies any embedded WexflowProcessing
/// directives (renameTo/tags) to previously produced files.
/// </summary>
public override TaskStatus Run()
{
    Info("Transforming files...");
    bool success = true;
    bool atLeastOneSucceed = false;
    foreach (FileInf file in SelectFiles())
    {
        // Each transformed file gets a unique timestamped name in the temp folder.
        var destPath = Path.Combine(Workflow.WorkflowTempFolder, string.Format("{0}_{1:yyyy-MM-dd-HH-mm-ss-fff}.xml", Path.GetFileNameWithoutExtension(file.FileName), DateTime.Now));
        try
        {
            switch (Version)
            {
                case "1.0":
                    var xslt = new XslCompiledTransform();
                    xslt.Load(XsltPath);
                    xslt.Transform(file.Path, destPath);
                    InfoFormat("File transformed: {0} -> {1}", file.Path, destPath);
                    Files.Add(new FileInf(destPath, Id));
                    break;
                case "2.0":
                    // Create a Processor instance.
                    var processor = new Processor();
                    // Load the source document.
                    var input = processor.NewDocumentBuilder().Build(new Uri(file.Path));
                    // Create a transformer for the stylesheet.
                    var transformer = processor.NewXsltCompiler().Compile(new Uri(XsltPath)).Load();
                    // Set the root node of the source document to be the initial context node.
                    transformer.InitialContextNode = input;
                    // Create a serializer.
                    var serializer = new Serializer();
                    serializer.SetOutputFile(destPath);
                    // Transform the source XML to System.out.
                    transformer.Run(serializer);
                    InfoFormat("File transformed: {0} -> {1}", file.Path, destPath);
                    Files.Add(new FileInf(destPath, Id));
                    break;
                default:
                    Error("Error in version option. Available options: 1.0 or 2.0");
                    return(new TaskStatus(Status.Error, false));
            }
            // Set renameTo and tags from /*//<WexflowProcessing>//<File> nodes
            // Remove /*//<WexflowProcessing> nodes if necessary
            var xdoc = XDocument.Load(destPath);
            var xWexflowProcessings = xdoc.Descendants("WexflowProcessing").ToArray();
            foreach (var xWexflowProcessing in xWexflowProcessings)
            {
                var xFiles = xWexflowProcessing.Descendants("File");
                foreach (var xFile in xFiles)
                {
                    try
                    {
                        var taskId = int.Parse(xFile.Attribute("taskId").Value);
                        string fileName = xFile.Attribute("name").Value;
                        var xRenameTo = xFile.Attribute("renameTo");
                        string renameTo = xRenameTo != null ? xRenameTo.Value : string.Empty;
                        // Every attribute other than the reserved ones becomes a tag.
                        var tags = (from xTag in xFile.Attributes() where xTag.Name != "taskId" && xTag.Name != "name" && xTag.Name != "renameTo" && xTag.Name != "path" && xTag.Name != "renameToOrName" select new Tag(xTag.Name.ToString(), xTag.Value)).ToList();
                        // Look up the referenced file among the output of the named task.
                        var fileToEdit = (from f in Workflow.FilesPerTask[taskId] where f.FileName.Equals(fileName) select f).FirstOrDefault();
                        if (fileToEdit != null)
                        {
                            fileToEdit.RenameTo = renameTo;
                            fileToEdit.Tags.AddRange(tags);
                            InfoFormat("File edited: {0}", fileToEdit.ToString());
                        }
                        else
                        {
                            ErrorFormat("Cannot find the File: {{fileName: {0}, taskId:{1}}}", fileName, taskId);
                        }
                    }
                    catch (ThreadAbortException)
                    {
                        throw;
                    }
                    catch (Exception e)
                    {
                        ErrorFormat("An error occured while editing the file: {0}. Error: {1}", xFile.ToString(), e.Message);
                    }
                }
            }
            if (RemoveWexflowProcessingNodes)
            {
                xWexflowProcessings.Remove();
                xdoc.Save(destPath);
            }
            if (!atLeastOneSucceed)
            {
                atLeastOneSucceed = true;
            }
        }
        catch (ThreadAbortException)
        {
            throw;
        }
        catch (Exception e)
        {
            ErrorFormat("An error occured while transforming the file {0}", e, file.Path);
            success = false;
        }
    }
    // Success if everything worked, Warning on partial success, Error otherwise.
    var status = Status.Success;
    if (!success && atLeastOneSucceed)
    {
        status = Status.Warning;
    }
    else if (!success)
    {
        status = Status.Error;
    }
    Info("Task finished.");
    return(new TaskStatus(status, false));
}
/// <summary>
/// Loads files from the configured folders (optionally recursively, optionally filtered
/// by a regular expression) plus the explicitly listed files into the task's file set.
/// </summary>
/// <returns>Success when every source loaded; Error otherwise.</returns>
public override TaskStatus Run()
{
    Info("Loading files...");
    bool succeeded = true;
    try
    {
        if (Recursive)
        {
            foreach (string folder in Folders)
            {
                foreach (var candidate in GetFilesRecursive(folder))
                {
                    // An empty pattern means "accept everything".
                    if (!string.IsNullOrEmpty(RegexPattern) && !Regex.IsMatch(candidate, RegexPattern))
                    {
                        continue;
                    }
                    Files.Add(new FileInf(candidate, Id));
                    InfoFormat("File loaded: {0}", candidate);
                }
            }
        }
        else
        {
            foreach (string folder in Folders)
            {
                foreach (string candidate in Directory.GetFiles(folder))
                {
                    if (!string.IsNullOrEmpty(RegexPattern) && !Regex.IsMatch(candidate, RegexPattern))
                    {
                        continue;
                    }
                    Files.Add(new FileInf(candidate, Id));
                    InfoFormat("File loaded: {0}", candidate);
                }
            }
        }
        // Explicitly listed files: a missing one is an error but does not stop the loop.
        foreach (string listed in FlFiles)
        {
            if (File.Exists(listed))
            {
                Files.Add(new FileInf(listed, Id));
                InfoFormat("File loaded: {0}", listed);
            }
            else
            {
                ErrorFormat("File not found: {0}", listed);
                succeeded = false;
            }
        }
    }
    catch (ThreadAbortException)
    {
        throw;
    }
    catch (Exception e)
    {
        ErrorFormat("An error occured while loading files.", e);
        succeeded = false;
    }
    var status = succeeded ? WorkflowStatus.Success : WorkflowStatus.Error;
    Info("Task finished.");
    return new TaskStatus(status, false);
}