/// <summary>
/// Runs every registered pre-compilation step against the given file, in registration order.
/// </summary>
/// <param name="file">The source file each step will process.</param>
public virtual void Process(SourceFile file)
{
    foreach (var currentStep in internalSteps)
    {
        currentStep.Process(file);
    }
}
/// <summary>
/// Applies each contained pre-compilation step to the supplied source file.
/// </summary>
/// <param name="file">The file to run the steps against.</param>
public virtual void Process(SourceFile file)
{
    foreach (var step in internalSteps)
        step.Process(file);
}
public void ItemTests()
{
    // Parse a minimal single-item JSON snapshot.
    var file = new SourceFile(Services.FileSystem, "test.txt", "test.txt");
    var snapshot = Services.CompositionService.Resolve<JsonTextSnapshot>().With(SnapshotParseContext.Empty, file, "{ \"Item\": { \"Fields\": [ { \"Name\": \"Text\", \"Value\": \"123\" } ] } }");

    // The root node is the "Item" object with a single child collection.
    var rootNode = snapshot.Root;
    Assert.IsNotNull(rootNode);
    Assert.AreEqual("Item", rootNode.Key);
    Assert.AreEqual(1, rootNode.ChildNodes.Count());

    // "Fields" holds exactly one field definition.
    var fieldsNode = rootNode.ChildNodes.First();
    Assert.AreEqual(1, fieldsNode.ChildNodes.Count());

    var fieldNode = fieldsNode.ChildNodes.First();
    Assert.AreEqual("Text", fieldNode.GetAttributeValue("Name"));
    Assert.AreEqual("123", fieldNode.GetAttributeValue("Value"));
    Assert.AreEqual(0, fieldNode.ChildNodes.Count());

    // The "Name" attribute is a leaf: no nested attributes or children.
    var nameAttribute = fieldNode.GetAttribute("Name");
    Assert.IsNotNull(nameAttribute);
    Assert.AreEqual("Text", nameAttribute.Value);
    Assert.AreEqual(0, nameAttribute.Attributes.Count());
    Assert.AreEqual(0, nameAttribute.ChildNodes.Count());

    // Attributes share their owner's snapshot.
    Assert.AreEqual(fieldNode.Snapshot, nameAttribute.Snapshot);
    Assert.AreEqual(snapshot, nameAttribute.Snapshot);
}
/// <summary>
/// Rewrites server-side content tags into capture-for component tags, or inlines
/// the content directly for the special "ViewContents" placeholder.
/// </summary>
/// <param name="file">The source file whose render body is rewritten.</param>
public void Process(SourceFile file)
{
    file.RenderBody = Internal.RegularExpressions.ContentTag.Replace(
        file.RenderBody,
        match =>
        {
            var attributes = Utilities.GetAttributesDictionaryFrom(match.Groups["attributes"].Value);

            // Only tags marked runat="server" are transformed; anything else stays untouched.
            var isServerTag = attributes.Contains("runat")
                && String.Equals("server", attributes["runat"] as string, StringComparison.InvariantCultureIgnoreCase);
            if (!isServerTag)
                return match.Value;

            if (!attributes.Contains("contentplaceholderid"))
                throw new AspViewException(ExceptionMessages.ContentPlaceHolderIdAttributeNotFound);

            var placeholderId = (string)attributes["contentplaceholderid"];
            if (String.IsNullOrEmpty(placeholderId))
                throw new AspViewException(ExceptionMessages.ContentPlaceHolderIdAttributeEmpty);

            // handle ViewContents special case: emitted inline with no capture-for wrapper
            if (placeholderId == "ViewContents")
                return match.Groups["content"].Value;

            return string.Format(
                @"<component:capturefor id=""{0}"">{1}</component:capturefor>",
                placeholderId,
                match.Groups["content"].Value);
        });
}
/// <summary>
/// Queues a file change that inserts a namespace import at the given location.
/// </summary>
/// <param name="start">Insertion point for the new import.</param>
/// <param name="namespaceName">Namespace to reference.</param>
/// <param name="sourceFile">File that receives the import.</param>
/// <param name="isLast">True when this is the final import, so its trailing newline is trimmed.</param>
public void AddNewImport(SourcePoint start, string namespaceName, SourceFile sourceFile, bool isLast)
{
    string generatedImport = CodeRush.Language.GenerateElement(new NamespaceReference(namespaceName), sourceFile.Project.Language);

    // Remove cr/lf from last entry.
    if (isLast && generatedImport.EndsWith(Environment.NewLine))
        generatedImport = generatedImport.Remove(generatedImport.Length - Environment.NewLine.Length);

    _NewImportCalls.Add(new FileChange(sourceFile.Name, start, generatedImport));
}
/// <summary>
/// Rewrites server-side layout content placeholders into property-output snippets,
/// registering a string view property for each placeholder id (except the implicit
/// "ViewContents" placeholder).
/// </summary>
/// <param name="file">The source file whose render body is rewritten.</param>
public void Process(SourceFile file)
{
    file.RenderBody = Internal.RegularExpressions.LayoutContentPlaceHolder.Replace(
        file.RenderBody,
        match =>
        {
            IDictionary attributes = Utilities.GetAttributesDictionaryFrom(match.Groups["attributes"].Value);

            // Leave anything that is not runat="server" untouched.
            bool isServerTag = attributes.Contains("runat")
                && String.Equals("server", attributes["runat"] as string, StringComparison.InvariantCultureIgnoreCase);
            if (!isServerTag)
                return match.Value;

            if (!attributes.Contains("id"))
                throw new AspViewException(ExceptionMessages.IdAttributeNotFound);

            string placeholderId = (string)attributes["id"];
            if (String.IsNullOrEmpty(placeholderId))
                throw new AspViewException(ExceptionMessages.IdAttributeEmpty);

            if (!file.Properties.ContainsKey(placeholderId))
            {
                // handle ViewContents special case: it is never registered as a property
                if (placeholderId != "ViewContents")
                    file.Properties.Add(placeholderId, new ViewProperty(placeholderId, "string", @""""""));
            }
            else if (!String.Equals(file.Properties[placeholderId].Type, "string", StringComparison.InvariantCultureIgnoreCase))
            {
                throw new AspViewException(String.Format(ExceptionMessages.ViewPropertyAllreadyRegisteredWithOtherTypeFormat, placeholderId));
            }

            return String.Format("<%={0}%>", placeholderId);
        });
}
//
// Tokenizes a single source file end-to-end, printing the token and error counts.
//
void tokenize_file (SourceFile sourceFile, ModuleContainer module, ParserSession session)
{
    Stream input;
    try {
        input = File.OpenRead (sourceFile.Name);
    } catch {
        Report.Error (2001, "Source file `" + sourceFile.Name + "' could not be found");
        return;
    }

    using (input) {
        var reader = new SeekableStreamReader (input, ctx.Settings.Encoding);
        var file = new CompilationSourceFile (module, sourceFile);
        var lexer = new Tokenizer (reader, file, session, ctx.Report);

        int token;
        int tokens = 0, errors = 0;
        while ((token = lexer.token ()) != Token.EOF) {
            tokens++;
            if (token == Token.ERROR)
                errors++;
        }

        Console.WriteLine ("Tokenized: " + tokens + " found " + errors + " errors");
    }
}
/// <summary>
/// Executes every pre-compilation step supplied by the provider against the file.
/// </summary>
/// <param name="file">The source file to process.</param>
private void RunSteps(SourceFile file)
{
    foreach (var step in provider.GetSteps())
    {
        step.Process(file);
    }
}
/// <summary>
/// Captures a code issue together with the file it was found in and its message.
/// </summary>
internal CodeIssueFile(CodeIssue codeIssue, SourceFile file, string message)
{
    this.message = message;
    this.file = file;
    this.codeIssue = codeIssue;
}
/// <summary>
/// Strips server-side comments from the file's render body.
/// </summary>
/// <param name="file">The source file object to act upon.</param>
void IPreCompilationStep.Process(SourceFile file)
{
    var withoutComments = Internal.RegularExpressions.ServerSideComment.Replace(file.RenderBody, string.Empty);
    file.RenderBody = withoutComments;
}
/// <summary>
/// Bundles a lexer token with its text and source position (line/column).
/// </summary>
public TokenInfo(Token token, string value, SourceFile sourceFile, int ln, int col)
{
    this.token = token;
    this.value = value;
    this.sourceFile = sourceFile;
    this.ln = ln;
    this.col = col;
}
/// <summary>
/// Registers the transformed body of a view-component section under its handler name.
/// </summary>
/// <param name="handlerName">Key under which the section is stored.</param>
/// <param name="sectionContent">Raw section markup.</param>
/// <param name="file">File whose section handlers are updated.</param>
private void RegisterSectionHandler(string handlerName, string sectionContent, SourceFile file)
{
    // Nested view-component tags are expanded first, then the script transform is applied.
    var body = Internal.RegularExpressions.ViewComponentTags.IsMatch(sectionContent)
        ? Process(sectionContent, file)
        : sectionContent;
    file.ViewComponentSectionHandlers[handlerName] = scriptTransformer.Transform(body);
}
/// <summary>
/// Associates a file with the services used to check it and process its issues.
/// </summary>
internal CheckFile(IssueServices issueService, SourceFile file, IssueProcessor issueProcessor)
{
    this.issueProcessor = issueProcessor;
    this.file = file;
    this.issueService = issueService;
}
/// <summary>
/// Removes import directives from the render body, collecting each namespace
/// into the file's import list.
/// </summary>
/// <param name="file">The source file to process.</param>
public void Process(SourceFile file)
{
    file.RenderBody = Internal.RegularExpressions.ImportDirective.Replace(
        file.RenderBody,
        match =>
        {
            file.Imports.Add(match.Groups["namespace"].Value);
            return string.Empty;
        });
}
// Emits one sequence point per PDB line record of the function.
// 0xfeefee is the PDB marker for hidden ("no user code") lines.
void ConvertSequencePoints (PdbFunction function, SourceFile file, SourceMethodBuilder builder)
{
    foreach (var lineBlock in function.lines) {
        foreach (var line in lineBlock.lines) {
            builder.MarkSequencePoint (
                (int) line.offset,
                file.CompilationUnit.SourceFile,
                (int) line.lineBegin,
                (int) line.colBegin,
                line.lineBegin == 0xfeefee);
        }
    }
}
/// <summary>
/// Initializes an instance of the ReportItem class.
/// </summary>
/// <param name="descriptor">The message descriptor this report is based on.</param>
/// <param name="sourceFile">File the report refers to.</param>
/// <param name="sourceSpan">Span within the file.</param>
/// <param name="sourceLine">Tokens of the offending line.</param>
/// <param name="args">Arguments substituted into the descriptor's message.</param>
public ReportItem(MessageDescriptor descriptor, SourceFile sourceFile, SourceSpan sourceSpan, TokenList sourceLine, params string[] args)
{
    Arguments = args;
    SourceLine = sourceLine;
    SourceSpan = sourceSpan;
    SourceFile = sourceFile;
    MessageDescriptor = descriptor;
}
/// <summary>
/// Adds the default namespaces every generated view class needs.
/// </summary>
/// <param name="file">The source file whose import list is extended.</param>
public void Process(SourceFile file)
{
    string[] defaultImports =
    {
        "System",
        "System.IO",
        "System.Collections",
        "System.Collections.Generic",
        "Castle.MonoRail.Framework",
        "Castle.MonoRail.Views.AspView"
    };

    foreach (string ns in defaultImports)
        file.Imports.Add(ns);
}
/// <summary>
/// Creates a reference to a source file within a given provider.
/// </summary>
/// <param name="provider">Owning source provider; must not be null.</param>
/// <param name="sourceFile">Referenced source file; must not be null.</param>
protected SourceReference(SourceProvider provider, SourceFile sourceFile)
{
    if (provider == null)
        throw new ArgumentNullException("provider");
    if (sourceFile == null)
        throw new ArgumentNullException("sourceFile");

    _provider = provider;
    _sourceFile = sourceFile;
}
// Malformed or empty XML must yield an empty root node rather than throw.
public void InvalidXmlTests()
{
    var file = new SourceFile(Services.FileSystem, "test.txt", "test.txt");

    // Unclosed tag.
    var snapshot = Services.CompositionService.Resolve<XmlTextSnapshot>().With(SnapshotParseContext.Empty, file, "<Item>", string.Empty, string.Empty);
    Assert.AreEqual(TextNode.Empty, snapshot.Root);

    // Completely empty document.
    snapshot = Services.CompositionService.Resolve<XmlTextSnapshot>().With(SnapshotParseContext.Empty, file, string.Empty, string.Empty, string.Empty);
    Assert.AreEqual(TextNode.Empty, snapshot.Root);
}
/// <summary>
/// Chooses the source-code wrapper: an in-editor document when one is open,
/// otherwise a file-system backed reader.
/// </summary>
private ISourceCode SetupSourceCode(SourceFile scope)
{
    return scope.Document != null
        ? (ISourceCode)new VSSourceCode(scope)
        : new FileSourceCode(scope.FilePath);
}
/// <summary>
/// Consumes the page directive, capturing the base class and optional typed-view
/// name (which becomes a generic argument on the base class).
/// </summary>
/// <param name="file">The source file to process.</param>
public void Process(SourceFile file)
{
    file.RenderBody = Internal.RegularExpressions.PageDirective.Replace(
        file.RenderBody,
        match =>
        {
            file.BaseClassName = GetBaseClass(match.Groups["base"]);
            file.TypedViewName = GetTypedViewName(match.Groups["view"]);
            if (file.TypedViewName != null)
                file.BaseClassName += "<" + file.TypedViewName + ">";
            return string.Empty;
        },
        1); // only the first directive is honored
}
// Invokes the "Rename File to Match Type" refactoring on the active document.
// NOTE(review): the 'file' and 'newName' parameters are never used — the
// refactoring operates on the active document only; confirm this is intended.
// NOTE(review): the trailing 'return' inside the null-check is redundant
// (nothing follows it in the method).
void RenameFile(SourceFile file, string newName)
{
    CodeRush.Language.ParseActiveDocument();
    RefactoringProviderBase renameFile = CodeRush.Refactoring.Get("Rename File to Match Type");
    if (renameFile != null)
    {
        // Only execute when the refactoring reports itself available.
        if (renameFile.IsAvailable)
            renameFile.Execute();
        return;
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="SourceModelProxy"/> class.
/// </summary>
/// <param name="source">Source model services to proxy; must not be null, with a non-null active project and source file.</param>
public SourceModelProxy(SourceModelServices source)
{
    if (source == null)
        throw new ArgumentNullException("source", "source is null.");
    Contract.EndContractBlock();

    activeProject = GetNonNull(source.ActiveProject, "source.ActiveProject");
    activeSourceFile = GetNonNull(source.ActiveSourceFile, "source.ActiveSourceFile");
}
/// <summary>
/// Runs every markup transformer over the markup part of each script match,
/// re-emitting transformed markup followed by the untouched script.
/// </summary>
/// <param name="file">The source file to process.</param>
public void Process(SourceFile file)
{
    file.RenderBody = Internal.RegularExpressions.Script.Replace(
        file.RenderBody,
        match =>
        {
            var script = match.Groups["script"].Value;
            var markup = match.Groups["markup"].Value;
            foreach (IMarkupTransformer transformer in markupTransformers)
                markup = transformer.Transform(markup);
            return markup + script;
        });
}
/// <summary>
/// Returns the text document associated with the SourceFile passed in fileNode parameter.
/// </summary>
/// <param name="fileNode">The source file whose document is requested; may be null.</param>
/// <returns>The file's document as a TextDocument, or null when unavailable.</returns>
private static TextDocument getTextDocument(SourceFile fileNode)
{
    if (fileNode == null)
    {
        return null;
    }

    IDocument document = fileNode.Document;
    if (document == null)
    {
        // BUG FIX: the original recursed with the same argument here
        // (return getTextDocument(fileNode);), which loops forever and
        // overflows the stack whenever Document is null. Return null instead.
        return null;
    }

    return (document as TextDocument);
}
// An element's inner text is exposed through its inner text node,
// while Value itself stays empty.
public void ValueTests()
{
    var file = new SourceFile(Services.FileSystem, "test.txt", "test.txt");
    var snapshot = Services.CompositionService.Resolve<XmlTextSnapshot>().With(SnapshotParseContext.Empty, file, "<Item><Field Name=\"Text\">123</Field></Item>", string.Empty, string.Empty);

    var fieldNode = snapshot.Root.ChildNodes.First();
    Assert.AreEqual("Field", fieldNode.Key);
    Assert.AreEqual("Text", fieldNode.GetAttributeValue("Name"));
    Assert.AreEqual("123", fieldNode.GetInnerTextNode()?.Value);
    Assert.AreEqual(string.Empty, fieldNode.Value);
    Assert.AreEqual(0, fieldNode.ChildNodes.Count());
}
/// <summary>
/// Strips property sections (both markup-style and server-script style) from the
/// render body, registering their contents via HandlePropertiesSection.
/// </summary>
/// <param name="file">The source file to process.</param>
public void Process(SourceFile file)
{
    // Both section flavors are handled identically: register, then remove.
    MatchEvaluator stripAndRegister = delegate(Match match)
    {
        HandlePropertiesSection(match, file);
        return string.Empty;
    };

    file.RenderBody = Internal.RegularExpressions.PropertiesSection.Replace(file.RenderBody, stripAndRegister);
    file.RenderBody = Internal.RegularExpressions.PropertiesServerScriptSection.Replace(file.RenderBody, stripAndRegister);
}
/// <summary>
/// Removes embedded server-side script blocks from the file's body and stores
/// their contents for later inclusion in the generated class.
/// </summary>
/// <param name="file">The source file object to act upon.</param>
void IPreCompilationStep.Process(SourceFile file)
{
    file.RenderBody = Internal.RegularExpressions.EmbededServerScriptBlock.Replace(
        file.RenderBody,
        match =>
        {
            file.EmbededScriptBlocks.Add(match.Groups["content"].Value);
            return string.Empty;
        });
}
// Serves a symbol-server source request from Visual Studio: resolves the image
// file by name + PDB hash, then redirects to a download link for the requested
// source path. Returns 404 content when the image or source cannot be found.
public ActionResult Index(string company, string login, string password, string computerName, string computerUser, string imageName, string pdbHash, string sourcePath)
{
    // Normalize the casing of the well-known "Public" company name.
    if ("Public".Equals(company, StringComparison.OrdinalIgnoreCase))
        company = "Public";

    // Fall back to the company's public credentials when none were supplied.
    if (string.IsNullOrEmpty(login) && string.IsNullOrEmpty(password))
    {
        var configuration = configurationFactory.Create(company);
        login = configuration.PublicLogin;
        password = configuration.PublicPassword;
    }

    //TODO: do we have a test for this?
    //Some clients send the compilation hash in lower case.
    // NOTE(review): throws NullReferenceException when pdbHash is null — confirm callers always supply it.
    pdbHash = pdbHash.ToUpper();

    using (var backend = factory.Create(company, login, "VisualStudio", password))
    {
        var imageFile = backend.GetImageFile(imageName, pdbHash);
        if (imageFile == null)
        {
            backend.LogImageFileNotFound(imageName, pdbHash);
            Response.StatusCode = 404;
            return Content("Not found");
        }

        var sourceFile = new SourceFile
        {
            Company = company,
            Repository = imageFile.Repository,
            Project = imageFile.Project,
            Version = imageFile.Version,
            Mode = imageFile.Mode,
            Platform = imageFile.Platform,
            ImageName = imageName,
            Path = sourcePath
        };

        var link = backend.GetSourceFileLink(ref sourceFile);
        if (link == null)
        {
            // NOTE(review): this logs "found" on the not-found path — looks like
            // it should be a not-found log call; confirm against the backend API.
            backend.LogSourceFileFound(sourceFile);
            Response.StatusCode = 404;
            return Content("Source file not found");
        }

        backend.LogSourceFileFound(sourceFile);
        return Redirect(link);
    }
}
/// <summary>
/// Turns subview tags into OutputSubView(...) server-side calls, converting
/// dotted view names to path form and forwarding the tag's attributes.
/// </summary>
/// <param name="file">The source file to process.</param>
public void Process(SourceFile file)
{
    file.RenderBody = Internal.RegularExpressions.SubViewTags.Replace(
        file.RenderBody,
        match =>
        {
            var viewName = match.Groups["viewName"].Value.Replace('.', '/');
            var attributesString = Utilities.GetAttributesStringFrom(match.Groups["attributes"].Value);
            return string.Format(@"<% OutputSubView(""{0}""{1}); %>", viewName, attributesString);
        });
}
/// <summary>
/// Root AST node: a source file together with its top-level statements.
/// </summary>
public ProgramNode(SourceFile sourceFile, IReadOnlyList<IStatement> statements)
{
    Statements = statements;
    SourceFile = sourceFile;
}
/// <summary>
/// Decides whether execution should pause at the given file/line/frame, honoring
/// one-shot temporary breakpoints and conditional breakpoint expressions.
/// </summary>
public Boolean ShouldBreak(SourceFile file, int line, Frame frame)
{
    String localPath = PluginMain.debugManager.GetLocalPath(file);
    if (localPath == null)
    {
        // File cannot be mapped to a local path — never break here.
        return (false);
    }

    // A pending temporary ("run to cursor") breakpoint fires once, then deletes itself.
    if (m_TemporaryBreakPointInfo != null)
    {
        if (m_TemporaryBreakPointInfo.FileFullPath == localPath && m_TemporaryBreakPointInfo.Line == (line - 1))
        {
            m_TemporaryBreakPointInfo.IsDeleted = true;
            List<BreakPointInfo> bpList = new List<BreakPointInfo>();
            bpList.Add(m_TemporaryBreakPointInfo);
            PluginMain.debugManager.FlashInterface.UpdateBreakpoints(bpList);
            m_TemporaryBreakPointInfo = null;
            return (true);
        }
    }

    int index = GetBreakPointIndex(localPath, line - 1);
    if (index >= 0)
    {
        BreakPointInfo bpInfo = m_BreakPointList[index];
        if (bpInfo.ParsedExpression != null)
        {
            try
            {
                if (frame == null)
                {
                    // take currently active worker and frame
                    frame = PluginMain.debugManager.FlashInterface.GetFrames()[PluginMain.debugManager.CurrentFrame];
                }
                var ctx = new ExpressionContext(PluginMain.debugManager.FlashInterface.Session, frame);
                var val = bpInfo.ParsedExpression.evaluate(ctx);
                // The condition result may come back as a Java boolean, a debugger
                // Value, or a Variable — coerce each to a .NET boolean.
                if (val is java.lang.Boolean)
                {
                    return (((java.lang.Boolean)val).booleanValue());
                }
                if (val is Value)
                {
                    return (ECMA.toBoolean(((Value)val)));
                }
                if (val is Variable)
                {
                    return (ECMA.toBoolean(((Variable)val).getValue()));
                }
                throw new NotImplementedException(val.toString());
            }
            catch (/*Expression*/ Exception e)
            {
                // On evaluation failure, report the problem and break anyway.
                TraceManager.AddAsync("[Problem in breakpoint: " + e.ToString() + "]", 4);
                ErrorManager.ShowError(e);
                return (true);
            }
        }
        else
        {
            // Unconditional breakpoint: always break.
            return (true);
        }
    }

    // NOTE(review): falls through to 'true' even when no breakpoint exists at this
    // line — verify the debugger only calls this for known breakpoint locations.
    return (true);
}
/// <summary>
/// Adds the trydotnet data attributes for this code block: the owning package,
/// the destination file name, and an injection-point marker when the block's
/// read-only region round-trips back onto its source file.
/// </summary>
/// <param name="block">The annotated code block to decorate.</param>
public override async Task AddAttributes(AnnotatedCodeBlock block)
{
    // Only advertise the project as the package when no explicit package is set.
    if (Package == null && Project?.FullName != null)
    {
        block.AddAttribute("data-trydotnet-package", Project.FullName);
    }

    var fileName = GetDestinationFileAbsolutePath();
    if (!string.IsNullOrWhiteSpace(fileName))
    {
        block.AddAttribute("data-trydotnet-file-name", fileName);
    }

    if (ReadOnlyRegionRoundtrip())
    {
        block.AddAttribute("data-trydotnet-injection-point", "replace");
    }

    // True when a non-editable region maps back onto the same source file
    // (no destination file, or source equals destination).
    bool ReadOnlyRegionRoundtrip()
    {
        return (!Editable
            && !string.IsNullOrWhiteSpace(Region)
            && SourceFile != null
            && (DestinationFile == null || SourceFile.Equals(DestinationFile)));
    }

    await base.AddAttributes(block);
}
/// <summary>
/// Converts the source image to the given destination kind (where applicable)
/// and saves the results, yielding each destination file produced.
/// </summary>
/// <param name="source">The image file to convert.</param>
/// <param name="destinationKind">Target image file kind.</param>
/// <param name="optionCollection">Command-line options influencing the conversion.</param>
/// <param name="logger">Logger for progress and diagnostics.</param>
/// <returns>The destination files written by the implementation.</returns>
protected abstract IEnumerable<DestinationFile> ConvertApplicableImageThenSave(
    SourceFile source,
    ImageFileKind destinationKind,
    ArgumentOptionCollection optionCollection,
    ILogger logger);
/// <summary>
/// Builds an XmlSourceFileBuilder for the given annotated XML tree: walks every
/// element and attribute in document order and, for each XmlSemanticAnnotation,
/// registers either a reference span or a definition span. The span covers the
/// name (minus trailing trivia) when the annotation applies to the name, else
/// the value. Attribute values resolve via the string-syntax text tokens; a
/// zero-length attribute span is skipped.
/// </summary>
/// <param name="sourceElement">Root element whose descendants carry semantic annotations.</param>
/// <param name="info">Source file metadata stored on the generated SourceFile.</param>
/// <param name="projectId">Project the generated file belongs to.</param>
/// <returns>The populated builder with all annotations applied.</returns>
public static XmlSourceFileBuilder CreateAnnotatedSourceBuilder(this XElement sourceElement, SourceFileInfo info, string projectId) { var sourceFile = new SourceFile() { Content = sourceElement.ToString(), Info = info }; var binder = new XmlSourceFileBuilder(sourceFile, projectId); var elementList = sourceElement.DescendantsAndSelf().ToList(); var sortedElementList = binder.SortedElementList; Debug.Assert(elementList.Count == sortedElementList.Count); List <XmlAttributeSyntax> attributeSyntaxBuffer = new List <XmlAttributeSyntax>(); List <XAttribute> attributeBuffer = new List <XAttribute>(); for (int i = 0; i < elementList.Count; i++) { var element = elementList[i]; var syntaxElement = sortedElementList[i].Value; foreach (var annotation in element.Annotations <XmlSemanticAnnotation>()) { int start = 0; int length = 0; if (annotation.AppliesToName) { var nameNode = syntaxElement.NameNode(); start = nameNode.Start; length = nameNode.FullWidth - nameNode.GetTrailingTriviaWidth(); } else { var valueNode = syntaxElement.ValueNode(); start = valueNode.Start; length = valueNode.FullWidth; } if (annotation.References != null) { binder.AnnotateReferences(start, length, annotation.References); } else { binder.AnnotateDefinition(start, length, annotation.Definition); } } attributeSyntaxBuffer.Clear(); attributeBuffer.Clear(); attributeSyntaxBuffer.AddRange(syntaxElement.AsSyntaxElement.Attributes); attributeBuffer.AddRange(element.Attributes()); Debug.Assert(attributeSyntaxBuffer.Count == attributeBuffer.Count); for (int j = 0; j < attributeSyntaxBuffer.Count; j++) { var attribute = attributeBuffer[j]; var attributeSyntax = attributeSyntaxBuffer[j]; foreach (var annotation in attribute.Annotations <XmlSemanticAnnotation>()) { int start = 0; int length = 0; if (annotation.AppliesToName) { var nameNode = attributeSyntax.NameNode; start = nameNode.Start; length = nameNode.FullWidth - nameNode.GetTrailingTriviaWidth(); } else { var valueNode = attributeSyntax?.ValueNode.As 
<XmlStringSyntax>()?.TextTokens.Node; if (valueNode != null) { start = valueNode.Start; length = valueNode.FullWidth; } } if (length != 0) { if (annotation.References != null) { binder.AnnotateReferences(start, length, annotation.References); } else { binder.AnnotateDefinition(start, length, annotation.Definition); } } } } } return(binder); }
/// <summary>
/// Reads lines from a file via the supplied reader factory, tracking known duplicates.
/// </summary>
public InternalFileLinesReader(IFileReaderFactory factory, SourceFile file, Duplicate[] duplicates)
{
    _duplicates = duplicates;
    _file = file;
    _factory = factory;
}
/// <summary>
/// Looks up the source file registered for the given document URI.
/// </summary>
/// <param name="uri">Document URI key.</param>
/// <param name="sourceFile">The matching source file, when present.</param>
/// <returns>True when a source file is registered for the URI.</returns>
public bool TryGetValue(DocumentUri uri, [MaybeNullWhen(false)] out SourceFile sourceFile)
{
    return documents.TryGetValue(uri, out sourceFile);
}
void Populate(Collection <SequencePoint> sequencePoints, int [] offsets, int [] startRows, int [] endRows, int [] startCols, int [] endCols, out SourceFile file) { SourceFile source_file = null; for (int i = 0; i < sequencePoints.Count; i++) { var sequence_point = sequencePoints [i]; offsets [i] = sequence_point.Offset; if (source_file == null) { source_file = GetSourceFile(sequence_point.Document); } startRows [i] = sequence_point.StartLine; endRows [i] = sequence_point.EndLine; startCols [i] = sequence_point.StartColumn; endCols [i] = sequence_point.EndColumn; } file = source_file; }
/// <summary>
/// Copy the contents of the given source directory to the given destination directory.
/// Subdirectory structure is mirrored first, then files newer than their destination
/// counterpart are copied; optionally, destination files with no source counterpart
/// are deleted.
/// </summary>
/// <param name="InSourceDirectory">Directory to copy from; must exist.</param>
/// <param name="InDestinationDirectory">Directory to copy into; created if missing.</param>
/// <param name="InWildCard">File wildcard filter applied to both sides.</param>
/// <param name="bInIncludeSubDirectories">True to recurse into subdirectories.</param>
/// <param name="bInRemoveDestinationOrphans">True to delete destination files absent from the source.</param>
/// <returns>False when the source directory does not exist; true otherwise.</returns>
bool CopySourceToDestDir(string InSourceDirectory, string InDestinationDirectory, string InWildCard, bool bInIncludeSubDirectories, bool bInRemoveDestinationOrphans)
{
    if (Directory.Exists(InSourceDirectory) == false)
    {
        Log.TraceInformation("Warning: CopySourceToDestDir - SourceDirectory does not exist: {0}", InSourceDirectory);
        return (false);
    }

    // Make sure the destination directory exists!
    Directory.CreateDirectory(InDestinationDirectory);

    SearchOption OptionToSearch = bInIncludeSubDirectories ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly;

    // Mirror the subdirectory structure before copying any files.
    var SourceDirs = new List<string>(Directory.GetDirectories(InSourceDirectory, "*.*", OptionToSearch));
    foreach (string SourceDir in SourceDirs)
    {
        string SubDir = SourceDir.Replace(InSourceDirectory, "");
        string DestDir = InDestinationDirectory + SubDir;
        Directory.CreateDirectory(DestDir);
    }

    var SourceFiles = new List<string>(Directory.GetFiles(InSourceDirectory, InWildCard, OptionToSearch));
    var DestFiles = new List<string>(Directory.GetFiles(InDestinationDirectory, InWildCard, OptionToSearch));

    // Keep a list of the files in the source directory... without the source path
    List<string> FilesInSource = new List<string>();

    // Copy all the source files that are newer...
    foreach (string SourceFile in SourceFiles)
    {
        string Filename = SourceFile.Replace(InSourceDirectory, "");
        FilesInSource.Add(Filename.ToUpperInvariant());
        string DestFile = InDestinationDirectory + Filename;

        System.DateTime SourceTime = File.GetLastWriteTime(SourceFile);
        System.DateTime DestTime = File.GetLastWriteTime(DestFile);
        if (SourceTime > DestTime)
        {
            try
            {
                DeployHelper_DeleteFile(DestFile);
                File.Copy(SourceFile, DestFile, true);
            }
            catch (Exception exceptionMessage)
            {
                Log.TraceInformation("Failed to copy {0} to deployment: {1}", SourceFile, exceptionMessage);
            }
        }
    }

    if (bInRemoveDestinationOrphans == true)
    {
        // If requested, delete any destination files that do not have a corresponding
        // file in the source directory
        foreach (string DestFile in DestFiles)
        {
            string DestFilename = DestFile.Replace(InDestinationDirectory, "");
            if (FilesInSource.Contains(DestFilename.ToUpperInvariant()) == false)
            {
                Log.TraceInformation("Destination file does not exist in Source - DELETING: {0}", DestFile);
                try
                {
                    DeployHelper_DeleteFile(DestFile);
                }
                catch (Exception exceptionMessage)
                {
                    Log.TraceInformation("Failed to delete {0} from deployment: {1}", DestFile, exceptionMessage);
                }
            }
        }
    }

    return (true);
}
/// <summary>
/// Packages a mod directory into a distributable archive: de-duplicates hashed
/// vs. unhashed FPK names, re-packs unpacked _fpk folders, collects FPK/QAR/
/// external file entries into the metadata, copies everything into a temporary
/// "_build" directory, writes metadata.xml and zips the result.
/// </summary>
/// <param name="SourceDir">Root directory of the mod's files.</param>
/// <param name="metaData">Mod metadata object populated with the collected entries.</param>
/// <param name="outputFilePath">Path of the zip archive to create.</param>
public static void BuildArchive(string SourceDir, ModEntry metaData, string outputFilePath)
{
    Debug.LogLine($"[BuildArchive] {SourceDir}.");
    HashingExtended.ReadDictionary();

    string buildDir = Directory.GetCurrentDirectory() + "\\_build";
    try
    {
        if (Directory.Exists(buildDir))
        {
            Directory.Delete(buildDir, true);
        }
    }
    catch
    {
        Debug.LogLine(string.Format("[BuildArchive] preexisting _build directory could not be deleted: {0}", buildDir));
    }
    Directory.CreateDirectory("_build");

    // Drop hashed fpk files whose unhashed counterpart is also present in the mod.
    List<string> fpkFiles = Directory.GetFiles(SourceDir, "*.fpk*", SearchOption.AllDirectories).ToList();
    for (int i = fpkFiles.Count - 1; i >= 0; i--)
    {
        string fpkFile = fpkFiles[i].Substring(SourceDir.Length + 1);
        if (!fpkFile.StartsWith("Assets"))
        {
            string updatedFileName = HashingExtended.UpdateName(fpkFile);
            if (updatedFileName != null)
            {
                updatedFileName = SourceDir + updatedFileName.Replace('/', '\\');
                if (fpkFiles.Contains(updatedFileName))
                {
                    fpkFiles.Remove(fpkFiles[i]);
                }
            }
        }
    }

    // Same de-duplication for unpacked _fpk folders; collisions are reported to the user.
    List<string> fpkFolders = ListFpkFolders(SourceDir);
    for (int i = fpkFolders.Count - 1; i >= 0; i--)
    {
        string fpkFolder = fpkFolders[i].Substring(SourceDir.Length + 1);
        if (!fpkFolder.StartsWith("Assets"))
        {
            string updatedFileName = HashingExtended.UpdateName(fpkFolder.Replace("_fpk", ".fpk"));
            if (updatedFileName != null)
            {
                updatedFileName = SourceDir + updatedFileName.Replace('/', '\\');
                if (fpkFolders.Contains(updatedFileName.Replace(".fpk", "_fpk")) || fpkFiles.Contains(updatedFileName))
                {
                    MessageBox.Show(string.Format("{0} was not packed or added to the build, because {1} (the unhashed filename of {0}) already exists in the mod directory.", Path.GetFileName(fpkFolders[i]), Path.GetFileName(updatedFileName)));
                    fpkFolders.Remove(fpkFolders[i]);
                }
            }
        }
    }

    // check for FPKs that must be built and build
    metaData.ModFpkEntries = new List<ModFpkEntry>();
    List<string> builtFpks = new List<string>();
    foreach (string FpkFullDir in fpkFolders)
    {
        foreach (ModFpkEntry fpkEntry in BuildFpk(FpkFullDir, SourceDir))
        {
            metaData.ModFpkEntries.Add(fpkEntry);
            if (!builtFpks.Contains(fpkEntry.FpkFile))
            {
                builtFpks.Add(fpkEntry.FpkFile);
            }
        }
    }

    // check for other FPKs and build fpkentry data
    foreach (string SourceFile in Directory.GetFiles(SourceDir, "*.fpk*", SearchOption.AllDirectories))
    {
        //tex chunk0\Assets\tpp\pack\collectible\common\col_common_tpp_fpk\Assets\tpp\pack\resident\resident00.fpkl is the only fpkl, don't know what a fpkl is, but gzcore crashes on it.
        if (SourceFile.EndsWith(".fpkl") || SourceFile.EndsWith(".xml"))
        {
            continue;
        }

        string FileName = Tools.ToQarPath(SourceFile.Substring(SourceDir.Length));
        if (!builtFpks.Contains(FileName))
        {
            // unpack FPK and build FPK list
            string fpkDir = Tools.ToWinPath(FileName.Replace(".fpk", "_fpk"));
            string fpkFullDir = Path.Combine(SourceDir, fpkDir);
            if (!Directory.Exists(fpkFullDir))
            {
                GzsLib.ExtractArchive<FpkFile>(SourceFile, fpkFullDir);
            }

            var fpkContents = GzsLib.ListArchiveContents<FpkFile>(SourceFile);
            foreach (string file in fpkContents)
            {
                if (!GzsLib.IsExtensionValidForArchive(file, fpkDir))
                {
                    Debug.LogLine($"[BuildArchive] {file} is not a valid file for a {Path.GetExtension(fpkDir)} archive.");
                    continue;
                }

                metaData.ModFpkEntries.Add(new ModFpkEntry()
                {
                    FilePath = file,
                    FpkFile = FileName,
                    ContentHash = Tools.GetMd5Hash(Path.Combine(SourceDir, fpkDir, Tools.ToWinPath(file)))
                });
            }
        }
    }

    // build QAR entries
    List<string> qarFiles = ListQarFiles(SourceDir);
    for (int i = qarFiles.Count - 1; i >= 0; i--)
    {
        string qarFile = qarFiles[i].Substring(SourceDir.Length + 1);
        if (!qarFile.StartsWith("Assets"))
        {
            string updatedQarName = HashingExtended.UpdateName(qarFile);
            if (updatedQarName != null)
            {
                updatedQarName = SourceDir + updatedQarName.Replace('/', '\\');
                if (qarFiles.Contains(updatedQarName))
                {
                    MessageBox.Show(string.Format("{0} was not added to the build, because {1} (the unhashed filename of {0}) already exists in the mod directory.", Path.GetFileName(qarFiles[i]), Path.GetFileName(updatedQarName)));
                    qarFiles.Remove(qarFiles[i]);
                }
            }
        }
    }

    metaData.ModQarEntries = new List<ModQarEntry>();
    foreach (string qarFile in qarFiles)
    {
        string subDir = qarFile.Substring(0, qarFile.LastIndexOf("\\")).Substring(SourceDir.Length).TrimStart('\\'); // the subdirectory for XML output
        string qarFilePath = Tools.ToQarPath(qarFile.Substring(SourceDir.Length));
        if (!Directory.Exists(Path.Combine("_build", subDir)))
        {
            Directory.CreateDirectory(Path.Combine("_build", subDir)); // create file structure
        }
        File.Copy(qarFile, Path.Combine("_build", Tools.ToWinPath(qarFilePath)), true);

        ulong hash = Tools.NameToHash(qarFilePath);
        metaData.ModQarEntries.Add(new ModQarEntry()
        {
            FilePath = qarFilePath,
            Compressed = qarFile.EndsWith(".fpk") || qarFile.EndsWith(".fpkd") ? true : false,
            ContentHash = Tools.GetMd5Hash(qarFile),
            Hash = hash
        });
    }

    //tex build external entries
    metaData.ModFileEntries = new List<ModFileEntry>();
    var externalFiles = ListExternalFiles(SourceDir);
    foreach (string externalFile in externalFiles)
    {
        string subDir = externalFile.Substring(0, externalFile.LastIndexOf("\\")).Substring(SourceDir.Length).TrimStart('\\'); // the subdirectory for XML output
        string externalFilePath = Tools.ToQarPath(externalFile.Substring(SourceDir.Length));
        if (!Directory.Exists(Path.Combine("_build", subDir)))
        {
            Directory.CreateDirectory(Path.Combine("_build", subDir)); // create file structure
        }
        File.Copy(externalFile, Path.Combine("_build", Tools.ToWinPath(externalFilePath)), true);

        string strip = "/" + ExternalDirName;
        if (externalFilePath.StartsWith(strip))
        {
            externalFilePath = externalFilePath.Substring(strip.Length);
        }

        //ulong hash = Tools.NameToHash(qarFilePath);
        metaData.ModFileEntries.Add(new ModFileEntry() { FilePath = externalFilePath, ContentHash = Tools.GetMd5Hash(externalFile) });
    }

    metaData.SBVersion.Version = Application.ProductVersion;
    metaData.SaveToFile("_build\\metadata.xml");

    // build archive
    FastZip zipper = new FastZip();
    zipper.CreateZip(outputFilePath, "_build", true, "(.*?)");

    try
    {
        Directory.Delete("_build", true);
    }
    catch (Exception e)
    {
        Debug.LogLine(string.Format("[BuildArchive] _build directory could not be deleted: {0}", e.ToString()));
    }
}
/// <summary>
/// Maps a debugger SourceFile to a local file-system path, caching results
/// (including negative ones). Resolution order: cached path, the file's own
/// full path, configured source-path folders, then the current project's classpaths.
/// </summary>
/// <param name="file">Debugger source file; may be null.</param>
/// <returns>The local path, or null when the file cannot be located.</returns>
public String GetLocalPath(SourceFile file)
{
    if (file == null)
    {
        return (null);
    }

    String fileFullPath = file.getFullPath();
    if (m_PathMap.ContainsKey(fileFullPath))
    {
        return (m_PathMap[fileFullPath]);
    }

    if (File.Exists(fileFullPath))
    {
        m_PathMap[fileFullPath] = fileFullPath;
        return (fileFullPath);
    }

    Char pathSeparator = Path.DirectorySeparatorChar;
    String pathFromPackage = file.getPackageName().ToString().Replace('/', pathSeparator);
    String fileName = file.getName();

    // Try each configured source folder: <folder>/<package path>/<file name>.
    foreach (Folder folder in PluginMain.settingObject.SourcePaths)
    {
        StringBuilder localPathBuilder = new StringBuilder(260 /*Windows max path length*/);
        localPathBuilder.Append(folder.Path);
        localPathBuilder.Append(pathSeparator);
        localPathBuilder.Append(pathFromPackage);
        localPathBuilder.Append(pathSeparator);
        localPathBuilder.Append(fileName);

        String localPath = localPathBuilder.ToString();
        if (File.Exists(localPath))
        {
            m_PathMap[fileFullPath] = localPath;
            return (localPath);
        }
    }

    // Fall back to the current project's classpaths.
    Project project = PluginBase.CurrentProject as Project;
    if (project != null)
    {
        foreach (string cp in project.Classpaths)
        {
            StringBuilder localPathBuilder = new StringBuilder(260 /*Windows max path length*/);
            localPathBuilder.Append(project.Directory);
            localPathBuilder.Append(pathSeparator);
            localPathBuilder.Append(cp);
            localPathBuilder.Append(pathSeparator);
            localPathBuilder.Append(pathFromPackage);
            localPathBuilder.Append(pathSeparator);
            localPathBuilder.Append(fileName);

            String localPath = localPathBuilder.ToString();
            if (File.Exists(localPath))
            {
                m_PathMap[fileFullPath] = localPath;
                return (localPath);
            }
        }
    }

    // Cache the miss so the search is not repeated.
    m_PathMap[fileFullPath] = null;
    return (null);
}
/// <summary>
/// Event data describing a change of the latest source file and its staleness transition.
/// </summary>
public LatestSourceFileChangedEventArgs(bool wasLatestSourceFileStale, bool isLatestSourceFileStale, SourceFile latestSourceFile)
{
    LatestSourceFile = latestSourceFile;
    WasLatestSourceFileStale = wasLatestSourceFileStale;
    IsLatestSourceFileStale = isLatestSourceFileStale;
}
// Imports a Mednafen/PCEjin .mc2 movie: reads header key/value lines and
// '|'-prefixed input frame records, configuring the PCE controller deck
// as the "ports" header is encountered.
protected override void RunImport()
{
    // Start with all ports unplugged; the "ports" header line plugs in gamepads.
    var ss = new PCEngine.PCESyncSettings
    {
        Port1 = PceControllerType.Unplugged,
        Port2 = PceControllerType.Unplugged,
        Port3 = PceControllerType.Unplugged,
        Port4 = PceControllerType.Unplugged,
        Port5 = PceControllerType.Unplugged
    };
    _deck = new PceControllerDeck(
        ss.Port1, ss.Port2, ss.Port3, ss.Port4, ss.Port5);
    Result.Movie.HeaderEntries[HeaderKeys.PLATFORM] = "PCE";
    using var sr = SourceFile.OpenText();
    string line;
    while ((line = sr.ReadLine()) != null)
    {
        if (string.IsNullOrWhiteSpace(line))
        {
            continue;
        }

        if (line[0] == '|')
        {
            // Input record line.
            ImportTextFrame(line);
        }
        else if (line.ToLower().StartsWith("ports"))
        {
            var portNumStr = ParseHeader(line, "ports");
            if (int.TryParse(portNumStr, out int ports))
            {
                // Ugh
                if (ports > 0)
                {
                    ss.Port1 = PceControllerType.GamePad;
                }

                if (ports > 1)
                {
                    ss.Port2 = PceControllerType.GamePad;
                }

                if (ports > 2)
                {
                    ss.Port3 = PceControllerType.GamePad;
                }

                if (ports > 3)
                {
                    ss.Port4 = PceControllerType.GamePad;
                }

                if (ports > 4)
                {
                    ss.Port5 = PceControllerType.GamePad;
                }

                // Rebuild the deck to reflect the newly plugged ports.
                _deck = new PceControllerDeck(
                    ss.Port1, ss.Port2, ss.Port3, ss.Port4, ss.Port5);
            }
        }
        else if (line.ToLower().StartsWith("pcecd"))
        {
            Result.Movie.HeaderEntries[HeaderKeys.PLATFORM] = "PCECD";
        }
        else if (line.ToLower().StartsWith("emuversion"))
        {
            Result.Movie.Comments.Add($"{EmulationOrigin} Mednafen/PCEjin version {ParseHeader(line, "emuVersion")}");
        }
        else if (line.ToLower().StartsWith("version"))
        {
            // Must come after the "emuversion" check, which would also match this prefix.
            string version = ParseHeader(line, "version");
            Result.Movie.Comments.Add($"{MovieOrigin} .mc2 version {version}");
        }
        else if (line.ToLower().StartsWith("romfilename"))
        {
            Result.Movie.HeaderEntries[HeaderKeys.GAMENAME] = ParseHeader(line, "romFilename");
        }
        else if (line.ToLower().StartsWith("cdgamename"))
        {
            Result.Movie.HeaderEntries[HeaderKeys.GAMENAME] = ParseHeader(line, "cdGameName");
        }
        else if (line.ToLower().StartsWith("comment author"))
        {
            Result.Movie.HeaderEntries[HeaderKeys.AUTHOR] = ParseHeader(line, "comment author");
        }
        else if (line.ToLower().StartsWith("rerecordcount"))
        {
            int rerecordCount;

            // Try to parse the re-record count as an integer, defaulting to 0 if it fails.
            try
            {
                rerecordCount = int.Parse(ParseHeader(line, "rerecordCount"));
            }
            catch
            {
                rerecordCount = 0;
            }

            Result.Movie.Rerecords = (ulong)rerecordCount;
        }
        else if (line.ToLower().StartsWith("startsfromsavestate"))
        {
            // If this movie starts from a savestate, we can't support it.
            if (ParseHeader(line, "StartsFromSavestate") == "1")
            {
                Result.Errors.Add("Movies that begin with a savestate are not supported.");
            }
        }
        else
        {
            // Everything not explicitly defined is treated as a comment.
            Result.Movie.Comments.Add(line);
        }
    }

    Result.Movie.SyncSettingsJson = ConfigService.SaveWithType(ss);
}
/// <summary>
/// Loads all scan data from a Thermo .raw file.
/// </summary>
/// <param name="filePath">Path to the .raw file on disk.</param>
/// <param name="filterParams">Optional peak-filtering parameters applied to each scan read.</param>
/// <param name="maxThreads">Maximum degree of parallelism for scan reading; -1 means unlimited.</param>
/// <returns>A ThermoRawFileReaderData holding every scan plus SHA-1-stamped source file metadata.</returns>
public static ThermoRawFileReaderData LoadAllStaticData(string filePath, IFilteringParams filterParams = null, int maxThreads = -1)
{
    if (!File.Exists(filePath))
    {
        throw new FileNotFoundException();
    }

    Loaders.LoadElements();

    // I don't know why this line needs to be here, but it does...
    // NOTE(review): 'temp' is never used or disposed — presumably FileFactory has a
    // required side effect (e.g. native library initialization); confirm before removing.
    var temp = RawFileReaderAdapter.FileFactory(filePath);

    var threadManager = RawFileReaderFactory.CreateThreadManager(filePath);
    var rawFileAccessor = threadManager.CreateThreadAccessor();

    if (!rawFileAccessor.IsOpen)
    {
        throw new MzLibException("Unable to access RAW file!");
    }

    if (rawFileAccessor.IsError)
    {
        throw new MzLibException("Error opening RAW file!");
    }

    if (rawFileAccessor.InAcquisition)
    {
        throw new MzLibException("RAW file still being acquired!");
    }

    rawFileAccessor.SelectInstrument(Device.MS, 1);
    var msDataScans = new MsDataScan[rawFileAccessor.RunHeaderEx.LastSpectrum];

    // Read scans in parallel; each range partition gets its own thread-local reader.
    Parallel.ForEach(Partitioner.Create(0, msDataScans.Length), new ParallelOptions { MaxDegreeOfParallelism = maxThreads }, (fff, loopState) =>
    {
        IRawDataPlus myThreadDataReader = threadManager.CreateThreadAccessor();
        myThreadDataReader.SelectInstrument(Device.MS, 1);

        for (int s = fff.Item1; s < fff.Item2; s++)
        {
            try
            {
                // Scan numbers are 1-based, array indices 0-based.
                var scan = GetOneBasedScan(myThreadDataReader, filterParams, s + 1);
                msDataScans[s] = scan;
            }
            catch (Exception ex)
            {
                throw new MzLibException("Error reading scan " + (s + 1) + ": " + ex.Message);
            }
        }
    });

    rawFileAccessor.Dispose();

    // Fingerprint the raw file so downstream outputs can reference its checksum.
    string sendCheckSum;
    using (FileStream stream = File.OpenRead(filePath))
    {
        using (SHA1Managed sha = new SHA1Managed())
        {
            byte[] checksum = sha.ComputeHash(stream);
            sendCheckSum = BitConverter.ToString(checksum)
                .Replace("-", string.Empty);
        }
    }

    SourceFile sourceFile = new SourceFile(
        @"Thermo nativeID format",
        @"Thermo RAW format",
        sendCheckSum,
        @"SHA-1",
        filePath,
        Path.GetFileNameWithoutExtension(filePath));

    return (new ThermoRawFileReaderData(msDataScans, sourceFile));
}
/// <summary>
/// Constructs the reader result from pre-loaded scans and source file metadata;
/// all storage is handled by the base class.
/// </summary>
protected ThermoRawFileReaderData(MsDataScan[] scans, SourceFile sourceFile)
    : base(scans, sourceFile)
{
}
/// <summary>
/// Registers the file via EnterSourceFile, then reports whether it exists on disk.
/// </summary>
public bool TrySourceFile(SourceFile file)
{
    EnterSourceFile(file);
    bool existsOnDisk = File.Exists(file.Path);
    return existsOnDisk;
}
/// <summary>
/// Inserts or replaces the cached SourceFile for the given document URI,
/// built from the URI's file system path and the supplied text.
/// </summary>
public void Update(DocumentUri uri, string text)
{
    var sourceFile = new SourceFile(uri.GetFileSystemPath(), text);
    // Whether adding or updating, the freshly built file wins.
    documents.AddOrUpdate(uri, sourceFile, (existingUri, existingFile) => sourceFile);
}
// Imports a Snes9x .SMV movie: parses the fixed binary header, configures the
// SNES controller deck from the controller mask, then decodes the per-frame
// 2-bytes-per-controller input records.
protected override void RunImport()
{
    var bsnesName = ((CoreAttribute)Attribute.GetCustomAttribute(typeof(LibsnesCore), typeof(CoreAttribute))).CoreName;
    Result.Movie.HeaderEntries[HeaderKeys.CORE] = bsnesName;
    using var fs = SourceFile.Open(FileMode.Open, FileAccess.Read);
    using var r = new BinaryReader(fs);

    // 000 4-byte signature: 53 4D 56 1A "SMV\x1A"
    string signature = new string(r.ReadChars(4));
    if (signature != "SMV\x1A")
    {
        Result.Errors.Add("This is not a valid .SMV file.");
        return;
    }

    Result.Movie.HeaderEntries[HeaderKeys.PLATFORM] = "SNES";

    // 004 4-byte little-endian unsigned int: version number
    uint versionNumber = r.ReadUInt32();
    string version;
    switch (versionNumber)
    {
        case 1:
            version = "1.43";
            break;
        case 4:
            version = "1.51";
            break;
        case 5:
            version = "1.52";
            break;
        default:
            version = "Unknown";
            break;
    }

    Result.Movie.Comments.Add($"{EmulationOrigin} Snes9x version {version}");
    Result.Movie.Comments.Add($"{MovieOrigin} .SMV");

    /*
     * 008 4-byte little-endian integer: movie "uid" - identifies the movie-savestate relationship, also used as the
     * recording time in Unix epoch format
     */
    uint uid = r.ReadUInt32();

    // 00C 4-byte little-endian unsigned int: rerecord count
    Result.Movie.Rerecords = r.ReadUInt32();

    // 010 4-byte little-endian unsigned int: number of frames
    uint frameCount = r.ReadUInt32();

    // 014 1-byte flags "controller mask"
    byte controllerFlags = r.ReadByte();

    /*
     * bit 0: controller 1 in use
     * bit 1: controller 2 in use
     * bit 2: controller 3 in use
     * bit 3: controller 4 in use
     * bit 4: controller 5 in use
     * other: reserved, set to 0
     */
    bool[] controllersUsed = new bool[5];

    // Eww, this is a clunky way to do this
    for (int controller = 1; controller <= controllersUsed.Length; controller++)
    {
        controllersUsed[controller - 1] = ((controllerFlags >> (controller - 1)) & 0x1) != 0;
    }

    var controllerCount = controllersUsed.Count(c => c);

    var ss = new LibsnesCore.SnesSyncSettings
    {
        LeftPort = LibsnesControllerDeck.ControllerType.Gamepad,
        RightPort = LibsnesControllerDeck.ControllerType.Gamepad
    };

    if (controllerCount == 1)
    {
        ss.RightPort = LibsnesControllerDeck.ControllerType.Unplugged;
    }
    else if (controllerCount > 2)
    {
        // More than 2 controllers means a multi-tap on the first port
        // Snes9x only supported up to 5 controllers, so right port would never be multitap
        ss.LeftPort = LibsnesControllerDeck.ControllerType.Multitap;

        // Unless there are exactly 5, right port is unplugged, as the multitap will handle 4 controllers
        if (controllerCount < 5)
        {
            ss.RightPort = LibsnesControllerDeck.ControllerType.Unplugged;
        }
    }

    _deck = new LibsnesControllerDeck(ss);

    // 015 1-byte flags "movie options"
    byte movieFlags = r.ReadByte();

    /*
     * bit 0:
     * if "0", movie begins from an embedded "quicksave" snapshot
     * if "1", a SRAM is included instead of a quicksave; movie begins from reset
     */
    if ((movieFlags & 0x1) == 0)
    {
        Result.Errors.Add("Movies that begin with a savestate are not supported.");
        return;
    }

    // bit 1: if "0", movie is NTSC (60 fps); if "1", movie is PAL (50 fps)
    bool pal = ((movieFlags >> 1) & 0x1) != 0;
    Result.Movie.HeaderEntries[HeaderKeys.PAL] = pal.ToString();

    // other: reserved, set to 0

    /*
     * 016 1-byte flags "sync options":
     * bit 0: MOVIE_SYNC2_INIT_FASTROM
     * other: reserved, set to 0
     */
    r.ReadByte();

    /*
     * 017 1-byte flags "sync options":
     * bit 0: MOVIE_SYNC_DATA_EXISTS
     * if "1", all sync options flags are defined.
     * if "0", all sync options flags have no meaning.
     * bit 1: MOVIE_SYNC_WIP1TIMING
     * bit 2: MOVIE_SYNC_LEFTRIGHT
     * bit 3: MOVIE_SYNC_VOLUMEENVX
     * bit 4: MOVIE_SYNC_FAKEMUTE
     * bit 5: MOVIE_SYNC_SYNCSOUND
     * bit 6: MOVIE_SYNC_HASROMINFO
     * if "1", there is extra ROM info located right in between of the metadata and the savestate.
     * bit 7: set to 0.
     */
    byte syncFlags = r.ReadByte();

    /*
     * Extra ROM info is always positioned right before the savestate. Its size is 30 bytes if MOVIE_SYNC_HASROMINFO
     * is used (and MOVIE_SYNC_DATA_EXISTS is set), 0 bytes otherwise.
     */
    int extraRomInfo = (((syncFlags >> 6) & 0x1) != 0 && (syncFlags & 0x1) != 0) ? 30 : 0;

    // 018 4-byte little-endian unsigned int: offset to the savestate inside file
    uint savestateOffset = r.ReadUInt32();

    // 01C 4-byte little-endian unsigned int: offset to the controller data inside file
    uint firstFrameOffset = r.ReadUInt32();

    int[] controllerTypes = new int[2];

    // The (.SMV 1.51 and up) header has an additional 32 bytes at the end
    if (version != "1.43")
    {
        // 020 4-byte little-endian unsigned int: number of input samples, primarily for peripheral-using games
        r.ReadBytes(4);

        /*
         * 024 2 1-byte unsigned ints: what type of controller is plugged into ports 1 and 2 respectively: 0=NONE,
         * 1=JOYPAD, 2=MOUSE, 3=SUPERSCOPE, 4=JUSTIFIER, 5=MULTITAP
         */
        controllerTypes[0] = r.ReadByte();
        controllerTypes[1] = r.ReadByte();

        // 026 4 1-byte signed ints: controller IDs of port 1, or -1 for unplugged
        r.ReadBytes(4);

        // 02A 4 1-byte signed ints: controller IDs of port 2, or -1 for unplugged
        r.ReadBytes(4);

        // 02E 18 bytes: reserved for future use
        r.ReadBytes(18);
    }

    /*
     * After the header comes "metadata", which is UTF16-coded movie title string (author info). The metadata begins
     * from position 32 (0x20 (0x40 for 1.51 and up)) and ends at <savestate_offset -
     * length_of_extra_rom_info_in_bytes>.
     */
    byte[] metadata = r.ReadBytes((int)(savestateOffset - extraRomInfo - ((version != "1.43") ? 0x40 : 0x20)));
    string author = NullTerminated(Encoding.Unicode.GetString(metadata).Trim());
    if (!string.IsNullOrWhiteSpace(author))
    {
        Result.Movie.HeaderEntries[HeaderKeys.AUTHOR] = author;
    }

    if (extraRomInfo == 30)
    {
        // 000 3 bytes of zero padding: 00 00 00 003 4-byte integer: CRC32 of the ROM 007 23-byte ascii string
        r.ReadBytes(3);
        int crc32 = r.ReadInt32();
        Result.Movie.HeaderEntries["CRC32"] = crc32.ToString();

        // the game name copied from the ROM, truncated to 23 bytes (the game name in the ROM is 21 bytes)
        string gameName = NullTerminated(Encoding.UTF8.GetString(r.ReadBytes(23)));
        Result.Movie.HeaderEntries[HeaderKeys.GAMENAME] = gameName;
    }

    SimpleController controllers = new SimpleController { Definition = _deck.Definition };
    r.BaseStream.Position = firstFrameOffset;

    /*
     * Bit layout of a controller's 2-byte input record:
     * 01 00 (reserved)
     * 02 00 (reserved)
     * 04 00 (reserved)
     * 08 00 (reserved)
     * 10 00 R
     * 20 00 L
     * 40 00 X
     * 80 00 A
     * 00 01 Right
     * 00 02 Left
     * 00 04 Down
     * 00 08 Up
     * 00 10 Start
     * 00 20 Select
     * 00 40 Y
     * 00 80 B
     */
    string[] buttons = { "Right", "Left", "Down", "Up", "Start", "Select", "Y", "B", "R", "L", "X", "A" };
    for (int frame = 0; frame <= frameCount; frame++)
    {
        controllers["Reset"] = true;
        for (int player = 1; player <= controllersUsed.Length; player++)
        {
            if (!controllersUsed[player - 1])
            {
                continue;
            }

            /*
             * Each frame consists of 2 bytes per controller. So if there are 3 controllers, a frame is 6 bytes and
             * if there is only 1 controller, a frame is 2 bytes.
             */
            byte controllerState1 = r.ReadByte();
            byte controllerState2 = r.ReadByte();

            /*
             * In the reset-recording patch, a frame that contains the value FF FF for every controller denotes a
             * reset. The reset is done through the S9xSoftReset routine.
             */
            if (controllerState1 != 0xFF || controllerState2 != 0xFF)
            {
                controllers["Reset"] = false;
            }

            /*
             * While the meaning of controller data (for 1.51 and up) for a single standard SNES controller pad
             * remains the same, each frame of controller data can contain additional bytes if input for peripherals
             * is being recorded.
             */
            if (version != "1.43" && player <= controllerTypes.Length)
            {
                var peripheral = "";
                switch (controllerTypes[player - 1])
                {
                    case 0: // NONE
                        continue;
                    case 1: // JOYPAD
                        break;
                    case 2: // MOUSE
                        peripheral = "Mouse";

                        // 5*num_mouse_ports
                        r.ReadBytes(5);
                        break;
                    case 3: // SUPERSCOPE
                        peripheral = "Super Scope";

                        // 6*num_superscope_ports
                        r.ReadBytes(6);
                        break;
                    case 4: // JUSTIFIER
                        peripheral = "Justifier";

                        // 11*num_justifier_ports
                        r.ReadBytes(11);
                        break;
                    case 5: // MULTITAP
                        peripheral = "Multitap";
                        break;
                }

                if (peripheral != "" && !Result.Warnings.Any())
                {
                    Result.Warnings.Add($"Unable to import {peripheral}. Not supported yet");
                }
            }

            // Byte 1's high nibble (R, L, X, A) lands in bits 8-11; byte 2 supplies bits 0-7.
            ushort controllerState = (ushort)(((controllerState1 << 4) & 0x0F00) | controllerState2);
            for (int button = 0; button < buttons.Length; button++)
            {
                controllers[$"P{player} {buttons[button]}"] = ((controllerState >> button) & 0x1) != 0;
            }
        }

        // The controller data contains <number_of_frames + 1> frames.
        if (frame == 0)
        {
            continue;
        }

        Result.Movie.AppendFrame(controllers);
        // NOTE(review): the next two assignments execute once per imported frame yet
        // look loop-invariant; they could likely move after the loop — confirm before changing.
        Result.Movie.SyncSettingsJson = ConfigService.SaveWithType(ss);
        Global.Config.SnesInSnes9x = false;
    }
}
/// <summary>
/// Wraps a method definition together with its PDB debug information. When the
/// method has sequence points, computes its start/end source locations by scanning
/// for the lexically first start and lexically last end position.
/// </summary>
public MethodInfo(AssemblyInfo assembly, MethodDefinitionHandle methodDefHandle, int token, SourceFile source, TypeInfo type, MetadataReader asmMetadataReader, MetadataReader pdbMetadataReader)
{
    // NOTE(review): 'type' is not read in this constructor — confirm whether it is still needed.
    this.Assembly = assembly;
    this.methodDef = asmMetadataReader.GetMethodDefinition(methodDefHandle);
    this.DebugInformation = pdbMetadataReader.GetMethodDebugInformation(methodDefHandle.ToDebugInformationHandle());
    this.source = source;
    this.Token = token;
    this.methodDefHandle = methodDefHandle;
    this.Name = asmMetadataReader.GetString(methodDef.Name);
    this.pdbMetadataReader = pdbMetadataReader;
    this.IsEnCMethod = false;
    if (!DebugInformation.SequencePointsBlob.IsNil)
    {
        var sps = DebugInformation.GetSequencePoints();
        // Seed both extremes with the first sequence point, then widen.
        SequencePoint start = sps.First();
        SequencePoint end = sps.First();
        foreach (SequencePoint sp in sps)
        {
            // Earlier line wins; on the same line, the smaller column wins.
            if (sp.StartLine < start.StartLine)
            {
                start = sp;
            }
            else if (sp.StartLine == start.StartLine && sp.StartColumn < start.StartColumn)
            {
                start = sp;
            }

            // Later line wins; on the same line, the larger column wins.
            if (sp.EndLine > end.EndLine)
            {
                end = sp;
            }
            else if (sp.EndLine == end.EndLine && sp.EndColumn > end.EndColumn)
            {
                end = sp;
            }
        }

        StartLocation = new SourceLocation(this, start);
        EndLocation = new SourceLocation(this, end);
    }

    localScopes = pdbMetadataReader.GetLocalScopes(methodDefHandle);
}
/// <summary>
/// Convenience overload: evaluates a breakpoint at the given file and line
/// with no additional context (delegates with a null third argument).
/// </summary>
public Boolean ShouldBreak(SourceFile file, int line) => ShouldBreak(file, line, null);
/// <summary>
/// Imports a Yabause .ymv Saturn movie: header key/value lines plus '|'-prefixed
/// input frame records, producing Nyma sync settings with a gamepad in port 1.
/// </summary>
protected override void RunImport()
{
    Result.Movie.HeaderEntries[HeaderKeys.Platform] = VSystemID.Raw.SAT;
    var ss = new Emulation.Cores.Waterbox.NymaCore.NymaSyncSettings
    {
        PortDevices =
        {
            [0] = "gamepad",
            [1] = "none",
            [2] = "none",
            [3] = "none",
            [4] = "none",
            [5] = "none",
            [6] = "none",
            [7] = "none",
            [8] = "none",
            [9] = "none",
            [10] = "none",
            [11] = "none",
        }
    };

    using var sr = SourceFile.OpenText();
    string line;
    while ((line = sr.ReadLine()) != null)
    {
        if (string.IsNullOrWhiteSpace(line))
        {
            continue;
        }

        if (line[0] == '|')
        {
            // Input record line.
            ImportTextFrame(line);
        }
        else if (line.ToLower().StartsWith("emuversion"))
        {
            Result.Movie.Comments.Add($"{EmulationOrigin} Yabause version {ParseHeader(line, "emuVersion")}");
        }
        else if (line.ToLower().StartsWith("version"))
        {
            // Must come after the "emuversion" check, which would also match this prefix.
            string version = ParseHeader(line, "version");
            Result.Movie.Comments.Add($"{MovieOrigin} .ymv version {version}");
        }
        else if (line.ToLower().StartsWith("cdgamename"))
        {
            // Bug fix: the prefix check used the mixed-case "cdGameName" against the
            // lowercased line (so it never matched), and the branch parsed the wrong
            // header key ("romFilename"). Now matches the sibling importers.
            Result.Movie.HeaderEntries[HeaderKeys.GameName] = ParseHeader(line, "cdGameName");
        }
        else if (line.ToLower().StartsWith("rerecordcount"))
        {
            // Parse the re-record count as an integer, defaulting to 0 if it fails.
            int.TryParse(ParseHeader(line, "rerecordCount"), out int rerecordCount);
            Result.Movie.Rerecords = (ulong)rerecordCount;
        }
        else if (line.ToLower().StartsWith("startsfromsavestate"))
        {
            // If this movie starts from a savestate, we can't support it.
            if (ParseHeader(line, "StartsFromSavestate") == "1")
            {
                Result.Errors.Add("Movies that begin with a savestate are not supported.");
            }
        }
        else if (line.ToLower().StartsWith("ispal"))
        {
            bool pal = ParseHeader(line, "isPal") == "1";
            Result.Movie.HeaderEntries[HeaderKeys.Pal] = pal.ToString();
        }
        else
        {
            // Everything not explicitly defined is treated as a comment.
            Result.Movie.Comments.Add(line);
        }
    }

    Result.Movie.SyncSettingsJson = ConfigService.SaveWithType(ss);
}
/// <summary>
/// Two results are equal when they reference the same source file and the same
/// owning element location.
/// </summary>
protected bool Equals(UnityAssetFindResult other)
{
    if (!SourceFile.Equals(other.SourceFile))
    {
        return false;
    }

    return OwningElemetLocation.Equals(other.OwningElemetLocation);
}
/// <summary>
/// Renders an HTML coverage report for a single source file: a summary table
/// followed by the annotated source, with hit/partial/not-hit highlighting and
/// per-statement hover details (hits, branch outcomes, calling contexts).
/// </summary>
/// <param name="result">Instrumentation result providing the source root path.</param>
/// <param name="sourceFile">The instrumented file (relative path plus statement sequences).</param>
/// <param name="hitsInfo">Recorded hit data for statements and branches.</param>
/// <param name="threshold">Coverage threshold used for pass/fail coloring.</param>
/// <param name="outputFile">Destination HTML file; its directory is created if missing.</param>
public void Generate(
    InstrumentationResult result,
    SourceFile sourceFile,
    HitsInfo hitsInfo,
    float threshold,
    string outputFile)
{
    var lines = File.ReadAllLines(Path.Combine(result.SourcePath, sourceFile.Path));

    Directory.CreateDirectory(Path.GetDirectoryName(outputFile));

    var summary = SummaryFactory.CalculateFilesSummary(new[] { sourceFile }, hitsInfo, threshold);

    var lineCoverageClass = summary.LinesCoveragePass ? "green" : "red";
    var statementsCoverageClass = summary.StatementsCoveragePass ? "green" : "red";
    var branchCoverageClass = summary.BranchesCoveragePass ? "green" : "red";

    using (var htmlWriter = (TextWriter)File.CreateText(outputFile))
    {
        htmlWriter.WriteLine("<html>");
        htmlWriter.WriteLine("<style>");
        htmlWriter.WriteLine(ResourceUtils.GetContent("MiniCover.Reports.Html.Shared.css"));
        htmlWriter.WriteLine(ResourceUtils.GetContent("MiniCover.Reports.Html.SourceFile.css"));
        htmlWriter.WriteLine("</style>");
        htmlWriter.WriteLine("<script>");
        htmlWriter.WriteLine(ResourceUtils.GetContent("MiniCover.Reports.Html.Shared.js"));
        htmlWriter.WriteLine("</script>");
        htmlWriter.WriteLine("<body>");
        htmlWriter.WriteLine("<h2>Summary</h2>");
        htmlWriter.WriteLine("<table>");
        htmlWriter.WriteLine($"<tr><th>Generated on</th><td>{DateTime.Now}</td></tr>");
        htmlWriter.WriteLine($"<tr><th>Line Coverage</th><td class=\"{lineCoverageClass}\">{summary.LinesPercentage:P} ({summary.CoveredLines}/{summary.Lines})</td></tr>");
        // Bug fix: this row previously reused branchCoverageClass, so the statements
        // cell was colored by the branch-coverage pass/fail result.
        htmlWriter.WriteLine($"<tr><th>Statements Coverage</th><td class=\"{statementsCoverageClass}\">{summary.StatementsPercentage:P} ({summary.CoveredStatements}/{summary.Statements})</td></tr>");
        htmlWriter.WriteLine($"<tr><th>Branch Coverage</th><td class=\"{branchCoverageClass}\">{summary.BranchesPercentage:P} ({summary.CoveredBranches}/{summary.Branches})</td></tr>");
        htmlWriter.WriteLine($"<tr><th>Threshold</th><td>{threshold:P}</td></tr>");
        htmlWriter.WriteLine("</table>");
        htmlWriter.WriteLine("<h2>Code</h2>");
        htmlWriter.WriteLine("<div class=\"legend\">");
        htmlWriter.Write("<label>Legend:</label>");
        htmlWriter.Write("<div class=\"hit\">Covered</div>");
        htmlWriter.Write("<div class=\"partial\">Partially covered</div>");
        htmlWriter.Write("<div class=\"not-hit\">Not covered</div>");
        htmlWriter.WriteLine("</div>");
        htmlWriter.WriteLine("<div class=\"code\">");

        for (var l = 1; l <= lines.Length; l++)
        {
            var line = lines[l - 1];

            // Statement sequences that cover this physical line.
            var instructions = sourceFile.Sequences
                .Where(i => i.GetLines().Contains(l))
                .ToArray();

            var lineHitCount = instructions.Sum(a => hitsInfo.GetHitCount(a.HitId));

            var lineClasses = new List<string> { "line" };
            if (lineHitCount > 0)
            {
                // Partial when any statement or branch on the line was never hit.
                if (instructions.Any(i => !hitsInfo.WasHit(i.HitId) || i.Conditions.SelectMany(x => x.Branches).Any(b => !hitsInfo.WasHit(b.HitId))))
                {
                    lineClasses.Add("partial");
                }
                else
                {
                    lineClasses.Add("hit");
                }
            }
            else if (instructions.Length > 0)
            {
                lineClasses.Add("not-hit");
            }

            htmlWriter.Write($"<div class=\"{string.Join(" ", lineClasses)}\">");
            htmlWriter.Write($"<div class=\"line-number\">{l}</div>");
            htmlWriter.Write("<div class=\"line-content\">");
            if (line.Length > 0)
            {
                for (var c = 1; c <= line.Length; c++)
                {
                    var character = line[c - 1].ToString();

                    // Open a statement span at the statement's start position (or at
                    // column 1 when the statement started on an earlier line).
                    foreach (var instruction in instructions)
                    {
                        if (instruction.StartLine == l && instruction.StartColumn == c || instruction.StartLine < l && c == 1)
                        {
                            var statementIdClass = $"s-{instruction.HitId}";

                            var statementClasses = new List<string> { "statement", statementIdClass };
                            if (hitsInfo.WasHit(instruction.HitId))
                            {
                                statementClasses.Add("hit");
                                if (instruction.Conditions.SelectMany(x => x.Branches).Any(b => !hitsInfo.WasHit(b.HitId)))
                                {
                                    statementClasses.Add("partial");
                                }
                            }
                            else
                            {
                                statementClasses.Add("not-hit");
                            }

                            htmlWriter.Write($"<div data-hover-target=\".{statementIdClass}\" data-activate-target=\".{statementIdClass}\" class=\"{string.Join(" ", statementClasses)}\">");

                            // Emit the hover tooltip once, at the statement's last line.
                            if (instruction.EndLine == l)
                            {
                                var hitCount = hitsInfo.GetHitCount(instruction.HitId);

                                var contexts = hitsInfo.GetHitContexts(instruction.HitId)
                                    .Distinct()
                                    .ToArray();

                                htmlWriter.Write($"<div class=\"statement-info {statementIdClass}\">");
                                htmlWriter.Write($"<div>Id: {instruction.HitId}</div>");
                                htmlWriter.Write($"<div>Hits: {hitCount}</div>");
                                if (instruction.Conditions.Length > 0)
                                {
                                    var conditionIndex = 0;
                                    foreach (var condition in instruction.Conditions)
                                    {
                                        htmlWriter.Write($"<div>Condition {++conditionIndex}:");
                                        htmlWriter.Write("<ul>");
                                        var branchIndex = 0;
                                        foreach (var branch in condition.Branches)
                                        {
                                            var branchHitCount = hitsInfo.GetHitCount(branch.HitId);
                                            htmlWriter.Write($"<li>Branch {++branchIndex}: {FormatHits(branchHitCount)}</li>");
                                        }

                                        htmlWriter.Write("</ul>");
                                        htmlWriter.Write("</div>");
                                    }
                                }

                                if (contexts.Length > 0)
                                {
                                    htmlWriter.Write("<div>Contexts:");
                                    htmlWriter.Write("<ul>");
                                    foreach (var context in contexts)
                                    {
                                        var contextHitCount = context.GetHitCount(instruction.HitId);
                                        var description = $"{context.ClassName}.{context.MethodName}";
                                        htmlWriter.Write($"<li>{WebUtility.HtmlEncode(description)}: {FormatHits(contextHitCount)}</li>");
                                    }

                                    htmlWriter.Write("</ul></div>");
                                }

                                htmlWriter.Write("</div>");
                            }
                        }
                    }

                    htmlWriter.Write(WebUtility.HtmlEncode(character));

                    // Close the statement span at its end position (or at end-of-line
                    // when the statement continues past this line).
                    foreach (var instruction in instructions)
                    {
                        if (instruction.EndLine == l && instruction.EndColumn == c + 1 || instruction.EndLine > l && c == line.Length)
                        {
                            htmlWriter.Write("</div>");
                        }
                    }
                }
            }
            else
            {
                htmlWriter.WriteLine(" ");
            }

            htmlWriter.Write("</div>");
            htmlWriter.WriteLine("</div>");
        }

        htmlWriter.WriteLine("</div>");
        htmlWriter.WriteLine("</body>");
        htmlWriter.WriteLine("</html>");
    }
}
/// <summary>
/// Builds the per-assembly caches: source files (deduplicated by document row id),
/// types by full name, and methods by metadata row number, wiring each method to
/// its source document via the PDB's debug information.
/// </summary>
private void Populate()
{
    // Maps a document row id to its (deduplicated) SourceFile.
    var d2s = new Dictionary<int, SourceFile>();

    // Returns the cached SourceFile for the document, creating and registering it on first sight.
    SourceFile FindSource(DocumentHandle doc, int rowid, string documentName)
    {
        if (d2s.TryGetValue(rowid, out SourceFile source))
        {
            return source;
        }

        var src = new SourceFile(this, sources.Count, doc, GetSourceLinkUrl(documentName), documentName);
        sources.Add(src);
        d2s[rowid] = src;
        return src;
    }

    // Removed dead code: a foreach over asmMetadataReader.Documents whose
    // GetDocument result was discarded, and an unused GetMethodDefinition local
    // in the method loop below — neither had any observable effect.
    if (pdbMetadataReader != null)
    {
        ProcessSourceLink();
    }

    foreach (TypeDefinitionHandle type in asmMetadataReader.TypeDefinitions)
    {
        var typeDefinition = asmMetadataReader.GetTypeDefinition(type);
        var typeInfo = new TypeInfo(this, typeDefinition);
        typesByName[typeInfo.FullName] = typeInfo;
        if (pdbMetadataReader != null)
        {
            foreach (MethodDefinitionHandle method in typeDefinition.GetMethods())
            {
                if (!method.ToDebugInformationHandle().IsNil)
                {
                    var methodDebugInformation = pdbMetadataReader.GetMethodDebugInformation(method.ToDebugInformationHandle());
                    if (!methodDebugInformation.Document.IsNil)
                    {
                        var document = pdbMetadataReader.GetDocument(methodDebugInformation.Document);
                        var documentName = pdbMetadataReader.GetString(document.Name);
                        SourceFile source = FindSource(methodDebugInformation.Document, asmMetadataReader.GetRowNumber(methodDebugInformation.Document), documentName);
                        var methodInfo = new MethodInfo(this, method, asmMetadataReader.GetRowNumber(method), source, typeInfo, asmMetadataReader, pdbMetadataReader);
                        methods[asmMetadataReader.GetRowNumber(method)] = methodInfo;
                        if (source != null)
                        {
                            source.AddMethod(methodInfo);
                        }

                        typeInfo.Methods.Add(methodInfo);
                    }
                }
            }
        }
    }
}
// Scans the PDB's C/C++ object modules for known-vulnerable source files
// (matched by file name plus content hash) and logs an error per vulnerable
// dependency compiled into the binary, or a pass result when none are found.
public override void Analyze(BinaryAnalyzerContext context)
{
    // NOTE(review): 'peHeader' is not read in this method — presumably kept for
    // parity with sibling rules; confirm before removing.
    PEHeader peHeader = context.PE.PEHeaders.PEHeader;
    Pdb pdb = context.Pdb;
    if (pdb == null)
    {
        Errors.LogExceptionLoadingPdb(context, context.PdbParseException.Message);
        return;
    }

    // Vulnerability id -> modules that pulled in a matching source file.
    Dictionary<string, TruncatedCompilandRecordList> vulnerabilityToModules = new Dictionary<string, TruncatedCompilandRecordList>();
    TruncatedCompilandRecordList moduleList;

    foreach (DisposableEnumerableView<Symbol> omView in pdb.CreateObjectModuleIterator())
    {
        Symbol om = omView.Value;
        ObjectModuleDetails details = om.GetObjectModuleDetails();
        // Only C/C++ modules with debug info can be matched against the file database.
        if (details.Language != Language.C && details.Language != Language.Cxx)
        {
            continue;
        }

        if (!details.HasDebugInfo)
        {
            continue;
        }

        foreach (DisposableEnumerableView<SourceFile> sfView in pdb.CreateSourceFileIterator(om))
        {
            SourceFile sf = sfView.Value;
            string fileName = Path.GetFileName(sf.FileName);

            // Skip files not in the watch list, or with no recorded hash to compare.
            if (!_files.Contains(fileName) || sf.HashType == HashType.None)
            {
                continue;
            }

            // Key format "<fileName>#<hex hash>" matches the descriptor map's keys.
            string hash = fileName + "#" + BitConverter.ToString(sf.Hash);
            VulnerableDependencyDescriptor descriptor;
            if (_filesToVulnerabilitiesMap.TryGetValue(hash, out descriptor))
            {
                if (!vulnerabilityToModules.TryGetValue(descriptor.Id, out moduleList))
                {
                    moduleList = vulnerabilityToModules[descriptor.Id] = new TruncatedCompilandRecordList();
                }

                moduleList.Add(om.CreateCompilandRecordWithSuffix(hash));
            }
        }
    }

    if (vulnerabilityToModules.Count != 0)
    {
        foreach (string id in vulnerabilityToModules.Keys)
        {
            moduleList = vulnerabilityToModules[id];
            VulnerableDependencyDescriptor descriptor = (VulnerableDependencyDescriptor)context.Policy.GetProperty(VulnerableDependencies)[id];

            // '{0}' was built with a version of {1} which is subject to the following issues: {2}.
            // To resolve this, {3}. The source files that triggered this were: {4}
            context.Logger.Log(this,
                RuleUtilities.BuildResult(ResultKind.Error, context, null,
                    nameof(RuleResources.BA2002_Fail),
                    descriptor.Name,
                    descriptor.VulnerabilityDescription,
                    descriptor.Resolution,
                    moduleList.CreateSortedObjectList()));
        }

        return;
    }

    // '{0}' does not incorporate any known vulnerable dependencies, as configured by current policy.
    context.Logger.Log(this,
        RuleUtilities.BuildResult(ResultKind.Pass, context, null,
            nameof(RuleResources.BA2002_Pass)));
}
/// <inheritdoc />
protected override SourceFile DoCreateSourceFile(AbsolutePath path)
{
    // This is the interop point to advertise values to other DScript specs
    // For now we just return an empty SourceFile
    var expandedPath = path.ToString(m_context.PathTable);
    return SourceFile.Create(expandedPath);
}
/// <summary>
/// Execute the task: stages a built target's runtime files into a flattened
/// output directory, mapping engine files under "Engine" and project files under
/// the project name, then applies any requested tags.
/// </summary>
/// <param name="Job">Information about the current job</param>
/// <param name="BuildProducts">Set of build products produced by this node.</param>
/// <param name="TagNameToFileSet">Mapping from tag names to the set of files they include</param>
public override void Execute(JobContext Job, HashSet<FileReference> BuildProducts, Dictionary<string, HashSet<FileReference>> TagNameToFileSet)
{
    // Get the project path, and check it exists
    FileReference ProjectFile = null;
    if (Parameters.Project != null)
    {
        ProjectFile = ResolveFile(Parameters.Project);
        if (!FileReference.Exists(ProjectFile))
        {
            throw new AutomationException("Couldn't find project '{0}'", ProjectFile.FullName);
        }
    }

    // Get the directories used for staging this project
    DirectoryReference SourceEngineDir = CommandUtils.EngineDirectory;
    DirectoryReference SourceProjectDir = (ProjectFile == null) ? SourceEngineDir : ProjectFile.Directory;

    // Get the output directories. We flatten the directory structure on output.
    DirectoryReference TargetDir = ResolveDirectory(Parameters.ToDir);
    DirectoryReference TargetEngineDir = DirectoryReference.Combine(TargetDir, "Engine");

    // Bug fix: ProjectFile is legitimately null when no project is given; previously
    // this line dereferenced it unconditionally. Fall back to the engine target dir
    // (with no project, SourceProjectDir is the engine dir anyway).
    DirectoryReference TargetProjectDir = (ProjectFile == null) ? TargetEngineDir : DirectoryReference.Combine(TargetDir, ProjectFile.GetFileNameWithoutExtension());

    // Get the path to the receipt
    FileReference ReceiptFileName = TargetReceipt.GetDefaultPath(SourceProjectDir, Parameters.Target, Parameters.Platform, Parameters.Configuration, Parameters.Architecture);

    // Try to load it
    TargetReceipt Receipt;
    if (!TargetReceipt.TryRead(ReceiptFileName, SourceEngineDir, SourceProjectDir, out Receipt))
    {
        throw new AutomationException("Couldn't read receipt '{0}'", ReceiptFileName);
    }

    // Stage all the build products needed at runtime
    HashSet<FileReference> SourceFiles = new HashSet<FileReference>();
    foreach (BuildProduct BuildProduct in Receipt.BuildProducts.Where(x => x.Type != BuildProductType.StaticLibrary && x.Type != BuildProductType.ImportLibrary))
    {
        SourceFiles.Add(BuildProduct.Path);
    }
    foreach (RuntimeDependency RuntimeDependency in Receipt.RuntimeDependencies.Where(x => x.Type != StagedFileType.UFS))
    {
        SourceFiles.Add(RuntimeDependency.Path);
    }

    // Get all the target files
    List<FileReference> TargetFiles = new List<FileReference>();
    foreach (FileReference SourceFile in SourceFiles)
    {
        // Get the destination file to copy to, mapping to the new engine and project directories as appropriate
        FileReference TargetFile;
        if (SourceFile.IsUnderDirectory(SourceEngineDir))
        {
            TargetFile = FileReference.Combine(TargetEngineDir, SourceFile.MakeRelativeTo(SourceEngineDir));
        }
        else
        {
            TargetFile = FileReference.Combine(TargetProjectDir, SourceFile.MakeRelativeTo(SourceProjectDir));
        }

        // Fixup the case of the output file. Would expect Platform.DeployLowerCaseFilenames() to return true here, but seems not to be the case.
        if (Parameters.Platform == UnrealTargetPlatform.PS4)
        {
            TargetFile = FileReference.Combine(TargetDir, TargetFile.MakeRelativeTo(TargetDir).ToLowerInvariant());
        }

        // Only copy the output file if it doesn't already exist. We can stage multiple targets to the same output directory.
        if (Parameters.Overwrite || !FileReference.Exists(TargetFile))
        {
            DirectoryReference.CreateDirectory(TargetFile.Directory);
            CommandUtils.CopyFile(SourceFile.FullName, TargetFile.FullName);
            // Force all destination files to not readonly.
            CommandUtils.SetFileAttributes(TargetFile.FullName, ReadOnly: false);
        }

        // Add it to the list of target files
        TargetFiles.Add(TargetFile);
    }

    // Apply the optional tag to the build products
    foreach (string TagName in FindTagNamesFromList(Parameters.Tag))
    {
        FindOrAddTagSet(TagNameToFileSet, TagName).UnionWith(TargetFiles);
    }

    // Add the target file to the list of build products
    BuildProducts.UnionWith(TargetFiles);
}
/// <summary>
/// Creates a tab page bound to the given source file. Designer components are
/// initialized before the SourceFile property is assigned.
/// </summary>
public SourceFileTabPage(SourceFile sourceFile)
{
    InitializeComponent();
    SourceFile = sourceFile;
}
/// <summary>
/// Imports an FCEUX .fm2 movie: header key/value lines become movie metadata and controller
/// configuration, '|'-prefixed lines become input frames, and unrecognized lines are kept as comments.
/// </summary>
/// <remarks>
/// Header keys are matched case-insensitively. The original implementation used
/// line.ToLower().StartsWith(...), which allocates a new string per check and is
/// culture-sensitive (e.g. under the Turkish locale 'I' lowercases to 'ı', breaking
/// matches such as "GUID"); StartsWith with StringComparison.OrdinalIgnoreCase is
/// allocation-free and locale-independent.
/// </remarks>
protected override void RunImport()
{
    // FM2 is NES-only; target the NesHawk core.
    Result.Movie.HeaderEntries[HeaderKeys.Core] = CoreNames.NesHawk;
    const string emulator = "FCEUX";
    var platform = "NES"; // TODO: FDS?
    var syncSettings = new NES.NESSyncSettings();

    // Start with both ports unplugged; portN/fourscore header lines below may plug controllers in.
    var controllerSettings = new NESControlSettings
    {
        NesLeftPort = nameof(UnpluggedNES),
        NesRightPort = nameof(UnpluggedNES)
    };
    _deck = controllerSettings.Instantiate((x, y) => true);
    AddDeckControlButtons();
    Result.Movie.HeaderEntries[HeaderKeys.Platform] = platform;

    using var sr = SourceFile.OpenText();
    string line;
    while ((line = sr.ReadLine()) != null)
    {
        if (line == "")
        {
            continue;
        }

        if (line[0] == '|')
        {
            // Input frame record.
            ImportInputFrame(line);
        }
        else if (line.StartsWith("sub", StringComparison.OrdinalIgnoreCase))
        {
            var subtitle = ImportTextSubtitle(line);
            if (!string.IsNullOrEmpty(subtitle))
            {
                Result.Movie.Subtitles.AddFromString(subtitle);
            }
        }
        else if (line.StartsWith("emuversion", StringComparison.OrdinalIgnoreCase))
        {
            // Checked before "version" so the longer key isn't shadowed.
            Result.Movie.Comments.Add($"{EmulationOrigin} {emulator} version {ParseHeader(line, "emuVersion")}");
        }
        else if (line.StartsWith("version", StringComparison.OrdinalIgnoreCase))
        {
            string version = ParseHeader(line, "version");
            if (version != "3")
            {
                Result.Warnings.Add("Detected a .fm2 movie version other than 3, which is unsupported");
            }
            else
            {
                Result.Movie.Comments.Add($"{MovieOrigin} .fm2 version 3");
            }
        }
        else if (line.StartsWith("romfilename", StringComparison.OrdinalIgnoreCase))
        {
            Result.Movie.HeaderEntries[HeaderKeys.GameName] = ParseHeader(line, "romFilename");
        }
        else if (line.StartsWith("cdgamename", StringComparison.OrdinalIgnoreCase))
        {
            Result.Movie.HeaderEntries[HeaderKeys.GameName] = ParseHeader(line, "cdGameName");
        }
        else if (line.StartsWith("romchecksum", StringComparison.OrdinalIgnoreCase))
        {
            string blob = ParseHeader(line, "romChecksum");
            byte[] md5 = DecodeBlob(blob);
            if (md5 != null && md5.Length == 16)
            {
                Result.Movie.HeaderEntries[Md5] = md5.BytesToHexString().ToLower();
            }
            else
            {
                Result.Warnings.Add("Bad ROM checksum.");
            }
        }
        else if (line.StartsWith("comment author", StringComparison.OrdinalIgnoreCase))
        {
            Result.Movie.HeaderEntries[HeaderKeys.Author] = ParseHeader(line, "comment author");
        }
        else if (line.StartsWith("rerecordcount", StringComparison.OrdinalIgnoreCase))
        {
            // On parse failure rerecordCount remains 0, matching prior behavior.
            int.TryParse(ParseHeader(line, "rerecordCount"), out var rerecordCount);
            Result.Movie.Rerecords = (ulong)rerecordCount;
        }
        else if (line.StartsWith("guid", StringComparison.OrdinalIgnoreCase))
        {
            // We no longer care to keep this info
        }
        else if (line.StartsWith("startsfromsavestate", StringComparison.OrdinalIgnoreCase))
        {
            // If this movie starts from a savestate, we can't support it.
            if (ParseHeader(line, "StartsFromSavestate") == "1")
            {
                Result.Errors.Add("Movies that begin with a savestate are not supported.");
                break;
            }
        }
        else if (line.StartsWith("palflag", StringComparison.OrdinalIgnoreCase))
        {
            Result.Movie.HeaderEntries[HeaderKeys.Pal] = ParseHeader(line, "palFlag");
        }
        else if (line.StartsWith("port0", StringComparison.OrdinalIgnoreCase))
        {
            if (ParseHeader(line, "port0") == "1")
            {
                controllerSettings.NesLeftPort = nameof(ControllerNES);
                _deck = controllerSettings.Instantiate((x, y) => false);
                AddDeckControlButtons();
            }
        }
        else if (line.StartsWith("port1", StringComparison.OrdinalIgnoreCase))
        {
            if (ParseHeader(line, "port1") == "1")
            {
                controllerSettings.NesRightPort = nameof(ControllerNES);
                _deck = controllerSettings.Instantiate((x, y) => false);
                AddDeckControlButtons();
            }
        }
        else if (line.StartsWith("port2", StringComparison.OrdinalIgnoreCase))
        {
            if (ParseHeader(line, "port2") == "1")
            {
                Result.Warnings.Add("Famicom port detected but not yet supported, ignoring");
            }
        }
        else if (line.StartsWith("fourscore", StringComparison.OrdinalIgnoreCase))
        {
            bool fourscore = ParseHeader(line, "fourscore") == "1";
            if (fourscore)
            {
                // TODO: set controller config sync settings
                controllerSettings.NesLeftPort = nameof(FourScore);
                controllerSettings.NesRightPort = nameof(FourScore);
            }
            _deck = controllerSettings.Instantiate((x, y) => false);
        }
        else
        {
            Result.Movie.Comments.Add(line); // Everything not explicitly defined is treated as a comment.
        }
    }

    syncSettings.Controls = controllerSettings;
    Result.Movie.SyncSettingsJson = ConfigService.SaveWithType(syncSettings);
}
/// <summary>
/// Main entry point for the command. Fixes the on-depot casing of files matching the
/// "Files" filespec by deleting them (keeping workspace copies) and re-adding them
/// under the requested casing, in two separate submitted changelists.
/// </summary>
public override void ExecuteBuild()
{
    string FileSpec = ParseRequiredStringParam("Files");

    // Make sure the patterns are a valid syntax: only full depot paths are accepted.
    if (!FileSpec.StartsWith("//"))
    {
        throw new AutomationException("Files must be specified as full depot paths");
    }

    // Pick out the source and target prefixes: strip a single trailing "*" or "..." wildcard.
    string Prefix;
    if (FileSpec.EndsWith("*"))
    {
        Prefix = FileSpec.Substring(0, FileSpec.Length - 1);
    }
    else if (FileSpec.EndsWith("..."))
    {
        Prefix = FileSpec.Substring(0, FileSpec.Length - 3);
    }
    else
    {
        Prefix = FileSpec;
    }

    // Make sure there aren't any other wildcards in the pattern (only a trailing one is allowed).
    if (Prefix.Contains("?") || Prefix.Contains("*") || Prefix.Contains("..."))
    {
        throw new AutomationException("Wildcards are only permitted at the end of filespecs");
    }

    // Find all the source files ("-e" presumably filters out deleted revisions — confirm
    // against the p4 files reference).
    List<string> SourceFiles = P4.Files(String.Format("-e {0}", FileSpec));
    if (SourceFiles.Count == 0)
    {
        throw new AutomationException("No files found matching {0}", FileSpec);
    }

    // Drop files whose depot path already begins with the exact-cased prefix; those
    // already have the desired casing and need no rename.
    SourceFiles.RemoveAll(x => x.StartsWith(Prefix, StringComparison.Ordinal));

    // Error if we didn't find anything left to fix.
    if (SourceFiles.Count == 0)
    {
        throw new AutomationException("No files found matching spec");
    }

    // Find all the target files: same suffix, but with the prefix replaced by the
    // requested casing. A case-insensitive mismatch here means the server returned
    // something outside the filespec, which we treat as fatal.
    List<string> TargetFiles = new List<string>(SourceFiles.Count);
    foreach (string SourceFile in SourceFiles)
    {
        if (SourceFile.StartsWith(Prefix, StringComparison.OrdinalIgnoreCase))
        {
            TargetFiles.Add(Prefix + SourceFile.Substring(Prefix.Length));
        }
        else
        {
            throw new AutomationException("Source file '{0}' does not start with '{1}'", SourceFile, Prefix);
        }
    }

    // Print what we're going to do: each old path followed by its new path.
    LogInformation("Ready to rename {0} files:", SourceFiles.Count);
    for (int Idx = 0; Idx < SourceFiles.Count; Idx++)
    {
        LogInformation("{0,3}: {1}", Idx, SourceFiles[Idx]);
        LogInformation("{0,3} {1}", "", TargetFiles[Idx]);
    }

    // If we're not going through with it, stop after printing the renames (dry run).
    if (!AllowSubmit)
    {
        LogWarning("Skipping due to no -Submit option");
        return;
    }

    // Force sync all the old files so the workspace holds local copies for the re-add below.
    foreach (string OldFile in SourceFiles)
    {
        P4.LogP4(String.Format("sync -f {0}", OldFile));
    }

    // Delete all the old files (step 1/2). "-k" appears intended to keep the workspace
    // files on disk so they can be re-added — confirm against the p4 delete reference.
    int DeleteChangeNumber = P4.CreateChange(Description: String.Format("Fixing case of {0} (1/2){1}", FileSpec, BoilerplateText));
    foreach (string OldFile in SourceFiles)
    {
        P4.LogP4(String.Format("delete -k -c {0} {1}", DeleteChangeNumber, OldFile));
    }
    P4.Submit(DeleteChangeNumber);

    // Re-add all the files in the new location (step 2/2) with the corrected casing.
    int AddChangeNumber = P4.CreateChange(Description: String.Format("Fixing case of {0} (2/2){1}", FileSpec, BoilerplateText));
    foreach (string NewFile in TargetFiles)
    {
        P4.LogP4(String.Format("add -c {0} {1}", AddChangeNumber, NewFile));
    }
    P4.Submit(AddChangeNumber);
}