public void RecordsFromMultipleInputFilesWrittenToCorrespondingOutputFiles()
{
    // Arrange: two input files, six single-letter records each.
    var fileA = this.workingDirectory + "FileA.txt";
    var fileB = this.workingDirectory + "FileB.txt";
    File.WriteAllLines(fileA, new String[] { "a", "b", "c", "d", "e", "f" });
    File.WriteAllLines(fileB, new String[] { "g", "h", "i", "j", "k", "l" });
    var mockStatisticsCollector = Substitute.For<IStatisticsCollector>();
    var writer = new InputFileRecordWriter(mockStatisticsCollector, true, true);

    using (FileReader readerA = new FileReader(fileA))
    using (FileReader readerB = new FileReader(fileB))
    {
        // Act: bytes 0-8 cover records a..c / g..i, bytes 9-18 cover d..f / j..l.
        writer.WriteMatchedRecord(readerA, new Record { Start = 0, End = 8 });
        writer.WriteMatchedRecord(readerB, new Record { Start = 0, End = 8 });
        writer.WriteUnmatchedRecord(readerA, new Record { Start = 9, End = 18 });
        writer.WriteUnmatchedRecord(readerB, new Record { Start = 9, End = 18 });
        writer.Close();
    }

    // Assert: one matched and one unmatched output file per input file.
    File.Exists(this.workingDirectory + "Matched_From_FileA.txt").ShouldBeTrue();
    File.ReadAllLines(this.workingDirectory + "Matched_From_FileA.txt").ShouldBeEquivalentTo(new String[] { "a", "b", "c" });
    File.Exists(this.workingDirectory + "Matched_From_FileB.txt").ShouldBeTrue();
    File.ReadAllLines(this.workingDirectory + "Matched_From_FileB.txt").ShouldBeEquivalentTo(new String[] { "g", "h", "i" });
    File.Exists(this.workingDirectory + "Unmatched_From_FileA.txt").ShouldBeTrue();
    File.ReadAllLines(this.workingDirectory + "Unmatched_From_FileA.txt").ShouldBeEquivalentTo(new String[] { "d", "e", "f" });
    File.Exists(this.workingDirectory + "Unmatched_From_FileB.txt").ShouldBeTrue();
    File.ReadAllLines(this.workingDirectory + "Unmatched_From_FileB.txt").ShouldBeEquivalentTo(new String[] { "j", "k", "l" });
}
public WikiGenerator(Convertor sourceConvertor, string rootPath, IPageCache pageCache)
{
    // Capture collaborators; the reader uses limited-block reads of size 500.
    this.pageCache = pageCache;
    convertor = sourceConvertor;
    rootWikiPath = rootPath;
    fileReader = new FileReader(FileReaderPolicy.LimitedBlock, 500);
}
public void TestDisposeOfStream(IFileStreamWrap stream)
{
    // Disposing the reader must release its underlying stream reference.
    var reader = new FileReader(stream);
    reader.Dispose();
    reader.Stream.ShouldBeEquivalentTo(null);
}
/// <summary>
/// Reads one skeleton bone record from the stream: name, parent link,
/// optional property list, translation, rotation, capability flags,
/// and two discarded legacy floats.
/// </summary>
/// <param name="Reader">Reader positioned at the start of the bone record.</param>
/// <param name="Index">Index assigned to this bone within the skeleton.</param>
public Bone(FileReader Reader, int Index)
{
    BoneIndex = Index;
    Reader.ReadUInt32(); // Unknown field; value discarded.
    Name = Reader.ReadPascalString();
    ParentName = Reader.ReadPascalString();
    // Fixed: `(x != 0) ? true : false` ternaries replaced with the direct comparison.
    HasPropertyList = Reader.ReadByte() != 0;
    if (HasPropertyList)
    {
        uint PropertyCount = Reader.ReadUInt32();
        for (int i = 0; i < PropertyCount; i++)
            PropertyList.Add(new Property(Reader));
    }
    Translation = new Vector3(Reader.ReadFloat(), Reader.ReadFloat(), Reader.ReadFloat());
    // Y and Z components are negated (arguments evaluate left-to-right, so the
    // stream order x, y, z, w is preserved).
    Rotation = new Quaternion(Reader.ReadFloat(), -Reader.ReadFloat(), -Reader.ReadFloat(), Reader.ReadFloat());
    CanTranslate = Reader.ReadUInt32() != 0;
    CanRotate = Reader.ReadUInt32() != 0;
    CanBlend = Reader.ReadUInt32() != 0;
    // Don Hopkins says the Wiggle parameters are left over from an attempt to use
    // Perlin noise to introduce some randomness into the animations, so that an
    // animation would look a little different each time it was run.
    Reader.ReadFloat();
    Reader.ReadFloat();
}
public void TestStreamIsGzipForKnownExt()
{
    // A ".gz" extension must be detected as GZip compression.
    var mockStream = new FileStreamMock("C:\\PAth\\pakjdkjd\\test.gz");
    var reader = new FileReader(mockStream);
    reader.Compression.ShouldBeEquivalentTo(CompressionScheme.GZip);
}
public void TestStreamIsNoCompressionForRandomFileExtension(string fileExt)
{
    // Unknown extensions must fall back to no compression.
    var mockStream = new FileStreamMock("C:\\PAth\\pakjdkjd\\test." + fileExt);
    var reader = new FileReader(mockStream);
    reader.Compression.ShouldBeEquivalentTo(CompressionScheme.None);
}
public IFFChunk(FileReader Reader, GraphicsDevice Device, Iff Parent)
{
    // Capture the owning archive and graphics device, then parse the chunk header.
    m_Device = Device;
    m_Parent = Parent;
    ReadHeader(Reader);
}
/// <summary>
/// Constructs a new Property instance by reading a count-prefixed
/// sequence of key/value string pairs.
/// </summary>
/// <param name="Reader">A FileReader instance, used to read the Property.</param>
public Property(FileReader Reader)
{
    uint pairCount = Reader.ReadUInt32();
    for (uint pair = 0; pair < pairCount; pair++)
    {
        // Key precedes value in the stream (left-to-right argument evaluation
        // in the original had the same effect).
        string key = Reader.ReadPascalString();
        string value = Reader.ReadPascalString();
        PropertyPairs.Add(key, value);
    }
}
public ItemReturn(Data data)
{
    // Keep the shared Data instance and derive the reader/writer pair from it.
    this.data = data;
    reader = new FileReader(data);
    writer = new FileWriter(data);
    resources = data.Resources;
}
/// <summary>
/// Parses a heightmap (.HIM) file: a small header followed by
/// Length x Width float height samples, tracking the min/max height seen.
/// </summary>
/// <param name="file">Path of the heightmap file to load.</param>
public HIM(string file)
{
    FileReader fr = new FileReader(file);
    try
    {
        Length = fr.Read<int>();
        Width = fr.Read<int>();
        GridCount = fr.Read<int>();
        GridSize = fr.Read<float>();
        //fr.BaseStream.Seek (8, System.IO.SeekOrigin.Current);
        Heights = new float[Length, Width];
        // Fixed: seed the running min/max with extreme sentinels so the first
        // sample always replaces them. The previous seeds (1e19f for min,
        // 10000f for max) left MaxHeight stuck at 10000 for any terrain whose
        // tallest point was below that value.
        MinHeight = float.MaxValue;
        MaxHeight = float.MinValue;
        for (int y = 0; y < Length; ++y)
        {
            for (int x = 0; x < Width; ++x)
            {
                float h = fr.Read<float>();
                Heights[y, x] = h;
                if (h < MinHeight)
                    MinHeight = h;
                if (h > MaxHeight)
                    MaxHeight = h;
            }
        }
    }
    finally
    {
        // Fixed: release the file handle even when a read throws mid-parse.
        fr.Close();
    }
}
/// <summary>
/// End-to-end loan payment automation: opens Firefox, loads test data from a
/// local info file, logs in through two pages, submits a payment, and closes
/// the browser. Page objects chain one page to the next.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
public static void Main(string[] args)
{
    // Initialize browser (shared via PropertiesCollection for all page objects).
    PropertiesCollection.driver = new FirefoxDriver();
    Console.WriteLine("Opened Browser");
    // Read File with test data (URLs, credentials, payment details).
    // NOTE(review): hard-coded absolute path — breaks on other machines.
    FileReader file = new FileReader();
    file.ReadFile("e:/Documents/Programs/C#/LoanPaymentAutomationTests/info.txt");
    // Navigate to Loan Site; presumably entry 11 of the info file is the URL — verify against the data file.
    PropertiesCollection.driver.Navigate().GoToUrl(file.info(11));
    // Login through 2 pages.
    LoginPage page1 = new LoginPage("user-id");
    SecureLoginPage page2 = page1.Login(file);
    // Make payments and submit.
    AccountSummaryPage page3 = page2.Submit(file);
    MakeAPaymentPage page4 = page3.MakeAPayment();
    SpecialPaymentPage page5 = page4.SubmitPayment(file);
    PaymentPreviewPage page6 = page5.Next();
    page6.btnSubmit.Click();
    // Close browser.
    PropertiesCollection.driver.Close();
    Console.WriteLine("Close the browser");
    Log();
}
/// <summary>
/// Parses an exported account statement file into an Account.
/// Expected layout: a saldo line, a headings line, the transaction lines,
/// and a final saldo line.
/// </summary>
/// <param name="file">The statement file to parse.</param>
/// <returns>The populated Account.</returns>
internal static Account Read(System.IO.FileInfo file)
{
    // NOTE(review): reassigns a shared static parser on every call — confirm
    // this method is never used concurrently.
    MoneyParser = new SimpleMoneyParser();
    FileReader reader = new FileReader(file.FullName);
    var lines = reader.ReadAllLines();
    string name = ParseFileName(file.Name);
    Console.Out.WriteLine("\n\n{0}", name);
    // First line: "ut" saldo (presumably the outgoing/closing balance — confirm).
    var utSaldo = ParseSaldoLine(lines[0]);
    lines.RemoveAt(0);
    lines.RemoveAt(0); // Headings line, discarded.
    // Last line: "inn" saldo (presumably the incoming/opening balance — confirm).
    var innSaldo = ParseSaldoLine(lines[lines.Count - 1]);
    lines.RemoveAt(lines.Count - 1);
    Account result = new Account(file.Name, name, innSaldo, utSaldo);
    // Every remaining line is one transaction.
    foreach (var line in lines)
    {
        AccountLine aLine = ParseLine(line, name);
        result.AddLine(aLine);
    }
    return result;
}
public AlgoMetaData(string name, IAutoGenerator autoGenerator = null, FileReader fileReader = null, ManualInput manualInput = null)
{
    // All three input sources are optional; any omitted one stays null.
    Name = name;
    InputFunction = manualInput;
    FileReader = fileReader;
    AutoGenerator = autoGenerator;
}
public static void Main(string[] args)
{
    // Replay the previously recorded command through the module runner.
    var reader = new FileReader();
    var previousCommand = reader.GetPreviousCommand();
    new ModuleRunner().Execute(previousCommand);
}
public void ReadFileObject(FileReader reader)
{
    // Read only the entry metadata: name, then data offset and size.
    // The payload bytes themselves are deliberately not loaded here.
    Name = reader.ReadString();
    Offset = reader.ReadInt32();
    Size = reader.ReadInt32();
}
public void Test()
{
    // Round-trip: read the fixture, write it back out, compare byte content.
    var parsed = new FileReader().Read(TestFile);
    if (File.Exists(OutFile))
    {
        File.Delete(OutFile);
    }
    new FileWriter().Write(OutFile, parsed);
    FileAssert.AreSameContents(TestFile, OutFile);
}
// Path of the file used to store the message hash cache.
private string MESSAGE_HASH_FILE_NAME = "./MessageHash.txt";

#endregion Fields

#region Constructors

/// <summary>
/// Sets up the RSA engine, file reader/writer helpers and the MD5 hasher,
/// then generates a fresh RSA key pair.
/// </summary>
public DigitSignature()
{
    rsa = new EncryptionAlgorithmsLib.RSA();
    fileReader = new FileReader();
    fileWriter = new FileWriter();
    md5Hash = MD5.Create();
    rsa.GenerateKeys();
}
/// <summary>
/// Constructs a new TimeProperty instance: an ID followed by a
/// count-prefixed list of Property records.
/// </summary>
/// <param name="Reader">A FileReader instance used to read a TimeProperty.</param>
public TimeProperty(FileReader Reader)
{
    ID = Reader.ReadUInt32();
    uint remaining = Reader.ReadUInt32();
    while (remaining-- > 0)
    {
        PropertyList.Add(new Property(Reader));
    }
}
public void TestResetDoesntWork(string testPath)
{
    // The reader's enumerator is forward-only; Reset must throw.
    var reader = new FileReader(new FileStreamMock(testPath));
    var enumerator = reader.GetEnumerator();
    Assert.Throws<InvalidOperationException>(() => enumerator.Reset());
}
/// <summary>
/// Reads a bone binding record: five consecutive uint32 fields describing
/// which vertex ranges this bone influences.
/// </summary>
/// <param name="Reader">Reader positioned at the start of the binding record.</param>
public BoneBinding(FileReader Reader)
{
    BoneIndex = Reader.ReadUInt32();
    FirstRealVertexIndex = Reader.ReadUInt32();
    RealVertexCount = Reader.ReadUInt32();
    FirstBlendVertexIndex = Reader.ReadUInt32();
    BlendVertexCount = Reader.ReadUInt32();
}
public void TestMoveNextDoesntWork(string testPath)
{
    // For this path the enumerator must immediately report no elements.
    var reader = new FileReader(new FileStreamMock(testPath));
    reader.GetEnumerator().MoveNext().ShouldBeEquivalentTo(false);
}
public static FileReader GetInstance()
{
    // Lazily create the shared instance on first request.
    // NOTE(review): no locking here — confirm single-threaded use.
    return instance ?? (instance = new FileReader());
}
/// <summary>
/// Parses a card face from its raw byte payload. Layout: a 4-byte 'FACE'
/// tag, background color, logo flag, optional logo section, then
/// length-prefixed symbol, text and thumbnail sections.
/// </summary>
/// <param name="bytes">Raw FACE-tagged data to parse.</param>
/// <param name="type">Which face of the card this data represents.</param>
public CardFace(byte[] bytes, FaceType type)
{
    faceBytes = bytes;
    faceType = type;
    Stream stream = new MemoryStream(faceBytes);
    FileReader reader = new FileReader(stream);
    reader.ReadBytes(4); // skip Tag 'FACE'
    bgColor = reader.ReadUInt32();
    hasLogo = reader.ReadBoolean();
    byte[] dataBytes;
    byte[] readBytes;
    int len;
    if (hasLogo)
    {
        reader.ReadBytes(16); // skip LogoRect
        logoDepth = reader.ReadUInt32(); // LogoDepth
        len = (int)(reader.ReadUInt32()); // LogoMatrixBytes
        if (len > 0)
        {
            // Matrix payload is read as a fixed 24 bytes when present.
            logoMatrix = new Matrix(reader.ReadBytes(24));
        }
        len = (int)(reader.ReadUInt32()); // LogoColorTransBytes
        if (len > 0)
        {
            // Color transform payload is read as a fixed 32 bytes when present.
            logoColorTrans = new ColorTransform(reader.ReadBytes(32));
        }
    }
    len = (int)(reader.ReadUInt32()); // SymbolBytes
    if (len > 0)
    {
        symbols = new SymbolCollection(reader.ReadBytes(len));
    }
    len = (int)(reader.ReadUInt32()); // TextBytes
    if (len > 0)
    {
        texts = new TextCollection(reader.ReadBytes(len));
    }
    len = (int)(reader.ReadUInt32()); // ThumbnailBytes
    if (len > 0)
    {
        // Copy the thumbnail bytes into a fresh buffer before wrapping them.
        dataBytes = new byte[len];
        readBytes = reader.ReadBytes(len);
        readBytes.CopyTo(dataBytes, 0);
        thumbnail = new Thumbnail(dataBytes, ThumbnailType.CardThumbnail);
    }
    stream.Close();
    reader.Close();
}
public static string GetRiotVersion()
{
    // Cache the version string after the first read of the embedded config.
    if (_version == null)
    {
        using (var reader = new FileReader())
        {
            _version = reader.GetFileString("Resources.config.json");
        }
    }
    return _version;
}
public void TestGuardClauses()
{
    // A null stream must be rejected by the constructor.
    Action construct = () => { var unused = new FileReader(null); };
    construct.ShouldThrow<ArgumentNullException>();
}
static void Main(string[] args)
{
    // Bracket the read/write run with start and end log entries.
    Log.ToLog(DateTime.Now.ToString(), "start", "-----------------------------");
    var controller = new Controller();
    var fileReader = new FileReader();
    fileReader.Reader(controller);
    fileReader.Writer(controller);
    Log.ToLog(DateTime.Now.ToString(), "end", "-------------------------------");
}
/// <summary>
/// The entry point of the program, where the program control starts and ends.
/// Prompts for a file path, then builds a single-file .torrent by piping the
/// file through a SHA-1 hasher into a torrent-file writer
/// (fileReader -> sha1Hasher.inputChannel -> sha1Hasher.outputChannel -> writer).
/// </summary>
/// <param name="args">The command-line arguments.</param>
static void Main(string[] args)
{
    String path;
    Console.Write("File to create torrent for <absolute path>: ");
    path = Console.ReadLine();
    // Re-prompt until an existing file path is supplied.
    while (!(File.Exists(path)))
    {
        Console.WriteLine("File does not exist, try again.");
        Console.Write("File to create torrent for <absolute path>: ");
        path = Console.ReadLine();
    }
    Sha1Hasher sha1Hasher = new Sha1Hasher();
    FileReader fileReader = new FileReader(path, sha1Hasher.inputChannel, PIECE_SIZE);
    string torrentPath = path + ".torrent";
    FileStream torrentFile = new FileStream(torrentPath, FileMode.Create);
    // Number of fixed-size pieces, rounding the final partial piece up.
    int pieces = (int)Math.Ceiling((double)fileReader.fileBytes.Length / (double)PIECE_SIZE);
    TorrentFileWriter torrentFileWriter = new TorrentFileWriter(torrentFile, pieces, sha1Hasher.outputChannel);
    torrentFileWriter.Start();
    // Create the header of the torrent file (bencoded dictionary prefix).
    StringBuilder torrentHeader = new StringBuilder();
    torrentHeader.Append("d8:announce44:udp://tracker.openbittorrent.com:80/announce");
    torrentHeader.Append("8:encoding5:UTF-8");
    torrentHeader.Append("4:info");
    torrentHeader.Append("d6:lengthi" + fileReader.fileBytes.Length + "e");
    string filename = path.Substring(path.LastIndexOf('/') + 1);
    torrentHeader.Append("4:name" + filename.Length + ":" + filename);
    torrentHeader.Append("12:piece lengthi" + PIECE_SIZE + "e");
    // Each SHA-1 piece hash contributes 20 bytes to the "pieces" string.
    torrentHeader.Append("6:pieces" + pieces * 20 + ":");
    torrentFileWriter.inputChannel.Put(Encoding.UTF8.GetBytes(torrentHeader.ToString()));
    sha1Hasher.Start();
    fileReader.Start();
    // Wait until initial writing has completed (busy-wait on the writer).
    while (!torrentFileWriter.WritingComplete()) { }
    // Append the bencoding ends (closes the "info" dict and the outer dict).
    string end = "ee";
    torrentFileWriter.inputChannel.Put(Encoding.UTF8.GetBytes(end));
    // Wait until writing has completed again.
    while (!torrentFileWriter.WritingComplete()) { }
    // Stop the file writer.
    torrentFileWriter.Stop();
    // Close the torrent file.
    torrentFile.Close();
    Console.WriteLine("Torrent file created: \"" + torrentPath + "\"");
}
/// <summary>
/// Scans each file character by character and collects the distinct tag
/// prefixes seen. A candidate starts at '&lt;' and is recorded either at the
/// first space (tag with attributes) or at '&gt;' (attribute-less tag).
/// The delimiter character just appended ('&lt;', ' ' or '&gt;') is included
/// in the stored tag text.
/// </summary>
/// <param name="files">Paths of the files to scan.</param>
/// <returns>The accumulated list of distinct tags (the shared _tags field).</returns>
public List<string> Tags(List<string> files)
{
    foreach (var file in files)
    {
        _fileReader = new FileReader(file);
        _length = _fileReader.Length();
        long currentPosition = 0;
        var trigger = string.Empty;  // characters of the tag currently being accumulated
        var skip = false;            // set once a tag was recorded at a space, so the later '>' doesn't record it again
        var inTrigger = false;       // true while inside a '<...' sequence
        while (_length > currentPosition)
        {
            var s = _fileReader.ReadString(1);
            currentPosition++;
            if (s == "<")
            {
                // Start of a new tag candidate: reset the accumulator and flags.
                trigger = string.Empty;
                skip = false;
                inTrigger = true;
            }
            trigger = trigger + s; // append after the reset, so '<' itself is kept
            if (s == " ")
            {
                if (inTrigger)
                {
                    // Tag name ended at a space (attributes follow); record it once.
                    if (!_tags.Contains(trigger))
                    {
                        _tags.Add(trigger);
                    }
                    trigger = string.Empty;
                    skip = true;
                    inTrigger = false;
                }
            }
            if (s == ">")
            {
                if (!skip)
                {
                    if (inTrigger)
                    {
                        // Attribute-less tag closed directly; record it.
                        if (!_tags.Contains(trigger))
                        {
                            _tags.Add(trigger);
                        }
                        trigger = string.Empty;
                        inTrigger = false;
                    }
                }
            }
        }
    }
    return _tags;
}
public void readFileTest()
{
    // Arrange: a stubbed StreamReader yielding one CR-terminated record.
    const string testFileContents = "123456-1234-1234-1234-123456789101\r";
    const string testPath = "\\a\\test\\path.txt";
    StreamReader stubReader = MockRepository.GenerateStub<StreamReader>();
    stubReader.Stub(X => X.ReadToEnd()).Return(testFileContents);
    fileReader = new MockReader_1(stubReader);

    // Act
    List<string> formattedData = fileReader.Read(testPath);

    // Assert: exactly one formatted record comes back.
    Assert.IsTrue(formattedData.Count == 1);
}
public static FileReader FromString(string str, string filename = "default.rx")
{
    // Build an in-memory reader positioned at the start of the given text.
    var reader = new FileReader
    {
        Position = 0,
        Line = 1,
        Column = 1,
        Filename = filename
    };
    reader.buffer = str.ToCharArray();
    return reader;
}
/// <summary>
/// Converts an NDV0 delta fragment into a trimmed form ("TDV0"/"CDV0", or a
/// bare "LCA3" link record) written to <paramref name="writer"/>, prefixed
/// with the length-prefixed base NCA reference string.
/// </summary>
/// <param name="delta">Storage holding the delta fragment to convert.</param>
/// <param name="writer">Destination stream for the converted output.</param>
/// <param name="foundBaseNCA">Base NCA reference; may be "name:startHex:endHex" for split deltas.</param>
/// <returns>Total size in bytes of the untrimmed data described by the delta (0 for a plain link record).</returns>
/// <exception cref="InvalidDataException">Delta too small or corrupted.</exception>
/// <exception cref="IndexOutOfRangeException">Base NCA reference longer than 255 characters.</exception>
public static long Save(IStorage delta, FileStream writer, string foundBaseNCA)
{
    // ':'-separated parts: [name] alone, or [name, startOffset, endOffset].
    var filenameOffset = foundBaseNCA.Split(':');
    if (delta.GetSize() < 0x40)
    {
        throw new InvalidDataException("Delta file is too small.");
    }
    // The reference length is stored in a single byte below, so it must fit.
    if (foundBaseNCA.Length > 255)
    {
        throw new IndexOutOfRangeException("Base NCA filename isn't allowed to be longer then 255 characters");
    }
    var Header = new DeltaFragmentHeader(new StorageFile(delta, OpenMode.Read));
    var reader = new FileReader(new StorageFile(delta, OpenMode.Read));
    reader.Position = 0;
    // A plain reference (no offsets) to a non-NDV0 file: emit an LCA3 link record only.
    if (filenameOffset.Length == 1 && Header.Magic != DeltaTools.Ndv0Magic)
    {
        writer.Write(DeltaTools.LCA3Macic, 0, DeltaTools.LCA3Macic.Length);
        writer.WriteByte((byte)foundBaseNCA.Length);
        writer.Write(Encoding.ASCII.GetBytes(foundBaseNCA), 0, foundBaseNCA.Length);
        return (0);
    }
    if (Header.Magic == DeltaTools.Ndv0Magic)
    {
        var fragmentSize = Header.FragmentHeaderSize + Header.FragmentBodySize;
        //if (!isSplitNdv0 && delta.Length < fragmentSize)
        //{
        //    throw new InvalidDataException(
        //        $"Delta file is smaller than the header indicates. (0x{fragmentSize} bytes)");
        //}
        var headerData = reader.ReadBytes((int)Header.FragmentHeaderSize);
        headerData[0] = 0x54; // 'T' — rewrites the magic from NDV0 to TDV0.
        writer.Write(headerData, 0, (int)Header.FragmentHeaderSize);
    }
    else
    {
        writer.Write(Encoding.ASCII.GetBytes(DeltaTools.Cdv0Magic), 0, DeltaTools.Cdv0Magic.Length);
    }
    writer.WriteByte((byte)foundBaseNCA.Length);
    writer.Write(Encoding.ASCII.GetBytes(foundBaseNCA), 0, foundBaseNCA.Length);
    // Remember where the end-offset digits sit inside the reference just
    // written, so they can be patched once the real end offset is known.
    var foundBaseNCAEndOffsetPos = foundBaseNCA.LastIndexOf(':') + 1;
    var foundBaseNCAEndOffsetLen = foundBaseNCA.Length - foundBaseNCAEndOffsetPos;
    var SplitNdv0EndOffsetPos = writer.Position - foundBaseNCAEndOffsetLen;
    long offset = 0;
    long deltaSize = delta.GetSize();
    Console.WriteLine($"reader={reader.Position} writer={writer.Position}");
    // Walk every segment header, accumulating the untrimmed size (seeks + data),
    // skipping over each segment's payload.
    while (reader.Position < deltaSize)
    {
        ReadSegmentHeader(reader, writer, out var size, out var seek);
        if (seek > 0)
        {
            offset += seek;
        }
        if (size > 0)
        {
            offset += size;
        }
        reader.Position += size;
    }
    if (reader.Position == deltaSize)
    {
        // Split NDV0: patch the end-offset digits now that the span is known.
        if (filenameOffset.Length > 2)
        {
            var startOffset = long.Parse(filenameOffset[1], NumberStyles.HexNumber);
            var endOffset = startOffset + offset;
            var realEndOffset = endOffset.ToString($"X{foundBaseNCAEndOffsetLen}");
            var posReal = writer.Position;
            writer.Position = SplitNdv0EndOffsetPos;
            writer.Write(Encoding.ASCII.GetBytes(realEndOffset), 0, realEndOffset.Length);
            writer.Position = posReal;
        }
        // Size of data untrimmed in this function call.
        return (offset);
    }
    // Reader overran the declared size: segment headers did not line up.
    throw new InvalidDataException("Fragment file seems to be corrupted!");
}
/// <summary>
/// Reads all entries in this archive into memory: validates the "DBPF"
/// magic, skips the fixed header fields, then reads the index table into
/// m_Entries keyed by (TypeID, InstanceID, GroupID).
/// </summary>
/// <param name="ThrowException">Wether or not to throw an exception if the archive was not a DBPF. If false, function will return.</param>
/// <returns>True on success; false on failure when ThrowException is false.</returns>
public bool ReadArchive(bool ThrowException)
{
    // Presumably an event gate for concurrent readers: reset while parsing,
    // set once the index is available — confirm against m_FinishedReading's type.
    m_FinishedReading.Reset();
    if (m_Reader == null)
    {
        try
        {
            m_Reader = new FileReader(m_Path, false);
        }
        //This will be thrown because of file access privileges or because an archive is being tentatively opened twice.
        catch (Exception)
        {
            if (ThrowException)
            {
                throw;
            }
            else
            {
                return (false);
            }
        }
    }
    lock (m_Reader)
    {
        ASCIIEncoding Enc = new ASCIIEncoding();
        string MagicNumber = Enc.GetString(m_Reader.ReadBytes(4));
        if (!MagicNumber.Equals("DBPF", StringComparison.InvariantCultureIgnoreCase))
        {
            if (ThrowException)
            {
                throw new DBPFException("MagicNumber was wrong - DBPFArchive.cs!");
            }
            else
            {
                m_Reader.Close();
                return (false);
            }
        }
        // Fixed-size header fields we don't need; read them to advance the stream.
        m_Reader.ReadUInt32(); //MajorVersion
        m_Reader.ReadUInt32(); //MinorVersion
        m_Reader.ReadBytes(12); //Reserved.
        m_Reader.ReadBytes(4); //Date created.
        m_Reader.ReadBytes(4); //Date modified.
        m_Reader.ReadUInt32(); //Index major version.
        IndexEntryCount = m_Reader.ReadUInt32();
        IndexOffset = m_Reader.ReadUInt32();
        IndexSize = m_Reader.ReadUInt32();
        // Jump to the index table and read one fixed-layout record per entry.
        m_Reader.Seek(IndexOffset);
        for (int i = 0; i < IndexEntryCount; i++)
        {
            DBPFEntry Entry = new DBPFEntry();
            Entry.TypeID = m_Reader.ReadUInt32();
            Entry.GroupID = m_Reader.ReadUInt32();
            Entry.InstanceID = m_Reader.ReadUInt32();
            Entry.FileOffset = m_Reader.ReadUInt32();
            Entry.FileSize = m_Reader.ReadUInt32();
            UniqueFileID ID = new UniqueFileID(Entry.TypeID, Entry.InstanceID, Entry.GroupID);
            Entry.EntryID = ID;
            m_Entries.Add(ID, Entry);
        }
    }
    // Signal that the archive index is now available.
    m_FinishedReading.Set();
    return (true);
}
public void NoPackagesConfigFileReturnsEmptyList()
{
    // With no packages.config on the fake file system, the reader yields nothing.
    var reader = new FileReader(new FakeFileSystem());
    var results = reader.ReadPackagesConfig(@"x:\foo\my.csproj");
    results.Should().BeEmpty();
}
// Intentionally a no-op: this type reads no additional payload.
// NOTE(review): confirm callers rely on Read doing nothing here.
public void Read(FileReader reader)
{
}
/// <summary>
/// CLI entry point: parses arguments, then either prints usage/pattern help,
/// lists the names in the named-pattern files, or generates templated output
/// to a file or the console. All failures are reported to the console.
/// </summary>
/// <param name="args">Raw command-line arguments (possibly augmented by piped input).</param>
private static void Main(string[] args)
{
    var cla = new CommandLineArgs();
    var sw = new Stopwatch(); // times the whole run for --verbose reporting
    sw.Start();
#if DEBUG
    Debugger.Launch();
#endif
    try
    {
        // test for piped content.
        var arguments = GetArguments(args);
        var result = cla.CliParse(arguments);
        if (!result.Successful)
        {
            Console.WriteLine("Parse failed! Use --help flag for instructions on usage.");
            return;
        }
        if (result.ShowHelp)
        {
            var usage = cla.GetUsage();
            Console.WriteLine(usage);
            return;
        }
        if (cla.ShowPatternHelp)
        {
            var usage = cla.GetPatternUsage();
            Console.Write(usage);
            return;
        }
        if (cla.ListNamedPatterns)
        {
            // Always include the default pattern file, plus any user-supplied
            // ';'-separated pattern files.
            var paths = new List<string>();
            paths.Add("default");
            if (!string.IsNullOrEmpty(cla.NamedPatterns))
            {
                cla.NamedPatterns.Split(';').ToList().ForEach(paths.Add);
            }
            Console.WriteLine("Named Parameters:");
            foreach (var file in paths)
            {
                var correctedPath = FileReader.GetPatternFilePath(file);
                var namedParameters = FileReader.LoadNamedPatterns(correctedPath);
                foreach (var namedParameter in namedParameters.Patterns)
                {
                    Console.WriteLine(namedParameter.Name);
                }
            }
        }
        else
        {
            var template = GetTemplateValue(cla);
            if (!string.IsNullOrEmpty(template)) // output path provided.
            {
                if (cla.Verbose)
                {
                    Console.WriteLine("Creating " + cla.Count.ToString() + " items.");
                }
                // File output takes precedence over console output.
                if (!string.IsNullOrEmpty(cla.OutputFilePath))
                {
                    OutputToFile(cla, template);
                }
                else
                {
                    OutputToConsole(cla, template);
                }
            }
            else
            {
                Console.WriteLine(cla.GetUsage());
            }
        }
        if (cla.Verbose)
        {
            if (sw != null)
            {
                sw.Stop();
                Console.WriteLine("Generation took {0} milliseconds", sw.ElapsedMilliseconds);
            }
        }
    }
    catch (GenerationException gex)
    {
        // Known generation failures: message only, no stack trace.
        Console.WriteLine("Error:\n{0}", gex.Message);
    }
    catch (Exception ex)
    {
        // Unexpected failures: include the stack trace for diagnosis.
        Console.WriteLine("Error:{0}\n\nStackTrace:{1}", ex.Message, ex.StackTrace);
    }
}
public void Part1()
{
    // Solve day 24 part 1 over the puzzle input parsed as integers.
    var input = FileReader.ReadFile("day24.txt").Select(x => Convert.ToInt32(x)).ToArray();
    Assert.AreEqual(10723906903, _day24.Solve(input));
}
public int Compute(string filePath)
{
    // Largest value among the parsed boarding passes in the file.
    var lines = FileReader.GetFileContent(filePath).ToList();
    var parsed = ReadBoardingPass(lines);
    return parsed.Max();
}
/// <summary>
/// Parses a HIT track from the given stream. Accepts either the "2DKT" form
/// (length-prefixed payload) or "TKDT" (payload runs to end of stream), both
/// containing a comma-separated field list. Version 2 tracks have one extra
/// field, shifting the later field indices by one.
/// </summary>
/// <param name="Data">Stream positioned at the start of the track data.</param>
/// <exception cref="TRKException">Thrown when the magic number is invalid.</exception>
public TRK(Stream Data)
{
    m_Reader = new FileReader(Data, false);
    string DataStr = "";
    string[] Elements;
    ASCIIEncoding Enc = new ASCIIEncoding();
    string MagicNumber = Enc.GetString(m_Reader.ReadBytes(4));
    if (!MagicNumber.Equals("2DKT", StringComparison.InvariantCultureIgnoreCase) &&
        !MagicNumber.Equals("TKDT", StringComparison.InvariantCultureIgnoreCase))
    {
        throw new TRKException("Invalid TrackData header - TRK.cs");
    }
    if (MagicNumber.Equals("2DKT", StringComparison.InvariantCultureIgnoreCase))
    {
        // 2DKT: a uint32 length prefixes the comma-separated payload.
        DataStr = Enc.GetString(m_Reader.ReadBytes((int)m_Reader.ReadUInt32()));
        Elements = DataStr.Split(',');
    }
    else
    {
        // TKDT: payload is the rest of the stream.
        Elements = Enc.GetString(m_Reader.ReadToEnd()).Split(',');
    }
    m_Version = int.Parse(Elements[1], NumberStyles.Integer);
    TrackName = Elements[2];
    if (!Elements[3].Equals("", StringComparison.InvariantCultureIgnoreCase))
    {
        // SoundID is stored as hex, optionally "0x"-prefixed.
        SoundID = uint.Parse(Elements[3].Replace("0x", ""), NumberStyles.HexNumber);
    }
    else
    {
        SoundID = 0;
    }
    // A bare CRLF in field 5 means no further meaningful fields follow.
    if (Elements[5].Equals("\r\n", StringComparison.InvariantCultureIgnoreCase))
    {
        return;
    }
    if (!Elements[5].Equals("", StringComparison.InvariantCultureIgnoreCase))
    {
        Argument = (HITTrackArguments)Enum.Parse(typeof(HITTrackArguments), Elements[5]);
    }
    if (!Elements[7].Equals("", StringComparison.InvariantCultureIgnoreCase))
    {
        ControlGroup = (HITControlGroup)Enum.Parse(typeof(HITControlGroup), Elements[7]);
    }
    // From here on, version-2 tracks read each field one index later.
    if (!Elements[(m_Version != 2) ? 11 : 12].Equals("", StringComparison.InvariantCultureIgnoreCase))
    {
        DuckingPriority = int.Parse(Elements[(m_Version != 2) ? 11 : 12], NumberStyles.Integer);
    }
    if (!Elements[(m_Version != 2) ? 12 : 13].Equals("", StringComparison.InvariantCultureIgnoreCase))
    {
        Looped = (int.Parse(Elements[(m_Version != 2) ? 12 : 13], NumberStyles.Integer) != 0) ? true : false;
    }
    if (!Elements[(m_Version != 2) ? 13 : 14].Equals("", StringComparison.InvariantCultureIgnoreCase))
    {
        Volume = int.Parse(Elements[(m_Version != 2) ? 13 : 14], NumberStyles.Integer);
    }
    m_Reader.Close();
}
public static int Part1()
{
    // Day 10: evaluate the best asteroid-station location over the puzzle input.
    var inputLines = FileReader.ReadInputLines(10);
    return NumberOfAsteroidsFromBestLocation(inputLines);
}
public static Vector3 Read_8_8_8_Unorm(FileReader reader)
{
    // Three unsigned bytes, each mapped from [0, 255] to [0, 1].
    float x = reader.ReadByte() / 255f;
    float y = reader.ReadByte() / 255f;
    float z = reader.ReadByte() / 255f;
    return new Vector3(x, y, z);
}
// Reads four signed bytes and scales each by 1/255.
// NOTE(review): conventional SNORM decoding divides by 127 (mapping [-127, 127]
// onto [-1, 1]); dividing sbytes by 255 yields roughly [-0.5, 0.5]. Confirm the
// halved range is intended by the file format before changing it.
public static Vector4 Read_8_8_8_8_Snorm(FileReader reader)
{
    return (new Vector4(reader.ReadSByte() / 255f, reader.ReadSByte() / 255f, reader.ReadSByte() / 255f, reader.ReadSByte() / 255f));
}
/// <summary>
/// Builds renderable mesh wrappers from the raw buffers in the 003 data
/// file: attaches root bones under a "Skeleton" node, then for each mesh
/// reads its index list and decodes vertices according to the mesh's
/// declared data format.
/// </summary>
public void ReadVertexBuffers()
{
    Nodes.Clear();
    // Attach only root bones (those without a parent) under the skeleton node.
    TreeNode skeletonNode = new TreeNode("Skeleton");
    for (int t = 0; t < Skeleton?.bones.Count; t++)
    {
        if (Skeleton.bones[t].Parent == null)
        {
            skeletonNode.Nodes.Add(Skeleton.bones[t]);
        }
    }
    if (skeletonNode.Nodes.Count > 0)
    {
        Nodes.Add(skeletonNode);
    }
    using (var reader = new FileReader(DataDictionary.GetFile003Data()))
    {
        for (int i = 0; i < VertexBufferPointers.Count; i++)
        {
            LM2_Mesh mesh = Meshes[i];
            RenderableMeshWrapper genericObj = new RenderableMeshWrapper();
            genericObj.Mesh = mesh;
            genericObj.Text = $"Mesh {i}";
            genericObj.SetMaterial(mesh.Material);
            RenderedMeshes.Add(genericObj);
            Nodes.Add(genericObj);
            DataDictionary.Renderer.Meshes.Add(genericObj);
            STGenericPolygonGroup polyGroup = new STGenericPolygonGroup();
            genericObj.PolygonGroups.Add(polyGroup);
            // Debug node: dump a raw window of the buffer (80 bytes per vertex).
            using (reader.TemporarySeek(BufferStart + VertexBufferPointers[i], System.IO.SeekOrigin.Begin))
            {
                var bufferNodeDebug = new DebugVisualBytes(reader.ReadBytes((int)80 * mesh.VertexCount));
                bufferNodeDebug.Text = $"Buffer {mesh.DataFormat.ToString("x")}";
                genericObj.Nodes.Add(bufferNodeDebug);
            }
            if (!LM2_Mesh.FormatInfos.ContainsKey(mesh.DataFormat))
            {
                Console.WriteLine($"Unsupported data format! " + mesh.DataFormat.ToString("x"));
                continue;
            }
            else
            {
                var formatInfo = LM2_Mesh.FormatInfos[mesh.DataFormat];
                if (formatInfo.BufferLength > 0)
                {
                    // Indices first: 8- or 16-bit, starting at the mesh's index offset.
                    reader.BaseStream.Position = BufferStart + mesh.IndexStartOffset;
                    switch (mesh.IndexFormat)
                    {
                        case IndexFormat.Index_8:
                            for (int f = 0; f < mesh.IndexCount; f++)
                            {
                                polyGroup.faces.Add(reader.ReadByte());
                            }
                            break;
                        case IndexFormat.Index_16:
                            for (int f = 0; f < mesh.IndexCount; f++)
                            {
                                polyGroup.faces.Add(reader.ReadUInt16());
                            }
                            break;
                    }
                    Console.WriteLine($"Mesh {genericObj.Text} Format {formatInfo.Format} BufferLength {formatInfo.BufferLength}");
                    uint bufferOffet = BufferStart + VertexBufferPointers[i];
                    /* for (int v = 0; v < mesh.VertexCount; v++)
                     * {
                     *     reader.SeekBegin(bufferOffet + (v * formatInfo.BufferLength));
                     *
                     * }*/
                    // Vertex decode: each case seeks to the v-th stride and reads
                    // position plus whatever attributes that format carries.
                    switch (formatInfo.Format)
                    {
                        case VertexDataFormat.Float16:
                            for (int v = 0; v < mesh.VertexCount; v++)
                            {
                                reader.SeekBegin(bufferOffet + (v * formatInfo.BufferLength));
                                Vertex vert = new Vertex();
                                genericObj.vertices.Add(vert);
                                vert.pos = new Vector3(
                                    UShortToFloatDecode(reader.ReadInt16()),
                                    UShortToFloatDecode(reader.ReadInt16()),
                                    UShortToFloatDecode(reader.ReadInt16()));
                                Vector4 nrm = Read_8_8_8_8_Snorm(reader);
                                vert.nrm = nrm.Xyz.Normalized();
                                vert.pos = Vector3.TransformPosition(vert.pos, mesh.Transform);
                                vert.uv0 = NormalizeUvCoordsToFloat(reader.ReadUInt16(), reader.ReadUInt16());
                                if (formatInfo.BufferLength == 22)
                                {
                                    // Trailing 8 bytes of the 22-byte stride are not yet understood.
                                    Console.WriteLine("unk 1 " + reader.ReadUInt16());
                                    Console.WriteLine("unk 2 " + reader.ReadUInt16());
                                    Console.WriteLine("unk 3 " + reader.ReadUInt16());
                                    Console.WriteLine("unk 4 " + reader.ReadUInt16());
                                }
                            }
                            break;
                        case VertexDataFormat.Float32:
                            for (int v = 0; v < mesh.VertexCount; v++)
                            {
                                reader.SeekBegin(bufferOffet + (v * formatInfo.BufferLength));
                                Vertex vert = new Vertex();
                                genericObj.vertices.Add(vert);
                                vert.pos = new Vector3(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle());
                                vert.pos = Vector3.TransformPosition(vert.pos, mesh.Transform);
                            }
                            break;
                        case VertexDataFormat.Float32_32:
                            // NOTE(review): this position is overwritten by the SeekBegin
                            // inside the loop below — confirm the +0x08 seek is needed.
                            reader.BaseStream.Position = BufferStart + VertexBufferPointers[i] + 0x08;
                            for (int v = 0; v < mesh.VertexCount; v++)
                            {
                                reader.SeekBegin(bufferOffet + (v * formatInfo.BufferLength));
                                Vertex vert = new Vertex();
                                genericObj.vertices.Add(vert);
                                vert.pos = new Vector3(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle());
                                vert.pos = Vector3.TransformPosition(vert.pos, mesh.Transform);
                                vert.uv0 = NormalizeUvCoordsToFloat(reader.ReadUInt16(), reader.ReadUInt16());
                                vert.uv1 = NormalizeUvCoordsToFloat(reader.ReadUInt16(), reader.ReadUInt16());
                                vert.col = Read_8_8_8_8_Unorm(reader);
                            }
                            break;
                        case VertexDataFormat.Float32_32_32:
                            for (int v = 0; v < mesh.VertexCount; v++)
                            {
                                reader.SeekBegin(bufferOffet + (v * formatInfo.BufferLength));
                                Vertex vert = new Vertex();
                                genericObj.vertices.Add(vert);
                                vert.pos = new Vector3(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle());
                                vert.pos = Vector3.TransformPosition(vert.pos, mesh.Transform);
                                Vector4 nrm = Read_8_8_8_8_Snorm(reader);
                                vert.nrm = nrm.Xyz.Normalized();
                                vert.uv0 = NormalizeUvCoordsToFloat(reader.ReadUInt16(), reader.ReadUInt16());
                                vert.uv1 = NormalizeUvCoordsToFloat(reader.ReadUInt16(), reader.ReadUInt16());
                                // Only strides of at least 0x1C bytes carry a color attribute.
                                if (formatInfo.BufferLength >= 0x1C)
                                {
                                    vert.col = Read_8_8_8_8_Unorm(reader);
                                }
                            }
                            break;
                    }
                    // Orientation correction: rotate -90 degrees about X.
                    genericObj.TransformPosition(new Vector3(0), new Vector3(-90, 0, 0), new Vector3(1));
                }
            }
            genericObj.RemoveDuplicateVertices();
            genericObj.FlipUvsVertical();
        }
    }
}
/// <summary>
/// Parses a STR (string table) chunk from its raw data. The signed Version
/// word selects the record layout: 0 uses Pascal strings; -1/-2 use C
/// strings (-2 adds a discarded comment per string); -3 adds a language
/// code byte; -4 wraps multiple per-language sets. Strings are grouped by
/// language code in the Strings dictionary. Uses Windows-1252 encoding.
/// </summary>
/// <param name="BaseChunk">The chunk whose raw data (m_Data) backs this STR.</param>
public STR(IFFChunk BaseChunk) : base(BaseChunk)
{
    FileReader Reader = new FileReader(new MemoryStream(m_Data), false, Encoding.GetEncoding(1252));
    Version = Reader.ReadInt16();
    ushort NumStrings = 0;
    // Only parse when there is payload beyond the version word.
    if ((Reader.StreamLength - Reader.Position) > 2)
    {
        switch (Version)
        {
            case 0:
                // Version 0: Pascal strings; no language codes, no comments.
                NumStrings = Reader.ReadUShort();
                for (int i = 0; i < NumStrings; i++)
                {
                    TranslatedString Str = new TranslatedString();
                    Str.LangCode = LanguageCodes.unused;
                    Str.TranslatedStr = Reader.ReadPascalString();
                    if (Strings.ContainsKey(Str.LangCode))
                    {
                        Strings[Str.LangCode].Add(Str);
                    }
                    else
                    {
                        List<TranslatedString> LanguageSet = new List<TranslatedString>();
                        LanguageSet.Add(Str);
                        Strings.Add(Str.LangCode, LanguageSet);
                    }
                }
                break;
            case -1:
                // Version -1: C strings; no language codes, no comments.
                NumStrings = Reader.ReadUShort();
                for (int i = 0; i < NumStrings; i++)
                {
                    TranslatedString Str = new TranslatedString();
                    Str.LangCode = LanguageCodes.unused;
                    Str.TranslatedStr = Reader.ReadCString();
                    if (Strings.ContainsKey(Str.LangCode))
                    {
                        Strings[Str.LangCode].Add(Str);
                    }
                    else
                    {
                        List<TranslatedString> LanguageSet = new List<TranslatedString>();
                        LanguageSet.Add(Str);
                        Strings.Add(Str.LangCode, LanguageSet);
                    }
                }
                break;
            case -2:
                // Version -2: like -1, but each string is followed by a comment
                // string that is read and discarded.
                NumStrings = Reader.ReadUShort();
                for (int i = 0; i < NumStrings; i++)
                {
                    TranslatedString Str = new TranslatedString();
                    Str.LangCode = LanguageCodes.unused;
                    Str.TranslatedStr = Reader.ReadCString();
                    Reader.ReadCString(); //Comment
                    if (Strings.ContainsKey(Str.LangCode))
                    {
                        Strings[Str.LangCode].Add(Str);
                    }
                    else
                    {
                        List<TranslatedString> LanguageSet = new List<TranslatedString>();
                        LanguageSet.Add(Str);
                        Strings.Add(Str.LangCode, LanguageSet);
                    }
                }
                break;
            case -3:
                // Version -3: each string carries an explicit language code byte.
                NumStrings = Reader.ReadUShort();
                for (int i = 0; i < NumStrings; i++)
                {
                    TranslatedString Str = new TranslatedString();
                    Str.LangCode = (LanguageCodes)Reader.ReadByte();
                    Str.TranslatedStr = Reader.ReadCString();
                    Reader.ReadCString(); //Comment
                    if (Strings.ContainsKey(Str.LangCode))
                    {
                        Strings[Str.LangCode].Add(Str);
                    }
                    else
                    {
                        List<TranslatedString> LanguageSet = new List<TranslatedString>();
                        LanguageSet.Add(Str);
                        Strings.Add(Str.LangCode, LanguageSet);
                    }
                }
                break;
            case -4:
                // Version -4: an outer count of per-language sets; stored
                // language codes are offset by one.
                byte LanguageSets = Reader.ReadByte();
                for (int i = 0; i < LanguageSets; i++)
                {
                    NumStrings = Reader.ReadUShort();
                    for (int j = 0; j < NumStrings; j++)
                    {
                        TranslatedString Str = new TranslatedString();
                        Str.LangCode = (LanguageCodes)(Reader.ReadByte() + 1);
                        Str.TranslatedStr = Reader.ReadString();
                        Reader.ReadString(); //Comment
                        if (Strings.ContainsKey(Str.LangCode))
                        {
                            Strings[Str.LangCode].Add(Str);
                        }
                        else
                        {
                            List<TranslatedString> LanguageSet = new List<TranslatedString>();
                            LanguageSet.Add(Str);
                            Strings.Add(Str.LangCode, LanguageSet);
                        }
                    }
                }
                break;
        }
    }
    Reader.Close();
    // Raw chunk data is released once parsed into Strings.
    m_Data = null;
}
/// <summary>
/// Opens a file, transparently decompressing Yaz0/ZLIB/LZ4F containers by
/// recursing with the decompressed data, then matches the data against the
/// supported formats (by magic or by extension) and loads it into the UI.
/// </summary>
/// <param name="FileName">File path; used for the recent list, extension checks and display.</param>
/// <param name="data">Raw file bytes; read from disk when null.</param>
/// <param name="Compressed">True when the data was already extracted from a container.</param>
/// <param name="CompType">The compression the data was extracted from.</param>
public void OpenFile(string FileName, byte[] data = null, bool Compressed = false, CompressionType CompType = CompressionType.None)
{
    if (data == null)
    {
        data = File.ReadAllBytes(FileName);
    }
    if (File.Exists(FileName))
    {
        SaveRecentFile(FileName);
    }
    FileReader f = new FileReader(data);
    string Magic = f.ReadMagic(0, 4);
    string Magic2 = f.ReadMagic(0, 2);
    //Determine if the file is compressed or not
    if (Magic == "Yaz0")
    {
        // FIX: the reader was leaked on every recursive decompression path;
        // dispose it before recursing.
        f.Dispose();
        data = EveryFileExplorer.YAZ0.Decompress(data).ToArray();
        OpenFile(FileName, data, true, CompressionType.Yaz0);
        return;
    }
    if (Magic == "ZLIB")
    {
        data = FileReader.InflateZLIB(f.getSection(64, data.Length - 64));
        f.Dispose();
        OpenFile(FileName, data, true, CompressionType.Zlib);
        return;
    }
    if (Path.GetExtension(FileName) == ".cmp" && CompType == CompressionType.None)
    {
        f.Position = 0;
        // The leading int is the decompressed size header; reading it also
        // advances past it (value itself is unused here).
        int OuSize = f.ReadInt32();
        int InSize = data.Length - 4;
        data = STLibraryCompression.Type_LZ4F.Decompress(f.getSection(4, InSize));
        f.Dispose();
        OpenFile(FileName, data, true, CompressionType.Lz4f);
        return;
    }
    // FIX: was f.Dispose() followed by f.Close() — calling Close on an
    // already-disposed reader is redundant at best. One Dispose suffices.
    f.Dispose();
    foreach (IFileFormat format in SupportedFormats)
    {
        // NOTE(review): if Magic is a string, string.Reverse() is the LINQ
        // overload returning IEnumerable<char>, so the comparison below would
        // never match — confirm Reverse() is a project extension on string.
        Console.WriteLine(format.Magic.Reverse());
        Console.WriteLine(Magic2);
        if (format.Magic == Magic || format.Magic == Magic2 || format.Magic.Reverse() == Magic2)
        {
            format.CompressionType = CompType;
            format.FileIsCompressed = Compressed;
            format.Data = data;
            format.FileName = Path.GetFileName(FileName);
            format.Load();
            format.FilePath = FileName;
            if (format.EditorRoot != null)
            {
                objectList.treeView1.Nodes.Add(format.EditorRoot);
            }
            if (format.CanSave)
            {
                saveAsToolStripMenuItem.Enabled = true;
                saveToolStripMenuItem.Enabled = true;
            }
            if (format.UseEditMenu)
            {
                editToolStripMenuItem.Enabled = true;
            }
        }
        if (format.Magic == String.Empty) //Load by extension if magic isn't defined
        {
            foreach (string ext in format.Extension)
            {
                if (ext.Remove(0, 1) == Path.GetExtension(FileName))
                {
                    format.CompressionType = CompType;
                    format.FileIsCompressed = Compressed;
                    format.Data = data;
                    format.FileName = Path.GetFileName(FileName);
                    format.FilePath = FileName;
                    format.Load();
                    if (format.EditorRoot != null)
                    {
                        objectList.treeView1.Nodes.Add(format.EditorRoot);
                    }
                    if (format.CanSave)
                    {
                        saveAsToolStripMenuItem.Enabled = true;
                        saveToolStripMenuItem.Enabled = true;
                    }
                    if (format.UseEditMenu)
                    {
                        editToolStripMenuItem.Enabled = true;
                    }
                }
            }
        }
    }
}
public void TestMethodGetPath() { FileReader f = new FileReader(); var a = f.getPathList(); }
public bool Identify(File_Info fileInfo, Stream stream) { using (var reader = new FileReader(stream, true)) { return(reader.CheckSignature(4, "BAHS")); } }
public void Part2() { var result = _day24.SolvePart2(FileReader.ReadFile("day24.txt").Select(x => Convert.ToInt32(x)).ToArray()); Assert.AreEqual(74850409, result); }
// Builds the combined EF.Reverse.POCO.v3.ttinclude file from the generator
// C# project sources: every .cs file (minus ignored folders/files, with a
// fixed set of files emitted first) is concatenated between fixed T4
// header/footer text.
private static void CreateCoreTTInclude(string generatorRoot, string ttRoot)
{
    // First T4 header fragment; the generator version number is spliced in
    // between the two verbatim pieces.
    string header1 = @"<# // Copyright (C) Simon Hughes 2012 " + "// v" + _version + @" // If you want to submit a pull request, please modify the Generator C# project as this file // is automatically constructed from the C# Generator project during the build process. #> <#@ template debug=""true"" hostspecific=""true"" language=""C#"" #> <#@ include file=""EF6.Utility.CS.ttinclude""#><#@ assembly name=""System.Configuration"" #> <#@ assembly name=""System.Windows.Forms"" #> <#@ import namespace=""System.Data.Entity.Infrastructure.Pluralization"" #>";
    // Second header fragment: remaining T4 directives plus template bootstrap.
    const string header2 = @"<#@ import namespace=""EnvDTE"" #> <#@ import namespace=""Microsoft.VisualStudio.TextTemplating"" #> <#@ output extension="".cs"" encoding=""utf-8"" #> <# var DefaultNamespace = new CodeGenerationTools(this).VsNamespaceSuggestion() ?? ""DebugMode""; Settings.Root = Host.ResolvePath(string.Empty); #><#+";
    // Trailing helper appended after all generated code.
    const string footer = @" public static void ArgumentNotNull<T>(T arg, string name) where T : class { if (arg == null) { throw new ArgumentNullException(name); } } #>";
    var fileReaderStrategy = new FileReaderStrategy();
    // Build-output folders and generated/suppression files are never included.
    string[] ignoreFolders = { "\\bin", "\\obj" };
    string[] ignoreFiles =
    {
        "AssemblyInfo.cs",
        "EntityFrameworkTemplateFileManager.cs",
        "GeneratedTextTransformation.cs",
        "GlobalSuppressions.cs", // Resharper
    };
    // These files must appear first in the output, in this declared order.
    var filesToListFirst = new List<KeyValuePair<int, string>>
    {
        new KeyValuePair<int, string>(1, "Settings.cs"),
        new KeyValuePair<int, string>(2, "FilterSettings.cs"),
        new KeyValuePair<int, string>(3, "SingleContextFilter.cs")
    };
    var files = Directory
        .GetFiles(generatorRoot, "*.cs", SearchOption.AllDirectories)
        .OrderBy(x => x)
        .ToList();
    var filesToListFirstReaders = new List<KeyValuePair<int, IFileReader>>();
    var remainingFileReaders = new List<IFileReader>();
    foreach (var file in files)
    {
        var skip = false;
        var listFirst = false;
        var path = Path.GetDirectoryName(file);
        var filename = Path.GetFileName(file);
        // Skip anything under a build-output folder.
        foreach (var ignore in ignoreFolders)
        {
            if (path.Contains(ignore))
            {
                skip = true;
            }
        }
        if (skip)
        {
            continue;
        }
        // Skip explicitly excluded files.
        foreach (var ignore in ignoreFiles)
        {
            if (filename == ignore)
            {
                skip = true;
            }
        }
        if (skip)
        {
            continue;
        }
        // Determine whether this file belongs to the priority list and, if
        // so, record its ordering key.
        var order = 0;
        foreach (var fileToListFirst in filesToListFirst)
        {
            if (filename == fileToListFirst.Value)
            {
                listFirst = true;
                order = fileToListFirst.Key;
            }
        }
        var fileReader = new FileReader(fileReaderStrategy);
        // Only files that read successfully are included in the output.
        if (fileReader.ReadFile(file))
        {
            if (listFirst)
            {
                filesToListFirstReaders.Add(new KeyValuePair<int, IFileReader>(order, fileReader));
            }
            else
            {
                remainingFileReaders.Add(fileReader);
            }
        }
    }
    // Priority files first (by declared order), then everything else.
    var fileReaders = new List<IFileReader>();
    fileReaders.AddRange(filesToListFirstReaders.OrderBy(x => x.Key).Select(x => x.Value));
    fileReaders.AddRange(remainingFileReaders);
    // Emit header, collected usings, second header, all code, then the footer.
    using (var tt = File.CreateText(Path.Combine(ttRoot, "EF.Reverse.POCO.v3.ttinclude")))
    {
        var writerStrategy = new TTWriterStrategy();
        var writer = new FileWriter(tt, writerStrategy, fileReaders);
        tt.WriteLine(header1);
        writer.WriteUsings();
        tt.WriteLine(header2);
        writer.WriteCode();
        tt.WriteLine(footer);
    }
}
public override void Load(FileReader stream) { base.Load(stream); To = stream.ReadInt16(); }
/// <summary>
/// Parses an ASTC container stream: validates the magic number, reads the
/// block dimensions and 24-bit image extents, captures the raw payload and
/// derives the matching texture format.
/// </summary>
/// <param name="stream">Source stream; disposed when loading completes.</param>
public void Load(System.IO.Stream stream)
{
    using (FileReader reader = new FileReader(stream))
    {
        reader.ByteOrder = Syroot.BinaryData.ByteOrder.LittleEndian;
        magic = reader.ReadBytes(4);
        // Reassemble the 4-byte magic as a little-endian uint.
        uint magicval = magic[0] + 256 * (uint)(magic[1]) + 65536 * (uint)(magic[2]) + 16777216 * (uint)(magic[3]);
        if (magicval != MagicFileConstant)
        {
            throw new Exception("Invalid identifier");
        }
        BlockDimX = reader.ReadByte();
        BlockDimY = reader.ReadByte();
        BlockDimZ = reader.ReadByte();
        // Width/height/depth are stored as 24-bit little-endian integers.
        xsize = reader.ReadBytes(3);
        ysize = reader.ReadBytes(3);
        zsize = reader.ReadBytes(3);
        Width = (uint)(xsize[0] + 256 * xsize[1] + 65536 * xsize[2]);
        Height = (uint)(ysize[0] + 256 * ysize[1] + 65536 * ysize[2]);
        Depth = (uint)(zsize[0] + 256 * zsize[1] + 65536 * zsize[2]);
        // The payload starts at 0x10 and runs to the end of the stream.
        reader.Seek(0x10, System.IO.SeekOrigin.Begin);
        DataBlock = reader.ReadBytes((int)(reader.BaseStream.Length - reader.Position));
        Console.WriteLine(Width);
        Console.WriteLine(Height);
        Console.WriteLine(Depth);
        Format = GetAstcFormat();
    }
    // FIX: was stream.Dispose() followed by stream.Close(); closing an
    // already-disposed stream is redundant. A single Dispose suffices.
    stream.Dispose();
}

// Maps the parsed 2D block footprint (BlockDimX x BlockDimY) to the matching
// ASTC format constant. Extracted from the former inline if/else chain.
// NOTE(review): BlockDimZ is ignored here, so 3D block footprints are not
// distinguished — confirm only 2D ASTC images are expected.
private TEX_FORMAT GetAstcFormat()
{
    if (BlockDimX == 4 && BlockDimY == 4) { return TEX_FORMAT.ASTC_4x4_UNORM; }
    if (BlockDimX == 5 && BlockDimY == 4) { return TEX_FORMAT.ASTC_5x4_UNORM; }
    if (BlockDimX == 5 && BlockDimY == 5) { return TEX_FORMAT.ASTC_5x5_UNORM; }
    if (BlockDimX == 6 && BlockDimY == 5) { return TEX_FORMAT.ASTC_6x5_UNORM; }
    if (BlockDimX == 6 && BlockDimY == 6) { return TEX_FORMAT.ASTC_6x6_UNORM; }
    if (BlockDimX == 8 && BlockDimY == 5) { return TEX_FORMAT.ASTC_8x5_UNORM; }
    if (BlockDimX == 8 && BlockDimY == 6) { return TEX_FORMAT.ASTC_8x6_UNORM; }
    if (BlockDimX == 8 && BlockDimY == 8) { return TEX_FORMAT.ASTC_8x8_UNORM; }
    if (BlockDimX == 10 && BlockDimY == 10) { return TEX_FORMAT.ASTC_10x10_UNORM; }
    if (BlockDimX == 10 && BlockDimY == 5) { return TEX_FORMAT.ASTC_10x5_UNORM; }
    if (BlockDimX == 10 && BlockDimY == 6) { return TEX_FORMAT.ASTC_10x6_UNORM; }
    if (BlockDimX == 10 && BlockDimY == 8) { return TEX_FORMAT.ASTC_10x8_UNORM; }
    throw new Exception($"Unsupported block dims! ({BlockDimX} x {BlockDimY})");
}
public void Section_should_not_contain_duplicate_keys() { var reader = new FileReader(GetPath("duplicate-section-keys.txt")); var parser = new SectionParser(reader); }
// Parses an LM2 archive stream: reads the header and file-entry table,
// extracts the chunk table from the first file entry, then walks the chunk
// sub-entries building textures, models and material names as it goes.
public void Load(System.IO.Stream stream)
{
    modelFolder = new LM2_ModelFolder(this);
    DrawableContainer.Name = FileName;
    Renderer = new LM2_Renderer();
    DrawableContainer.Drawables.Add(Renderer);
    Text = FileName;
    using (var reader = new FileReader(stream))
    {
        reader.ByteOrder = Syroot.BinaryData.ByteOrder.LittleEndian;
        uint Identifier = reader.ReadUInt32();
        ushort Unknown = reader.ReadUInt16(); //Could also be 2 bytes, not sure. Always 0x0401
        IsCompressed = reader.ReadByte() == 1;
        reader.ReadByte(); //Padding
        uint FileCount = reader.ReadUInt32();
        uint LargestCompressedFile = reader.ReadUInt32();
        // One unknown byte per file entry starts at 0x2C.
        reader.SeekBegin(0x2C);
        byte[] Unknowns = reader.ReadBytes((int)FileCount);
        TreeNode tableNodes = new TreeNode("File Section Entries");
        long FileTablePos = reader.Position;
        for (int i = 0; i < FileCount; i++)
        {
            var file = new FileEntry(this);
            file.Text = $"entry {i}";
            file.Read(reader);
            fileEntries.Add(file);
            tableNodes.Nodes.Add(file);
            //The first file stores a chunk layout
            //The second one seems to be a duplicate?
            if (i == 0)
            {
                using (var tableReader = new FileReader(file.GetData()))
                {
                    ChunkTable = new LM2_ChunkTable();
                    ChunkTable.Read(tableReader);
                    // Debug-only tree nodes exposing the raw chunk table values.
                    TreeNode debugFolder = new TreeNode("DEBUG TABLE INFO");
                    Nodes.Add(debugFolder);
                    TreeNode list1 = new TreeNode("Entry List 1");
                    TreeNode list2 = new TreeNode("Entry List 2 ");
                    debugFolder.Nodes.Add(tableNodes);
                    debugFolder.Nodes.Add(list1);
                    debugFolder.Nodes.Add(list2);
                    debugFolder.Nodes.Add(chunkFolder);
                    foreach (var chunk in ChunkTable.ChunkEntries)
                    {
                        list1.Nodes.Add($"ChunkType {chunk.ChunkType} ChunkOffset {chunk.ChunkOffset} Unknown1 {chunk.Unknown1} ChunkSubCount {chunk.ChunkSubCount} Unknown3 {chunk.Unknown3}");
                    }
                    foreach (var chunk in ChunkTable.ChunkSubEntries)
                    {
                        list2.Nodes.Add($"ChunkType {chunk.ChunkType} ChunkSize {chunk.ChunkSize} Unknown {chunk.ChunkOffset}");
                    }
                }
            }
        }
        //Set an instance of our current data
        //Chunks are in order, so you build off of when an instance gets loaded
        TexturePOWE currentTexture = new TexturePOWE();
        LM2_Model currentModel = new LM2_Model(this);
        //Each part of the file is divided into multiple file/section entries
        //The first entry being the chunk table parsed before this
        //The second file being a duplicate (sometimes slightly larger than the first)
        //The third file stores texture headers, while the fourth one usually has the rest of the main data
        //Any additional ones currently are unknown how they work. Some of which have unknown compression as well
        byte[] File002Data = fileEntries[2].GetData(); //Get the third file
        byte[] File003Data = fileEntries[3].GetData(); //Get the fourth file
        int chunkId = 0;
        uint ImageHeaderIndex = 0;
        uint modelIndex = 0;
        foreach (var chunk in ChunkTable.ChunkSubEntries)
        {
            var chunkEntry = new ChunkDataEntry(this, chunk);
            // Most chunk types read from the fourth file; texture headers
            // override this to the third file below.
            chunkEntry.DataFile = File003Data;
            chunkEntry.Text = $"Chunk {chunk.ChunkType} {chunkId++}";
            chunkEntries.Add(chunkEntry);
            chunkFolder.Nodes.Add(chunkEntry);
            switch (chunk.ChunkType)
            {
                case SubDataType.TextureHeader:
                    chunkEntry.DataFile = File002Data;
                    //Read the info
                    using (var textureReader = new FileReader(chunkEntry.FileData))
                    {
                        currentTexture = new TexturePOWE();
                        currentTexture.ImageKey = "texture";
                        currentTexture.SelectedImageKey = currentTexture.ImageKey;
                        currentTexture.Index = ImageHeaderIndex;
                        currentTexture.Read(textureReader);
                        currentTexture.Text = $"Texture {ImageHeaderIndex}";
                        textureFolder.Nodes.Add(currentTexture);
                        Renderer.TextureList.Add(currentTexture);
                        ImageHeaderIndex++;
                    }
                    break;
                case SubDataType.TextureData:
                    // Attaches the raw image bytes to the most recent header.
                    currentTexture.ImageData = chunkEntry.FileData;
                    break;
                case SubDataType.ModelStart:
                    currentModel = new LM2_Model(this);
                    currentModel.ModelInfo = new LM2_ModelInfo();
                    currentModel.Text = $"Model {modelIndex}";
                    currentModel.ModelInfo.Data = chunkEntry.FileData;
                    modelFolder.Nodes.Add(currentModel);
                    modelIndex++;
                    break;
                case SubDataType.MeshBuffers:
                    currentModel.BufferStart = chunkEntry.Entry.ChunkOffset;
                    currentModel.BufferSize = chunkEntry.Entry.ChunkSize;
                    break;
                case SubDataType.VertexStartPointers:
                    using (var vtxPtrReader = new FileReader(chunkEntry.FileData))
                    {
                        while (!vtxPtrReader.EndOfStream)
                        {
                            currentModel.VertexBufferPointers.Add(vtxPtrReader.ReadUInt32());
                        }
                    }
                    break;
                case SubDataType.SubmeshInfo:
                    // Each submesh record is 0x28 bytes.
                    int MeshCount = chunkEntry.FileData.Length / 0x28;
                    using (var meshReader = new FileReader(chunkEntry.FileData))
                    {
                        for (uint i = 0; i < MeshCount; i++)
                        {
                            LM2_Mesh mesh = new LM2_Mesh();
                            mesh.Read(meshReader);
                            currentModel.Meshes.Add(mesh);
                        }
                    }
                    currentModel.ModelInfo.Read(new FileReader(currentModel.ModelInfo.Data), currentModel.Meshes);
                    break;
                case SubDataType.ModelTransform:
                    using (var transformReader = new FileReader(chunkEntry.FileData))
                    {
                        //This is possibly very wrong
                        //The data isn't always per mesh, but sometimes is
                        if (transformReader.BaseStream.Length / 0x40 == currentModel.Meshes.Count)
                        {
                            for (int i = 0; i < currentModel.Meshes.Count; i++)
                            {
                                currentModel.Meshes[i].Transform = transformReader.ReadMatrix4();
                            }
                        }
                    }
                    break;
                case SubDataType.MaterialName:
                    using (var matReader = new FileReader(chunkEntry.FileData))
                    {
                        materialNamesFolder.Nodes.Add(matReader.ReadZeroTerminatedString());
                    }
                    break;
                default:
                    break;
            }
        }
        // Buffers can only be resolved after all pointers/sizes are collected.
        foreach (LM2_Model model in modelFolder.Nodes)
        {
            model.ReadVertexBuffers();
        }
        if (modelFolder.Nodes.Count > 0)
        {
            Nodes.Add(modelFolder);
        }
        if (textureFolder.Nodes.Count > 0)
        {
            Nodes.Add(textureFolder);
        }
        if (materialNamesFolder.Nodes.Count > 0)
        {
            Nodes.Add(materialNamesFolder);
        }
    }
}
/// <inheritdoc/> protected override void OnLoad() { // Store caret positions before reload. var caretPositions = new TextViewPosition[ViewModels.Count]; for (int i = 0; i < ViewModels.Count; i++) { caretPositions[i] = ((TextDocumentViewModel)ViewModels[i]).TextEditor.TextArea.Caret.Position; } string text; using (var fileStream = FileReader.OpenFile(Uri.LocalPath, UTF8NoBOM)) { _encoding = fileStream.CurrentEncoding; text = fileStream.ReadToEnd(); } // Check for binary files. // See http://stackoverflow.com/questions/910873/how-can-i-determine-if-a-file-is-binary-or-text-in-c foreach (char c in text) { if (char.IsControl(c) && c != '\t' && c != '\n' && c != '\r') { // Binary file detected. Cancel? var result = MessageBox.Show( "Loading file: \"" + this.GetName() + "\"\n\n" + "Unsupported file format. Do you want to open the file as a text document?\n\n" + "Warning: Binary files can slow down the text editor.", Editor.ApplicationName, MessageBoxButton.OKCancel, MessageBoxImage.Warning); if (result == MessageBoxResult.Cancel) { throw new OperationCanceledException(); } break; } } AvalonEditDocument.Text = text; AvalonEditDocument.UndoStack.ClearAll(); AvalonEditDocument.UndoStack.MarkAsOriginalFile(); _fileInfo = new FileInfo(Uri.LocalPath); // Update syntax-highlighting mode of all views. foreach (var view in ViewModels.OfType <TextDocumentViewModel>()) { view.UpdateSyntaxHighlighting(); } // Restore caret position for (int i = 0; i < ViewModels.Count; i++) { ((TextDocumentViewModel)ViewModels[i]).TextEditor.TextArea.Caret.Position = caretPositions[i]; } BeginInvokeUpdateProperties(); }
/// <summary> /// Open the working file and set the file read result /// </summary> /// <param name="path"></param> public void OpenWorkingFile(string path) { var reader = new FileReader(); FileReadResult = reader.ReadAllLines(path); }
static IMonsterDefinitionFile OpenMonsterDefinitionFile(FileReader reader) { return(new MonsterDefinitionFile_Ao(reader)); }
/// <summary>
/// Reads the companion "gpu" file paired with this "cpu" file and attaches
/// the GPU-side payloads to the chunks already parsed from the CPU file.
/// </summary>
/// <param name="FileName">Path of the cpu file; the gpu path is derived by substitution.</param>
private void ReadGPUFile(string FileName)
{
    string path = FileName.Replace("cpu", "gpu");
    if (!System.IO.File.Exists(path))
    {
        return;
    }
    //Read the data based on CPU chunk info
    using (var reader = new FileReader(path))
    {
        for (int i = 0; i < Chunks.Count; i++)
        {
            // Skip chunks with no payload, no name and no parsed CPU data.
            if (Chunks[i].FileSize == 0 && Chunks[i].FileName == string.Empty && Chunks[i].ChunkData == null)
            {
                continue;
            }
            long pos = reader.Position;
            var identifer = Chunks[i].Identifier.Reverse();
            var fileInfo = new FileInfo();
            //Get CPU chunk data
            if (Chunks[i].ChunkData != null)
            {
                if (Chunks[i].ChunkData is SWUTexture)
                {
                    // Texture pixel data lives in the GPU file; nothing is
                    // added to the files list for textures.
                    SWUTexture texFile = (SWUTexture)Chunks[i].ChunkData;
                    if (Chunks[i].FileSize != 0)
                    {
                        texFile.ImageData = reader.ReadBytes((int)Chunks[i].FileSize);
                    }
                    continue;
                }
                if (Chunks[i].ChunkData is AnimationFile)
                {
                    AnimationFile animFile = (AnimationFile)Chunks[i].ChunkData;
                    fileInfo.FileName = animFile.FileName;
                    fileInfo.FileData = animFile.Data;
                }
                else if (Chunks[i].ChunkData is SkeletonFile)
                {
                    SkeletonFile skelFile = (SkeletonFile)Chunks[i].ChunkData;
                    fileInfo.FileName = skelFile.FileName;
                    fileInfo.FileData = skelFile.Data;
                }
                else if (Chunks[i].ChunkData is MaterialFile)
                {
                    // FIX: this branch appeared twice verbatim in the original;
                    // the second copy was dead code and has been removed.
                    MaterialFile matFile = (MaterialFile)Chunks[i].ChunkData;
                    fileInfo.FileName = matFile.FileName;
                    fileInfo.FileData = matFile.Data;
                }
                else if (Chunks[i].ChunkData is ModelFile)
                {
                    ModelFile modelFile = (ModelFile)Chunks[i].ChunkData;
                    fileInfo.FileName = modelFile.FileName;
                    byte[] BufferData = new byte[0];
                    if (Chunks[i].FileSize != 0)
                    {
                        BufferData = reader.ReadBytes((int)Chunks[i].FileSize);
                    }
                    // Models combine both CPU-side data blocks with the GPU buffer.
                    fileInfo.FileData = Utils.CombineByteArray(modelFile.Data, modelFile.Data2, BufferData);
                    // FIX: the original seeked to pos + NextFilePtr here AND
                    // again below — the second, identical seek now suffices.
                }
            }
            else //Else get the data from GPU
            {
                if (Chunks[i].FileName != string.Empty)
                {
                    fileInfo.FileName = $"{Chunks[i].FileName}";
                }
                else
                {
                    fileInfo.FileName = $"{i} {Chunks[i].ChunkId} {identifer.ToString("X")}";
                }
                if (Chunks[i].FileSize != 0)
                {
                    fileInfo.FileData = reader.ReadBytes((int)Chunks[i].FileSize);
                }
                else
                {
                    fileInfo.FileData = new byte[0];
                }
            }
            files.Add(fileInfo);
            //Don't advance the stream unless the chunk has a pointer
            if (Chunks[i].NextFilePtr != 0)
            {
                reader.Seek(pos + Chunks[i].NextFilePtr, System.IO.SeekOrigin.Begin);
            }
        }
    }
}
} // end Constructor // Updates the stored data according to the fileName specified. public override void UpdateData(String fileName) { Data = FileReader.UpdateGenericPathData(updateDocName, fileName); }
public static void AnalyzeTsFile(string fileName, out int jk, out DateTime?startTime) { startTime = null; var fr = new FileReader(); fr.Open(fileName); var size = fr.Length; var pos = 0; var pids = new Dictionary <int, int>(); jk = 0; while (pos < size && (startTime == null || jk == 0)) { // いくらなんでも探しすぎ if (pos > 0x1000000) { break; } // sync if (fr.Get(pos) != 0x47 || fr.Get(pos + 188) != 0x47) { ++pos; continue; } // PID var pid = (fr.Get(pos + 1) * 256 + fr.Get(pos + 2)) & 0x1FFF; var payloadStart = fr.Get(pos + 1) & 0x40; if (!pids.ContainsKey(pid)) { pids[pid] = 0; } pids[pid] = (pids[pid]) + 1; // PAT if (pid == 0x00) { byte[] p = fr.Get(pos, 188); byte adaptSize = p[4]; long length = (p[adaptSize + 6] * 256 + p[adaptSize + 7]) & 0x0FFF; for (var i = 13 + adaptSize; i < 5 + length - 4 - adaptSize; i += 4) { uint sid = (uint)(p[i] * 256 + p[i + 1]); if (sid > 0) { if (ntsIdList.ContainsKey(sid)) { jk = ntsIdList[sid]; } } } } // TDT/TOT if (pid == 0x14) { byte[] p = fr.Get(pos, 188); var adaptSize = p[4]; if (p[adaptSize + 5] == 0x70 || p[adaptSize + 5] == 0x73) { var ymd = p[adaptSize + 8] * 256 + p[adaptSize + 9]; var ydash = (ymd * 20 - 301564) / 7305; var mdash = (ymd * 10000 - 149561000 - ydash * 1461 / 4 * 10000) / 306001; var d = (mdash == 14 || mdash == 15) ? 1 : 0; var day = ymd - 14956 - (ydash * 1461 / 4) - (mdash * 306001 / 10000); var year = (ydash + d) + 1900; var month = (int)Math.Floor(mdash - 1 - d * 12.0); var date = new DateTime(year, month, day, int.Parse($"{p[adaptSize + 10]:x2}"), int.Parse($"{p[adaptSize + 11]:x2}"), int.Parse($"{p[adaptSize + 12]:x2}")); if (startTime == null) { // if (date.Second > 10) // { // 録画マージンを考慮して調整する(50秒まで) // date = date.AddMinutes(1); // } startTime = date; } } } pos += 188; } }
public void Load(System.IO.Stream stream) { CanSave = false; using (var reader = new FileReader(stream)) { reader.SetByteOrder(true); Text = FileName; while (!reader.EndOfStream) { ChunkHeader chunk = new ChunkHeader(); chunk.Position = reader.Position; chunk.Identifier = reader.ReadUInt32(); uint unk = reader.ReadUInt32(); chunk.ChunkSize = reader.ReadUInt32(); chunk.ChunkId = reader.ReadUInt32(); chunk.NextFilePtr = reader.ReadUInt32(); chunk.FileSize = reader.ReadUInt32(); uint unk2 = reader.ReadUInt32(); uint unk3 = reader.ReadUInt32(); Chunks.Add(chunk); var Identifer = chunk.Identifier.Reverse(); switch (Identifer) { case ChunkTextureFile: SWUTexture texture = new SWUTexture(); reader.SeekBegin(chunk.Position + 72); texture.ImageKey = "texture"; texture.SelectedImageKey = "texture"; texture.ReadChunk(reader); chunk.ChunkData = texture; if (chunk.ChunkSize > 244) { reader.Seek(chunk.Position + 244, System.IO.SeekOrigin.Begin); chunk.FileName = reader.ReadString(Syroot.BinaryData.BinaryStringFormat.ZeroTerminated); texture.Text = chunk.FileName; } Nodes.Add(texture); break; case ChunkMetaInfo: break; case ChunkAnimInfo: if (chunk.ChunkSize > 0xB0) { reader.Seek(chunk.Position + 0xB0, System.IO.SeekOrigin.Begin); chunk.FileName = reader.ReadString(Syroot.BinaryData.BinaryStringFormat.ZeroTerminated); } break; case ChunkAnimData: AnimationFile animFile = new AnimationFile(); animFile.Read(reader); chunk.ChunkData = animFile; break; case ChunkSkeletonData: SkeletonFile skelFile = new SkeletonFile(); skelFile.Read(reader); chunk.ChunkData = skelFile; break; case ChunkModelData: ModelFile modelFile = new ModelFile(); modelFile.Read(reader); chunk.ChunkData = modelFile; break; case ChunkMaterialData: MaterialFile matFile = new MaterialFile(); matFile.Read(reader); chunk.ChunkData = matFile; break; } reader.Seek(chunk.Position + chunk.ChunkSize, System.IO.SeekOrigin.Begin); } ReadGPUFile(FilePath); } TreeHelper.CreateFileDirectory(this); }
public void WithNoWeavingNotChanged() { var sourceProjectFile = Path.GetFullPath(@"TestProjects\ProjectWithNoWeaving.csproj"); var targetFile = Path.GetTempFileName(); File.Copy(sourceProjectFile, targetFile, true); try { new ProjectRemover(targetFile); Assert.AreEqual(FileReader.Read(@"TestProjects\ProjectWithNoWeaving.csproj"), FileReader.Read(targetFile)); } finally { File.Delete(targetFile); } }