/// <summary>
/// Parses a SimGroup container: validates the header, then loads the Unknown08
/// value table. The second table (at Unknown0COffset) is only seeked to; its
/// entries are not read yet (the loop body is empty — presumably unfinished).
/// </summary>
/// <exception cref="FormatException">Declared header size is implausible.</exception>
public void Deserialize(Stream input)
{
    input.Seek(0, SeekOrigin.Begin);
    var header = input.ReadStructure<SimGroup.FileHeader>();

    // Sanity check: the declared header size must at least cover the marshalled
    // struct and cannot exceed the stream itself.
    if (header.HeaderSize < Marshal.SizeOf(header) || header.HeaderSize > input.Length)
    {
        throw new FormatException("bad data size");
    }

    uint[] unknown08s = new uint[header.Unknown14Count];
    input.Seek(header.Unknown08Offset, SeekOrigin.Begin);
    // NOTE(review): ushort loop counter compared against Unknown14Count — if the
    // count field is wider than 16 bits and exceeds 65535 this never terminates;
    // confirm the field's declared type.
    for (ushort i = 0; i < header.Unknown14Count; i++)
    {
        unknown08s[i] = input.ReadValueU32();
    }

    input.Seek(header.Unknown0COffset, SeekOrigin.Begin);
    // Empty body: the Unknown0C entries are currently skipped, not parsed.
    for (ushort i = 0; i < header.Unknown16Count; i++)
    {
    }
}
/// <summary>
/// Serializes this tag as an ID3v2.3 block at the start of <paramref name="Target"/>:
/// 10-byte header, all frames, then the syncsafe total frame size is patched
/// back into the header at offset 6. Closes the target when done.
/// </summary>
protected override void InternalWrite(Stream Target)
{
    Target.Seek(0, SeekOrigin.Begin);
    Target.Write(ByteHelper.StringToByte("ID3"));
    // Version 3.0, flags, dummy size (the real size is patched in below)
    Target.Write(new byte[] { 3, 0, 0, 0, 0, 0, 0 }, 0, 7);
    long totalFrameSize = 0;
    Stream frameStream = null;
    foreach (Frame f in Frames)
    {
        frameStream = FrameObjectToByteArray(f);
        totalFrameSize += frameStream.Length;
        frameStream.CopyTo(Target);
        // NOTE(review): frame streams are flushed but never disposed — possible leak.
        frameStream.Flush();
    }
    // Patch the real size (7 significant bits per byte, "syncsafe") into the header.
    Target.Seek(6, SeekOrigin.Begin);
    Target.Write(ByteHelper.GetByteArrayWith7SignificantBitsPerByteForInt((int)totalFrameSize));
    // Frames are fully written; now the audio data must follow =>
    // seek the source stream to the right spot and read until the end position.
    Target.Seek(DataStart, SeekOrigin.Begin);
    // Target.Copy(SourceStream, DataStart, DataEnd);
    Target.Flush();
    Target.Close();
}
/// <summary>
/// Reads a Westwood HVA animation: per frame and per limb a 3x4 float
/// transform, transposed here into a column-major 4x4 matrix in Transforms.
/// </summary>
public HvaReader(Stream s)
{
    // Index swaps for transposing a matrix
    var ids = new byte[] { 0, 4, 8, 12, 1, 5, 9, 13, 2, 6, 10, 14 };
    // Skip the 16-byte file-name header preceding the counts.
    s.Seek(16, SeekOrigin.Begin);
    FrameCount = s.ReadUInt32();
    LimbCount = s.ReadUInt32();

    // Skip limb names
    s.Seek(16 * LimbCount, SeekOrigin.Current);
    Transforms = new float[16 * FrameCount * LimbCount];

    for (var j = 0; j < FrameCount; j++)
        for (var i = 0; i < LimbCount; i++)
        {
            // Convert to column-major matrices and add the final matrix row
            var c = 16 * (LimbCount * j + i);
            Transforms[c + 3] = 0;
            Transforms[c + 7] = 0;
            Transforms[c + 11] = 0;
            Transforms[c + 15] = 1;
            for (var k = 0; k < 12; k++)
                Transforms[c + ids[k]] = s.ReadFloat();
        }
}
/// <summary>
/// Parses a PE image from the stream: locates the PE header via the DOS
/// stub's e_lfanew field (offset 0x3C), reads the image/optional headers and
/// section headers, then loads each section's raw content.
/// </summary>
public void Initialize(Stream stream)
{
    if (!IsValidExecutable(stream))
        throw new Exception("This loader plugin can't handle that kind of stream.");
    BinaryReader br = new BinaryReader(stream);
    // e_lfanew: file offset of the "PE\0\0" signature.
    stream.Seek(0x3C, SeekOrigin.Begin);
    uint peOff = br.ReadUInt32();
    stream.Seek(peOff, SeekOrigin.Begin);
    imHdr = new ImageHeader();
    imHdr.Read(br);
    optHdr = new OptionalHeader();
    optHdr.Read(br);
    sectHdrs = new SectionHeader[imHdr.NumberOfSections];
    for (int i = 0; i < imHdr.NumberOfSections; i++)
    {
        sectHdrs[i] = new SectionHeader(optHdr.ImageBase);
        sectHdrs[i].Read(br);
    }
    foreach (SectionHeader s in sectHdrs)
    {
        stream.Seek(s.PointerToRawData, SeekOrigin.Begin);
        // The buffer is VirtualSize bytes, but only min(VirtualSize, SizeOfRawData)
        // bytes exist on disk — the remainder stays zero-filled (uninitialized data).
        // NOTE(review): the Read return value is ignored; a short read would also
        // silently leave zeros — confirm that is acceptable here.
        s.Content = new byte[s.VirtualSize];
        stream.Read(s.Content, 0, s.VirtualSize < s.SizeOfRawData ? (int)s.VirtualSize : (int)s.SizeOfRawData);
    }
}
/// <summary>
/// Reads the PE optional header: peeks the 2-byte magic to pick the PE32 or
/// PE32+ layout, parses it, then positions the stream exactly
/// <paramref name="size"/> bytes past where the header started.
/// </summary>
public void FromStream(Stream stream, int size)
{
    long start = stream.Position;
    BinaryReader reader = new BinaryReader(stream, Encoding.ASCII);
    ushort magic = reader.ReadUInt16();
    // Rewind so the chosen parser re-reads from the header start, magic included.
    stream.Seek(start, SeekOrigin.Begin);
    _optionalHeaderBase = null;
    switch (magic)
    {
        case Constants.IMAGE_NT_OPTIONAL_HDR32_MAGIC:
            _optionalHeaderBase = new OptionalHeaderPE32();
            break;
        case Constants.IMAGE_NT_OPTIONAL_HDR64_MAGIC:
            _optionalHeaderBase = new OptionalHeaderPE64();
            break;
        case Constants.IMAGE_ROM_OPTIONAL_HDR_MAGIC:
        // ROM images are recognized but intentionally left unparsed (null).
        //_optionalHeaderBase = new OptionalHeaderPE32();
        //break;
        default:
            break;
    }
    if (_optionalHeaderBase != null)
    {
        _optionalHeaderBase.FromStream(stream);
    }
    // Skip to the end of the declared optional-header area regardless of how
    // much was actually parsed.
    stream.Seek(start + size, SeekOrigin.Begin);
}
/// <summary>
/// Deserializes a savegame from a stream. Gzip input is detected by its magic
/// bytes (0x1f 0x8b) and decompressed first; otherwise the raw bytes are
/// decoded as UTF-8 and handed to the string overload.
/// </summary>
/// <exception cref="ArgumentNullException">stream is null.</exception>
public static ISavegame Deserialize(Stream stream)
{
    if (stream == null)
        throw new ArgumentNullException(nameof(stream));
    if (stream.Position != 0)
        stream.Seek(0, SeekOrigin.Begin);
    // Peek the first two bytes, then rewind for the real read.
    var checkBuffer = new byte[2];
    stream.Read(checkBuffer, 0, 2);
    stream.Seek(0, SeekOrigin.Begin);
    if (checkBuffer.SequenceEqual(new byte[] { 0x1f, 0x8b }))
    {
        using (var gZipStream = new GZipStream(stream, CompressionMode.Decompress))
        {
            using (var memoryStream = new MemoryStream())
            {
                gZipStream.CopyTo(memoryStream);
                return Deserialize(Encoding.UTF8.GetString(memoryStream.ToArray()));
            }
        }
    }
    // Fast path: a MemoryStream can hand over its bytes without an extra copy loop.
    if (stream is MemoryStream)
    {
        var memoryStream = (MemoryStream)stream;
        return Deserialize(Encoding.UTF8.GetString(memoryStream.ToArray()));
    }
    using (var memoryStream = new MemoryStream())
    {
        stream.CopyTo(memoryStream);
        return Deserialize(Encoding.UTF8.GetString(memoryStream.ToArray()));
    }
}
/// <summary>
/// Parses a CBMD banner: reads the header, pre-reads the decompressed size of
/// the CGFX payload, LZ11-decompresses it into DecompressedCGFX, and opens the
/// result as a CGFX context. Always returns true.
/// </summary>
public bool Open(Stream fs)
{
    Header = MarshalUtil.ReadStruct<CBMD>(fs); //read header
    //-- Graphics Reading --
    //Read ahead the size of the uncompressed file
    // NOTE(review): this takes the 4 bytes after the compression-type byte as
    // the size; LZ11 headers store a 3-byte size there, so the top byte comes
    // from compressed data — confirm payloads stay under 0x1000000 bytes.
    fs.Seek(Header.CompressedCGFXOffset + 1, SeekOrigin.Begin);
    var intBytes = new byte[4];
    fs.Read(intBytes, 0, 4);
    DecompressedCGFX = new byte[BitConverter.ToUInt32(intBytes, 0)];
    //Read again from the start
    fs.Seek(Header.CompressedCGFXOffset, SeekOrigin.Begin);
    var ms = new MemoryStream(DecompressedCGFX);
    try
    {
        var lz11 = new DSDecmp.Formats.Nitro.LZ11();
        lz11.Decompress(fs, Header.CBMDLength - fs.Position, ms);
    }
    catch
    {
        //might throw exception if size of compressed is bigger than it should be
    }
    ms.Seek(0, SeekOrigin.Begin);
    CGFXContext = new CGFXContext();
    CGFXContext.Open(ms);
    return true;
}
/// <summary>
/// Loads a bundle: size prefix, fixed header, SHA1 + ebx/res/chunk entry
/// lists, optional chunk-meta BJSON, then the shared string table (names for
/// ebx and res) and finally the payload data that begins at
/// startoffset + headersize.
/// </summary>
public void Load(Stream data, bool fast = false)
{
    // The size prefix precedes the header; all header-relative offsets are
    // measured from the position right after it.
    uint headersize = Helpers.ReadLEUInt(data);
    long startoffset = data.Position;
    Header = new HeaderStruct();
    Header.magic = Helpers.ReadLEUInt(data);
    Header.totalCount = Helpers.ReadLEUInt(data);
    Header.ebxCount = Helpers.ReadLEUInt(data);
    Header.resCount = Helpers.ReadLEUInt(data);
    Header.chunkCount = Helpers.ReadLEUInt(data);
    Header.stringOffset = Helpers.ReadLEUInt(data);
    Header.chunkMetaOffset = Helpers.ReadLEUInt(data);
    Header.chunkMetaSize = Helpers.ReadLEUInt(data);
    ReadSha1List(data);
    ReadEbxList(data);
    ReadResList(data);
    ReadChunkList(data);
    // Chunk metadata only exists when there are chunks.
    if (Header.chunkCount != 0)
        ChunkMeta = BJSON.ReadField(data);
    else
        ChunkMeta = null;
    // Both name passes read from the same string table, so seek to it twice.
    data.Seek(startoffset + Header.stringOffset, 0);
    ReadEbxListNames(data);
    data.Seek(startoffset + Header.stringOffset, 0);
    ReadResListNames(data);
    data.Seek(startoffset + headersize, 0);
    ReadEbxListData(data, fast);
    ReadResListData(data, fast);
    ReadChunkListData(data, fast);
    ApplySHA1s();
}
/// <summary>
/// Opens a Volition packfile (or .str2 stream file): reads the file table,
/// reconstructs entry offsets for condensed layouts, reads the filename
/// block, and for condensed+compressed archives inflates the entire data
/// section into memory up front.
/// </summary>
public Packfile(Stream stream, bool isStr2)
{
    IsStr2 = isStr2;
    stream.Seek(0, SeekOrigin.Begin);
    FileData = stream.ReadStruct<PackfileFileData>();
    m_Files = new List<IPackfileEntry>();
    uint runningPosition = 0;
    List<PackfileEntryFileData> entryFileData = new List<PackfileEntryFileData>();
    for (int i = 0; i < FileData.NumFiles; i++)
    {
        PackfileEntryFileData data = stream.ReadStruct<PackfileEntryFileData>();
        if (IsCondensed && IsCompressed)
        {
            // Entries sit back-to-back inside one zlib stream; recompute their
            // offsets from the running uncompressed size.
            data.Flags = 0;
            data.Start = runningPosition;
            runningPosition += data.Size;
        }
        else if (IsCondensed)
        {
            // Uncompressed condensed entries are 16-byte aligned.
            data.Start = runningPosition;
            runningPosition += data.Size.Align(16);
        }
        entryFileData.Add(data);
    }
    // Filename block: 2-byte aligned, NUL-terminated ASCII names.
    for (int i = 0; i < FileData.NumFiles; i++)
    {
        stream.Align(2);
        string filename = stream.ReadAsciiNullTerminatedString();
        stream.Seek(1, SeekOrigin.Current);
        m_Files.Add(new PackfileEntry(this, entryFileData[i], filename));
        stream.Align(2);
    }
    if (IsCondensed && IsCompressed)
    {
        // Inflate the single zlib blob into a memory-backed data stream.
        DataOffset = 0;
        byte[] compressedData = new byte[FileData.CompressedDataSize];
        stream.Read(compressedData, 0, (int)FileData.CompressedDataSize);
        using (MemoryStream tempStream = new MemoryStream(compressedData))
        {
            using (Stream s = new ZlibStream(tempStream, CompressionMode.Decompress, true))
            {
                byte[] uncompressedData = new byte[FileData.DataSize];
                // NOTE(review): a single Read on a decompression stream may
                // return fewer bytes than requested — confirm DataSize payloads
                // always arrive in one call, or loop-fill here.
                s.Read(uncompressedData, 0, (int)FileData.DataSize);
                DataStream = new MemoryStream(uncompressedData);
            }
        }
    }
    else
    {
        // Otherwise entry data is served straight from the source stream.
        DataStream = stream;
        DataOffset = stream.Position;
    }
}
/// <summary>
/// Scans an "MMDb" container and indexes its frames without loading payloads:
/// validates the 4-byte magic + 2-byte version, reads the file id, then walks
/// [nameLen][name][int32 length][payload] records and records each payload's
/// position and length.
/// </summary>
public FrameScanner(Stream input)
{
    input.Seek(0, SeekOrigin.Begin);
    // Big enough for the longest frame head: name (max 255 bytes) + int32 length.
    byte[] frameheadbuffer = new byte[byte.MaxValue + 4];
    byte[] buffer = new byte[6];
    input.Read(buffer, 0, 6);
    headerOkay = Encoding.ASCII.GetString(buffer, 0, 4) == "MMDb";
    if (!headerOkay)
        return;
    version = new Version(buffer[4], buffer[5]);
    if (version == FileVersions.First)
    {
        input.Read(buffer, 0, 4);
        id = BitConverter.ToInt32(buffer, 0);
        while (input.Position < input.Length)
        {
            // One record: 1 byte name length, name bytes, then int32 payload length.
            byte b = (byte)input.ReadByte();
            input.Read(frameheadbuffer, 0, 4 + b);
            FrameIdentifier identifier = new FrameIdentifier(frameheadbuffer, 0, b);
            int length = BitConverter.ToInt32(frameheadbuffer, b);
            int position = (int)input.Position;
            // 5 + b = total head size (length byte + name + int32).
            Frame frame = new Frame(position, length, 5 + b);
            frames.Add(identifier, frame);
            // Skip the payload; only its location is indexed.
            input.Seek(length, SeekOrigin.Current);
        }
    }
    else
    {
        // Unknown version: treat the file as unreadable.
        headerOkay = false;
        return;
    }
}
/// <summary>
/// Opens a heap over the given stream. A stream too short to hold the header
/// is initialized as a fresh heap; otherwise the existing header and
/// free-space map are loaded and the persisted system-data region is
/// re-reserved from the allocator.
/// </summary>
public Heap(Stream stream, bool useCompression = false, AllocationStrategy strategy = AllocationStrategy.FromTheCurrentBlock)
{
    stream.Seek(0, SeekOrigin.Begin); //support Seek?
    Stream = stream;
    space = new Space();
    used = new Dictionary<long, Pointer>();
    reserved = new Dictionary<long, Pointer>();
    if (stream.Length < AtomicHeader.SIZE) //create new
    {
        header = new AtomicHeader();
        header.UseCompression = useCompression;
        // Everything after the header is initially free space.
        space.Add(new Ptr(AtomicHeader.SIZE, long.MaxValue - AtomicHeader.SIZE));
    }
    else //open exist (ignore the useCompression flag)
    {
        header = AtomicHeader.Deserialize(Stream);
        stream.Seek(header.SystemData.Position, SeekOrigin.Begin);
        Deserialize(new BinaryReader(stream));
        //manual alloc header.SystemData
        var ptr = space.Alloc(header.SystemData.Size);
        // The allocator must hand back exactly the persisted system-data slot;
        // anything else means the free-space map is corrupt.
        if (ptr.Position != header.SystemData.Position)
            throw new Exception("Logical error.");
    }
    Strategy = strategy;
    currentVersion++;
}
/// <summary>
/// Opens an indexed packfile: reads the file table, then resolves each
/// entry's name and extension from their separate string pools and registers
/// the entries. The stream itself is kept as the backing data stream.
/// </summary>
public Packfile(Stream stream)
{
    DataStream = stream;
    stream.Seek(0, SeekOrigin.Begin);
    FileData = stream.ReadStruct<PackfileFileData>();
    m_Files = new List<IPackfileEntry>();
    stream.Seek(GetEntryDataOffset(), SeekOrigin.Begin);
    List<PackfileEntryFileData> entryFileData = new List<PackfileEntryFileData>();
    for (int i = 0; i < FileData.IndexCount; i++)
    {
        var fileData = stream.ReadStruct<PackfileEntryFileData>();
        entryFileData.Add(fileData);
    }
    // NOTE(review): never populated or read — appears to be a dead local.
    List<string> fileNames = new List<string>();
    for (int i = 0; i < FileData.IndexCount; i++)
    {
        var fileData = entryFileData[i];
        // Name and extension live in separate NUL-terminated string pools.
        stream.Seek(CalculateEntryNamesOffset() + fileData.FilenameOffset, SeekOrigin.Begin);
        string name = stream.ReadAsciiNullTerminatedString();
        stream.Seek(CalculateExtensionsOffset() + fileData.ExtensionOffset, SeekOrigin.Begin);
        string extension = stream.ReadAsciiNullTerminatedString();
        m_Files.Add(new PackfileEntry(this, fileData, name + "." + extension));
    }
}
/// <summary>
/// Builds a decode exception enriched with a context trace: up to
/// TRACE_SEEK_BACK bytes before and TRACE_SEEK_FORWARD bytes after the
/// current stream position, rendered as UTF-8 with a caret marking the error
/// offset. The stream position is restored before returning.
/// </summary>
internal static BEncodedFormatDecodeException CreateTraced(string message, Exception innerException, Stream traceStream)
{
    StringBuilder traceBuilder = new StringBuilder();
    BEncodedFormatDecodeException exception = new BEncodedFormatDecodeException(message, innerException);
    // Clamp the trace window to the stream's actual bounds.
    long seekBack = traceStream.Position > TRACE_SEEK_BACK ? TRACE_SEEK_BACK : traceStream.Position;
    long seekForward = traceStream.Length > (traceStream.Position + TRACE_SEEK_FORWARD) ? TRACE_SEEK_FORWARD : traceStream.Length - traceStream.Position;
    byte[] traceBuffer = new byte[seekBack + seekForward];
    // Back
    traceStream.Seek(seekBack * -1, SeekOrigin.Current);
    traceStream.Read(traceBuffer, 0, (int)seekBack);
    // Forward
    traceStream.Read(traceBuffer, (int)seekBack, (int)seekForward);
    // Undo the forward read so the stream position is unchanged for the caller.
    traceStream.Seek(seekForward * -1, SeekOrigin.Current);
    traceBuilder.Append(Encoding.UTF8.GetString(traceBuffer));
    traceBuilder.AppendLine();
    // Underline the window; '^' sits at the byte where decoding failed.
    for (int i = 0; i < traceBuffer.Length; i++)
        traceBuilder.Append(i != seekBack ? '-' : '^');
    traceBuilder.Append(" (^ denotes error)");
    traceBuilder.AppendLine();
    traceBuilder.AppendLine();
    exception.decodeTrace = traceBuilder.ToString();
    return exception;
}
/// <summary>
/// Computes the SHA-1 digest used for strong-name signing: hashes the image
/// headers and the .text section while skipping the strong-name signature
/// blob itself, whose file offset is returned via
/// <paramref name="strong_name_pointer"/>.
/// </summary>
static byte [] HashStream (Stream stream, ImageWriter writer, out int strong_name_pointer)
{
    const int buffer_size = 8192;
    var text = writer.text;
    var header_size = (int) writer.GetHeaderSize ();
    var text_section_pointer = (int) text.PointerToRawData;
    var strong_name_directory = writer.GetStrongNameSignatureDirectory ();

    if (strong_name_directory.Size == 0)
        throw new InvalidOperationException ();

    // Translate the directory's RVA into a raw file offset inside .text.
    strong_name_pointer = (int) (text_section_pointer + (strong_name_directory.VirtualAddress - text.VirtualAddress));
    var strong_name_length = (int) strong_name_directory.Size;

    var sha1 = new SHA1Managed ();
    var buffer = new byte [buffer_size];
    // CryptoStream over Stream.Null: bytes are hashed, not stored.
    using (var crypto_stream = new CryptoStream (Stream.Null, sha1, CryptoStreamMode.Write)) {
        // Hash: headers, then .text up to the signature, then everything after
        // it — the signature bytes themselves are excluded.
        stream.Seek (0, SeekOrigin.Begin);
        CopyStreamChunk (stream, crypto_stream, buffer, header_size);

        stream.Seek (text_section_pointer, SeekOrigin.Begin);
        CopyStreamChunk (stream, crypto_stream, buffer, (int) strong_name_pointer - text_section_pointer);

        stream.Seek (strong_name_length, SeekOrigin.Current);
        CopyStreamChunk (stream, crypto_stream, buffer, (int) (stream.Length - (strong_name_pointer + strong_name_length)));
    }

    return sha1.Hash;
}
/// <summary>
/// Classifies a PE image as console application, GUI application, or DLL by
/// reading the Subsystem field of its optional header.
/// </summary>
/// <exception cref="InvalidOperationException">The stream is not a PE image.</exception>
private static PEFileKinds GetPEFileKinds(Stream s)
{
    // e_lfanew at 0x3C: little-endian file offset of the "PE\0\0" signature.
    var rawPeSignatureOffset = new byte[4];
    s.Seek(0x3c, SeekOrigin.Begin);
    s.Read(rawPeSignatureOffset, 0, 4);
    int peSignatureOffset = rawPeSignatureOffset[0];
    peSignatureOffset |= rawPeSignatureOffset[1] << 8;
    peSignatureOffset |= rawPeSignatureOffset[2] << 16;
    peSignatureOffset |= rawPeSignatureOffset[3] << 24;
    // Signature (4 bytes) + COFF file header (20 bytes).
    var coffHeader = new byte[24];
    s.Seek(peSignatureOffset, SeekOrigin.Begin);
    s.Read(coffHeader, 0, 24);
    byte[] signature = { (byte)'P', (byte)'E', (byte)'\0', (byte)'\0' };
    for (int index = 0; index < 4; index++)
    {
        if (coffHeader[index] != signature[index])
            throw new InvalidOperationException("Attempted to check a non PE file for the console subsystem!");
    }
    // Subsystem sits at offset 68 of the optional header (same offset for
    // PE32 and PE32+), which begins right after the 24 bytes read above.
    var subsystemBytes = new byte[2];
    s.Seek(68, SeekOrigin.Current);
    s.Read(subsystemBytes, 0, 2);
    int subSystem = subsystemBytes[0] | subsystemBytes[1] << 8;
    return
        // http://support.microsoft.com/kb/90493
        subSystem == 3 ? PEFileKinds.ConsoleApplication :
        subSystem == 2 ? PEFileKinds.WindowApplication :
        PEFileKinds.Dll; /*IMAGE_SUBSYSTEM_WINDOWS_CUI*/
}
/// <summary>
/// Creates a new plate file sized for <paramref name="filecount"/> entries:
/// writes the header, reserves a power-of-two hash-bucket area plus the
/// directory entries, extends the file to its final size, and seeds the first
/// (free-list) directory entry. The stream is left positioned at 0.
/// </summary>
public PlateFile2(string filename, int filecount)
{
    fileStream = File.Open(filename, FileMode.Create);
    //Initialize the header
    header.Signature = 0x17914242;
    header.HashBuckets = NextPowerOfTwo(filecount);
    header.HashTableLocation = Marshal.SizeOf(header);
    // 8 bytes per hash bucket.
    header.FirstDirectoryEntry = header.HashTableLocation + header.HashBuckets * 8;
    header.NextFreeDirectoryEntry = header.FirstDirectoryEntry + Marshal.SizeOf(entry);
    header.FileCount = 0;
    header.FreeEntries = header.HashBuckets - 1;
    //Write the header and the empty Hash area. O/S will zero the data
    Byte[] headerData = GetHeaderBytes();
    fileStream.Write(headerData, 0, headerData.Length);
    // Writing one byte at the far end forces the O/S to extend (and zero-fill)
    // the file up to its final size.
    fileStream.Seek(header.NextFreeDirectoryEntry + Marshal.SizeOf(entry) * header.HashBuckets, SeekOrigin.Begin);
    fileStream.WriteByte(42);
    // Seed the first directory entry: it describes the free-entry region.
    fileStream.Seek(header.FirstDirectoryEntry, SeekOrigin.Begin);
    entry = new DirectoryEntry();
    entry.size = (uint)header.FreeEntries * (uint)Marshal.SizeOf(entry);
    entry.location = header.FirstDirectoryEntry;
    byte[] entryData = GetEntryBytes();
    fileStream.Write(entryData, 0, entryData.Length);
    fileStream.Seek(0, SeekOrigin.Begin);
}
/// <summary>
/// Wraps raw PCM samples in a canonical 44-byte RIFF/WAV header and writes
/// header plus data to <paramref name="Output"/>. The RawData position is
/// restored afterwards.
/// </summary>
/// <exception cref="ArgumentException">The format is not plain PCM.</exception>
public static void WriteToWav(Stream RawData, Stream Output, AudioFormat AudioFormat)
{
    if (AudioFormat.WaveFormat != WaveFormatType.Pcm)
        throw new ArgumentException("You can write only to WAV.");
    BinaryWriter bwOutput = new BinaryWriter(Output);
    //WAV header
    bwOutput.Write("RIFF".ToCharArray());
    // RIFF chunk size = total file size - 8 = data size + 36.
    bwOutput.Write((uint)(RawData.Length + 36));
    bwOutput.Write("WAVE".ToCharArray());
    bwOutput.Write("fmt ".ToCharArray());
    bwOutput.Write((uint)0x10);    // fmt chunk size
    bwOutput.Write((ushort)0x01);  // format tag: PCM
    bwOutput.Write((ushort)AudioFormat.Channels);
    bwOutput.Write((uint)AudioFormat.SamplesPerSecond);
    // Byte rate, block align, bits per sample — all derived from the format.
    bwOutput.Write((uint)(AudioFormat.BitsPerSample * AudioFormat.SamplesPerSecond * AudioFormat.Channels / 8));
    bwOutput.Write((ushort)(AudioFormat.BitsPerSample * AudioFormat.Channels / 8));
    bwOutput.Write((ushort)AudioFormat.BitsPerSample);
    bwOutput.Write("data".ToCharArray());
    bwOutput.Write((uint)RawData.Length);
    // Copy the samples from the start, then put the source back where it was.
    long originalRawDataStreamPosition = RawData.Position;
    RawData.Seek(0, SeekOrigin.Begin);
    byte[] buffer = new byte[4096];
    int read;
    while ((read = RawData.Read(buffer, 0, 4096)) > 0)
    {
        bwOutput.Write(buffer, 0, read);
    }
    RawData.Seek(originalRawDataStreamPosition, SeekOrigin.Begin);
}
/// <summary>
/// Reads a Westwood HVA animation: per frame and per limb a 3x4 float
/// transform, transposed into column-major 4x4 matrices in Transforms. Each
/// matrix is additionally validated to be invertible; a singular one aborts
/// the load with a message naming the file, section, and frame.
/// </summary>
public HvaReader(Stream s, string fileName)
{
    // Index swaps for transposing a matrix
    var ids = new byte[] { 0, 4, 8, 12, 1, 5, 9, 13, 2, 6, 10, 14 };
    // Skip the 16-byte file-name header preceding the counts.
    s.Seek(16, SeekOrigin.Begin);
    FrameCount = s.ReadUInt32();
    LimbCount = s.ReadUInt32();

    // Skip limb names
    s.Seek(16 * LimbCount, SeekOrigin.Current);
    Transforms = new float[16 * FrameCount * LimbCount];

    var testMatrix = new float[16];
    for (var j = 0; j < FrameCount; j++)
        for (var i = 0; i < LimbCount; i++)
        {
            // Convert to column-major matrices and add the final matrix row
            var c = 16 * (LimbCount * j + i);
            Transforms[c + 3] = 0;
            Transforms[c + 7] = 0;
            Transforms[c + 11] = 0;
            Transforms[c + 15] = 1;
            for (var k = 0; k < 12; k++)
                Transforms[c + ids[k]] = s.ReadFloat();

            // Sanity check: reject non-invertible transforms early.
            Array.Copy(Transforms, 16 * (LimbCount * j + i), testMatrix, 0, 16);
            if (Util.MatrixInverse(testMatrix) == null)
                throw new InvalidDataException(
                    "The transformation matrix for HVA file `{0}` section {1} frame {2} is invalid because it is not invertible!"
                    .F(fileName, i, j));
        }
}
/// <summary>
/// Copies <paramref name="source"/> to <paramref name="dest"/>, optionally
/// rewinding the source first. If the source is seekable its original
/// position is restored afterwards; requesting a rewind on a non-seekable
/// source throws.
/// </summary>
/// <param name="source">Stream to read from.</param>
/// <param name="dest">Stream to write to.</param>
/// <param name="rewindSource">When true, copy from the beginning of the source.</param>
public static void CopyStream(Stream source, Stream dest, bool rewindSource)
{
    long restorePosition1 = -1;
    if (source.CanSeek)
    {
        restorePosition1 = source.Position;
        if (rewindSource)
            source.Seek(0, SeekOrigin.Begin);
    }
    else if (rewindSource)
    {
        throw new Exception("Can't rewind source");
    }

    byte[] buffer = new byte[bufferSize];
    int readAmount;
    // Always read into the start of the buffer. (The previous version kept an
    // unused `readOffset` local that was never advanced — removed.)
    while ((readAmount = source.Read(buffer, 0, bufferSize)) > 0)
    {
        dest.Write(buffer, 0, readAmount);
    }

    if (source.CanSeek)
        source.Seek(restorePosition1, SeekOrigin.Begin);
}
/// <summary>
/// Returns the first candidate encoding whose preamble (BOM) matches the
/// start of the stream, or null when none matches. An encoding with an empty
/// preamble matches trivially, so such candidates always win when reached.
/// The stream is left rewound to position 0.
/// </summary>
/// <param name="stream">Seekable, readable stream to probe.</param>
/// <param name="encodings">Candidate encodings, tried in order.</param>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
/// <exception cref="ArgumentException">The stream cannot be both seeked and read.</exception>
public static Encoding Detect(Stream stream, IEnumerable<Encoding> encodings)
{
    if (stream == null)
    {
        throw new ArgumentNullException("stream");
    }
    if (encodings == null)
    {
        throw new ArgumentNullException("encodings");
    }
    // BUG FIX: the old guard used && (rejecting only streams that can neither
    // seek NOR read), so a readable-but-unseekable stream slipped through and
    // crashed at Seek below. Both capabilities are required.
    if (!stream.CanSeek || !stream.CanRead)
    {
        throw new ArgumentException("The stream must be seekable and readable.", "stream");
    }
    foreach (Encoding encoding in encodings)
    {
        // Compare the encoding's preamble byte-by-byte from the stream start.
        stream.Seek(0, SeekOrigin.Begin);
        bool found = true;
        foreach (byte b in encoding.GetPreamble())
        {
            if (b != stream.ReadByte())
            {
                found = false;
                break;
            }
        }
        if (found)
        {
            return encoding;
        }
    }
    stream.Seek(0, SeekOrigin.Begin);
    return null;
}
/// <summary>
/// Loads a .ico stream: validates the ICONDIR header (reserved == 0,
/// type == 1), then reads every directory entry and its image payload into a
/// single icon named "Untitled", returned wrapped in a MultiIcon.
/// </summary>
/// <exception cref="InvalidMultiIconFileException">Malformed icon header.</exception>
public unsafe MultiIcon Load(Stream stream)
{
    stream.Position = 0;
    SingleIcon singleIcon = new SingleIcon("Untitled");
    ICONDIR iconDir = new ICONDIR(stream);
    if (iconDir.idReserved != 0)
        throw new InvalidMultiIconFileException();
    if (iconDir.idType != 1)
        throw new InvalidMultiIconFileException();
    int entryOffset = sizeof(ICONDIR);
    // Add Icon Images one by one to the new entry created
    for (int i = 0; i < iconDir.idCount; i++)
    {
        stream.Seek(entryOffset, SeekOrigin.Begin);
        ICONDIRENTRY entry = new ICONDIRENTRY(stream);
        // If there is missing information in the header... lets try to calculate it
        entry = CheckAndRepairEntry(entry);
        stream.Seek(entry.dwImageOffset, SeekOrigin.Begin);
        // NOTE(review): the image size passed here is "rest of stream", not the
        // entry's declared byte count — confirm IconImage self-limits.
        singleIcon.Add(new IconImage(stream, (int)(stream.Length - stream.Position)));
        entryOffset += sizeof(ICONDIRENTRY);
    }
    return new MultiIcon(singleIcon);
}
/// <summary>
/// Brute-force search for the SSM table base in a ROM image: probes every
/// 4-byte-aligned offset in [100000, min(length, 5000000)) with
/// SSMBaseRecursion, first with a tight diff limit and then once more with a
/// looser one. Returns the matching offset as hex, or a placeholder string
/// when nothing matches.
/// </summary>
public static string getSSMBase(Stream imageStream)
{
    // NOTE(review): unused here — appears to be a leftover.
    byte[] byc = new byte[4];
    long highlimit = 5000000;
    long lowlimit = 100000;
    long difflimit = 100000;
    // Never scan past the end of the image.
    if (imageStream.Length < highlimit)
        highlimit = imageStream.Length;
    // Pass 1: strict difference limit.
    for (long i = lowlimit; i < highlimit; i += 4)
    {
        long start = i;
        imageStream.Seek(i, SeekOrigin.Begin);
        if (SSMBaseRecursion(imageStream, i, 0, 0, difflimit))
            return start.ToString("X");
        else
            continue;
    }
    // Pass 2: retry with a relaxed limit before giving up.
    difflimit += 40000;
    for (long i = lowlimit; i < highlimit; i += 4)
    {
        long start = i;
        imageStream.Seek(i, SeekOrigin.Begin);
        if (SSMBaseRecursion(imageStream, i, 0, 0, difflimit))
            return start.ToString("X");
        else
            continue;
    }
    return "Enter SSM Base";
}
/// <summary>
/// Decodes the next bencoded object from the stream by peeking at its first
/// byte: 'i' = integer, 'l' = list, 'd' = dictionary, '0'-'9' = byte string.
/// Returns null (with the marker byte consumed) when the byte matches none of
/// these — e.g. the 'e' terminator of an enclosing container or end-of-stream.
/// </summary>
public IBObject DecodeNext(Stream inputStream)
{
    // (An unused `lastPosition` local capturing inputStream.Position was removed.)
    IBObject returnValue = null;
    int firstByteNextObject = inputStream.ReadByte();
    if (firstByteNextObject == Definitions.ASCII_i)
    {
        // Push the marker back so the transform sees the full token.
        inputStream.Seek(-1, SeekOrigin.Current);
        returnValue = integerTransform.Decode(inputStream); // ex: i10e
    }
    else if (firstByteNextObject == Definitions.ASCII_l)
    {
        inputStream.Seek(-1, SeekOrigin.Current);
        returnValue = listTransform.Decode(inputStream); // ex: li10e4:spame
    }
    else if (firstByteNextObject == Definitions.ASCII_d)
    {
        inputStream.Seek(-1, SeekOrigin.Current);
        returnValue = dictionaryTransform.Decode(inputStream);
    }
    else if (firstByteNextObject >= Definitions.ASCII_0 && firstByteNextObject <= Definitions.ASCII_9)
    {
        inputStream.Seek(-1, SeekOrigin.Current);
        returnValue = byteStringTransform.Decode(inputStream); // ex: 4:spam
    }
    return returnValue;
}
/// <summary>
/// Emits a data command: the command tag and byte count, followed by up to
/// <paramref name="length"/> bytes copied from <paramref name="source"/>
/// starting at <paramref name="offset"/>. The source position is restored
/// even on failure.
/// </summary>
public void WriteDataCommand(Stream source, long offset, long length)
{
    writer.Write(BinaryFormat.DataCommand);
    writer.Write(length);
    var originalPosition = source.Position;
    try
    {
        source.Seek(offset, SeekOrigin.Begin);
        // BUG FIX: the old code cast `length` to int BEFORE taking the minimum,
        // so lengths over int.MaxValue truncated (possibly to a negative buffer
        // size). Clamp in long arithmetic first, then cast the small result.
        var buffer = new byte[(int)Math.Min(length, 1024L * 1024L)];
        int read;
        long soFar = 0;
        // Reads are capped at the remaining byte count so exactly `length`
        // bytes (at most) follow the header.
        while ((read = source.Read(buffer, 0, (int)Math.Min(length - soFar, buffer.Length))) > 0)
        {
            soFar += read;
            writer.Write(buffer, 0, read);
        }
    }
    finally
    {
        source.Seek(originalPosition, SeekOrigin.Begin);
    }
}
/// <summary>
/// Emits a data command: the command tag and byte count, followed by up to
/// <paramref name="length"/> bytes copied from <paramref name="source"/>
/// starting at <paramref name="offset"/>. The source position is restored
/// even on failure.
/// </summary>
public void WriteDataCommand(Stream source, long offset, long length)
{
    writer.Write(BinaryFormat.DataCommand);
    writer.Write(length);
    var originalPosition = source.Position;
    try
    {
        source.Seek(offset, SeekOrigin.Begin);
        var buffer = new byte[(int)Math.Min(length, 1024L * 1024L)];
        long remaining = length;
        int read;
        // BUG FIX: the previous loop read until end-of-stream and ignored
        // `length` entirely, so more bytes than the declared command length
        // could be emitted after the header. Reads are now capped at the
        // remaining byte count.
        while (remaining > 0 && (read = source.Read(buffer, 0, (int)Math.Min(remaining, buffer.Length))) > 0)
        {
            writer.Write(buffer, 0, read);
            remaining -= read;
        }
    }
    finally
    {
        source.Seek(originalPosition, SeekOrigin.Begin);
    }
}
/// <summary>
/// Parses a CASC archive-index file: the entry count sits 12 bytes before the
/// end; each record is a 16-byte key plus big-endian size and offset. Entries
/// are registered in CDNIndexData tagged with archive number <paramref name="i"/>.
/// </summary>
private void ParseIndex(Stream stream, int i)
{
    // NOTE(review): disposing the BinaryReader also disposes the caller's
    // stream — confirm callers do not reuse it afterwards.
    using (var br = new BinaryReader(stream))
    {
        stream.Seek(-12, SeekOrigin.End);
        int count = br.ReadInt32();
        stream.Seek(0, SeekOrigin.Begin);
        // Each record: 16-byte key + 4-byte size + 4-byte offset.
        if (count * (16 + 4 + 4) > stream.Length)
            throw new Exception("ParseIndex failed");
        for (int j = 0; j < count; ++j)
        {
            byte[] key = br.ReadBytes(16);
            // An all-zero key is treated as padding: skip it and retry once.
            if (key.IsZeroed()) // wtf?
                key = br.ReadBytes(16);
            if (key.IsZeroed()) // wtf?
                throw new Exception("key.IsZeroed()");
            IndexEntry entry = new IndexEntry();
            entry.Index = i;
            entry.Size = br.ReadInt32BE();
            entry.Offset = br.ReadInt32BE();
            CDNIndexData.Add(key, entry);
        }
    }
}
/// <summary>
/// Heuristically decides whether the stream is a map this importer can
/// handle: the last 4 bytes give the decompressed length, and the gzip
/// payload must contain the Java object-serialization magic (0xAC 0xED).
/// Returns false (never throws) on any parse failure.
/// </summary>
public bool Claims( Stream MapStream ) {
    byte[] temp = new byte[8];
    byte[] data;
    int length;

    try {
        // Trailer: little-endian int32 length of the decompressed payload.
        MapStream.Seek( -4, SeekOrigin.End );
        MapStream.Read( temp, 0, sizeof( int ) );
        MapStream.Seek( 0, SeekOrigin.Begin );
        length = BitConverter.ToInt32( temp, 0 );
        data = new byte[length];

        using( GZipStream reader = new GZipStream( MapStream, CompressionMode.Decompress, true ) ) {
            // BUG FIX: a single Read() on a GZipStream may return fewer bytes
            // than requested; loop until the buffer is full or the stream ends.
            int filled = 0;
            int read;
            while( filled < length && (read = reader.Read( data, filled, length - filled )) > 0 ) {
                filled += read;
            }
        }

        // Scan for the Java serialization magic number.
        for( int i = 0; i < length - 1; i++ ) {
            if( data[i] == 0xAC && data[i + 1] == 0xED ) {
                return true;
            }
        }
        return false;
    } catch( Exception ) {
        // Any malformed input simply means "not ours".
        return false;
    }
}
/// <summary>
/// Scans the stream from the beginning for the first occurrence of
/// <paramref name="pattern"/>; returns -1 when it is absent.
/// NOTE(review): on a match this returns stream.Position, which is the offset
/// just PAST the last pattern byte, not the offset where the pattern starts —
/// callers appear to rely on that; confirm before "fixing".
/// </summary>
public int FindBytePattern(Stream stream, byte[] pattern)
{
    //a simple pattern matcher
    stream.Seek(0, SeekOrigin.Begin);
    for (int i = 0; i < stream.Length; i++)
    {
        var b = stream.ReadByte();
        if (b == pattern[0])
        {
            // First byte matched: try to consume the rest of the pattern.
            bool match = true;
            for (int j = 1; j < pattern.Length; j++)
            {
                var b2 = stream.ReadByte();
                if (b2 != pattern[j])
                {
                    match = false;
                    break;
                }
            }
            if (match)
                return (int)stream.Position;
            else
                // Rewind to just after the candidate byte and keep scanning.
                stream.Seek(i + 1, SeekOrigin.Begin);
        }
    }
    return -1; //no match
}
/// <summary>
/// Reads an ID3v1 tag from the end of the stream and converts its fixed-width
/// fields into v2-style frames (TIT2/TCOM/TALB/TYER, plus TRCK when the
/// ID3v1.1 track-number convention is detected in the comment field).
/// </summary>
private void CreateV1Frames(Stream stream)
{
    // ID3v1 header at the end ...
    // (-125 = the 128-byte tag minus its 3-byte "TAG" marker)
    stream.Seek(-125, SeekOrigin.End);
    // 30 chars title   => TIT2
    // 30 chars artist  => TCOM
    // 30 chars album   => TALB
    // 4 chars year     => TYER
    // 30 chars comment - may also carry the track number (TRCK) in its last byte
    // 1 byte genre
    // == 125
    foreach (var item in new[] { "TIT2", "TCOM", "TALB" })
    {
        AddFrame(item, ByteHelper.BytesToString(stream.Read(30)));
    }
    AddFrame("TYER", ByteHelper.BytesToString(stream.Read(4)));
    // comment - not needed; skip to its final two bytes
    stream.Seek(28, SeekOrigin.Current);
    var track = stream.Read(2);
    // ID3v1.1: a zero byte followed by a non-zero byte marks the last comment
    // byte as a binary track number.
    if (track[0] == 0 && track[1] != 0)
    {
        // then track[1] is a number;
        // NOTE(review): the < 48 guard presumably rejects ASCII digits /
        // free-text comment bytes — confirm tracks >= 48 aren't expected.
        if (track[1] < 48)
        {
            AddFrame("TRCK", ((int)track[1]).ToString());
        }
    }
}
/// <summary>
/// Recursively reads a table of contents starting at the current stream
/// position: entries are consumed until an unrecognized type appears; SubTOC
/// entries are followed (their offsets are relative to this TOC's base) and
/// PNG entries are collected into the result.
/// </summary>
private void ReadTOC(Stream S, TOC T)
{
    ArrayList Entries = new ArrayList();
    ArrayList SubTOCs = new ArrayList();
    long BaseOffset = S.Position;
    TOCEntry TE = new TOCEntry(S, BaseOffset);
    bool ValidEntry = true;
    while (ValidEntry)
    {
        switch (TE.Type)
        {
            case EntryType.SubTOC:
                // Recurse into the sub-TOC, then restore the position so the
                // entry list continues where it left off.
                long Pos = S.Position;
                S.Seek(BaseOffset + TE.Offset_, SeekOrigin.Begin);
                TOC SubTOC = new TOC();
                SubTOC.Name = TE.Name;
                this.ReadTOC(S, SubTOC);
                SubTOCs.Add(SubTOC);
                S.Seek(Pos, SeekOrigin.Begin);
                break;
            case EntryType.PNG:
                Entries.Add(TE);
                break;
            default:
                // Any other type terminates this TOC.
                ValidEntry = false;
                break;
        }
        // NOTE(review): one more TOCEntry is constructed (and read from the
        // stream) even after the terminator — confirm the resulting stream
        // position doesn't matter to callers.
        TE = new TOCEntry(S, BaseOffset);
    }
    T.SubTOCs = (TOC[])SubTOCs.ToArray(typeof(TOC));
    T.Entries = (TOCEntry[])Entries.ToArray(typeof(TOCEntry));
}
/// <summary>
/// Rewinds the backing file (when present) and deserializes its JSON content
/// into an <see cref="Agenda"/>.
/// </summary>
public Agenda Leer()
{
    // Start reading from the top of the file on every call.
    file?.Seek(0, SeekOrigin.Begin);
    string contenido = Lector.ReadToEnd();
    return JsonConvert.DeserializeObject<Agenda>(contenido);
}
/// <summary>
/// POSTs to the given address using the WinRT HttpClient: requestData rides
/// in the query string; only the (optional) image stream goes into the body.
/// Returns the response body on success, null on a non-success status or any
/// exception (failures are logged, never thrown).
/// </summary>
protected override async Task <string> RequestAsync(string address, String requestData = null, Stream imageData = null)
{
    try {
        // Request data is appended to the URL, not sent as body content.
        if (requestData != null)
        {
            address += "?" + requestData;
        }

        UtilityHelper.CountlyLogging("POST " + address);

        //make sure stream is at start
        imageData?.Seek(0, SeekOrigin.Begin);

        var httpResponseMessage = await Client.PostAsync(new Uri(address), (imageData != null)?new HttpStreamContent(imageData.AsInputStream()) : null);

        if (httpResponseMessage.IsSuccessStatusCode)
        {
            return(await httpResponseMessage.Content.ReadAsStringAsync());
        }
        else
        {
            return(null);
        }
    } catch (Exception ex) {
        UtilityHelper.CountlyLogging("Encountered a exception while making a POST request, " + ex.ToString());
        return(null);
    }
}
// Demo: synthesizes a spoken countdown via the TTS client, appends each
// phrase's audio to one WAV writer, then copies the combined output to
// test.wav.
static void Main(string[] args)
{
    var option = new Dictionary <string, object>()
    {
        { "spd", 5 },  // speech speed
        { "vol", 5 },  // volume
        { "per", 0 }   // voice; 4 = emotional "YaYa" child voice
    };
    var text = "滴,滴,答,10,9,8,7,6,5,4,3,2,1";
    WaveFileWriter writer = null;
    Stream output = null;
    // Synthesize each comma-separated phrase and append it to the writer.
    foreach (var s in text.Split(','))
    {
        var result = _ttsClient.Synthesis(s, option);
        if (!result.Success)
        {
            Console.WriteLine(result.ErrorMsg);
        }
        var ret = WriteSpeech(result.Data, 1, ref writer);
        if (ret != null)
        {
            output = ret;
        }
    }
    writer?.Flush();
    // Rewind the combined stream and persist it to disk.
    output?.Seek(0, SeekOrigin.Begin);
    using (var fs = new FileStream("test.wav", FileMode.Create))
    {
        output?.CopyTo(fs);
        fs.Close();
    }
    writer?.Close();
    Console.ReadLine();
}
/// <summary>
/// Feeds the stream into the image loader for the async image view. The
/// stream is cloned into a MemoryStream on a worker task (rewinding source
/// and clone when seekable) so the loader owns an independent copy; a color
/// filter is applied once loading finishes.
/// </summary>
private void SetAsyncStreamImage(Stream imageStream)
{
    ImageViewAsync imageAsync;
    if ((imageAsync = CastImageViewToAsync()) == null)
    {
        return;
    }
    ImageService.Instance
        .LoadStream(cancelationToken => System.Threading.Tasks.Task.Factory.StartNew(
            () =>
            {
                cancelationToken.ThrowIfCancellationRequested();
                // Copy into a private MemoryStream so the source can be reused.
                var streamClone = new MemoryStream();
                if (imageStream?.CanSeek ?? false)
                {
                    imageStream?.Seek(0, SeekOrigin.Begin);
                }
                imageStream?.CopyTo(streamClone);
                // Hand the clone over rewound so the loader reads from the top.
                if (streamClone?.CanSeek ?? false)
                {
                    streamClone?.Seek(0, SeekOrigin.Begin);
                }
                return (streamClone as Stream);
            }, cancelationToken))
        .Finish(obj => SetImageColorFilter(_imageColor))
        .DownSampleMode(InterpolationMode.Low)
        .Into(imageAsync);
}
/// <summary>
/// Composites a fighter portrait with its rank badge (badge drawn in the
/// top-right corner, 10px inset) and returns a snapshot. Input streams are
/// rewound first; the decoded bitmaps are always disposed.
/// </summary>
private SKImage Combine(Stream fighterFile, Stream rankFile)
{
    SKBitmap fighter = null;
    SKBitmap rank = null;
    try
    {
        fighterFile?.Seek(0, SeekOrigin.Begin);
        rankFile?.Seek(0, SeekOrigin.Begin);
        // NOTE(review): Decode returns null on bad input, which would make the
        // Width/Height accesses below throw — confirm inputs are pre-validated.
        fighter = SKBitmap.Decode(fighterFile);
        rank = SKBitmap.Decode(rankFile);
        using (var tempSurface = SKSurface.Create(new SKImageInfo(fighter.Width, fighter.Height)))
        {
            var canvas = tempSurface.Canvas;
            canvas.Clear(SKColors.Transparent);
            canvas.DrawBitmap(fighter, SKRect.Create(0, 0, fighter.Width, fighter.Height));
            canvas.DrawBitmap(rank, SKRect.Create(fighter.Width - rank.Width - 10, 0, rank.Width, rank.Height));
            // The snapshot is independent of the surface, so disposing the
            // surface afterwards is safe.
            return (tempSurface.Snapshot());
        }
    }
    finally
    {
        fighter?.Dispose();
        rank?.Dispose();
    }
}
/// <summary>
/// Seeks the underlying stream and returns this package for call chaining.
/// </summary>
/// <exception cref="InvalidOperationException">
/// No stream is attached, or the attached stream is not seekable.
/// </exception>
public IPackage Seek(long offset, SeekOrigin origin)
{
    bool seekable = Stream?.CanSeek == true;
    if (!seekable)
    {
        throw new InvalidOperationException();
    }
    Stream?.Seek(offset, origin);
    return this;
}
/// <summary>
/// Platform-specific networking: POSTs to the address via HttpWebRequest.
/// An image stream is sent as the JSON-typed body; when requestData is also
/// present it takes precedence and is sent form-url-encoded instead. Returns
/// status code and body; on exception the partially filled result. The data
/// stream is always closed.
/// </summary>
/// <param name="address">Target URL.</param>
/// <param name="requestData">Optional form data (wins over imageData).</param>
/// <param name="imageData">Optional image stream to upload.</param>
/// <returns>Result carrying response code and text.</returns>
protected override async Task <RequestResult> RequestAsync(string address, String requestData = null, Stream imageData = null)
{
    Stream dataStream = null;
    RequestResult requestResult = new RequestResult();
    try {
        UtilityHelper.CountlyLogging("POST " + address);

        //make sure stream is at start
        imageData?.Seek(0, SeekOrigin.Begin);

        HttpWebRequest request = (HttpWebRequest)WebRequest.Create(address);
        request.Method = "POST";
        request.ContentType = "application/json";

        if (imageData != null)
        {
            dataStream = imageData;
        }

        // requestData takes precedence over the image stream.
        if (requestData != null)
        {
            request.ContentType = "application/x-www-form-urlencoded";
            dataStream = UtilityHelper.GenerateStreamFromString(requestData);
        }

        if (dataStream != null)
        {
            using (var stream = request.GetRequestStream())
            {
                CopyStream(dataStream, stream);
                stream.Flush();
            }
        }

        var response = (HttpWebResponse)request.GetResponse();
        requestResult.responseCode = (int)response.StatusCode;
        requestResult.responseText = new StreamReader(response.GetResponseStream()).ReadToEnd();
        return(requestResult);
    } catch (Exception ex) {
        UtilityHelper.CountlyLogging("Encountered a exception while making a POST request, " + ex.ToString());
        return(requestResult);
    } finally {
        // NOTE(review): when imageData was used directly, this also closes the
        // caller's stream — confirm callers expect that.
        if (dataStream != null)
        {
            dataStream.Close();
            dataStream.Dispose();
        }
    }
}
/// <summary>
/// Best-effort rewind of the wrapped stream to its beginning. Failures
/// (null, disposed, or non-seekable stream) are deliberately swallowed —
/// resetting is opportunistic.
/// </summary>
public void ResetStream()
{
    try
    {
        Stream?.Seek(0, SeekOrigin.Begin);
    }
    catch (Exception)
    {
        // Intentionally ignored.
    }
}
/// <summary>
/// Applies a circular crop to whichever image source is configured, in
/// priority order: raw stream (cloned to a MemoryStream first), URL, then
/// compiled drawable resource. A color filter is applied when loading
/// finishes.
/// </summary>
private void Transform()
{
    ImageViewAsync imageAsync;
    if ((imageAsync = CastImageViewToAsync()) == null)
    {
        return;
    }
    if (!_strem.IsNull())
    {
        ImageService.Instance
            .LoadStream(cancelationToken => System.Threading.Tasks.Task.Factory.StartNew(
                () =>
                {
                    cancelationToken.ThrowIfCancellationRequested();
                    // Copy into a private MemoryStream so the source can be reused.
                    var streamClone = new MemoryStream();
                    if (_strem?.CanSeek ?? false)
                    {
                        _strem?.Seek(0, SeekOrigin.Begin);
                    }
                    _strem?.CopyTo(streamClone);
                    // Hand the clone over rewound so the loader reads from the top.
                    if (streamClone?.CanSeek ?? false)
                    {
                        streamClone?.Seek(0, SeekOrigin.Begin);
                    }
                    return (streamClone as Stream);
                }, cancelationToken))
            .Transform(new CircleTransformation())
            .Finish(obj => SetImageColorFilter(_imageColor))
            .Into(imageAsync);
    }
    else if (!Url.IsNullOrWhiteSpace())
    {
        ImageService.Instance
            .LoadUrl(Url)
            .Transform(new CircleTransformation())
            .Finish(obj => SetImageColorFilter(_imageColor))
            .Into(imageAsync);
    }
    else if (DrawableId > 0)
    {
        ImageService.Instance
            .LoadCompiledResource(DrawableId.ToString())
            .Transform(new CircleTransformation())
            .Finish(obj => SetImageColorFilter(_imageColor))
            .Into(imageAsync);
    }
}
/// <summary>
/// Loads a circularly-cropped image into the given view from, in priority
/// order: a raw stream (cloned to a MemoryStream first), a URL, or a
/// compiled drawable resource id. Does nothing if the view cannot be cast to
/// an async image view.
/// </summary>
public static void SetImageTransform(ImageView image, string url = "", Stream stream = default(Stream), int resourceDrawable = 0)
{
    ImageViewAsync imageAsync = null;
    if ((imageAsync = CastImageViewToAsync(image)) == null)
    {
        return;
    }
    if (!stream.IsNull())
    {
        ImageService.Instance
            .LoadStream(cancelationToken => System.Threading.Tasks.Task.Factory.StartNew(
                () =>
                {
                    cancelationToken.ThrowIfCancellationRequested();
                    // Copy into a private MemoryStream so the source can be reused.
                    var streamClone = new MemoryStream();
                    if (stream?.CanSeek ?? false)
                    {
                        stream?.Seek(0, SeekOrigin.Begin);
                    }
                    stream?.CopyTo(streamClone);
                    // Hand the clone over rewound so the loader reads from the top.
                    if (streamClone?.CanSeek ?? false)
                    {
                        streamClone.Seek(0, SeekOrigin.Begin);
                    }
                    return (streamClone as Stream);
                }, cancelationToken))
            .Transform(new CircleTransformation())
            .Into(imageAsync);
    }
    else if (!url.IsNullOrWhiteSpace())
    {
        ImageService.Instance
            .LoadUrl(url)
            .Transform(new CircleTransformation())
            .Into(imageAsync);
    }
    else if (resourceDrawable > 0)
    {
        ImageService.Instance
            .LoadCompiledResource(resourceDrawable.ToString())
            .Transform(new CircleTransformation())
            .Into(imageAsync);
    }
}
/// <summary>
/// POSTs either an image stream or form-encoded request data to the given
/// address. Never throws: on failure the (mostly empty) RequestResult is
/// returned and the error is logged.
/// </summary>
/// <param name="address">Target URL.</param>
/// <param name="requestData">Optional query-string-style data (wins over imageData).</param>
/// <param name="imageData">Optional image stream to upload.</param>
/// <returns>Result carrying response code and text.</returns>
protected override async Task <RequestResult> RequestAsync(string address, String requestData = null, Stream imageData = null)
{
    RequestResult requestResult = new RequestResult();
    try {
        UtilityHelper.CountlyLogging("POST " + address);

        //make sure stream is at start
        imageData?.Seek(0, SeekOrigin.Begin);

        HttpContent httpContent = (imageData != null) ? new StreamContent(imageData) : null;

        if (requestData != null)
        {
            //if there is request data to stream, that means it was too long
            String[] pairsS = requestData.Split('&');
            // BUG FIX: the old code filled a fixed-size array and left default
            // (null-key) entries behind whenever a malformed pair was skipped;
            // FormUrlEncodedContent throws on null keys. A list only ever holds
            // the pairs that actually parsed.
            List<KeyValuePair<string, string>> pairs = new List<KeyValuePair<string, string>>(pairsS.Length);
            for (int a = 0; a < pairsS.Length; a++)
            {
                String[] splitPair = pairsS[a].Split('=');
                if (splitPair.Length <= 1)
                {
                    //string did not contain a '=', skip it
                    UtilityHelper.CountlyLogging("Encountered a faulty request param, skipping it: [" + pairsS[a] + "]");
                    continue;
                }
                String decodedValue = UtilityHelper.DecodeDataForURL(splitPair[1]);
                pairs.Add(new KeyValuePair<string, string>(splitPair[0], decodedValue));
            }
            httpContent = new FormUrlEncodedContent(pairs);
        }

        // NOTE(review): creating an HttpClient per request risks socket
        // exhaustion under load; a shared static instance would be better.
        // At minimum this one is now disposed.
        using (System.Net.Http.HttpClient httpClient = new System.Net.Http.HttpClient())
        {
            System.Net.Http.HttpResponseMessage httpResponseMessage = await httpClient.PostAsync(address, httpContent);
            requestResult.responseText = await httpResponseMessage.Content.ReadAsStringAsync();
            requestResult.responseCode = (int)httpResponseMessage.StatusCode;
            return(requestResult);
        }
    } catch (Exception ex) {
        UtilityHelper.CountlyLogging("Encountered a exception while making a POST request, " + ex.ToString());
        return(requestResult);
    }
}
/// <summary>
/// Seeks the wrapped stream; a seek back to the very beginning re-initializes
/// the underlying stream when it has already advanced past position zero.
/// </summary>
public override long Seek(long offset, SeekOrigin origin)
{
    bool rewindToStart = origin == SeekOrigin.Begin && offset == 0;
    if (rewindToStart && innerStream?.Position > 0)
    {
        InitStream();
    }

    // With no inner stream the reported position falls back to 0.
    long newPosition = innerStream?.Seek(offset, origin) ?? 0;
    pos = newPosition;
    return newPosition;
}
/// <summary>
/// POSTs <paramref name="requestData"/> (URL-decoded then form-encoded) or
/// <paramref name="imageData"/> to <paramref name="address"/>. Returns the
/// response body on success, or null on a non-success status or exception.
/// </summary>
protected override async Task <string> RequestAsync(string address, String requestData = null, Stream imageData = null)
{
    try
    {
        UtilityHelper.CountlyLogging("POST " + address);

        //make sure stream is at start
        imageData?.Seek(0, SeekOrigin.Begin);
        HttpContent httpContent = (imageData != null) ? new StreamContent(imageData) : null;

        if (requestData != null)
        {
            //if there is request data to stream, that means it was too long
            String[] pairsS = UtilityHelper.DecodeDataForURL(requestData).Split('&');

            // Guard against params without '=' (previously an
            // IndexOutOfRangeException aborted the whole request); skip them
            // like the sibling overload does.
            List<KeyValuePair<string, string>> pairs = new List<KeyValuePair<string, string>>(pairsS.Length);
            for (int a = 0; a < pairsS.Length; a++)
            {
                String[] splitPair = pairsS[a].Split('=');
                if (splitPair.Length <= 1)
                {
                    //string did not contain a '=', skip it
                    UtilityHelper.CountlyLogging("Encountered a faulty request param, skipping it: [" + pairsS[a] + "]");
                    continue;
                }
                pairs.Add(new KeyValuePair<string, string>(splitPair[0], splitPair[1]));
            }
            httpContent = new FormUrlEncodedContent(pairs);
        }

        System.Net.Http.HttpClient httpClient = new System.Net.Http.HttpClient();
        System.Net.Http.HttpResponseMessage httpResponseMessage = await httpClient.PostAsync(address, httpContent);

        if (httpResponseMessage.IsSuccessStatusCode)
        {
            return(await httpResponseMessage.Content.ReadAsStringAsync());
        }
        else
        {
            return(null);
        }
    }
    catch (Exception ex)
    {
        UtilityHelper.CountlyLogging("Encountered a exception while making a POST request, " + ex.ToString());
        return(null);
    }
}
/// <summary>
/// Decides whether the file is the XML-based (ZIP-container) format by
/// checking the extension and sniffing the first 8 bytes of content.
/// The stream is always rewound before returning.
/// </summary>
public bool IsXml(string fileName, Stream fileContent)
{
    // No name at all -> not recognizable.
    if (string.IsNullOrEmpty(fileName))
    {
        return(false);
    }

    var extention = Path.GetExtension(fileName);
    bool knownExtension = validExtentions.Any(ext => ext.Equals(extention, StringComparison.InvariantCultureIgnoreCase));
    if (!knownExtension)
    {
        return(false);
    }

    try
    {
        if (fileContent == null)
        {
            throw new ArgumentNullException(nameof(fileContent));
        }

        // Sniff the magic number: a ZIP container means true, the legacy
        // binary (xls) magic means false.
        var fileHeader = new byte[8];
        bool gotHeader = fileContent.Read(fileHeader, 0, 8) == 8;
        if (gotHeader)
        {
            if (FileStartsWithMagicNumber(fileHeader, zipArchiveMagicNumber))
            {
                return(true);
            }
            if (FileStartsWithMagicNumber(fileHeader, xlsMagicNumber))
            {
                return(false);
            }
        }
    }
    finally
    {
        // Leave the stream rewound for the next consumer.
        fileContent?.Seek(0, SeekOrigin.Begin);
    }

    return(false);
}
/// <summary>
/// Attempts to deserialize <paramref name="stream"/> as <paramref name="type"/>.
/// Returns null when the payload does not match, and always rewinds the stream
/// so the caller can retry with another candidate type.
/// </summary>
static Fault DeserializeFault(Type type, Stream stream)
{
    try
    {
        var settings = new DataContractJsonSerializerSettings
        {
            DateTimeFormat = new DateTimeFormat("o", CultureInfo.InvariantCulture),
            EmitTypeInformation = EmitTypeInformation.Never,
        };
        var serializer = new DataContractJsonSerializer(type, settings);
        return(serializer.ReadObject(stream) as Fault);
    }
    catch (SerializationException)
    {
        // swallow it and try the next one
        return(null);
    }
    finally
    {
        stream?.Seek(0, SeekOrigin.Begin);
    }
}
/// <summary>
/// Lazily enumerates the headers of a seekable ZIP stream by locating the
/// end-of-central-directory record, following Zip64 indirection when present,
/// and then walking the central directory entries.
/// </summary>
internal IEnumerable <ZipHeader> ReadSeekableHeader(Stream stream)
{
    var reader = new BinaryReader(stream);

    SeekBackToHeader(stream, reader);
    var eocd_location = stream.Position;
    var entry = new DirectoryEndHeader();
    entry.Read(reader);

    if (entry.IsZip64)
    {
        _zip64 = true;

        // ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR should be before the EOCD
        stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin);
        uint zip64_locator = reader.ReadUInt32();

        if (zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR)
        {
            throw new ArchiveException("Failed to locate the Zip64 Directory Locator");
        }

        var zip64Locator = new Zip64DirectoryEndLocatorHeader();
        zip64Locator.Read(reader);

        // The locator points at the real Zip64 end-of-central-directory record.
        stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin);
        uint zip64Signature = reader.ReadUInt32();

        if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
        {
            throw new ArchiveException("Failed to locate the Zip64 Header");
        }

        var zip64Entry = new Zip64DirectoryEndHeader();
        zip64Entry.Read(reader);
        stream.Seek(zip64Entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
    }
    else
    {
        stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
    }

    long position = stream.Position;
    while (true)
    {
        // Re-sync the stream each iteration: the consumer of this iterator
        // may move the underlying position between yields.
        stream.Position = position;
        uint signature = reader.ReadUInt32();
        var nextHeader = ReadHeader(signature, reader, _zip64);
        position = stream.Position;

        if (nextHeader is null)
        {
            yield break;
        }

        if (nextHeader is DirectoryEntryHeader entryHeader)
        {
            //entry could be zero bytes so we need to know that.
            entryHeader.HasData = entryHeader.CompressedSize != 0;
            yield return(entryHeader);
        }
        else if (nextHeader is DirectoryEndHeader endHeader)
        {
            yield return(endHeader);
        }
    }
}
/// <summary>
/// Applies a binary patch (in <a href="http://www.daemonology.net/bsdiff/">bsdiff</a> format) to the data in
/// <paramref name="input"/> and writes the results of patching to <paramref name="output"/>.
/// </summary>
/// <param name="input">A <see cref="Stream"/> containing the input data.</param>
/// <param name="openPatchStream">A func that can open a <see cref="Stream"/> positioned at the start of the patch data.
/// This stream must support reading and seeking, and <paramref name="openPatchStream"/> must allow multiple streams on
/// the patch to be opened concurrently.</param>
/// <param name="output">A <see cref="Stream"/> to which the patched data is written.</param>
public static void Apply(Stream input, Func <Stream> openPatchStream, Stream output)
{
    // check arguments
    if (input == null)
    {
        throw new ArgumentNullException("input");
    }
    if (openPatchStream == null)
    {
        throw new ArgumentNullException("openPatchStream");
    }
    if (output == null)
    {
        throw new ArgumentNullException("output");
    }

    /*
    * File format:
    * 0 8 "BSDIFF40"
    * 8 8 X
    * 16 8 Y
    * 24 8 sizeof(newfile)
    * 32 X bzip2(control block)
    * 32+X Y bzip2(diff block)
    * 32+X+Y ??? bzip2(extra block)
    * with control block a set of triples (x,y,z) meaning "add x bytes
    * from oldfile to x bytes from the diff block; copy y bytes from the
    * extra block; seek forwards in oldfile by z bytes".
    */

    // read header
    long controlLength, diffLength, newSize;
    using (Stream patchStream = openPatchStream())
    {
        // check patch stream capabilities
        if (!patchStream.CanRead)
        {
            throw new ArgumentException("Patch stream must be readable.", "openPatchStream");
        }
        if (!patchStream.CanSeek)
        {
            throw new ArgumentException("Patch stream must be seekable.", "openPatchStream");
        }

        byte[] header = patchStream.ReadExactly(c_headerSize);

        // check for appropriate magic
        long signature = ReadInt64(header, 0);
        if (signature != c_fileSignature)
        {
            throw new InvalidOperationException("Corrupt patch.");
        }

        // read lengths from header
        controlLength = ReadInt64(header, 8);
        diffLength = ReadInt64(header, 16);
        newSize = ReadInt64(header, 24);
        if (controlLength < 0 || diffLength < 0 || newSize < 0)
        {
            throw new InvalidOperationException("Corrupt patch.");
        }
    }

    // preallocate buffers for reading and writing
    const int c_bufferSize = 1048576;
    byte[] newData = new byte[c_bufferSize];
    byte[] oldData = new byte[c_bufferSize];

    // prepare to read three parts of the patch in parallel:
    // each bzip2 block gets its own independently-positioned stream
    using (Stream compressedControlStream = openPatchStream())
    using (Stream compressedDiffStream = openPatchStream())
    using (Stream compressedExtraStream = openPatchStream())
    {
        // seek to the start of each part
        compressedControlStream.Seek(c_headerSize, SeekOrigin.Current);
        compressedDiffStream.Seek(c_headerSize + controlLength, SeekOrigin.Current);
        compressedExtraStream.Seek(c_headerSize + controlLength + diffLength, SeekOrigin.Current);

        // decompress each part (to read it)
        using (BZip2InputStream controlStream = new BZip2InputStream(compressedControlStream))
        using (BZip2InputStream diffStream = new BZip2InputStream(compressedDiffStream))
        using (BZip2InputStream extraStream = new BZip2InputStream(compressedExtraStream))
        {
            long[] control = new long[3];
            byte[] buffer = new byte[8];

            int oldPosition = 0;
            int newPosition = 0;
            while (newPosition < newSize)
            {
                // read control data: the (x, y, z) triple described above
                for (int i = 0; i < 3; i++)
                {
                    controlStream.ReadExactly(buffer, 0, 8);
                    control[i] = ReadInt64(buffer, 0);
                }

                // sanity-check
                if (newPosition + control[0] > newSize)
                {
                    throw new InvalidOperationException("Corrupt patch.");
                }

                // seek old file to the position that the new data is diffed against
                input.Position = oldPosition;

                int bytesToCopy = (int)control[0];
                while (bytesToCopy > 0)
                {
                    int actualBytesToCopy = Math.Min(bytesToCopy, c_bufferSize);

                    // read diff string
                    diffStream.ReadExactly(newData, 0, actualBytesToCopy);

                    // add old data to diff string; past the end of the old file
                    // the diff bytes are emitted unmodified
                    int availableInputBytes = Math.Min(actualBytesToCopy, (int)(input.Length - input.Position));
                    input.ReadExactly(oldData, 0, availableInputBytes);

                    for (int index = 0; index < availableInputBytes; index++)
                    {
                        newData[index] += oldData[index];
                    }

                    output.Write(newData, 0, actualBytesToCopy);

                    // adjust counters
                    newPosition += actualBytesToCopy;
                    oldPosition += actualBytesToCopy;
                    bytesToCopy -= actualBytesToCopy;
                }

                // sanity-check
                if (newPosition + control[1] > newSize)
                {
                    throw new InvalidOperationException("Corrupt patch.");
                }

                // read extra string (copied verbatim into the output)
                bytesToCopy = (int)control[1];
                while (bytesToCopy > 0)
                {
                    int actualBytesToCopy = Math.Min(bytesToCopy, c_bufferSize);

                    extraStream.ReadExactly(newData, 0, actualBytesToCopy);
                    output.Write(newData, 0, actualBytesToCopy);

                    newPosition += actualBytesToCopy;
                    bytesToCopy -= actualBytesToCopy;
                }

                // adjust position (z may be negative, seeking backwards)
                oldPosition = (int)(oldPosition + control[2]);
            }
        }
    }
}
/// <summary>
/// Merges 2 streams of JSON on the property defined by <code>itemName</code>. The property should be a JSON array
/// </summary>
/// <param name="left">The first stream.</param>
/// <param name="right">The second stream.</param>
/// <param name="itemName">The name of the array property to merge on. When empty, the document roots are treated as the arrays.</param>
/// <param name="nextLinkName">The name of the property containing the next link name.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>A rewound stream with the merged document, or the surviving input when only one side is present.</returns>
internal async Task <Stream?> MergeJsonStreamsAsync(Stream? left, Stream? right, string itemName = "value", string nextLinkName = "nextLink", CancellationToken cancellationToken = default)
{
    // Rewind both inputs so parsing starts from the beginning.
    if (left?.CanSeek == true)
    {
        left?.Seek(0, SeekOrigin.Begin);
    }
    if (right?.CanSeek == true)
    {
        right?.Seek(0, SeekOrigin.Begin);
    }

    // With at most one side present there is nothing to merge.
    if (left == null || right == null)
    {
        return(left ?? right);
    }

    JsonNode? nodeLeft = JsonNode.Parse(left);
    if (left.CanSeek == true)
    {
        left.Seek(0, SeekOrigin.Begin);
    }
    JsonNode? nodeRight = JsonNode.Parse(right);
    if (right.CanSeek == true)
    {
        right.Seek(0, SeekOrigin.Begin);
    }

    JsonArray? leftArray = null;
    JsonArray? rightArray = null;
    if (!string.IsNullOrWhiteSpace(itemName))
    {
        // Merge on a named array property; if one side lacks it entirely,
        // the other side's original stream is returned unchanged.
        if (nodeLeft?[itemName] == null)
        {
            return(right);
        }
        else if (nodeRight?[itemName] == null)
        {
            return(left);
        }
        leftArray = nodeLeft[itemName]?.AsArray();
        rightArray = nodeRight[itemName]?.AsArray();
    }
    else
    {
        // No property name: treat each document root as the array itself.
        leftArray = nodeLeft?.AsArray();
        rightArray = nodeRight?.AsArray();
    }

    if (leftArray != null && rightArray != null)
    {
        // Move items one at a time: a JsonNode must be detached from its
        // parent (Remove) before it can be re-parented (Add), and the
        // deferred query is re-evaluated after each removal.
        var elements = rightArray.Where(i => i != null);
        var item = elements.FirstOrDefault();
        while (item != null)
        {
            rightArray.Remove(item);
            leftArray.Add(item);
            item = elements.FirstOrDefault();
        }
    }

    if (!string.IsNullOrWhiteSpace(itemName) && nodeLeft != null)
    {
        nodeLeft[itemName] = leftArray ?? rightArray;
    }
    else
    {
        nodeLeft = leftArray ?? rightArray;
    }

    // Replace next link with new page's next link
    if (!string.IsNullOrWhiteSpace(nextLinkName))
    {
        var obj1 = nodeLeft as JsonObject;
        if (obj1?[nextLinkName] != null)
        {
            obj1.Remove(nextLinkName);
        }
        if (nodeRight is JsonObject obj2 && obj2?[nextLinkName] != null)
        {
            var nextLink = obj2[nextLinkName];
            obj2.Remove(nextLinkName);
            obj1?.Add(nextLinkName, nextLink);
        }
    }

    // Serialize the merged document into a fresh, rewound stream.
    var stream = new MemoryStream();
    using var writer = new Utf8JsonWriter(stream);
    nodeLeft?.WriteTo(writer);
    await writer.FlushAsync(cancellationToken);
    stream.Position = 0;
    return(stream);
}
/// <summary>
/// Loads multi data from the given index (.idx) and data (.mul) files.
/// Each valid index entry yields a MultiData of 12-byte tile records.
/// Any failure is wrapped and rethrown; all file handles are closed either way.
/// </summary>
public static Multi Load(string idxFile, string mulFile, MulFileAccessMode mode)
{
    IndexFile indexFile = null;
    Stream stream = null;
    BinaryReader reader = null;

    try
    {
        indexFile = IndexFile.Load(idxFile);
        stream = File.OpenRead(mulFile);
        reader = new BinaryReader(stream);

        Multi multi = new Multi();
        for (int i = 0; i < indexFile.Count; i++)
        {
            IndexData indexData = indexFile[i];
            if (!indexData.IsValid)
            {
                continue;
            }

            // Jump to this entry's payload and decode its tiles.
            stream.Seek(indexData.Lookup, SeekOrigin.Begin);

            MultiData multiData = new MultiData();
            multiData.Id = (ushort)i;
            multiData.Tiles = new MultiTile[indexData.Lenght / 12];

            for (int j = 0; j < indexData.Lenght / 12; j++)
            {
                multiData.Tiles[j] = new MultiTile
                {
                    ItemID = reader.ReadUInt16(),
                    X = reader.ReadInt16(),
                    Y = reader.ReadInt16(),
                    Z = reader.ReadInt16(),
                    Flags = reader.ReadInt32()
                };
            }

            multi.data.Add(multiData.Id, multiData);
        }

        Trace.WriteLine(String.Format("Multi: File \"{0}\" succesfully loaded.", mulFile), "MulLib");
        return(multi);
    }
    catch (Exception e)
    {
        throw new Exception("Error loading Multi.", e);
    }
    finally
    {
        if (indexFile != null)
        {
            indexFile.Dispose();
        }
        if (reader != null)
        {
            reader.Close();
        }
        if (stream != null)
        {
            stream.Close();
        }
    }
}
/// <summary>
/// Write changes to the slicing table to a file or stream.
/// Modified slices are rewritten in place; added slices are appended after
/// the last stored slice, padding the file first if it is shorter than expected.
/// </summary>
public void Update(SrezTable srezTable)
{
    if (srezTable == null)
    {
        throw new ArgumentNullException(nameof(srezTable));
    }

    Stream stream = null;
    BinaryWriter writer = null;

    try
    {
        stream = ioStream ?? new FileStream(fileName, FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite);
        writer = new BinaryWriter(stream);

        // record modified slices
        // (Position + 8 skips past the 8-byte encoded timestamp written below,
        // so only the channel data block is overwritten)
        foreach (var srez in srezTable.ModifiedSrezList)
        {
            stream.Seek(srez.Position + 8, SeekOrigin.Begin);
            writer.Write(GetCnlDataBuf(srez.CnlData));
        }

        // setting the recording position of the added slices to the stream,
        // restore slice table if necessary
        var lastSrez = srezTable.LastStoredSrez;

        if (lastSrez == null)
        {
            stream.Seek(0, SeekOrigin.Begin);
        }
        else
        {
            stream.Seek(0, SeekOrigin.End);
            // expected end of the last stored slice:
            // position + (channel count * 9 bytes per channel) + 8-byte timestamp
            long offset = lastSrez.Position + lastSrez.CnlNums.Length * 9 + 8;

            if (stream.Position < offset)
            {
                // the file is shorter than expected: pad with zero bytes up to offset
                var buf = new byte[offset - stream.Position];
                stream.Write(buf, 0, buf.Length);
            }
            else
            {
                stream.Seek(offset, SeekOrigin.Begin);
            }
        }

        // record added slices
        var prevSrezDescr = lastSrez?.SrezDescr;

        foreach (var srez in srezTable.AddedSrezList)
        {
            // recording cutoff channel numbers
            // (an empty buffer marks "same channel list as the previous slice")
            writer.Write(srez.SrezDescr.Equals(prevSrezDescr) ?
                EmptyCnlNumsBuf : GetSrezDescrBuf(srez.SrezDescr));
            prevSrezDescr = srez.SrezDescr;

            // slice data entry
            srez.Position = stream.Position;
            writer.Write(ScadaUtils.EncodeDateTime(srez.DateTime));
            writer.Write(GetCnlDataBuf(srez.CnlData));
            lastSrez = srez;
        }

        // confirmation of successful saving of changes
        srezTable.AcceptChanges();
        srezTable.LastStoredSrez = lastSrez;
    }
    finally
    {
        // close only when this instance owns the file; an injected ioStream stays open
        if (fileMode)
        {
            writer?.Close();
            stream?.Close();
        }
    }
}
/// <summary>
/// Populate the dest object from the FileName slice file.
/// dest may be a SrezTableLight (including SrezTable), a DataTable, or a Trend;
/// anything else raises a ScadaException.
/// </summary>
protected void FillObj(object dest)
{
    Stream stream = null;
    BinaryReader reader = null;
    var fillTime = DateTime.Now;

    var srezTableLight = dest as SrezTableLight;
    var dataTable = dest as DataTable;
    var trend = dest as Trend;
    var srezTable = srezTableLight as SrezTable;

    SrezTableLight.Srez lastStoredSrez = null;

    try
    {
        if (srezTableLight == null && dataTable == null && trend == null)
        {
            throw new ScadaException("Destination object is invalid.");
        }

        // storage facility preparation
        if (srezTableLight != null)
        {
            srezTableLight.Clear();
            srezTableLight.TableName = tableName;
            srezTable?.BeginLoadData();
        }
        else if (dataTable != null)
        {
            // forming the table structure
            dataTable.BeginLoadData();
            dataTable.DefaultView.Sort = "";

            if (dataTable.Columns.Count == 0)
            {
                dataTable.Columns.Add("DateTime", typeof(DateTime));
                dataTable.Columns.Add("CnlNum", typeof(int));
                dataTable.Columns.Add("Val", typeof(double));
                dataTable.Columns.Add("Stat", typeof(int));
                dataTable.DefaultView.AllowNew = false;
                dataTable.DefaultView.AllowEdit = false;
                dataTable.DefaultView.AllowDelete = false;
            }
            else
            {
                dataTable.Rows.Clear();
            }
        }
        else // trend != null
        {
            trend.Clear();
            trend.TableName = tableName;
        }

        // filling the object with data
        stream = ioStream ?? new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
        reader = new BinaryReader(stream);

        var date = ExtractDate(tableName); // slice date determination
        SrezTable.SrezDescr srezDescr = null; // cut description
        int[] cnlNums = null; // link to input channel numbers from slice description
        while (stream.Position < stream.Length)
        {
            // reading the list of channels and CS numbers
            int cnlNumCnt = reader.ReadUInt16();
            if (cnlNumCnt > 0)
            {
                // loading channel numbers to buffer to increase speed
                int cnlNumSize = cnlNumCnt * 2;
                var buf = new byte[cnlNumSize];
                int readSize = reader.Read(buf, 0, cnlNumSize);

                // creating a description of the slice and filling the numbers of the channels from the buffer
                // with checking their uniqueness and orderliness
                if (readSize == cnlNumSize)
                {
                    int prevCnlNum = -1;
                    srezDescr = new SrezTable.SrezDescr(cnlNumCnt);
                    cnlNums = srezDescr.CnlNums;

                    for (var i = 0; i < cnlNumCnt; i++)
                    {
                        int cnlNum = BitConverter.ToUInt16(buf, i * 2);
                        if (prevCnlNum >= cnlNum)
                        {
                            throw new ScadaException("Table is incorrect.");
                        }
                        cnlNums[i] = prevCnlNum = cnlNum;
                    }

                    srezDescr.CalcCS();
                }
            }
            else if (srezDescr == null)
            {
                // a zero channel count is only valid when a previous slice
                // already established the channel list
                throw new Exception("Table is incorrect.");
            }

            // reading and checking the cops (checksum of the channel list)
            ushort cs = reader.ReadUInt16();
            bool csOk = cnlNumCnt > 0 ?
                srezDescr.CS == cs : cs == 1;

            // read slice data
            int cnlCnt = cnlNums.Length; // the number of channels in the slice
            int srezDataSize = cnlCnt * 9; // slice data size
            if (csOk)
            {
                long srezPos = stream.Position;
                double time = reader.ReadDouble();
                var srezDT = ScadaUtils.CombineDateTime(date, time);

                // initialize slice
                SrezTableLight.Srez srez;
                if (srezTable != null)
                {
                    srez = new SrezTable.Srez(srezDT, srezDescr)
                    {
                        State = DataRowState.Unchanged,
                        Position = srezPos
                    };
                }
                else if (srezTableLight != null)
                {
                    srez = new SrezTableLight.Srez(srezDT, cnlCnt);
                    cnlNums.CopyTo(srez.CnlNums, 0);
                }
                else // srezTableLight == null
                {
                    srez = null;
                }

                // read input data
                var bufInd = 0;
                double val;
                byte stat;

                if (trend != null)
                {
                    // select channel data for trend: seek straight to the one
                    // requested channel, then past the rest of the slice
                    int index = Array.BinarySearch <int>(cnlNums, trend.CnlNum);
                    if (index >= 0)
                    {
                        stream.Seek(index * 9, SeekOrigin.Current);
                        var buf = new byte[9];
                        int readSize = reader.Read(buf, 0, 9);

                        if (readSize == 9)
                        {
                            ExtractCnlData(buf, ref bufInd, out val, out stat);

                            var point = new Trend.Point(srezDT, val, stat);
                            trend.Points.Add(point);

                            stream.Seek(srezDataSize - (index + 1) * 9, SeekOrigin.Current);
                        }
                    }
                    else
                    {
                        stream.Seek(srezDataSize, SeekOrigin.Current);
                    }
                }
                else
                {
                    // loading slice data to buffer to increase speed
                    var buf = new byte[srezDataSize];
                    int readSize = reader.Read(buf, 0, srezDataSize);

                    // filling the buffer table
                    if (srezTableLight != null)
                    {
                        for (var i = 0; i < cnlCnt; i++)
                        {
                            ExtractCnlData(buf, ref bufInd, out val, out stat);

                            srez.CnlNums[i] = cnlNums[i];
                            srez.CnlData[i].Val = val;
                            srez.CnlData[i].Stat = stat;

                            if (bufInd >= readSize)
                            {
                                break;
                            }
                        }

                        srezTableLight.AddSrez(srez);
                        lastStoredSrez = srez;
                    }
                    else // dataTable != null
                    {
                        for (var i = 0; i < cnlCnt; i++)
                        {
                            ExtractCnlData(buf, ref bufInd, out val, out stat);

                            var row = dataTable.NewRow();
                            row["DateTime"] = srezDT;
                            row["CnlNum"] = cnlNums[i];
                            row["Val"] = val;
                            row["Stat"] = stat;
                            dataTable.Rows.Add(row);

                            if (bufInd >= readSize)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else
            {
                // skip the slice, considering its size as in the case of a repeated list of channel numbers
                stream.Seek(srezDataSize + 8, SeekOrigin.Current);
            }
        }
    }
    catch (EndOfStreamException)
    {
        // normal file end situation
    }
    catch
    {
        // reset the fill time so callers can see the load failed, then rethrow
        fillTime = DateTime.MinValue;
        throw;
    }
    finally
    {
        // close only when this instance owns the file; an injected ioStream stays open
        if (fileMode)
        {
            reader?.Close();
            stream?.Close();
        }

        if (srezTableLight != null)
        {
            srezTableLight.LastFillTime = fillTime;
            if (srezTable != null)
            {
                srezTable.LastStoredSrez = (SrezTable.Srez)lastStoredSrez;
                srezTable.EndLoadData();
            }
        }

        if (dataTable != null)
        {
            dataTable.EndLoadData();
            dataTable.AcceptChanges();
            dataTable.DefaultView.Sort = "DateTime, CnlNum";
        }

        if (trend != null)
        {
            trend.LastFillTime = fillTime;
            trend.Sort();
        }
    }
}
/// <summary>
/// Seeks the wrapped stream; returns -1 when no underlying stream is attached.
/// </summary>
public override long Seek(long offset, SeekOrigin origin)
{
    if (_stream is null)
    {
        return -1;
    }
    return _stream.Seek(offset, origin);
}
/// <summary>
/// Sets the position within the current stream by delegating to the wrapped stream.
/// </summary>
/// <param name="offset">A byte offset relative to the <paramref name="origin"/> parameter.</param>
/// <param name="origin">A value of type <see cref="T:System.IO.SeekOrigin"/> indicating the reference point used to obtain the new position.</param>
/// <returns>The new position within the current stream.</returns>
/// <exception cref="T:System.IO.IOException">An I/O error occurs.</exception>
/// <exception cref="T:System.NotSupportedException">The stream does not support seeking.</exception>
/// <exception cref="T:System.ObjectDisposedException">Methods were called after the stream was closed.</exception>
public override long Seek(long offset, SeekOrigin origin) => s.Seek(offset, origin);
/// <summary>
/// Returns a readable stream for the object stored at <paramref name="offset"/> in a
/// version-2 pack file, resolving offset-deltas against their base objects.
/// Ref-delta objects are not supported yet and throw NotImplementedException.
/// </summary>
public static Stream GetObject(GitPack pack, Stream stream, int offset, string objectType, GitPackObjectType packObjectType)
{
    if (pack == null)
    {
        throw new ArgumentNullException(nameof(pack));
    }

    if (stream == null)
    {
        throw new ArgumentNullException(nameof(stream));
    }

    // Read the signature
    stream.Seek(0, SeekOrigin.Begin);

    Span <byte> buffer = stackalloc byte[4];
    stream.ReadAll(buffer);

    Debug.Assert(buffer.SequenceEqual(Signature));

    stream.ReadAll(buffer);
    var versionNumber = BinaryPrimitives.ReadInt32BigEndian(buffer);
    Debug.Assert(versionNumber == 2);

    stream.ReadAll(buffer);
    var numberOfObjects = BinaryPrimitives.ReadInt32BigEndian(buffer);

    stream.Seek(offset, SeekOrigin.Begin);

    var(type, decompressedSize) = ReadObjectHeader(stream);

    if (type == GitPackObjectType.OBJ_OFS_DELTA)
    {
        // Offset-delta: the base object lives earlier in this same pack,
        // at (offset - relative offset).
        var baseObjectRelativeOffset = ReadVariableLengthInteger(stream);
        var baseObjectOffset = (int)(offset - baseObjectRelativeOffset);

        var deltaStream = GitObjectStream.Create(stream, decompressedSize);

        int baseObjectlength = ReadMbsInt(deltaStream);
        int targetLength = ReadMbsInt(deltaStream);

        var baseObjectStream = pack.GetObject(baseObjectOffset, objectType);

        return(new GitPackDeltafiedStream(baseObjectStream, deltaStream, targetLength));
    }
    else if (type == GitPackObjectType.OBJ_REF_DELTA)
    {
        // Ref-delta: the base object is identified by a 20-byte id.
        // The base is looked up, but applying the delta is not implemented yet.
        Span <byte> baseObjectId = stackalloc byte[20];
        stream.ReadAll(baseObjectId);

        Stream baseObject = pack.Repository.GetObjectBySha(CharUtils.ToHex(baseObjectId), objectType);

        throw new NotImplementedException();
    }

    // Tips for handling deltas: https://github.com/choffmeister/gitnet/blob/4d907623d5ce2d79a8875aee82e718c12a8aad0b/src/GitNet/GitPack.cs
    if (type != packObjectType)
    {
        throw new GitException();
    }

    return(GitObjectStream.Create(stream, decompressedSize));
}
/// <summary>
/// Adopts the given stream as the content and rewinds it (when non-null)
/// so subsequent reads start from the beginning.
/// </summary>
public void SetContent(Stream content)
{
    Content = content;
    if (Content != null)
    {
        Content.Seek(0, SeekOrigin.Begin);
    }
}
/// <summary>
/// Reads a chunk header from the stream and seeks past the chunk's payload.
/// (The length - 6 skip presumably accounts for bytes already consumed by
/// Chunk.FromStream — confirm against the chunk layout.)
/// </summary>
private void SkipChunk(Stream src)
{
    var header = Chunk.FromStream(src);
    src.Seek(header.length - 6, SeekOrigin.Current);
}
/// <summary>
/// Seeks the stream to the given absolute position and writes the whole array there.
/// </summary>
/// <param name="stream">The target stream; must be writable and seekable.</param>
/// <param name="array">The bytes to write.</param>
/// <param name="position">Absolute offset (from the beginning) at which to write.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="stream"/> or <paramref name="array"/> is null.</exception>
public static void WriteArrayToPosition( this Stream stream, byte[] array, long position )
{
    // Fail fast with a clear exception instead of a NullReferenceException
    // from inside Stream.
    if ( stream == null )
    {
        throw new ArgumentNullException( nameof( stream ) );
    }
    if ( array == null )
    {
        throw new ArgumentNullException( nameof( array ) );
    }

    stream.Seek( position, SeekOrigin.Begin );
    stream.Write( array, 0, array.Length );
}
/// <summary>
/// Reads every entry of a package ZIP stream into this.Files, recursing into
/// embedded containers.zip/skins.zip resources, and selects the manifest file
/// (preferring the newest dnn7 > dnn6 > dnn5 > dnn extension). The input stream
/// is closed when finished.
/// </summary>
private void ReadZipStream(Stream inputStream, bool isEmbeddedZip)
{
    this.Log.StartJob(Util.FILES_Reading);

    // start from the beginning when the stream allows it
    if (inputStream.CanSeek)
    {
        inputStream.Seek(0, SeekOrigin.Begin);
    }

    var unzip = new ZipInputStream(inputStream);
    ZipEntry entry = unzip.GetNextEntry();
    while (entry != null)
    {
        entry.CheckZipEntry();
        if (!entry.IsDirectory)
        {
            // Add file to list
            var file = new InstallFile(unzip, entry, this);
            if (file.Type == InstallFileType.Resources && (file.Name.Equals("containers.zip", StringComparison.InvariantCultureIgnoreCase) || file.Name.Equals("skins.zip", StringComparison.InvariantCultureIgnoreCase)))
            {
                // Temporarily save the TempInstallFolder
                string tmpInstallFolder = this.TempInstallFolder;

                // Create Zip Stream from File
                using (var zipStream = new FileStream(file.TempFileName, FileMode.Open, FileAccess.Read))
                {
                    // Set TempInstallFolder
                    this.TempInstallFolder = Path.Combine(this.TempInstallFolder, Path.GetFileNameWithoutExtension(file.Name));

                    // Extract files from zip (recursive call for the nested archive)
                    this.ReadZipStream(zipStream, true);
                }

                // Restore TempInstallFolder
                this.TempInstallFolder = tmpInstallFolder;

                // Delete zip file
                var zipFile = new FileInfo(file.TempFileName);
                zipFile.Delete();
            }
            else
            {
                this.Files[file.FullName.ToLowerInvariant()] = file;
                if (file.Type == InstallFileType.Manifest && !isEmbeddedZip)
                {
                    // Keep the manifest with the newest extension; two
                    // manifests with the same extension is reported as a failure.
                    if (this.ManifestFile == null)
                    {
                        this.ManifestFile = file;
                    }
                    else
                    {
                        if (file.Extension == "dnn7" && (this.ManifestFile.Extension == "dnn" || this.ManifestFile.Extension == "dnn5" || this.ManifestFile.Extension == "dnn6"))
                        {
                            this.ManifestFile = file;
                        }
                        else if (file.Extension == "dnn6" && (this.ManifestFile.Extension == "dnn" || this.ManifestFile.Extension == "dnn5"))
                        {
                            this.ManifestFile = file;
                        }
                        else if (file.Extension == "dnn5" && this.ManifestFile.Extension == "dnn")
                        {
                            this.ManifestFile = file;
                        }
                        else if (file.Extension == this.ManifestFile.Extension)
                        {
                            this.Log.AddFailure(Util.EXCEPTION_MultipleDnn + this.ManifestFile.Name + " and " + file.Name);
                        }
                    }
                }
            }

            this.Log.AddInfo(string.Format(Util.FILE_ReadSuccess, file.FullName));
        }

        entry = unzip.GetNextEntry();
    }

    if (this.ManifestFile == null)
    {
        this.Log.AddFailure(Util.EXCEPTION_MissingDnn);
    }

    if (this.Log.Valid)
    {
        this.Log.EndJob(Util.FILES_ReadingEnd);
    }
    else
    {
        this.Log.AddFailure(new Exception(Util.EXCEPTION_FileLoad));
        this.Log.EndJob(Util.FILES_ReadingEnd);
    }

    // Close the Zip Input Stream as we have finished with it
    inputStream.Close();
}
/// <summary>
/// Processes one log record: an FMT record (0x80) is unmarshalled into a
/// log_Format and cached by type; any other record is skipped using the
/// cached length for that packet type, or ignored if unknown.
/// </summary>
void logEntryFMT(byte packettype, Stream br)
{
    switch (packettype)
    {
        case 0x80:
            // FMT
            log_Format logfmt = new log_Format();
            object obj = logfmt;
            int len = Marshal.SizeOf(obj);
            byte[] bytearray = new byte[len];
            br.Read(bytearray, 0, bytearray.Length);

            IntPtr i = Marshal.AllocHGlobal(len);
            try
            {
                // copy byte array to ptr, then create structure from ptr.
                // (Fix: the previous code also called PtrToStructure BEFORE
                // Marshal.Copy, reading uninitialized unmanaged memory; that
                // redundant call is removed. FreeHGlobal is now guaranteed
                // even if marshalling throws.)
                Marshal.Copy(bytearray, 0, i, len);
                obj = Marshal.PtrToStructure(i, obj.GetType());
            }
            finally
            {
                Marshal.FreeHGlobal(i);
            }

            logfmt = (log_Format)obj;

            string lgname = ASCIIEncoding.ASCII.GetString(logfmt.name).Trim(new char[] { '\0' });

            logformat[lgname] = logfmt;

            packettypecache[logfmt.type] = new log_format_cache()
            {
                length = logfmt.length,
                type = logfmt.type,
                name = ASCIIEncoding.ASCII.GetString(logfmt.name).Trim(new char[] { '\0' }),
                format = ASCIIEncoding.ASCII.GetString(logfmt.format).Trim(new char[] { '\0' })
            };
            return;

        default:
            int size = 0;
            if (packettypecache[packettype].length != 0)
            {
                var fmt = packettypecache[packettype];
                size = fmt.length;
            }

            // didnt find a match, return unknown packet type
            if (size == 0)
            {
                return;
            }

            // skip the record payload (size - 3 presumably accounts for
            // header bytes already consumed — matches the original skip)
            br.Seek(br.Position + size - 3, SeekOrigin.Begin);
            break;
    }
}
/// <inheritdoc />
public override async Task <int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
    int? result = null;

    // The whole read is wrapped in the shared retry helper so transient HTTP
    // failures reopen the connection and try again.
    await Retry.Do(
        CustomHttpClient.RetryTimes,
        CustomHttpClient.RetryDelay,
        async() =>
    {
        try
        {
            // (Re)open the HTTP response when the requested position moved
            // externally or no response stream exists yet.
            if ((position != lastposition) || (responseStream == null))
            {
                if (position != lastposition)
                {
                    Close();
                }

                lastposition = position;
                var client = await http.GetHttpClient(url);
                if (position != 0)
                {
                    // resume mid-file via an HTTP Range request
                    client.AddRange(position);
                }

                client.Method = "GET";
                response = (HttpWebResponse)await client.GetResponseAsync();
                var lengthStr = response.GetResponseHeader("Content-Length");
                long len;
                if (long.TryParse(lengthStr, out len))
                {
                    // total length = resume offset + remaining bytes reported by the server
                    length = position + len;
                }

                responseStream = response.GetResponseStream();
                if (InitialSeek.HasValue)
                {
                    // NOTE(review): HTTP response streams are typically not
                    // seekable — confirm the concrete stream supports Seek here.
                    responseStream?.Seek(InitialSeek.Value, SeekOrigin.Current);
                }
            }

            Contract.Assert(responseStream != null, "responseStream!=null");

            result = await responseStream.ReadAsync(buffer, offset, count, cancellationToken);
            position += result.Value;
            lastposition += result.Value;
            return(true);
        }
        catch (Exception)
        {
            // drop the connection so the retry loop reopens it cleanly
            Close();
            throw;
        }
    }, http.GeneralExceptionProcessor).ConfigureAwait(false);

    if (result == null)
    {
        throw new NullReferenceException("Read result was not set");
    }

    return(result.Value);
}