/// <summary>
/// Loads the four binary metadata heaps (#Blob, #GUID, #US, #Strings) relative to
/// the metadata root, resolving each stream header by name.
/// </summary>
/// <param name="parser">Parser over the PE image.</param>
/// <param name="metaRootPosition">Absolute position of the metadata root; heap offsets are relative to it.</param>
/// <param name="shByName">Stream headers keyed by stream name.</param>
public CLRMetaStreamBinaryData(StreamParser parser, long metaRootPosition, Dictionary<string, CLRStreamHeader> shByName)
{
    BlobData = LoadStream(parser, metaRootPosition, "#Blob", shByName);
    GuidData = LoadStream(parser, metaRootPosition, "#GUID", shByName);
    USData = LoadStream(parser, metaRootPosition, "#US", shByName);
    StringData = LoadStream(parser, metaRootPosition, "#Strings", shByName);
}
/// <summary>
/// Reads the DOS (MZ) header: validates the 2-byte magic, then extracts the
/// offset of the PE header (e_lfanew).
/// </summary>
/// <exception cref="ParseFailedException">The magic is not "MZ" (0x5a4d).</exception>
public DOSHeader(StreamParser parser)
{
    // "MZ" little-endian signature identifies a DOS executable header.
    if (parser.ReadU16() != 0x5a4d)
        throw new ParseFailedException("Invalid DOS header");
    parser.Skip(58); // skip the remainder of the DOS header up to e_lfanew
    PEHeaderOffset = parser.ReadU32();
}
/// <summary>
/// Loads a single named metadata stream into a byte array by seeking to its
/// offset relative to the metadata root and reading its full size.
/// </summary>
/// <param name="parser">Parser over the PE image.</param>
/// <param name="metaRootPosition">Absolute position of the metadata root.</param>
/// <param name="key">Stream name to load, e.g. "#Blob".</param>
/// <param name="shByName">Stream headers keyed by stream name.</param>
/// <returns>The raw bytes of the stream.</returns>
/// <exception cref="ParseFailedException">No stream with the given name exists.</exception>
private byte[] LoadStream(StreamParser parser, long metaRootPosition, string key, Dictionary<string, CLRStreamHeader> shByName)
{
    // Bug fix: the error message previously concatenated the dictionary itself
    // ("... " + shByName) instead of the missing stream's name.
    if (!shByName.TryGetValue(key, out CLRStreamHeader header))
        throw new ParseFailedException("Missing metadata stream " + key);
    parser.Seek(metaRootPosition + header.Offset);
    byte[] bytes = new byte[header.Size];
    parser.Read(bytes, 0, header.Size);
    return bytes;
}
/// <summary>
/// Parses the XML text in one shot: the stream-start node is attached to the
/// document's child nodes and each stream element to the document's root element.
/// </summary>
/// <param name="xml">XML text; encoded as UTF-8 before being pushed to the parser.</param>
/// <param name="doc">Document that receives the parsed nodes.</param>
public static void Load(string xml, Document doc)
{
    var parser = new StreamParser();
    parser.OnStreamStart += (sender, node) => doc.ChildNodes.Add(node);
    parser.OnStreamElement += (sender, node) => doc.RootElement.ChildNodes.Add(node);

    byte[] data = System.Text.Encoding.UTF8.GetBytes(xml);
    parser.Push(data, 0, data.Length);
}
/// <summary>
/// Builds a DOM for the given XML by pushing the UTF-8 encoded text through a
/// StreamParser whose start/element/end events populate the supplied document.
/// </summary>
/// <param name="xml">XML text to parse.</param>
/// <param name="d">Target document filled in by the parser event handlers.</param>
public DomLoader(string xml, Document d) { doc = d; sp = new StreamParser(); sp.OnStreamStart += sp_OnStreamStart; sp.OnStreamElement += sp_OnStreamElement; sp.OnStreamEnd += sp_OnStreamEnd; byte[] b = Encoding.UTF8.GetBytes(xml); sp.Push(b, 0, b.Length); }
/// <summary>
/// Constructs a DOM loader: subscribes this loader's handlers to the parser's
/// start/element/end events, then feeds the UTF-8 encoded XML through the parser.
/// </summary>
/// <param name="xml">XML text to parse.</param>
/// <param name="d">Target document populated by the event handlers.</param>
public DomLoader(string xml, Document d)
{
    doc = d;
    sp = new StreamParser();
    sp.OnStreamStart += sp_OnStreamStart;
    sp.OnStreamElement += sp_OnStreamElement;
    sp.OnStreamEnd += sp_OnStreamEnd;

    byte[] data = System.Text.Encoding.UTF8.GetBytes(xml);
    sp.Push(data, 0, data.Length);
}
/// <summary>
/// Reads one PE section header from the current parser position. Fields are
/// read in the exact on-disk order, so the statement order below must not change.
/// </summary>
public PESectionHeader(StreamParser parser)
{
    Name = parser.ReadUTF8String(8);    // fixed 8-byte section name, e.g. ".text"
    Misc = parser.ReadU32();            // union field: PhysicalAddress / VirtualSize
    VirtualAddress = parser.ReadU32();
    SizeOfRawData = parser.ReadU32();
    PointerToRawData = parser.ReadU32(); // file offset of the section's raw data
    PointerToRelocations = parser.ReadU32();
    PointerToLinenumbers = parser.ReadU32();
    NumberOfRelocations = parser.ReadU16();
    NumberOfLinenumbers = parser.ReadU16();
    Characteristics = parser.ReadU32(); // section flags
}
/// <summary>
/// Reads the PE file header after validating the 4-byte "PE\0\0" signature.
/// Fields are read in on-disk order; do not reorder.
/// </summary>
/// <exception cref="ParseFailedException">The signature is not 0x00004550 ("PE\0\0").</exception>
public PEHeader(StreamParser parser)
{
    uint peSig = parser.ReadU32();
    if (peSig != 0x00004550) throw new ParseFailedException("Bad PE signature"); // "PE\0\0"
    Machine = parser.ReadU16();
    NumberOfSections = parser.ReadU16();
    TimeDateStamp = parser.ReadU32();
    PointerToSymbolTable = parser.ReadU32();
    NumberOfSymbols = parser.ReadU32();
    SizeOfOptionalHeader = parser.ReadU16(); // zero means no NT optional header follows
    Characteristics = parser.ReadU16();
}
/// <summary>
/// Parses a PE image: DOS header, PE header (requiring an NT optional header),
/// the optional header, and every section header. Exposes the section headers
/// and the optional header's data directory.
/// </summary>
/// <exception cref="ParseFailedException">The image has no NT optional header.</exception>
public PEAssembly(StreamParser parser)
{
    var dos = new DOSHeader(parser);
    parser.Seek(dos.PEHeaderOffset);

    var pe = new PEHeader(parser);
    if (pe.SizeOfOptionalHeader == 0)
        throw new ParseFailedException("PE missing NT header");

    var optional = new PEOptionalHeader(parser);
    var sections = new PESectionHeader[pe.NumberOfSections];
    for (int i = 0; i < sections.Length; i++)
        sections[i] = new PESectionHeader(parser);

    SectionHeaders = sections;
    DataDirectory = optional.DataDirectory;
}
/// <summary>
/// Elements whose tag names use non-ASCII (Devanagari) characters must parse
/// correctly, including their text payload.
/// </summary>
public void XmlWithUnicodeTagsThatContainPayloadTest()
{
    var parser = new StreamParser();
    XmppXElement result = null;
    parser.OnStreamElement += (XmppXElement e) => result = e;

    string xml = @"<foo><फ़क>bar</फ़क></foo>";
    byte[] data = Encoding.UTF8.GetBytes(xml);
    parser.Write(data, 0, data.Length);

    result.Name.LocalName.ShouldBe("फ़क");
    result.Value.ShouldBe("bar");
}
/// <summary>
/// Builds the fixtures shared by the tests: a stream parser accepting the
/// board alphabet, a grid parser mapping '.'/'W'/'B' to cell colors, and a
/// text drawer mapping colors back to characters ('0' marks highlights).
/// </summary>
public void Initialize()
{
    streamParser = new StreamParser(new[] { '.', 'B', 'W' });

    gridParser = new GridParser<CellColor>(new Dictionary<char, CellColor>
    {
        { '.', CellColor.Blank },
        { 'W', CellColor.White },
        { 'B', CellColor.Black }
    });

    gridDrawer = new GridTextDrawer<CellColor>(Console.Out, new Dictionary<CellColor, char>
    {
        { CellColor.Blank, '.' },
        { CellColor.White, 'W' },
        { CellColor.Black, 'B' }
    }, '0');
}
/// <summary>
/// Calling StartStreaming twice must start the underlying connection at most once.
/// </summary>
public void StreamIsNotStartedTwice()
{
    // Arrange
    var connection = new Mock<IStreamingConnection>();
    connection
        .Setup(s => s.Start(It.IsAny<Func<IStreamContent, Task>>()))
        .Returns(Task.FromResult(new List<IStreaming>()))
        .Verifiable();
    var sut = StreamParser.Create(connection.Object, null);

    // Act
    sut.StartStreaming();
    sut.StartStreaming();

    // Assert
    connection.Verify(s => s.Start(It.IsAny<Func<IStreamContent, Task>>()), Times.AtMostOnce());
}
/// <summary>
/// A short space-delimited string run through the pooled parser must yield one
/// part per token ("Hello", "World", "!!!" = 3 parts).
/// </summary>
public void ParsingOnPool_Works()
{
    // arrange
    const string input = "Hello World !!!";
    var stream = new MemoryStream(Encoding.UTF8.GetBytes(input));
    stream.Seek(0, SeekOrigin.Begin);
    var processor = new MockPartProcessor();
    var sut = new StreamParser(new Splitter(' '), Encoding.UTF8, 1024 * 1024);

    // act
    sut.ParseStream(stream, processor);

    // assert
    Assert.Equal(3, processor.Count);
}
/// <summary>
/// ReadInt32 must skip leading whitespace, parse signed integers, and return
/// a value-less nullable when the input holds no digits.
/// </summary>
public void StreamParser_ReadInt32_1()
{
    var parser = StreamParser.FromString(" \t1234 ogo");
    int? value = parser.ReadInt32();
    Assert.IsTrue(value.HasValue);
    Assert.AreEqual(1234, value.Value);

    parser = StreamParser.FromString(" -1234 ");
    value = parser.ReadInt32();
    Assert.IsTrue(value.HasValue);
    Assert.AreEqual(-1234, value.Value);

    parser = StreamParser.FromString(" ");
    value = parser.ReadInt32();
    Assert.IsFalse(value.HasValue);
}
/// <summary>
/// Creates a connection wrapper for an accepted client socket: wires the stream
/// parser events, resets per-user state, and begins the first asynchronous receive.
/// </summary>
/// <param name="UserSocket">Accepted client socket.</param>
/// <param name="SID">Session identifier assigned to this connection.</param>
public Connection(Socket UserSocket, string SID)
{
    streamParser = new StreamParser();
    streamParser.OnStreamStart += streamParser_OnStreamStart;
    streamParser.OnStreamEnd += streamParser_OnStreamEnd;
    streamParser.OnStreamElement += streamParser_OnStreamElement;

    SessionId = SID;
    buffer = new byte[BUFFERSIZE];
    ConnectionSocket = UserSocket;

    Authenticated = false;
    Username = "";
    UserStatus = ShowType.NONE;
    UserStatusText = "";
    StartTime = DateTime.Now;

    ConnectionSocket.BeginReceive(buffer, 0, BUFFERSIZE, 0, ReadCallback, null);
}
//~XmppWebSocketConnection()
//{
//    Dispose();
//}
#endregion

#region 初始化方法
/// <summary>
/// Initializes the helper objects owned by this connection: constructs the
/// stream parser and subscribes its start/end/element/error events, sets the
/// presence/heartbeat defaults, and creates the presence, roster and message
/// helpers bound to this instance.
/// </summary>
private void InitPropObj()
{
    m_StreamParser = new StreamParser();
    m_StreamParser.OnStreamStart += new StreamHandler(StreamParserOnStreamStart);
    m_StreamParser.OnStreamEnd += new StreamHandler(StreamParserOnStreamEnd);
    m_StreamParser.OnStreamElement += new StreamHandler(StreamParserOnStreamElement);
    m_StreamParser.OnStreamError += new StreamError(StreamParserOnStreamError);
    m_StreamParser.OnError += new ErrorHandler(StreamParserOnError);
    AutoPresence = true;
    HeartbeatPeriod = 0; // default 0 minutes: the keep-alive heartbeat is disabled
    // m_IqGrabber = new WSIqGrabber(this);
    m_PresenceManager = new WSPresenceManager(this);
    m_RosterManager = new WSRosterManager(this);
    m_PresenceGrabber = new WSPresenceGrabber(this);
    m_MessageGrabber = new WSMessageGrabber(this);
}
/// <summary>
/// Parses a REQMOD ICAP request that carries no body and verifies the Host
/// header plus the single request-header encapsulation.
/// </summary>
public void EmptyReqModParserTest()
{
    using (Stream stream = ResourceStream.GetStream("Resources.test-reqmod-no-body.txt"))
    {
        Assert.NotNull(stream);
        StreamParser parser = new StreamParser();
        IcapRequestMessage message = parser.Parse(stream);
        string hostHeader = message.Headers["Host"];
        Assert.NotNull(hostHeader);
        // Fix: xUnit's Assert.Equal signature is (expected, actual); the
        // arguments were swapped, producing misleading failure messages.
        Assert.Equal("icap-server.net", hostHeader);
        Assert.Equal(1, message.Encapsulations.Count);
        Assert.Equal(EncapsulationType.RequestHeader, message.Encapsulations[0].Type);
        Assert.Equal(0, message.Encapsulations[0].Data.IndexOf("GET / HTTP/1.1"));
    }
}
/// <summary>
/// Closes the transport: publishes the Closing state, stops async reads,
/// disposes and nulls the parser/reader/writer/socket, clears the ignorable
/// certificate-error list, and always finishes by publishing Closed.
/// </summary>
public override void Close()
{
    try
    {
        this.PublishStateChange(TransportState.Closing);
        this.StopAsyncReads();
        if (this.parser != null) { this.parser.Dispose(); this.parser = null; }
        if (this.reader != null) { this.reader.Dispose(); this.reader = null; }
        if (this.writer != null) { this.writer.Dispose(); this.writer = null; }
        if (this.socket != null) { this.socket.Dispose(); this.socket = null; }
        if (this.ignorableServerCertificateErrors != null) { this.ignorableServerCertificateErrors.Clear(); this.ignorableServerCertificateErrors = null; }
    }
    // Best-effort teardown: failures while disposing during close are
    // deliberately swallowed so the Closed state is still published.
    catch { }
    finally
    {
        this.PublishStateChange(TransportState.Closed);
    }
}
/// <summary>
/// Reads a board description from stdin, parses it into a grid, computes the
/// possible moves for Black, and draws the annotated board. Retries on parse
/// failure; exits after one successful draw and a key press.
/// </summary>
static void Main(string[] args)
{
    var streamParser = new StreamParser(new[] { '.', 'B', 'W' });
    var gridParser = new GridParser<CellColor>(new Dictionary<char, CellColor>
    {
        { '.', CellColor.Blank },
        { 'W', CellColor.White },
        { 'B', CellColor.Black }
    });
    var drawer = new GridTextDrawer<CellColor>(Console.Out, new Dictionary<CellColor, char>
    {
        { CellColor.Blank, '.' },
        { CellColor.White, 'W' },
        { CellColor.Black, 'B' }
    }, '0');

    while (true)
    {
        Console.Clear();
        if (!streamParser.TryParse(Console.In, out string streamResult))
            continue;
        if (!gridParser.TryParse(streamResult, out var grid))
            continue;

        Console.WriteLine();
        var owner = CellColor.Black;
        var finder = new PossibleMoveCellFinder(grid);
        var moves = finder.Find(owner);
        drawer.Draw(grid, moves, owner);
        Console.ReadKey();
        break;
    }
}
/// <summary>
/// An empty (keep-alive) payload pushed through the streaming connection must
/// be ignored; completing without an exception is the assertion.
/// </summary>
public async Task KeepAliveMessageIsIgnored()
{
    // Arrange: an empty string is the keep-alive payload.
    var execute = new Mock<ITwitterExecute>();
    StreamContent content = new StreamContent(execute.Object, "");
    var connection = new Mock<IStreamingConnection>();
    connection
        .Setup(s => s.Start(It.IsAny<Func<IStreamContent, Task>>()))
        .Callback<Func<StreamContent, Task>>(func => func(content))
        .Returns(Task.FromResult(new List<IStreaming>()));
    var parser = StreamParser.Create(connection.Object);

    // Act
    parser.StartStreaming();
    await parser.StreamingTask;

    // Assert: no exception means the keep-alive was ignored.
}
/// <summary>
/// Wraps an accepted TCP socket as an XMPP connection: wires the stream-parser
/// events, allocates a receive buffer sized to the socket's receive buffer,
/// and opens a single NetworkStream (owning the socket) used for both send
/// and receive.
/// </summary>
/// <param name="socket">Accepted socket; ownership transfers to the NetworkStream.</param>
/// <param name="maxPacket">Maximum allowed packet size in bytes.</param>
/// <exception cref="ArgumentNullException"><paramref name="socket"/> is null.</exception>
public TcpXmppConnection(Socket socket, long maxPacket)
{
    if (socket == null)
    {
        // Fix: nameof keeps the reported parameter name correct across renames.
        throw new ArgumentNullException(nameof(socket));
    }
    Id = UniqueId.CreateNewId();
    streamParser = new StreamParser();
    streamParser.Reset();
    streamParser.OnStreamStart += StreamParserOnStreamStart;
    streamParser.OnStreamElement += StreamParserOnStreamElement;
    streamParser.OnStreamEnd += StreamParserOnStreamEnd;
    buffer = new byte[socket.ReceiveBufferSize];
    remoteEndPoint = socket.RemoteEndPoint;
    // ownsSocket: true - disposing the stream also closes the socket.
    sendStream = receiveStream = new NetworkStream(socket, true);
    this.maxPacket = maxPacket;
    log.DebugFormat("Create new connection {0} with {1}", Id, remoteEndPoint);
}
/// <summary>
/// Parses a CLR (.NET) assembly from a PE image: reads the PE headers, locates
/// the .text section, reads the CLI header and the metadata root, then digests
/// the IL body of every MethodDef row with a non-zero RVA.
/// </summary>
/// <param name="parser">Parser positioned at the start of the PE image.</param>
/// <exception cref="ParseFailedException">The image has no .text section.</exception>
public CLRAssembly(StreamParser parser)
{
    PEAssembly peAssembly = new PE.PEAssembly(parser);
    // NOTE(review): importAddressDD and importTableDD are read but never used
    // in the visible code; clrHeaderDD is likewise unused below (the CLI header
    // is instead read from the start of .text) - confirm intent.
    RvaAndSize importAddressDD = peAssembly.DataDirectory[12];
    RvaAndSize importTableDD = peAssembly.DataDirectory[1];
    RvaAndSize clrHeaderDD = peAssembly.DataDirectory[14];
    PE.PESectionHeader textSection = null;
    for (int i = 0; i < peAssembly.SectionHeaders.Length; i++)
    {
        PE.PESectionHeader sheader = peAssembly.SectionHeaders[i];
        if (sheader.Name == ".text")
            textSection = sheader;
    }
    if (textSection == null)
        throw new ParseFailedException("PE missing text section");
    // Assumes the CLI header sits at the very start of .text's raw data.
    parser.Seek(textSection.PointerToRawData);
    CLRHeader clrHeader = new CLRHeader(parser);
    // Parse metadata
    parser.Seek(peAssembly.ResolveRva(clrHeader.MetaData.RelativeVirtualAddress));
    MetaData = new CLRMetaData(parser, this);
    // Parse methods
    ICLRTable methodDefTable = MetaData.MetaDataTables.GetTable((int)CLRMetaDataTables.TableIndex.MethodDef);
    for (uint i = 0; i < methodDefTable.NumRows; i++)
    {
        CLRMethodDefRow methodDef = (CLRMethodDefRow)methodDefTable.GetRow(i);
        // RVA 0 marks methods with no IL body (e.g. abstract/extern).
        if (methodDef.RVA != 0)
        {
            parser.Seek(peAssembly.ResolveRva(methodDef.RVA));
            methodDef.DigestMethod(MetaData.MetaDataTables.MetaDataParser);
        }
    }
}
/// <summary>
/// Reads the CLI (COR20) header: validates the header size (0x48 bytes), then
/// captures the runtime version, metadata directory, flags, entry point token,
/// and the remaining directory entries in on-disk order.
/// </summary>
/// <exception cref="ParseFailedException">Header size is not 0x48.</exception>
/// <exception cref="NotImplementedException">EntryPointToken is non-zero (unsupported).</exception>
public CLRHeader(StreamParser parser)
{
    // NOTE(review): 8 bytes are skipped before the size field is read - confirm
    // this matches the exact position the parser is seeked to by the caller.
    parser.Skip(8);
    HeaderSize = parser.ReadU32();
    if (HeaderSize != 0x48) throw new ParseFailedException("Invalid CLI header");
    MajorRuntimeVersion = parser.ReadU16();
    MinorRuntimeVersion = parser.ReadU16();
    MetaData = new RvaAndSize(parser);
    Flags = parser.ReadU32();
    EntryPointToken = parser.ReadU32();
    Resources = new RvaAndSize(parser);
    StrongNameSignature = new RvaAndSize(parser);
    CodeManagerTable = new RvaAndSize(parser);
    VTableFixups = new RvaAndSize(parser);
    ExportAddressTableJumps = new RvaAndSize(parser);
    ManagedNativeHeader = new RvaAndSize(parser);
    if (EntryPointToken != 0) throw new NotImplementedException(); // Need to figure this out
}
/// <summary>
/// Parses the CLR metadata root: validates the "BSJB" magic, reads the version
/// string (length padded to a 4-byte boundary), collects the stream headers by
/// name (rejecting duplicates), loads the binary heaps, and finally parses the
/// #~ table stream.
/// </summary>
/// <param name="parser">Parser positioned at the metadata root.</param>
/// <param name="assembly">Owning assembly, stored on this instance.</param>
public CLRMetaData(StreamParser parser, CLRAssembly assembly)
{
    Assembly = assembly;
    long metaRootPosition = parser.Position; // stream offsets are relative to this
    uint magic = parser.ReadU32();
    if (magic != 0x424a5342) throw new ParseFailedException("Bad metadata magic"); // "BSJB"
    MajorVersion = parser.ReadU16();
    MinorVersion = parser.ReadU16();
    parser.Skip(4); // Reserved
    uint versionLength = parser.ReadU32();
    if (versionLength > 255) throw new ParseFailedException("Oversized section length");
    // Round the version string length up to the next multiple of 4.
    uint paddedLength = versionLength + 3;
    paddedLength -= paddedLength % 4;
    Version = parser.ReadUTF8String((int)versionLength);
    parser.Skip(paddedLength - versionLength);
    Flags = parser.ReadU16();
    ushort nStreams = parser.ReadU16();
    // NOTE(review): StreamHeaders is allocated but its elements are never
    // populated in the loop below - only the by-name dictionary is filled.
    StreamHeaders = new CLRStreamHeader[nStreams];
    Dictionary<string, CLRStreamHeader> streamHeadersByName = new Dictionary<string, CLRStreamHeader>();
    for (int i = 0; i < nStreams; i++)
    {
        CLRStreamHeader sh = new CLRStreamHeader(parser);
        if (streamHeadersByName.ContainsKey(sh.Name)) throw new ParseFailedException("Duplicate metadata stream");
        streamHeadersByName[sh.Name] = sh;
    }
    // Strings US GUID Blob
    CLRMetaStreamBinaryData binData = new CLRMetaStreamBinaryData(parser, metaRootPosition, streamHeadersByName);
    // Parse metadata tables
    parser.Seek(streamHeadersByName["#~"].Offset + metaRootPosition);
    MetaDataTables = new CLRMetaDataTables(parser, this, binData);
}
/// <summary>
/// Writes one default FIX message into a memory stream and verifies that the
/// observable StreamParser pushes exactly one parsed message to its subscriber
/// and completes within the 5-second cancellation window.
/// </summary>
public async Task GivenStreamedMessage_Observable_PushesParsedMessages()
{
    using (var stream = new MemoryStream())
    {
        var mockMessageParser = new MockMessageParser();
        int messages = 0;
        var cancellation = new CancellationTokenSource(TimeSpan.FromSeconds(5));
        // Fix: RunContinuationsAsynchronously prevents the awaiting test body
        // from running inline on the observer-callback thread when SetResult
        // is called (recommended TCS usage).
        var taskCompletion = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
        byte[] msg = TestFixMessageBuilder.CreateDefaultMessage();
        await stream.WriteAsync(msg);
        stream.Position = 0;
        var uut = new StreamParser<TestTypeParent>(stream, mockMessageParser, SupportedFixVersion.Fix44);
        uut.Subscribe(
            parsedObject => messages++,
            ex => taskCompletion.SetResult(false),
            () => taskCompletion.SetResult(true));
        var listener = Task.Run(() => uut.ListenAsync(cancellation.Token));

        Assert.True(await taskCompletion.Task);
        Assert.Equal(1, messages);
        await listener;
    }
}
/// <summary>
/// Reads a data-directory entry: a 32-bit RVA followed by a 32-bit size.
/// </summary>
public RvaAndSize(StreamParser parser)
{
    // Read order matters: the RVA precedes the size in the stream.
    uint rva = parser.ReadU32();
    uint size = parser.ReadU32();
    m_rva = rva;
    m_size = size;
}
/// <summary>
/// Harvests WMS inventory files from the source directory repository: extracts
/// any .txt.zip archives whose .txt is not already present, determines which
/// files are new or modified via the harvester DirectoryRecords table, parses
/// each file with the StreamParser variant matching its header format, filters
/// and de-duplicates the records, and bulk-imports them into the statistics
/// database. A failed file sets exceptionHandled so the whole operation is
/// reported as failed after all files have been attempted.
/// </summary>
/// <param name="runDate">Nominal run date; per-file dates come from GetFileDate.</param>
/// <param name="logMessage">Sink for progress and diagnostic messages.</param>
/// <param name="cancellationToken">Checked between phases and honored via ThrowIfCancellationRequested.</param>
// NOTE(review): two string literals below contain raw line breaks, which is not
// valid in a regular C# string literal - this looks like an artifact of how the
// file was captured; verify against the original source.
public override void Execute(DateTime runDate, Action <string> logMessage, CancellationToken cancellationToken) { using (IDirectoryRepository source = RepositoryFactory.CreateDirectoryRepository(_directoryArgs)) { logMessage($"Connected to source repository '{source.Name}' ({source.ConnectionString})"); Regex filePattern = new Regex(@"^((ITU[.]Item_inventories[.]\d{8})|(ITU[.]Circulation_Item_Inventories[.]\d{8}))[.]txt([.]zip)?$", RegexOptions.IgnoreCase); var sourceFilesBeforeZip = source.ListFiles().Where(y => filePattern.IsMatch(y.Name)).ToArray(); foreach (DirectoryObjectMetadata zipFile in sourceFilesBeforeZip.Where(x => x.Path.Contains(".txt.zip"))) { if (sourceFilesBeforeZip.All(x => x.Name != zipFile.Name.Replace(".zip", ""))) { ZipFile.ExtractToDirectory(zipFile.Path, Path.GetDirectoryName(zipFile.Path.Replace(".txt.zip", ""))); } //source.DeleteFile(zipFile.Path); } List <String> modified = new List <String>(); Int32 newCount = 0; List <DirectoryObjectMetadata> sourceFiles = source.ListFiles("/").Where(y => filePattern.IsMatch(y.Name) && !y.Name.EndsWith(".zip")).ToList(); using (IDatabaseRepository <IHarvesterDataContext> harvester = RepositoryFactory.CreateHarvesterRepository(_harvesterArgs)) { logMessage($"Connected to database '{harvester.Name}' ({harvester.ConnectionString})"); Entities.Repository repository = harvester.DataContext.Repositories.First(y => y.Name == source.Name); if (OperationID == 0) { logMessage("Warning: OperationID was not set properly. 
Correcting this."); OperationID = harvester.DataContext.Operations.First(d => d.Name == Name).ID; } Dictionary <String, DirectoryRecord> dictionary = harvester.DataContext.DirectoryRecords.Where(d => d.OperationID == OperationID && d.RepositoryID == repository.ID).ToDictionary(d => d.FilePath); foreach (DirectoryObjectMetadata file in sourceFiles) { if (!dictionary.ContainsKey(file.Path)) { modified.Add(file.Name); newCount++; } else { DirectoryRecord element = dictionary[file.Path]; if (file.ModifiedDate > element.FileModifiedDate) { modified.Add(file.Name); } } } } if (modified.Count == 0 && newCount == 0) { logMessage("No Records to be processed."); return; } logMessage($"Discovered {modified.Count} files to be processed ({newCount} new and {modified.Count - newCount} updated)."); if (cancellationToken.IsCancellationRequested) { source.Dispose(); cancellationToken.ThrowIfCancellationRequested(); } using (IDatabaseRepository <IStatisticsDataContext> destination = RepositoryFactory.CreateStatisticsRepository(_databaseArgs)) { logMessage($"Connected to destination database '{destination.Name}' ({destination.ConnectionString})"); bool exceptionHandled = false; List <DirectoryObjectMetadata> successfulFiles = new List <DirectoryObjectMetadata>(); StreamParser <WmsInventoryRecord> Parser = new StreamParser <WmsInventoryRecord>(); StreamParser <WmsInventoryRecordEdge> ParserEdge = new StreamParser <WmsInventoryRecordEdge>(); StreamParser <WmsInventoryRecordDiff> ParserDiff = new StreamParser <WmsInventoryRecordDiff>(); StreamParser <WmsInventoryRecord2018> Parser2018 = new StreamParser <WmsInventoryRecord2018>(); if (cancellationToken.IsCancellationRequested) { source.Dispose(); cancellationToken.ThrowIfCancellationRequested(); } var completedFiles = modified.Select <string, (string file, DateTime RunDate, IEnumerable <IWmsInventoryRecord> records)>(file => { logMessage($"Processing '{file}':"); Stream inputStream = source.OpenFile(file); DateTime fileRunDate = 
GetFileDate(file); string headers = ExtractHeader(inputStream); //logMessage(headers); if (IsWmsInventoryRecord(headers)) { return(file, fileRunDate, Parser.ParseStream(inputStream)); } if (IsWmsInventoryRecord2018(headers)) { return(file, fileRunDate, Parser2018.ParseStream(inputStream)); } if (IsWmsInventoryRecordDiff(headers)) { return(file, fileRunDate, ParserDiff.ParseStream(inputStream)); } if (IsWmsInventoryRecordEdge(headers)) { return(file, fileRunDate, ParserEdge.ParseStream(inputStream)); } throw new InvalidDataException($"Header format not recognized: '{headers}'"); }).Select(wmsFileRecord => { var parsed = wmsFileRecord.records.Select(wms => new InventoryRecord { OclcNumber = wms.OclcNumber, Title = ParseTitle(wms.Title), Author = wms.Author, MaterialFormat = wms.MaterialFormat.ToString(), Barcode = wms.Barcode, Cost = wms.Cost, LastInventoriedDate = wms.LastInventoriedDate, DeletedDate = wms.ItemDeletedDate, ItemType = wms.ItemType.ToString(), CallNumber = wms.CallNumber, ShelvingLocation = wms.ShelvingLocation?.ToString(), CurrentStatus = wms.CurrentStatus?.ToString(), Description = wms.Description, RunDate = wmsFileRecord.RunDate, Anomalous = AnomalousBarcode(wms.Barcode), }).Where(y => y.Title != null || y.Barcode != null).GroupBy(x => new { x.OclcNumber, x.Barcode, x.RunDate }).Select(x => x.First()).ToArray(); logMessage($"Records Found: {parsed.Length}"); if (cancellationToken.IsCancellationRequested) { logMessage("Operation was cancelled"); exceptionHandled = true; return(null); } if (parsed.Length <= 0) { logMessage("Failed to parse properly and return any meaningful records. 
This might mean that non of the parsed records had a Title or Barcode."); exceptionHandled = true; return(null); } try { destination.DataContext.BulkImportInventory( parsed.ToDataReader(r => new object[] { r.OclcNumber, r.Title, r.MaterialFormat, r.Author, r.Barcode, r.Cost, r.LastInventoriedDate, r.DeletedDate, r.ItemType, r.CallNumber, r.ShelvingLocation, r.CurrentStatus, r.Description, r.RunDate, r.Anomalous })); } catch (SqlException ex) { logMessage(ex.Message); if (ex.InnerException != null) { logMessage(ex.InnerException.ToString()); } logMessage("Sql Server was most likely put into an unusable state after this exception and thus the whole operation was canceled."); exceptionHandled = true; } return(sourceFiles.First(x => x.Name == wmsFileRecord.file)); }).Where(x => x != null); foreach (var success in completedFiles) { successfulFiles.Add(success); } UpdateHarvesterRecord(logMessage, successfulFiles, source.Name, _harvesterArgs); if (exceptionHandled) { destination.DataContext.Connection.Close(); destination.DataContext.Dispose(); throw new Exception("An Exception was encountered. At least one file failed"); } } } }
/// <summary>
/// Parses JSON from the reader, translating parser failures via JsonException.Wrap.
/// </summary>
public static JsonValue StringToValue(TextReader reader) =>
    JsonException.Wrap(() => StreamParser.Parse(reader));
/// <summary>
/// The removed-garbage count reported by ScoreStream must match the expectation.
/// </summary>
public void Test2(string input, int expectedScore)
{
    var result = StreamParser.ScoreStream(input);

    Assert.AreEqual(expectedScore, result.removedGarbage);
}
/// <summary>
/// SkipControl must return false for input consisting only of CR/LF.
/// </summary>
public void StreamParser_SkipControl_2()
{
    var parser = StreamParser.FromString("\r\n");

    Assert.IsFalse(parser.SkipControl());
}
/// <summary>
/// The score reported by ScoreStream must match the expectation.
/// </summary>
public void Test1(string input, int expectedScore)
{
    var result = StreamParser.ScoreStream(input);

    Assert.AreEqual(expectedScore, result.score);
}
/// <summary>
/// Creates a metadata parser over a secondary stream that shares this
/// instance's heaps, tables, and wide-offset flags.
/// </summary>
public CLRMetaDataParser CreateInternalStreamParser(StreamParser secondaryStream) =>
    new CLRMetaDataParser(secondaryStream, m_binData, m_tables, m_largeStrings, m_largeGuids, m_largeBlobs);
/// <summary>
/// SkipPunctuation must return false for input consisting only of ".,".
/// </summary>
public void StreamParser_SkipPunctuation_2()
{
    var parser = StreamParser.FromString(".,");

    Assert.IsFalse(parser.SkipPunctuation());
}
/// <summary>
/// Harvests EZProxy audit logs: extracts .zip archives in the source repository
/// (promoting matching files and decompressing nested .gz logs into the log
/// destination), records new/modified files in the harvester DirectoryRecords
/// table, then parses every modified file and bulk-imports the audit rows into
/// the statistics database.
/// </summary>
/// <param name="runDate">Nominal run date (not used for per-record dates in the visible code).</param>
/// <param name="logMessage">Sink for progress and diagnostic messages.</param>
/// <param name="cancellationToken">Checked after the directory scan; honored via ThrowIfCancellationRequested.</param>
public override void Execute(DateTime runDate, Action <string> logMessage, System.Threading.CancellationToken cancellationToken) { using (IDirectoryRepository source = RepositoryFactory.CreateDirectoryRepository(_directoryArgs)) { logMessage($"Connected to source repository '{source.Name}' ({source.ConnectionString})"); using (IDirectoryRepository destination = RepositoryFactory.CreateDirectoryRepository(_logDirectoryArgs)) { logMessage($"Connected to destination repository '{destination.Name}' ({destination.ConnectionString})"); Regex filePattern = new Regex(_arguments.FilePattern); foreach (DirectoryObjectMetadata file in source.ListFiles().Where(x => x.Path.Contains(".zip"))) { string tempZipDirectoryPath = file.Path.Replace(".zip", ""); string tempZipDirectoryName = file.Name.Replace(".zip", ""); ZipFile.ExtractToDirectory(file.Path, tempZipDirectoryPath); source.DeleteFile(file.Path); foreach (String unzippedfile in source.ListFiles(tempZipDirectoryName).Select(x => x.Path).Where(x => filePattern.IsMatch(x))) { string filename = unzippedfile.Split(new[] { "\\" }, StringSplitOptions.None).Last(); List <String> currentFiles = source.ListFiles().Select(x => x.Name).ToList(); if (!currentFiles.Contains(filename)) { source.MoveFile(unzippedfile, filename); } } foreach (String gzipFile in source.ListFiles(tempZipDirectoryName).Select(x => x.Path).Where(x => x.Contains(".gz"))) { string fileNameConcat = tempZipDirectoryName + ".log"; List <String> currentFiles = destination.ListFiles().Select(x => x.Name).ToList(); if (!currentFiles.Contains(fileNameConcat)) { using (GZipStream gzipStream = new GZipStream(source.OpenFile(gzipFile), CompressionMode.Decompress)) { using (Stream unzippedDestination = destination.CreateFile(fileNameConcat, Repository.Directory.FileCreationMode.ThrowIfFileExists)) { gzipStream.CopyTo(unzippedDestination); } } } } source.DeleteDirectory(tempZipDirectoryPath); } } List <String> modified = new List <String>(); Int32 newCount = 0; using 
(IDatabaseRepository <IHarvesterDataContext> harvester = RepositoryFactory.CreateHarvesterRepository(_harvesterArgs)) { logMessage($"Connected to database '{harvester.Name}' ({harvester.ConnectionString})"); IEnumerable <DirectoryObjectMetadata> sourceFiles = source.ListFiles("/"); Dictionary <String, DirectoryRecord> dictionary = harvester.DataContext.DirectoryRecords.Where(d => d.Operation.Name == Name && d.Repository.Name == source.Name).ToDictionary(d => d.FilePath); Entities.Repository repository = harvester.DataContext.Repositories.First(x => x.Name == source.Name); if (OperationID == 0) { logMessage("Warning: OperationID was not set properly. Correcting this."); OperationID = harvester.DataContext.Operations.First(d => d.Name == Name).ID; } foreach (DirectoryObjectMetadata file in sourceFiles) { if (!dictionary.ContainsKey(file.Path)) { modified.Add(file.Name); newCount++; harvester.DataContext.DirectoryRecords.InsertOnSubmit(new DirectoryRecord { OperationID = OperationID, RepositoryID = repository.ID, FilePath = file.Path, FileModifiedDate = file.ModifiedDate, CreationDate = DateTime.Now, ModifiedDate = DateTime.Now }); } else { DirectoryRecord element = dictionary[file.Path]; if (file.ModifiedDate > element.FileModifiedDate) { modified.Add(file.Name); element.FileModifiedDate = file.ModifiedDate; element.ModifiedDate = DateTime.Now; } } } if (cancellationToken.IsCancellationRequested) { source.Dispose(); harvester.Dispose(); cancellationToken.ThrowIfCancellationRequested(); } harvester.DataContext.SubmitChanges(); } logMessage($"Discovered {modified.Count} files to be processed ({newCount} new and {modified.Count - newCount} updated)."); if (modified.Count == 0) { return; } using (IDatabaseRepository <IStatisticsDataContext> destination = RepositoryFactory.CreateStatisticsRepository(_statisticsArgs)) { logMessage($"Connected to database '{destination.Name}' ({destination.ConnectionString})"); StreamParser <EZProxyAudit> Parser = new StreamParser 
<EZProxyAudit>(); List <EZProxyAudit> records = modified.Select(file => { logMessage($"Processing '{file}':"); int lineNumber = 0; return(Parser.ParseStream(source.OpenFile(file)).Select(x => new EZProxyAudit { DateTime = x.DateTime, Event = x.Event, IP = x.IP, Other = x.Other, Session = x.Session, Username = x.Username, LineNumber = lineNumber++, })); }).SelectMany(x => x).ToList(); logMessage($"Records Found: {records.Count}"); destination.DataContext.BulkImportEZProxyAudit(records.ToDataReader(r => new object[] { r.DateTime, r.Event, r.IP, r.Username, r.Session, r.Other, r.LineNumber })); } } }
/// <summary>
/// Parses the #~ metadata table stream: reads the header (heap-size flags,
/// valid/sorted masks), then a row count for every table whose bit is set in
/// the valid mask, registers a typed CLRTable for each known table id, builds
/// the shared CLRMetaDataParser, and finally parses every present table in
/// ascending table-id order. Throws if the table-schema version is not 2.0 or
/// if the valid mask contains a bit for an unregistered table id.
/// </summary>
/// <param name="parser">Parser positioned at the start of the #~ stream.</param>
/// <param name="metaData">Owning metadata object.</param>
/// <param name="binData">Previously loaded binary heaps (#Strings/#US/#GUID/#Blob).</param>
public CLRMetaDataTables(StreamParser parser, CLRMetaData metaData, CLRMetaStreamBinaryData binData) { MetaData = metaData; parser.Skip(4); // Reserved byte majorVersion = parser.ReadU8(); byte minorVersion = parser.ReadU8(); byte heapSizes = parser.ReadU8(); parser.Skip(1); // Reserved ulong validMask = parser.ReadU64(); ulong sortedMask = parser.ReadU64(); m_rowCounts = new uint[64]; for (int i = 0; i < 64; i++) { if ((validMask & ((ulong)1 << i)) != 0) m_rowCounts[i] = parser.ReadU32(); } StringOffsets32Bit = ((heapSizes & 1) != 0); GuidOffsets32Bit = ((heapSizes & 2) != 0); BlobOffsets32Bit = ((heapSizes & 4) != 0); if (majorVersion != 2 || minorVersion != 0) throw new ParseFailedException("Unknown metadata table version"); m_clrTables = new ICLRTable[64]; AddTable(0x00, new CLRTable<CLRModuleRow>()); AddTable(0x01, new CLRTable<CLRTypeRefRow>()); AddTable(0x02, new CLRTable<CLRTypeDefRow>()); AddTable(0x04, new CLRTable<CLRFieldRow>()); AddTable(0x06, new CLRTable<CLRMethodDefRow>()); AddTable(0x08, new CLRTable<CLRParamRow>()); AddTable(0x09, new CLRTable<CLRInterfaceImplRow>()); AddTable(0x0a, new CLRTable<CLRMemberRefRow>()); AddTable(0x0b, new CLRTable<CLRConstantRow>()); AddTable(0x0c, new CLRTable<CLRCustomAttributeRow>()); AddTable(0x0d, new CLRTable<CLRFieldMarshalRow>()); AddTable(0x0e, new CLRTable<CLRDeclSecurityRow>()); AddTable(0x0f, new CLRTable<CLRClassLayoutRow>()); AddTable(0x10, new CLRTable<CLRFieldLayoutRow>()); AddTable(0x11, new CLRTable<CLRStandAloneSigRow>()); AddTable(0x12, new CLRTable<CLREventMapRow>()); AddTable(0x14, new CLRTable<CLREventRow>()); AddTable(0x15, new CLRTable<CLRPropertyMapRow>()); AddTable(0x17, new CLRTable<CLRPropertyRow>()); AddTable(0x18, new CLRTable<CLRMethodSemanticsRow>()); AddTable(0x19, new CLRTable<CLRMethodImplRow>()); AddTable(0x1a, new CLRTable<CLRModuleRefRow>()); AddTable(0x1b, new CLRTable<CLRTypeSpecRow>()); AddTable(0x1c, new CLRTable<CLRImplMapRow>()); AddTable(0x1d, new 
CLRTable<CLRFieldRVARow>()); AddTable(0x20, new CLRTable<CLRAssemblyRow>()); AddTable(0x21, new CLRTable<CLRAssemblyProcessorRow>()); AddTable(0x22, new CLRTable<CLRAssemblyOSRow>()); AddTable(0x23, new CLRTable<CLRAssemblyRefRow>()); AddTable(0x24, new CLRTable<CLRAssemblyRefProcessorRow>()); AddTable(0x25, new CLRTable<CLRAssemblyRefOSRow>()); AddTable(0x26, new CLRTable<CLRFileRow>()); AddTable(0x27, new CLRTable<CLRExportedTypeRow>()); AddTable(0x28, new CLRTable<CLRManifestResourceRow>()); AddTable(0x29, new CLRTable<CLRNestedClassRow>()); AddTable(0x2a, new CLRTable<CLRGenericParamRow>()); AddTable(0x2c, new CLRTable<CLRGenericParamConstraintRow>()); AddTable(0x2b, new CLRTable<CLRMethodSpecRow>()); MetaDataParser = new CLRMetaDataParser(parser, binData, this, StringOffsets32Bit, GuidOffsets32Bit, BlobOffsets32Bit); for (int i = 0; i < 64; i++) { if ((validMask & (ulong)1 << i) != 0) { if (m_clrTables[i] == null) throw new ParseFailedException("Unknown table type"); m_clrTables[i].Parse(MetaDataParser); } } }
/// <summary>
/// Loads an ISM2 model file and converts it into a PMX model: validates the
/// "ISM2" magic (0x324D5349), picks endianness from the 32-bit value at offset
/// 0x14, reads the version bytes and section table, then imports strings
/// (section type 33 - must be first, other sections index into it), materials
/// (type 97), bones/material groups (type 3) and vertices (type 11). Returns
/// silently (closing the stream) when the magic does not match.
/// </summary>
/// <param name="filename">Path of the ISM2 file to load.</param>
// NOTE(review): the local 'k' is declared but never used in the visible code,
// and the FileStream is closed manually rather than via using - an early throw
// mid-parse would leak the handle; verify against the original source.
private ISMModel(string filename) { FileStream fs = new FileStream(filename, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite); sp = new StreamParser(fs, Endian.Little); pmxModel = new PMXModel(); sp.BaseStream.Seek(0x00, SeekOrigin.Begin); if (sp.ReadU32() != 0x324D5349) { fs.Close(); return; } sp.BaseStream.Seek(0x14, SeekOrigin.Begin); uint endianCheck = sp.ReadU32(); if (endianCheck > 0 && endianCheck < 65535) { sp.Endian = Endian.Little; } else { sp.Endian = Endian.Big; } sp.BaseStream.Seek(0x04, SeekOrigin.Begin); versionA = sp.ReadU8(); byte VersionB = sp.ReadU8(); byte VersionC = sp.ReadU8(); byte VersionD = sp.ReadU8(); uint header3 = sp.ReadU32(); uint header4 = sp.ReadU32(); uint filesize = sp.ReadU32(); uint sectionCount = sp.ReadU32(); uint header7 = sp.ReadU32(); uint header8 = sp.ReadU32(); int i, j, k; SectionData[] SectionArray = new SectionData[sectionCount]; for (i = 0; i < sectionCount; i++) { SectionArray[i] = new SectionData() { SectionType = sp.ReadU32(), SectionOffset = sp.ReadU32() }; } /** * Importing strings begin */ foreach (SectionData sd in SectionArray) { if (sd.SectionType == 33) //String array needs to be filled first { sp.BaseStream.Seek(sd.SectionOffset + 8, SeekOrigin.Begin); int stringCount = sp.ReadS32(); stringArray = new string[stringCount]; uint[] strOffsets = new uint[stringCount]; for (i = 0; i < stringCount; i++) { strOffsets[i] = sp.ReadU32(); } for (i = 0; i < stringCount; i++) { sp.BaseStream.Seek(strOffsets[i], SeekOrigin.Begin); stringArray[i] = sp.ReadAnsiNullTerminatedString(); } } } /** * Importing strings end */ /** * Materials */ foreach (SectionData sd in SectionArray) { if (sd.SectionType == 97) { sp.BaseStream.Seek(sd.SectionOffset + 8, SeekOrigin.Begin); int matTotal = sp.ReadS32(); uint[] matOffsetArray = new uint[matTotal]; for (i = 0; i < matTotal; i++) { matOffsetArray[i] = sp.ReadU32(); } for (i = 0; i < matTotal; i++) { sp.BaseStream.Seek(matOffsetArray[i] + 8, SeekOrigin.Begin); int 
matSubTotal = sp.ReadS32(); string matSubString1 = stringArray[sp.ReadS32()]; string matSubString2 = stringArray[sp.ReadS32()]; string matSubString3 = stringArray[sp.ReadS32()]; sp.BaseStream.Seek(4, SeekOrigin.Current); PMXMaterial mat = new PMXMaterial(pmxModel); pmxModel.Materials.Add(mat); mat.NameEN = matSubString1; mat.NameJP = matSubString1; mat.Diffuse = new PMXColorRGB(0.77f, 0.77f, 0.77f); mat.Specular = new PMXColorRGB(0.0f, 0.0f, 0.0f); mat.Ambient = new PMXColorRGB(1.0f, 1.0f, 1.0f); mat.StandardToonIndex = 3; mat.EdgeEnabled = false; if (matSubTotal > 0) { int matSubOffset = sp.ReadS32(); sp.BaseStream.Seek(matSubOffset + 12, SeekOrigin.Begin); matSubOffset = sp.ReadS32(); sp.BaseStream.Seek(matSubOffset + 20, SeekOrigin.Begin); matSubOffset = sp.ReadS32(); sp.BaseStream.Seek(matSubOffset + 24, SeekOrigin.Begin); matSubOffset = sp.ReadS32(); sp.BaseStream.Seek(matSubOffset + 24, SeekOrigin.Begin); matSubOffset = sp.ReadS32(); sp.BaseStream.Seek(matSubOffset, SeekOrigin.Begin); mat.DiffuseTexture = stringArray[sp.ReadS32()] + ".dds"; } else { mat.DiffuseTexture = "tex_c.dds"; } //Console.WriteLine(texturename); } } } /** * Bones and material groups */ foreach (SectionData sd in SectionArray) { if (sd.SectionType == 03) { Console.WriteLine("Object data"); sp.BaseStream.Seek(sd.SectionOffset + 8, SeekOrigin.Begin); int boneCount = sp.ReadS32(); uint[] boneOffsets = new uint[boneCount]; string boneDataString1 = stringArray[sp.ReadS32()]; string boneDataString2 = stringArray[sp.ReadS32()]; for (i = 0; i < boneCount; i++) { boneOffsets[i] = sp.ReadU32(); } for (i = 0; i < boneCount; i++) { sp.BaseStream.Seek(boneOffsets[i] + 8, SeekOrigin.Begin); /*uint sectionType = sp.ReadU32(); * sp.ReadU32();*/ int boneHeaderTotal = sp.ReadS32(); string boneName1 = stringArray[sp.ReadS32()]; string boneName2 = stringArray[sp.ReadS32()]; sp.BaseStream.Seek(8, SeekOrigin.Current); uint boneParentOffset = sp.ReadU32(); int boneParent = -1; for (j = 0; j < boneCount; j++) { 
if (boneOffsets[j] == boneParentOffset) { boneParent = j; } } sp.BaseStream.Seek(12, SeekOrigin.Current); int boneIdNum = sp.ReadS32(); sp.BaseStream.Seek(16, SeekOrigin.Current); uint[] boneHeaderOffsets = new uint[boneHeaderTotal]; for (j = 0; j < boneHeaderTotal; j++) { boneHeaderOffsets[j] = sp.ReadU32(); } for (j = 0; j < boneHeaderTotal; j++) { sp.BaseStream.Seek(boneHeaderOffsets[j], SeekOrigin.Begin); uint sectionType = sp.ReadU32(); if (sectionType == 76) { ImportSurface(); } if (sectionType == 91) { ImportBone(boneName1, i, boneParent, boneIdNum); } } } } } /** * Importing textures begin */ /*foreach (SectionData sd in SectionArray) * { * if (sd.SectionType == 46) * { * sp.BaseStream.Seek(sd.SectionOffset + 8, SeekOrigin.Begin); * int texTotal = sp.ReadS32(); * uint[] texOffsetArray = new uint[texTotal]; * for (i = 0; i < texTotal; i++) * { * texOffsetArray[i] = sp.ReadU32(); * } * * for (i = 0; i < texTotal; i++) * { * sp.BaseStream.Seek(texOffsetArray[i] + 12, SeekOrigin.Begin); * string texturename = stringArray[sp.ReadS32()]; * sp.BaseStream.Seek(12, SeekOrigin.Current); * //Console.WriteLine(texturename); * } * } * }*/ //TODO: Not sure what these do /** * Importing strings end */ /** * Vertices */ foreach (SectionData sd in SectionArray) { if (sd.SectionType == 11) { Console.WriteLine("Vertex data"); sp.BaseStream.Seek(sd.SectionOffset + 8, SeekOrigin.Begin); int vtxHeaderTotal = sp.ReadS32(); uint[] vtxHeadOffsets = new uint[vtxHeaderTotal]; for (i = 0; i < vtxHeaderTotal; i++) { vtxHeadOffsets[i] = sp.ReadU32(); } for (i = 0; i < vtxHeaderTotal; i++) { sp.BaseStream.Seek(vtxHeadOffsets[i], SeekOrigin.Begin); uint sectionType = sp.ReadU32(); if (sectionType == 10) { ImportVertexGroup(); } } } } fs.Close(); }
/// <summary>
/// Uploads the contents of <paramref name="fileStream"/> to the file container
/// identified by <paramref name="containerId"/>. Large files are split into
/// chunks of <paramref name="chunkSize"/> bytes; each chunk is retried up to
/// <paramref name="chunkRetryTimes"/> times with randomized backoff.
/// </summary>
/// <param name="containerId">Target container id; must be >= 1.</param>
/// <param name="itemPath">Server-side item path; also used in trace messages and the query string.</param>
/// <param name="fileStream">Source data; its Length drives chunking decisions.</param>
/// <param name="contentId">Optional content id sent as "x-vso-contentId"; when set, chunkSize must be a multiple of c_ContentChunkMultiple.</param>
/// <param name="fileLength">Declared (uncompressed) length, sent as "x-tfs-filelength" when <paramref name="isGzipped"/> is true.</param>
/// <param name="isGzipped">True when <paramref name="fileStream"/> already holds gzip-compressed bytes.</param>
/// <param name="scopeIdentifier">Scope appended to the item query string.</param>
/// <param name="cancellationToken">Observed between chunks and by every await.</param>
/// <param name="chunkSize">Requested chunk size; clamped to c_maxChunkSize.</param>
/// <param name="chunkRetryTimes">Maximum attempts per chunk.</param>
/// <param name="uploadFirstChunk">When true, stop after the first chunk has been sent.</param>
/// <param name="userState">Opaque state passed through to the HTTP layer.</param>
/// <returns>The last HTTP response received (success, fast-fail, or final failed attempt).</returns>
/// <exception cref="ArgumentException">
/// Invalid containerId or chunkSize/contentId combination, gzip not supported by the
/// negotiated server version, or a "compressed" stream that is not smaller than fileLength.
/// </exception>
public async Task<HttpResponseMessage> UploadFileAsync(
    Int64 containerId,
    String itemPath,
    Stream fileStream,
    byte[] contentId,
    Int64 fileLength,
    Boolean isGzipped,
    Guid scopeIdentifier,
    CancellationToken cancellationToken = default(CancellationToken),
    int chunkSize = c_defaultChunkSize,
    int chunkRetryTimes = c_defaultChunkRetryTimes,
    bool uploadFirstChunk = false,
    Object userState = null)
{
    if (containerId < 1)
    {
        throw new ArgumentException(WebApiResources.ContainerIdMustBeGreaterThanZero(), "containerId");
    }

    if (chunkSize > c_maxChunkSize)
    {
        chunkSize = c_maxChunkSize;
    }

    // if a contentId is specified but the chunk size is not a 2mb multiple error
    if (contentId != null && (chunkSize % c_ContentChunkMultiple) != 0)
    {
        throw new ArgumentException(FileContainerResources.ChunksizeWrongWithContentId(c_ContentChunkMultiple), "chunkSize");
    }

    ArgumentUtility.CheckForNull(fileStream, "fileStream");

    // Gzip uploads need server-side support; negotiate the API version first.
    ApiResourceVersion gzipSupportedVersion = new ApiResourceVersion(new Version(1, 0), 2);
    ApiResourceVersion requestVersion = await NegotiateRequestVersionAsync(FileContainerResourceIds.FileContainer, s_currentApiVersion, userState, cancellationToken).ConfigureAwait(false);

    if (isGzipped
        && (requestVersion.ApiVersion < gzipSupportedVersion.ApiVersion
            || (requestVersion.ApiVersion == gzipSupportedVersion.ApiVersion && requestVersion.ResourceVersion < gzipSupportedVersion.ResourceVersion)))
    {
        throw new ArgumentException(FileContainerResources.GzipNotSupportedOnServer(), "isGzipped");
    }

    // A gzipped stream must be strictly smaller than the declared uncompressed length.
    if (isGzipped && fileStream.Length >= fileLength)
    {
        throw new ArgumentException(FileContainerResources.BadCompression(), "fileLength");
    }

    HttpRequestMessage requestMessage = null;
    List<KeyValuePair<String, String>> query = AppendItemQueryString(itemPath, scopeIdentifier);

    if (fileStream.Length == 0)
    {
        // zero byte upload
        FileUploadTrace(itemPath, $"Upload zero byte file '{itemPath}'.");
        requestMessage = await CreateRequestMessageAsync(
            HttpMethod.Put,
            FileContainerResourceIds.FileContainer,
            routeValues: new { containerId = containerId },
            version: s_currentApiVersion,
            queryParameters: query,
            userState: userState,
            cancellationToken: cancellationToken).ConfigureAwait(false);
        return (await SendAsync(requestMessage, userState, cancellationToken).ConfigureAwait(false));
    }

    bool multiChunk = false;
    int totalChunks = 1;
    if (fileStream.Length > chunkSize)
    {
        totalChunks = (int)Math.Ceiling(fileStream.Length / (double)chunkSize);
        FileUploadTrace(itemPath, $"Begin chunking upload file '{itemPath}', chunk size '{chunkSize} Bytes', total chunks '{totalChunks}'.");
        multiChunk = true;
    }
    else
    {
        FileUploadTrace(itemPath, $"File '{itemPath}' will be uploaded in one chunk.");
        chunkSize = (int)fileStream.Length;
    }

    StreamParser streamParser = new StreamParser(fileStream, chunkSize);
    SubStream currentStream = streamParser.GetNextStream();
    HttpResponseMessage response = null;

    // Buffer is reused across all chunks and attempts.
    Byte[] dataToSend = new Byte[chunkSize];
    int currentChunk = 0;
    Stopwatch uploadTimer = new Stopwatch();

    while (currentStream.Length > 0 && !cancellationToken.IsCancellationRequested)
    {
        currentChunk++;

        for (int attempt = 1; attempt <= chunkRetryTimes && !cancellationToken.IsCancellationRequested; attempt++)
        {
            if (attempt > 1)
            {
                TimeSpan backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(10));
                FileUploadTrace(itemPath, $"Backoff {backoff.TotalSeconds} seconds before attempt '{attempt}' chunk '{currentChunk}' of file '{itemPath}'.");
                await Task.Delay(backoff, cancellationToken).ConfigureAwait(false);
                // Rewind so the retry re-reads the same chunk from its start.
                currentStream.Seek(0, SeekOrigin.Begin);
            }

            FileUploadTrace(itemPath, $"Attempt '{attempt}' for uploading chunk '{currentChunk}' of file '{itemPath}'.");

            // inorder for the upload to be retryable, we need the content to be re-readable
            // to ensure this we copy the chunk into a byte array and send that
            // chunk size ensures we can convert the length to an int
            int bytesToCopy = (int)currentStream.Length;
            using (MemoryStream ms = new MemoryStream(dataToSend))
            {
                await currentStream.CopyToAsync(ms, bytesToCopy, cancellationToken).ConfigureAwait(false);
            }

            // set the content and the Content-Range header
            HttpContent byteArrayContent = new ByteArrayContent(dataToSend, 0, bytesToCopy);
            byteArrayContent.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream");
            byteArrayContent.Headers.ContentLength = currentStream.Length;
            byteArrayContent.Headers.ContentRange = new System.Net.Http.Headers.ContentRangeHeaderValue(currentStream.StartingPostionOnOuterStream, currentStream.EndingPostionOnOuterStream, streamParser.Length);
            FileUploadTrace(itemPath, $"Generate new HttpRequest for uploading file '{itemPath}', chunk '{currentChunk}' of '{totalChunks}'.");

            try
            {
                // Dispose the previous attempt's request before creating a new one.
                if (requestMessage != null)
                {
                    requestMessage.Dispose();
                    requestMessage = null;
                }

                requestMessage = await CreateRequestMessageAsync(
                    HttpMethod.Put,
                    FileContainerResourceIds.FileContainer,
                    routeValues: new { containerId = containerId },
                    version: s_currentApiVersion,
                    content: byteArrayContent,
                    queryParameters: query,
                    userState: userState,
                    cancellationToken: cancellationToken).ConfigureAwait(false);
            }
            catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
            {
                // stop re-try on cancellation.
                throw;
            }
            catch (Exception ex) when (attempt < chunkRetryTimes) // not the last attempt
            {
                FileUploadTrace(itemPath, $"Chunk '{currentChunk}' attempt '{attempt}' of file '{itemPath}' fail to create HttpRequest. Error: {ex.ToString()}.");
                continue;
            }

            if (isGzipped)
            {
                //add gzip header info
                byteArrayContent.Headers.ContentEncoding.Add("gzip");
                byteArrayContent.Headers.Add("x-tfs-filelength", fileLength.ToString(System.Globalization.CultureInfo.InvariantCulture));
            }

            if (contentId != null)
            {
                byteArrayContent.Headers.Add("x-vso-contentId", Convert.ToBase64String(contentId)); // Base64FormattingOptions.None is default when not supplied
            }

            FileUploadTrace(itemPath, $"Start uploading file '{itemPath}' to server, chunk '{currentChunk}'.");
            uploadTimer.Restart();

            try
            {
                // Dispose the previous attempt's response before sending again.
                if (response != null)
                {
                    response.Dispose();
                    response = null;
                }

                response = await SendAsync(requestMessage, userState, cancellationToken).ConfigureAwait(false);
            }
            catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
            {
                // stop re-try on cancellation.
                throw;
            }
            catch (Exception ex) when (attempt < chunkRetryTimes) // not the last attempt
            {
                FileUploadTrace(itemPath, $"Chunk '{currentChunk}' attempt '{attempt}' of file '{itemPath}' fail to send request to server. Error: {ex.ToString()}.");
                continue;
            }

            uploadTimer.Stop();
            FileUploadTrace(itemPath, $"Finished upload chunk '{currentChunk}' of file '{itemPath}', elapsed {uploadTimer.ElapsedMilliseconds} (ms), response code '{response.StatusCode}'.");

            if (multiChunk)
            {
                FileUploadProgress(itemPath, currentChunk, (int)Math.Ceiling(fileStream.Length / (double)chunkSize));
            }

            if (response.IsSuccessStatusCode)
            {
                break;
            }
            else if (IsFastFailResponse(response))
            {
                FileUploadTrace(itemPath, $"Chunk '{currentChunk}' attempt '{attempt}' of file '{itemPath}' received non-success status code {response.StatusCode} for sending request and cannot continue.");
                break;
            }
            else
            {
                FileUploadTrace(itemPath, $"Chunk '{currentChunk}' attempt '{attempt}' of file '{itemPath}' received non-success status code {response.StatusCode} for sending request.");
                continue;
            }
        }

        // if we don't have success then bail and return the failed response
        if (!response.IsSuccessStatusCode)
        {
            break;
        }

        if (contentId != null && response.StatusCode == HttpStatusCode.Created)
        {
            // no need to keep uploading since the server said it has all the content
            FileUploadTrace(itemPath, $"Stop chunking upload the rest of the file '{itemPath}', since server already has all the content.");
            break;
        }

        currentStream = streamParser.GetNextStream();

        if (uploadFirstChunk)
        {
            break;
        }
    }

    cancellationToken.ThrowIfCancellationRequested();

    return (response);
}
/// <summary>
/// Opens the socket connection to the XMPP server — first via the DNS SRV
/// service name, then via the service name from the connection string — and,
/// on success, wires up the UTF-8 reader/writer and a fresh stream parser.
/// </summary>
private async Task ConnectAsync()
{
    // Resolve the remote host from the connection string.
    this.hostname = new HostName(this.ConnectionString.HostName);

    // DNS SRV record name for the XMPP service on that host.
    var srvServiceName = $"{XmppCodes.XmppSrvRecordPrefix}.{this.ConnectionString.HostName}";

    // Fresh socket, configured before the first connect attempt.
    this.socket = new StreamSocket();

    // Size, in bytes, of the send buffer used when writing to the socket.
    this.socket.Control.OutboundBufferSizeInBytes = this.ConnectionString.PacketSize;

    // Remaining control knobs (KeepAlive, NoDelay, SerializeConnectionAttempts)
    // are deliberately left at their defaults.
    this.socket.Control.QualityOfService = SocketQualityOfService.Normal;

    // Connection strategy:
    //   1. try the DNS SRV service name;
    //   2. fall back to the service name from the connection string
    //      (typically the port number).
    // https://view.officeapps.live.com/op/view.aspx?src=http%3a%2f%2fvideo.ch9.ms%2fbuild%2f2011%2fslides%2fPLAT-580T_Thaler.pptx
    var isConnected = await this.ConnectAsync(srvServiceName).ConfigureAwait(false);

    if (!isConnected)
    {
        isConnected = await this.ConnectAsync(this.ConnectionString.ServiceName).ConfigureAwait(false);
    }

    if (!isConnected)
    {
        // Both attempts failed: report it and release the socket.
        this.PublishStateChange(TransportState.ConnectionFailed);
        this.socket.Dispose();
        this.socket = null;

        return;
    }

    // UTF-8 reader over the socket input; partial reads allowed so we can
    // consume whatever bytes have arrived.
    this.reader = new DataReader(this.socket.InputStream)
    {
        UnicodeEncoding = UnicodeEncoding.Utf8,
        InputStreamOptions = InputStreamOptions.Partial
    };

    // UTF-8 writer over the socket output (byte order left at its default).
    this.writer = new DataWriter(this.socket.OutputStream)
    {
        UnicodeEncoding = UnicodeEncoding.Utf8
    };

    // New XMPP stream parser for this connection.
    this.parser = new StreamParser();
}
public CLRMetaDataParser(StreamParser parser, CLRMetaStreamBinaryData binData, CLRMetaDataTables tables, bool largeStrings, bool largeGuids, bool largeBlobs) { m_parser = parser; m_binData = binData; m_tables = tables; m_largeStrings = largeStrings; m_largeGuids = largeGuids; m_largeBlobs = largeBlobs; }
public void StreamParser_NoEmailInString_NoResults() { StreamParser parser = new StreamParser("lsdfhdsfhsdf"); IList<string> result = parser.Parse(); Assert.AreEqual(result.Count(), 0); }
public async Task SomeMajorTask() { var outputStream = new MemoryStream(); using (var outputZip = new ZipArchive(outputStream, ZipArchiveMode.Create, leaveOpen: true)) using (var containerBuilder = new ContainerBuilder(outputZip)) { var stopWatch = new Stopwatch(); stopWatch.Start(); CSVMapping csvMapping = null; var streamParser = new StreamParser(); var strFilePath = @"C:\Projects\assetfiles\testfile.zip"; var fs = new FileStream(strFilePath, FileMode.Open, FileAccess.Read); var ms = new MemoryStream(); fs.CopyTo(ms); // // var file = _downloader.GetFile(uri); //var zip = _zipProcessor.UnZip(file); var zip = new ZipArchive(ms, ZipArchiveMode.Read, true); foreach (var zipEntry in zip.Entries) { if (!zipEntry.FullName.ToLower().EndsWith(".xml")) { continue; } using (var inputStream = zipEntry.Open()) using (var stream = new MemoryStream()) { _transformer.Transform(inputStream, stream); if (stream.Length == 0) { continue; } stream.Position = 0; var data = await streamParser.Parse(stream); csvMapping = _mappingProvider.GetMappingByData(data); var serializer = new CsvSerializer(containerBuilder, csvMapping); await serializer.Serialize(data.ParsedEntities); } } // Add metadata to each archive if (csvMapping != null) { CSVMappingHelper.AddMetadata(outputZip, csvMapping); } stopWatch.Stop(); var ts = stopWatch.Elapsed; var elapsedTime = String.Format("{0:00}:{1:00}:{2:00}.{3:00}", ts.Hours, ts.Minutes, ts.Seconds, ts.Milliseconds / 10); } }
public CLRStreamHeader(StreamParser parser) { Offset = parser.ReadU32(); Size = parser.ReadU32(); Name = parser.ReadVarAsciiString(32); }