/// <summary>
/// Registers this stream as a consumer on an analysis run (phase 2).
/// </summary>
/// <param name="analyser">The transport stream analyser to attach to.</param>
public void RegisterPhase2(TSParser analyser)
{
    // Scan all exported types for a concrete well known table bound to our PID
    foreach (var tableType in typeof(WellKnownTable).Assembly.GetExportedTypes())
    {
        // Skip anything which is not a usable well known table for this stream
        if (!typeof(WellKnownTable).IsAssignableFrom(tableType))
            continue;
        if (tableType.IsAbstract)
            continue;
        if (WellKnownTable.GetWellKnownStream(tableType) != PID)
            continue;

        // Show the table name in the UI
        SubItems[6].Text = tableType.Name;

        // Create the consumer and connect it to the analyser
        var parser = TableParser.Create(CountTable, tableType);
        analyser.SetFilter(PID, true, parser.AddPayload);

        // At most one well known table per PID
        break;
    }
}
/// <summary>
/// Compiles the given source code.
/// </summary>
/// <param name="lexer">The lexer used to tokenize the source.</param>
/// <param name="code">The source code to compile.</param>
public void CompileCode(TSLexer lexer, string code)
{
    // Compilation pipeline:
    // Step 1 - lexical analysis: split the source into tokens
    TokenData tokens = lexer.GetTokens(code, this.PackageName);

    // Step 2 - parsing: build the abstract syntax tree from a
    // context free grammar (e.g. BNF, Backus-Naur form)
    TSParser syntaxParser = new TSParser(tokens);
    syntaxParser.Parse();
}
/// <summary>
/// Finally shuts down this instance and releases all resources.
/// </summary>
public override void Dispose()
{
    // Stop any raw TS dump first, then the regular processing
    StopDump();
    Stop();

    // Detach the parser before disposing it so the field never
    // references a disposed instance
    var parser = m_Parser;
    m_Parser = null;
    if (parser != null)
        parser.Dispose();

    // Same pattern for the TIF helper
    var tif = m_TIF;
    m_TIF = null;
    if (tif != null)
        tif.Dispose();

    // Forward to the base class
    base.Dispose();
}
/// <summary>
/// Streams the whole file content into an analyser.
/// </summary>
/// <param name="target">The analyser instance to feed.</param>
private void ReadFile(TSParser target)
{
    // Read the file in chunks of this size
    var chunk = new byte[100000];

    // Open the file for shared reading, buffered with the chunk size
    using (var file = new FileStream(Text, FileMode.Open, FileAccess.Read, FileShare.Read, chunk.Length))
    {
        // Show the file size (MBytes) in the UI
        SubItems[1].Text = Math.Round(file.Length / 1024.0 / 1024.0).ToString("N0");

        // Push all data into the analyser
        int bytesRead;
        while ((bytesRead = file.Read(chunk, 0, chunk.Length)) > 0)
            target.AddPayload(chunk, 0, bytesRead);
    }
}
// Unity entry point: parses the configured mission file with the ANTLR
// TorqueScript grammar, instantiates all interiors and places the marble
// on the start pad.
void Start()
{
    // No mission configured - nothing to do
    if (MissionPath.Length == 0)
    {
        return;
    }
    // Lex and parse the mission file from the streaming assets folder
    TSLexer lexer = new TSLexer(new AntlrFileStream(Path.Combine(Application.streamingAssetsPath, MissionPath)));
    TSParser parser = new TSParser(new CommonTokenStream(lexer));
    var file = parser.start();
    if (parser.NumberOfSyntaxErrors > 0)
    {
        Debug.LogError("Could not parse!");
        return;
    }
    // Collect the top level object declarations - expected to be exactly one
    // mission group; additional ones are logged but still processed
    MissionObjects = new List<TSObject>();
    foreach (var decl in file.decl())
    {
        // Only object declarations used as expression statements are of interest
        var objectDecl = decl.stmt()?.expression_stmt()?.stmt_expr()?.object_decl();
        if (objectDecl == null)
        {
            continue;
        }
        if (MissionObjects.Count > 0)
        {
            Debug.Log("Mission with two mission groups?");
        }
        MissionObjects.Add(ProcessObject(objectDecl));
    }
    if (MissionObjects.Count <= 0)
    {
        return;
    }
    // Walk every object of the first (primary) mission group
    var mis = MissionObjects[0];
    foreach (var obj in mis.RecursiveChildren())
    {
        if (obj.ClassName == "InteriorInstance")
        {
            // Instantiate the interior prefab and apply the transform fields
            // (position/rotation/scale strings converted to Unity types)
            var gobj = Instantiate(InteriorPrefab, transform, false);
            var positionParts = ParseVectorString(obj.GetField("position"));
            var position = ConvertPoint(positionParts);
            var rotationParts = ParseVectorString(obj.GetField("rotation"));
            var rotation = ConvertRotation(rotationParts);
            var scaleParts = ParseVectorString(obj.GetField("scale"));
            var scale = ConvertScale(scaleParts);
            gobj.transform.localPosition = position;
            gobj.transform.localRotation = rotation;
            gobj.transform.localScale = scale;
            // Resolve the DIF geometry file relative to the mission and build its mesh
            var difPath = ResolvePath(obj.GetField("interiorFile"), MissionPath);
            gobj.GetComponent<Dif>().filePath = difPath;
            gobj.GetComponent<Dif>().GenerateMesh();
            // Register the interior's collider with the marble physics
            GlobalMarble.GetComponent<Movement>().AddMesh(gobj.GetComponent<MeshCollider>());
        }
        if (obj.ClassName == "StaticShape" && obj.GetField("dataBlock") == "StartPad")
        {
            // Place the marble relative to the start pad
            var positionParts = ParseVectorString(obj.GetField("position"));
            var position = ConvertPoint(positionParts);
            var rotationParts = ParseVectorString(obj.GetField("rotation"));
            var rotation = ConvertRotation(rotationParts);
            // Offset of 3 along the pad's rotated Z axis - presumably "up" in the
            // converted coordinate system; NOTE(review): confirm against ConvertRotation
            var spawnPoint = position + rotation * new Vector3(0, 0, 3);
            GlobalMarble.transform.localPosition = spawnPoint;
            GlobalMarble.GetComponent<Marble>().StartPoint = spawnPoint;
            // Center the skybox on the spawn point as well
            var skybox = Instantiate(SkyboxPrefab, transform, false);
            skybox.transform.localPosition = spawnPoint;
        }
    }
}
/// <summary>
/// Initializes the reconstruction instance.
/// </summary>
/// <param name="parser">The transport stream analyser this builder belongs to.</param>
/// <param name="callback">Optional consumer for reconstructed packets.</param>
protected TSBuilder(TSParser parser, Action<byte[]> callback)
{
    // Keep the analyser and the optional packet consumer for later use
    Parser = parser;
    m_Callback = callback;
}
/// <summary>
/// Creates a new PES reconstruction instance.
/// </summary>
/// <param name="parser">The transport stream analyser this builder belongs to.</param>
/// <param name="callback">Receives every fully reconstructed packet.</param>
public PESBuilder(TSParser parser, Action<byte[]> callback)
    : base(parser, callback)
{
}
/// <summary>
/// Deferred startup: loads the selected file and fills the EPG entry list.
/// </summary>
/// <param name="sender">The timer raising this event.</param>
/// <param name="e">Ignored.</param>
private void starter_Tick(object sender, EventArgs e)
{
    // One shot timer - disable immediately
    starter.Enabled = false;

    // Finish early if no file was selected
    if (null == File)
    {
        // Stop
        Close();

        // Done
        return;
    }

    // Enter loading mode and allow the user to abort
    m_Loading = true;
    cmdStop.Enabled = true;

    // Reset GUI collections
    m_ListItems.Clear();
    m_Entries.Clear();

    // Be safe
    try
    {
        // Choose the SI string decoding mode
        Section.ISO6937Encoding = ckStandardSI.Checked;

        // A .ts file holds a full transport stream, anything else raw SI tables.
        // Use an ordinal comparison - extension matching must not depend on the
        // current culture (e.g. the Turkish-I problem with string.Compare(..., true)).
        bool tsMode = string.Equals(File.Extension, ".ts", StringComparison.OrdinalIgnoreCase);

        // Use a large block size for transport streams
        byte[] buffer = new byte[tsMode ? 10000000 : 100000];

        // Open the file and create the parser
        using (FileStream read = new FileStream(File.FullName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite, buffer.Length))
        using (TSParser parser = new TSParser())
        {
            // In transport stream mode only the EPG PID (0x12) is of interest
            if (tsMode)
            {
                parser.SetFilter(0x12, true, EPGParser.OnData);
            }

            // Process the whole file content
            for (int n; (n = read.Read(buffer, 0, buffer.Length)) > 0;)
            {
                // Report progress and keep the UI responsive
                progress.Value = (int)(read.Position * progress.Maximum / read.Length);
                Application.DoEvents();

                // User pressed the stop button
                if (!cmdStop.Enabled)
                {
                    break;
                }

                // Check mode
                if (tsMode)
                {
                    // Feed into the transport stream parser
                    parser.AddPayload(buffer, 0, n);
                }
                else
                {
                    // Raw SI table data
                    EPGParser.OnData(buffer, 0, n);
                }
            }
        }
    }
    catch (Exception ex)
    {
        // Report any problem to the user
        MessageBox.Show(this, ex.Message);
    }
    finally
    {
        // Loading finished - successful or not
        m_Loading = false;
    }

    // Turn the stop button into a (re)load button
    cmdStop.Text = m_LoadText;
    cmdStop.Enabled = true;

    // Show all entries found
    lstEntries.Items.Clear();
    lstEntries.Items.AddRange(m_ListItems.ToArray());

    // Prepare sorter
    lstEntries.ListViewItemSorter = new EPGEntry.Comparer();
}
/// <summary>
/// Initializes the reconstruction instance.
/// </summary>
/// <param name="parser">The transport stream analyser this builder is attached to.</param>
/// <param name="callback">Optional consumer for reconstructed packets.</param>
protected TSBuilder(TSParser parser, Action<byte[]> callback)
{
    // Store the attached analyser and the optional packet consumer
    Parser = parser;
    m_Callback = callback;
}
/// <summary>
/// Creates a new mediator builder.
/// </summary>
/// <param name="analyser">The underlying analyser instance.</param>
/// <param name="item">The stream description this builder reports to.</param>
public _Builder(TSParser analyser, StreamItem item)
    : base(analyser, null)
{
    // Keep the stream description for later use
    m_Item = item;
}
/// <summary>
/// Registers this stream as a consumer on an analysis run (phase 1).
/// </summary>
/// <param name="analyser">The transport stream analyser to attach to.</param>
public void RegisterPhase1(TSParser analyser)
{
    // Attach a custom builder for our PID so the raw packet data can be inspected
    analyser.RegisterCustomFilter(PID, new _Builder(analyser, this));
}
/// <summary>
/// Creates a new SI table reconstruction instance.
/// </summary>
/// <param name="parser">The transport stream analyser this builder belongs to.</param>
/// <param name="callback">Receives every fully reconstructed table.</param>
public SIBuilder(TSParser parser, Action<byte[]> callback)
    : base(parser, callback)
{
}
/// <summary>
/// Finally shuts down this component, writing all collected statistics
/// (EPG, PAT/PMT/SDT/NIT counters, raw parser counters) to the console
/// before disposing the parser.
/// </summary>
public void Dispose()
{
    // Report private overall statistics - note that the placeholder indices are
    // deliberately permuted so the argument list can stay in declaration order
    Console.WriteLine
        (
            "Program Guide: {0:N0} Tables, {1:N0} Entries\nAssociation Tables: {2:N0}\nService Tables: {4:N0}\nPrograms: {3:N0}\nNetwork Tables: {5:N0}",
            m_numberOfGuideTables,
            m_numberOfGuideEntries,
            m_patProcessed,
            m_lastPMTs.Count,
            m_sdtProcessed,
            m_nitProcessed
        );

    // Network summary - only if at least one NIT was seen
    if (m_lastNIT != null)
    {
        Console.WriteLine("Overall: {0:N0} Source Groups / Transponders", m_lastNIT.NetworkEntries.Length);
    }

    // Separate
    Console.WriteLine("Service Descriptions:");

    // Report service details from the last SDT seen
    if (m_lastSDT != null)
    {
        foreach (var service in m_lastSDT.Services)
        {
            // Load the service descriptor
            // NOTE(review): Find may return null when a service carries no
            // service descriptor - that would throw below; confirm this cannot happen
            var info = oldSIAPI.DescriptorExtensions.Find<oldSIAPI.Descriptors.Service>(service.Descriptors);

            // Report - placeholders map to: {1}=type, {3}=service name, {2}=provider
            Console.WriteLine
                (
                    "\tService {0} (0x{0:X4}): {1} {3} [{2}]",
                    service.ServiceIdentifier,
                    info.ServiceType,
                    info.ProviderName,
                    info.ServiceName
                );
        }
    }

    // Separate
    Console.WriteLine("Service Details:");

    // Report program details - one line per PMT with its stream types
    foreach (var program in m_lastPMTs.Values)
    {
        Console.WriteLine
            (
                "\tService {0} (0x{0:X4}): {1}",
                program.ProgramNumber,
                string.Join(", ", program.ProgramEntries.Select(e => e.StreamType.ToString()))
            );
    }

    // Request raw statistics - placeholder indices again permuted on purpose
    Console.WriteLine
        (
            "Received = {0:N0} Bytes / {1:N0} Packets / {2:N0} Callbacks / {9:N0} PAT\nSkipped = {3:N0} Bytes\nScrambled: {4:N0} Packets\nCorrupted: {5:N0} Packets, {6:N0} Streams, {7:N0} Tables\nResynchronisation: {8:N0} Times",
            m_parser.BytesReceived,
            m_parser.PacketsReceived,
            m_parser.Callbacks,
            m_parser.BytesSkipped,
            m_parser.Scrambled,
            m_parser.TransmissionErrors,
            m_parser.CorruptedStream,
            m_parser.CorruptedTable,
            m_parser.Resynchronized,
            m_parser.ValidPATCount
        );

    // Details for raw statistic - per PID packet counters
    foreach (var detailStatistics in m_parser.PacketStatistics)
    {
        Console.WriteLine("\tPID {0} (0x{0:X4}) {1:N0} Packets", detailStatistics.Key, detailStatistics.Value);
    }

    // Get rid of parser - dispose the old instance after clearing the field
    using (m_parser) m_parser = null;
}
/// <summary>
/// Analyses the current file in three passes over its content.
/// </summary>
public void Analyse()
{
    // Forget any previous results
    Streams.Clear();

    // Be safe
    try
    {
        // Pass 1: collect the raw packet statistics
        using (var statistics = new TSParser { FillStatistics = true })
        {
            // Feed the whole file
            ReadFile(statistics);

            // One stream item per PID, ordered by PID
            foreach (var packetInfo in statistics.PacketStatistics.OrderBy(entry => entry.Key))
                Streams.Add(new StreamItem(packetInfo.Key, packetInfo.Value));

            // Publish the overall counters to the UI
            SubItems[2].Text = statistics.BytesReceived.ToString("N0");
            SubItems[3].Text = statistics.BytesSkipped.ToString("N0");
            SubItems[4].Text = statistics.Callbacks.ToString();
            SubItems[5].Text = statistics.Resynchronized.ToString();
            SubItems[6].Text = statistics.Scrambled.ToString("N0");
            SubItems[7].Text = statistics.TransmissionErrors.ToString();
            SubItems[8].Text = statistics.ValidPATCount.ToString();
            SubItems[9].Text = statistics.PacketsReceived.ToString("N0");

            // Average number of bytes between two PATs
            if (statistics.ValidPATCount > 0)
            {
                SubItems[10].Text = Math.Round(statistics.BytesReceived * 1.0 / statistics.ValidPATCount).ToString("N0");
            }
        }

        // Pass 2: detailed per-stream analysis
        using (var detail = new TSParser())
        {
            // Connect every stream, then feed the whole file again
            foreach (var item in Streams)
                item.RegisterPhase1(detail);

            ReadFile(detail);
        }

        // Pass 3: collect the SI tables
        using (var tables = new TSParser())
        {
            // Connect every stream, then feed the whole file again
            foreach (var item in Streams)
                item.RegisterPhase2(tables);

            ReadFile(tables);
        }

        // Show the transport identifiers if a PAT (PID 0) was found
        StreamItem patStream = Streams.FirstOrDefault(candidate => candidate.PID == 0);
        if (patStream != null)
        {
            SubItems[11].Text = string.Join(", ", patStream.TransportIdentifiers.Select(identifier => identifier.ToString()).ToArray());
        }
    }
    catch (Exception error)
    {
        // Remember the failure for display
        SubItems[12].Text = error.Message;
    }

    // Always refresh the UI of every stream found so far
    foreach (var item in Streams)
    {
        item.RefreshUI();
    }
}