/// <summary>Adding one line should bring LineCount from 0 to 1.</summary>
public void Test_AddLine()
{
    var sut = new LineReader(600, 480);
    var line = new Line("Actor", "Words", "Sound", "Event");

    sut.AddLine(line);

    Assert.AreEqual(1, sut.LineCount);
}
/// <summary>
/// Loads the item base type and item class lists from text files under the
/// user's ApplicationData\Filtration folder. A missing or unreadable file
/// yields an empty list rather than an error.
/// </summary>
private void PopulateStaticData()
{
    ItemBaseTypes = ReadStaticDataLines("ItemBaseTypes.txt");
    ItemClasses = ReadStaticDataLines("ItemClasses.txt");
}

/// <summary>
/// Best-effort read of one static data file: returns the file's lines, or an
/// empty list when the file cannot be read.
/// </summary>
/// <param name="fileName">File name inside the Filtration data folder.</param>
private System.Collections.Generic.List<string> ReadStaticDataLines(string fileName)
{
    // Path.Combine instead of the original hand-built @"\Filtration\..." concatenation.
    var path = Path.Combine(
        Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData),
        "Filtration",
        fileName);

    string contents;
    try
    {
        contents = _fileSystemService.ReadFileAsString(path);
    }
    catch (Exception)
    {
        // Deliberate best-effort behavior (as in the original): a missing
        // data file simply produces no entries.
        contents = string.Empty;
    }

    return new LineReader(() => new StringReader(contents)).ToList();
}
/// <summary>LoadFromFile should populate the reader with the file's 10 lines.</summary>
public void Test_LoadLine()
{
    var sut = new LineReader(600, 480);

    sut.LoadFromFile("HelloMiku.txt");

    Assert.AreEqual(10, sut.LineCount);
}
/// <summary>
/// Loads the item base type and item class lists from the application's
/// Resources folder (relative to the AppDomain base directory).
/// </summary>
private void PopulateStaticData()
{
    // Path.Combine instead of manual "\\" string concatenation.
    var itemBaseTypes = _fileSystemService.ReadFileAsString(
        Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Resources", "ItemBaseTypes.txt"));
    ItemBaseTypes = new LineReader(() => new StringReader(itemBaseTypes)).ToList();

    var itemClasses = _fileSystemService.ReadFileAsString(
        Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Resources", "ItemClasses.txt"));
    ItemClasses = new LineReader(() => new StringReader(itemClasses)).ToList();
}
/// <summary>Color can be set both through the property and through SetColor.</summary>
public void Test_Color()
{
    var sut = new LineReader(600, 480);

    sut.Color = new Color(1, 2, 3, 4);
    Assert.AreEqual(new Color(1, 2, 3, 4), sut.Color);

    sut.SetColor(5, 6, 7, 8);
    Assert.AreEqual(new Color(5, 6, 7, 8), sut.Color);
}
/// <summary>A fresh reader is empty and remembers its constructor dimensions.</summary>
public void Test_New()
{
    var sut = new LineReader(600, 480);

    Assert.AreEqual(0, sut.LineCount);
    Assert.AreEqual(0, sut.CurrentPosition);
    Assert.AreEqual(null, sut.CurrentLine);
    Assert.AreEqual(600, sut.Width);
    Assert.AreEqual(480, sut.Height);
}
/// <summary>CharacterSize can be set via the property and via SetCharacterSize.</summary>
public void TestSetCharacterSize()
{
    var sut = new LineReader(600, 480);

    sut.CharacterSize = 1;
    Assert.AreEqual(1, sut.CharacterSize);

    sut.SetCharacterSize(2);
    Assert.AreEqual(2, sut.CharacterSize);
}
// Unity lifecycle hook: builds the binary map from the attached LineReader.
// Each character of each line is converted to its numeric value and stored at
// binary[column, row] (i = character index within the line, j = line index).
// NOTE(review): assumes the `binary` array is at least as large as the longest
// line by the line count, and that every character is a digit — confirm.
void Start()
{
    reader = GetComponent<LineReader>();
    string str;
    //initialize the binary map array
    for (int j = 0; j < reader.lines.Count; j++)
    {
        str = reader.lines[j];
        for (int i = 0; i < str.Length; i++)
        {
            binary[i, j] = (int)char.GetNumericValue(str[i]);
        }
    }
}
/// <summary>Jump, Next, Prev and Rewind all move CurrentPosition as expected.</summary>
public void Test_Jump()
{
    var sut = new LineReader(600, 480);
    sut.LoadFromFile("HelloMiku.txt");

    sut.Jump(5);
    Assert.AreEqual(5, sut.CurrentPosition);

    sut.Next();
    Assert.AreEqual(6, sut.CurrentPosition);

    sut.Prev();
    Assert.AreEqual(5, sut.CurrentPosition);

    sut.Rewind();
    Assert.AreEqual(0, sut.CurrentPosition);
}
/// <summary>
/// Word-count benchmark: runs the same map step over the input file first with
/// the default (parallel) worker count and then with a single worker, printing
/// the elapsed time of each run.
/// </summary>
static void Main(string[] args)
{
    string path = @"e:\vl.txt";
    string all = File.ReadAllText(path);

    // Map step: split each record into whitespace-separated tokens and
    // emit (token, 1) for every occurrence.
    Action<string, string, IMapContext<string, int>> map = (mk, mv, context) =>
    {
        foreach (var token in mk.Split())
        {
            context.Emit(token, 1);
        }
    };

    // Stopwatch is monotonic; DateTime.Now is wall-clock time and can jump
    // (DST, NTP adjustments), so it is unreliable for measuring durations.
    Console.WriteLine("Parallel Started!");
    var timer = System.Diagnostics.Stopwatch.StartNew();
    LineReader reader = new LineReader(all);
    var smr = new SimpleMapReducer<string, string, string, int, string, int>(reader, map);
    smr.Run();
    Console.WriteLine("Parallel: " + timer.Elapsed);

    Console.WriteLine("Serial Started!");
    timer.Restart();
    reader = new LineReader(all);
    smr = new SimpleMapReducer<string, string, string, int, string, int>(reader, map);
    smr.Run(1);
    Console.WriteLine("Serial: " + timer.Elapsed);
}
/// <summary>
/// Entry point: runs the recipe file given as the first argument, with macros
/// taken from the environment and then overridden by key=value arguments.
/// Prints a usage message on any failure other than the recipe engine's own
/// ApplicationException abort signal.
/// </summary>
static void Main(string[] args)
{
    try
    {
        string recipePath = args[0];

        // Macro table: environment variables first, command-line overrides second.
        var macros = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        IDictionary env = Environment.GetEnvironmentVariables();
        foreach (string key in env.Keys)
        {
            // Indexer instead of Add: on case-sensitive platforms two variables
            // can differ only by case, and Add would throw on the duplicate key.
            macros[key] = (string)env[key];
        }

        // Parse "key=value" command line arguments into the macro table.
        for (int i = 1; i < args.Length; i++)
        {
            string[] nv = args[i].Split('=');
            if (nv.Length != 2)
            {
                // ArgumentException (not bare Exception) for a malformed argument;
                // still lands in the catch-all below and prints usage.
                throw new ArgumentException("Invalid argument: " + args[i]);
            }
            macros[nv[0]] = nv[1];
        }

        var program = new Recipe();
        program.Write = true;
        program.SetRootPath(recipePath);
        program.ClearHandlers();
        program.Message += PrintLine;

        using (var sr = new LineReader(File.OpenText(recipePath), name: recipePath))
        {
            program.Run(sr, macros, string.Empty);
        }
    }
    catch (ApplicationException)
    {
        // Deliberate abort signalled by the recipe engine: exit quietly.
    }
    catch (Exception ex)
    {
        Console.WriteLine("Usage: recipe.exe recipe-file key1=val1 key2=val2...");
        Console.WriteLine(ex.Message);
    }
}
/// <summary>
/// Parses a blueprint from the specified stream, registering all EntityBlueprints contained by the Stream (and replacing any existing data for them).
/// The stream is not closed upon completion, and may have multiple EntityBlueprints defined within it.
/// </summary>
/// <param name="Contents">Full blueprint text; split into lines on CR and LF.</param>
public static void ParseBlueprint(string Contents)
{
    LineReader Reader = new LineReader(Contents.Split(new char[] { '\r', '\n' }));
    while (GetNextScope(Reader) != LineScope.EndOfFile)
    {
        // Parse the Entity header, such as name and what it inherits.
        BlueprintHeader Header = ParseHeader(Reader);
        // Parse all components, including properties.
        List<ComponentBlueprint> Components = new List<ComponentBlueprint>();
        while (GetNextScope(Reader) == LineScope.Component)
        {
            var CurrentComponent = ParseComponentHeader(Reader);
            // Collect every property scoped under this component.
            List<ComponentProperty> Properties = new List<ComponentProperty>();
            while (GetNextScope(Reader) == LineScope.Property)
            {
                var Property = ParseProperty(CurrentComponent, Reader);
                Properties.Add(Property);
            }
            Type ComponentType = ResolveType(CurrentComponent.Type);
            var ComponentBlueprint = new ComponentBlueprint(ComponentType, CurrentComponent.Name, Properties);
            Components.Add(ComponentBlueprint);
        }
        ApplyInheritance(Header, Components);
        // CreateBlueprint registers the entity as a side effect; the local is unused.
        var Entity = EntityBlueprint.CreateBlueprint(Header.Name, Components);
    }
}
/// <summary>A row containing values between the commas is not empty.</summary>
public void IsEmptyRow_ContainsData_ReturnsFalse()
{
    var result = LineReader.IsEmptyRow("1,2,3,4");

    result.Should().BeFalse();
}
/// <summary>Reading from an empty source yields no line.</summary>
public void TestEmpty()
{
    var lineReader = new LineReader("");

    var line = lineReader.ReadLine();

    Assert.Null(line?.ToString());
}
/// <summary>
/// Handles an #EXT-X-STREAM-INF entry: parses its attribute list into an
/// M3UStreamInfo, reads the following playlist line as the stream URI
/// (resolving relative URIs against the configured or adapter-derived base),
/// and appends the stream to <paramref name="fileInfo"/>.
/// </summary>
protected override void Write(M3UFileInfo fileInfo, string value, LineReader reader)
{
    // Attribute list is "KEY=VALUE" pairs joined by the attribute separator.
    var attrs = value
        .Split(new[] { M3UAttributes.AttributeSeparator }, StringSplitOptions.RemoveEmptyEntries)
        .Select(e => KV.Parse(e, M3UAttributes.PairSeparator).Value);
    if (fileInfo.Streams == null && attrs.Any())
    {
        fileInfo.Streams = new List<M3UStreamInfo>();
    }
    var stream = new M3UStreamInfo();
    foreach (var attr in attrs)
    {
        switch (attr.Key)
        {
            case M3UAttributes.StreamInfAttributes.Bandwidth:
                stream.Bandwidth = To.Value<int>(attr.Value);
                break;
            case M3UAttributes.StreamInfAttributes.ProgramId:
                stream.ProgramId = To.Value<int>(attr.Value);
                break;
            case M3UAttributes.StreamInfAttributes.Codecs:
                stream.Codecs = attr.Value;
                break;
            case M3UAttributes.StreamInfAttributes.Resolution:
                stream.Resolution = attr.Value;
                break;
        }
    }
    // The URI for this variant stream lives on the next line of the playlist.
    if (!reader.MoveNext())
    {
        throw new InvalidDataException("Invalid M3U file. Missing a stream URI.");
    }
    var streamUri = new Uri(reader.Current.Trim(), UriKind.RelativeOrAbsolute);
    if (!streamUri.IsAbsoluteUri && !streamUri.IsWellFormedOriginalString())
    {
        throw new InvalidDataException("Invalid M3U file. Include a invalid stream URI.", innerException: new UriFormatException(reader.Current));
    }
    if (!streamUri.IsAbsoluteUri)
    {
        // Resolve a relative URI against the configured base URI, or fall back
        // to the scheme/server/user-info of the network adapter that supplied
        // the playlist.
        var baseUri = Configuration.Default.BaseUri;
        if (baseUri == null && reader.Adapter is NetworkAdapter adapter)
        {
            var uri = adapter.Uri;
            var components = UriComponents.SchemeAndServer | UriComponents.UserInfo;
            baseUri = new Uri(uri.GetComponents(components, UriFormat.SafeUnescaped), UriKind.Absolute);
        }
        if (baseUri != null)
        {
            stream.Uri = new Uri(baseUri, streamUri);
        }
    }
    if (stream.Uri == null)
    {
        stream.Uri = streamUri;
    }
    fileInfo.Streams.Add(stream);
}
/// <summary>Opens the source file and parses it into lines plus the detected newline style.</summary>
static (List<Line> lines, string newLine) ReadLines(string sourceFile)
{
    using var reader = File.OpenText(sourceFile);
    var parsed = LineReader.ReadAllLines(reader, sourceFile);
    return parsed;
}
/// <summary>
/// Creates a RankingTSVFile over the given reader, additionally controlling
/// whether meta columns are kept when reading.
/// </summary>
/// <param name="lineReader">Source of the TSV data.</param>
/// <param name="keepMeta">True to retain meta columns.</param>
protected RankingTSVFile(LineReader lineReader, bool keepMeta) : this(lineReader)
{
    this.keepMeta = keepMeta;
}
/// <summary>
/// Creates a RankingTSVFile over the given reader: reads the tab-separated
/// header row to learn the column names, closes the reader, and caches the
/// names of all feature columns.
/// </summary>
/// <param name="lineReader">Source of the TSV data; closed after the header is read.</param>
protected RankingTSVFile(LineReader lineReader)
{
    this.lineReader = lineReader;
    columnNames = null;
    // Well-known column indices start unresolved.
    groupCol = -1;
    labelCol = -1;
    filterCol = -1;
    dateStrCol = -1;
    queryIdCol = -1;
    queryCol = -1;
    docIdCol = -1;
    urlStrCol = -1;
    // Search has informed us that the .tsv files are encoded in UTF8
    string line = this.lineReader.Headers;
    this.lineReader.Close();
    columnNames = line.Split('\t');
    // Typed List<string> replaces the legacy non-generic ArrayList and its
    // ToArray(typeof(string)) cast dance.
    var ftrColNames = new System.Collections.Generic.List<string>();
    for (int i = 0; i < columnNames.Length; ++i)
    {
        if (IsFeatureColumn(i))
        {
            ftrColNames.Add(columnNames[i]);
        }
    }
    featureColumnNames = ftrColNames.ToArray();
}
/// <summary>
/// Handles an #EXT-X-STREAM-INF entry: parses its comma-separated attribute
/// list into an M3UStreamInfo, reads the following playlist line as the stream
/// URI (resolving relative URIs against the configured or adapter-derived
/// base), and appends the stream to <paramref name="fileInfo"/>.
/// </summary>
protected override void Write(M3UFileInfo fileInfo, string value, LineReader reader)
{
    var source = value.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries)
                      .Select(e => KV.Parse(e, '=').Value);
    if (fileInfo.Streams == null && source.Any())
    {
        fileInfo.Streams = new List<M3UStreamInfo>();
    }
    var m3UstreamInfo = new M3UStreamInfo();
    // switch replaces the decompiler-style nested "if (key != ...)" chain;
    // unknown attributes are ignored, as before.
    foreach (var keyValuePair in source)
    {
        switch (keyValuePair.Key)
        {
            case "BANDWIDTH":
                m3UstreamInfo.Bandwidth = To.Value<int>(keyValuePair.Value);
                break;
            case "PROGRAM-ID":
                m3UstreamInfo.ProgramId = To.Value<int>(keyValuePair.Value);
                break;
            case "CODECS":
                m3UstreamInfo.Codecs = keyValuePair.Value;
                break;
            case "RESOLUTION":
                m3UstreamInfo.Resolution = keyValuePair.Value;
                break;
        }
    }
    // The URI for this variant stream lives on the next line of the playlist.
    if (!reader.MoveNext())
    {
        throw new InvalidDataException("Invalid M3U file. Missing a stream URI.");
    }
    var relativeUri = new Uri(reader.Current.Trim(), UriKind.RelativeOrAbsolute);
    if (!relativeUri.IsAbsoluteUri && !relativeUri.IsWellFormedOriginalString())
    {
        throw new InvalidDataException("Invalid M3U file. Include a invalid stream URI.", new UriFormatException(reader.Current));
    }
    if (!relativeUri.IsAbsoluteUri)
    {
        // Resolve a relative URI against the configured base URI, or fall back
        // to the scheme/server/user-info of the adapter that supplied the playlist.
        var baseUri = Configuration.Default.BaseUri;
        if (baseUri == null && reader.Adapter is NetworkAdapter adapter)
        {
            baseUri = new Uri(
                adapter.Uri.GetComponents(UriComponents.SchemeAndServer | UriComponents.UserInfo, UriFormat.SafeUnescaped),
                UriKind.Absolute);
        }
        if (baseUri != null)
        {
            m3UstreamInfo.Uri = new Uri(baseUri, relativeUri);
        }
    }
    if (m3UstreamInfo.Uri == null)
    {
        m3UstreamInfo.Uri = relativeUri;
    }
    fileInfo.Streams?.Add(m3UstreamInfo);
}
/// <summary>Parses an #EXT-X-MEDIA-SEQUENCE value into the file info.</summary>
protected override void Write(M3UFileInfo fileInfo, string value, LineReader reader) =>
    fileInfo.MediaSequence = To.Value<int>(value);
/// <summary>
/// Reads a property list (key and element pairs) from the input
/// character stream in a simple line-oriented format.
/// </summary>
/// <param name="inStream">Stream containing the property data.</param>
public void Load(FileStream inStream)
{
    var reader = new LineReader(new StreamReader(inStream));
    Load(reader);
}
/// <summary>
/// Batch document pipeline: renders the HTML template to a PDF via
/// wkhtmltopdf (DinkToPdf), then reads the transactions file, groups the
/// transactions by account and archives them as JSON into per-account temp
/// files, and finally assembles a ClientData object from the users file and
/// generates its statement document.
/// </summary>
static void Main(string[] args)
{
    try
    {
        // Render the same HTML template four times into one PDF document.
        List<string> MappedContents = new List<string>();
        for (int i = 0; i <= 3; i++)
        {
            MappedContents.Add(File.ReadAllText(@"C:\Users\jsanchez\source\repos\DocProcessing\DocProcessing\OhTemplatesV3\OhTemplates\index.html"));
        };
        var pdfconverter = new BasicConverter(new PdfTools());
        var doc = new HtmlToPdfDocument()
        {
            GlobalSettings = {
                ColorMode = ColorMode.Color,
                Orientation = Orientation.Portrait,
                PaperSize = PaperKind.LetterPlus,
                Margins = { Top = 0, Bottom = 0, Left = 0, Right = 0 }
            }
        };
        foreach (string Mappedcontent in MappedContents)
        {
            string MapPage = Mappedcontent;
            var page = new ObjectSettings()
            {
                PagesCount = true,
                HtmlContent = MapPage,
                WebSettings = { DefaultEncoding = "utf-8" },
                HeaderSettings = { FontSize = 7, Right = "Page [page] of [toPage]", Line = false, Spacing = 0 }
            };
            doc.Objects.Add(page);
        }
        byte[] EeCcdocument = pdfconverter.Convert(doc);
        File.WriteAllBytes(@"C:\Users\jsanchez\source\repos\DocProcessing\DocProcessing\pruebaBG2.pdf", EeCcdocument);

        // Spreadsheet layout constants and the key/mail field names used to
        // join users, savings and transactions.
        int startRow = 3;
        int startColumn = 1;
        int UserSheet = 1;
        int TransactionSheet = 2;
        string userkeyField = "NumeroCuenta";
        string TransacctionkeyField = "NúmeroDeCuenta";
        string SavingsKeyColumn = "NUM_CUENTA_PMCP";
        string mailField = "CorreoElectrónico";
        char del = '|';
        StreamReader transactionStreamReader = new StreamReader(transactionsPath);
        //string html = File.ReadAllText(Path.Combine(Environment.CurrentDirectory + "\\mappedTemplates", "pageTemplatemapped.html"));
        int columnNameIndex = 0;
        //var pdfconverter = new BasicConverter(new PdfTools());
        //var doc = new HtmlToPdfDocument()
        //{
        //    GlobalSettings = {
        //        ColorMode = ColorMode.Color,
        //        Orientation = Orientation.Portrait,
        //        PaperSize = PaperKind.LetterPlus,
        //        Margins = { Top = 0, Bottom = 0, Left = 0, Right = 0 }
        //    }
        //};
        //var page = new ObjectSettings()
        //{
        //    PagesCount = true,
        //    HtmlContent = html,
        //    WebSettings = { DefaultEncoding = "utf-8" },
        //    HeaderSettings = { FontSize = 7, Right = "Page [page] of [toPage]", Line = false, Spacing = 0 }
        //};
        //doc.Objects.Add(page);
        //var pdfdoc = pdfconverter.Convert(doc);
        //File.WriteAllBytes(@"C:\Users\jsanchez\source\repos\DocProcessing\DocProcessing\prueba2.pdf", pdfdoc);

        // Obtain the layout schema for each record type from the outline workbook.
        LineReader userlineReader = new LineReader(GetStringOutlineFromXlxs(OutLinePath, startRow, startColumn, UserSheet));
        LineReader transactionlineReader = new LineReader(GetStringOutlineFromXlxs(OutLinePath, startRow, startColumn, TransactionSheet));
        CsvReader savingDataReader = GetCsvReader(savingsPath, columnNameIndex, del);
        List<ClientData> clients = new List<ClientData>();
        List<SavingsData> savingsData = new List<SavingsData>();
        List<TransactionData> transactions = new List<TransactionData>();
        string transactionLine;
        string line = "";
        StreamReader TransactionReader = new StreamReader(transactionsPath);
        Directory.CreateDirectory(tempfilespath);
        int currentGroup = 0;

        // Read the transactions and archive each transaction object (as JSON)
        // into a per-account temporary file. The switch assigns a display
        // group number based on the authorization description; the gaps left
        // by the extra increments separate the statement sections.
        using (StreamReader sr = new StreamReader(transactionsPath))
        {
            while ((line = sr.ReadLine()) != null)
            {
                TransactionData transaction = new TransactionData() { info = transactionlineReader.readLine(line) };
                transaction.AssociateAcount = transaction.info[TransacctionkeyField].ToString();
                switch (transaction.info["Descripcióndelaautorización"].ToString())
                {
                    case "SALDO MES ANTERIOR":
                        currentGroup++;
                        transaction.Group = currentGroup;
                        currentGroup++;
                        break;
                    case "CUOTAS DEL MES (VER DETALLE)":
                        currentGroup++;
                        transaction.Group = currentGroup;
                        currentGroup++;
                        break;
                    case "SEGURO DE DESGRAVAMEN":
                        transaction.Group = currentGroup;
                        currentGroup = currentGroup + 2;
                        break;
                    case "CUOTAS ADELANTADAS":
                        transaction.Group = currentGroup;
                        currentGroup++;
                        break;
                    case "TOTAL PAGO DE CUOTAS DEL MES":
                        transaction.Group = currentGroup;
                        currentGroup++;
                        break;
                    default:
                        transaction.Group = currentGroup;
                        break;
                }
                string serialized = JsonConvert.SerializeObject(transaction);
                Console.WriteLine(serialized);
                string subfolderPath = Path.Combine(tempfilespath, transaction.AssociateAcount);
                Directory.CreateDirectory(subfolderPath);
                string totalpath = Path.Combine(subfolderPath, transaction.AssociateAcount + "_transactions.txt");
                File.AppendAllText(totalpath, serialized + Environment.NewLine);
            }
        }

        // Assemble the ClientData object from the first line of the users file
        // and generate its statement.
        // NOTE(review): only one line is read here — presumably the users file
        // holds a single record per run; confirm against the caller.
        using (StreamReader sr = new StreamReader(usersPath))
        {
            line = sr.ReadLine();
            ClientData client = new ClientData()
            {
                info = userlineReader.readLine(line),
            };
            string clientsr = client.ToString();
            client.AccountNumber = client.info[userkeyField].ToString();
            client.Email = client.info[mailField].ToString();
            client.Transactions = getTransactions(client.AccountNumber);
            GenerateEECCForClient(client);
        }
        //using (DisposableCsvReader reader = new DisposableCsvReader(savingsPath,'|', 0)) {
        //    while (!(reader.EndReached)) {
        //        Dictionary<string, object> information = reader.readLine();
        //        if (information != null) {
        //            SavingsData svi= new SavingsData() { info = information};
        //            svi.CustomerAccount = svi.info[SavingsKeyColumn].ToString();
        //        }
        //    }
        //}
        //GenerateSavingsDocument(accountSavingInfo);
    }
    catch (Exception ex)
    {
        // NOTE(review): all failures are silently swallowed here — the caught
        // exception is not logged or rethrown, so any error in the pipeline is
        // invisible. Confirm this is intentional.
    }
}
/// <summary>SetFeedParameter should store both timing values on FeedParameter.</summary>
public void Test_SetFeedParameter()
{
    var sut = new LineReader(600, 480);
    var parameters = new LineReader.FeedParameters(1, 2);

    sut.SetFeedParameter(parameters);

    Assert.AreEqual(1, sut.FeedParameter.TimeAfterOneCharacter);
    Assert.AreEqual(2, sut.FeedParameter.TimeAfterOneSentense);
}
/// <summary>Loads the day-six puzzle input from its resource file.</summary>
public DaySixSolver()
{
    _input = new LineReader().ReadLine("Solutions/DaySix/DaySixInput.txt");
}
// Wires up the parsing pipeline: a LineReader over the raw text feeding a
// BlockProcessor that writes parsed blocks into the shared document field.
private Init(string text)
{
    lineReader = new LineReader(text);
    blockProcessor = new BlockProcessor(document);
}
/// <summary>
/// Streams separator-delimited lines from the stream and wraps them in a
/// PersonLayoutEnumerable for typed enumeration.
/// </summary>
private static PersonLayoutEnumerable Enumerate(Stream stream, Encoding encoding, byte[] workingBuffer, byte[] separator, int maxWorkingMemoryBytes = 0, IMetricsHost metrics = null, CancellationToken cancellationToken = default)
{
    var lines = LineReader.StreamLines(
        stream, encoding, workingBuffer, separator,
        maxWorkingMemoryBytes, metrics, cancellationToken);
    return new PersonLayoutEnumerable(lines, encoding, separator);
}
// Imports the posted line into the repository, then echoes back the most
// recently stored person.
// NOTE(review): ReadPeople().Last() throws if the repository is empty —
// presumably the import above always adds at least one person; confirm.
public Person Post([FromBody] string lineValue)
{
    LineReader.ImportFileIntoRepository(new[] { lineValue }, PeopleRepository);
    return(PeopleRepository.ReadPeople().Last());
}
/// <summary>
/// Extracts hyperlinks (anchor "href" values) from the downloaded page in
/// response.TempFile and appends each unique link to the supplied output
/// stream, after first checking that the file actually looks like HTML.
/// </summary>
/// <param name="fs">Output stream the discovered links are written to.</param>
/// <param name="response">Response whose temp file is scanned.</param>
private void ParseLinks(FileStream fs, Response response)
{
    try
    {
        if (File.Exists(response.TempFile) == false)
        {
            return;
        }
        // Lets ensure that the first non-blank line has a HTML header!
        using (FileStream temp = System.IO.File.OpenRead(response.TempFile))
        {
            using (LineReader lr = new LineReader(temp, 4096, Encoding.Default))
            {
                bool process = true;
                while (process == true)
                {
                    string line = lr.ReadLine();
                    if (line == null)
                    {
                        // Reached end of file without any non-blank content.
                        return;
                    }
                    line = line.Trim();
                    if (line.Length == 0)
                    {
                        continue;
                    }
                    // First non-blank line must contain <html> or a doctype.
                    bool validHtml = true;
                    if (line.IndexOf("<html>", StringComparison.InvariantCultureIgnoreCase) == -1)
                    {
                        if (line.IndexOf("<!doctype html", StringComparison.InvariantCultureIgnoreCase) == -1)
                        {
                            validHtml = false;
                        }
                    }
                    if (validHtml == true)
                    {
                        break;
                    }
                    else
                    {
                        return;
                    }
                }
            }
        }
        using (TextReader tr = File.OpenText(response.TempFile))
        {
            SimpleHtmlParser parser = new SimpleHtmlParser();
            var doc = parser.Parse(tr);
            // Links are de-duplicated via the MD5 hash of the href text.
            List<string> links = new List<string>();
            foreach (System.Xml.XmlElement link in doc.GetElementsByTagName("a"))
            {
                if (link.Attributes == null)
                {
                    continue;
                }
                if (link.Attributes["href"] == null)
                {
                    continue;
                }
                var href = link.Attributes["href"].Value.Trim();
                string md5 = Text.ConvertByteArrayToHexString(Security.GenerateMd5Hash(href));
                // 6666cd76... is the MD5 of "/", i.e. a bare root link.
                if (md5.ToLower() == "6666cd76f96956469e7be39d750cc7d9")
                {
                    // Ignore "/"
                    continue;
                }
                if (links.Contains(md5) == false)
                {
                    links.Add(md5);
                    woanware.IO.WriteToFileStream(fs, "LINK: " + href + Environment.NewLine);
                }
            }
        }
    }
    catch (Exception)
    {
        // NOTE(review): any parse/IO failure is silently ignored — link
        // extraction is treated as best-effort.
    }
}
/// <summary>
/// Parses key/value pairs from the line reader in java.util.Properties
/// format: the key ends at the first unescaped '=', ':' or whitespace, the
/// value follows after optional whitespace and at most one separator, and
/// escape sequences are decoded by LoadConvert. Each pair is stored into this
/// collection via the indexer. (Direct port of the JDK Properties load loop.)
/// </summary>
/// <param name="lr">Reader whose lineBuf holds the current logical line.</param>
private void LoadInternal(LineReader lr)
{
    char[] convtBuf = new char[1024];
    int limit;
    int keyLen;
    int valueStart;
    char c;
    bool hasSep;
    bool precedingBackslash;
    // readLine() returns the logical line length, or a negative value at EOF.
    while ((limit = lr.readLine()) >= 0)
    {
        c = '\0';
        keyLen = 0;
        valueStart = limit;
        hasSep = false;
        //System.out.println("line=<" + new String(lineBuf, 0, limit) + ">");
        precedingBackslash = false;
        // Scan for the end of the key: the first unescaped separator.
        while (keyLen < limit)
        {
            c = lr.lineBuf[keyLen];
            //need check if escaped.
            if ((c == '=' || c == ':') && !precedingBackslash)
            {
                valueStart = keyLen + 1;
                hasSep = true;
                break;
            }
            else if ((c == ' ' || c == '\t' || c == '\f') && !precedingBackslash)
            {
                valueStart = keyLen + 1;
                break;
            }
            if (c == '\\')
            {
                precedingBackslash = !precedingBackslash;
            }
            else
            {
                precedingBackslash = false;
            }
            keyLen++;
        }
        // Skip whitespace (and at most one '=' / ':') preceding the value.
        while (valueStart < limit)
        {
            c = lr.lineBuf[valueStart];
            if (c != ' ' && c != '\t' && c != '\f')
            {
                if (!hasSep && (c == '=' || c == ':'))
                {
                    hasSep = true;
                }
                else
                {
                    break;
                }
            }
            valueStart++;
        }
        String key = LoadConvert(lr.lineBuf, 0, keyLen, convtBuf);
        String value = LoadConvert(lr.lineBuf, valueStart, limit - valueStart, convtBuf);
        this[key] = value;
    }
}
/// <summary>
/// Coroutine that loads and parses a heap diff file, yielding futures/Yield
/// markers so the caller's scheduler can interleave IO, decode and UI message
/// pumping. Produces a HeapDiff wrapped in a Result as the final yield.
/// </summary>
/// <param name="filename">Path of the diff file to load.</param>
/// <param name="progress">Receives status text and progress updates.</param>
public static IEnumerator<object> FromFile(string filename, IProgressListener progress)
{
    progress.Status = "Loading diff...";
    Future<string> fText;
    // We could stream the lines in from the IO thread while we parse them, but this
    // part of the load is usually pretty quick even on a regular hard disk, and
    // loading the whole diff at once eliminates some context switches
    using (var fda = new FileDataAdapter(
        filename, FileMode.Open,
        FileAccess.Read, FileShare.Read, 1024 * 128
    ))
    {
        var fBytes = fda.ReadToEnd();
        yield return fBytes;
        // Decode off-thread; the diff is ASCII.
        fText = Future.RunInThread(
            () => Encoding.ASCII.GetString(fBytes.Result)
        );
        yield return fText;
    }
    yield return fText;
    var lr = new LineReader(fText.Result);
    LineReader.Line line;
    progress.Status = "Parsing diff...";
    var frames = new List<TracebackFrame>();
    var moduleNames = new NameTable(StringComparer.Ordinal);
    var symbolTypes = new NameTable(StringComparer.Ordinal);
    var functionNames = new NameTable(StringComparer.Ordinal);
    var deltas = new List<DeltaInfo>();
    var tracebacks = new Dictionary<UInt32, TracebackInfo>();
    var regexes = new Regexes();
    // Regex.Groups[string] does an inefficient lookup, so we do that lookup once here
    int groupModule = regexes.DiffModule.GroupNumberFromName("module");
    int groupSymbolType = regexes.DiffModule.GroupNumberFromName("symbol_type");
    int groupTraceId = regexes.BytesDelta.GroupNumberFromName("trace_id");
    int groupType = regexes.BytesDelta.GroupNumberFromName("type");
    int groupDeltaBytes = regexes.BytesDelta.GroupNumberFromName("delta_bytes");
    int groupNewBytes = regexes.BytesDelta.GroupNumberFromName("new_bytes");
    int groupOldBytes = regexes.BytesDelta.GroupNumberFromName("old_bytes");
    int groupNewCount = regexes.BytesDelta.GroupNumberFromName("new_count");
    int groupOldCount = regexes.CountDelta.GroupNumberFromName("old_count");
    int groupCountDelta = regexes.CountDelta.GroupNumberFromName("delta_count");
    int groupTracebackModule = regexes.TracebackFrame.GroupNumberFromName("module");
    int groupTracebackFunction = regexes.TracebackFrame.GroupNumberFromName("function");
    int groupTracebackOffset = regexes.TracebackFrame.GroupNumberFromName("offset");
    int groupTracebackOffset2 = regexes.TracebackFrame.GroupNumberFromName("offset2");
    int groupTracebackPath = regexes.TracebackFrame.GroupNumberFromName("path");
    int groupTracebackLine = regexes.TracebackFrame.GroupNumberFromName("line");
    // NOTE(review): `i` is declared for progress throttling but never
    // incremented in the visible code, so `i % ProgressInterval == 0` is
    // always true and a Yield happens on every line — confirm intended.
    int i = 0;
    while (lr.ReadLine(out line))
    {
        if (i % ProgressInterval == 0)
        {
            progress.Maximum = lr.Length;
            progress.Progress = lr.Position;
            // Suspend processing until any messages in the windows message queue have been processed
            yield return new Yield();
        }
    retryFromHere:
        Match m;
        if (regexes.DiffModule.TryMatch(ref line, out m))
        {
            moduleNames.Add(m.Groups[groupModule].Value);
        }
        else if (regexes.BytesDelta.TryMatch(ref line, out m))
        {
            // A "bytes delta" record: header line, optional count-delta line,
            // then the traceback frames until a blank line or new record.
            var traceId = UInt32.Parse(m.Groups[groupTraceId].Value, NumberStyles.HexNumber);
            var info = new DeltaInfo
            {
                Added = (m.Groups[groupType].Value == "+"),
                BytesDelta = int.Parse(m.Groups[groupDeltaBytes].Value, NumberStyles.HexNumber),
                NewBytes = int.Parse(m.Groups[groupNewBytes].Value, NumberStyles.HexNumber),
                OldBytes = int.Parse(m.Groups[groupOldBytes].Value, NumberStyles.HexNumber),
                NewCount = int.Parse(m.Groups[groupNewCount].Value, NumberStyles.HexNumber),
            };
            if (lr.ReadLine(out line))
            {
                if (regexes.CountDelta.TryMatch(ref line, out m))
                {
                    info.OldCount = int.Parse(m.Groups[groupOldCount].Value, NumberStyles.HexNumber);
                    info.CountDelta = int.Parse(m.Groups[groupCountDelta].Value, NumberStyles.HexNumber);
                }
            }
            bool readingLeadingWhitespace = true, doRetry = false;
            frames.Clear();
            var itemModules = new NameTable(StringComparer.Ordinal);
            var itemFunctions = new NameTable(StringComparer.Ordinal);
            while (lr.ReadLine(out line))
            {
                if (line.ToString().Trim().Length == 0)
                {
                    // Blank lines before the first frame are skipped; a blank
                    // line after frames terminates the traceback.
                    if (readingLeadingWhitespace)
                        continue;
                    else
                        break;
                }
                else if (regexes.TracebackFrame.TryMatch(ref line, out m))
                {
                    readingLeadingWhitespace = false;
                    var moduleName = moduleNames[m.Groups[groupTracebackModule].Value];
                    itemModules.Add(moduleName);
                    var functionName = functionNames[m.Groups[groupTracebackFunction].Value];
                    itemFunctions.Add(functionName);
                    var frame = new TracebackFrame
                    {
                        Module = moduleName,
                        Function = functionName,
                        Offset = UInt32.Parse(m.Groups[groupTracebackOffset].Value, NumberStyles.HexNumber)
                    };
                    if (m.Groups[groupTracebackOffset2].Success)
                        frame.Offset2 = UInt32.Parse(m.Groups[groupTracebackOffset2].Value, NumberStyles.HexNumber);
                    if (m.Groups[groupTracebackPath].Success)
                        frame.SourceFile = m.Groups[groupTracebackPath].Value;
                    if (m.Groups[groupTracebackLine].Success)
                        frame.SourceLine = int.Parse(m.Groups[groupTracebackLine].Value);
                    frames.Add(frame);
                }
                else
                {
                    // We hit the beginning of a new allocation, so make sure it gets parsed
                    doRetry = true;
                    break;
                }
            }
            if (tracebacks.ContainsKey(traceId))
            {
                info.Traceback = tracebacks[traceId];
                Console.WriteLine("Duplicate traceback for id {0}!", traceId);
            }
            else
            {
                var frameArray = ImmutableArrayPool<TracebackFrame>.Allocate(frames.Count);
                frames.CopyTo(frameArray.Array, frameArray.Offset);
                info.Traceback = tracebacks[traceId] = new TracebackInfo
                {
                    TraceId = traceId,
                    Frames = frameArray,
                    Modules = itemModules,
                    Functions = itemFunctions
                };
            }
            deltas.Add(info);
            if (doRetry)
                goto retryFromHere;
        }
        else if (line.StartsWith("//"))
        {
            // Comment, ignore it
        }
        else if (line.StartsWith("Total increase") || line.StartsWith("Total decrease"))
        {
            // Ignore this too
        }
        else if (line.StartsWith(" ") && (line.EndsWith(".pdb")))
        {
            // Symbol path for a module, ignore it
        }
        else
        {
            Console.WriteLine("Unrecognized diff content: {0}", line.ToString());
        }
    }
    var result = new HeapDiff(
        filename, moduleNames, functionNames, deltas, tracebacks
    );
    yield return new Result(result);
}
/// <summary>
/// For each test case read from stdin, finds the length of the shortest
/// prefix whose repetition best covers the string, writing one result per
/// line to stdout.
/// </summary>
public static void Solve(Stream stdin, Stream stdout)
{
    IScanner scanner = new OptimizedPositiveIntReader(stdin);
    // uncomment when you need more advanced reader
    // scanner = new Scanner(stdin);
    // NOTE(review): this immediately replaces the reader constructed above, so
    // the OptimizedPositiveIntReader instance is never used — confirm intended.
    scanner = new LineReader(stdin);
    var writer = new BufferedStdoutWriter(stdout);
    var testCases = scanner.NextInt();
    for (int i = 0; i < testCases; i++)
    {
        var line = scanner.Next();
        var result = 0;
        int stringNotCovered = line.Length;
        for (int j = 1; j < line.Length; j++)
        {
            // Does the prefix of length j repeat immediately after itself?
            if (line.Substring(0, j).StartsWith(line.Substring(j, Math.Min(line.Length - j, j))))
            {
                // if (result == 0)
                // {
                //     result = j;
                //     break;
                // }
                if (result == 0)
                {
                    // First candidate: walk forward to see how much of the
                    // string this period actually covers.
                    result = j;
                    int k = j;
                    while (k + result < line.Length && line.Substring(k, result) == line.Substring(0, j))
                    {
                        k += result;
                    }
                    stringNotCovered = line.Length - k - j;
                }
                else if (result != 0 && j % result == 0)
                {
                    // ignore - we already handled it
                }
                else
                {
                    // we already have a solution, just check if it covers more ground
                    // this one doesn't repeat itself
                    if (stringNotCovered > 0 && 2 * j > line.Length - stringNotCovered)
                    {
                        result = j;
                        stringNotCovered = 0;
                        break;
                    }
                }
            }
        }
        writer.Write(result);
        writer.Write("\n");
    }
    writer.Flush();
}
/// <summary>
/// Wraps a FieldsEnumeratorClass over the reader and records the column
/// indices used for grouping, labels, queries, filtering and dates.
/// </summary>
/// <param name="lineReader">Source of the field data.</param>
/// <param name="filterValue">Value the filter column must match.</param>
public GroupFieldsEnumeratorClass(LineReader lineReader, int groupIdIndex, int labelIndex, int queryIndex, int filterIndex, string filterValue, int dateIndex)
{
    fieldsEnumerator = new FieldsEnumeratorClass(lineReader);
    this.groupIdIndex = groupIdIndex;
    //Todo(qiangwu): define a new interface IFilter which implement one method - IsAccept()
    //to encapsulate the details of the filtering function and decouple it from the enumerator
    this.labelIndex = labelIndex;
    this.filterIndex = filterIndex;
    this.filterValue = filterValue;
    this.dateIndex = dateIndex;
    this.queryIndex = queryIndex;
}
/// <summary>
/// Opens a log file: selects a format handler (prompting the user when more
/// than two candidates exist), builds the reader/parser pipeline for that
/// handler, loads the file via the LoadWindow, and hands the result to the
/// application view model. Errors are reported through ShowError.
/// </summary>
/// <param name="FileName">Path of the log file to open.</param>
private async Task Open(string FileName)
{
    FormatHandlerSelectionWindow window;
    LogFile logFile;
    ILogFileLoaderModule logFileLoaderModule;
    LoadWindow loadWindow;
    IColorProviderModule colorProviderModule;
    IInlineParserFactoryModule inlineParserBuilderModule;
    IStringReader stringReader;
    ILineReader lineReader;
    ILogReader logReader;
    LogParser logParser;
    IStringMatcherFactoryModule stringMatcherFactoryModule;
    FileStream stream;
    IProgressReporter progressReporter;
    FormatHandler[] formatHandlers;
    FormatHandler formatHandler;
    formatHandlers = formatHandlerLibraryModule.GetFormatHandlers(FileName).ToArray();
    if (formatHandlers.Length == 0)
    {
        ShowError("Cannot find any format handler");
        return;
    }
    else if (formatHandlers.Length < 3)
    {
        // One or two candidates: just take the first without prompting.
        formatHandler = formatHandlers[0];
    }
    else
    {
        // Three or more: let the user choose. The last handler is excluded
        // from the list (Take(Length - 1)).
        window = new FormatHandlerSelectionWindow() { Owner = this, FormatHandlers = formatHandlers.Take(formatHandlers.Length - 1) };
        // NOTE(review): `!window.ShowDialog() ?? false` binds as
        // `(!ShowDialog()) ?? false` via lifted negation — a null dialog result
        // falls through to the SelectedFormatHandler null-check below rather
        // than returning here. Net behavior is the same, but
        // `window.ShowDialog() != true` would state the intent directly.
        if (!window.ShowDialog() ?? false)
        {
            return;
        }
        if (window.SelectedFormatHandler == null)
        {
            return;
        }
        formatHandler = window.SelectedFormatHandler;
    }
    logFile = new LogFile(FileName, formatHandler);
    colorProviderModule = new ColorProviderModule(logger, logFile.FormatHandler.EventColoringRules);
    inlineParserBuilderModule = new InlineParserFactoryModule(logger, patternLibraryModule, inlineColoringRuleLibraryModule);
    stringMatcherFactoryModule = new StringMatcherFactoryModule(logger, patternLibraryModule);
    try
    {
        stream = new FileStream(FileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
    }
    catch (Exception ex)
    {
        ShowError(ex);
        return;
    }
    using (stream)
    {
        // Pipeline: raw stream -> string reader -> line reader (discarding
        // lines matching DiscardLinePatterns) -> log reader -> parser.
        progressReporter = new StreamProgressReporter(stream);
        stringReader = new LogInspect.BaseLib.Readers.StringReader(new StreamReader(stream));
        lineReader = new LineReader(stringReader, stringMatcherFactoryModule.CreateStringMatcher(logFile.FormatHandler.NameSpace, logFile.FormatHandler.DiscardLinePatterns));
        logReader = new LogReader(lineReader,
            stringMatcherFactoryModule.CreateStringMatcher(logFile.FormatHandler.NameSpace, logFile.FormatHandler.LogPrefixPatterns),
            stringMatcherFactoryModule.CreateStringMatcher(logFile.FormatHandler.NameSpace, logFile.FormatHandler.Rules.Where(item => item.Discard).Select(item => item.GetPattern()))
        );
        logParser = new LogParser(logFile.FormatHandler.Columns);
        logParser.Add(patternLibraryModule.Build(logFile.FormatHandler.NameSpace, logFile.FormatHandler.Rules.Where(item => !item.Discard).Select(item => item.GetPattern()), true));
        logFileLoaderModule = new LogFileLoaderModule(logger, logReader, logParser);
        //logFileLoaderModule = new InfiniteLogFileLoaderModule(logger);
        loadWindow = new LoadWindow(logFileLoaderModule, progressReporter, logFile);
        loadWindow.Owner = this;
        if (loadWindow.Load() ?? false)
        {
            try
            {
                await appViewModel.Open(logFile, inlineParserBuilderModule, colorProviderModule);
            }
            catch (Exception ex)
            {
                ShowError(ex);
            }
        }
    }
}
/// <summary>SetFeedMode should be reflected by the FeedMode property.</summary>
public void Test_SetFeedMode()
{
    var sut = new LineReader(600, 480);

    sut.SetFeedMode(FeedMode.Automatic);

    Assert.AreEqual(FeedMode.Automatic, sut.FeedMode);
}
/// <summary>
/// Uses a stack to create a properly tabbed xml string.
/// </summary>
/// <param name="value">xml string to format</param>
/// <returns>formatted xml string</returns>
private string StackedAddition(string value)
{
    stack.Clear();
    StringBuilder builder = new StringBuilder();
    LineReader lineReader = null;
    try
    {
        using (lineReader = new LineReader(value))
        {
            string line = string.Empty;
            // NOTE(review): the loop terminates when ReadLine() returns "";
            // a null return (end of input without a blank line) would throw
            // NullReferenceException, which the catch below swallows — confirm.
            while (!((line = lineReader.ReadLine()).CompareTo("") == 0))
            {
                if (IsComment(line))
                {
                    // Comments are emitted at the current depth without
                    // affecting the stack.
                    builder.AppendLine(Spacer(stack.Count) + line);
                }
                else
                {
                    string[] nodeData = line.Split(' ');
                    foreach (string s in nodeData)
                    {
                        // goto end acts as a "continue" for empty tokens.
                        if (s.CompareTo("") == 0)
                        {
                            goto end;
                        }
                        if (s[0].CompareTo('<') == 0)
                        {
                            if (s.Contains("/>"))
                            {
                                // Self-closing element: emit, depth unchanged.
                                builder.AppendLine(Spacer(stack.Count) + line);
                                break;
                            }
                            else if (!s.StartsWith("</"))
                            {
                                // Opening tag: emit then push its name.
                                builder.AppendLine(Spacer(stack.Count) + line);
                                stack.Add(s.TrimStart('<').TrimEnd('>'));
                            }
                        }
                        if (s[s.Length - 1].CompareTo('>') == 0)
                        {
                            //int count = GetStackCount(TrimClosingTags(s));
                            if (s.CompareTo("/>") == 0)
                            {
                                Pop();
                            }
                            else if (stack.Contains(TrimClosingTags(s)) && s.StartsWith("</"))
                            {
                                // Closing tag for an open element: pop first so
                                // the line is emitted at the outer depth.
                                Pop();
                                builder.AppendLine(Spacer(stack.Count) + line);
                            }
                        }
                    end:
                        ;
                    }
                }
            }
        }
    }
    catch (Exception e)
    {
        // NOTE(review): all exceptions (including the end-of-input case noted
        // above) are swallowed; whatever was built so far is returned.
    }
    return(builder.ToString());
}
// Reading from an empty string must yield a line whose Text is null.
public void TestEmpty()
{
    var emptyReader = new LineReader("");

    var firstLine = emptyReader.ReadLine();

    Assert.Null(firstLine.Text);
}
// Records the tag value as the playlist's version number.
protected override void Write(M3UFileInfo fileInfo, string value, LineReader reader)
{
    fileInfo.Version = To.Value <int>(value);
}
/// <summary>
/// Lazily parses a unified-diff fragment into <c>DiffChunk</c>s.
/// A line matching <c>ChunkHeaderRegex</c> starts a new chunk (and yields the
/// previous one); every other line inside a chunk becomes a <c>DiffLine</c>.
/// </summary>
/// <param name="diff">The diff text to parse; must not be null.</param>
/// <returns>The chunks found in the fragment, in order.</returns>
public static IEnumerable <DiffChunk> ParseFragment(string diff)
{
    Guard.ArgumentNotNull(diff, nameof(diff));

    var reader = new LineReader(diff);
    string line;
    DiffChunk chunk = null;
    // -1 marks "no chunk header seen yet"; old/new counters are seeded from the header.
    int diffLine = -1;
    int oldLine = -1;
    int newLine = -1;

    while ((line = reader.ReadLine()) != null)
    {
        var headerMatch = ChunkHeaderRegex.Match(line);
        if (headerMatch.Success)
        {
            // A new header terminates the chunk being built.
            if (chunk != null)
            {
                yield return(chunk);
            }

            if (diffLine == -1)
            {
                diffLine = 0;
            }

            chunk = new DiffChunk
            {
                OldLineNumber = oldLine = int.Parse(headerMatch.Groups[1].Value, CultureInfo.InvariantCulture),
                NewLineNumber = newLine = int.Parse(headerMatch.Groups[2].Value, CultureInfo.InvariantCulture),
                DiffLine = diffLine,
            };
        }
        else if (chunk != null)
        {
            var type = GetLineChange(line[0]);

            // This might contain info about previous line (e.g. "\ No newline at end of file").
            if (type != DiffChangeType.Control)
            {
                chunk.Lines.Add(new DiffLine
                {
                    Type = type,
                    // Added lines have no old-file number; deleted lines have no new-file number.
                    OldLineNumber = type != DiffChangeType.Add ? oldLine : -1,
                    NewLineNumber = type != DiffChangeType.Delete ? newLine : -1,
                    DiffLineNumber = diffLine,
                    Content = line,
                });

                // Embedded carriage returns count as extra lines for numbering.
                var lineCount = 1;
                lineCount += LineReader.CountCarriageReturns(line);

                switch (type)
                {
                    case DiffChangeType.None:
                        oldLine += lineCount;
                        newLine += lineCount;
                        break;
                    case DiffChangeType.Delete:
                        oldLine += lineCount;
                        break;
                    case DiffChangeType.Add:
                        newLine += lineCount;
                        break;
                }
            }
        }

        // Advance the overall diff-line counter once a header has been seen.
        if (diffLine != -1)
        {
            ++diffLine;
        }
    }

    // Yield the trailing chunk, if any.
    if (chunk != null)
    {
        yield return(chunk);
    }
}
// A row consisting solely of separators is treated as empty.
public void IsEmptyRow_ContainsOnlyCommas_ReturnsTrue()
{
    var result = LineReader.IsEmptyRow(",,,,,");

    result.Should().BeTrue();
}
// Records the tag value as the playlist's cache-permission flag.
protected override void Write(M3UFileInfo fileInfo, string value, LineReader reader) =>
    fileInfo.AllowCache = To.Value <bool>(value);
// A null row is treated as empty rather than throwing.
public void IsEmptyRow_NullString_ReturnsTrue()
{
    var result = LineReader.IsEmptyRow(null);

    result.Should().BeTrue();
}
// Benchmark: counts the lines in the test file selected by RowCount.
// Returns the number of lines LineReader found.
public long HQ_LineReader_CountLines()
{
    // BUG FIX: the original passed File.OpenRead(...) inline and never disposed
    // it, leaking a file handle per invocation; dispose the stream when done.
    using (var stream = File.OpenRead(_files[RowCount]))
    {
        return LineReader.CountLines(stream, Encoding.UTF8, null, null, CancellationToken.None);
    }
}
// Records the tag value as the playlist's program date/time stamp.
protected override void Write(M3UFileInfo fileInfo, string value, LineReader reader)
{
    fileInfo.ProgramDateTime = To.Value <DateTime>(value);
}
// Stores the raw tag value as the playlist type.
protected override void Write(M3UFileInfo fileInfo, string value, LineReader reader) =>
    fileInfo.PlaylistType = value;
// Captures the reader whose fields this enumerator will walk.
public FieldsEnumeratorClass(LineReader lineReader) => this.lineReader = lineReader;
// Prepares a generator that indexes a konversation log starting at the given
// byte offset; server and channel names are derived from the file name.
public SessionIndexableGenerator (KonversationQueryable queryable, string log_file, long offset)
{
    this.queryable = queryable;
    this.log_file = log_file;
    this.session_begin_offset = offset;
    this.prev_line_offset = offset;

    this.data_sb = new StringBuilder ();
    this.log_line_as_sb = null;
    this.reader = null;
    this.session_begin_time = DateTime.MinValue;
    this.speakers = new Dictionary<string, bool> (10); // rough default value

    ParseFilename (Path.GetFileName (log_file), out server_name, out speaking_to);
    // BUG FIX: the original format string used {1} twice ("server={1}, channel={1}"),
    // so speaking_to was passed but never printed; {2} now refers to it.
    Log.Debug ("Reading from konversation log {0} (server={1}, channel={2})", log_file, server_name, speaking_to);
}
// Loads the day-two puzzle input from disk at construction time.
public DayTwoSolver()
{
    _input = new LineReader().ReadLine("Solutions/DayTwo/DayTwoInput.txt");
}
// Opens a re-encoding line reader over the file, seeks to the start of the
// requested span, and remembers where reading should stop.
public LineTextReader (string path, long begin_offset, long end_offset)
{
    this.end_offset = end_offset;

    this.reader = new ReencodingLineReader (path);
    this.reader.Position = begin_offset;
}
// Deliberate no-op: this tag's value is not recorded on the file model.
// NOTE(review): confirm the tag really carries nothing the model needs.
protected override void Write(M3UFileInfo fileInfo, string value, LineReader reader) { }
// Builds the list of column indices whose headers are feature columns,
// i.e. every header that does not carry the "m:" (meta) prefix.
public FeatureEnumeratorClass(LineReader lineReader, string[] headers) : base(lineReader)
{
    ArrayList indices = new ArrayList(headers.Length);
    int column = 0;
    foreach (string header in headers)
    {
        if (!header.StartsWith("m:"))
        {
            indices.Add(column);
        }
        column++;
    }
    featureFields = (int[])indices.ToArray(typeof(int));
}
/// <summary>
/// Reads a property list (key and element pairs) from the input stream.
/// The stream is assumed to be using the ISO 8859-1 character encoding;
/// that is each byte is one Latin1 character. Characters not in Latin1,
/// and certain special characters, can be represented in keys and
/// elements using escape sequences.
///
/// See http://java.sun.com/j2se/1.5.0/docs/api/java/util/Properties.html#load(java.io.InputStream)
/// </summary>
/// <param name="inStream">The input stream to read properties from.</param>
public void Load(Stream inStream)
{
    // Scratch buffer reused by loadConvert for escape-sequence decoding.
    char[] convtBuf = new char[1024];
    LineReader lr = new LineReader(inStream);

    int limit;          // length of the current logical line
    int keyLen;         // index one past the last key character
    int valueStart;     // index of the first value character
    char c;
    bool hasSep;        // true once an explicit '=' or ':' separator was seen
    bool precedingBackslash;

    // ReadLine() returns the logical-line length, or a negative value at EOF.
    while ((limit = lr.ReadLine()) >= 0)
    {
        keyLen = 0;
        valueStart = limit;
        hasSep = false;
        precedingBackslash = false;

        // Phase 1: scan forward to find where the key ends.
        while (keyLen < limit)
        {
            c = lr.lineBuffer[keyLen];
            // need check if escaped.
            if ((c == '=' || c == ':') && !precedingBackslash)
            {
                // Unescaped separator terminates the key.
                valueStart = keyLen + 1;
                hasSep = true;
                break;
            }
            else if ((c == ' ' || c == '\t' || c == '\f') && !precedingBackslash)
            {
                // Unescaped whitespace also terminates the key; a separator
                // may still follow (handled in phase 2).
                valueStart = keyLen + 1;
                break;
            }
            if (c == '\\')
            {
                // Toggle: a backslash escapes the next char, but "\\" escapes itself.
                precedingBackslash = !precedingBackslash;
            }
            else
            {
                precedingBackslash = false;
            }
            keyLen++;
        }

        // Phase 2: skip whitespace (and at most one separator) before the value.
        while (valueStart < limit)
        {
            c = lr.lineBuffer[valueStart];
            if (c != ' ' && c != '\t' && c != '\f')
            {
                if (!hasSep && (c == '=' || c == ':'))
                {
                    // A separator after key-ending whitespace: consume it once.
                    hasSep = true;
                }
                else
                {
                    break;
                }
            }
            valueStart++;
        }

        // Decode escape sequences in both halves and store the pair.
        string key = loadConvert(lr.lineBuffer, 0, keyLen, convtBuf);
        string value = loadConvert(lr.lineBuffer, valueStart, limit - valueStart, convtBuf);
        SetProperty(key, value);
    }
}
// Parses the next line as "Name: Arguments" and builds a component property
// for the current component. Single() enforces that the argument text parses
// to exactly one argument.
private static ComponentProperty ParseProperty(ComponentHeader CurrentComponent, LineReader Reader)
{
    string Line = Reader.ReadLine().Trim();
    int IndexDD = Line.IndexOf(':');
    string PropertyName = Line.Substring(0, IndexDD).Trim();
    string Arguments = Line.Substring(IndexDD + 1).Trim();

    var ComponentArgs = ComponentArgument.Parse(Arguments);
    return new ComponentProperty(CurrentComponent.Name, PropertyName, ComponentArgs.Single());
    // Removed: a large block of commented-out legacy generator-based parsing
    // that dead-weighted the method; recover it from version control if needed.
}
// Note that we don't filter out rows with parsing issues since it's not acceptable to
// produce a different set of rows when subsetting columns. Any parsing errors need to be
// translated to NaN, not result in skipping the row. We should produce some diagnostics
// to alert the user to the issues.
//
// Builds a cursor over the loader's parsed rows. When cthd > 1 a ParallelState
// drives multi-threaded parsing; otherwise rows are parsed sequentially on demand.
private Cursor(TextLoader parent, ParseStats stats, bool[] active, LineReader reader, int srcNeeded, int cthd)
    : base(parent._host)
{
    // Contract checks: `active` (when present) maps one flag per bound column.
    Ch.Assert(active == null || active.Length == parent._bindings.Infos.Length);
    Ch.AssertValue(reader);
    Ch.AssertValue(stats);
    Ch.Assert(srcNeeded >= 0);
    Ch.Assert(cthd > 0);

    _total = -1;
    _batch = -1;
    _bindings = parent._bindings;
    _parser = parent._parser;
    _active = active;
    _reader = reader;
    _stats = stats;
    _srcNeeded = srcNeeded;

    ParallelState state = null;
    if (cthd > 1)
    {
        // Parallel path: the state allocates the row set itself (out _rows).
        state = new ParallelState(this, out _rows, cthd);
    }
    else
    {
        _rows = _parser.CreateRowSet(_stats, 1, _active);
    }

    try
    {
        // Wire one getter per active column; inactive columns stay null.
        _getters = new Delegate[_bindings.Infos.Length];
        for (int i = 0; i < _getters.Length; i++)
        {
            if (_active != null && !_active[i])
            {
                continue;
            }
            ColumnPipe v = _rows.Pipes[i];
            Ch.Assert(v != null);
            _getters[i] = v.GetGetter();
            Ch.Assert(_getters[i] != null);
        }

        if (state != null)
        {
            _ator = ParseParallel(state).GetEnumerator();
            // Ownership of `state` transfers to the enumerator; clear the local
            // so the finally block below does not dispose it.
            state = null;
        }
        else
        {
            _ator = ParseSequential().GetEnumerator();
        }
    }
    finally
    {
        // Only reached with a non-null state if getter wiring threw before handoff.
        if (state != null)
        {
            state.Dispose();
        }
    }
}
// Verifies TextOutputFormat with compression enabled: writes key/value pairs
// (including null and NullWritable placeholders) through a record writer,
// then reads the compressed output back with LineReader and compares it to
// the expected tab-separated text.
public virtual void TestCompress()
{
    JobConf job = new JobConf();
    job.Set(JobContext.TaskAttemptId, attempt);
    job.Set(FileOutputFormat.Compress, "true");
    FileOutputFormat.SetOutputPath(job, workDir.GetParent().GetParent());
    FileOutputFormat.SetWorkOutputPath(job, workDir);
    FileSystem fs = workDir.GetFileSystem(job);
    if (!fs.Mkdirs(workDir))
    {
        NUnit.Framework.Assert.Fail("Failed to create output directory");
    }
    string file = "test_compress.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.Null;
    TextOutputFormat <object, object> theOutputFormat = new TextOutputFormat <object, object >();
    RecordWriter <object, object> theRecordWriter = theOutputFormat.GetRecordWriter(localFs , job, file, reporter);
    Org.Apache.Hadoop.IO.Text key1 = new Org.Apache.Hadoop.IO.Text("key1");
    Org.Apache.Hadoop.IO.Text key2 = new Org.Apache.Hadoop.IO.Text("key2");
    Org.Apache.Hadoop.IO.Text val1 = new Org.Apache.Hadoop.IO.Text("val1");
    Org.Apache.Hadoop.IO.Text val2 = new Org.Apache.Hadoop.IO.Text("val2");
    NullWritable nullWritable = NullWritable.Get();
    try
    {
        // Exercise every combination of real/null/NullWritable key and value;
        // null and NullWritable entries are expected to be omitted from output.
        theRecordWriter.Write(key1, val1);
        theRecordWriter.Write(null, nullWritable);
        theRecordWriter.Write(null, val1);
        theRecordWriter.Write(nullWritable, val2);
        theRecordWriter.Write(key2, nullWritable);
        theRecordWriter.Write(key1, null);
        theRecordWriter.Write(null, null);
        theRecordWriter.Write(key2, val2);
    }
    finally
    {
        // Close flushes and finalizes the compressed file.
        theRecordWriter.Close(reporter);
    }
    // Build the expected text mirroring the writes above: full pairs are
    // tab-separated, lone keys/values appear alone on their line.
    StringBuilder expectedOutput = new StringBuilder();
    expectedOutput.Append(key1).Append("\t").Append(val1).Append("\n");
    expectedOutput.Append(val1).Append("\n");
    expectedOutput.Append(val2).Append("\n");
    expectedOutput.Append(key2).Append("\n");
    expectedOutput.Append(key1).Append("\n");
    expectedOutput.Append(key2).Append("\t").Append(val2).Append("\n");
    DefaultCodec codec = new DefaultCodec();
    codec.SetConf(job);
    // The codec appends its extension (e.g. ".deflate") to the file name.
    Path expectedFile = new Path(workDir, file + codec.GetDefaultExtension());
    FileInputStream istream = new FileInputStream(expectedFile.ToString());
    CompressionInputStream cistream = codec.CreateInputStream(istream);
    LineReader reader = new LineReader(cistream);
    // Decompress line by line and reassemble with "\n" for comparison.
    string output = string.Empty;
    Org.Apache.Hadoop.IO.Text @out = new Org.Apache.Hadoop.IO.Text();
    while (reader.ReadLine(@out) > 0)
    {
        output += @out;
        output += "\n";
    }
    reader.Close();
    NUnit.Framework.Assert.AreEqual(expectedOutput.ToString(), output);
}
// Determines the scope of the upcoming line from its leading tab count
// without consuming it; null means the reader is exhausted.
private static LineScope GetNextScope(LineReader Reader)
{
    string Line = Reader.PeekLine();
    if(Line == null)
        return LineScope.EndOfFile;

    // Count leading tabs manually; each tab is one scope level.
    int Depth = 0;
    while(Depth < Line.Length && Line[Depth] == '\t')
        Depth++;

    if(Depth > (int)LineScope.Property)
        throw new InvalidDataException("Found too many scopes for line '" + Line + "'.");
    return (LineScope)Depth;
}
// Loads the day-one puzzle input from disk at construction time.
public DayOneSolver()
{
    _input = new LineReader().ReadLine("Solutions/DayOne/DayOneInput.txt");
}
// Resets per-session accumulators and reports whether another session is
// available. Lazily opens the log reader on first call, resuming from
// session_begin_offset; returns false (and closes the reader) once the
// last read line was null.
public bool HasNextIndexable ()
{
    data_sb.Length = 0;
    session_num_lines = 0;
    speakers.Clear ();

    if (reader == null) {
        // Log files are in system encoding
        reader = new ReencodingLineReader (log_file, Encoding.Default);
        reader.Position = session_begin_offset;
        log_line_as_sb = reader.ReadLineAsStringBuilder ();
        //Log.Debug ("Read line from {0}:[{1}]", log_file, log_line_as_sb);
    }

    if (log_line_as_sb == null) {
        // End of file: release the reader and signal no more sessions.
        reader.Close ();
        return false;
    } else {
        // Update session_begin_offset
        // so the next session resumes where the previous line ended.
        session_begin_offset = prev_line_offset;
    }

    return true;
}
// Parses a component header line of the form "Type" or "Type (Name)".
// Without a rename clause the type doubles as the component's name.
private static ComponentHeader ParseComponentHeader(LineReader Reader)
{
    string HeaderLine = Reader.ReadLine().Trim();
    int SeparatorIndex = HeaderLine.IndexOfAny(new char[] { ' ', '\t' });

    if(SeparatorIndex == -1)
    {
        // No whitespace: the whole line is the type, reused as the name.
        string BareType = HeaderLine.Trim();
        return new ComponentHeader() { Name = BareType, Type = BareType };
    }

    string ComponentType = HeaderLine.Substring(0, SeparatorIndex).Trim();
    string Remainder = HeaderLine.Substring(SeparatorIndex).Trim();
    if(Remainder[0] != '(')
        throw new InvalidDataException("Expected first character past Component name to be ( for renaming.");
    int CloseIndex = Remainder.IndexOf(')');
    if(CloseIndex == -1)
        throw new InvalidDataException("Expected closing ) to match rename start for Component.");
    string ComponentName = Remainder.Substring(1, CloseIndex - 1).Trim();
    return new ComponentHeader() { Name = ComponentName, Type = ComponentType };
}
// Parses a blueprint header line of the form "Name" or "Name: Base1, Base2".
// Without a colon the blueprint inherits from nothing.
private static BlueprintHeader ParseHeader(LineReader Reader)
{
    var HeaderLine = Reader.ReadLine();
    var ColonIndex = HeaderLine.IndexOf(':');

    if(ColonIndex == -1)
        return new BlueprintHeader() { Name = HeaderLine.Trim(), Inherits = new string[0] };

    var BlueprintName = HeaderLine.Substring(0, ColonIndex).Trim();
    // Inherited names may be separated by commas, spaces, or tabs.
    var InheritList = HeaderLine.Substring(ColonIndex + 1).Trim()
        .Split(new char[] { ',', ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries)
        .Select(c => c.Trim())
        .ToArray();
    return new BlueprintHeader() { Name = BlueprintName, Inherits = InheritList };
}
// Stores the reader and the column headers needed to enumerate features later.
public FeatureEnumerableClass(LineReader lineReader, string[] headers)
{
    this.headers = headers;
    this.lineReader = lineReader;
}