/// <exception cref="System.Exception"/>
public virtual void TestNewLines()
{
    LineReader @in = null;
    try
    {
        @in = MakeStream("a\nbb\n\nccc\rdddd\r\neeeee");
        Text @out = new Text();
        @in.ReadLine(@out);
        NUnit.Framework.Assert.AreEqual("line1 length", 1, @out.GetLength());
        @in.ReadLine(@out);
        NUnit.Framework.Assert.AreEqual("line2 length", 2, @out.GetLength());
        @in.ReadLine(@out);
        NUnit.Framework.Assert.AreEqual("line3 length", 0, @out.GetLength());
        @in.ReadLine(@out);
        NUnit.Framework.Assert.AreEqual("line4 length", 3, @out.GetLength());
        @in.ReadLine(@out);
        NUnit.Framework.Assert.AreEqual("line5 length", 4, @out.GetLength());
        @in.ReadLine(@out);
        NUnit.Framework.Assert.AreEqual("line6 length", 5, @out.GetLength());
        NUnit.Framework.Assert.AreEqual("end of file", 0, @in.ReadLine(@out));
    }
    finally
    {
        if (@in != null)
        {
            @in.Close();
        }
    }
}
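// Note: MakeStream is a test helper that is not shown in this example. Below is a
// minimal sketch of what it might look like, under two assumptions that are not
// taken from this source: that the single-argument LineReader(InputStream)
// constructor seen in the other examples accepts any input stream, and that a
// sharpen-style ByteArrayInputStream shim is available in this code base.
private static LineReader MakeStream(string str)
{
    // Wrap the UTF-8 bytes of the string in an in-memory stream so the
    // LineReader can split it on \n, \r, and \r\n.
    return new LineReader(new ByteArrayInputStream(
        System.Text.Encoding.UTF8.GetBytes(str)));
}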
//---------------------------------------------------------------------

/// <summary>
/// Loads an instance of T from a file. The file may contain a
/// serialized form of an editable instance or it may be a text file
/// that needs parsing.
/// </summary>
public static T Load<T>(string path, ITextParser<T> parser)
{
    if (Path.GetExtension(path) == FileExtension)
    {
        // Deserialize an editable instance from the file
        // (binary serialization):
        IFormatter formatter = new BinaryFormatter();
        Stream stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read);
        using (stream)
        {
            IEditable<T> editableObject = (IEditable<T>)formatter.Deserialize(stream);
            if (!editableObject.IsComplete)
            {
                throw new System.ApplicationException("Not complete T");
            }
            return editableObject.GetComplete();
        }
    }
    else
    {
        LineReader reader = OpenTextFile(path);
        try
        {
            return parser.Parse(reader);
        }
        finally
        {
            reader.Close();
        }
    }
}
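// Usage sketch (illustration only, not from the source): ScenarioParser is a
// hypothetical ITextParser<Scenario> implementation and "scenario.txt" a
// hypothetical input file. Because the ".txt" extension does not match
// FileExtension, Load<T> takes the text-parsing branch and closes the
// LineReader in its finally block; a file carrying the FileExtension suffix
// would be binary-deserialized instead.
private static Scenario LoadScenarioExample()
{
    return Load<Scenario>("scenario.txt", new ScenarioParser());
}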
public bool HasNextIndexable()
{
    data_sb.Length = 0;
    session_num_lines = 0;
    speakers.Clear();

    if (reader == null)
    {
        // Log files are in system encoding
        reader = new ReencodingLineReader(log_file, Encoding.Default);
        reader.Position = session_begin_offset;
        log_line_as_sb = reader.ReadLineAsStringBuilder();
        //Log.Debug ("Read line from {0}:[{1}]", log_file, log_line_as_sb);
    }

    if (log_line_as_sb == null)
    {
        reader.Close();
        return false;
    }
    else
    {
        // Update session_begin_offset
        session_begin_offset = prev_line_offset;
    }

    return true;
}
private void HandleFailedSend(FileLock markLock, string bufferName, ref LineReader streamReader,
                              FileLock readerLock, List<string> memoryBuffer, int failLimit,
                              Action<List<string>> alternativeWrite)
{
    try
    {
        markLock.Lock();
        var buffers = ReadMark(markLock).Buffers;
        var buffer = GetBufferIndex(bufferName, buffers, out var index);
        if (buffer.FailedSendCount >= failLimit)
        {
            alternativeWrite(memoryBuffer);
            HandleSuccessSend(markLock, bufferName, ref streamReader, readerLock, memoryBuffer);
        }
        else
        {
            var modifiedBuffer = new BufferInfo(buffer.Name, buffer.Written, buffer.Read,
                buffer.WriteActive, buffer.LastFlushTime, buffer.FailedSendCount + 1, DateTime.Now);
            buffers[index] = modifiedBuffer;
            WriteMark(markLock, buffers);
            streamReader.Close();
            streamReader = null;
        }
    }
    finally
    {
        markLock.Unlock();
    }
}
//---------------------------------------------------------------------

private void TryParse(string filename, int errorLineNum)
{
    try
    {
        reader = OpenFile(filename);

        // This method is only called on bad files, so we expect the
        // statement below to throw an exception. Since we knowingly
        // ignore the variable "landUses", disable the CS0219 warning
        // "The variable '...' is assigned but its value is never used".
#pragma warning disable 0219
        IList<LandUse> landUses = parser.Parse(reader);
#pragma warning restore 0219
    }
    catch (System.Exception e)
    {
        Data.Output.WriteLine(e.Message.Replace(Data.Directory, Data.DirPlaceholder));
        LineReaderException lrExc = e as LineReaderException;
        if (lrExc != null)
        {
            Assert.AreEqual(errorLineNum, lrExc.LineNumber);
        }
        Data.Output.WriteLine();
        throw;
    }
    finally
    {
        reader.Close();
    }
}
public void GoodFile()
{
    const string filename = "GoodFile.txt";
    IParameterDataset dataset;
    try
    {
        reader = OpenFile(filename);
        dataset = parser.Parse(reader);
    }
    finally
    {
        reader.Close();
    }

    try
    {
        // Now that we know the data file is properly formatted, read
        // data from it and compare it against parameter dataset.
        reader = OpenFile(filename);
        inputLine = new InputLine(reader);

        Assert.AreEqual(parser.LandisDataValue, ReadInputVar<string>("LandisData"));

        CheckPercentageTable("CohortBiomassReductions", dataset.CohortReductions, "DeadPoolReductions");
        CheckPercentageTable("DeadPoolReductions", dataset.PoolReductions, null);
    }
    finally
    {
        inputLine = null;
        reader.Close();
    }
}
//---------------------------------------------------------------------

private Scenario ParseFile(string filename)
{
    reader = OpenFile(filename);
    Scenario scenario = parser.Parse(reader);
    reader.Close();
    return scenario;
}
//---------------------------------------------------------------------

private IDataset ParseFile(string filename)
{
    reader = OpenFile(filename);
    IDataset dataset = parser.Parse(reader);
    reader.Close();
    return dataset;
}
//---------------------------------------------------------------------

private IList<LandUse> ParseFile(string filename)
{
    reader = OpenFile(filename);
    IList<LandUse> landUses = parser.Parse(reader);
    reader.Close();
    return landUses;
}
//---------------------------------------------------------------------

private IParameters ParseFile(string filename)
{
    reader = OpenFile(filename);
    IParameters parameters = parser.Parse(reader);
    reader.Close();
    return parameters;
}
//---------------------------------------------------------------------

private IParameters ParseFile(string filename)
{
    try
    {
        reader = OpenFile(filename);
        return parser.Parse(reader);
    }
    finally
    {
        reader.Close();
    }
}
//---------------------------------------------------------------------

private ExtensionInfo ParseFile(string filename)
{
    try
    {
        reader = OpenFile(filename);
        return parser.Parse(reader);
    }
    finally
    {
        reader.Close();
    }
}
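// The ParseFile variants above all follow the same open-parse-close pattern;
// the try/finally form of the last two also closes the reader when Parse
// throws. A generic consolidation is sketched below. This helper is an
// illustration only, not part of the original code base, and assumes the same
// OpenFile helper and the ITextParser<T> interface used elsewhere in these
// examples.
private T ParseFile<T>(string filename, ITextParser<T> parser)
{
    LineReader reader = OpenFile(filename);
    try
    {
        // Parse the whole file; the finally block guarantees the reader is
        // closed even if parsing fails part-way through.
        return parser.Parse(reader);
    }
    finally
    {
        reader.Close();
    }
}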
public void Init()
{
    parser = new ParameterParser();

    Ecoregions.DatasetParser ecoregionsParser = new Ecoregions.DatasetParser();
    reader = OpenFile("Ecoregions.txt");
    try
    {
        ParameterParser.EcoregionsDataset = ecoregionsParser.Parse(reader);
    }
    finally
    {
        reader.Close();
    }
}
public void Init()
{
    Species.DatasetParser speciesParser = new Species.DatasetParser();
    reader = OpenFile("Species.txt");
    try
    {
        speciesDataset = speciesParser.Parse(reader);
    }
    finally
    {
        reader.Close();
    }

    parser = new ParametersParser(speciesDataset);
}
/// <summary>
/// Loads an instance of T from a file. The file may contain a
/// serialized form of an editable instance or it may be a text file
/// that needs parsing.
/// </summary>
private T Load<T>(string path, ITextParser<T> parser)
{
    LineReader reader = this.OpenTextFile(path);
    try
    {
        return parser.Parse(reader);
    }
    finally
    {
        reader.Close();
    }
}
/// <exception cref="System.IO.IOException"/> public static IList <FileSplit> GetSplitsForFile(FileStatus status, Configuration conf, int numLinesPerSplit) { IList <FileSplit> splits = new AList <FileSplit>(); Path fileName = status.GetPath(); if (status.IsDirectory()) { throw new IOException("Not a file: " + fileName); } FileSystem fs = fileName.GetFileSystem(conf); LineReader lr = null; try { FSDataInputStream @in = fs.Open(fileName); lr = new LineReader(@in, conf); Text line = new Text(); int numLines = 0; long begin = 0; long length = 0; int num = -1; while ((num = lr.ReadLine(line)) > 0) { numLines++; length += num; if (numLines == numLinesPerSplit) { splits.AddItem(CreateFileSplit(fileName, begin, length)); begin += length; length = 0; numLines = 0; } } if (numLines != 0) { splits.AddItem(CreateFileSplit(fileName, begin, length)); } } finally { if (lr != null) { lr.Close(); } } return(splits); }
/// <summary>Parse the command line arguments into lines and display the result.</summary>
/// <param name="args"/>
/// <exception cref="System.Exception"/>
public static void Main(string[] args)
{
    foreach (string arg in args)
    {
        System.Console.Out.WriteLine("Working on " + arg);
        LineReader reader = MakeStream(Unquote(arg));
        Org.Apache.Hadoop.IO.Text line = new Org.Apache.Hadoop.IO.Text();
        int size = reader.ReadLine(line);
        while (size > 0)
        {
            System.Console.Out.WriteLine("Got: " + line.ToString());
            size = reader.ReadLine(line);
        }
        reader.Close();
    }
}
public void Init()
{
    parser = new ParametersParser();

    Species.DatasetParser speciesParser = new Species.DatasetParser();
    LineReader speciesReader = OpenFile("SpeciesDataset.txt");
    try
    {
        ParametersParser.SpeciesDataset = speciesParser.Parse(speciesReader);
    }
    finally
    {
        speciesReader.Close();
    }

    Data.Output.WriteLine("{0} = \"{1}\"", dataDirPlaceholder, Data.Directory);
    Data.Output.WriteLine();
}
//---------------------------------------------------------------------

private void TryParse(string filename, int errorLineNum)
{
    try
    {
        reader = OpenFile(filename);
        IDataset dataset = parser.Parse(reader);
    }
    catch (System.Exception e)
    {
        Data.Output.WriteLine(e.Message);
        LineReaderException lrExc = e as LineReaderException;
        if (lrExc != null)
        {
            Assert.AreEqual(errorLineNum, lrExc.LineNumber);
        }
        throw;
    }
    finally
    {
        reader.Close();
    }
}
//---------------------------------------------------------------------

private void TryParse(string filename, int errorLineNum)
{
    try
    {
        reader = OpenFile(filename);
        Scenario scenario = parser.Parse(reader);
    }
    catch (System.Exception e)
    {
        Data.Output.WriteLine();
        Data.Output.WriteLine(e.Message.Replace(Data.Directory, Data.DirPlaceholder));
        LineReaderException lrExc = e as LineReaderException;
        if (lrExc != null)
        {
            Assert.AreEqual(errorLineNum, lrExc.LineNumber);
        }
        throw;
    }
    finally
    {
        reader.Close();
    }
}
//---------------------------------------------------------------------

private void TryParse(string filename)
{
    int? errorLineNum = Testing.FindErrorMarker(MakeInputPath(filename));
    try
    {
        reader = OpenFile(filename);
        IParameterDataset dataset = parser.Parse(reader);
    }
    catch (System.Exception e)
    {
        Data.Output.WriteLine();
        Data.Output.WriteLine(e.Message.Replace(Data.Directory, Data.DirPlaceholder));
        LineReaderException lrExc = e as LineReaderException;
        if (lrExc != null && errorLineNum.HasValue)
        {
            Assert.AreEqual(errorLineNum.Value, lrExc.LineNumber);
        }
        throw;
    }
    finally
    {
        reader.Close();
    }
}
public void Init()
{
    Ecoregions.DatasetParser ecoregionsParser = new Ecoregions.DatasetParser();
    reader = OpenFile("Ecoregions.txt");
    try
    {
        ecoregionDataset = ecoregionsParser.Parse(reader);
    }
    finally
    {
        reader.Close();
    }

    Species.DatasetParser speciesParser = new Species.DatasetParser();
    reader = OpenFile("Species.txt");
    try
    {
        speciesDataset = speciesParser.Parse(reader);
    }
    finally
    {
        reader.Close();
    }

    parser = new ParametersParser(ecoregionDataset, speciesDataset);
}
private void HandleSuccessSend(FileLock markLock, string bufferName, ref LineReader streamReader,
                               FileLock readerLock, List<string> memoryBuffer)
{
    try
    {
        var buffer = MarkRead(markLock, bufferName, memoryBuffer.Count);
        if (buffer.WriteActive == false && buffer.Read >= buffer.Written)
        {
            streamReader.Close();
            streamReader = null;
            DeleteBuffer(readerLock.Path);
        }
    }
    catch (IOException e)
    {
        _logger.LogError(e, "Mark read failed.");
    }
    finally
    {
        memoryBuffer.Clear();
    }
}
/// <exception cref="System.Exception"/> public virtual void TestUTF8() { LineReader @in = null; try { @in = MakeStream("abcd\u20acbdcd\u20ac"); Text line = new Text(); @in.ReadLine(line); NUnit.Framework.Assert.AreEqual("readLine changed utf8 characters", "abcd\u20acbdcd\u20ac" , line.ToString()); @in = MakeStream("abc\u200axyz"); @in.ReadLine(line); NUnit.Framework.Assert.AreEqual("split on fake newline", "abc\u200axyz", line.ToString ()); } finally { if (@in != null) { @in.Close(); } } }
public override void Close()
{
    reader.Close();
}
public virtual void TestCompress()
{
    JobConf job = new JobConf();
    job.Set(JobContext.TaskAttemptId, attempt);
    job.Set(FileOutputFormat.Compress, "true");
    FileOutputFormat.SetOutputPath(job, workDir.GetParent().GetParent());
    FileOutputFormat.SetWorkOutputPath(job, workDir);
    FileSystem fs = workDir.GetFileSystem(job);
    if (!fs.Mkdirs(workDir))
    {
        NUnit.Framework.Assert.Fail("Failed to create output directory");
    }
    string file = "test_compress.txt";

    // A reporter that does nothing
    Reporter reporter = Reporter.Null;

    TextOutputFormat<object, object> theOutputFormat = new TextOutputFormat<object, object>();
    RecordWriter<object, object> theRecordWriter = theOutputFormat.GetRecordWriter(localFs, job, file, reporter);
    Org.Apache.Hadoop.IO.Text key1 = new Org.Apache.Hadoop.IO.Text("key1");
    Org.Apache.Hadoop.IO.Text key2 = new Org.Apache.Hadoop.IO.Text("key2");
    Org.Apache.Hadoop.IO.Text val1 = new Org.Apache.Hadoop.IO.Text("val1");
    Org.Apache.Hadoop.IO.Text val2 = new Org.Apache.Hadoop.IO.Text("val2");
    NullWritable nullWritable = NullWritable.Get();
    try
    {
        theRecordWriter.Write(key1, val1);
        theRecordWriter.Write(null, nullWritable);
        theRecordWriter.Write(null, val1);
        theRecordWriter.Write(nullWritable, val2);
        theRecordWriter.Write(key2, nullWritable);
        theRecordWriter.Write(key1, null);
        theRecordWriter.Write(null, null);
        theRecordWriter.Write(key2, val2);
    }
    finally
    {
        theRecordWriter.Close(reporter);
    }

    StringBuilder expectedOutput = new StringBuilder();
    expectedOutput.Append(key1).Append("\t").Append(val1).Append("\n");
    expectedOutput.Append(val1).Append("\n");
    expectedOutput.Append(val2).Append("\n");
    expectedOutput.Append(key2).Append("\n");
    expectedOutput.Append(key1).Append("\n");
    expectedOutput.Append(key2).Append("\t").Append(val2).Append("\n");

    DefaultCodec codec = new DefaultCodec();
    codec.SetConf(job);
    Path expectedFile = new Path(workDir, file + codec.GetDefaultExtension());
    FileInputStream istream = new FileInputStream(expectedFile.ToString());
    CompressionInputStream cistream = codec.CreateInputStream(istream);
    LineReader reader = new LineReader(cistream);
    string output = string.Empty;
    Org.Apache.Hadoop.IO.Text @out = new Org.Apache.Hadoop.IO.Text();
    while (reader.ReadLine(@out) > 0)
    {
        output += @out;
        output += "\n";
    }
    reader.Close();
    NUnit.Framework.Assert.AreEqual(expectedOutput.ToString(), output);
}
//---------------------------------------------------------------------

private void ReadAndCheckParameters(string filename)
{
    IParameters parameters;
    try
    {
        reader = OpenFile(filename);
        parameters = parser.Parse(reader);
    }
    finally
    {
        reader.Close();
    }

    try
    {
        // Now that we know the data file is properly formatted, read
        // data from it and compare it against parameter object.
        reader = OpenFile(filename);
        inputLine = new InputLine(reader);

        Assert.AreEqual(parser.LandisDataValue, ReadInputVar<string>("LandisData"));

        Assert.AreEqual(ReadInputVar<int>("Timestep"), parameters.Timestep);
        Assert.AreEqual(ReadInputVar<SeedingAlgorithms>("SeedingAlgorithm"), parameters.SeedAlgorithm);

        inputLine.MatchName("MinRelativeBiomass");
        inputLine.GetNext();
        List<IEcoregion> ecoregions = ReadEcoregions();
        for (byte shadeClass = 1; shadeClass <= 5; shadeClass++)
        {
            StringReader currentLine = new StringReader(inputLine.ToString());
            Assert.AreEqual(shadeClass, ReadInputValue<byte>(currentLine));
            foreach (IEcoregion ecoregion in ecoregions)
            {
                // TODO: Eventually allow equality testing for Percentage
                Assert.AreEqual((double)ReadInputValue<Percentage>(currentLine),
                                (double)parameters.MinRelativeBiomass[shadeClass][ecoregion]);
            }
            inputLine.GetNext();
        }

        inputLine.MatchName("BiomassParameters");
        inputLine.GetNext();
        while (inputLine.VariableName != "EstablishProbabilities")
        {
            StringReader currentLine = new StringReader(inputLine.ToString());
            ISpecies species = ReadSpecies(currentLine);
            Assert.AreEqual(ReadInputValue<double>(currentLine), parameters.LeafLongevity[species]);
            Assert.AreEqual(ReadInputValue<double>(currentLine), parameters.WoodyDecayRate[species]);
            Assert.AreEqual(ReadInputValue<double>(currentLine), parameters.MortCurveShapeParm[species]);
            inputLine.GetNext();
        }

        CheckParameterTable("EstablishProbabilities", parameters.EstablishProbability, "MaxANPP");
        CheckParameterTable("MaxANPP", parameters.MaxANPP, "LeafLitter:DecayRates");

        const string AgeOnlyDisturbanceParms = "AgeOnlyDisturbances:BiomassParameters";
        CheckParameterTable("LeafLitter:DecayRates", parameters.LeafLitterDecayRate, AgeOnlyDisturbanceParms);

        if (parameters.AgeOnlyDisturbanceParms != null)
        {
            Assert.AreEqual(ReadInputVar<string>(AgeOnlyDisturbanceParms), parameters.AgeOnlyDisturbanceParms);
        }
    }
    finally
    {
        inputLine = null;
        reader.Close();
    }
}
public virtual void TestBuiltInGzipDecompressor()
{
    // NOTE: This fails on RHEL4 with "java.io.IOException: header crc mismatch"
    // due to buggy version of zlib (1.2.1.2) included.
    JobConf jobConf = new JobConf(defaultConf);
    jobConf.SetBoolean("io.native.lib.available", false);
    CompressionCodec gzip = new GzipCodec();
    ReflectionUtils.SetConf(gzip, jobConf);
    localFs.Delete(workDir, true);
    NUnit.Framework.Assert.AreEqual("[non-native (Java) codec]",
        typeof(BuiltInGzipDecompressor), gzip.GetDecompressorType());
    System.Console.Out.WriteLine(ColorBrYellow + "testBuiltInGzipDecompressor() using"
        + " non-native (Java Inflater) Decompressor (" + gzip.GetDecompressorType() + ")" + ColorNormal);

    // copy single-member test file to HDFS
    string fn1 = "testConcatThenCompress.txt" + gzip.GetDefaultExtension();
    Path fnLocal1 = new Path(Runtime.GetProperty("test.concat.data", "/tmp"), fn1);
    Path fnHDFS1 = new Path(workDir, fn1);
    localFs.CopyFromLocalFile(fnLocal1, fnHDFS1);

    // copy multiple-member test file to HDFS
    // (actually in "seekable gzip" format, a la JIRA PIG-42)
    string fn2 = "testCompressThenConcat.txt" + gzip.GetDefaultExtension();
    Path fnLocal2 = new Path(Runtime.GetProperty("test.concat.data", "/tmp"), fn2);
    Path fnHDFS2 = new Path(workDir, fn2);
    localFs.CopyFromLocalFile(fnLocal2, fnHDFS2);
    FileInputFormat.SetInputPaths(jobConf, workDir);

    // here's first pair of DecompressorStreams:
    FileInputStream in1 = new FileInputStream(fnLocal1.ToString());
    FileInputStream in2 = new FileInputStream(fnLocal2.ToString());
    NUnit.Framework.Assert.AreEqual("concat bytes available", 2734, in1.Available());
    NUnit.Framework.Assert.AreEqual("concat bytes available", 3413, in2.Available());  // w/hdr CRC

    CompressionInputStream cin2 = gzip.CreateInputStream(in2);
    LineReader @in = new LineReader(cin2);
    Text @out = new Text();
    int numBytes;
    int totalBytes = 0;
    int lineNum = 0;
    while ((numBytes = @in.ReadLine(@out)) > 0)
    {
        ++lineNum;
        totalBytes += numBytes;
    }
    @in.Close();
    NUnit.Framework.Assert.AreEqual("total uncompressed bytes in concatenated test file", 5346, totalBytes);
    NUnit.Framework.Assert.AreEqual("total uncompressed lines in concatenated test file", 84, lineNum);

    // test BuiltInGzipDecompressor with lots of different input-buffer sizes
    DoMultipleGzipBufferSizes(jobConf, false);

    // test GzipZlibDecompressor (native), just to be sure
    // (FIXME? could move this call to testGzip(), but would need filename
    // setup above) (alternatively, maybe just nuke testGzip() and extend this?)
    DoMultipleGzipBufferSizes(jobConf, true);
}
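// Usage sketch (assumption, not from the source): the read-back pattern from the
// two compression tests above, isolated into a small helper. The localPath and
// Configuration arguments are hypothetical; every call used here (GzipCodec,
// ReflectionUtils.SetConf, CreateInputStream, LineReader.ReadLine, Close)
// appears in the examples above.
public static int CountLinesInGzipFile(string localPath, Configuration conf)
{
    CompressionCodec gzip = new GzipCodec();
    ReflectionUtils.SetConf(gzip, conf);
    FileInputStream raw = new FileInputStream(localPath);
    LineReader reader = new LineReader(gzip.CreateInputStream(raw));
    try
    {
        Text line = new Text();
        int lineCount = 0;
        // ReadLine returns the number of bytes consumed; 0 means end of stream.
        while (reader.ReadLine(line) > 0)
        {
            ++lineCount;
        }
        return lineCount;
    }
    finally
    {
        reader.Close();
    }
}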