public virtual void TestFailureGetNonExistentMetaBlock()
{
    if (skip)
    {
        return;
    }
    writer.Append(Runtime.GetBytesForString("keyX"), Runtime.GetBytesForString("valueX"));
    // create a new metablock
    DataOutputStream outMeta = writer.PrepareMetaBlock("testX", Compression.Algorithm.Gz.GetName());
    outMeta.Write(123);
    outMeta.Write(Runtime.GetBytesForString("foo"));
    outMeta.Close();
    CloseOutput();
    TFile.Reader reader = new TFile.Reader(fs.Open(path), fs.GetFileStatus(path).GetLen(), conf);
    DataInputStream mb = reader.GetMetaBlock("testX");
    NUnit.Framework.Assert.IsNotNull(mb);
    mb.Close();
    try
    {
        DataInputStream mbBad = reader.GetMetaBlock("testY");
        NUnit.Framework.Assert.Fail("Error on handling non-existent metablocks.");
    }
    catch (Exception)
    {
        // noop, expecting exceptions
    }
    reader.Close();
}
public virtual void TestBadChunks()
{
    FilePath fn = GetTestFile();
    int byteAm = 10000;
    FileOutputStream fout = new FileOutputStream(fn);
    byte[] bytes = new byte[byteAm];
    rnd.NextBytes(bytes);
    fout.Write(bytes);
    fout.Close();
    // attempt to read it
    DataVerifier vf = new DataVerifier();
    DataVerifier.VerifyOutput vout = new DataVerifier.VerifyOutput(0, 0, 0, 0);
    DataInputStream @in = null;
    try
    {
        @in = new DataInputStream(new FileInputStream(fn));
        vout = vf.VerifyFile(byteAm, @in);
    }
    catch (Exception)
    {
    }
    finally
    {
        if (@in != null)
        {
            @in.Close();
        }
    }
    NUnit.Framework.Assert.IsTrue(vout.GetChunksSame() == 0);
}
/// <exception cref="System.Exception"/> private void CheckTokenIdentifier <_T0>(UserGroupInformation ugi, Org.Apache.Hadoop.Security.Token.Token <_T0> token) where _T0 : TokenIdentifier { NUnit.Framework.Assert.IsNotNull(token); // should be able to use token.decodeIdentifier() but webhdfs isn't // registered with the service loader for token decoding DelegationTokenIdentifier identifier = new DelegationTokenIdentifier(); byte[] tokenId = token.GetIdentifier(); DataInputStream @in = new DataInputStream(new ByteArrayInputStream(tokenId)); try { identifier.ReadFields(@in); } finally { @in.Close(); } NUnit.Framework.Assert.IsNotNull(identifier); Log.Info("A valid token should have non-null password, and should be renewed successfully" ); NUnit.Framework.Assert.IsTrue(null != dtSecretManager.RetrievePassword(identifier )); dtSecretManager.RenewToken((Org.Apache.Hadoop.Security.Token.Token <DelegationTokenIdentifier >)token, "JobTracker"); ugi.DoAs(new _PrivilegedExceptionAction_309(this, token)); }
/// <summary>close opened files.</summary>
/// <exception cref="System.IO.IOException"/>
public virtual void Close()
{
    if (blockInFd != null && ((dropCacheBehindAllReads) || (dropCacheBehindLargeReads && IsLongRead())))
    {
        try
        {
            NativeIO.POSIX.GetCacheManipulator().PosixFadviseIfPossible(block.GetBlockName(), blockInFd,
                lastCacheDropOffset, offset - lastCacheDropOffset, NativeIO.POSIX.PosixFadvDontneed);
        }
        catch (Exception e)
        {
            Log.Warn("Unable to drop cache on file close", e);
        }
    }
    if (curReadahead != null)
    {
        curReadahead.Cancel();
    }
    IOException ioe = null;
    if (checksumIn != null)
    {
        try
        {
            // close checksum file
            checksumIn.Close();
        }
        catch (IOException e)
        {
            ioe = e;
        }
        checksumIn = null;
    }
    if (blockIn != null)
    {
        try
        {
            // close data file
            blockIn.Close();
        }
        catch (IOException e)
        {
            ioe = e;
        }
        blockIn = null;
        blockInFd = null;
    }
    if (volumeRef != null)
    {
        IOUtils.Cleanup(null, volumeRef);
        volumeRef = null;
    }
    // throw IOException if there is any
    if (ioe != null)
    {
        throw ioe;
    }
}
/// <summary>Assert MapWritable does not grow across calls to readFields.</summary>
/// <exception cref="System.Exception"/>
/// <seealso><a href="https://issues.apache.org/jira/browse/HADOOP-2244">HADOOP-2244</a></seealso>
public virtual void TestMultipleCallsToReadFieldsAreSafe()
{
    // Create an instance and add a key/value.
    MapWritable m = new MapWritable();
    Text t = new Text(GetName());
    m[t] = t;
    // Get current size of map. Key values are 't'.
    int count = m.Count;
    // Now serialize... save off the bytes.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    m.Write(dos);
    dos.Close();
    // Now add new values to the MapWritable.
    m[new Text("key1")] = new Text("value1");
    m[new Text("key2")] = new Text("value2");
    // Now deserialize the original MapWritable. Ensure count and key values
    // match original state.
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.ToByteArray());
    DataInputStream dis = new DataInputStream(bais);
    m.ReadFields(dis);
    Assert.Equal(count, m.Count);
    Assert.True(m[t].Equals(t));
    dis.Close();
}
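// A minimal round-trip sketch distilled from the test above; "ReloadInPlace" is a
// hypothetical helper name, and it assumes any Org.Apache.Hadoop.IO.Writable whose
// ReadFields resets prior state, which is the contract HADOOP-2244 established for
// MapWritable.
public static void ReloadInPlace(Org.Apache.Hadoop.IO.Writable w)
{
    // serialize the current state
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    w.Write(dos);
    dos.Close();
    // deserialize back into the same instance; ReadFields must overwrite, not append
    DataInputStream dis = new DataInputStream(new ByteArrayInputStream(baos.ToByteArray()));
    try
    {
        w.ReadFields(dis);
    }
    finally
    {
        dis.Close();
    }
}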
private static void ProcessPostRequest(HttpRequest request, HttpResponse response, HttpSessionState session)
{
    response.ContentType = "application/octet-stream";
    var memoryStream = new MemoryStream();
    var output = new DataOutputStream(memoryStream);
    try
    {
        var input = new DataInputStream(request.InputStream);
        var version = request.Headers["version"];
        if (version != null)
        {
            if (!HttpProcessor.PROTOCOL_VERSION.Equals(version, StringComparison.Ordinal))
            {
                throw new IOException(String.Format(
                    CultureInfo.CurrentCulture,
                    Resources.IO_InvalidProtocolVersion,
                    version,
                    HttpProcessor.PROTOCOL_VERSION));
            }
        }
        if (input.ReadInt16() == HttpProcessor.INVOCATION_CODE)
        {
            InvokeMethod(session, input.ReadInt32(), input, output);
        }
        input.Close();
    }
    catch (Exception e)
    {
        if (output == null)
        {
            output = new DataOutputStream(response.OutputStream);
        }
        output.WriteInt16(HttpProcessor.RESULT_EXCEPTION);
        Debug.WriteLine(e.StackTrace);
        output.WriteString(e.ToString());
    }
    response.SetContentLength(memoryStream.Length);
    try
    {
        var data = new byte[memoryStream.Length];
        memoryStream.Seek(0, SeekOrigin.Begin);
        memoryStream.Read(data, 0, data.Length);
        response.OutputStream.Write(data, 0, data.Length);
    }
    finally
    {
        if (output != null)
        {
            output.Close();
        }
        response.OutputStream.Close();
    }
}
/// <exception cref="System.IO.IOException"/> public override void Close() { if (state == EditLogFileInputStream.State.Open) { dataIn.Close(); } state = EditLogFileInputStream.State.Closed; }
/// <summary>
/// Closes this input stream and releases any system resources associated with
/// the stream.
/// </summary>
/// <remarks>
/// Closes this input stream and releases any system resources associated with
/// the stream.
/// <p>
/// The <code>close</code> method of <code>SASLInputStream</code> calls the
/// <code>close</code> method of its underlying input stream.
/// </remarks>
/// <exception>
/// IOException
/// if an I/O error occurs.
/// </exception>
/// <exception cref="System.IO.IOException"/>
public override void Close()
{
    DisposeSasl();
    ostart = 0;
    ofinish = 0;
    inStream.Close();
    isOpen = false;
}
/// <summary>Close the Event reader</summary>
/// <exception cref="System.IO.IOException"/>
public virtual void Close()
{
    if (@in != null)
    {
        @in.Close();
    }
    @in = null;
}
/// <exception cref="System.IO.IOException"/> public override byte[] ReadFile(Path path, int len) { DataInputStream dis = fs.Open(path); byte[] buffer = new byte[len]; IOUtils.ReadFully(dis, buffer, 0, len); dis.Close(); return(buffer); }
/// <exception cref="System.IO.IOException"/> public static byte[] ReadFile(FileContext fc, Path path, int len) { DataInputStream dis = fc.Open(path); byte[] buffer = new byte[len]; IOUtils.ReadFully(dis, buffer, 0, len); dis.Close(); return(buffer); }
/// <exception cref="System.IO.IOException"/> private void ReadFile(FileSystem fileSys, Path name) { //Just read file so that getNumBlockLocations are incremented DataInputStream stm = fileSys.Open(name); byte[] buffer = new byte[4]; stm.Read(buffer, 0, 4); stm.Close(); }
// read a long value from the scanner
/// <exception cref="System.IO.IOException"/>
public virtual byte[] ReadLongValue(TFile.Reader.Scanner scanner, int len)
{
    DataInputStream din = scanner.Entry().GetValueStream();
    byte[] b = new byte[len];
    din.ReadFully(b);
    din.Close();
    return b;
}
/// <summary>
/// Finds the specified resource file in the given resource pack and returns it as a byte[].
/// </summary>
/// <param name="fileName"></param>
/// <param name="resName"></param>
/// <returns></returns>
public static byte[] OpenResource(string fileName, string resName)
{
    Stream ins0 = null;
    DataInputStream dis = null;
    try
    {
        ins0 = Resources.OpenStream(fileName);
        dis = new DataInputStream(ins0);
        LPKHeader header = ReadHeader(dis);
        LPKTable[] fileTable = ReadLPKTable(dis, (int)header.GetTables());
        bool find = false;
        int fileIndex = 0;
        string innerName = null;
        for (int i = 0; i < fileTable.Length; i++)
        {
            innerName = StringUtils.NewString(fileTable[i].GetFileName()).Trim();
            if (innerName.Equals(resName, StringComparison.InvariantCultureIgnoreCase))
            {
                find = true;
                fileIndex = i;
                break;
            }
        }
        if (!find)
        {
            throw new Exception("File not found. ( " + fileName + " )");
        }
        return ReadFileFromPak(dis, header, fileTable[fileIndex]);
    }
    catch (Exception ex)
    {
        Log.Exception(ex);
        throw new Exception("File not found. ( " + fileName + " )");
    }
    finally
    {
        if (dis != null)
        {
            try
            {
                dis.Close();
                dis = null;
            }
            catch (IOException ex)
            {
                Log.Exception(ex);
            }
        }
    }
}
/// <exception cref="System.IO.IOException"/> private void ReadFile(FileSystem fileSys, Path name) { DataInputStream stm = fileSys.Open(name); byte[] buffer = new byte[4]; int bytesRead = stm.Read(buffer, 0, 4); NUnit.Framework.Assert.AreEqual("oom", Sharpen.Runtime.GetStringForBytes(buffer, 0, bytesRead)); stm.Close(); }
/// <exception cref="System.IO.IOException"/> private static DelegationTokenIdentifier DecodeToken(Org.Apache.Hadoop.Security.Token.Token <DelegationTokenIdentifier> token, Text tokenKind) { ByteArrayInputStream buf = new ByteArrayInputStream(token.GetIdentifier()); DataInputStream dis = new DataInputStream(buf); DelegationTokenIdentifier id = new DelegationTokenIdentifier(tokenKind); id.ReadFields(dis); dis.Close(); return(id); }
/// <exception cref="System.IO.IOException"/> public override void Close() { if (null != infbuf) { infbuf.Close(); } if (null != outfbuf) { outfbuf.Close(); } }
public async Task CloseFile(bool fileWasForWriting)
{
    if (fileWasForWriting)
    {
        OutputStream.Close();
    }
    else
    {
        InputStream.Close();
    }
}
public virtual void TestCompressorDecompressorLogicWithCompressionStreams()
{
    DataOutputStream deflateOut = null;
    DataInputStream inflateIn = null;
    int ByteSize = 1024 * 100;
    byte[] bytes = Generate(ByteSize);
    int bufferSize = 262144;
    int compressionOverhead = (bufferSize / 6) + 32;
    try
    {
        DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
        CompressionOutputStream deflateFilter = new BlockCompressorStream(compressedDataBuffer,
            new Lz4Compressor(bufferSize), bufferSize, compressionOverhead);
        deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));
        deflateOut.Write(bytes, 0, bytes.Length);
        deflateOut.Flush();
        deflateFilter.Finish();
        DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
        deCompressedDataBuffer.Reset(compressedDataBuffer.GetData(), 0, compressedDataBuffer.GetLength());
        CompressionInputStream inflateFilter = new BlockDecompressorStream(deCompressedDataBuffer,
            new Lz4Decompressor(bufferSize), bufferSize);
        inflateIn = new DataInputStream(new BufferedInputStream(inflateFilter));
        byte[] result = new byte[ByteSize];
        inflateIn.Read(result);
        Assert.AssertArrayEquals("original array not equals compress/decompressed array", result, bytes);
    }
    catch (IOException)
    {
        NUnit.Framework.Assert.Fail("testLz4CompressorDecompressorLogicWithCompressionStreams ex error !!!");
    }
    finally
    {
        try
        {
            if (deflateOut != null)
            {
                deflateOut.Close();
            }
            if (inflateIn != null)
            {
                inflateIn.Close();
            }
        }
        catch (Exception)
        {
        }
    }
}
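// The Read(result) call above can legally return fewer bytes than result.Length; the test
// happens to work because the whole payload is buffered. A minimal defensive sketch, assuming
// the same DataInputStream API used throughout these snippets ("FillExactly" is a hypothetical
// helper name); DataInputStream.ReadFully, used elsewhere in this suite, does the same job.
private static void FillExactly(DataInputStream stream, byte[] buffer)
{
    int filled = 0;
    while (filled < buffer.Length)
    {
        int n = stream.Read(buffer, filled, buffer.Length - filled);
        if (n <= 0)
        {
            // premature end of stream: the buffer could not be filled
            throw new IOException("Unexpected end of stream after " + filled + " bytes");
        }
        filled += n;
    }
}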
public void Close()
{
    isReading = false;
    if (mInputStream != null)
    {
        mInputStream.Close();
    }
    if (mReaderThread != null)
    {
        mReaderThread.Join();
    }
}
protected internal virtual void Read(string filename)
{
    try
    {
        DataInputStream @in = IOUtils.GetDataInputStream(filename);
        Read(@in);
        @in.Close();
    }
    catch (IOException e)
    {
        Sharpen.Runtime.PrintStackTrace(e);
    }
}
/// <exception cref="System.IO.IOException"/> private void ReadNumMetablocks(TFile.Reader reader, int n) { int len = Runtime.GetBytesForString(("something to test" + 0)).Length; for (int i = 0; i < n; i++) { DataInputStream din = reader.GetMetaBlock("TfileMeta" + i); byte[] b = new byte[len]; din.ReadFully(b); Assert.True("faield to match metadata", Arrays.Equals(Runtime.GetBytesForString (("something to test" + i)), b)); din.Close(); } }
public virtual float ReadSingle()
{
    FillBuffer(4);
    var byteArrayInputStream = new ByteArrayInputStream(ReadSwapped(4));
    var dataInputStream = new DataInputStream(byteArrayInputStream);
    var result = dataInputStream.ReadFloat();
    byteArrayInputStream.Close();
    dataInputStream.Close();
    return result;
}
public virtual double ReadDouble()
{
    FillBuffer(8);
    var byteArrayInputStream = new ByteArrayInputStream(ReadSwapped(8));
    var dataInputStream = new DataInputStream(byteArrayInputStream);
    var result = dataInputStream.ReadDouble();
    byteArrayInputStream.Close();
    dataInputStream.Close();
    return result;
}
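// ReadSingle/ReadDouble above work because DataInputStream decodes big-endian, so
// ReadSwapped(n) must hand it the little-endian source bytes in reversed order. A minimal
// equivalent sketch using System.BitConverter instead of a temporary stream pair;
// "ReadDoubleViaBitConverter" is a hypothetical name and it assumes FillBuffer/ReadSwapped
// behave exactly as in the methods above.
public virtual double ReadDoubleViaBitConverter()
{
    FillBuffer(8);
    byte[] raw = ReadSwapped(8);            // bytes now in big-endian order
    if (System.BitConverter.IsLittleEndian)
    {
        System.Array.Reverse(raw);          // restore host (little-endian) order for BitConverter
    }
    return System.BitConverter.ToDouble(raw, 0);
}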
/// <summary>Read the cut points from the given sequence file.</summary>
/// <param name="fs">the file system</param>
/// <param name="p">the path to read</param>
/// <param name="conf">the job config</param>
/// <returns>the strings to split the partitions on</returns>
/// <exception cref="System.IO.IOException"/>
private static Text[] ReadPartitions(FileSystem fs, Path p, Configuration conf)
{
    int reduces = conf.GetInt(MRJobConfig.NumReduces, 1);
    Text[] result = new Text[reduces - 1];
    DataInputStream reader = fs.Open(p);
    for (int i = 0; i < reduces - 1; ++i)
    {
        result[i] = new Text();
        result[i].ReadFields(reader);
    }
    reader.Close();
    return result;
}
public override void Configure(JobConf jconf)
{
    conf = jconf;
    try
    {
        Path[] localArchives = DistributedCache.GetLocalCacheArchives(conf);
        Path[] localFiles = DistributedCache.GetLocalCacheFiles(conf);
        // read the cached files (unzipped, unjarred and text)
        // and put it into a single file TEST_ROOT_DIR/test.txt
        string TestRootDir = jconf.Get("test.build.data", "/tmp");
        Path file = new Path("file:///", TestRootDir);
        FileSystem fs = FileSystem.GetLocal(conf);
        if (!fs.Mkdirs(file))
        {
            throw new IOException("Mkdirs failed to create " + file.ToString());
        }
        Path fileOut = new Path(file, "test.txt");
        fs.Delete(fileOut, true);
        DataOutputStream @out = fs.Create(fileOut);
        for (int i = 0; i < localArchives.Length; i++)
        {
            // read out the files from these archives
            FilePath f = new FilePath(localArchives[i].ToString());
            FilePath txt = new FilePath(f, "test.txt");
            FileInputStream fin = new FileInputStream(txt);
            DataInputStream din = new DataInputStream(fin);
            string str = din.ReadLine();
            din.Close();
            @out.WriteBytes(str);
            @out.WriteBytes("\n");
        }
        for (int i_1 = 0; i_1 < localFiles.Length; i_1++)
        {
            // read out the files from these archives
            FilePath txt = new FilePath(localFiles[i_1].ToString());
            FileInputStream fin = new FileInputStream(txt);
            DataInputStream din = new DataInputStream(fin);
            string str = din.ReadLine();
            @out.WriteBytes(str);
            @out.WriteBytes("\n");
        }
        @out.Close();
    }
    catch (IOException ie)
    {
        System.Console.Out.WriteLine(StringUtils.StringifyException(ie));
    }
}
/// <summary>
/// Get the token identifier object, or null if it could not be constructed
/// (because the class could not be loaded, for example).
/// </summary>
/// <returns>the token identifier, or null</returns>
/// <exception cref="System.IO.IOException"></exception>
public virtual T DecodeIdentifier()
{
    Type cls = GetClassForIdentifier(GetKind());
    if (cls == null)
    {
        return null;
    }
    TokenIdentifier tokenIdentifier = ReflectionUtils.NewInstance(cls, null);
    ByteArrayInputStream buf = new ByteArrayInputStream(identifier);
    DataInputStream @in = new DataInputStream(buf);
    tokenIdentifier.ReadFields(@in);
    @in.Close();
    return (T)tokenIdentifier;
}
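// A hedged usage sketch for DecodeIdentifier above; "DescribeToken" and its parameter are
// illustrative only, and it assumes the identifier class for the token's kind is visible
// to GetClassForIdentifier (otherwise the method returns null by design).
public static string DescribeToken<T>(Org.Apache.Hadoop.Security.Token.Token<T> token)
    where T : TokenIdentifier
{
    T id = token.DecodeIdentifier();
    if (id == null)
    {
        // the identifier class could not be constructed for this kind
        return "unknown token kind: " + token.GetKind();
    }
    return id.ToString();
}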
/// <exception cref="System.IO.IOException"/> public virtual void Map(Text key, LongWritable value, OutputCollector <Text, LongWritable > collector, Reporter reporter) { string name = key.ToString(); long size = value.Get(); long seed = long.Parse(name); random.SetSeed(seed); reporter.SetStatus("opening " + name); DataInputStream @in = new DataInputStream(fs.Open(new Path(DataDir, name))); long read = 0; try { while (read < size) { long remains = size - read; int n = (remains <= buffer.Length) ? (int)remains : buffer.Length; @in.ReadFully(buffer, 0, n); read += n; if (fastCheck) { Arrays.Fill(check, unchecked ((byte)random.Next(byte.MaxValue))); } else { random.NextBytes(check); } if (n != buffer.Length) { Arrays.Fill(buffer, n, buffer.Length, unchecked ((byte)0)); Arrays.Fill(check, n, check.Length, unchecked ((byte)0)); } NUnit.Framework.Assert.IsTrue(Arrays.Equals(buffer, check)); reporter.SetStatus("reading " + name + "@" + read + "/" + size); } } finally { @in.Close(); } collector.Collect(new Text("bytes"), new LongWritable(read)); reporter.SetStatus("read " + name); }
/// <exception cref="System.IO.IOException"/> private void SomeReadingWithMetaBlock(TFile.Reader reader) { DataInputStream din = null; ReadNumMetablocks(reader, 10); try { din = reader.GetMetaBlock("NO ONE"); Assert.True(false); } catch (MetaBlockDoesNotExist) { } // should catch din = reader.GetMetaBlock("TFileMeta100"); int read = din.Read(); Assert.True("check for status", (read == -1)); din.Close(); }
/// <exception cref="System.IO.IOException"/> private static bool IsSequenceFile(FileSystem fs, Path f) { DataInputStream @in = fs.Open(f); try { byte[] seq = Sharpen.Runtime.GetBytesForString("SEQ"); for (int i = 0; i < seq.Length; ++i) { if (seq[i] != @in.Read()) { return(false); } } } finally { @in.Close(); } return(true); }