public virtual void TestDataOutputByteBufferCompatibility()
{
    DataOutputBuffer dob = new DataOutputBuffer();
    DataOutputByteBuffer dobb = new DataOutputByteBuffer();
    Random r = new Random();
    long seed = r.NextLong();
    r.SetSeed(seed);
    System.Console.Out.WriteLine("SEED: " + seed);
    WriteJunk(dob, r, seed, 1000);
    WriteJunk(dobb, r, seed, 1000);
    byte[] check = ToBytes(dobb.GetData(), dobb.GetLength());
    Assert.Equal(check.Length, dob.GetLength());
    Assert.AssertArrayEquals(check, Arrays.CopyOf(dob.GetData(), dob.GetLength()));
    dob.Reset();
    dobb.Reset();
    WriteJunk(dob, r, seed, 3000);
    WriteJunk(dobb, r, seed, 3000);
    check = ToBytes(dobb.GetData(), dobb.GetLength());
    Assert.Equal(check.Length, dob.GetLength());
    Assert.AssertArrayEquals(check, Arrays.CopyOf(dob.GetData(), dob.GetLength()));
    dob.Reset();
    dobb.Reset();
    WriteJunk(dob, r, seed, 1000);
    WriteJunk(dobb, r, seed, 1000);
    check = ToBytes(dobb.GetData(), dobb.GetLength());
    Assert.Equal("Failed Checking length = " + check.Length, check.Length, dob.GetLength());
    Assert.AssertArrayEquals(check, Arrays.CopyOf(dob.GetData(), dob.GetLength()));
}
public virtual void TestMany()
{
    // Write a big set of data, one of each primitive type array
    foreach (object x in bigSet)
    {
        // Write each test object two ways:
        // first, transparently via ObjectWritable
        ObjectWritable.WriteObject(@out, x, x.GetType(), null, true);
        // second, explicitly via ArrayPrimitiveWritable
        (new ArrayPrimitiveWritable(x)).Write(@out);
    }
    // Now read the data back in
    @in.Reset(@out.GetData(), @out.GetLength());
    for (int x_1 = 0; x_1 < resultSet.Length; )
    {
        // First, transparently
        resultSet[x_1++] = ObjectWritable.ReadObject(@in, null);
        // Second, explicitly
        ArrayPrimitiveWritable apw = new ArrayPrimitiveWritable();
        apw.ReadFields(@in);
        resultSet[x_1++] = apw.Get();
    }
    // Validate data structures and values
    Assert.Equal(expectedResultSet.Length, resultSet.Length);
    for (int x_2 = 0; x_2 < resultSet.Length; x_2++)
    {
        Assert.Equal("ComponentType of array " + x_2,
            expectedResultSet[x_2].GetType().GetElementType(),
            resultSet[x_2].GetType().GetElementType());
    }
    Assert.True("In and Out arrays didn't match values", Arrays.DeepEquals(expectedResultSet, resultSet));
}
/// <exception cref="System.Exception"/>
public virtual void TestIO()
{
    DataOutputBuffer @out = new DataOutputBuffer();
    DataInputBuffer @in = new DataInputBuffer();
    for (int i = 0; i < NumIterations; i++)
    {
        // Generate a random string
        string before;
        if (i == 0)
        {
            before = GetLongString();
        }
        else
        {
            before = GetTestString();
        }
        // Write it
        @out.Reset();
        Org.Apache.Hadoop.IO.Text.WriteString(@out, before);
        // Test that it reads back correctly
        @in.Reset(@out.GetData(), @out.GetLength());
        string after = Org.Apache.Hadoop.IO.Text.ReadString(@in);
        Assert.True(before.Equals(after));
        // Test compatibility with Java's other decoder
        int strLenSize = WritableUtils.GetVIntSize(Org.Apache.Hadoop.IO.Text.Utf8Length(before));
        string after2 = Runtime.GetStringForBytes(@out.GetData(), strLenSize, @out.GetLength() - strLenSize, "UTF-8");
        Assert.True(before.Equals(after2));
    }
}
/// <summary>Read raw bytes from a SequenceFile.</summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
public override bool NextKeyValue()
{
    lock (this)
    {
        if (done)
        {
            return false;
        }
        long pos = @in.GetPosition();
        bool eof = -1 == @in.NextRawKey(buffer);
        if (!eof)
        {
            if (key == null)
            {
                key = new BytesWritable();
            }
            if (value == null)
            {
                value = new BytesWritable();
            }
            key.Set(buffer.GetData(), 0, buffer.GetLength());
            buffer.Reset();
            @in.NextRawValue(vbytes);
            vbytes.WriteUncompressedBytes(buffer);
            value.Set(buffer.GetData(), 0, buffer.GetLength());
            buffer.Reset();
        }
        return !(done = (eof || (pos >= end && @in.SyncSeen())));
    }
}
public virtual void TestGzipCompatibility()
{
    Random r = new Random();
    long seed = r.NextLong();
    r.SetSeed(seed);
    Log.Info("seed: " + seed);
    DataOutputBuffer dflbuf = new DataOutputBuffer();
    GZIPOutputStream gzout = new GZIPOutputStream(dflbuf);
    byte[] b = new byte[r.Next(128 * 1024 + 1)];
    r.NextBytes(b);
    gzout.Write(b);
    gzout.Close();
    DataInputBuffer gzbuf = new DataInputBuffer();
    gzbuf.Reset(dflbuf.GetData(), dflbuf.GetLength());
    Configuration conf = new Configuration();
    conf.SetBoolean(CommonConfigurationKeys.IoNativeLibAvailableKey, false);
    CompressionCodec codec = ReflectionUtils.NewInstance<GzipCodec>(conf);
    Decompressor decom = codec.CreateDecompressor();
    NUnit.Framework.Assert.IsNotNull(decom);
    Assert.Equal(typeof(BuiltInGzipDecompressor), decom.GetType());
    InputStream gzin = codec.CreateInputStream(gzbuf, decom);
    dflbuf.Reset();
    IOUtils.CopyBytes(gzin, dflbuf, 4096);
    byte[] dflchk = Arrays.CopyOf(dflbuf.GetData(), dflbuf.GetLength());
    Assert.AssertArrayEquals(b, dflchk);
}
private static byte[] ByteArrayForBloomKey(DataOutputBuffer buf)
{
    int cleanLength = buf.GetLength();
    byte[] ba = buf.GetData();
    if (cleanLength != ba.Length)
    {
        ba = new byte[cleanLength];
        System.Array.Copy(buf.GetData(), 0, ba, 0, cleanLength);
    }
    return ba;
}
/// <summary>Serialize the shuffle port into a ByteBuffer for use later on.</summary>
/// <param name="port">the port to be sent to the ApplicationMaster</param>
/// <returns>the serialized form of the port.</returns>
/// <exception cref="System.IO.IOException"/>
public static ByteBuffer SerializeMetaData(int port)
{
    // TODO: these bytes should be versioned
    DataOutputBuffer port_dob = new DataOutputBuffer();
    port_dob.WriteInt(port);
    return ByteBuffer.Wrap(port_dob.GetData(), 0, port_dob.GetLength());
}
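For context, a minimal sketch of the matching read side (illustrative, not part of the original listing): it assumes DataInputByteBuffer, used elsewhere in this section, exposes the DataInput-style ReadInt() as the counterpart of DataOutputBuffer.WriteInt(), and the method name is hypothetical.

// Hypothetical counterpart: recover the port from the ByteBuffer produced by SerializeMetaData.
public static int DeserializeMetaData(ByteBuffer meta)
{
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    dibb.Reset(meta);           // wrap the serialized bytes
    return dibb.ReadInt();      // assumption: ReadInt() mirrors WriteInt()
}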
/// <exception cref="System.IO.IOException"/>
private void SetupTokens(ContainerLaunchContext container, ContainerId containerID)
{
    IDictionary<string, string> environment = container.GetEnvironment();
    environment[ApplicationConstants.ApplicationWebProxyBaseEnv] = application.GetWebProxyBase();
    // Set AppSubmitTime and MaxAppAttempts to be consumable by the AM.
    ApplicationId applicationId = application.GetAppAttemptId().GetApplicationId();
    environment[ApplicationConstants.AppSubmitTimeEnv] = rmContext.GetRMApps()[applicationId].GetSubmitTime().ToString();
    environment[ApplicationConstants.MaxAppAttemptsEnv] = rmContext.GetRMApps()[applicationId].GetMaxAppAttempts().ToString();
    Credentials credentials = new Credentials();
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    if (container.GetTokens() != null)
    {
        // TODO: Don't do this kind of checks everywhere.
        dibb.Reset(container.GetTokens());
        credentials.ReadTokenStorageStream(dibb);
    }
    // Add AMRMToken
    Org.Apache.Hadoop.Security.Token.Token<AMRMTokenIdentifier> amrmToken = CreateAndSetAMRMToken();
    if (amrmToken != null)
    {
        credentials.AddToken(amrmToken.GetService(), amrmToken);
    }
    DataOutputBuffer dob = new DataOutputBuffer();
    credentials.WriteTokenStorageToStream(dob);
    container.SetTokens(ByteBuffer.Wrap(dob.GetData(), 0, dob.GetLength()));
}
/// <exception cref="System.Exception"/>
public virtual void TestLocalizerStatusSerDe()
{
    LocalizerStatus rsrcS = CreateLocalizerStatus();
    NUnit.Framework.Assert.IsTrue(rsrcS is LocalizerStatusPBImpl);
    LocalizerStatusPBImpl rsrcPb = (LocalizerStatusPBImpl)rsrcS;
    DataOutputBuffer @out = new DataOutputBuffer();
    rsrcPb.GetProto().WriteDelimitedTo(@out);
    DataInputBuffer @in = new DataInputBuffer();
    @in.Reset(@out.GetData(), 0, @out.GetLength());
    YarnServerNodemanagerServiceProtos.LocalizerStatusProto rsrcPbD =
        YarnServerNodemanagerServiceProtos.LocalizerStatusProto.ParseDelimitedFrom(@in);
    NUnit.Framework.Assert.IsNotNull(rsrcPbD);
    LocalizerStatus rsrcD = new LocalizerStatusPBImpl(rsrcPbD);
    NUnit.Framework.Assert.AreEqual(rsrcS, rsrcD);
    NUnit.Framework.Assert.AreEqual("localizer0", rsrcS.GetLocalizerId());
    NUnit.Framework.Assert.AreEqual("localizer0", rsrcD.GetLocalizerId());
    NUnit.Framework.Assert.AreEqual(CreateLocalResourceStatus(), rsrcS.GetResourceStatus(0));
    NUnit.Framework.Assert.AreEqual(CreateLocalResourceStatus(), rsrcD.GetResourceStatus(0));
}
private static ByteBuffer ConvertCredentialsToByteBuffer(Credentials credentials)
{
    ByteBuffer appAttemptTokens = null;
    DataOutputBuffer dob = new DataOutputBuffer();
    try
    {
        if (credentials != null)
        {
            credentials.WriteTokenStorageToStream(dob);
            appAttemptTokens = ByteBuffer.Wrap(dob.GetData(), 0, dob.GetLength());
        }
        return appAttemptTokens;
    }
    catch (IOException)
    {
        Log.Error("Failed to convert Credentials to ByteBuffer.");
        System.Diagnostics.Debug.Assert(false);
        return null;
    }
    finally
    {
        IOUtils.CloseStream(dob);
    }
}
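The reverse conversion follows the same pattern used in SetupTokens above: wrap the buffer in a DataInputByteBuffer and read the token storage back. A minimal sketch; the method name is illustrative and not part of the original listing.

// Hypothetical counterpart of ConvertCredentialsToByteBuffer.
private static Credentials ConvertCredentialsFromByteBuffer(ByteBuffer appAttemptTokens)
{
    if (appAttemptTokens == null)
    {
        return null;
    }
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    dibb.Reset(appAttemptTokens);               // wrap the serialized token storage
    Credentials credentials = new Credentials();
    credentials.ReadTokenStorageStream(dibb);   // same call used when launching the AM
    return credentials;
}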
public virtual void TestPacket()
{
    Random r = new Random(12345L);
    byte[] data = new byte[chunkSize];
    r.NextBytes(data);
    byte[] checksum = new byte[checksumSize];
    r.NextBytes(checksum);
    DataOutputBuffer os = new DataOutputBuffer(data.Length * 2);
    byte[] packetBuf = new byte[data.Length * 2];
    DFSPacket p = new DFSPacket(packetBuf, maxChunksPerPacket, 0, 0, checksumSize, false);
    p.SetSyncBlock(true);
    p.WriteData(data, 0, data.Length);
    p.WriteChecksum(checksum, 0, checksum.Length);
    p.WriteTo(os);
    // We have set syncBlock to true, so the header has the maximum length
    int headerLen = PacketHeader.PktMaxHeaderLen;
    byte[] readBuf = os.GetData();
    AssertArrayRegionsEqual(readBuf, headerLen, checksum, 0, checksum.Length);
    AssertArrayRegionsEqual(readBuf, headerLen + checksum.Length, data, 0, data.Length);
}
/// <summary>
/// Write a protobuf to a buffer 'numProtos' times, and then
/// read them back, making sure all data comes through correctly.
/// </summary>
/// <exception cref="System.IO.IOException"/>
private void DoTest(int numProtos)
{
    Configuration conf = new Configuration();
    DataOutputBuffer @out = new DataOutputBuffer();
    // Write numProtos protobufs to the buffer
    Message[] sent = new Message[numProtos];
    for (int i = 0; i < numProtos; i++)
    {
        // Construct a test protocol buffer using one of the
        // protos that ships with the protobuf library
        Message testProto = ((DescriptorProtos.EnumValueDescriptorProto)DescriptorProtos.EnumValueDescriptorProto
            .NewBuilder().SetName("test" + i).SetNumber(i).Build());
        ObjectWritable.WriteObject(@out, testProto, typeof(DescriptorProtos.EnumValueDescriptorProto), conf);
        sent[i] = testProto;
    }
    // Read back the data
    DataInputBuffer @in = new DataInputBuffer();
    @in.Reset(@out.GetData(), @out.GetLength());
    for (int i_1 = 0; i_1 < numProtos; i_1++)
    {
        Message received = (Message)ObjectWritable.ReadObject(@in, conf);
        Assert.Equal(sent[i_1], received);
    }
}
/// <summary>Write the given object to the stream.</summary>
/// <remarks>
/// Write the given object to the stream. If it is a Text or BytesWritable,
/// write it directly. Otherwise, write it to a buffer and then write the
/// length and data to the stream.
/// </remarks>
/// <param name="obj">the object to write</param>
/// <exception cref="System.IO.IOException"/>
private void WriteObject(Writable obj)
{
    // For Text and BytesWritable, encode them directly, so that they end up
    // in C++ as the natural translations.
    if (obj is Text)
    {
        Text t = (Text)obj;
        int len = t.GetLength();
        WritableUtils.WriteVInt(stream, len);
        stream.Write(t.GetBytes(), 0, len);
    }
    else if (obj is BytesWritable)
    {
        BytesWritable b = (BytesWritable)obj;
        int len = b.GetLength();
        WritableUtils.WriteVInt(stream, len);
        stream.Write(b.GetBytes(), 0, len);
    }
    else
    {
        buffer.Reset();
        obj.Write(buffer);
        int length = buffer.GetLength();
        WritableUtils.WriteVInt(stream, length);
        stream.Write(buffer.GetData(), 0, length);
    }
}
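All three branches above produce the same framing: a varint length prefix followed by the raw bytes. A minimal sketch of how a reader could consume one such record (illustrative only; the method name and the ReadFully call on the input stream are assumptions, not part of the original listing):

// Hypothetical read side for the length-prefixed framing used by WriteObject.
private static byte[] ReadLengthPrefixedRecord(DataInputStream inStream)
{
    int len = WritableUtils.ReadVInt(inStream);   // read the varint length prefix
    byte[] record = new byte[len];
    inStream.ReadFully(record);                   // then read exactly that many payload bytes
    return record;
}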
/// <exception cref="System.IO.IOException"/>
protected internal override void FlushAndSync(bool durable)
{
    int numReadyBytes = buf.CountReadyBytes();
    if (numReadyBytes > 0)
    {
        int numReadyTxns = buf.CountReadyTxns();
        long firstTxToFlush = buf.GetFirstReadyTxId();
        System.Diagnostics.Debug.Assert(numReadyTxns > 0);
        // Copy from our double-buffer into a new byte array. This is for
        // two reasons:
        // 1) The IPC code has no way of specifying to send only a slice of
        //    a larger array.
        // 2) Because the calls to the underlying nodes are asynchronous, we
        //    need a defensive copy to avoid accidentally mutating the buffer
        //    before it is sent.
        DataOutputBuffer bufToSend = new DataOutputBuffer(numReadyBytes);
        buf.FlushTo(bufToSend);
        System.Diagnostics.Debug.Assert(bufToSend.GetLength() == numReadyBytes);
        byte[] data = bufToSend.GetData();
        System.Diagnostics.Debug.Assert(data.Length == bufToSend.GetLength());
        QuorumCall<AsyncLogger, Void> qcall = loggers.SendEdits(segmentTxId, firstTxToFlush, numReadyTxns, data);
        loggers.WaitForWriteQuorum(qcall, writeTimeoutMs, "sendEdits");
        // Since we successfully wrote this batch, let the loggers know. Any future
        // RPCs will thus let the loggers know of the most recent transaction, even
        // if a logger has fallen behind.
        loggers.SetCommittedTxId(firstTxToFlush + numReadyTxns - 1);
    }
}
/// <exception cref="System.IO.IOException"/>
public static void TestReadInRange(long val, int lower, int upper, bool expectSuccess)
{
    DataOutputBuffer buf = new DataOutputBuffer();
    DataInputBuffer inbuf = new DataInputBuffer();
    WritableUtils.WriteVLong(buf, val);
    try
    {
        inbuf.Reset(buf.GetData(), 0, buf.GetLength());
        long val2 = WritableUtils.ReadVIntInRange(inbuf, lower, upper);
        if (!expectSuccess)
        {
            Fail("expected readVIntInRange to throw an exception");
        }
        Assert.Equal(val, val2);
    }
    catch (IOException e)
    {
        if (expectSuccess)
        {
            Log.Error("unexpected exception:", e);
            Fail("readVIntInRange threw an unexpected exception");
        }
    }
}
/// <exception cref="System.IO.IOException"/>
public virtual void DoTestLimitedIO(string str, int len)
{
    DataOutputBuffer @out = new DataOutputBuffer();
    DataInputBuffer @in = new DataInputBuffer();
    @out.Reset();
    try
    {
        Org.Apache.Hadoop.IO.Text.WriteString(@out, str, len);
        Fail("expected writeString to fail when told to write a string " + "that was too long! The string was '" + str + "'");
    }
    catch (IOException)
    {
    }
    Org.Apache.Hadoop.IO.Text.WriteString(@out, str, len + 1);
    // Test that it reads back correctly
    @in.Reset(@out.GetData(), @out.GetLength());
    @in.Mark(len);
    string after;
    try
    {
        after = Org.Apache.Hadoop.IO.Text.ReadString(@in, len);
        Fail("expected readString to fail when told to read a string " + "that was too long! The string was '" + str + "'");
    }
    catch (IOException)
    {
    }
    @in.Reset();
    after = Org.Apache.Hadoop.IO.Text.ReadString(@in, len + 1);
    Assert.True(str.Equals(after));
}
/// <exception cref="System.Exception"/>
public virtual void TestBadLength()
{
    int Dlen = 100;
    DataOutputBuffer dob = new DataOutputBuffer(Dlen + 4);
    IFileOutputStream ifos = new IFileOutputStream(dob);
    for (int i = 0; i < Dlen; ++i)
    {
        ifos.Write(i);
    }
    ifos.Close();
    DataInputBuffer dib = new DataInputBuffer();
    dib.Reset(dob.GetData(), Dlen + 4);
    IFileInputStream ifis = new IFileInputStream(dib, 100, new Configuration());
    int i_1 = 0;
    try
    {
        while (i_1 < Dlen - 8)
        {
            NUnit.Framework.Assert.AreEqual(i_1++, ifis.Read());
        }
        ifis.Close();
    }
    catch (ChecksumException)
    {
        NUnit.Framework.Assert.AreEqual("Checksum before close", i_1, Dlen - 8);
        return;
    }
    Fail("Did not detect bad data in checksum");
}
/// <summary>
/// If valueClass is undefined, readFields should throw an exception indicating
/// that the field is null.
/// </summary>
/// <remarks>
/// If valueClass is undefined, readFields should throw an exception indicating
/// that the field is null. Otherwise, readFields should succeed.
/// </remarks>
/// <exception cref="System.IO.IOException"/>
public virtual void TestThrowUndefinedValueException()
{
    // Get a buffer containing a simple text array
    Text[] elements = new Text[] { new Text("zero"), new Text("one"), new Text("two") };
    TestArrayWritable.TextArrayWritable sourceArray = new TestArrayWritable.TextArrayWritable();
    sourceArray.Set(elements);
    // Write it to a normal output buffer
    DataOutputBuffer @out = new DataOutputBuffer();
    DataInputBuffer @in = new DataInputBuffer();
    sourceArray.Write(@out);
    // Read the output buffer with TextReadable. Since the valueClass is defined,
    // this should succeed
    TestArrayWritable.TextArrayWritable destArray = new TestArrayWritable.TextArrayWritable();
    @in.Reset(@out.GetData(), @out.GetLength());
    destArray.ReadFields(@in);
    Writable[] destElements = destArray.Get();
    Assert.True(destElements.Length == elements.Length);
    for (int i = 0; i < elements.Length; i++)
    {
        Assert.Equal(destElements[i], elements[i]);
    }
}
/// <exception cref="System.IO.IOException"/>
public virtual void TestSerializeAndDeserializeNull()
{
    bool gotException = false;
    try
    {
        new EnumSetWritable<TestEnumSetWritable.TestEnumSet>(null);
    }
    catch (RuntimeException)
    {
        gotException = true;
    }
    Assert.True("Instantiation of empty EnumSetWritable with no element type class " + "provided should throw exception", gotException);
    EnumSetWritable<TestEnumSetWritable.TestEnumSet> nullFlagWritable =
        new EnumSetWritable<TestEnumSetWritable.TestEnumSet>(null, typeof(TestEnumSetWritable.TestEnumSet));
    DataOutputBuffer @out = new DataOutputBuffer();
    ObjectWritable.WriteObject(@out, nullFlagWritable, nullFlagWritable.GetType(), null);
    DataInputBuffer @in = new DataInputBuffer();
    @in.Reset(@out.GetData(), @out.GetLength());
    EnumSet<TestEnumSetWritable.TestEnumSet> read =
        ((EnumSetWritable<TestEnumSetWritable.TestEnumSet>)ObjectWritable.ReadObject(@in, null)).Get();
    Assert.Equal(read, null);
}
public virtual void TestNodeHeartBeatResponse()
{
    NodeHeartbeatResponse record = Org.Apache.Hadoop.Yarn.Util.Records.NewRecord<NodeHeartbeatResponse>();
    IDictionary<ApplicationId, ByteBuffer> appCredentials = new Dictionary<ApplicationId, ByteBuffer>();
    Credentials app1Cred = new Credentials();
    Org.Apache.Hadoop.Security.Token.Token<DelegationTokenIdentifier> token1 =
        new Org.Apache.Hadoop.Security.Token.Token<DelegationTokenIdentifier>();
    token1.SetKind(new Text("kind1"));
    app1Cred.AddToken(new Text("token1"), token1);
    Org.Apache.Hadoop.Security.Token.Token<DelegationTokenIdentifier> token2 =
        new Org.Apache.Hadoop.Security.Token.Token<DelegationTokenIdentifier>();
    token2.SetKind(new Text("kind2"));
    app1Cred.AddToken(new Text("token2"), token2);
    DataOutputBuffer dob = new DataOutputBuffer();
    app1Cred.WriteTokenStorageToStream(dob);
    ByteBuffer byteBuffer1 = ByteBuffer.Wrap(dob.GetData(), 0, dob.GetLength());
    appCredentials[ApplicationId.NewInstance(1234, 1)] = byteBuffer1;
    record.SetSystemCredentialsForApps(appCredentials);
    NodeHeartbeatResponse proto = new NodeHeartbeatResponsePBImpl(((NodeHeartbeatResponsePBImpl)record).GetProto());
    NUnit.Framework.Assert.AreEqual(appCredentials, proto.GetSystemCredentialsForApps());
}
public virtual void TestSerialization()
{
    TestDelegationToken.TestDelegationTokenIdentifier origToken =
        new TestDelegationToken.TestDelegationTokenIdentifier(new Text("alice"), new Text("bob"), new Text("colin"));
    TestDelegationToken.TestDelegationTokenIdentifier newToken =
        new TestDelegationToken.TestDelegationTokenIdentifier();
    origToken.SetIssueDate(123);
    origToken.SetMasterKeyId(321);
    origToken.SetMaxDate(314);
    origToken.SetSequenceNumber(12345);
    // Clone origToken into newToken
    DataInputBuffer inBuf = new DataInputBuffer();
    DataOutputBuffer outBuf = new DataOutputBuffer();
    origToken.Write(outBuf);
    inBuf.Reset(outBuf.GetData(), 0, outBuf.GetLength());
    newToken.ReadFields(inBuf);
    // Now test the fields
    Assert.Equal("alice", newToken.GetUser().GetUserName());
    Assert.Equal(new Text("bob"), newToken.GetRenewer());
    Assert.Equal("colin", newToken.GetUser().GetRealUser().GetUserName());
    Assert.Equal(123, newToken.GetIssueDate());
    Assert.Equal(321, newToken.GetMasterKeyId());
    Assert.Equal(314, newToken.GetMaxDate());
    Assert.Equal(12345, newToken.GetSequenceNumber());
    Assert.Equal(origToken, newToken);
}
/// <exception cref="System.IO.IOException"/>
internal static DataInputBuffer CreateFakeCredentials(Random r, int nTok)
{
    Credentials creds = new Credentials();
    byte[] password = new byte[20];
    Text kind = new Text();
    Text service = new Text();
    Text alias = new Text();
    for (int i = 0; i < nTok; ++i)
    {
        byte[] identifier = Sharpen.Runtime.GetBytesForString("idef" + i);
        r.NextBytes(password);
        kind.Set("kind" + i);
        service.Set("service" + i);
        alias.Set("token" + i);
        Org.Apache.Hadoop.Security.Token.Token token =
            new Org.Apache.Hadoop.Security.Token.Token(identifier, password, kind, service);
        creds.AddToken(alias, token);
    }
    DataOutputBuffer buf = new DataOutputBuffer();
    creds.WriteTokenStorageToStream(buf);
    DataInputBuffer ret = new DataInputBuffer();
    ret.Reset(buf.GetData(), 0, buf.GetLength());
    return ret;
}
/// <exception cref="System.Exception"/>
public virtual void TestCompare()
{
    DataOutputBuffer out1 = new DataOutputBuffer();
    DataOutputBuffer out2 = new DataOutputBuffer();
    DataOutputBuffer out3 = new DataOutputBuffer();
    Text.Comparator comparator = new Text.Comparator();
    for (int i = 0; i < NumIterations; i++)
    {
        // Reset the output buffers
        out1.Reset();
        out2.Reset();
        out3.Reset();
        // Generate two random strings
        string str1;
        string str2;
        if (i == 0)
        {
            str1 = GetLongString();
            str2 = GetLongString();
        }
        else
        {
            str1 = GetTestString();
            str2 = GetTestString();
        }
        // Convert to Text objects
        Org.Apache.Hadoop.IO.Text txt1 = new Org.Apache.Hadoop.IO.Text(str1);
        Org.Apache.Hadoop.IO.Text txt2 = new Org.Apache.Hadoop.IO.Text(str2);
        Org.Apache.Hadoop.IO.Text txt3 = new Org.Apache.Hadoop.IO.Text(str1);
        // Serialize them
        txt1.Write(out1);
        txt2.Write(out2);
        txt3.Write(out3);
        // Compare the two strings by looking at their binary formats
        int ret1 = comparator.Compare(out1.GetData(), 0, out1.GetLength(), out2.GetData(), 0, out2.GetLength());
        // Compare the two Text objects directly
        int ret2 = txt1.CompareTo(txt2);
        Assert.Equal(ret1, ret2);
        Assert.Equal("Equivalence of different txt objects, same content", 0, txt1.CompareTo(txt3));
        Assert.Equal("Equivalence of data output buffers", 0,
            comparator.Compare(out1.GetData(), 0, out3.GetLength(), out3.GetData(), 0, out3.GetLength()));
    }
}
/// <exception cref="System.IO.IOException"/>
public virtual string ToString(T obj)
{
    outBuf.Reset();
    serializer.Serialize(obj);
    byte[] buf = new byte[outBuf.GetLength()];
    System.Array.Copy(outBuf.GetData(), 0, buf, 0, buf.Length);
    return new string(Base64.EncodeBase64(buf), Charsets.Utf8);
}
/// <summary>
/// A helper function to serialize the JobTokenIdentifier to be sent to the
/// ShuffleHandler as ServiceData.
/// </summary>
/// <param name="jobToken">
/// the job token to be used for authentication of
/// shuffle data requests.
/// </param>
/// <returns>the serialized version of the jobToken.</returns>
/// <exception cref="System.IO.IOException"/>
public static ByteBuffer SerializeServiceData(Org.Apache.Hadoop.Security.Token.Token<JobTokenIdentifier> jobToken)
{
    // TODO: these bytes should be versioned
    DataOutputBuffer jobToken_dob = new DataOutputBuffer();
    jobToken.Write(jobToken_dob);
    return ByteBuffer.Wrap(jobToken_dob.GetData(), 0, jobToken_dob.GetLength());
}
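As with the port metadata earlier, the ByteBuffer can be turned back into a token on the receiving side. A minimal sketch, assuming the usual Writable round trip (ReadFields mirrors Write) and DataInputByteBuffer as the wrapper; the method name is illustrative, not part of the original listing.

// Hypothetical counterpart of SerializeServiceData.
public static Org.Apache.Hadoop.Security.Token.Token<JobTokenIdentifier> DeserializeServiceData(ByteBuffer secret)
{
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    dibb.Reset(secret);                                   // wrap the serialized token bytes
    Org.Apache.Hadoop.Security.Token.Token<JobTokenIdentifier> jt =
        new Org.Apache.Hadoop.Security.Token.Token<JobTokenIdentifier>();
    jt.ReadFields(dibb);                                  // Writable counterpart of jobToken.Write(...)
    return jt;
}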
public virtual void TestBaseBuffers()
{
    DataOutputBuffer dob = new DataOutputBuffer();
    Random r = new Random();
    long seed = r.NextLong();
    r.SetSeed(seed);
    System.Console.Out.WriteLine("SEED: " + seed);
    WriteJunk(dob, r, seed, 1000);
    DataInputBuffer dib = new DataInputBuffer();
    dib.Reset(dob.GetData(), 0, dob.GetLength());
    ReadJunk(dib, r, seed, 1000);
    dob.Reset();
    WriteJunk(dob, r, seed, 1000);
    dib.Reset(dob.GetData(), 0, dob.GetLength());
    ReadJunk(dib, r, seed, 1000);
}
/// <exception cref="System.Exception"/>
public virtual void TestNullEncoding()
{
    string s = new string(new char[] { 0 });
    DataOutputBuffer dob = new DataOutputBuffer();
    new UTF8(s).Write(dob);
    Assert.Equal(s, Runtime.GetStringForBytes(dob.GetData(), 2, dob.GetLength() - 2, "UTF-8"));
}
/// <exception cref="System.IO.IOException"/>
public static void TestValue(int val, int vintlen)
{
    DataOutputBuffer buf = new DataOutputBuffer();
    DataInputBuffer inbuf = new DataInputBuffer();
    WritableUtils.WriteVInt(buf, val);
    if (Log.IsDebugEnabled())
    {
        Log.Debug("Value = " + val);
        BytesWritable printer = new BytesWritable();
        printer.Set(buf.GetData(), 0, buf.GetLength());
        Log.Debug("Buffer = " + printer);
    }
    inbuf.Reset(buf.GetData(), 0, buf.GetLength());
    Assert.Equal(val, WritableUtils.ReadVInt(inbuf));
    Assert.Equal(vintlen, buf.GetLength());
    Assert.Equal(vintlen, WritableUtils.GetVIntSize(val));
    Assert.Equal(vintlen, WritableUtils.DecodeVIntSize(buf.GetData()[0]));
}
/// <summary>
/// Generate a string with the url-quoted base64 encoded serialized form
/// of the Writable.
/// </summary>
/// <param name="obj">the object to serialize</param>
/// <returns>the encoded string</returns>
/// <exception cref="System.IO.IOException"/>
private static string EncodeWritable(IWritable obj)
{
    DataOutputBuffer buf = new DataOutputBuffer();
    obj.Write(buf);
    Base64 encoder = new Base64(0, null, true);
    byte[] raw = new byte[buf.GetLength()];
    System.Array.Copy(buf.GetData(), 0, raw, 0, buf.GetLength());
    return encoder.EncodeToString(raw);
}
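The natural inverse decodes the base64 string and repopulates the Writable. A minimal sketch, assuming the Base64 helper exposes a Decode(string) counterpart to EncodeToString; the method name and that call are assumptions, not part of the original listing.

// Hypothetical counterpart of EncodeWritable.
private static void DecodeWritable(IWritable obj, string encoded)
{
    Base64 decoder = new Base64(0, null, true);
    byte[] raw = decoder.Decode(encoded);          // assumption: Decode(string) returns the raw bytes
    DataInputBuffer inBuf = new DataInputBuffer();
    inBuf.Reset(raw, raw.Length);                  // same Reset(byte[], length) form used elsewhere above
    obj.ReadFields(inBuf);
}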
public virtual void TestCompressorDecopressorLogicWithCompressionStreams()
{
    DataOutputStream deflateOut = null;
    DataInputStream inflateIn = null;
    int ByteSize = 1024 * 100;
    byte[] bytes = Generate(ByteSize);
    int bufferSize = 262144;
    int compressionOverhead = (bufferSize / 6) + 32;
    try
    {
        DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
        CompressionOutputStream deflateFilter = new BlockCompressorStream(compressedDataBuffer,
            new Lz4Compressor(bufferSize), bufferSize, compressionOverhead);
        deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));
        deflateOut.Write(bytes, 0, bytes.Length);
        deflateOut.Flush();
        deflateFilter.Finish();
        DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
        deCompressedDataBuffer.Reset(compressedDataBuffer.GetData(), 0, compressedDataBuffer.GetLength());
        CompressionInputStream inflateFilter = new BlockDecompressorStream(deCompressedDataBuffer,
            new Lz4Decompressor(bufferSize), bufferSize);
        inflateIn = new DataInputStream(new BufferedInputStream(inflateFilter));
        byte[] result = new byte[ByteSize];
        inflateIn.Read(result);
        Assert.AssertArrayEquals("original array not equals compress/decompressed array", result, bytes);
    }
    catch (IOException)
    {
        NUnit.Framework.Assert.Fail("testLz4CompressorDecopressorLogicWithCompressionStreams ex error !!!");
    }
    finally
    {
        try
        {
            if (deflateOut != null)
            {
                deflateOut.Close();
            }
            if (inflateIn != null)
            {
                inflateIn.Close();
            }
        }
        catch (Exception)
        {
        }
    }
}