/// <exception cref="System.IO.IOException"/> protected internal override InputStream GetInputStream(int bufferSize, byte[] key , byte[] iv) { ByteArrayInputStream @in = new ByteArrayInputStream(buffer, 0, bufferLen); return(new CryptoInputStream(@in, codec, bufferSize, key, iv)); }
/// <summary>Cancel a token by removing it from cache.</summary>
/// <returns>Identifier of the canceled token</returns>
/// <exception cref="Org.Apache.Hadoop.Security.Token.SecretManager.InvalidToken">for invalid token</exception>
/// <exception cref="Org.Apache.Hadoop.Security.AccessControlException">if the user isn't allowed to cancel</exception>
/// <exception cref="System.IO.IOException"/>
public virtual TokenIdent CancelToken(Org.Apache.Hadoop.Security.Token.Token<TokenIdent> token, string canceller)
{
    lock (this)
    {
        // Deserialize the token identifier from its raw bytes.
        ByteArrayInputStream buf = new ByteArrayInputStream(token.GetIdentifier());
        DataInputStream @in = new DataInputStream(buf);
        TokenIdent id = CreateIdentifier();
        id.ReadFields(@in);
        Log.Info("Token cancellation requested for identifier: " + id);
        if (id.GetUser() == null)
        {
            throw new SecretManager.InvalidToken("Token with no owner");
        }
        string owner = id.GetUser().GetUserName();
        Text renewer = id.GetRenewer();
        HadoopKerberosName cancelerKrbName = new HadoopKerberosName(canceller);
        string cancelerShortName = cancelerKrbName.GetShortName();
        // Only the owner or the designated renewer may cancel the token.
        if (!canceller.Equals(owner) && (renewer == null || renewer.ToString().IsEmpty() ||
            !cancelerShortName.Equals(renewer.ToString())))
        {
            throw new AccessControlException(canceller + " is not authorized to cancel the token");
        }
        AbstractDelegationTokenSecretManager.DelegationTokenInformation info =
            Collections.Remove(currentTokens, id);
        if (info == null)
        {
            throw new SecretManager.InvalidToken("Token not found");
        }
        RemoveStoredToken(id);
        return id;
    }
}
public void TestCreatePropertySets()
{
    Type[] expected = new Type[]
    {
        typeof(NoPropertySetStreamException),
        typeof(SummaryInformation),
        typeof(NoPropertySetStreamException)
    };
    for (int i = 0; i < expected.Length; i++)
    {
        Stream in1 = new ByteArrayInputStream(poiFiles[i].GetBytes());
        Object o;
        try
        {
            o = PropertySetFactory.Create(in1);
        }
        catch (NoPropertySetStreamException ex)
        {
            o = ex;
        }
        catch (MarkUnsupportedException ex)
        {
            o = ex;
        }
        in1.Close();
        Assert.AreEqual(o.GetType(), expected[i]);
    }
}
public void Available_InitializedStream_ReturnsNumberOfAvailableBytes()
{
    byte[] data = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
    ByteArrayInputStream stream = ByteArrayInputStream.Create(data);
    Assert.AreEqual(11, stream.Available());
}
/// <summary>Assert MapWritable does not grow across calls to readFields.</summary>
/// <exception cref="System.Exception"/>
/// <seealso><a href="https://issues.apache.org/jira/browse/HADOOP-2244">HADOOP-2244</a></seealso>
public virtual void TestMultipleCallsToReadFieldsAreSafe()
{
    // Create an instance and add a key/value.
    MapWritable m = new MapWritable();
    Text t = new Text(GetName());
    m[t] = t;
    // Get current size of map. Key values are 't'.
    int count = m.Count;
    // Now serialize... save off the bytes.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    m.Write(dos);
    dos.Close();
    // Now add new values to the MapWritable.
    m[new Text("key1")] = new Text("value1");
    m[new Text("key2")] = new Text("value2");
    // Now deserialize the original MapWritable. Ensure count and key values
    // match the original state.
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.ToByteArray());
    DataInputStream dis = new DataInputStream(bais);
    m.ReadFields(dis);
    Assert.Equal(count, m.Count);
    Assert.True(m[t].Equals(t));
    dis.Close();
}
/// <exception cref="System.IO.IOException"></exception> private void AssertNoCrLfHelper(string expect, string input) { byte[] inbytes = Sharpen.Runtime.GetBytesForString(input); byte[] expectBytes = Sharpen.Runtime.GetBytesForString(expect); for (int i = 0; i < 5; ++i) { byte[] buf = new byte[i]; ByteArrayInputStream bis = new ByteArrayInputStream(inbytes); InputStream @in = new AutoCRLFInputStream(bis, true); ByteArrayOutputStream @out = new ByteArrayOutputStream(); if (i > 0) { int n; while ((n = @in.Read(buf)) >= 0) { @out.Write(buf, 0, n); } } else { int c; while ((c = @in.Read()) != -1) { @out.Write(c); } } @out.Flush(); @in.Close(); @out.Close(); byte[] actualBytes = @out.ToByteArray(); NUnit.Framework.Assert.AreEqual(Encode(expectBytes), Encode(actualBytes), "bufsize=" + i); } }
/// <summary>
/// Compresses data into the GZIP format.
/// </summary>
/// <param name="bytes"> The uncompressed bytes. </param>
/// <returns> The compressed bytes. </returns>
/// <exception cref="IOException"> if an I/O error occurs. </exception>
public static byte[] Gzip(byte[] bytes)
{
    /* create the streams */
    var @is = new ByteArrayInputStream(bytes);
    try
    {
        var bout = new ByteArrayOutputStream();
        var os = new GZIPOutputStream(bout);
        try
        {
            /* copy data between the streams */
            var buf = new byte[4096];
            var len = 0;
            while ((len = @is.read(buf, 0, buf.Length)) != -1)
            {
                os.write(buf, 0, len);
            }
        }
        finally
        {
            os.close();
        }
        /* return the compressed bytes */
        return bout.toByteArray();
    }
    finally
    {
        @is.close();
    }
}
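// A minimal usage sketch for Gzip above. The CompressionUtils container class
// and the sample payload are illustrative assumptions, not names from the
// original source; the magic bytes 0x1F 0x8B come from the GZIP specification.
byte[] payload = System.Text.Encoding.ASCII.GetBytes("hello world");
byte[] packed = CompressionUtils.Gzip(payload);
// Every GZIP stream starts with the magic bytes 0x1F 0x8B.
System.Diagnostics.Debug.Assert(packed[0] == 0x1F && packed[1] == 0x8B);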
/// <summary>
/// Checks whether an <see cref="InputStream"/> is in the Horrible
/// Property Set Format.
/// </summary>
/// <param name="stream">The stream to check. In order to
/// perform the check, the method reads the first bytes from the
/// stream. After reading, the stream is reset to the position it
/// had before reading. The stream must support the
/// <see cref="InputStream.Mark"/> method.</param>
/// <returns>
/// <c>true</c> if the stream is a property set
/// stream; otherwise, <c>false</c>.
/// </returns>
public static bool IsPropertySetStream(Stream stream)
{
    ByteArrayInputStream dis = stream as ByteArrayInputStream;
    /*
     * Read at most this many bytes.
     */
    int BUFFER_SIZE = 50;
    /*
     * Mark the current position in the stream so that we can
     * reset to this position if the stream does not contain a
     * property set.
     */
    if (dis == null || !dis.MarkSupported())
    {
        throw new MarkUnsupportedException(stream.GetType().Name);
    }
    dis.Mark(BUFFER_SIZE);
    /*
     * Read a couple of bytes from the stream.
     */
    byte[] buffer = new byte[BUFFER_SIZE];
    int bytes = stream.Read(buffer, 0, (int)Math.Min(buffer.Length, dis.Available()));
    bool isPropertySetStream = IsPropertySetStream(buffer, 0, bytes);
    stream.Seek(0, SeekOrigin.Begin);
    dis.Reset();
    return isPropertySetStream;
}
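// A minimal usage sketch for IsPropertySetStream above. ByteArrayInputStream
// supports Mark/Reset, so it satisfies the method's contract; the file name
// and the raw-bytes source are illustrative assumptions.
byte[] raw = System.IO.File.ReadAllBytes("docsummary.bin");
Stream probe = new ByteArrayInputStream(raw);
bool isHpsf = IsPropertySetStream(probe); // the stream position is restored afterwards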
public virtual void TestValidateResponseJsonErrorUnknownException()
{
    IDictionary<string, object> json = new Dictionary<string, object>();
    json[HttpExceptionUtils.ErrorExceptionJson] = "FooException";
    json[HttpExceptionUtils.ErrorClassnameJson] = "foo.FooException";
    json[HttpExceptionUtils.ErrorMessageJson] = "EX";
    IDictionary<string, object> response = new Dictionary<string, object>();
    response[HttpExceptionUtils.ErrorJson] = json;
    ObjectMapper jsonMapper = new ObjectMapper();
    string msg = jsonMapper.WriteValueAsString(response);
    InputStream @is = new ByteArrayInputStream(Runtime.GetBytesForString(msg));
    HttpURLConnection conn = Org.Mockito.Mockito.Mock<HttpURLConnection>();
    Org.Mockito.Mockito.When(conn.GetErrorStream()).ThenReturn(@is);
    Org.Mockito.Mockito.When(conn.GetResponseMessage()).ThenReturn("msg");
    Org.Mockito.Mockito.When(conn.GetResponseCode()).ThenReturn(HttpURLConnection.HttpBadRequest);
    try
    {
        HttpExceptionUtils.ValidateResponse(conn, HttpURLConnection.HttpCreated);
        NUnit.Framework.Assert.Fail();
    }
    catch (IOException ex)
    {
        Assert.True(ex.Message.Contains("EX"));
        Assert.True(ex.Message.Contains("foo.FooException"));
    }
}
/// <summary>
/// Compresses data into the BZIP2 format.
/// </summary>
/// <param name="bytes"> The uncompressed bytes. </param>
/// <returns> The compressed bytes without the header. </returns>
/// <exception cref="IOException"> if an I/O error occurs. </exception>
public static byte[] Bzip2(byte[] bytes)
{
    var @is = new ByteArrayInputStream(bytes);
    try
    {
        var bout = new ByteArrayOutputStream();
        OutputStream os = new CBZip2OutputStream(bout, 1);
        try
        {
            var buf = new byte[4096];
            var len = 0;
            while ((len = @is.read(buf, 0, buf.Length)) != -1)
            {
                os.write(buf, 0, len);
            }
        }
        finally
        {
            os.close();
        }
        /* strip the header from the byte array and return it */
        bytes = bout.toByteArray();
        var bzip2 = new byte[bytes.Length - 2];
        Array.Copy(bytes, 2, bzip2, 0, bzip2.Length);
        return bzip2;
    }
    finally
    {
        @is.close();
    }
}
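// Note: the return value above is headerless BZIP2 data. Below is a sketch of
// restoring a standard header before handing the bytes to an off-the-shelf
// decompressor. It assumes the two stripped bytes leave the data missing the
// full four-byte "BZh1" signature (block size 1, matching the
// CBZip2OutputStream constructor argument above) -- older Apache
// CBZip2OutputStream versions leave the "BZ" magic to the caller -- so verify
// against the actual stream variant in use. 'payload' is as in the Gzip
// sketch earlier.
byte[] compressed = Bzip2(payload);            // headerless output from above
var headed = new byte[compressed.Length + 4];
headed[0] = (byte)'B';
headed[1] = (byte)'Z';
headed[2] = (byte)'h';
headed[3] = (byte)'1';
Array.Copy(compressed, 0, headed, 4, compressed.Length);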
public byte[] doRequest(byte type, byte[] data = null, File f = null)
{
    // Request framing: payload length (int, includes the type byte), type byte, payload.
    ByteArrayOutputStream bout = new ByteArrayOutputStream();
    if (data == null)
    {
        bout.writeInt(1);
        bout.writeByte(type);
    }
    else
    {
        bout.writeInt(data.Length + 1);
        bout.writeByte(type);
        bout.write(data);
    }
    byte[] aout = bout.getArray();
    stream.Write(aout, 0, aout.Length);
    stream.Flush();
    // Response framing: error code (int), payload length (int), payload.
    byte[] resp = readBytes(8);
    ByteArrayInputStream bin = new ByteArrayInputStream(resp);
    int error = bin.readInt();
    int len = bin.readInt();
    resp = readBytes(len);
    if (error == 2)
    {
        throw new AlreadyEditingException(f);
    }
    if (error != 0)
    {
        // Any other nonzero error code carries a message string as its payload.
        ByteArrayInputStream i = new ByteArrayInputStream(resp);
        string s = i.ReadString();
        throw new Exception("Network error: " + s);
    }
    return resp;
}
/// <summary>Regression test for https://github.com/couchbase/couchbase-lite-android-core/issues/70</summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
/// <exception cref="System.IO.IOException"></exception>
public virtual void TestAttachmentDisappearsAfterSave()
{
    // create a doc with an attachment
    Document doc = database.CreateDocument();
    string content = "This is a test attachment!";
    ByteArrayInputStream body = new ByteArrayInputStream(Sharpen.Runtime.GetBytesForString(content));
    UnsavedRevision rev = doc.CreateRevision();
    rev.SetAttachment("index.html", "text/plain; charset=utf-8", body);
    rev.Save();
    // make sure the doc's latest revision has the attachment
    IDictionary<string, object> attachments =
        (IDictionary)doc.GetCurrentRevision().GetProperty("_attachments");
    NUnit.Framework.Assert.IsNotNull(attachments);
    NUnit.Framework.Assert.AreEqual(1, attachments.Count);
    // make sure the rev has the attachment
    attachments = (IDictionary)rev.GetProperty("_attachments");
    NUnit.Framework.Assert.IsNotNull(attachments);
    NUnit.Framework.Assert.AreEqual(1, attachments.Count);
    // create new properties to add
    IDictionary<string, object> properties = new Dictionary<string, object>();
    properties.Put("foo", "bar");
    // make sure the new rev still has the attachment
    UnsavedRevision rev2 = doc.CreateRevision();
    rev2.GetProperties().PutAll(properties);
    rev2.Save();
    attachments = (IDictionary)rev2.GetProperty("_attachments");
    NUnit.Framework.Assert.IsNotNull(attachments);
    NUnit.Framework.Assert.AreEqual(1, attachments.Count);
}
public virtual void TestFormatWithoutForceEnterYes()
{
    // we check for a non empty dir, so create a child path
    FilePath data = new FilePath(hdfsDir, "file");
    if (!data.Mkdirs())
    {
        NUnit.Framework.Assert.Fail("Failed to create dir " + data.GetPath());
    }
    // capture the input stream
    InputStream origIn = Runtime.@in;
    ByteArrayInputStream bins = new ByteArrayInputStream(Sharpen.Runtime.GetBytesForString("Y\n"));
    Runtime.SetIn(bins);
    string[] argv = new string[] { "-format" };
    try
    {
        NameNode.CreateNameNode(argv, config);
        NUnit.Framework.Assert.Fail("createNameNode() did not call System.exit()");
    }
    catch (ExitUtil.ExitException e)
    {
        NUnit.Framework.Assert.AreEqual("Format should have succeeded", 0, e.status);
    }
    Runtime.SetIn(origIn);
    string cid = GetClusterId(config);
    NUnit.Framework.Assert.IsTrue("Didn't get new ClusterId", (cid != null && !cid.Equals(string.Empty)));
}
/// <exception cref="System.IO.IOException"/> private void DoTestReceiveAndMirror(PacketReceiver pr, int dataLen, int checksumsLen ) { byte[] Data = AppendTestUtil.InitBuffer(dataLen); byte[] Checksums = AppendTestUtil.InitBuffer(checksumsLen); byte[] packet = PrepareFakePacket(Data, Checksums); ByteArrayInputStream @in = new ByteArrayInputStream(packet); pr.ReceiveNextPacket(@in); ByteBuffer parsedData = pr.GetDataSlice(); Assert.AssertArrayEquals(Data, RemainingAsArray(parsedData)); ByteBuffer parsedChecksums = pr.GetChecksumSlice(); Assert.AssertArrayEquals(Checksums, RemainingAsArray(parsedChecksums)); PacketHeader header = pr.GetHeader(); NUnit.Framework.Assert.AreEqual(Seqno, header.GetSeqno()); NUnit.Framework.Assert.AreEqual(OffsetInBlock, header.GetOffsetInBlock()); NUnit.Framework.Assert.AreEqual(dataLen + checksumsLen + Ints.Bytes, header.GetPacketLen ()); // Mirror the packet to an output stream and make sure it matches // the packet we sent. ByteArrayOutputStream mirrored = new ByteArrayOutputStream(); mirrored = Org.Mockito.Mockito.Spy(mirrored); pr.MirrorPacketTo(new DataOutputStream(mirrored)); // The write should be done in a single call. Otherwise we may hit // nasty interactions with nagling (eg HDFS-4049). Org.Mockito.Mockito.Verify(mirrored, Org.Mockito.Mockito.Times(1)).Write(Org.Mockito.Mockito .Any <byte[]>(), Org.Mockito.Mockito.AnyInt(), Org.Mockito.Mockito.Eq(packet.Length )); Org.Mockito.Mockito.VerifyNoMoreInteractions(mirrored); Assert.AssertArrayEquals(packet, mirrored.ToByteArray()); }
/// <exception cref="System.IO.IOException"/> /// <exception cref="System.TypeLoadException"/> private static void DemonstrateSerializationColumnDataClassifier() { System.Console.Out.WriteLine(); System.Console.Out.WriteLine("Demonstrating working with a serialized classifier using serializeTo"); ColumnDataClassifier cdc = new ColumnDataClassifier(where + "examples/cheese2007.prop"); cdc.TrainClassifier(where + "examples/cheeseDisease.train"); // Exhibit serialization and deserialization working. Serialized to bytes in memory for simplicity System.Console.Out.WriteLine(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(baos); cdc.SerializeClassifier(oos); oos.Close(); byte[] @object = baos.ToByteArray(); ByteArrayInputStream bais = new ByteArrayInputStream(@object); ObjectInputStream ois = new ObjectInputStream(bais); ColumnDataClassifier cdc2 = ColumnDataClassifier.GetClassifier(ois); ois.Close(); // We compare the output of the deserialized classifier cdc2 versus the original one cl // For both we use a ColumnDataClassifier to convert text lines to examples System.Console.Out.WriteLine("Making predictions with both classifiers"); foreach (string line in ObjectBank.GetLineIterator(where + "examples/cheeseDisease.test", "utf-8")) { IDatum <string, string> d = cdc.MakeDatumFromLine(line); IDatum <string, string> d2 = cdc2.MakeDatumFromLine(line); System.Console.Out.Printf("%s =origi=> %s (%.4f)%n", line, cdc.ClassOf(d), cdc.ScoresOf(d).GetCount(cdc.ClassOf(d))); System.Console.Out.Printf("%s =deser=> %s (%.4f)%n", line, cdc2.ClassOf(d2), cdc2.ScoresOf(d).GetCount(cdc2.ClassOf(d))); } }
public virtual void TestDataAfterPackFooterSplitHeaderRead()
{
    TestRepository d = new TestRepository<FileRepository>(db);
    byte[] data = Constants.Encode("a");
    RevBlob b = d.Blob(data);
    int objects = 248;
    TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(32 * 1024);
    PackHeader(pack, objects + 1);
    int offset = 13;
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < offset; i++)
    {
        sb.Append(i);
    }
    offset = sb.ToString().Length;
    int lenByte = (Constants.OBJ_BLOB) << 4 | (offset & unchecked((int)(0x0F)));
    offset >>= 4;
    if (offset > 0)
    {
        lenByte |= 1 << 7;
    }
    pack.Write(lenByte);
    while (offset > 0)
    {
        lenByte = offset & unchecked((int)(0x7F));
        offset >>= 6;
        if (offset > 0)
        {
            lenByte |= 1 << 7;
        }
        pack.Write(lenByte);
    }
    Deflate(pack, Constants.Encode(sb.ToString()));
    for (int i_1 = 0; i_1 < objects; i_1++)
    {
        // The last pack header written falls across the 8192 byte boundary
        // between [8189:8210]
        pack.Write((Constants.OBJ_REF_DELTA) << 4 | 4);
        b.CopyRawTo(pack);
        Deflate(pack, new byte[] { unchecked((int)(0x1)), unchecked((int)(0x1)), unchecked((int)(0x1)), (byte)('b') });
    }
    Digest(pack);
    byte[] packData = pack.ToByteArray();
    byte[] streamData = new byte[packData.Length + 1];
    System.Array.Copy(packData, 0, streamData, 0, packData.Length);
    streamData[packData.Length] = unchecked((int)(0x7e));
    InputStream @in = new ByteArrayInputStream(streamData);
    PackParser p = Index(@in);
    p.SetAllowThin(true);
    p.SetCheckEofAfterPackFooter(false);
    p.SetExpectDataAfterPackFooter(true);
    p.Parse(NullProgressMonitor.INSTANCE);
    NUnit.Framework.Assert.AreEqual(unchecked((int)(0x7e)), @in.Read());
}
public virtual void TestDataAfterPackFooterSplitObjectRead()
{
    byte[] data = Constants.Encode("0123456789");
    // Build a pack ~17k
    int objects = 900;
    TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(32 * 1024);
    PackHeader(pack, objects);
    for (int i = 0; i < objects; i++)
    {
        pack.Write((Constants.OBJ_BLOB) << 4 | 10);
        Deflate(pack, data);
    }
    Digest(pack);
    byte[] packData = pack.ToByteArray();
    byte[] streamData = new byte[packData.Length + 1];
    System.Array.Copy(packData, 0, streamData, 0, packData.Length);
    streamData[packData.Length] = unchecked((int)(0x7e));
    InputStream @in = new ByteArrayInputStream(streamData);
    PackParser p = Index(@in);
    p.SetAllowThin(true);
    p.SetCheckEofAfterPackFooter(false);
    p.SetExpectDataAfterPackFooter(true);
    p.Parse(NullProgressMonitor.INSTANCE);
    NUnit.Framework.Assert.AreEqual(unchecked((int)(0x7e)), @in.Read());
}
public virtual void TestDataAfterPackFooterSingleRead()
{
    TestRepository d = new TestRepository<FileRepository>(db);
    RevBlob a = d.Blob("a");
    TemporaryBuffer.Heap pack = new TemporaryBuffer.Heap(32 * 1024);
    PackHeader(pack, 1);
    pack.Write((Constants.OBJ_REF_DELTA) << 4 | 4);
    a.CopyRawTo(pack);
    Deflate(pack, new byte[] { unchecked((int)(0x1)), unchecked((int)(0x1)), unchecked((int)(0x1)), (byte)('b') });
    Digest(pack);
    byte[] packData = pack.ToByteArray();
    byte[] streamData = new byte[packData.Length + 1];
    System.Array.Copy(packData, 0, streamData, 0, packData.Length);
    streamData[packData.Length] = unchecked((int)(0x7e));
    InputStream @in = new ByteArrayInputStream(streamData);
    PackParser p = Index(@in);
    p.SetAllowThin(true);
    p.SetCheckEofAfterPackFooter(false);
    p.SetExpectDataAfterPackFooter(true);
    p.Parse(NullProgressMonitor.INSTANCE);
    NUnit.Framework.Assert.AreEqual(unchecked((int)(0x7e)), @in.Read());
}
public virtual void TestOneBlockAndHalf_Copy()
{
    TemporaryBuffer b = new TemporaryBuffer.LocalFile();
    byte[] test = new TestRng(Sharpen.Extensions.GetTestName()).NextBytes(TemporaryBuffer.Block.SZ * 3 / 2);
    try
    {
        ByteArrayInputStream @in = new ByteArrayInputStream(test);
        b.Write(@in.Read());
        b.Copy(@in);
        b.Close();
        NUnit.Framework.Assert.AreEqual(test.Length, b.Length());
        {
            byte[] r = b.ToByteArray();
            NUnit.Framework.Assert.IsNotNull(r);
            NUnit.Framework.Assert.AreEqual(test.Length, r.Length);
            NUnit.Framework.Assert.IsTrue(Arrays.Equals(test, r));
        }
        {
            ByteArrayOutputStream o = new ByteArrayOutputStream();
            b.WriteTo(o, null);
            o.Close();
            byte[] r = o.ToByteArray();
            NUnit.Framework.Assert.AreEqual(test.Length, r.Length);
            NUnit.Framework.Assert.IsTrue(Arrays.Equals(test, r));
        }
    }
    finally
    {
        b.Destroy();
    }
}
/// <summary>
/// Gets a deserialized object from the string stored on the clipboard.
/// </summary>
/// <param name="s">The clipboard string to deserialize.</param>
/// <returns>The deserialized object, or null on failure.</returns>
private Object getDeserializedObjectFromText(String s)
{
    if (s.StartsWith(CLIP_PREFIX))
    {
        // Skip past the second ':' separator; the Base64 payload follows it.
        int index = s.IndexOf(":");
        index = s.IndexOf(":", index + 1);
        Object ret = null;
        try
        {
            ByteArrayInputStream bais = new ByteArrayInputStream(Base64.decode(str.sub(s, index + 1)));
            ObjectInputStream ois = new ObjectInputStream(bais);
            ret = ois.readObject();
        }
        catch (Exception ex)
        {
            ret = null;
            Logger.write(typeof(ClipboardModel) + ".getDeserializedObjectFromText; ex=" + ex + "\n");
        }
        return ret;
    }
    else
    {
        return null;
    }
}
public virtual void TestInCoreLimit_SwitchOnCopy()
{
    TemporaryBuffer b = new TemporaryBuffer.LocalFile();
    byte[] test = new TestRng(Sharpen.Extensions.GetTestName()).NextBytes(TemporaryBuffer.DEFAULT_IN_CORE_LIMIT * 2);
    try
    {
        ByteArrayInputStream @in = new ByteArrayInputStream(test, TemporaryBuffer.DEFAULT_IN_CORE_LIMIT,
            test.Length - TemporaryBuffer.DEFAULT_IN_CORE_LIMIT);
        b.Write(test, 0, TemporaryBuffer.DEFAULT_IN_CORE_LIMIT);
        b.Copy(@in);
        b.Close();
        NUnit.Framework.Assert.AreEqual(test.Length, b.Length());
        {
            byte[] r = b.ToByteArray();
            NUnit.Framework.Assert.IsNotNull(r);
            NUnit.Framework.Assert.AreEqual(test.Length, r.Length);
            NUnit.Framework.Assert.IsTrue(Arrays.Equals(test, r));
        }
        {
            ByteArrayOutputStream o = new ByteArrayOutputStream();
            b.WriteTo(o, null);
            o.Close();
            byte[] r = o.ToByteArray();
            NUnit.Framework.Assert.AreEqual(test.Length, r.Length);
            NUnit.Framework.Assert.IsTrue(Arrays.Equals(test, r));
        }
    }
    finally
    {
        b.Destroy();
    }
}
/*
 * public Set<Integer> getPatterns(String sentId, Integer tokenId) throws SQLException, IOException, ClassNotFoundException {
 *   if(useDBForTokenPatterns){
 *     Connection conn = SQLConnection.getConnection();
 *
 *     String query = "Select patterns from " + tableName + " where sentid=\'" + sentId + "\' and tokenid = " + tokenId;
 *     Statement stmt = conn.createStatement();
 *     ResultSet rs = stmt.executeQuery(query);
 *     Set<Integer> pats = null;
 *     if(rs.next()){
 *       byte[] st = (byte[]) rs.getObject(1);
 *       ByteArrayInputStream baip = new ByteArrayInputStream(st);
 *       ObjectInputStream ois = new ObjectInputStream(baip);
 *       pats = (Set<Integer>) ois.readObject();
 *     }
 *     conn.close();
 *     return pats;
 *   }
 *   else
 *     return patternsForEachToken.get(sentId).get(tokenId);
 * }
 */
public override IDictionary<int, ICollection<E>> GetPatternsForAllTokens(string sentId)
{
    try
    {
        IConnection conn = SQLConnection.GetConnection();
        //Map<Integer, Set<Integer>> pats = new ConcurrentHashMap<Integer, Set<Integer>>();
        string query = "Select patterns from " + tableName + " where sentid=\'" + sentId + "\'";
        IStatement stmt = conn.CreateStatement();
        IResultSet rs = stmt.ExecuteQuery(query);
        IDictionary<int, ICollection<E>> patsToken = new Dictionary<int, ICollection<E>>();
        if (rs.Next())
        {
            byte[] st = (byte[])rs.GetObject(1);
            ByteArrayInputStream baip = new ByteArrayInputStream(st);
            ObjectInputStream ois = new ObjectInputStream(baip);
            patsToken = (IDictionary<int, ICollection<E>>)ois.ReadObject();
        }
        //pats.put(rs.getInt("tokenid"), patsToken);
        conn.Close();
        return patsToken;
    }
    catch (Exception e)
    {
        throw new Exception(e);
    }
}
public virtual void TestFormatWithoutForceEnterNo()
{
    // we check for a non empty dir, so create a child path
    FilePath data = new FilePath(hdfsDir, "file");
    if (!data.Mkdirs())
    {
        NUnit.Framework.Assert.Fail("Failed to create dir " + data.GetPath());
    }
    // capture the input stream
    InputStream origIn = Runtime.@in;
    ByteArrayInputStream bins = new ByteArrayInputStream(Sharpen.Runtime.GetBytesForString("N\n"));
    Runtime.SetIn(bins);
    string[] argv = new string[] { "-format" };
    try
    {
        NameNode.CreateNameNode(argv, config);
        NUnit.Framework.Assert.Fail("createNameNode() did not call System.exit()");
    }
    catch (ExitUtil.ExitException e)
    {
        NUnit.Framework.Assert.AreEqual("Format should not have succeeded", 1, e.status);
    }
    Runtime.SetIn(origIn);
    // check that the version file does not exist
    FilePath version = new FilePath(hdfsDir, "current/VERSION");
    NUnit.Framework.Assert.IsFalse("Check version should not exist", version.Exists());
}
private void loadOvTable(String dirName, int id, Directory parent, File table)
{
    Directory dir = new Directory(this, parent, true, dirName, id);
    addDir(dir);
    parent.childrenDirs.Add(dir);
    ByteArrayInputStream tbl = new ByteArrayInputStream(table.getContents());
    int i = 0;
    // Each overlay table entry is 32 bytes.
    while (tbl.lengthAvailable(32))
    {
        uint ovId = tbl.readUInt();
        uint ramAddr = tbl.readUInt();
        uint ramSize = tbl.readUInt();
        uint bssSize = tbl.readUInt();
        uint staticInitStart = tbl.readUInt();
        uint staticInitEnd = tbl.readUInt();
        ushort fileID = tbl.readUShort();
        tbl.skip(6); //unused 0's
        File f = loadFile(dirName + "_" + ovId + ".bin", fileID, dir);
        // f.isSystemFile = true;
        i++;
    }
}
private Directory loadDir(ByteArrayInputStream s, Directory parent)
{
    int id = s.readInt();
    string name = s.ReadString();
    Directory d = new Directory(this, parent, false, name, id);
    addDir(d);
    if (parent != null)
    {
        parent.childrenDirs.Add(d);
    }
    int dirCount = s.readInt();
    for (int i = 0; i < dirCount; i++)
    {
        d.childrenDirs.Add(loadDir(s, d));
    }
    int fileCount = s.readInt();
    for (int i = 0; i < fileCount; i++)
    {
        int fid = s.readInt();
        int fsize = s.readInt();
        string fname = s.ReadString();
        NetFile f = new NetFile(this, d, fname, fid, fsize);
        d.childrenFiles.Add(f);
        addFile(f);
    }
    return d;
}
public override IDictionary<int, ICollection<E>> GetPatternsForAllTokens(string sentId)
{
    try
    {
        TermQuery query = new TermQuery(new Term("sentid", sentId));
        TopDocs tp = searcher.Search(query, 1);
        if (tp.totalHits > 0)
        {
            foreach (ScoreDoc s in tp.scoreDocs)
            {
                int docId = s.doc;
                Org.Apache.Lucene.Document.Document d = searcher.Doc(docId);
                byte[] st = d.GetBinaryValue("patterns").bytes;
                ByteArrayInputStream baip = new ByteArrayInputStream(st);
                ObjectInputStream ois = new ObjectInputStream(baip);
                return (IDictionary<int, ICollection<E>>)ois.ReadObject();
            }
        }
        else
        {
            throw new Exception("Why no patterns for sentid " + sentId + ". Number of documents in index are " + Size());
        }
    }
    catch (IOException e)
    {
        throw new Exception(e);
    }
    catch (TypeLoadException e)
    {
        throw new Exception(e);
    }
    return null;
}
/// <exception cref="System.IO.IOException"/> private static UserGroupInformation GetTokenUGI(ServletContext context, HttpServletRequest request, string tokenString, Configuration conf) { Org.Apache.Hadoop.Security.Token.Token <DelegationTokenIdentifier> token = new Org.Apache.Hadoop.Security.Token.Token <DelegationTokenIdentifier>(); token.DecodeFromUrlString(tokenString); IPEndPoint serviceAddress = GetNNServiceAddress(context, request); if (serviceAddress != null) { SecurityUtil.SetTokenService(token, serviceAddress); token.SetKind(DelegationTokenIdentifier.HdfsDelegationKind); } ByteArrayInputStream buf = new ByteArrayInputStream(token.GetIdentifier()); DataInputStream @in = new DataInputStream(buf); DelegationTokenIdentifier id = new DelegationTokenIdentifier(); id.ReadFields(@in); if (context != null) { NameNode nn = NameNodeHttpServer.GetNameNodeFromContext(context); if (nn != null) { // Verify the token. nn.GetNamesystem().VerifyToken(id, token.GetPassword()); } } UserGroupInformation ugi = id.GetUser(); ugi.AddToken(token); return(ugi); }
public void IsNonBlocking_InitializedStream_ReturnsTrue()
{
    byte[] data = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
    ByteArrayInputStream stream = ByteArrayInputStream.Create(data);
    Assert.AreEqual(true, stream.IsNonBlocking());
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception> public static Couchbase.Lite.Document CreateTask(Database database, string title, Bitmap image, string listId) { SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'" ); Calendar calendar = GregorianCalendar.GetInstance(); string currentTimeString = dateFormatter.Format(calendar.GetTime()); IDictionary <string, object> properties = new Dictionary <string, object>(); properties.Put("type", DocType); properties.Put("title", title); properties.Put("checked", false); properties.Put("created_at", currentTimeString); properties.Put("list_id", listId); Couchbase.Lite.Document document = database.CreateDocument(); UnsavedRevision revision = document.CreateRevision(); revision.SetUserProperties(properties); if (image != null) { ByteArrayOutputStream @out = new ByteArrayOutputStream(); image.Compress(Bitmap.CompressFormat.Jpeg, 50, @out); ByteArrayInputStream @in = new ByteArrayInputStream(@out.ToByteArray()); revision.SetAttachment("image", "image/jpg", @in); } revision.Save(); return(document); }
public static IList<Tree> ConvertToTrees(byte[] input)
{
    try
    {
        IList<Tree> output = new List<Tree>();
        // The input is gzipped serialized data: a count followed by that many tree strings.
        ByteArrayInputStream bis = new ByteArrayInputStream(input);
        GZIPInputStream gis = new GZIPInputStream(bis);
        ObjectInputStream ois = new ObjectInputStream(gis);
        int size = ErasureUtils.UncheckedCast<int>(ois.ReadObject());
        for (int i = 0; i < size; ++i)
        {
            string rawTree = ErasureUtils.UncheckedCast(ois.ReadObject());
            Tree tree = Tree.ValueOf(rawTree, trf);
            tree.SetSpans();
            output.Add(tree);
        }
        ois.Close();
        gis.Close();
        bis.Close();
        return output;
    }
    catch (IOException e)
    {
        throw new RuntimeIOException(e);
    }
    catch (TypeLoadException e)
    {
        throw new Exception(e);
    }
}
/// <exception cref="System.IO.IOException"></exception> private void Test(byte[] input, byte[] expected, bool detectBinary) { InputStream bis1 = new ByteArrayInputStream(input); InputStream cis1 = new EolCanonicalizingInputStream(bis1, detectBinary); int index1 = 0; for (int b = cis1.Read(); b != -1; b = cis1.Read()) { NUnit.Framework.Assert.AreEqual(expected[index1], unchecked ((byte)b)); index1++; } NUnit.Framework.Assert.AreEqual(expected.Length, index1); for (int bufferSize = 1; bufferSize < 10; bufferSize++) { byte[] buffer = new byte[bufferSize]; InputStream bis2 = new ByteArrayInputStream(input); InputStream cis2 = new EolCanonicalizingInputStream(bis2, detectBinary); int read = 0; for (int readNow = cis2.Read(buffer, 0, buffer.Length); readNow != -1 && read < expected .Length; readNow = cis2.Read(buffer, 0, buffer.Length)) { for (int index2 = 0; index2 < readNow; index2++) { NUnit.Framework.Assert.AreEqual(expected[read + index2], buffer[index2]); } read += readNow; } NUnit.Framework.Assert.AreEqual(expected.Length, read); cis2.Close(); } cis1.Close(); }
public virtual void load()
{
    addDir(mainDir);
    addFile(fntFile);
    mainDir.childrenFiles.Add(fntFile);
    addFile(fatFile);
    mainDir.childrenFiles.Add(fatFile);
    freeSpaceDelimiter = fntFile;
    //read the fnt
    ByteArrayInputStream fnt = new ByteArrayInputStream(fntFile.getContents());
    loadDir(fnt, "root", 0xF000, mainDir);
}
private void loadDir(ByteArrayInputStream fnt, string dirName, int dirID, Directory parent)
{
    fnt.savePos();
    // Each directory entry in the FNT main table is 8 bytes.
    fnt.seek(8 * (dirID & 0xFFF));
    uint subTableOffs = fnt.readUInt();
    int fileID = fnt.readUShort();
    //Crappy hack for MKDS course .carc's.
    //Their main dir starting ID is 2, which is weird...
    //    if (parent == mainDir) fileID = 0;
    Directory thisDir = new Directory(this, parent, false, dirName, dirID);
    addDir(thisDir);
    parent.childrenDirs.Add(thisDir);
    fnt.seek((int)subTableOffs);
    while (true)
    {
        byte data = fnt.readByte();
        int len = data & 0x7F;           // low 7 bits: name length
        bool isDir = (data & 0x80) != 0; // high bit set: entry is a subdirectory
        if (len == 0)
        {
            break;
        }
        String name = fnt.ReadString(len);
        if (isDir)
        {
            int subDirID = fnt.readUShort();
            loadDir(fnt, name, subDirID, thisDir);
        }
        else
        {
            loadFile(name, fileID, thisDir);
            fileID++;
        }
    }
    fnt.loadPos();
}
private static sbyte[] getApplicationCertificate(Context context)
{
    if (context == null)
    {
        return null;
    }
    sbyte[] cert = null;
    string packageName = context.PackageName;
    try
    {
        PackageInfo pkgInfo = context.PackageManager.getPackageInfo(packageName, PackageManager.GET_SIGNATURES);
        if (pkgInfo == null)
        {
            return null;
        }
        Signature[] sigs = pkgInfo.signatures;
        if (sigs != null)
        {
            // Extract the encoded public key from the first signing certificate.
            CertificateFactory cf = CertificateFactory.getInstance("X.509");
            ByteArrayInputStream stream = new ByteArrayInputStream(sigs[0].toByteArray());
            X509Certificate x509cert = X509Certificate.getInstance(stream);
            cert = x509cert.PublicKey.Encoded;
        }
    }
    catch (PackageManager.NameNotFoundException e)
    {
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
    }
    catch (CertificateException e)
    {
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
    }
    catch (javax.security.cert.CertificateException e)
    {
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
    }
    return cert;
}
public virtual float ReadSingle()
{
    FillBuffer(4);
    var byteArrayInputStream = new ByteArrayInputStream(ReadSwapped(4));
    var dataInputStream = new DataInputStream(byteArrayInputStream);
    var result = dataInputStream.ReadFloat();
    byteArrayInputStream.Close();
    dataInputStream.Close();
    return result;
}
private void loadOvTable(String dirName, int id, Directory parent, File table, out OverlayFile[] arr)
{
    Directory dir = new Directory(this, parent, true, dirName, id);
    addDir(dir);
    parent.childrenDirs.Add(dir);
    ByteArrayInputStream tbl = new ByteArrayInputStream(table.getContents());
    // Each overlay table entry is 32 bytes.
    arr = new OverlayFile[tbl.available / 32];
    int i = 0;
    while (tbl.lengthAvailable(32))
    {
        uint ovId = tbl.readUInt();
        uint ramAddr = tbl.readUInt();
        uint ramSize = tbl.readUInt();
        uint bssSize = tbl.readUInt();
        uint staticInitStart = tbl.readUInt();
        uint staticInitEnd = tbl.readUInt();
        ushort fileID = tbl.readUShort();
        tbl.skip(6); //unused 0's
        OverlayFile f = loadOvFile(fileID, dir, table, tbl.getPos() - 0x20);
        f.isSystemFile = true;
        arr[i] = f;
        i++;
    }
}
public virtual double ReadDouble()
{
    FillBuffer(8);
    var byteArrayInputStream = new ByteArrayInputStream(ReadSwapped(8));
    var dataInputStream = new DataInputStream(byteArrayInputStream);
    var result = dataInputStream.ReadDouble();
    byteArrayInputStream.Close();
    dataInputStream.Close();
    return result;
}