// Wraps a raw message payload in layered readers for later field extraction.
// `command` identifies the message type; `data` is the raw message body.
public Message(sbyte command, byte[] data) {
    this.command = command;
    ms = new MemoryStream(data);
    // BinaryReaderEx / DataInputStream are project wrappers layered over the
    // memory stream; presumably the DataInputStream provides Java-style
    // (big-endian) primitive reads — confirm against its implementation.
    iss = new BinaryReaderEx(ms);
    dis = new DataInputStream(iss);
}
/// <summary>
/// Reads every line of <paramref name="filename"/> into a string array.
/// On I/O failure the error is written to the console and the process exits
/// with code -1 (so callers never observe the null return in that case).
/// </summary>
/// <param name="filename">path of the text file to load</param>
/// <returns>one array element per line of the file, in file order</returns>
public static string[] load_dna(string filename) {
    List<string> lines = new List<string>();
    string[] dna = null;
    try {
        DataInputStream @in = new DataInputStream(new FileInputStream(filename));
        BufferedReader reader = new BufferedReader(new InputStreamReader(@in));
        // Accumulate lines until readLine() signals end-of-file with null.
        for (string line = reader.readLine(); line != null; line = reader.readLine()) {
            lines.Add(line);
        }
        @in.close();
        // Copy the accumulated lines into a plain array.
        dna = new string[lines.Count];
        int slot = 0;
        foreach (string entry in lines) {
            dna[slot++] = entry;
        }
    } catch(java.io.IOException e) {
        Console.WriteLine("Unable to create matrix from " + filename + ": " + e.Message);
        Environment.Exit(-1);
    }
    return dna;
}
// protected constructor ---------------------------------------------
/// <summary>
/// <p>
/// Protected constructor.
/// </p>
/// </summary>
///
/// <param name="inputStream">ICU uprop.dat file input stream</param>
/// <exception cref="IOException">throw if data file fails authentication</exception>
/// @draft 2.1
protected internal UCharacterNameReader(Stream inputStream) {
    // Authenticate the ICU binary header against the expected format id;
    // ReadHeader throws if the data file does not match.
    IBM.ICU.Impl.ICUBinary.ReadHeader(inputStream, DATA_FORMAT_ID_, this);
    // Keep the (now positioned past the header) stream wrapped for
    // Java-style primitive reads by the rest of this reader.
    m_dataInputStream_ = new DataInputStream(inputStream);
}
/// <summary>
/// Loads a text-format FST ("&lt;basename&gt;.fst.txt") together with its
/// optional input/output/state symbol tables and builds an Fst over the
/// given semiring. The first state listed becomes the start state; missing
/// symbols are added to the tables on the fly.
/// NOTE(review): transliterated Java (HashMap/Integer/BufferedReader) kept as-is.
/// </summary>
/// <param name="basename">path prefix of the .fst.txt and .syms files</param>
/// <param name="semiring">semiring supplying zero() for new-state weights</param>
public static Fst importFst(string basename, Semiring semiring) {
    Fst fst = new Fst(semiring);
    // Input symbol table; default to a table holding only epsilon at id 0.
    HashMap hashMap = Convert.importSymbols(new StringBuilder().append(basename).append(".input.syms").toString());
    if (hashMap == null) {
        hashMap = new HashMap();
        hashMap.put("<eps>", Integer.valueOf(0));
    }
    // Output symbol table, same default.
    HashMap hashMap2 = Convert.importSymbols(new StringBuilder().append(basename).append(".output.syms").toString());
    if (hashMap2 == null) {
        hashMap2 = new HashMap();
        hashMap2.put("<eps>", Integer.valueOf(0));
    }
    // Optional state-name table; when null, state ids are parsed as integers.
    HashMap hashMap3 = Convert.importSymbols(new StringBuilder().append(basename).append(".states.syms").toString());
    FileInputStream fileInputStream = new FileInputStream(new StringBuilder().append(basename).append(".fst.txt").toString());
    DataInputStream dataInputStream = new DataInputStream(fileInputStream);
    BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(dataInputStream, "UTF-8"));
    int num = 1;                      // flag: 1 until the first state is seen (it becomes the start state)
    HashMap hashMap4 = new HashMap(); // state id -> State
    string text;
    while ((text = bufferedReader.readLine()) != null) {
        // Fields are tab-separated: src [dst in out [weight]] | src [finalWeight]
        string[] array = String.instancehelper_split(text, "\\t");
        Integer integer;
        if (hashMap3 == null) {
            integer = Integer.valueOf(Integer.parseInt(array[0]));
        } else {
            integer = (Integer)hashMap3.get(array[0]);
        }
        // Create the source state lazily on first mention.
        State state = (State)hashMap4.get(integer);
        if (state == null) {
            state = new State(semiring.zero());
            fst.addState(state);
            hashMap4.put(integer, state);
        }
        if (num != 0) {
            num = 0;
            fst.setStart(state);
        }
        if (array.Length > 2) {
            // Arc line: resolve/create the destination state.
            Integer integer2;
            if (hashMap3 == null) {
                integer2 = Integer.valueOf(Integer.parseInt(array[1]));
            } else {
                integer2 = (Integer)hashMap3.get(array[1]);
            }
            State state2 = (State)hashMap4.get(integer2);
            if (state2 == null) {
                state2 = new State(semiring.zero());
                fst.addState(state2);
                hashMap4.put(integer2, state2);
            }
            // Register unseen symbols with the next free id.
            if (hashMap.get(array[2]) == null) {
                hashMap.put(array[2], Integer.valueOf(hashMap.size()));
            }
            int iLabel = ((Integer)hashMap.get(array[2])).intValue();
            if (hashMap2.get(array[3]) == null) {
                hashMap2.put(array[3], Integer.valueOf(hashMap2.size()));
            }
            int oLabel = ((Integer)hashMap2.get(array[3])).intValue();
            // Missing weight field defaults to 0.
            float weight;
            if (array.Length > 4) {
                weight = Float.parseFloat(array[4]);
            } else {
                weight = 0f;
            }
            Arc arc = new Arc(iLabel, oLabel, weight, state2);
            state.addArc(arc);
        } else if (array.Length > 1) {
            // Two fields: a final state with an explicit final weight.
            float finalWeight = Float.parseFloat(array[1]);
            state.setFinalWeight(finalWeight);
        } else {
            // One field: a final state with weight 0.
            state.setFinalWeight(0f);
        }
    }
    dataInputStream.close();
    fst.setIsyms(Utils.toStringArray(hashMap));
    fst.setOsyms(Utils.toStringArray(hashMap2));
    return(fst);
}
// Intentional no-op: this packet type carries no payload beyond its header,
// so there is nothing to read from the stream.
public override void readPacketData(DataInputStream datainputstream) { }
/// <summary>
/// Reads a 32-bit integer in little-endian order by byte-swapping the value
/// the stream's readInt() returns.
/// </summary>
public static int readIntLE(ref DataInputStream @in) {
    int rawValue = @in.readInt();
    return Loader.reverseBytes(rawValue);
}
// Deserializes a KeyframeSequence section from the stream (appears to be a
// JSR-184/M3G-style scene file — confirm against the Loader format docs).
// Multi-byte values are little-endian via the Loader.read*LE helpers.
private static object loadKeyframeSequence(
    ref DataInputStream @in,
    ref List<Object3D> objectList,
    ref List<Object3D> rootObjectList) {
    KeyframeSequence keyframeSequence = new KeyframeSequence();
    // Read the shared Object3D data first (see loadObject3D).
    Loader.loadObject3D((Object3D)keyframeSequence, ref @in, ref objectList, ref rootObjectList);
    int interpolation = @in.readUnsignedByte();
    int mode = @in.readUnsignedByte();
    int num1 = @in.readUnsignedByte();            // value encoding: 0 = raw floats, 2 = quantized ushorts
    int duration = Loader.readIntLE(ref @in);
    int first = Loader.readIntLE(ref @in);
    int last = Loader.readIntLE(ref @in);
    int numComponents = Loader.readIntLE(ref @in);
    int numKeyframes = Loader.readIntLE(ref @in);
    keyframeSequence.setInterpolation(interpolation);
    keyframeSequence.setRepeatMode(mode);
    keyframeSequence.setDuration(duration);
    keyframeSequence.setKeyframeSize(numKeyframes, numComponents);
    keyframeSequence.setValidRange(first, last);
    switch (num1) {
    case 0:
        // Uncompressed: each keyframe is an int time followed by raw floats.
        float[] numArray1 = new float[numComponents];
        for (int index1 = 0; index1 < numKeyframes; ++index1) {
            int time = Loader.readIntLE(ref @in);
            for (int index2 = 0; index2 < numComponents; ++index2) {
                numArray1[index2] = Loader.readFloatLE(ref @in);
            }
            keyframeSequence.setKeyframe(index1, time, numArray1);
        }
        break;
    case 2:
        // Quantized: per-component bias (numArray2) and scale (numArray3) are
        // read first; each stored ushort is mapped to
        // value * (1/65535) * scale + bias  (1.525902E-05 ~= 1/65535).
        float num2 = 1.525902E-05f;
        float[] numArray2 = new float[4];
        float[] numArray3 = new float[4];
        for (int index = 0; index < numComponents; ++index) {
            numArray2[index] = Loader.readFloatLE(ref @in);
        }
        for (int index = 0; index < numComponents; ++index) {
            numArray3[index] = Loader.readFloatLE(ref @in);
        }
        float[] numArray4 = new float[numComponents];
        for (int index1 = 0; index1 < numKeyframes; ++index1) {
            int time = Loader.readIntLE(ref @in);
            for (int index2 = 0; index2 < numComponents; ++index2) {
                float num3 = (float)Loader.readUShortLE(ref @in) * num2 * numArray3[index2] + numArray2[index2];
                numArray4[index2] = num3;
            }
            keyframeSequence.setKeyframe(index1, time, numArray4);
        }
        break;
    }
    return((object)keyframeSequence);
}
// ----------------------------------------------------------------
// Constructor
/// <summary>
/// Constructs a UPropertyAliases object. The binary file DATA_FILE_NAME is
/// read from the jar/classpath and unflattened into member variables of this
/// object. The header is read field-by-field, then the whole table is
/// re-read into a byte[] (via Mark/Reset) and decoded by a Builder.
/// </summary>
///
public UPropertyAliases() {
    // Open the .icu file from the jar/classpath
    Stream mask0 = IBM.ICU.Impl.ICUData.GetRequiredStream(DATA_FILE_NAME);
    BufferedStream b = new BufferedStream(mask0, DATA_BUFFER_SIZE);
    // Read and discard Unicode version...
    /* byte unicodeVersion[] = */
    IBM.ICU.Impl.ICUBinary.ReadHeader(b, DATA_FORMAT_ID, this);
    DataInputStream d = new DataInputStream(b);
    // Record the origin position of the file. Keep enough around
    // to seek back to the start of the header.
    d.Mark(256);
    // Ten 16-bit header fields, in file order — do not reorder these reads.
    short enumToName_offset = d.ReadShort();
    short nameToEnum_offset = d.ReadShort();
    short enumToValue_offset = d.ReadShort();
    short total_size = d.ReadShort();
    short valueMap_offset = d.ReadShort();
    short valueMap_count = d.ReadShort();
    short nameGroupPool_offset = d.ReadShort();
    short nameGroupPool_count = d.ReadShort();
    short stringPool_offset = d.ReadShort();
    short stringPool_count = d.ReadShort();
    if (DEBUG) {
        System.Console.Out.WriteLine("enumToName_offset=" + enumToName_offset
            + "\n" + "nameToEnum_offset=" + nameToEnum_offset
            + "\n" + "enumToValue_offset=" + enumToValue_offset
            + "\n" + "total_size=" + total_size
            + "\n" + "valueMap_offset=" + valueMap_offset
            + "\n" + "valueMap_count=" + valueMap_count
            + "\n" + "nameGroupPool_offset=" + nameGroupPool_offset
            + "\n" + "nameGroupPool_count=" + nameGroupPool_count
            + "\n" + "stringPool_offset=" + stringPool_offset
            + "\n" + "stringPool_count=" + stringPool_count);
    }
    // Rewind to the marked origin and slurp the whole table, header included,
    // so the Builder can seek by the offsets read above.
    byte[] raw = new byte[total_size];
    d.Reset();
    d.ReadFully(raw);
    d.Close();
    UPropertyAliases.Builder builder = new UPropertyAliases.Builder(raw);
    stringPool = builder.ReadStringPool(stringPool_offset, stringPool_count);
    nameGroupPool = builder.ReadNameGroupPool(nameGroupPool_offset, nameGroupPool_count);
    builder.SetupValueMap_map(valueMap_offset, valueMap_count);
    // Some of the following data structures have to be set up
    // here, _not_ in Builder. That's because they are instances
    // of non-static inner classes, and they contain implicit
    // references to this.
    builder.Seek(enumToName_offset);
    enumToName = new UPropertyAliases.NonContiguousEnumToShort(builder);
    builder.NameGroupOffsetToIndex(enumToName.offsetArray);
    builder.Seek(nameToEnum_offset);
    nameToEnum = new UPropertyAliases.NameToEnum(this, builder);
    builder.Seek(enumToValue_offset);
    enumToValue = new UPropertyAliases.NonContiguousEnumToShort(builder);
    builder.ValueMapOffsetToIndex(enumToValue.offsetArray);
    valueMapArray = new UPropertyAliases.ValueMap[valueMap_count];
    for (int i = 0; i < valueMap_count; ++i) {
        // Must seek to the start of each entry.
        builder.Seek(builder.valueMap_map[i]);
        valueMapArray[i] = new UPropertyAliases.ValueMap(this, builder);
    }
    builder.Close();
}
/// <summary>
/// Reads a 16-bit integer in little-endian order by byte-swapping the value
/// the stream's readShort() returns.
/// </summary>
public static short readShortLE(ref DataInputStream @in) {
    short rawValue = @in.readShort();
    return Loader.reverseBytes(rawValue);
}
// Lazily inflates and indexes the character-name pool. Returns the cached
// pool when the soft reference is still live; otherwise decodes the
// compressed resource: a (len, [codepoint]) index region followed by the
// packed name strings. Lookup[hi][lo] stores (nameOffset << 8) | nameLength.
// Synchronized on the class object so concurrent callers build it once.
private static sbyte[] InitNamePool() {
    lock (typeof(CharacterName)) {
        sbyte[] strPool = null;
        // Fast path: pool already built and not yet collected.
        if (RefStrPool != null && (strPool = RefStrPool.get()) != null) {
            return(strPool);
        }
        DataInputStream dis = null;
        try {
            dis = new DataInputStream(new InflaterInputStream(AccessController.doPrivileged(new PrivilegedActionAnonymousInnerClassHelper())));
            Lookup = new int[(Character.MAX_CODE_POINT + 1) >> 8][];
            int total = dis.ReadInt();   // total bytes: index region + string pool
            int cpEnd = dis.ReadInt();   // size of the index region
            sbyte[] ba = new sbyte[cpEnd];
            dis.ReadFully(ba);
            int nameOff = 0;
            int cpOff = 0;
            int cp = 0;
            do {
                int len = ba[cpOff++] & 0xff;
                if (len == 0) {
                    // Escape: real length follows, then an explicit 24-bit
                    // code point (non-consecutive entry).
                    len = ba[cpOff++] & 0xff;
                    // always big-endian
                    cp = ((ba[cpOff++] & 0xff) << 16) | ((ba[cpOff++] & 0xff) << 8) | ((ba[cpOff++] & 0xff));
                } else {
                    // Consecutive entry: next code point.
                    cp++;
                }
                int hi = cp >> 8;
                if (Lookup[hi] == null) {
                    Lookup[hi] = new int[0x100];
                }
                // Pack the name's offset and length into one int per code point.
                Lookup[hi][cp & 0xff] = (nameOff << 8) | len;
                nameOff += len;
            } while (cpOff < cpEnd);
            // Remainder of the stream is the packed name strings.
            strPool = new sbyte[total - cpEnd];
            dis.ReadFully(strPool);
            // Soft reference: lets the GC reclaim the pool under memory pressure.
            RefStrPool = new SoftReference <>(strPool);
        } catch (Exception x) {
            throw new InternalError(x.Message, x);
        } finally {
            try {
                if (dis != null) {
                    dis.Close();
                }
            } catch (Exception) {
            }
        }
        return(strPool);
    }
}
// No-op override: nothing is deserialized from the stream for this type.
public override void FromInputStream(DataInputStream reader) { }
/// <summary>
/// Server-side receive loop for one Bluetooth client connection: reads
/// messages until cancellation, handles the first-connection MAC-address
/// exchange, optionally acknowledges each message, and dispatches it to
/// BluetoothMessageCenter. Always calls Stop() on exit.
/// </summary>
public void Run() {
    // Create a DataInputStream for communication; the client is using a DataOutputStream to write to us
    var dInStream = new DataInputStream(socket.InputStream);
    var dOutStream = new DataOutputStream(socket.OutputStream);
    // Brief pause before the first read; presumably lets the socket settle — confirm.
    Task.Delay(100).Wait();
    try {
        // Get the next message
        while (!StopToken.IsCancellationRequested) {
            string data = String.Empty;
            try {
                data = ReadString(dInStream, dOutStream);
            }
            //catch (Java.IO.IOException)
            //{
            //    BTDirectActivity.Current.RelayToast("Server thread error 'Read() returns -1' - connection lost.");
            //    continue;
            //}
            catch (System.AggregateException e) {
                // Only IOExceptions mean "connection lost"; anything else is rethrown.
                if (e.InnerExceptions.All(ex => ex is Java.IO.IOException)) {
                    BTDirectActivity.Current.RelayToast("Connection to client lost.");
                    break;
                } else {
                    throw e;
                }
            }
            var message = Message.FromCharStream(socket.RemoteDevice.Address, data);
            BTDirectActivity.Current.RelayToast($"Server received ({message.Type}) '{message.Content}' from {socket.RemoteDevice.Address}.");
            server.numMessagesReceived++;
            // Handle first-connection protocol so we can find out our MAC address
            if (message.Type == MsgType.Notify && message.Content.StartsWith(CONFIRM_AS_CLIENT)) {
                // Oh, good - now we known our own bliddy MAC address at last!
                if (server.MyMACaddress == "unknown") {
                    server.MyMACaddress = message.Content.Split(onNEXT)[1];
                }
                BTDirectActivity.Current.UpdateThisDevice();
                //// Let them know we not only heard them, but heard them from /their/ MAC address so they can record it.
                //SendString(dOutStream, dInStream,
                //    new Message(MsgType.Reply, $"{CONFIRM_AS_SERVER}{NEXT}{socket.RemoteDevice.Address}").ToCharStream());
                // Try to establish a reciprocal connection.
                //BTDirectActivity.Current.Connect(socket.RemoteDevice);
            }
            // Acknowledge receipt of message (including its ID number).
            // Acks themselves are never acked, avoiding an ack loop.
            if (server.DoACK && message.Type != MsgType.Ack) {
                SendString(dOutStream, dInStream,
                    new Message() { From = server.MyMACaddress, Type = MsgType.Ack, ID = message.ID, Content = $"{message.Type}: {message.Content}" }.ToCharStream());
                server.numAcksSentOut++;
                Log.Info(_tag, $"Sending ack of '{message.Content}' to {socket.RemoteDevice.Address}");
            }
            // Now do something with it!
            BluetoothMessageCenter.ActOnMessage(message);
        }
    } catch (Exception e) {
        // The EOFException clauses are related to the WiFi version; the BT streams appear to work differently, but this is retained in case it catches something later.
        if (e.InnerException != null) {
            if (e.InnerException is Java.IO.EOFException) {
                Log.Debug(_tag, $"Wrapped EOFException: {e}. Stream seems closed! Status of socket is: ConnectionType {socket.ConnectionType}, IsConnected {socket.IsConnected}.");
            } else {
                Log.Debug(_tag, $"Wrapped exception: {e}. Socket connected: {socket.IsConnected}.");
                throw e;
            }
        } else {
            if (e is Java.IO.EOFException) {
                Log.Debug(_tag, $"EOFException: {e}. Stream seems closed! Status of socket is: ConnectionType {socket.ConnectionType}, IsConnected {socket.IsConnected}.");
            } else {
                Log.Debug(_tag, $"Exception: {e}. Socket connected: {socket.IsConnected}.");
                throw e;
            }
        }
    } finally {
        Log.Debug(_tag, $"ServerThread to {socket.RemoteDevice.Address} shutting down.");
        Stop();
    }
}
} // end of marshal method

/// <summary>
/// Unmarshals this PDU from the stream: fixed fields first, then the
/// variable-length record lists whose sizes come from the counts read just
/// before them. Field order mirrors the on-wire layout — do not reorder.
/// NOTE(review): any exception is only traced, so truncated input leaves the
/// object partially populated.
/// </summary>
new public void unmarshal(DataInputStream dis) {
    base.unmarshal(dis);
    try {
        // Fixed-size header fields, in wire order.
        _aggregateID.unmarshal(dis);
        _forceID = dis.readByte();
        _aggregateState = dis.readByte();
        _aggregateType.unmarshal(dis);
        _formation = dis.readUint();
        _aggregateMarking.unmarshal(dis);
        _dimensions.unmarshal(dis);
        _orientation.unmarshal(dis);
        _centerOfMass.unmarshal(dis);
        _velocity.unmarshal(dis);
        // Counts for the variable-length lists that follow.
        _numberOfDisAggregates = dis.readUshort();
        _numberOfDisEntities = dis.readUshort();
        _numberOfSilentAggregateTypes = dis.readUshort();
        _numberOfSilentEntityTypes = dis.readUshort();
        for (int idx = 0; idx < _numberOfDisAggregates; idx++) {
            AggregateID anX = new AggregateID();
            anX.unmarshal(dis);
            _aggregateIDList.Add(anX);
        } ;
        for (int idx = 0; idx < _numberOfDisEntities; idx++) {
            EntityID anX = new EntityID();
            anX.unmarshal(dis);
            _entityIDList.Add(anX);
        } ;
        _pad2 = dis.readByte();
        for (int idx = 0; idx < _numberOfSilentAggregateTypes; idx++) {
            EntityType anX = new EntityType();
            anX.unmarshal(dis);
            _silentAggregateSystemList.Add(anX);
        } ;
        for (int idx = 0; idx < _numberOfSilentEntityTypes; idx++) {
            EntityType anX = new EntityType();
            anX.unmarshal(dis);
            _silentEntitySystemList.Add(anX);
        } ;
        _numberOfVariableDatumRecords = dis.readUint();
        for (int idx = 0; idx < _numberOfVariableDatumRecords; idx++) {
            VariableDatum anX = new VariableDatum();
            anX.unmarshal(dis);
            _variableDatumList.Add(anX);
        } ;
    } // end try
    catch (Exception e) {
        Trace.WriteLine(e);
        Trace.Flush();
    }
} // end of unmarshal method
/// <summary>This method actually executes the client-side SASL handshake.</summary>
/// <param name="underlyingOut">connection output stream</param>
/// <param name="underlyingIn">connection input stream</param>
/// <param name="userName">SASL user name</param>
/// <param name="saslProps">properties of SASL negotiation</param>
/// <param name="callbackHandler">for responding to SASL callbacks</param>
/// <returns>new pair of streams, wrapped after SASL negotiation</returns>
/// <exception cref="System.IO.IOException">for any error</exception>
private IOStreamPair DoSaslHandshake(OutputStream underlyingOut, InputStream underlyingIn
    , string userName, IDictionary<string, string> saslProps, CallbackHandler callbackHandler
    ) {
    DataOutputStream @out = new DataOutputStream(underlyingOut);
    DataInputStream @in = new DataInputStream(underlyingIn);
    SaslParticipant sasl = SaslParticipant.CreateClientSaslParticipant(userName, saslProps
        , callbackHandler);
    // Announce a SASL-wrapped transfer to the peer.
    @out.WriteInt(SaslTransferMagicNumber);
    @out.Flush();
    try {
        // Start of handshake - "initial response" in SASL terminology.
        DataTransferSaslUtil.SendSaslMessage(@out, new byte[0]);
        // step 1
        byte[] remoteResponse = DataTransferSaslUtil.ReadSaslMessage(@in);
        byte[] localResponse = sasl.EvaluateChallengeOrResponse(remoteResponse);
        IList<CipherOption> cipherOptions = null;
        if (DataTransferSaslUtil.RequestedQopContainsPrivacy(saslProps)) {
            // Negotiate cipher suites if configured. Currently, the only supported
            // cipher suite is AES/CTR/NoPadding, but the protocol allows multiple
            // values for future expansion.
            string cipherSuites = conf.Get(DFSConfigKeys.DfsEncryptDataTransferCipherSuitesKey);
            if (cipherSuites != null && !cipherSuites.IsEmpty()) {
                if (!cipherSuites.Equals(CipherSuite.AesCtrNopadding.GetName())) {
                    // FIX: the message used Java printf-style "%s=%s" placeholders,
                    // which .NET string.Format does not substitute — the exception
                    // would have shown the literal "%s=%s" instead of the key and
                    // value. Use composite-format placeholders instead.
                    throw new IOException(string.Format("Invalid cipher suite, {0}={1}",
                        DFSConfigKeys.DfsEncryptDataTransferCipherSuitesKey, cipherSuites));
                }
                CipherOption option = new CipherOption(CipherSuite.AesCtrNopadding);
                cipherOptions = Lists.NewArrayListWithCapacity(1);
                cipherOptions.AddItem(option);
            }
        }
        DataTransferSaslUtil.SendSaslMessageAndNegotiationCipherOptions(@out, localResponse
            , cipherOptions);
        // step 2 (client-side only)
        SaslResponseWithNegotiatedCipherOption response = DataTransferSaslUtil.ReadSaslMessageAndNegotiatedCipherOption(@in);
        localResponse = sasl.EvaluateChallengeOrResponse(response.payload);
        System.Diagnostics.Debug.Assert(localResponse == null);
        // SASL handshake is complete
        DataTransferSaslUtil.CheckSaslComplete(sasl, saslProps);
        CipherOption cipherOption = null;
        if (sasl.IsNegotiatedQopPrivacy()) {
            // Unwrap the negotiated cipher option
            cipherOption = DataTransferSaslUtil.Unwrap(response.cipherOption, sasl);
        }
        // If negotiated cipher option is not null, we will use it to create
        // stream pair.
        return cipherOption != null
            ? DataTransferSaslUtil.CreateStreamPair(conf, cipherOption, underlyingOut, underlyingIn, false)
            : sasl.CreateStreamPair(@out, @in);
    } catch (IOException ioe) {
        // Best-effort: tell the peer why the handshake failed, then propagate.
        DataTransferSaslUtil.SendGenericSaslErrorMessage(@out, ioe.Message);
        throw;
    }
}
/// <summary>
/// Constructor which directly instantiates the DataInputStream containing the
/// model contents. The stream is wrapped in a BinaryFileDataReader and handed
/// to the base reader, which performs the actual parsing.
/// </summary>
/// <param name="dis">
///          The DataInputStream containing the model information. </param>
public BinaryQNModelReader(DataInputStream dis) : base(new BinaryFileDataReader(dis)) {
}
/// <summary>
/// Round-trip test for a codec pair: generates `count` random key/value
/// records, encrypts with `encCodecClass`, decrypts with `decCodecClass`,
/// and verifies record-level equality, byte-at-a-time reads, and seeking.
/// Uses the enclosing fixture's `bufferSize` and `key` fields plus the
/// supplied `iv`.
/// </summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="GeneralSecurityException"/>
private void CryptoCodecTest(Configuration conf, int seed, int count, string encCodecClass
    , string decCodecClass, byte[] iv) {
    CryptoCodec encCodec = null;
    try {
        encCodec = (CryptoCodec)ReflectionUtils.NewInstance(conf.GetClassByName(encCodecClass), conf);
    } catch (TypeLoadException) {
        throw new IOException("Illegal crypto codec!");
    }
    Log.Info("Created a Codec object of type: " + encCodecClass);
    // Generate data
    DataOutputBuffer data = new DataOutputBuffer();
    RandomDatum.Generator generator = new RandomDatum.Generator(seed);
    for (int i = 0; i < count; ++i) {
        generator.Next();
        RandomDatum key = generator.GetKey();
        RandomDatum value = generator.GetValue();
        key.Write(data);
        value.Write(data);
    }
    Log.Info("Generated " + count + " records");
    // Encrypt data
    DataOutputBuffer encryptedDataBuffer = new DataOutputBuffer();
    CryptoOutputStream @out = new CryptoOutputStream(encryptedDataBuffer, encCodec, bufferSize, key, iv);
    @out.Write(data.GetData(), 0, data.GetLength());
    @out.Flush();
    @out.Close();
    Log.Info("Finished encrypting data");
    CryptoCodec decCodec = null;
    try {
        decCodec = (CryptoCodec)ReflectionUtils.NewInstance(conf.GetClassByName(decCodecClass), conf);
    } catch (TypeLoadException) {
        throw new IOException("Illegal crypto codec!");
    }
    Log.Info("Created a Codec object of type: " + decCodecClass);
    // Decrypt data
    DataInputBuffer decryptedDataBuffer = new DataInputBuffer();
    decryptedDataBuffer.Reset(encryptedDataBuffer.GetData(), 0, encryptedDataBuffer.GetLength());
    CryptoInputStream @in = new CryptoInputStream(decryptedDataBuffer, decCodec, bufferSize, key, iv);
    DataInputStream dataIn = new DataInputStream(new BufferedInputStream(@in));
    // Check: decrypted records must equal the originals.
    DataInputBuffer originalData = new DataInputBuffer();
    originalData.Reset(data.GetData(), 0, data.GetLength());
    DataInputStream originalIn = new DataInputStream(new BufferedInputStream(originalData));
    for (int i_1 = 0; i_1 < count; ++i_1) {
        RandomDatum k1 = new RandomDatum();
        RandomDatum v1 = new RandomDatum();
        k1.ReadFields(originalIn);
        v1.ReadFields(originalIn);
        RandomDatum k2 = new RandomDatum();
        RandomDatum v2 = new RandomDatum();
        k2.ReadFields(dataIn);
        v2.ReadFields(dataIn);
        Assert.True("original and encrypted-then-decrypted-output not equal", k1.Equals(k2) && v1.Equals(v2));
        // original and encrypted-then-decrypted-output have the same hashCode
        // (checked via dictionary lookup: k2/v2 must find k1/v1's entries).
        IDictionary<RandomDatum, string> m = new Dictionary<RandomDatum, string>();
        m[k1] = k1.ToString();
        m[v1] = v1.ToString();
        string result = m[k2];
        Assert.Equal("k1 and k2 hashcode not equal", result, k1.ToString());
        result = m[v2];
        Assert.Equal("v1 and v2 hashcode not equal", result, v1.ToString());
    }
    // Decrypt data byte-at-a-time
    originalData.Reset(data.GetData(), 0, data.GetLength());
    decryptedDataBuffer.Reset(encryptedDataBuffer.GetData(), 0, encryptedDataBuffer.GetLength());
    @in = new CryptoInputStream(decryptedDataBuffer, decCodec, bufferSize, key, iv);
    // Check: single-byte reads must match, including the -1 EOF sentinel.
    originalIn = new DataInputStream(new BufferedInputStream(originalData));
    int expected;
    do {
        expected = originalIn.Read();
        Assert.Equal("Decrypted stream read by byte does not match", expected, @in.Read());
    } while (expected != -1);
    // Seek to a certain position and decrypt
    originalData.Reset(data.GetData(), 0, data.GetLength());
    decryptedDataBuffer.Reset(encryptedDataBuffer.GetData(), 0, encryptedDataBuffer.GetLength());
    @in = new CryptoInputStream(new TestCryptoStreams.FakeInputStream(decryptedDataBuffer), decCodec, bufferSize, key, iv);
    int seekPos = data.GetLength() / 3;
    @in.Seek(seekPos);
    // Check: reads after the seek must still match the plaintext.
    TestCryptoStreams.FakeInputStream originalInput = new TestCryptoStreams.FakeInputStream(originalData);
    originalInput.Seek(seekPos);
    do {
        expected = originalInput.Read();
        Assert.Equal("Decrypted stream read by byte does not match", expected, @in.Read());
    } while (expected != -1);
    Log.Info("SUCCESS! Completed checking " + count + " records");
    // Check secure random generator
    TestSecureRandom(encCodec);
}
/// <summary>
/// Reads a 4-byte floating point value: fills the buffer, pulls four bytes
/// in the reader's swapped order, and decodes them via DataInputStream.ReadFloat.
/// </summary>
public virtual float ReadSingle() {
    FillBuffer(4);
    var byteSource = new ByteArrayInputStream(ReadSwapped(4));
    var decoder = new DataInputStream(byteSource);
    float value = decoder.ReadFloat();
    // Release both wrappers in the same order the original code used.
    byteSource.Close();
    decoder.Close();
    return value;
}
/// <summary>
/// Converts an MNIST-style image/label pair into a tab-separated data file
/// plus a classifier properties file. Magic numbers 2051 (images) and 2049
/// (labels) are validated before reading; all multi-byte reads follow the
/// idx file's fixed field order.
/// </summary>
/// <exception cref="System.IO.IOException"/>
public static void Main(string[] args) {
    if (args.Length != 4) {
        logger.Info("Usage: MnistConverter dataFile labelFile outFile propsFile");
        return;
    }
    DataInputStream xStream = IOUtils.GetDataInputStream(args[0]);
    DataInputStream yStream = IOUtils.GetDataInputStream(args[1]);
    PrintWriter oStream = new PrintWriter(new FileWriter(args[2]));
    PrintWriter pStream = new PrintWriter(new FileWriter(args[3]));
    // Validate the two file-format magic numbers.
    int xMagic = xStream.ReadInt();
    if (xMagic != 2051) {
        throw new Exception("Bad format of xStream");
    }
    int yMagic = yStream.ReadInt();
    if (yMagic != 2049) {
        throw new Exception("Bad format of yStream");
    }
    // Image and label counts must agree.
    int xNumImages = xStream.ReadInt();
    int yNumLabels = yStream.ReadInt();
    if (xNumImages != yNumLabels) {
        throw new Exception("x and y sizes don't match");
    }
    logger.Info("Images and label file both contain " + xNumImages + " entries.");
    int xRows = xStream.ReadInt();
    int xColumns = xStream.ReadInt();
    // Emit one line per image: label, then each pixel value tab-separated.
    for (int i = 0; i < xNumImages; i++) {
        int label = yStream.ReadUnsignedByte();
        int[] matrix = new int[xRows * xColumns];
        for (int j = 0; j < xRows * xColumns; j++) {
            matrix[j] = xStream.ReadUnsignedByte();
        }
        oStream.Print(label);
        foreach (int k in matrix) {
            oStream.Print('\t');
            oStream.Print(k);
        }
        oStream.Println();
    }
    logger.Info("Converted.");
    xStream.Close();
    yStream.Close();
    oStream.Close();
    // number from 1; column 0 is the class
    pStream.Println("goldAnswerColumn = 0");
    pStream.Println("useClassFeature = true");
    pStream.Println("sigma = 10");
    // not optimized, but weak regularization seems appropriate when much data, few features
    for (int j_1 = 0; j_1 < xRows * xColumns; j_1++) {
        pStream.Println((j_1 + 1) + ".realValued = true");
    }
    pStream.Close();
}
/// <summary>
/// Reads two strings from the request stream (in stream order) and forwards
/// them to the member service's CheckUserExists call, storing the outcome in
/// _result. C# evaluates arguments left-to-right, so naming the reads keeps
/// the original read order.
/// </summary>
public void Invoke(HttpSessionState session, DataInputStream input) {
    string firstArgument = input.ReadString();
    string secondArgument = input.ReadString();
    var client = HttpProcessor.GetClient<MemberServiceSoapClient>(session);
    _result = client.CheckUserExists(firstArgument, secondArgument);
}
// Constructs the object by immediately parsing it from the stream;
// `oldVersion` is forwarded to read(), presumably selecting a legacy
// serialized layout — confirm against read()'s implementation.
public EphGalileo(DataInputStream dai, bool oldVersion) { read(dai, oldVersion); }
// Reads one serialized section from the stream: a type byte, a little-endian
// length, then a length-delimited payload parsed by the matching loader.
// Returns false only at end-of-stream. Note: obj stays null for type 0
// (header section) and unhandled type ids, yet is still appended to
// objectList — presumably to keep list indices aligned with the file's
// object numbering; confirm against the loaders that index into it.
private static bool loadObject(
    ref DataInputStream @in,
    ref List<Object3D> objectList,
    ref List<Object3D> rootObjectList) {
    int num = @in.read();
    if (num == -1) {
        // End of stream: no more sections.
        return(false);
    }
    int length = Loader.readIntLE(ref @in);
    sbyte[] numArray = new sbyte[length];
    @in.readFully(numArray, length);
    // Parse the payload from its own bounded stream so a malformed section
    // cannot over-read into the next one.
    DataInputStream in1 = new DataInputStream((InputStream) new ByteArrayInputStream(numArray, length));
    object obj = (object)null;
    switch (num) {
    case 0:
        // Header section: version bytes, externalReferences flag, sizes — all discarded.
        in1.readUnsignedByte();
        in1.readUnsignedByte();
        in1.readBoolean();
        Loader.readIntLE(ref in1);
        Loader.readIntLE(ref in1);
        break;
    case 1:
        obj = Loader.loadAnimationController(ref in1, ref objectList, ref rootObjectList);
        break;
    case 2:
        obj = Loader.loadAnimationTrack(ref in1, ref objectList, ref rootObjectList);
        break;
    case 3:
        obj = Loader.loadAppearance(ref in1, ref objectList, ref rootObjectList);
        break;
    case 5:
        obj = Loader.loadCamera(ref in1, ref objectList, ref rootObjectList);
        break;
    case 6:
        obj = Loader.loadCompositingMode(ref in1, ref objectList, ref rootObjectList);
        break;
    case 8:
        obj = Loader.loadPolygonMode(ref in1, ref objectList, ref rootObjectList);
        break;
    case 9:
        obj = (object)new Group();
        Loader.loadGroup((Group)obj, ref in1, ref objectList, ref rootObjectList);
        break;
    case 10:
        obj = Loader.loadImage2D(ref in1, ref objectList, ref rootObjectList);
        break;
    case 11:
        obj = Loader.loadTriangleStripArray(ref in1, ref objectList, ref rootObjectList);
        break;
    case 14:
        obj = (object)new Mesh();
        Loader.loadMesh((Mesh)obj, ref in1, ref objectList, ref rootObjectList);
        break;
    case 16:
        obj = Loader.loadSkinnedMesh(ref in1, ref objectList, ref rootObjectList);
        break;
    case 17:
        obj = Loader.loadTexture2D(ref in1, ref objectList, ref rootObjectList);
        break;
    case 19:
        obj = Loader.loadKeyframeSequence(ref in1, ref objectList, ref rootObjectList);
        break;
    case 20:
        obj = Loader.loadVertexArray(ref in1, ref objectList, ref rootObjectList);
        break;
    case 21:
        obj = Loader.loadVertexBuffer(ref in1, ref objectList, ref rootObjectList);
        break;
    case 22:
        obj = Loader.loadWorld(ref in1, ref objectList, ref rootObjectList);
        break;
    }
    objectList.Add((Object3D)obj);
    if (obj != null) {
        rootObjectList.Add((Object3D)obj);
    }
    return(true);
}
/// <summary>
/// Loads a Sphinx-3 binary density (mean/variance) file into a Pool.
/// Validates the "1.0" header version, reads the dimensions
/// (states x streams x gaussians), floors every vector with `floor`,
/// and records the dimensions on this loader.
/// Variable key: num3=#states, num4=#streams, num5=#gaussians/state,
/// array=vector length per stream, i=raw value count, num2=checksum flag.
/// </summary>
/// <param name="path">path of the density file</param>
/// <param name="floor">minimum value applied to every loaded component</param>
public virtual Pool loadDensityFile(string path, float floor) {
    Properties properties = new Properties();
    int num = 0;
    DataInputStream dataInputStream = this.readS3BinaryHeader(path, properties);
    // Only file-format version 1.0 is supported.
    string property = properties.getProperty("version");
    if (property == null || !String.instancehelper_equals(property, "1.0")) {
        string text = new StringBuilder().append("Unsupported version in ").append(path).toString();
        throw new IOException(text);
    }
    // chksum0 == "yes" means the file carries a trailing checksum.
    string property2 = properties.getProperty("chksum0");
    int num2 = (property2 == null || !String.instancehelper_equals(property2, "yes")) ? 0 : 1;
    this.resetChecksum();
    int num3 = this.readInt(dataInputStream);
    int num4 = this.readInt(dataInputStream);
    int num5 = this.readInt(dataInputStream);
    // One vector length per stream.
    int[] array = new int[num4];
    int i;
    for (i = 0; i < num4; i++) {
        array[i] = this.readInt(dataInputStream);
    }
    // Total raw float count declared by the file (validated below).
    i = this.readInt(dataInputStream);
    this.logger.fine(new StringBuilder().append("Number of states ").append(num3).toString());
    this.logger.fine(new StringBuilder().append("Number of streams ").append(num4).toString());
    this.logger.fine(new StringBuilder().append("Number of gaussians per state ").append(num5).toString());
    this.logger.fine(new StringBuilder().append("Vector length ").append(array.Length).toString());
    this.logger.fine(new StringBuilder().append("Raw length ").append(i).toString());
    for (int j = 0; j < num4; j++) {
        num += array[j];
    }
    // Declared raw length must equal gaussians * total vector length * states.
    if (!Sphinx3Loader.assertionsDisabled && i != num5 * num * num3) {
        throw new AssertionError();
    }
    Pool pool = new Pool(path);
    pool.setFeature(Pool.Feature.__NUM_SENONES, num3);
    pool.setFeature(Pool.Feature.__NUM_STREAMS, num4);
    pool.setFeature(Pool.Feature.__NUM_GAUSSIANS_PER_STATE, num5);
    // Read every (state, stream, gaussian) vector in file order, floor it,
    // and store it at a flattened index.
    for (int k = 0; k < num3; k++) {
        for (int l = 0; l < num4; l++) {
            for (int m = 0; m < num5; m++) {
                float[] array2 = this.readFloatArray(dataInputStream, array[l]);
                Utilities.floorData(array2, floor);
                pool.put(k * num4 * num5 + l * num5 + m, array2);
            }
        }
    }
    this.validateChecksum(dataInputStream, num2 != 0);
    dataInputStream.close();
    this.numStates = num3;
    this.numStreams = num4;
    this.numGaussiansPerState = num5;
    this.vectorLength = array;
    return(pool);
}
/// <summary>
/// Reads a little-endian 16-bit value and returns it zero-extended into the
/// unsigned range 0..65535.
/// </summary>
public static int readUShortLE(ref DataInputStream @in) {
    short signedValue = Loader.readShortLE(ref @in);
    // Masking strips the sign extension introduced by the short type.
    return signedValue & (int)ushort.MaxValue;
}
// Reads one byte and widens it to a char. NOTE(review): the sbyte cast
// sign-extends bytes >= 0x80, mapping them into the 0xFF80-0xFFFF char
// range rather than Latin-1 — confirm this matches the file format's
// intended encoding before changing it.
private char readChar(DataInputStream dataInputStream) { return((char)((sbyte)dataInputStream.readByte())); }
/// <summary>
/// Reads a little-endian 32-bit pattern and reinterprets its bits as an
/// IEEE-754 single-precision float via BitConverter.
/// </summary>
public static float readFloatLE(ref DataInputStream @in) {
    int bitPattern = Loader.readIntLE(ref @in);
    byte[] rawBytes = BitConverter.GetBytes(bitPattern);
    return BitConverter.ToSingle(rawBytes, 0);
}
// Loads one DLC pack's metadata from "<root>/<sellId>/<PACKDATA_FILENAME>":
// pack header, asset filenames, unlock lists (actions/walls/floors), object
// and item records, and per-category sim-attribute tables. All reads follow
// the pack file's fixed field order — do not reorder them.
private DLCPack loadPack(int packId, int sellId) {
    AppEngine canvas = AppEngine.getCanvas();
    SimWorld simWorld = canvas.getSimWorld();
    SimData simData = canvas.getSimData();
    TextManager textManager = canvas.getTextManager();
    DLCPack dlcPack = new DLCPack();
    // Build the pack's base folder path: <root>/<sellId>
    StringBuffer stringBuffer = new StringBuffer();
    stringBuffer.setLength(0);
    stringBuffer.append(this.m_rootFolder);
    stringBuffer.append(DLCManager.pathSeparatorChar);
    stringBuffer.append(sellId);
    string str1 = stringBuffer.toString();
    // Open the pack data file inside that folder.
    stringBuffer.setLength(0);
    stringBuffer.append(str1);
    stringBuffer.append(DLCManager.pathSeparatorChar);
    stringBuffer.append(DLCManager.PACKDATA_FILENAME);
    DataInputStream dis = new DataInputStream(JavaLib.getResourceAsStream(stringBuffer.toString(), false));
    // Register the pack's localized strings file with the text manager.
    stringBuffer.setLength(0);
    stringBuffer.append(str1);
    stringBuffer.append(DLCManager.pathSeparatorChar);
    stringBuffer.append(DLCManager.STRINGS_DLC_FILENAME);
    string filename = stringBuffer.toString();
    int stringPooldId = textManager.addStringsFile(filename);
    // First int is read and discarded (presumably a version/magic — confirm).
    dis.readInt();
    dlcPack.d_packId = dis.readInt();
    dlcPack.d_packName = RecObject.readXMLtoBinString(dis);
    // Asset filenames, in fixed order.
    string str2 = this.readXMLtoBinFilename(dis, str1);
    dlcPack.d_objectTextureFilename = str2;
    string str3 = this.readXMLtoBinFilename(dis, str1);
    dlcPack.d_objectScrollingTextureFilename = str3;
    dlcPack.d_objectScrollingTextureTiming = dis.readInt();
    string str4 = this.readXMLtoBinFilename(dis, str1);
    dlcPack.d_simMaleFilename = str4;
    string str5 = this.readXMLtoBinFilename(dis, str1);
    dlcPack.d_simFemaleFilename = str5;
    string str6 = this.readXMLtoBinFilename(dis, str1);
    dlcPack.d_miniCarModelFilename = str6;
    string str7 = this.readXMLtoBinFilename(dis, str1);
    dlcPack.d_miniCarTextureFilename = str7;
    // Action unlocks: byte count, then one short id each.
    int length1 = (int)dis.readByte();
    short[] numArray1 = new short[length1];
    for (int index = 0; index < length1; ++index) {
        int action = (int)dis.readShort();
        numArray1[index] = (short)action;
        simData.unlockAction(action, packId);
    }
    // Object records: short count, each parsed by RecObject.readDLC.
    int length2 = (int)dis.readShort();
    RecObject[] recObjectArray = new RecObject[length2];
    for (int index = 0; index < length2; ++index) {
        recObjectArray[index] = new RecObject();
        recObjectArray[index].readDLC(dis, packId, index, stringPooldId, str1);
    }
    // Item records: byte count.
    int length3 = (int)dis.readByte();
    RecItem[] recItemArray = new RecItem[length3];
    for (int index = 0; index < length3; ++index) {
        recItemArray[index] = new RecItem();
        recItemArray[index].readDLC(dis, packId, index, stringPooldId, str1);
    }
    // Wall unlocks.
    int length4 = (int)dis.readByte();
    short[] numArray2 = new short[length4];
    for (int index = 0; index < length4; ++index) {
        int wallId = (int)dis.readShort();
        numArray2[index] = (short)wallId;
        simWorld.unlockWall(wallId, packId);
    }
    // Floor unlocks.
    int length5 = (int)dis.readByte();
    short[] numArray3 = new short[length5];
    for (int index = 0; index < length5; ++index) {
        int floorId = (int)dis.readShort();
        numArray3[index] = (short)floorId;
        simWorld.unlockFloor(floorId, packId);
    }
    // One byte is read but unused (num); kept to stay in sync with the format.
    int num = (int)dis.readByte();
    // Sim attributes: 15 fixed categories, each with (id, flags, filenames[]).
    int length6 = 15;
    short[][] numArray4 = new short[length6][];
    int[][] numArray5 = new int[length6][];
    string[][][] strArray1 = new string[length6][][];
    for (int index1 = 0; index1 < length6; ++index1) {
        int length7 = (int)dis.readByte();
        short[] numArray6 = new short[length7];
        int[] numArray7 = new int[length7];
        string[][] strArray2 = new string[length7][];
        for (int index2 = 0; index2 < length7; ++index2) {
            numArray6[index2] = (short)dis.readInt();
            numArray7[index2] = dis.readInt();
            int length8 = (int)dis.readByte();
            string[] strArray3 = new string[length8];
            for (int index3 = 0; index3 < length8; ++index3) {
                string str8 = this.readXMLtoBinFilename(dis, str1);
                strArray3[index3] = str8;
            }
            strArray2[index2] = strArray3;
        }
        numArray4[index1] = numArray6;
        numArray5[index1] = numArray7;
        strArray1[index1] = strArray2;
    }
    dlcPack.d_objectRecords = recObjectArray;
    dlcPack.d_itemRecords = recItemArray;
    dlcPack.d_simAttribUserIds = numArray4;
    dlcPack.d_simAttribFlags = numArray5;
    dlcPack.d_simAttribTextureFilenames = strArray1;
    return(dlcPack);
}
/// <summary>
/// Reads this packet's payload from the stream: the target entity id
/// followed by its watchable-object metadata.
/// </summary>
/// <param name="datainputstream">stream positioned at the start of the packet body</param>
public override void readPacketData(DataInputStream datainputstream)
{
    // Wire order matters: the entity id precedes the metadata block.
    int id = datainputstream.readInt();
    entityId = id;
    field_21018_b = DataWatcher.readWatchableObjects(datainputstream);
}
//PES 09192009 This method used Reflection which is slow, new method 'UnMarshalRawPDU' should be used instead
/// <summary>
/// Unmarshals all data into the pdu object by reflectively invoking the
/// "unmarshal" method that each PDU class defines.
/// Deprecated: this method uses reflection, which is slow; use UnMarshalRawPDU instead.
/// </summary>
/// <param name="pdu">object where the unmarshalled data will be stored</param>
/// <param name="dStream">location of where the unmarshalled data is located</param>
private static void ReturnUnmarshalledPDU(object pdu, DataInputStream dStream)
{
    // Every PDU class is expected to expose an instance method named "unmarshal"
    // taking the input stream as its single argument.
    object[] invokeArgs = new object[] { dStream };
    pdu.GetType().InvokeMember(
        "unmarshal",
        System.Reflection.BindingFlags.InvokeMethod,
        null,
        pdu,
        invokeArgs);
}
/// <summary>
/// Reads this packet's payload: a single modified-UTF string holding the username.
/// </summary>
/// <param name="datainputstream">stream positioned at the start of the packet body</param>
public override void readPacketData(DataInputStream datainputstream)
{
    string name = datainputstream.readUTF();
    username = name;
}
/// <summary>
/// Exercises InStream.create over a 4096-byte uncompressed stream whose bytes are
/// split across three disjoint ByteBuffers: sequential reads, backward seeks via
/// recorded positions, and reads when only a subset of the ranges is supplied.
/// </summary>
public void testUncompressedDisjointBuffers() { OutputCollector collect = new OutputCollector(); OutStream @out = new OutStream("test", 400, null, collect); PositionCollector[] positions = new PositionCollector[1024]; DataOutput stream = new DataOutputStream(@out);
// Write 1024 ints (4096 bytes), recording the stream position before each one.
for (int i = 0; i < 1024; ++i) { positions[i] = new PositionCollector(); @out.getPosition(positions[i]); stream.writeInt(i); } @out.Flush(); Assert.Equal("test", @out.ToString()); Assert.Equal(4096, collect.buffer.size());
// Re-expose the collected bytes as three separate buffers (1024/2048/1024 bytes).
ByteBuffer[] inBuf = new ByteBuffer[3]; inBuf[0] = ByteBuffer.allocate(1100); inBuf[1] = ByteBuffer.allocate(2200); inBuf[2] = ByteBuffer.allocate(1100); collect.buffer.setByteBuffer(inBuf[0], 0, 1024); collect.buffer.setByteBuffer(inBuf[1], 1024, 2048); collect.buffer.setByteBuffer(inBuf[2], 3072, 1024); for (int i = 0; i < inBuf.Length; ++i) { inBuf[i].flip(); }
InStream @in = InStream.create(null, "test", inBuf, new long[] { 0, 1024, 3072 }, 4096, null, 400); Assert.Equal("uncompressed stream test position: 0 length: 4096" + " range: 0 offset: 0 limit: 0", @in.ToString()); DataInputStream inStream = new DataInputStream(@in);
// Sequential forward read, then random access backwards via recorded positions.
for (int i = 0; i < 1024; ++i) { int x = inStream.readInt(); Assert.Equal(i, x); } Assert.Equal(0, @in.available()); for (int i = 1023; i >= 0; --i) { @in.seek(positions[i]); Assert.Equal(i, inStream.readInt()); }
// Stream missing the first range: seek past the gap and read to the end.
@in = InStream.create(null, "test", new ByteBuffer[] { inBuf[1], inBuf[2] }, new long[] { 1024, 3072 }, 4096, null, 400); inStream = new DataInputStream(@in); positions[256].reset(); @in.seek(positions[256]); for (int i = 256; i < 1024; ++i) { Assert.Equal(i, inStream.readInt()); }
// Stream missing the middle range: read the head, then seek over the hole.
@in = InStream.create(null, "test", new ByteBuffer[] { inBuf[0], inBuf[2] }, new long[] { 0, 3072 }, 4096, null, 400); inStream = new DataInputStream(@in); positions[768].reset(); for (int i = 0; i < 256; ++i) { Assert.Equal(i, inStream.readInt()); } @in.seek(positions[768]); for (int i = 768; i < 1024; ++i) { Assert.Equal(i, inStream.readInt()); } }
/// <summary>
/// Analyze the results: parses the NNBench reducer output ("part-00000"),
/// aggregates the raw timing counters, derives TPS / average-latency /
/// average-execution-time figures for the selected operation, and writes the
/// formatted report both to the log and to the results file.
/// </summary>
/// <exception cref="System.IO.IOException">on error</exception>
private static void AnalyzeResults() { FileSystem fs = FileSystem.Get(config); Path reduceFile = new Path(new Path(baseDir, OutputDirName), "part-00000"); DataInputStream @in; @in = new DataInputStream(fs.Open(reduceFile)); BufferedReader lines; lines = new BufferedReader(new InputStreamReader(@in));
// Raw counters accumulated from the reducer output; all default to zero
// so missing attributes simply leave them unset.
long totalTimeAL1 = 0l; long totalTimeAL2 = 0l; long totalTimeTPmS = 0l; long lateMaps = 0l; long numOfExceptions = 0l; long successfulFileOps = 0l; long mapStartTimeTPmS = 0l; long mapEndTimeTPmS = 0l; string resultTPSLine1 = null; string resultTPSLine2 = null; string resultALLine1 = null; string resultALLine2 = null; string line;
// Each output line is "<key> <value>"; keys are matched by suffix.
while ((line = lines.ReadLine()) != null) { StringTokenizer tokens = new StringTokenizer(line, " \t\n\r\f%;"); string attr = tokens.NextToken(); if (attr.EndsWith(":totalTimeAL1")) { totalTimeAL1 = long.Parse(tokens.NextToken()); } else { if (attr.EndsWith(":totalTimeAL2")) { totalTimeAL2 = long.Parse(tokens.NextToken()); } else { if (attr.EndsWith(":totalTimeTPmS")) { totalTimeTPmS = long.Parse(tokens.NextToken()); } else { if (attr.EndsWith(":latemaps")) { lateMaps = long.Parse(tokens.NextToken()); } else { if (attr.EndsWith(":numOfExceptions")) { numOfExceptions = long.Parse(tokens.NextToken()); } else { if (attr.EndsWith(":successfulFileOps")) { successfulFileOps = long.Parse(tokens.NextToken()); } else { if (attr.EndsWith(":mapStartTimeTPmS")) { mapStartTimeTPmS = long.Parse(tokens.NextToken()); } else { if (attr.EndsWith(":mapEndTimeTPmS")) { mapEndTimeTPmS = long.Parse(tokens.NextToken()); } } } } } } } } }
// Average latency is the average time to perform 'n' number of
// operations, n being the number of files
double avgLatency1 = (double)totalTimeAL1 / successfulFileOps; double avgLatency2 = (double)totalTimeAL2 / successfulFileOps;
// The time it takes for the longest running map is measured. Using that,
// cluster transactions per second is calculated. It includes time to
// retry any of the failed operations
double longestMapTimeTPmS = (double)(mapEndTimeTPmS - mapStartTimeTPmS); double totalTimeTPS = (longestMapTimeTPmS == 0) ? (1000 * successfulFileOps) : (double )(1000 * successfulFileOps) / longestMapTimeTPmS;
// The time it takes to perform 'n' operations is calculated (in ms),
// n being the number of files. Using that time, the average execution
// time is calculated. It includes time to retry any of the
// failed operations
double AverageExecutionTime = (totalTimeTPmS == 0) ? (double)successfulFileOps : (double)totalTimeTPmS / successfulFileOps;
if (operation.Equals(OpCreateWrite)) { // For create/write/close, it is treated as two transactions,
// since a file create from a client perspective involves create and close
resultTPSLine1 = "               TPS: Create/Write/Close: " + (int)(totalTimeTPS * 2); resultTPSLine2 = "Avg exec time (ms): Create/Write/Close: " + AverageExecutionTime; resultALLine1 = "            Avg Lat (ms): Create/Write: " + avgLatency1; resultALLine2 = "                   Avg Lat (ms): Close: " + avgLatency2; } else { if (operation.Equals(OpOpenRead)) { resultTPSLine1 = "                        TPS: Open/Read: " + (int)totalTimeTPS; resultTPSLine2 = "         Avg Exec time (ms): Open/Read: " + AverageExecutionTime; resultALLine1 = "                    Avg Lat (ms): Open: " + avgLatency1; if (readFileAfterOpen) { resultALLine2 = "                  Avg Lat (ms): Read: " + avgLatency2; } } else { if (operation.Equals(OpRename)) { resultTPSLine1 = "                           TPS: Rename: " + (int)totalTimeTPS; resultTPSLine2 = "            Avg Exec time (ms): Rename: " + AverageExecutionTime; resultALLine1 = "                  Avg Lat (ms): Rename: " + avgLatency1; } else { if (operation.Equals(OpDelete)) { resultTPSLine1 = "                           TPS: Delete: " + (int)totalTimeTPS; resultTPSLine2 = "            Avg Exec time (ms): Delete: " + AverageExecutionTime; resultALLine1 = "                  Avg Lat (ms): Delete: " + avgLatency1; } } } }
// Assemble the human-readable report, one line per entry.
string[] resultLines = new string[] { "-------------- NNBench -------------- : ", "                               Version: " + NnbenchVersion, "                           Date & time: " + sdf.Format(Sharpen.Extensions.CreateDate(Runtime.CurrentTimeMillis())), string.Empty , "                        Test Operation: " + operation, "                            Start time: " + sdf.Format(Sharpen.Extensions.CreateDate(startTime)), "                           Maps to run: " + numberOfMaps, "                        Reduces to run: " + numberOfReduces, "                    Block Size (bytes): " + blockSize, "                        Bytes to write: " + bytesToWrite, "                    Bytes per checksum: " + bytesPerChecksum, "                       Number of files: " + numberOfFiles, "                    Replication factor: " + replicationFactorPerFile, "            Successful file operations: " + successfulFileOps, string.Empty, "        # maps that missed the barrier: " + lateMaps, "                          # exceptions: " + numOfExceptions, string.Empty , resultTPSLine1, resultTPSLine2, resultALLine1, resultALLine2, string.Empty, "                 RAW DATA: AL Total #1: " + totalTimeAL1, "                 RAW DATA: AL Total #2: " + totalTimeAL2, "              RAW DATA: TPS Total (ms): " + totalTimeTPmS, "       RAW DATA: Longest Map Time (ms): " + longestMapTimeTPmS , "                   RAW DATA: Late maps: " + lateMaps, "             RAW DATA: # of exceptions: " + numOfExceptions, string.Empty }; TextWriter res = new TextWriter(new FileOutputStream(new FilePath(DefaultResFileName ), true)); // Write to a file and also dump to log
for (int i = 0; i < resultLines.Length; i++) { Log.Info(resultLines[i]); res.WriteLine(resultLines[i]); } }
/// <summary>
/// Parses a single annotation element value from the class-file stream.
/// The first byte is the element tag: 'e' = enum constant, 'c' = class literal,
/// '[' = array of nested elements, '@' = nested annotation, and the remaining
/// tags (B, C, D, F, I, J, S, Z, s) are primitive/string constant-pool entries.
/// </summary>
/// <param name="data">stream positioned at the element tag byte</param>
/// <param name="pool">constant pool used to resolve indices read from the stream</param>
/// <returns>an Exprent modelling the element value</returns>
/// <exception cref="IOException"/>
public static Exprent ParseAnnotationElement(DataInputStream data, ConstantPool pool ) { int tag = data.ReadUnsignedByte(); switch (tag) { case 'e': { // enum constant
string className = pool.GetPrimitiveConstant(data.ReadUnsignedShort()).GetString( ); string constName = pool.GetPrimitiveConstant(data.ReadUnsignedShort()).GetString( ); FieldDescriptor descr = FieldDescriptor.ParseDescriptor(className); return(new FieldExprent(constName, descr.type.value, true, null, descr, null)); } case 'c': { // class
string descriptor = pool.GetPrimitiveConstant(data.ReadUnsignedShort()).GetString (); VarType type = FieldDescriptor.ParseDescriptor(descriptor).type; string value;
// Map the descriptor's element type to the runtime class name used by the literal.
switch (type.type) { case ICodeConstants.Type_Object: { value = type.value; break; } case ICodeConstants.Type_Byte: { value = typeof(byte).FullName; break; } case ICodeConstants.Type_Char: { value = typeof(char).FullName; break; } case ICodeConstants.Type_Double: { value = typeof(double).FullName; break; } case ICodeConstants.Type_Float: { value = typeof(float).FullName; break; } case ICodeConstants.Type_Int: { value = typeof(int).FullName; break; } case ICodeConstants.Type_Long: { value = typeof(long).FullName; break; } case ICodeConstants.Type_Short: { value = typeof(short).FullName; break; } case ICodeConstants.Type_Boolean: { value = typeof(bool).FullName; break; } case ICodeConstants.Type_Void: { value = typeof(void).FullName; break; } default: { throw new Exception("invalid class type: " + type.type); } } return(new ConstExprent(VarType.Vartype_Class, value, null)); } case '[': { // array
List <Exprent> elements = new System.Collections.Generic.List <Exprent>(); int len = data.ReadUnsignedShort(); if (len > 0) { elements = new List <Exprent>(len); for (int i = 0; i < len; i++) { elements.Add(ParseAnnotationElement(data, pool)); } }
// An empty array carries no element type; fall back to Object[].
VarType newType; if ((elements.Count == 0)) { newType = new VarType(ICodeConstants.Type_Object, 1, "java/lang/Object"); } else { VarType elementType = elements[0].GetExprType(); newType = new VarType(elementType.type, 1, elementType.value); } NewExprent newExpr = new NewExprent(newType, new System.Collections.Generic.List < Exprent>(), null); newExpr.SetDirectArrayInit(true); newExpr.SetLstArrayElements(elements); return(newExpr); } case '@': { // annotation
return(ParseAnnotation(data, pool)); } default: {
// Remaining tags reference a primitive/string constant in the pool.
PrimitiveConstant cn = pool.GetPrimitiveConstant(data.ReadUnsignedShort()); switch (tag) { case 'B': { return(new ConstExprent(VarType.Vartype_Byte, cn.value, null)); } case 'C': { return(new ConstExprent(VarType.Vartype_Char, cn.value, null)); } case 'D': { return(new ConstExprent(VarType.Vartype_Double, cn.value, null)); } case 'F': { return(new ConstExprent(VarType.Vartype_Float, cn.value, null)); } case 'I': { return(new ConstExprent(VarType.Vartype_Int, cn.value, null)); } case 'J': { return(new ConstExprent(VarType.Vartype_Long, cn.value, null)); } case 'S': { return(new ConstExprent(VarType.Vartype_Short, cn.value, null)); } case 'Z': { return(new ConstExprent(VarType.Vartype_Boolean, cn.value, null)); } case 's': { return(new ConstExprent(VarType.Vartype_String, cn.value, null)); } default: { throw new Exception("invalid element type!"); } } break; } } }
/// <summary>
/// Compressed (zlib) counterpart of testUncompressedDisjointBuffers: a 1674-byte
/// compressed stream split across three disjoint ByteBuffers is read sequentially,
/// seeked backwards, and re-opened with only a subset of its ranges present.
/// </summary>
public void testDisjointBuffers() { OutputCollector collect = new OutputCollector(); CompressionCodec codec = new ZlibCodec(); OutStream @out = new OutStream("test", 400, codec, collect); PositionCollector[] positions = new PositionCollector[1024]; DataOutput stream = new DataOutputStream(@out);
// Write 1024 ints, recording the (compressed) stream position before each one.
for (int i = 0; i < 1024; ++i) { positions[i] = new PositionCollector(); @out.getPosition(positions[i]); stream.writeInt(i); } @out.Flush(); Assert.Equal("test", @out.ToString()); Assert.Equal(1674, collect.buffer.size());
// Split the 1674 compressed bytes into three buffers at offsets 483 and 1625.
ByteBuffer[] inBuf = new ByteBuffer[3]; inBuf[0] = ByteBuffer.allocate(500); inBuf[1] = ByteBuffer.allocate(1200); inBuf[2] = ByteBuffer.allocate(500); collect.buffer.setByteBuffer(inBuf[0], 0, 483); collect.buffer.setByteBuffer(inBuf[1], 483, 1625 - 483); collect.buffer.setByteBuffer(inBuf[2], 1625, 1674 - 1625); for (int i = 0; i < inBuf.Length; ++i) { inBuf[i].flip(); }
InStream @in = InStream.create(null, "test", inBuf, new long[] { 0, 483, 1625 }, 1674, codec, 400); Assert.Equal("compressed stream test position: 0 length: 1674 range: 0" + " offset: 0 limit: 0 range 0 = 0 to 483;" + " range 1 = 483 to 1142; range 2 = 1625 to 49", @in.ToString()); DataInputStream inStream = new DataInputStream(@in);
// Sequential forward read, then random access backwards via recorded positions.
for (int i = 0; i < 1024; ++i) { int x = inStream.readInt(); Assert.Equal(i, x); } Assert.Equal(0, @in.available()); for (int i = 1023; i >= 0; --i) { @in.seek(positions[i]); Assert.Equal(i, inStream.readInt()); }
// Stream missing the first range: seek past the gap and read to the end.
@in = InStream.create(null, "test", new ByteBuffer[] { inBuf[1], inBuf[2] }, new long[] { 483, 1625 }, 1674, codec, 400); inStream = new DataInputStream(@in); positions[303].reset(); @in.seek(positions[303]); for (int i = 303; i < 1024; ++i) { Assert.Equal(i, inStream.readInt()); }
// Stream missing the middle range: read the head, then seek over the hole.
@in = InStream.create(null, "test", new ByteBuffer[] { inBuf[0], inBuf[2] }, new long[] { 0, 1625 }, 1674, codec, 400); inStream = new DataInputStream(@in); positions[1001].reset(); for (int i = 0; i < 300; ++i) { Assert.Equal(i, inStream.readInt()); } @in.seek(positions[1001]); for (int i = 1001; i < 1024; ++i) { Assert.Equal(i, inStream.readInt()); } }
/// <summary>Find out the number of bytes in the block that match its crc.</summary>
/// <remarks>
/// Find out the number of bytes in the block that match its crc.
/// This algorithm assumes that data corruption caused by unexpected
/// datanode shutdown occurs only in the last crc chunk. So it checks
/// only the last chunk.
/// </remarks>
/// <param name="blockFile">the block file</param>
/// <param name="genStamp">generation stamp of the block</param>
/// <returns>the number of valid bytes</returns>
private long ValidateIntegrityAndSetLength(FilePath blockFile, long genStamp) { DataInputStream checksumIn = null; InputStream blockIn = null; try { FilePath metaFile = FsDatasetUtil.GetMetaFile(blockFile, genStamp); long blockFileLen = blockFile.Length(); long metaFileLen = metaFile.Length(); int crcHeaderLen = DataChecksum.GetChecksumHeaderSize();
// Nothing to validate if either file is missing or too short to hold data.
if (!blockFile.Exists() || blockFileLen == 0 || !metaFile.Exists() || metaFileLen < crcHeaderLen) { return(0); } checksumIn = new DataInputStream(new BufferedInputStream(new FileInputStream(metaFile ), HdfsConstants.IoFileBufferSize));
// read and handle the common header here. For now just a version
DataChecksum checksum = BlockMetadataHeader.ReadDataChecksum(checksumIn, metaFile ); int bytesPerChecksum = checksum.GetBytesPerChecksum(); int checksumSize = checksum.GetChecksumSize();
// Number of complete-or-partial chunks covered by BOTH the data and meta files.
long numChunks = Math.Min((blockFileLen + bytesPerChecksum - 1) / bytesPerChecksum , (metaFileLen - crcHeaderLen) / checksumSize); if (numChunks == 0) { return(0); }
// Skip straight to the last chunk and its checksum; earlier chunks are trusted.
IOUtils.SkipFully(checksumIn, (numChunks - 1) * checksumSize); blockIn = new FileInputStream(blockFile); long lastChunkStartPos = (numChunks - 1) * bytesPerChecksum; IOUtils.SkipFully(blockIn, lastChunkStartPos); int lastChunkSize = (int)Math.Min(bytesPerChecksum, blockFileLen - lastChunkStartPos ); byte[] buf = new byte[lastChunkSize + checksumSize]; checksumIn.ReadFully(buf, lastChunkSize, checksumSize); IOUtils.ReadFully(blockIn, buf, 0, lastChunkSize); checksum.Update(buf, 0, lastChunkSize); long validFileLength; if (checksum.Compare(buf, lastChunkSize)) { // last chunk matches crc
validFileLength = lastChunkStartPos + lastChunkSize; } else { // last chunk is corrupt
validFileLength = lastChunkStartPos; }
// truncate if extra bytes are present without CRC
if (blockFile.Length() > validFileLength) { RandomAccessFile blockRAF = new RandomAccessFile(blockFile, "rw"); try { // truncate blockFile
blockRAF.SetLength(validFileLength); } finally { blockRAF.Close(); } } return(validFileLength); } catch (IOException e) { FsDatasetImpl.Log.Warn(e); return(0); } finally { IOUtils.CloseStream(checksumIn); IOUtils.CloseStream(blockIn); } }
/// <summary>
/// Reads two string arguments from the input stream and forwards them to the
/// dashboard service's GetItems call, storing the service response in _result.
/// </summary>
/// <param name="session">session used to resolve the service client</param>
/// <param name="input">stream carrying the two serialized string arguments</param>
public void Invoke(HttpSessionState session, DataInputStream input)
{
    // Preserve the original evaluation order: client lookup first,
    // then the two stream reads in sequence.
    var client = HttpProcessor.GetClient <DashboardServiceSoapClient>(session);
    string firstArg = input.ReadString();
    string secondArg = input.ReadString();
    _result = client.GetItems(firstArg, secondArg);
}
/// <summary>
/// Reads an 8-byte double from the underlying buffer, routing the bytes through
/// ReadSwapped so the stream's byte-order handling is applied before decoding.
/// </summary>
/// <returns>the decoded double value</returns>
public virtual double ReadDouble()
{
    FillBuffer(8);
    var byteArrayInputStream = new ByteArrayInputStream(ReadSwapped(8));
    var dataInputStream = new DataInputStream(byteArrayInputStream);
    try
    {
        return dataInputStream.ReadDouble();
    }
    finally
    {
        // Close in finally so the wrapper streams are released even when
        // ReadDouble throws; the original only closed them on success.
        dataInputStream.Close();
        byteArrayInputStream.Close();
    }
}