/// <summary>
/// Serializes this message into a byte array: the payload accumulated in <c>dos</c>/<c>ms</c>
/// is re-framed through a second stream as [command byte][short length][payload],
/// then the first 3 framing bytes are stripped again before returning.
/// Returns <c>null</c> if an <see cref="IOException"/> occurred.
/// </summary>
public byte[] toByteArray()
{
    short datalen = 0;
    byte[] data = null;
    byte[] bytes = null;
    byte[] byteNew = null;
    try
    {
        // Flush and capture whatever has been written to the message body so far.
        if (dos != null)
        {
            dos.Flush();
            data = ms.ToArray();
            datalen = (short)data.Length;
            dos.Close();
        }
        // Re-frame: 1 command byte + 2-byte length + payload = datalen + 3 bytes.
        MemoryStream bos1 = new MemoryStream(datalen + 3);
        DataOutputStream dos1 = new DataOutputStream(new BinaryWriterIns(bos1));
        dos1.WriteByteNew(command);
        dos1.WriteShort(datalen);
        if (datalen > 0)
        {
            dos1.Write(data);
        }
        bytes = bos1.ToArray();
        // Strip the 3 framing bytes back off, leaving only the payload.
        byteNew = new byte[bytes.Length - 3];
        int n = byteNew.Length;
        Array.Copy(bytes, 3, byteNew, 0, n);
        // NOTE(review): this overwrites the FIRST PAYLOAD BYTE with the command byte —
        // confirm this is intentional protocol behavior and not a copy/paste slip.
        // NOTE(review): when datalen == 0, byteNew is empty and this indexing throws
        // IndexOutOfRangeException, which is NOT caught by the IOException handler below.
        byteNew[0] = (byte)command;
        dos1.Close();
    }
    catch (IOException e)
    {
        // Best-effort: log and fall through, returning null.
        Debug.Log(e.ToString());
    }
    return byteNew;
}
/// <summary>
/// Heartbeat request: POSTs the JSON-formatted heartbeat parameters to
/// <c>Settings.url</c> and, on HTTP 200, reads and parses the response via
/// <see cref="ParseHeartBeatResult"/>. Other status codes are reported with a toast.
/// Exceptions are caught and logged to the console.
/// </summary>
public void HeartBeat()
{
    try
    {
        // Create the HTTP connection
        using (HttpURLConnection httpConn = Settings.url.OpenConnection() as HttpURLConnection)
        {
            httpConn.RequestMethod = "POST";            // use POST
            httpConn.UseCaches = false;                 // disable caching
            httpConn.DoOutput = true;                   // enable the output stream
            httpConn.DoInput = true;                    // enable the input stream
            httpConn.InstanceFollowRedirects = true;    // follow HTTP redirects
            //httpConn.SetRequestProperty("Content-Type", "application/x-www-form-urlencoded");
            httpConn.SetRequestProperty("Content-Type", "application/json"); // request content type
            httpConn.ConnectTimeout = 10000;            // connect timeout (ms)
            // Get the output stream and write the heartbeat parameters.
            using (DataOutputStream outStream = new DataOutputStream(httpConn.OutputStream))
            {
                // Populate default heartbeat parameters if missing.
                if (!Settings.HeartBeatParams.Has("action"))
                {
                    Settings.HeartBeatParams.Put("action", "AdSubAppHeartBeat");
                }
                if (!Settings.HeartBeatParams.Has("cpuId"))
                {
                    //Settings.HeartBeatParams.Put("cpuId", Settings.CpuId);
                    // NOTE(review): hard-coded id; Settings.CpuId is commented out — confirm intended.
                    Settings.HeartBeatParams.Put("cpuId", "666999");
                }
                if (!Settings.HeartBeatParams.Has("version"))
                {
                    Settings.HeartBeatParams.Put("version", Settings.Version);
                }
                // Strip whitespace/newlines from the JSON body before sending.
                outStream.WriteBytes(Settings.HeartBeatParams.ToString().Replace("\r", "").Replace("\n", "").Replace(" ", ""));
                // These are one-shot fields: clear them after they have been sent once.
                Settings.HeartBeatParams.Remove("lastCmd");
                Settings.HeartBeatParams.Remove("errMsg");
                outStream.Flush(); // push buffered data now
            }
            // Check whether the request succeeded.
            if (httpConn.ResponseCode == HttpStatus.Ok)
            {
                using (InputStreamReader inStream = new InputStreamReader(httpConn.InputStream)) // input stream
                using (BufferedReader buffer = new BufferedReader(inStream))                     // buffered reader
                {
                    string inputLine = null, heartBeatResult = null;
                    while ((inputLine = buffer.ReadLine()) != null)
                    {
                        heartBeatResult += inputLine + "\n";
                    }
                    // Parse the heartbeat response payload.
                    ParseHeartBeatResult(heartBeatResult);
                }
            }
            else
            {
                long Code = (long)httpConn.ResponseCode;
                // HTTP error: surface it to the user on the UI thread.
                RunOnUiThread(() =>
                {
                    Toast.MakeText(this, "心跳线程: HTTP error code " + Code, ToastLength.Long).Show();
                });
            }
            httpConn.Disconnect(); // drop the HTTP connection
        }
    }
    catch (Exception e)
    {
        // BUG FIX: the original log string contained a raw line break inside the
        // literal, which is not valid in a regular C# string.
        System.Console.WriteLine("HeartBeat Exception: " + e.Message);
    }
}
/// <summary>
/// Serializes the compiled RBBI (rule-based break iterator) data to <paramref name="os"/>
/// in the ICU binary format: ICU data header, RBBIDataHeader, forward state table,
/// safe-reverse (or fallback reverse) state table, trie, rule-status table, and the
/// stripped rule source. Section order and 8-byte padding are part of the format and
/// must not be changed.
/// </summary>
/// <param name="os">Destination stream.</param>
internal void FlattenData(Stream os)
{
    DataOutputStream dos = new DataOutputStream(os);
    int i;

    // Remove comments and whitespace from the rules to make it smaller.
    string strippedRules = RBBIRuleScanner.StripRules(fRules);

    // Calculate the size of each section in the data in bytes.
    // Sizes here are padded up to a multiple of 8 for better memory alignment.
    // Section sizes actually stored in the header are for the actual data
    // without the padding.
    int headerSize = 24 * 4; // align8(sizeof(RBBIDataHeader));
    int forwardTableSize = Align8(fForwardTables.GetTableSize());
    int reverseTableSize = Align8(fReverseTables.GetTableSize());
    int safeRevTableSize = Align8(fSafeRevTables.GetTableSize());
    int trieSize = Align8(fSetBuilder.GetTrieSize());
    int statusTableSize = Align8(fRuleStatusVals.Count * 4);
    int rulesSize = Align8((strippedRules.Length) * 2); // UTF-16: 2 bytes per char
    // Reverse and safe-forward tables are not saved (zero contribution); if no safe
    // reverse table exists, the plain reverse table is stored in its place.
    int totalSize = headerSize + forwardTableSize + /* reverseTableSize */ 0 + /* safeFwdTableSize */ 0
        + (safeRevTableSize > 0 ? safeRevTableSize : reverseTableSize)
        + statusTableSize + trieSize + rulesSize;
    int outputPos = 0; // Track stream position, starting from RBBIDataHeader.

    // Write out an ICU Data Header
    ICUBinary.WriteHeader(RBBIDataWrapper.DATA_FORMAT, RBBIDataWrapper.FORMAT_VERSION, 0, dos);

    // Write out the RBBIDataHeader
    int[] header = new int[RBBIDataWrapper.DH_SIZE]; // sizeof struct RBBIDataHeader
    header[RBBIDataWrapper.DH_MAGIC] = 0xb1a0;
    header[RBBIDataWrapper.DH_FORMATVERSION] = RBBIDataWrapper.FORMAT_VERSION;
    header[RBBIDataWrapper.DH_LENGTH] = totalSize; // fLength, the total size of all rule sections.
    header[RBBIDataWrapper.DH_CATCOUNT] = fSetBuilder.NumCharCategories; // fCatCount.

    // Only save the forward table and the safe reverse table, because these are the
    // only ones used at run-time. For the moment, we still build the other tables if
    // they are present in the rule source files, for backwards compatibility. Old rule
    // files need to work, and this is the simplest approach.
    //
    // Additional backwards compatibility consideration: if no safe rules are provided,
    // consider the reverse rules to actually be the safe reverse rules.
    header[RBBIDataWrapper.DH_FTABLE] = headerSize; // fFTable
    header[RBBIDataWrapper.DH_FTABLELEN] = forwardTableSize; // fTableLen
    // Do not save Reverse Table.
    header[RBBIDataWrapper.DH_RTABLE] = header[RBBIDataWrapper.DH_FTABLE] + forwardTableSize; // fRTable
    header[RBBIDataWrapper.DH_RTABLELEN] = 0; // fRTableLen
    // Do not save the Safe Forward table.
    header[RBBIDataWrapper.DH_SFTABLE] = header[RBBIDataWrapper.DH_RTABLE] + 0; // fSTable
    header[RBBIDataWrapper.DH_SFTABLELEN] = 0; // fSTableLen
    // Safe reverse table. Use if present, otherwise save regular reverse table as the safe reverse.
    header[RBBIDataWrapper.DH_SRTABLE] = header[RBBIDataWrapper.DH_SFTABLE] + 0; // fSRTable
    if (safeRevTableSize > 0)
    {
        header[RBBIDataWrapper.DH_SRTABLELEN] = safeRevTableSize;
    }
    else
    {
        Debug.Assert(reverseTableSize > 0);
        header[RBBIDataWrapper.DH_SRTABLELEN] = reverseTableSize;
    }
    header[RBBIDataWrapper.DH_TRIE] = header[RBBIDataWrapper.DH_SRTABLE] + header[RBBIDataWrapper.DH_SRTABLELEN]; // fTrie
    header[RBBIDataWrapper.DH_TRIELEN] = fSetBuilder.GetTrieSize(); // fTrieLen
    header[RBBIDataWrapper.DH_STATUSTABLE] = header[RBBIDataWrapper.DH_TRIE] + header[RBBIDataWrapper.DH_TRIELEN];
    header[RBBIDataWrapper.DH_STATUSTABLELEN] = statusTableSize; // fStatusTableLen
    header[RBBIDataWrapper.DH_RULESOURCE] = header[RBBIDataWrapper.DH_STATUSTABLE] + statusTableSize;
    header[RBBIDataWrapper.DH_RULESOURCELEN] = strippedRules.Length * 2;
    for (i = 0; i < header.Length; i++)
    {
        dos.WriteInt32(header[i]);
        outputPos += 4;
    }

    // Write out the actual state tables.
    short[] tableData;
    tableData = fForwardTables.ExportTable();
    Assert.Assrt(outputPos == header[4]); // must land on the recorded fFTable offset
    for (i = 0; i < tableData.Length; i++)
    {
        dos.WriteInt16(tableData[i]);
        outputPos += 2;
    }
    // (The plain reverse table and the safe forward table are deliberately not written.)

    // Write the safe reverse table.
    // If not present, write the plain reverse table (old style rule compatibility).
    Assert.Assrt(outputPos == header[10]);
    if (safeRevTableSize > 0)
    {
        tableData = fSafeRevTables.ExportTable();
    }
    else
    {
        tableData = fReverseTables.ExportTable();
    }
    for (i = 0; i < tableData.Length; i++)
    {
        dos.WriteInt16(tableData[i]);
        outputPos += 2;
    }

    // Write out the Trie table.
    Assert.Assrt(outputPos == header[12]);
    fSetBuilder.SerializeTrie(os);
    outputPos += header[13];
    while (outputPos % 8 != 0)
    {
        // pad to an 8 byte boundary
        dos.Write(0);
        outputPos += 1;
    }

    // Write out the status {tag} table.
    Assert.Assrt(outputPos == header[16]);
    foreach (var val in fRuleStatusVals)
    {
        dos.WriteInt32(val);
        outputPos += 4;
    }
    while (outputPos % 8 != 0)
    {
        // pad to an 8 byte boundary
        dos.Write(0);
        outputPos += 1;
    }

    // Write out the stripped rules (rules with extra spaces removed).
    // These go last in the data area, even though they are not last in the header.
    Assert.Assrt(outputPos == header[14]);
    dos.WriteChars(strippedRules);
    outputPos += strippedRules.Length * 2;
    while (outputPos % 8 != 0)
    {
        // pad to an 8 byte boundary
        dos.Write(0);
        outputPos += 1;
    }
}
/// <summary>
/// Runs a small end-to-end MapReduce job through <c>MultithreadedMapRunner</c> (2 threads)
/// over a six-line input file, optionally injecting an IOException or RuntimeException
/// via job configuration, and asserts that the job succeeds exactly when no exception
/// was injected.
/// </summary>
/// <param name="ioEx">If true, configures the map to throw an IOException.</param>
/// <param name="rtEx">If true, configures the map to throw a RuntimeException.</param>
/// <exception cref="System.Exception"/>
private void Run(bool ioEx, bool rtEx)
{
    Path inDir = new Path("testing/mt/input");
    Path outDir = new Path("testing/mt/output");
    // Hack for local FS that does not have the concept of a 'mounting point'
    if (IsLocalFS())
    {
        string localPathRoot = Runtime.GetProperty("test.build.data", "/tmp").Replace(' ', '+');
        inDir = new Path(localPathRoot, inDir);
        outDir = new Path(localPathRoot, outDir);
    }
    JobConf conf = CreateJobConf();
    FileSystem fs = FileSystem.Get(conf);
    // Start from a clean output directory and a fresh input file.
    fs.Delete(outDir, true);
    if (!fs.Mkdirs(inDir))
    {
        throw new IOException("Mkdirs failed to create " + inDir.ToString());
    }
    {
        DataOutputStream file = fs.Create(new Path(inDir, "part-0"));
        file.WriteBytes("a\nb\n\nc\nd\ne");
        file.Close();
    }
    conf.SetJobName("mt");
    conf.SetInputFormat(typeof(TextInputFormat));
    conf.SetOutputKeyClass(typeof(LongWritable));
    conf.SetOutputValueClass(typeof(Text));
    conf.SetMapOutputKeyClass(typeof(LongWritable));
    conf.SetMapOutputValueClass(typeof(Text));
    conf.SetOutputFormat(typeof(TextOutputFormat));
    conf.SetOutputKeyClass(typeof(LongWritable));
    conf.SetOutputValueClass(typeof(Text));
    conf.SetMapperClass(typeof(TestMultithreadedMapRunner.IDMap));
    conf.SetReducerClass(typeof(TestMultithreadedMapRunner.IDReduce));
    FileInputFormat.SetInputPaths(conf, inDir);
    FileOutputFormat.SetOutputPath(conf, outDir);
    // The point of the test: run the map phase through the multithreaded runner.
    conf.SetMapRunnerClass(typeof(MultithreadedMapRunner));
    conf.SetInt(MultithreadedMapper.NumThreads, 2);
    // Optionally inject failures that the runner must propagate.
    if (ioEx)
    {
        conf.SetBoolean("multithreaded.ioException", true);
    }
    if (rtEx)
    {
        conf.SetBoolean("multithreaded.runtimeException", true);
    }
    JobClient jc = new JobClient(conf);
    RunningJob job = jc.SubmitJob(conf);
    // Poll until the job finishes.
    while (!job.IsComplete())
    {
        Sharpen.Thread.Sleep(100);
    }
    // Success is expected iff no exception was injected.
    if (job.IsSuccessful())
    {
        NUnit.Framework.Assert.IsFalse(ioEx || rtEx);
    }
    else
    {
        NUnit.Framework.Assert.IsTrue(ioEx || rtEx);
    }
}
/// <summary>Writes the stored submit-question result to the response stream.</summary>
/// <param name="output">Destination stream for the serialized result.</param>
public void Return(DataOutputStream output) => output.WriteSubmitQuestionResult(_result);
/// <summary>Writes the stored get-response result to the response stream.</summary>
/// <param name="output">Destination stream for the serialized result.</param>
public void Return(DataOutputStream output) => output.WriteGetResponseResult(_result);
/// <summary>
/// Serializes this instance to the given output stream. Implementations define
/// the wire format.
/// </summary>
/// <param name="writer">Destination stream.</param>
public abstract void ToOutputStream(DataOutputStream writer);
/// <summary>
/// Writes a single-precision float to the underlying output in swapped byte order:
/// the value is serialized big-endian via <see cref="DataOutputStream.WriteFloat"/>
/// into a temporary buffer, then emitted through <c>WriteSwapped</c> (4 bytes).
/// </summary>
/// <param name="value">The float to write.</param>
/// <exception cref="ObjectDisposedException">If the writer has been closed.</exception>
public virtual void Write(float value)
{
    if (disposed)
        throw new ObjectDisposedException("BinaryWriter", "Cannot write to a closed BinaryWriter");
    var byteArrayOutputStream = new ByteArrayOutputStream();
    var dataOutputStream = new DataOutputStream(byteArrayOutputStream);
    try
    {
        dataOutputStream.WriteFloat(value);
        byteArrayOutputStream.Flush();
        WriteSwapped(byteArrayOutputStream.ToByteArray(), 4);
    }
    finally
    {
        // BUG FIX: previously the streams were only closed on the success path and
        // leaked if WriteFloat/Flush/WriteSwapped threw.
        byteArrayOutputStream.Close();
        dataOutputStream.Close();
    }
}
/// <summary>
/// Creates a message for the given protocol command and prepares an in-memory
/// payload pipeline: <c>ms</c> (backing buffer) → <c>os</c> (writer wrapper) → <c>dos</c>.
/// </summary>
/// <param name="command">Protocol command byte identifying this message type.</param>
public Message(sbyte command)
{
    this.command = command;
    ms = new MemoryStream();
    os = new BinaryWriterIns(ms);
    dos = new DataOutputStream(os);
}
/// <summary>Writes the stored photos-for-collection result to the response stream.</summary>
/// <param name="output">Destination stream for the serialized result.</param>
public void Return(DataOutputStream output) => output.WriteGetPhotosForCollectionResult(_result);
/// <summary>Writes the stored upload-tags result to the response stream.</summary>
/// <param name="output">Destination stream for the serialized result.</param>
public void Return(DataOutputStream output) => output.WriteUploadTagsResult(_result);
/// <summary>
/// Decodes the Base64-encoded stored result and writes the raw bytes to the
/// response stream.
/// </summary>
/// <param name="output">Destination stream for the decoded byte array.</param>
public void Return(DataOutputStream output)
{
    byte[] payload = Convert.FromBase64String(_result);
    output.WriteByteArray(payload);
}
/// <summary>Writes the stored string result to the response stream.</summary>
/// <param name="output">Destination stream for the serialized result.</param>
public void Return(DataOutputStream output) => output.WriteString(_result);
/// <summary>Writes the stored boolean result to the response stream.</summary>
/// <param name="output">Destination stream for the serialized result.</param>
public void Return(DataOutputStream output) => output.WriteBoolean(_result);
/// <summary>
/// Round-trip test for a Hadoop compression codec: generates <paramref name="count"/>
/// random key/value records, compresses them through the codec, decompresses, and
/// verifies (a) record equality, (b) hashCode consistency via a dictionary lookup,
/// and (c) byte-at-a-time decompression matches the original stream.
/// </summary>
/// <param name="conf">Configuration used to instantiate the codec.</param>
/// <param name="seed">Seed for the random record generator (determinism).</param>
/// <param name="count">Number of key/value records to generate.</param>
/// <param name="codecClass">Fully-qualified codec class name.</param>
/// <exception cref="System.IO.IOException"/>
private static void CodecTest(Configuration conf, int seed, int count, string codecClass)
{
    // Create the codec
    CompressionCodec codec = null;
    try
    {
        codec = (CompressionCodec)ReflectionUtils.NewInstance(conf.GetClassByName(codecClass), conf);
    }
    catch (TypeLoadException)
    {
        throw new IOException("Illegal codec!");
    }
    Log.Info("Created a Codec object of type: " + codecClass);
    // Generate data
    DataOutputBuffer data = new DataOutputBuffer();
    RandomDatum.Generator generator = new RandomDatum.Generator(seed);
    for (int i = 0; i < count; ++i)
    {
        generator.Next();
        RandomDatum key = generator.GetKey();
        RandomDatum value = generator.GetValue();
        key.Write(data);
        value.Write(data);
    }
    Log.Info("Generated " + count + " records");
    // Compress data
    DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
    CompressionOutputStream deflateFilter = codec.CreateOutputStream(compressedDataBuffer);
    DataOutputStream deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));
    deflateOut.Write(data.GetData(), 0, data.GetLength());
    deflateOut.Flush();
    deflateFilter.Finish(); // flush any codec-internal state to the buffer
    Log.Info("Finished compressing data");
    // De-compress data
    DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
    deCompressedDataBuffer.Reset(compressedDataBuffer.GetData(), 0, compressedDataBuffer.GetLength());
    CompressionInputStream inflateFilter = codec.CreateInputStream(deCompressedDataBuffer);
    DataInputStream inflateIn = new DataInputStream(new BufferedInputStream(inflateFilter));
    // Check: read originals and decompressed records in lock-step and compare.
    DataInputBuffer originalData = new DataInputBuffer();
    originalData.Reset(data.GetData(), 0, data.GetLength());
    DataInputStream originalIn = new DataInputStream(new BufferedInputStream(originalData));
    for (int i_1 = 0; i_1 < count; ++i_1)
    {
        RandomDatum k1 = new RandomDatum();
        RandomDatum v1 = new RandomDatum();
        k1.ReadFields(originalIn);
        v1.ReadFields(originalIn);
        RandomDatum k2 = new RandomDatum();
        RandomDatum v2 = new RandomDatum();
        k2.ReadFields(inflateIn);
        v2.ReadFields(inflateIn);
        Assert.True("original and compressed-then-decompressed-output not equal", k1.Equals(k2) && v1.Equals(v2));
        // original and compressed-then-decompressed-output have the same hashCode:
        // insert the originals into a map, then look them up with the round-tripped copies.
        IDictionary<RandomDatum, string> m = new Dictionary<RandomDatum, string>();
        m[k1] = k1.ToString();
        m[v1] = v1.ToString();
        string result = m[k2];
        Assert.Equal("k1 and k2 hashcode not equal", result, k1.ToString());
        result = m[v2];
        Assert.Equal("v1 and v2 hashcode not equal", result, v1.ToString());
    }
    // De-compress data byte-at-a-time
    originalData.Reset(data.GetData(), 0, data.GetLength());
    deCompressedDataBuffer.Reset(compressedDataBuffer.GetData(), 0, compressedDataBuffer.GetLength());
    inflateFilter = codec.CreateInputStream(deCompressedDataBuffer);
    // Check: single-byte reads must match until both streams hit EOF (-1).
    originalIn = new DataInputStream(new BufferedInputStream(originalData));
    int expected;
    do
    {
        expected = originalIn.Read();
        Assert.Equal("Inflated stream read by byte does not match", expected, inflateFilter.Read());
    }
    while (expected != -1);
    Log.Info("SUCCESS! Completed checking " + count + " records");
}
/// <summary>Writes the stored get-comment result to the response stream.</summary>
/// <param name="output">Destination stream for the serialized result.</param>
public void Return(DataOutputStream output) => output.WriteGetCommentResult(_result);
/// <summary>
/// Marshals this aggregate-state PDU to the stream in DIS wire order: base PDU fields,
/// scalar/record fields, the four record-list lengths, then the record lists themselves,
/// the pad byte, the silent-system lists, and finally the variable datum list.
/// Field order is dictated by the DIS standard and must not change.
/// Exceptions are traced/raised and optionally rethrown per <c>PduBase</c> flags.
/// </summary>
/// <param name="dos">The DataOutputStream instance to which the PDU is marshaled.</param>
public override void Marshal(DataOutputStream dos)
{
    base.Marshal(dos);
    if (dos != null)
    {
        try
        {
            this._aggregateID.Marshal(dos);
            dos.WriteUnsignedByte((byte)this._forceID);
            dos.WriteUnsignedByte((byte)this._aggregateState);
            this._aggregateType.Marshal(dos);
            dos.WriteUnsignedInt((uint)this._formation);
            this._aggregateMarking.Marshal(dos);
            this._dimensions.Marshal(dos);
            this._orientation.Marshal(dos);
            this._centerOfMass.Marshal(dos);
            this._velocity.Marshal(dos);
            // List lengths precede the list contents on the wire.
            dos.WriteUnsignedShort((ushort)this._aggregateIDList.Count);
            dos.WriteUnsignedShort((ushort)this._entityIDList.Count);
            dos.WriteUnsignedShort((ushort)this._silentAggregateSystemList.Count);
            dos.WriteUnsignedShort((ushort)this._silentEntitySystemList.Count);
            for (int idx = 0; idx < this._aggregateIDList.Count; idx++)
            {
                AggregateID aAggregateID = (AggregateID)this._aggregateIDList[idx];
                aAggregateID.Marshal(dos);
            }
            for (int idx = 0; idx < this._entityIDList.Count; idx++)
            {
                EntityID aEntityID = (EntityID)this._entityIDList[idx];
                aEntityID.Marshal(dos);
            }
            dos.WriteUnsignedByte((byte)this._pad2); // alignment pad required by the format
            for (int idx = 0; idx < this._silentAggregateSystemList.Count; idx++)
            {
                EntityType aEntityType = (EntityType)this._silentAggregateSystemList[idx];
                aEntityType.Marshal(dos);
            }
            for (int idx = 0; idx < this._silentEntitySystemList.Count; idx++)
            {
                EntityType aEntityType = (EntityType)this._silentEntitySystemList[idx];
                aEntityType.Marshal(dos);
            }
            dos.WriteUnsignedInt((uint)this._variableDatumList.Count);
            for (int idx = 0; idx < this._variableDatumList.Count; idx++)
            {
                VariableDatum aVariableDatum = (VariableDatum)this._variableDatumList[idx];
                aVariableDatum.Marshal(dos);
            }
        }
        catch (Exception e)
        {
            if (PduBase.TraceExceptions)
            {
                Trace.WriteLine(e);
                Trace.Flush();
            }
            this.RaiseExceptionOccured(e);
            if (PduBase.ThrowExceptions)
            {
                // BUG FIX: was `throw e;`, which resets the original stack trace;
                // a bare rethrow preserves it.
                throw;
            }
        }
    }
}
/// <summary>
/// Writes the buffered uncompressed bytes (the <c>dataSize</c>-byte window starting
/// at <c>start</c>) to the given stream.
/// </summary>
/// <param name="outStream">Destination stream.</param>
/// <exception cref="System.IO.IOException"/>
public virtual void WriteUncompressedBytes(DataOutputStream outStream) => outStream.Write(data, start, dataSize);
/// <summary>
/// Tests reading a zlib-compressed ORC stream whose compressed bytes are split across
/// three disjoint ByteBuffers: verifies sequential reads, random seeks via recorded
/// positions, and reads when only a subset of the buffers is supplied.
/// The literal sizes/offsets (1674, 483, 1625, ...) are the known compressed layout
/// of 1024 sequential ints at compression block size 400 and must stay in sync with
/// the codec behavior.
/// </summary>
public void testDisjointBuffers()
{
    OutputCollector collect = new OutputCollector();
    CompressionCodec codec = new ZlibCodec();
    OutStream @out = new OutStream("test", 400, codec, collect);
    PositionCollector[] positions = new PositionCollector[1024];
    DataOutput stream = new DataOutputStream(@out);
    // Write 1024 ints, recording the stream position before each one.
    for (int i = 0; i < 1024; ++i)
    {
        positions[i] = new PositionCollector();
        @out.getPosition(positions[i]);
        stream.writeInt(i);
    }
    @out.Flush();
    Assert.Equal("test", @out.ToString());
    Assert.Equal(1674, collect.buffer.size());
    // Split the compressed bytes across three separate buffers.
    ByteBuffer[] inBuf = new ByteBuffer[3];
    inBuf[0] = ByteBuffer.allocate(500);
    inBuf[1] = ByteBuffer.allocate(1200);
    inBuf[2] = ByteBuffer.allocate(500);
    collect.buffer.setByteBuffer(inBuf[0], 0, 483);
    collect.buffer.setByteBuffer(inBuf[1], 483, 1625 - 483);
    collect.buffer.setByteBuffer(inBuf[2], 1625, 1674 - 1625);
    for (int i = 0; i < inBuf.Length; ++i)
    {
        inBuf[i].flip();
    }
    InStream @in = InStream.create(null, "test", inBuf, new long[] { 0, 483, 1625 }, 1674, codec, 400);
    Assert.Equal("compressed stream test position: 0 length: 1674 range: 0" + " offset: 0 limit: 0 range 0 = 0 to 483;" + " range 1 = 483 to 1142; range 2 = 1625 to 49", @in.ToString());
    DataInputStream inStream = new DataInputStream(@in);
    // Sequential read across all three buffers.
    for (int i = 0; i < 1024; ++i)
    {
        int x = inStream.readInt();
        Assert.Equal(i, x);
    }
    Assert.Equal(0, @in.available());
    // Random access: seek to each recorded position in reverse order.
    for (int i = 1023; i >= 0; --i)
    {
        @in.seek(positions[i]);
        Assert.Equal(i, inStream.readInt());
    }
    // Read with only the last two buffers present (data from 483 onward).
    @in = InStream.create(null, "test", new ByteBuffer[] { inBuf[1], inBuf[2] }, new long[] { 483, 1625 }, 1674, codec, 400);
    inStream = new DataInputStream(@in);
    positions[303].reset();
    @in.seek(positions[303]);
    for (int i = 303; i < 1024; ++i)
    {
        Assert.Equal(i, inStream.readInt());
    }
    // Read with a gap in the middle (first and last buffers only).
    @in = InStream.create(null, "test", new ByteBuffer[] { inBuf[0], inBuf[2] }, new long[] { 0, 1625 }, 1674, codec, 400);
    inStream = new DataInputStream(@in);
    positions[1001].reset();
    for (int i = 0; i < 300; ++i)
    {
        Assert.Equal(i, inStream.readInt());
    }
    @in.seek(positions[1001]);
    for (int i = 1001; i < 1024; ++i)
    {
        Assert.Equal(i, inStream.readInt());
    }
}
/// <summary>
/// Not supported for uncompressed values; always throws.
/// </summary>
/// <param name="outStream">Unused.</param>
/// <exception cref="System.ArgumentException"/>
/// <exception cref="System.IO.IOException"/>
public virtual void WriteCompressedBytes(DataOutputStream outStream)
    => throw new ArgumentException("UncompressedBytes cannot be compressed!");
/// <summary>Writes the stored Int32 array result to the response stream.</summary>
/// <param name="output">Destination stream for the serialized result.</param>
public void Return(DataOutputStream output) => output.WriteInt32Array(_result);
/// <summary>
/// Dispatches the request to the registered method handler, writes the success
/// status code, then lets the handler serialize its result to the output stream.
/// </summary>
/// <param name="session">Current HTTP session.</param>
/// <param name="requestID">Index of the handler in the method registry.</param>
/// <param name="input">Request payload to deserialize arguments from.</param>
/// <param name="output">Response stream.</param>
private static void InvokeMethod(HttpSessionState session, Int32 requestID, DataInputStream input, DataOutputStream output)
{
    var handler = _methods[requestID];
    handler.Invoke(session, input);
    output.WriteInt16(HttpProcessor.RESULT_SUCCESSFUL);
    handler.Return(output);
}
/// <summary>
/// Writes one jar-archive class entry: the class name (modified UTF-8) followed by
/// the class metadata. Write order is part of the format.
/// </summary>
/// <param name="stream">Destination stream.</param>
/// <param name="name">Class name to record.</param>
/// <param name="class">Class metadata to serialize after the name.</param>
private static void WriteJarArchiveClass(this DataOutputStream stream, string name, MetadataJavaClass @class)
{
    stream.WriteUTF(name);
    stream.WriteJarClass(@class);
}
/// <summary>
/// Entry point to the Compile application.
/// <para/>
/// This program takes any number of arguments: the first is the name of the
/// desired stemming algorithm to use (a list is available in the package
/// description), all of the rest should be the path or paths to a file or
/// files containing a stemmer table to compile.
/// </summary>
/// <param name="args">the command line arguments</param>
public static void Main(string[] args)
{
    if (args.Length < 1)
    {
        return;
    }
    // Decode the option characters packed into the first argument:
    // leading '-' selects backward mode, '0' stores originals, 'M' selects multi mode;
    // the remaining characters name the optimizer passes to run.
    backward = args[0][0] == '-';
    int qq = (backward) ? 1 : 0;
    bool storeorig = false;
    if (args[0][qq] == '0')
    {
        storeorig = true;
        qq++;
    }
    multi = args[0][qq] == 'M';
    if (multi)
    {
        qq++;
    }
    string charset = SystemProperties.GetProperty("egothor.stemmer.charset", "UTF-8");
    var stemmerTables = new List<string>();
    // Command line argument overrides environment variable or default, if supplied.
    for (int i = 1; i < args.Length; i++)
    {
        if ("-e".Equals(args[i], StringComparison.Ordinal) || "--encoding".Equals(args[i], StringComparison.Ordinal))
        {
            // BUG FIX: previously this assigned the flag itself ("-e"/"--encoding")
            // to charset. The encoding name is the token that FOLLOWS the flag.
            if (i + 1 < args.Length)
            {
                charset = args[++i];
            }
        }
        else
        {
            stemmerTables.Add(args[i]);
        }
    }
    // Remaining characters of args[0] are the optimizer pass sequence.
    char[] optimizer = new char[args[0].Length - qq];
    for (int i = 0; i < optimizer.Length; i++)
    {
        optimizer[i] = args[0][qq + i];
    }
    foreach (var stemmerTable in stemmerTables)
    {
        Diff diff = new Diff();
        int words = 0;
        AllocTrie();
        Console.WriteLine(stemmerTable);
        // Each input line is: <stem> <derived form>*; every derived form is stored
        // in the trie keyed by the diff command that rewrites it back to the stem.
        using (TextReader input = new StreamReader(
            new FileStream(stemmerTable, FileMode.Open, FileAccess.Read), Encoding.GetEncoding(charset)))
        {
            string line;
            while ((line = input.ReadLine()) != null)
            {
                try
                {
                    line = line.ToLowerInvariant();
                    StringTokenizer st = new StringTokenizer(line);
                    string stem = st.NextToken();
                    if (storeorig)
                    {
                        trie.Add(stem, "-a");
                        words++;
                    }
                    while (st.HasMoreTokens())
                    {
                        string token = st.NextToken();
                        if (token.Equals(stem, StringComparison.Ordinal) == false)
                        {
                            trie.Add(token, diff.Exec(token, stem));
                            words++;
                        }
                    }
                }
                catch (InvalidOperationException /*x*/)
                {
                    // no base token (stem) on a line
                }
            }
        }
        // Run the requested optimizer/reduction passes in the order given.
        Optimizer o = new Optimizer();
        Optimizer2 o2 = new Optimizer2();
        Lift l = new Lift(true);
        Lift e = new Lift(false);
        Gener g = new Gener();
        for (int j = 0; j < optimizer.Length; j++)
        {
            string prefix;
            switch (optimizer[j])
            {
                case 'G':
                    trie = trie.Reduce(g);
                    prefix = "G: ";
                    break;
                case 'L':
                    trie = trie.Reduce(l);
                    prefix = "L: ";
                    break;
                case 'E':
                    trie = trie.Reduce(e);
                    prefix = "E: ";
                    break;
                case '2':
                    trie = trie.Reduce(o2);
                    prefix = "2: ";
                    break;
                case '1':
                    trie = trie.Reduce(o);
                    prefix = "1: ";
                    break;
                default:
                    continue;
            }
            trie.PrintInfo(Console.Out, prefix + " ");
        }
        // Persist the compiled trie next to the source table, prefixed by the
        // original options string so the loader knows how it was built.
        using (DataOutputStream os = new DataOutputStream(
            new FileStream(stemmerTable + ".out", FileMode.OpenOrCreate, FileAccess.Write)))
        {
            os.WriteUTF(args[0]);
            trie.Store(os);
        }
    }
}
/// <summary>Writes the Java access-flags bitmask as a 32-bit integer.</summary>
/// <param name="stream">Destination stream.</param>
/// <param name="flags">Access flags to serialize.</param>
private static void WriteAccessFlagsParameter(this DataOutputStream stream, JavaAccessFlags flags) => stream.WriteInt((int)flags);
/// <summary>
/// Writes nothing: this handler's method returns void, so the response carries
/// no result payload.
/// </summary>
/// <param name="output">Response stream (unused).</param>
public void Return(DataOutputStream output) { /* Void. Do nothing. */ }
/// <summary>
/// Writes one Java method parameter: name then type, both as modified UTF-8.
/// Write order is part of the format.
/// </summary>
/// <param name="stream">Destination stream.</param>
/// <param name="parameter">Parameter metadata to serialize.</param>
private static void WriteJavaParameter(this DataOutputStream stream, MetadataJavaMethodParameter parameter)
{
    stream.WriteUTF(parameter.Name);
    stream.WriteUTF(parameter.Type);
}
/// <summary>
/// Opens (or creates) a Minecraft region file: ensures the two 4 KiB header tables
/// (chunk offsets and chunk timestamps, 1024 ints each) exist, pads the file to a
/// 4 KiB boundary, builds the free-sector map, and loads both header tables.
/// IOExceptions are caught and printed; the instance may then be partially initialized.
/// </summary>
/// <param name="file">Path of the region file on disk.</param>
public RegionFile(string file)
{
    // Referenced classes of package net.minecraft.src:
    //     RegionFileChunkBuffer
    lastModified = 0L;
    fileName = file;
    Debugln((new java.lang.StringBuilder()).Append("REGION LOAD ").Append(fileName).ToString());
    sizeDelta = 0;
    try
    {
        if (File.Exists(file))
        {
            // Record the last-modified time as Unix epoch millis.
            lastModified = new DateTimeOffset(File.GetLastWriteTime(file)).ToUnixTimeMilliseconds();
        }
        string dir = System.IO.Path.GetDirectoryName(file);
        if (!Directory.Exists(dir))
        {
            Directory.CreateDirectory(dir);
        }
        // Both wrappers share the same underlying FileStream (random-access file emulation).
        fs = File.Open(file, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite);
        dataFile = new DataOutputStream(fs); //new java.io.RandomAccessFile(file, "rw");
        dataFileIn = new DataInputStream(fs);
        if (dataFile.Length < 4096L)
        {
            // New/truncated file: write the two empty 4 KiB header tables.
            for (int i = 0; i < 1024; i++)
            {
                dataFile.WriteInt(0);
            }
            for (int j = 0; j < 1024; j++)
            {
                dataFile.WriteInt(0);
            }
            sizeDelta += 8192;
        }
        if ((dataFile.Length & 4095L) != 0L)
        {
            // NOTE(review): this writes (length % 4096) pad bytes rather than
            // (4096 - length % 4096); it mirrors the original Minecraft code,
            // so it is preserved, but it does not always reach the next boundary.
            for (int k = 0; (long)k < (dataFile.Length & 4095L); k++)
            {
                dataFile.Write(0);
            }
        }
        // One flag per 4 KiB sector; sectors 0 and 1 hold the header tables.
        int l = (int)dataFile.Length / 4096;
        sectorFree = new System.Collections.ArrayList(l);
        for (int i1 = 0; i1 < l; i1++)
        {
            sectorFree.Add(true);
        }
        sectorFree[0] = false;
        sectorFree[1] = false;
        dataFile.Seek(0L);
        // Offsets table: each entry packs (sector index << 8) | sector count.
        for (int j1 = 0; j1 < 1024; j1++)
        {
            int l1 = dataFileIn.ReadInt();
            offsets[j1] = l1;
            if (l1 == 0 || (l1 >> 8) + (l1 & 0xff) > sectorFree.Count)
            {
                // Unused entry, or entry pointing past the end of file: skip.
                continue;
            }
            for (int j2 = 0; j2 < (l1 & 0xff); j2++)
            {
                sectorFree[(l1 >> 8) + j2] = false;
            }
        }
        // Timestamps table follows the offsets table.
        for (int k1 = 0; k1 < 1024; k1++)
        {
            int i2 = dataFileIn.ReadInt();
            chunkTimestamps[k1] = i2;
        }
    }
    catch (System.IO.IOException ioexception)
    {
        // Best-effort: log and continue with whatever was initialized.
        Sharpen.Runtime.PrintStackTrace(ioexception);
    }
}
/// <summary>
/// Writes a string to the specified DataOutput using
/// <a href="DataInput.html#modified-utf-8">modified UTF-8</a>
/// encoding in a machine-independent manner.
/// <para>
/// First, two bytes are written to out as if by the <code>writeShort</code>
/// method giving the number of bytes to follow. This value is the number of
/// bytes actually written out, not the length of the string. Following the
/// length, each character of the string is output, in sequence, using the
/// modified UTF-8 encoding for the character. If no exception is thrown, the
/// counter <code>written</code> is incremented by the total number of
/// bytes written to the output stream. This will be at least two
/// plus the length of <code>str</code>, and at most two plus
/// thrice the length of <code>str</code>.
/// </para>
/// </summary>
/// <param name="str"> a string to be written. </param>
/// <param name="out"> destination to write to </param>
/// <returns> The number of bytes written out. </returns>
/// <exception cref="IOException"> if an I/O error occurs. </exception>
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: static int writeUTF(String str, DataOutput out) throws IOException
internal static int WriteUTF(String str, DataOutput @out)
{
    int strlen = str.Length();
    int utflen = 0;
    int c, count = 0;

    /* use charAt instead of copying String to char array */
    // First pass: compute the encoded length (1, 2, or 3 bytes per char;
    // note U+0000 encodes as 2 bytes in modified UTF-8).
    for (int i = 0; i < strlen; i++)
    {
        c = str.CharAt(i);
        if ((c >= 0x0001) && (c <= 0x007F))
        {
            utflen++;
        }
        else if (c > 0x07FF)
        {
            utflen += 3;
        }
        else
        {
            utflen += 2;
        }
    }
    // The length prefix is an unsigned 16-bit value, so the payload is capped.
    if (utflen > 65535)
    {
        throw new UTFDataFormatException("encoded string too long: " + utflen + " bytes");
    }
    // Reuse (and grow) the DataOutputStream's scratch buffer when available
    // to avoid a per-call allocation; otherwise allocate exactly what we need.
    sbyte[] bytearr = null;
    if (@out is DataOutputStream)
    {
        DataOutputStream dos = (DataOutputStream)@out;
        if (dos.Bytearr == null || (dos.Bytearr.Length < (utflen + 2)))
        {
            dos.Bytearr = new sbyte[(utflen * 2) + 2];
        }
        bytearr = dos.Bytearr;
    }
    else
    {
        bytearr = new sbyte[utflen + 2];
    }
    // Big-endian 2-byte length prefix.
    bytearr[count++] = unchecked((sbyte)(((int)((uint)utflen >> 8)) & 0xFF));
    bytearr[count++] = unchecked((sbyte)(((int)((uint)utflen >> 0)) & 0xFF));
    // Fast path: copy the leading run of single-byte (ASCII, non-NUL) chars directly.
    int i = 0;
    for (i = 0; i < strlen; i++)
    {
        c = str.CharAt(i);
        if (!((c >= 0x0001) && (c <= 0x007F)))
        {
            break;
        }
        bytearr[count++] = (sbyte)c;
    }
    // Slow path: encode the remainder with full 1/2/3-byte modified UTF-8 rules.
    for (; i < strlen; i++)
    {
        c = str.CharAt(i);
        if ((c >= 0x0001) && (c <= 0x007F))
        {
            bytearr[count++] = (sbyte)c;
        }
        else if (c > 0x07FF)
        {
            bytearr[count++] = unchecked((sbyte)(0xE0 | ((c >> 12) & 0x0F)));
            bytearr[count++] = unchecked((sbyte)(0x80 | ((c >> 6) & 0x3F)));
            bytearr[count++] = unchecked((sbyte)(0x80 | ((c >> 0) & 0x3F)));
        }
        else
        {
            bytearr[count++] = unchecked((sbyte)(0xC0 | ((c >> 6) & 0x1F)));
            bytearr[count++] = unchecked((sbyte)(0x80 | ((c >> 0) & 0x3F)));
        }
    }
    @out.Write(bytearr, 0, utflen + 2);
    return (utflen + 2);
}
/// <summary>Serializes this packet's payload: the healthMP value as a short.</summary>
/// <param name="dataoutputstream">Destination stream.</param>
public override void writePacketData(DataOutputStream dataoutputstream) => dataoutputstream.writeShort(healthMP);
/// <summary>
/// Writes Nentry records into a gzip-compressed TFile with LongWritable keys
/// (cube-spaced around zero, so sorted key order is exercised), then scans the
/// file back and asserts each value round-trips intact.
/// </summary>
public virtual void TestSortedLongWritable()
{
    Configuration conf = new Configuration();
    Path path = new Path(Root, name);
    FileSystem fs = path.GetFileSystem(conf);
    FSDataOutputStream @out = fs.Create(path);
    try
    {
        TFile.Writer writer = new TFile.Writer(@out, BlockSize, "gz", jClassLongWritableComparator, conf);
        try
        {
            LongWritable key = new LongWritable(0);
            for (long i = 0; i < Nentry; ++i)
            {
                // Cube-spaced keys centered on zero: monotonically increasing in i.
                key.Set(Cube(i - Nentry / 2));
                DataOutputStream dos = writer.PrepareAppendKey(-1);
                try
                {
                    key.Write(dos);
                }
                finally
                {
                    dos.Close(); // key stream must be closed before the value stream is opened
                }
                dos = writer.PrepareAppendValue(-1);
                try
                {
                    dos.Write(Runtime.GetBytesForString(BuildValue(i)));
                }
                finally
                {
                    dos.Close();
                }
            }
        }
        finally
        {
            writer.Close();
        }
    }
    finally
    {
        @out.Close();
    }
    // Read the file back and verify every value in scan order.
    FSDataInputStream @in = fs.Open(path);
    try
    {
        TFile.Reader reader = new TFile.Reader(@in, fs.GetFileStatus(path).GetLen(), conf);
        try
        {
            TFile.Reader.Scanner scanner = reader.CreateScanner();
            long i = 0;
            BytesWritable value = new BytesWritable();
            for (; !scanner.AtEnd(); scanner.Advance())
            {
                scanner.Entry().GetValue(value);
                Assert.Equal(BuildValue(i), Runtime.GetStringForBytes(value.GetBytes(), 0, value.GetLength()));
                ++i;
            }
        }
        finally
        {
            reader.Close();
        }
    }
    finally
    {
        @in.Close();
    }
}
/// <summary>
/// Do client side SASL authentication with server via the given InputStream
/// and OutputStream. Drives the RPC SASL state machine: sends the negotiate
/// request, then loops over server responses (NEGOTIATE / CHALLENGE / SUCCESS),
/// replying until the handshake completes or fails.
/// </summary>
/// <param name="inS">InputStream to use</param>
/// <param name="outS">OutputStream to use</param>
/// <returns>AuthMethod used to negotiate the connection</returns>
/// <exception cref="System.IO.IOException"/>
public virtual SaslRpcServer.AuthMethod SaslConnect(InputStream inS, OutputStream outS)
{
    DataInputStream inStream = new DataInputStream(new BufferedInputStream(inS));
    DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream(outS));
    // redefined if/when a SASL negotiation starts, can be queried if the
    // negotiation fails
    authMethod = SaslRpcServer.AuthMethod.Simple;
    SendSaslMessage(outStream, negotiateRequest);
    // loop until sasl is complete or a rpc error occurs
    bool done = false;
    do
    {
        int totalLen = inStream.ReadInt();
        ProtobufRpcEngine.RpcResponseMessageWrapper responseWrapper = new ProtobufRpcEngine.RpcResponseMessageWrapper();
        responseWrapper.ReadFields(inStream);
        RpcHeaderProtos.RpcResponseHeaderProto header = responseWrapper.GetMessageHeader();
        switch (header.GetStatus())
        {
            case RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.Error:
            case RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto.Fatal:
            {
                // might get a RPC error during negotiation: surface it as a RemoteException
                throw new RemoteException(header.GetExceptionClassName(), header.GetErrorMsg());
            }

            default:
            {
                break;
            }
        }
        // Sanity-check the framing and that the response belongs to the SASL protocol.
        if (totalLen != responseWrapper.GetLength())
        {
            throw new SaslException("Received malformed response length");
        }
        if (header.GetCallId() != Server.AuthProtocol.Sasl.callId)
        {
            throw new SaslException("Non-SASL response during negotiation");
        }
        RpcHeaderProtos.RpcSaslProto saslMessage = RpcHeaderProtos.RpcSaslProto.ParseFrom(responseWrapper.GetMessageBytes());
        if (Log.IsDebugEnabled())
        {
            Log.Debug("Received SASL message " + saslMessage);
        }
        // handle sasl negotiation process
        RpcHeaderProtos.RpcSaslProto.Builder response = null;
        switch (saslMessage.GetState())
        {
            case RpcHeaderProtos.RpcSaslProto.SaslState.Negotiate:
            {
                // create a compatible SASL client, throws if no supported auths
                RpcHeaderProtos.RpcSaslProto.SaslAuth saslAuthType = SelectSaslClient(saslMessage.GetAuthsList());
                // define auth being attempted, caller can query if connect fails
                authMethod = SaslRpcServer.AuthMethod.ValueOf(saslAuthType.GetMethod());
                byte[] responseToken = null;
                if (authMethod == SaslRpcServer.AuthMethod.Simple)
                {
                    // switching to SIMPLE: not going to wait for success ack
                    done = true;
                }
                else
                {
                    byte[] challengeToken = null;
                    if (saslAuthType.HasChallenge())
                    {
                        // server provided the first challenge; strip it from the
                        // auth we echo back in our INITIATE reply
                        challengeToken = saslAuthType.GetChallenge().ToByteArray();
                        saslAuthType = ((RpcHeaderProtos.RpcSaslProto.SaslAuth)RpcHeaderProtos.RpcSaslProto.SaslAuth.NewBuilder(saslAuthType).ClearChallenge().Build());
                    }
                    else
                    {
                        if (saslClient.HasInitialResponse())
                        {
                            challengeToken = new byte[0];
                        }
                    }
                    responseToken = (challengeToken != null) ? saslClient.EvaluateChallenge(challengeToken) : new byte[0];
                }
                response = CreateSaslReply(RpcHeaderProtos.RpcSaslProto.SaslState.Initiate, responseToken);
                response.AddAuths(saslAuthType);
                break;
            }

            case RpcHeaderProtos.RpcSaslProto.SaslState.Challenge:
            {
                if (saslClient == null)
                {
                    // should probably instantiate a client to allow a server to
                    // demand a specific negotiation
                    throw new SaslException("Server sent unsolicited challenge");
                }
                byte[] responseToken = SaslEvaluateToken(saslMessage, false);
                response = CreateSaslReply(RpcHeaderProtos.RpcSaslProto.SaslState.Response, responseToken);
                break;
            }

            case RpcHeaderProtos.RpcSaslProto.SaslState.Success:
            {
                // simple server sends immediate success to a SASL client for
                // switch to simple
                if (saslClient == null)
                {
                    authMethod = SaslRpcServer.AuthMethod.Simple;
                }
                else
                {
                    SaslEvaluateToken(saslMessage, true);
                }
                done = true;
                break;
            }

            default:
            {
                throw new SaslException("RPC client doesn't support SASL " + saslMessage.GetState());
            }
        }
        if (response != null)
        {
            SendSaslMessage(outStream, ((RpcHeaderProtos.RpcSaslProto)response.Build()));
        }
    }
    while (!done);
    return (authMethod);
}
/// <summary>
/// Serializes this object to the given stream. Not yet implemented.
/// </summary>
/// <param name="dos">Destination stream (unused).</param>
/// <returns>Never returns; always throws.</returns>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public int write(DataOutputStream dos)
{
    throw new NotImplementedException();
}
/// <summary>
/// Shuts down by closing the underlying output stream.
/// </summary>
/// <returns>Always <c>true</c>.</returns>
public bool Stop()
{
    DataOutputStream.Close();
    return true;
}
// Intentionally empty: this type contributes no payload of its own when
// written to the stream. Overridden only to satisfy the base contract.
public override void ToOutputStream(DataOutputStream writer) { }
///<summary>
///Automatically sets the length of the marshalled data, then calls the marshal method.
///</summary>
///<param name="dos">The stream this record is marshalled to.</param>
public new void marshalAutoLengthSet(DataOutputStream dos)
{
    // Set the length prior to marshalling data so the header field
    // reflects the final serialized size.
    this.setLength((ushort)this.getMarshalledSize());
    this.marshal(dos);
}
/// <summary>
/// Writes the stored GetVideos result to the given output stream.
/// </summary>
/// <param name="output">Stream that receives the serialized result.</param>
public void Return(DataOutputStream output)
{
    output.WriteGetVideosResult(_result);
}
/// <summary>
/// Computes the final marshalled size, stores it in the PDU length field,
/// and then serializes this PDU to the stream.
/// </summary>
/// <param name="dos">The DataOutputStream instance to which the PDU is marshaled.</param>
public override void MarshalAutoLengthSet(DataOutputStream dos)
{
    // The length field must be set before serialization so the written
    // header matches the actual marshalled size.
    Length = (ushort)GetMarshalledSize();
    Marshal(dos);
}
/// <summary>
/// Creates a writer that will persist the given GISModel to the supplied stream.
/// </summary>
/// <param name="model">The GISModel which is to be persisted.</param>
/// <param name="dos">The stream which will be used to persist the model.</param>
public BinaryGISModelWriter(AbstractModel model, DataOutputStream dos)
    : base(model)
{
    this.output = dos;
}
// Verifies that an uncompressed InStream can be read and seeked when its
// backing data is split across multiple disjoint ByteBuffers, including
// when some ranges are missing entirely.
public void testUncompressedDisjointBuffers()
{
    // Write 1024 ints (4096 bytes) to an OutStream, recording the stream
    // position before each write so we can seek back to it later.
    OutputCollector collect = new OutputCollector();
    OutStream @out = new OutStream("test", 400, null, collect);
    PositionCollector[] positions = new PositionCollector[1024];
    DataOutput stream = new DataOutputStream(@out);
    for (int i = 0; i < 1024; ++i)
    {
        positions[i] = new PositionCollector();
        @out.getPosition(positions[i]);
        stream.writeInt(i);
    }
    @out.Flush();
    Assert.Equal("test", @out.ToString());
    Assert.Equal(4096, collect.buffer.size());

    // Split the collected 4096 bytes across three buffers of uneven size:
    // [0,1024), [1024,3072), [3072,4096). Buffer capacities exceed the
    // copied lengths, so flip() is required before reading.
    ByteBuffer[] inBuf = new ByteBuffer[3];
    inBuf[0] = ByteBuffer.allocate(1100);
    inBuf[1] = ByteBuffer.allocate(2200);
    inBuf[2] = ByteBuffer.allocate(1100);
    collect.buffer.setByteBuffer(inBuf[0], 0, 1024);
    collect.buffer.setByteBuffer(inBuf[1], 1024, 2048);
    collect.buffer.setByteBuffer(inBuf[2], 3072, 1024);
    for (int i = 0; i < inBuf.Length; ++i)
    {
        inBuf[i].flip();
    }

    // Full stream from all three ranges: sequential read then reverse seeks.
    InStream @in = InStream.create(null, "test", inBuf, new long[] { 0, 1024, 3072 }, 4096, null, 400);
    Assert.Equal("uncompressed stream test position: 0 length: 4096" + " range: 0 offset: 0 limit: 0", @in.ToString());
    DataInputStream inStream = new DataInputStream(@in);
    for (int i = 0; i < 1024; ++i)
    {
        int x = inStream.readInt();
        Assert.Equal(i, x);
    }
    Assert.Equal(0, @in.available());
    // Seek backwards through every recorded position and re-read each value.
    for (int i = 1023; i >= 0; --i)
    {
        @in.seek(positions[i]);
        Assert.Equal(i, inStream.readInt());
    }

    // Stream missing the first range [0,1024): seek into the second range
    // (value 256 starts at byte 1024) and read to the end.
    @in = InStream.create(null, "test", new ByteBuffer[] { inBuf[1], inBuf[2] }, new long[] { 1024, 3072 }, 4096, null, 400);
    inStream = new DataInputStream(@in);
    positions[256].reset();
    @in.seek(positions[256]);
    for (int i = 256; i < 1024; ++i)
    {
        Assert.Equal(i, inStream.readInt());
    }

    // Stream missing the middle range [1024,3072): read the first range,
    // then seek over the gap to value 768 (byte 3072) and read to the end.
    @in = InStream.create(null, "test", new ByteBuffer[] { inBuf[0], inBuf[2] }, new long[] { 0, 3072 }, 4096, null, 400);
    inStream = new DataInputStream(@in);
    positions[768].reset();
    for (int i = 0; i < 256; ++i)
    {
        Assert.Equal(i, inStream.readInt());
    }
    @in.seek(positions[768]);
    for (int i = 768; i < 1024; ++i)
    {
        Assert.Equal(i, inStream.readInt());
    }
}
/// <summary>
/// Writes the stored GetEntry result to the given output stream.
/// </summary>
/// <param name="output">Stream that receives the serialized result.</param>
public void Return(DataOutputStream output)
{
    output.WriteGetEntryResult(_result);
}
/// <summary>
/// Serializes this packet's payload: a single int holding the entity id.
/// </summary>
/// <param name="dataoutputstream">Destination stream for the packet data.</param>
public override void writePacketData(DataOutputStream dataoutputstream)
{
    dataoutputstream.writeInt(entityId);
}
/// <summary>
/// Per-frame agent behavior: adjusts speed when crossing roads, picks new
/// waypoints, tunes acceleration on turns, and broadcasts an entity-state
/// PDU when simple dead reckoning says the agent moved past a threshold.
/// </summary>
private void Update()
{
    // If the agent is crossing a road, choose a random faster speed
    // to simulate a human running; otherwise walk.
    NavMeshHit hit;
    if (!agent.SamplePathPosition(NavMesh.AllAreas, 0.0f, out hit))
    {
        if ((hit.mask & crossingMask) != 0)
        {
            agent.speed = UnityEngine.Random.Range(8, 15); // run across roads
        }
        else
        {
            agent.speed = 5; // walk around the paths
        }
    }

    // If the agent is close to its waypoint, choose another at random.
    if (agent.remainingDistance < 0.5)
    {
        int d = UnityEngine.Random.Range(0, wps.Length);
        agent.SetAreaCost(4, 20);
        agent.SetDestination(wps[d].transform.position);
    }
    // Otherwise scale acceleration with turn angle to help navigate corners.
    else if (agent.hasPath)
    {
        agent.isStopped = false;
        Vector3 toTarget = agent.steeringTarget - this.transform.position;
        float turnAngle = Vector3.Angle(this.transform.forward, toTarget);
        agent.acceleration = turnAngle * agent.speed;
    }

    // Simple dead reckoning: accumulate absolute movement since the last
    // ESPDU and only send a new one once either axis passes the threshold.
    change[0] += Mathf.Abs(this.agent.transform.position.x - prev_location.x);
    prev_location.x = this.agent.transform.position.x;
    change[1] += Mathf.Abs(this.agent.transform.position.y - prev_location.y);
    prev_location.y = this.agent.transform.position.y;
    // Fixed: was bitwise '|' on booleans; '||' short-circuits with the
    // same result and is the idiomatic boolean OR.
    if ((change[0] > threshold) || (change[1] > threshold))
    {
        change[0] = 0;
        change[1] = 0;
        this.sendNewEspdu = true;
    }
    else
    {
        this.sendNewEspdu = false;
    }

    // Send the new ESPDU if the dead-reckoning threshold was passed.
    if (this.sendNewEspdu)
    {
        // Position of the bot (world space); Z is flattened to 0.
        Vector3Double loc = espdu.EntityLocation;
        loc.X = this.agent.transform.position.x;
        loc.Y = this.agent.transform.position.y;
        loc.Z = 0.0;
        // NOTE(review): double.Equals(null) is always false, so this check
        // can never fire; kept for parity with the original behavior.
        if (espdu.EntityLocation.X.Equals(null) || espdu.EntityLocation.Y.Equals(null))
        {
            Debug.LogError("Espdu's location value is NULL!!!");
        }

        // Bot's velocity; Z flattened to 0.
        Vector3Float vel = espdu.EntityLinearVelocity;
        vel.X = this.agent.velocity.x;
        vel.Y = this.agent.velocity.y;
        vel.Z = 0.0f;
        // NOTE(review): same dead null check as above — float.Equals(null)
        // is always false.
        if (espdu.EntityLinearVelocity.X.Equals(null) || espdu.EntityLinearVelocity.Y.Equals(null))
        {
            Debug.LogError("Espdu's linear velocity value is NULL!!!");
        }

        // Dead reckoning algorithm 2 (R, P, W).
        espdu.DeadReckoningParameters.DeadReckoningAlgorithm = (byte)2;

        // Timestamp, marshal, and broadcast the ESPDU.
        espdu.Timestamp = DisTime.DisRelativeTimestamp;
        DataOutputStream dos = new DataOutputStream(Endian.Big);
        espdu.MarshalAutoLengthSet(dos);
        Sender.SendMessages(dos.ConvertToBytes());

        string mess = string.Format("Message sent with TimeStamp [{0}] Time Of[{1}]", espdu.Timestamp, (espdu.Timestamp >> 1));
        Debug.Log(mess);
        this.sendNewEspdu = false;
    }
}
// Handles a binary-protocol POST: validates the protocol version header,
// dispatches a method invocation whose result is buffered in memoryStream,
// then copies that buffer to the HTTP response.
private static void ProcessPostRequest(HttpRequest request, HttpResponse response, HttpSessionState session)
{
    response.ContentType = "application/octet-stream";
    // Invocation results are buffered here before being copied to the response.
    var memoryStream = new MemoryStream();
    var output = new DataOutputStream(memoryStream);
    try
    {
        var input = new DataInputStream(request.InputStream);
        // Optional "version" header must match the processor's protocol version exactly.
        var version = request.Headers["version"];
        if (version != null)
        {
            if (!HttpProcessor.PROTOCOL_VERSION.Equals(version, StringComparison.Ordinal))
                throw new IOException(String.Format(
                    CultureInfo.CurrentCulture,
                    Resources.IO_InvalidProtocolVersion,
                    version,
                    HttpProcessor.PROTOCOL_VERSION));
        }
        // First int16 is an opcode; only INVOCATION_CODE triggers a dispatch,
        // keyed by the following int32 method id.
        if (input.ReadInt16() == HttpProcessor.INVOCATION_CODE)
            InvokeMethod(session, input.ReadInt32(), input, output);
        input.Close();
    }
    catch (Exception e)
    {
        // NOTE(review): output is assigned before the try block, so this
        // null check can never be true here — the exception marker is always
        // written to memoryStream, never directly to response.OutputStream.
        if (output == null)
            output = new DataOutputStream(response.OutputStream);
        output.WriteInt16(HttpProcessor.RESULT_EXCEPTION);
        Debug.WriteLine(e.StackTrace);
        output.WriteString(e.ToString());
    }
    // Content length reflects whatever ended up in the in-memory buffer,
    // including any exception payload written in the catch block.
    response.SetContentLength(memoryStream.Length);
    try
    {
        // Copy the buffered bytes to the response body.
        // NOTE(review): a single Read call is not guaranteed to fill the
        // array in general; presumably safe for MemoryStream — confirm.
        var data = new Byte[memoryStream.Length];
        memoryStream.Seek(0, SeekOrigin.Begin);
        memoryStream.Read(data, 0, data.Length);
        response.OutputStream.Write(data, 0, data.Length);
    }
    finally
    {
        // Closing output also releases the underlying memoryStream.
        if (output != null)
            output.Close();
        response.OutputStream.Close();
    }
}