/// <summary>
/// Outputs the device name and ip.
/// Sends the device-name / IP-address pair to the peer. (*** Host And Guest ***)
/// </summary>
/// <param name="outputSocket">Output socket.</param>
/// <param name="deviceName">Device name.</param>
/// <param name="deviceAddress">Device address.</param>
void outputDeviceNameAndIp(Socket outputSocket, String deviceName, String deviceAddress)
{
    BufferedWriter bufferedWriter;
    try
    {
        // Wrap the socket's output stream. NOTE(review): the writer is never
        // closed here — presumably deliberate, since closing it would also close
        // the socket's stream; only Flush is called so the socket stays usable.
        bufferedWriter = new BufferedWriter(
            new OutputStreamWriter(outputSocket.OutputStream)
        );
        // Write the device name
        bufferedWriter.Write(deviceName);
        bufferedWriter.NewLine();
        // Write the IP address
        bufferedWriter.Write(deviceAddress);
        bufferedWriter.NewLine();
        // Write the end-of-output sentinel string the receiver looks for
        bufferedWriter.Write("outputFinish");
        // Push the buffered bytes onto the wire
        bufferedWriter.Flush();
    }
    catch (IOException e)
    {
        // Best effort: on failure, just dump the stack trace
        e.PrintStackTrace();
    }
}
/// <summary>
/// Appends <paramref name="text"/> as a new line to the fixed on-device log file
/// /storage/emulated/0/logCsharp.txt, creating the file on first use.
/// All I/O failures are printed and swallowed so logging never crashes the app.
/// </summary>
/// <param name="text">Line of text to append.</param>
private static void logMsg(string text)
{
    string path = "/storage/emulated/0/logCsharp.txt";
    Java.IO.File logFile = new Java.IO.File(path);
    if (!logFile.Exists())
    {
        try
        {
            logFile.CreateNewFile();
        }
        catch (Java.IO.IOException e)
        {
            // Best effort only — a logging failure must not take the app down.
            e.PrintStackTrace();
        }
    }
    BufferedWriter buf = null;
    try
    {
        // BufferedWriter for performance, true to set append to file flag
        buf = new BufferedWriter(new FileWriter(logFile, true));
        buf.Append(text);
        buf.NewLine();
    }
    catch (Java.IO.IOException e)
    {
        e.PrintStackTrace();
    }
    finally
    {
        // FIX: the original closed the writer only on the success path,
        // leaking the file handle whenever Append/NewLine threw.
        if (buf != null)
        {
            try { buf.Close(); } catch (Java.IO.IOException) { }
        }
    }
}
/// <summary>
/// Opens the dated log file (creating the log directory if needed) and appends
/// one formatted line: "time:type:tag:text".
/// </summary>
/// <param name="mylogtype">Log level/type token written into the line.</param>
/// <param name="tag">Tag written into the line.</param>
/// <param name="text">Message written into the line.</param>
private /*synchronized*/ static void log2File(string mylogtype, string tag, string text)
{
    Date nowtime = new Date();
    // Date suffix for the file name, plus the formatted log line.
    string date = FILE_SUFFIX.Format(nowtime);
    string dateLogContent = LOG_FORMAT.Format(nowtime) + ":" + mylogtype + ":" + tag + ":" + text;
    File destDir = new File(LOG_FILE_PATH);
    if (!destDir.Exists())
    {
        destDir.Mkdirs();
    }
    File file = new File(LOG_FILE_PATH, LOG_FILE_NAME + date);
    FileWriter filerWriter = null;
    BufferedWriter bufWriter = null;
    try
    {
        // 'true' -> append mode
        filerWriter = new FileWriter(file, true);
        bufWriter = new BufferedWriter(filerWriter);
        bufWriter.Write(dateLogContent);
        bufWriter.NewLine();
    }
    catch (IOException e)
    {
        e.PrintStackTrace();
    }
    finally
    {
        // FIX: close in finally so a failed Write no longer leaks the handles.
        // Closing the BufferedWriter also closes the wrapped FileWriter, so the
        // original's second Close call was redundant.
        try
        {
            if (bufWriter != null)
            {
                bufWriter.Close();
            }
            else if (filerWriter != null)
            {
                filerWriter.Close();
            }
        }
        catch (IOException e)
        {
            e.PrintStackTrace();
        }
    }
}
/// <summary>
/// Appends <paramref name="text"/> as a new line to "UEFuel.txt" in the app's
/// external-files directory, creating the file on first use. Errors are written
/// to the console and swallowed so logging never crashes the caller.
/// </summary>
/// <param name="context">Android context used to resolve the external-files dir.</param>
/// <param name="text">Line of text to append.</param>
public static void LogDetails(Context context, string text)
{
    File path = context.GetExternalFilesDir(null);
    File file = new File(path, "UEFuel.txt");
    if (!file.Exists())
    {
        try
        {
            file.CreateNewFile();
        }
        catch (Exception ex)
        {
            // FIX: the original swallowed this silently; still best-effort,
            // but at least record why the file could not be created.
            System.Console.WriteLine(ex.Message);
        }
    }
    BufferedWriter buf = null;
    try
    {
        // 'true' -> append mode
        buf = new BufferedWriter(new FileWriter(file, true));
        buf.Append(text);
        buf.NewLine();
    }
    catch (Exception ex)
    {
        System.Console.WriteLine(ex.Message);
    }
    finally
    {
        // FIX: close even when Append/NewLine throws, so the handle is not leaked.
        if (buf != null)
        {
            try { buf.Close(); } catch (Exception) { }
        }
    }
}
/// <summary>
/// Writes every element of <paramref name="matrix"/> onto the current line,
/// each preceded by a single space, then terminates the line.
/// </summary>
/// <param name="bout">Destination writer.</param>
/// <param name="matrix">Matrix whose elements are written in element order.</param>
/// <exception cref="System.IO.IOException"/>
public static void OutputMatrix(BufferedWriter bout, SimpleMatrix matrix)
{
    int total = matrix.GetNumElements();
    for (int idx = 0; idx < total; ++idx)
    {
        bout.Write(" " + matrix.Get(idx));
    }
    bout.NewLine();
}
/// <summary>
/// Reads lines from <paramref name="input"/> and sends each one to an NER server
/// at host:port, echoing the server's response lines either to
/// <paramref name="output"/> or (when it is null) to stdout via EncodingPrintWriter.
/// A blank input line ends the loop when <paramref name="closeOnBlank"/> is true
/// and is skipped otherwise. A new connection is opened for every line.
/// On connection/I-O failure the error is logged and the method returns.
/// </summary>
/// <exception cref="System.IO.IOException"/>
public static void CommunicateWithNERServer(string host, int port, string charset, BufferedReader input, BufferedWriter output, bool closeOnBlank)
{
    if (host == null)
    {
        host = "localhost";
    }
    for (string userInput; (userInput = input.ReadLine()) != null;)
    {
        if (userInput.Matches("\\n?"))
        {
            if (closeOnBlank)
            {
                break;
            }
            else
            {
                continue;
            }
        }
        try
        {
            // TODO: why not keep the same socket for multiple lines?
            Socket socket = new Socket(host, port);
            try
            {
                PrintWriter @out = new PrintWriter(new OutputStreamWriter(socket.GetOutputStream(), charset), true);
                BufferedReader @in = new BufferedReader(new InputStreamReader(socket.GetInputStream(), charset));
                // send material to NER to socket
                @out.Println(userInput);
                // Print the results of NER
                string result;
                while ((result = @in.ReadLine()) != null)
                {
                    if (output == null)
                    {
                        EncodingPrintWriter.Out.Println(result, charset);
                    }
                    else
                    {
                        output.Write(result);
                        output.NewLine();
                    }
                }
                @in.Close();
            }
            finally
            {
                // FIX: the original closed the socket only on the success path,
                // leaking it whenever writing/reading threw.
                socket.Close();
            }
        }
        catch (UnknownHostException)
        {
            log.Info("Cannot find host: ");
            log.Info(host);
            return;
        }
        catch (IOException)
        {
            log.Info("I/O error in the connection to: ");
            log.Info(host);
            return;
        }
    }
}
/// <summary>
/// Background worker: parses each sentence in this.sentences with the enclosing
/// panel's parser and writes the resulting trees, one per line, to this.filename.
/// Updates the progress bar after every sentence and stops early when cancelled.
/// Reports the outcome on the button text when done.
/// </summary>
public override void Run()
{
    int failures = 0;
    try
    {
        FileOutputStream fos = new FileOutputStream(this.filename);
        OutputStreamWriter ow = new OutputStreamWriter(fos, "utf-8");
        BufferedWriter bw = new BufferedWriter(ow);
        foreach (IList<IHasWord> sentence in this.sentences)
        {
            Tree tree = this._enclosing.parser.ParseTree(sentence);
            if (tree == null)
            {
                // Count and log sentences the parser could not handle.
                ++failures;
                ParserPanel.log.Info("Failed on sentence " + sentence);
            }
            else
            {
                bw.Write(tree.ToString());
                bw.NewLine();
            }
            this.progress.SetValue(this.progress.GetValue() + 1);
            // Cancellation is only checked between sentences.
            if (this.cancelled)
            {
                break;
            }
        }
        // NOTE(review): these closes run only on the success path; an exception
        // mid-loop leaves the streams unclosed.
        bw.Flush();
        bw.Close();
        ow.Close();
        fos.Close();
    }
    catch (IOException e)
    {
        JOptionPane.ShowMessageDialog(this._enclosing, "Could not save file " + this.filename + "\n" + e, null, JOptionPane.ErrorMessage);
        Sharpen.Runtime.PrintStackTrace(e);
        this._enclosing.SetStatus("Error saving parsed document");
    }
    // Surface the result in the UI.
    if (failures == 0)
    {
        this.button.SetText("Success!");
    }
    else
    {
        this.button.SetText("Done. " + failures + " parses failed");
    }
    if (this.cancelled && failures == 0)
    {
        this.dialog.SetVisible(false);
    }
    else
    {
        this.button.AddActionListener(null);
    }
}
/// <summary>Write configuration.</summary>
/// <returns>The file the queue configuration was written to.</returns>
/// <exception cref="System.IO.IOException"/>
private FilePath WriteFile()
{
    FilePath f = new FilePath(testDir, "tst.xml");
    BufferedWriter @out = new BufferedWriter(new FileWriter(f));
    string properties = "<properties><property key=\"key\" value=\"value\"/><property key=\"key1\" value=\"value1\"/></properties>";
    try
    {
        @out.Write("<queues>");
        @out.NewLine();
        @out.Write("<queue><name>first</name><acl-submit-job>user1,user2 group1,group2</acl-submit-job><acl-administer-jobs>user3,user4 group3,group4</acl-administer-jobs><state>running</state></queue>");
        @out.NewLine();
        @out.Write("<queue><name>second</name><acl-submit-job>u1,u2 g1,g2</acl-submit-job>" + properties + "<state>stopped</state></queue>");
        @out.NewLine();
        @out.Write("</queues>");
        @out.Flush();
    }
    finally
    {
        // FIX: close in finally so a failed Write does not leak the file handle.
        @out.Close();
    }
    return (f);
}
/// <summary>
/// Appends <paramref name="text"/> as a new line to the fixed on-device log file
/// /storage/emulated/0/logCsharp.txt, creating the file on first use.
/// All I/O failures are printed and swallowed so logging never crashes the app.
/// (Dead, commented-out prototype code from the original has been removed.)
/// </summary>
/// <param name="text">Line of text to append.</param>
private static void logMsg(string text)
{
    string path = "/storage/emulated/0/logCsharp.txt";
    Java.IO.File logFile = new Java.IO.File(path);
    if (!logFile.Exists())
    {
        try
        {
            logFile.CreateNewFile();
        }
        catch (Java.IO.IOException e)
        {
            // Best effort only — a logging failure must not take the app down.
            e.PrintStackTrace();
        }
    }
    BufferedWriter buf = null;
    try
    {
        // BufferedWriter for performance, true to set append to file flag
        buf = new BufferedWriter(new FileWriter(logFile, true));
        buf.Append(text);
        buf.NewLine();
    }
    catch (Java.IO.IOException e)
    {
        e.PrintStackTrace();
    }
    finally
    {
        // FIX: the original closed the writer only on the success path,
        // leaking the file handle whenever Append/NewLine threw.
        if (buf != null)
        {
            try { buf.Close(); } catch (Java.IO.IOException) { }
        }
    }
}
/// <summary>
/// Appends <paramref name="text"/> as a new line to "UECrusher.txt" in the app's
/// external-files directory, creating the file on first use. Errors are written
/// to the console and swallowed so logging never crashes the caller.
/// (Dead, commented-out read/write prototype code from the original was removed.)
/// </summary>
/// <param name="context">Android context used to resolve the external-files dir.</param>
/// <param name="text">Line of text to append.</param>
public static void LogDetails(Context context, string text)
{
    File path = context.GetExternalFilesDir(null);
    File file = new File(path, "UECrusher.txt");
    if (!file.Exists())
    {
        try
        {
            file.CreateNewFile();
        }
        catch (Exception ex)
        {
            // FIX: the original swallowed this silently; still best-effort,
            // but at least record why the file could not be created.
            System.Console.WriteLine(ex.Message);
        }
    }
    BufferedWriter buf = null;
    try
    {
        // 'true' -> append mode
        buf = new BufferedWriter(new FileWriter(file, true));
        buf.Append(text);
        buf.NewLine();
    }
    catch (Exception ex)
    {
        System.Console.WriteLine(ex.Message);
    }
    finally
    {
        // FIX: close even when Append/NewLine throws, so the handle is not leaked.
        if (buf != null)
        {
            try { buf.Close(); } catch (Exception) { }
        }
    }
}
/// <summary>Tcp
/// Outputs the device name and ip.
/// Sends the device-name / IP-address pair to the guest.
/// </summary>
/// <param name="outputSocket">Output socket.</param>
/// <param name="deviceName">Device name.</param>
/// <param name="deviceAddress">Device address.</param>
void outputDeviceNameAndIp(Socket outputSocket, String deviceName, String deviceAddress)
{
    try
    {
        // Writer over the socket's output stream; it is flushed but not closed,
        // leaving the socket stream usable after this call.
        BufferedWriter writer = new BufferedWriter(
            new OutputStreamWriter(outputSocket.OutputStream)
        );
        // Device name on its own line
        writer.Write(deviceName);
        writer.NewLine();
        // IP address on its own line
        writer.Write(deviceAddress);
        writer.NewLine();
        // End-of-output sentinel expected by the receiving side
        writer.Write("outputFinish");
        // Push everything onto the wire
        writer.Flush();
    }
    catch (IOException e)
    {
        e.PrintStackTrace();
    }
}
/// <summary>
/// Appends a timestamped, version-stamped line to the log file, locating the
/// file via GetLogFile() on first use.
/// </summary>
/// <param name="logMessage">Message to append.</param>
public void Log(string logMessage)
{
    if (logFile == null)
    {
        logFile = GetLogFile();
    }
    FileWriter fileWriter = new FileWriter(logFile, true);
    BufferedWriter bufferedWriter = new BufferedWriter(fileWriter);
    try
    {
        // Line prefix: local timestamp plus app version.
        // FIX: dropped the original's unused local
        // "DateTime currentTime = new DateTime(DateTime.Now.Ticks)".
        string currentlogtime = DateTime.Now.ToString() + " " + GetVersion();
        bufferedWriter.Append(currentlogtime + ": " + logMessage);
        bufferedWriter.NewLine();
        bufferedWriter.Flush();
    }
    finally
    {
        // FIX: close in finally so a failed write does not leak the handle.
        // Closing the BufferedWriter also closes the wrapped FileWriter.
        bufferedWriter.Close();
    }
}
/// <summary>
/// Applies the translation to every line read from <paramref name="br"/> and
/// writes each translated line (via Apply) to <paramref name="bw"/>, one output
/// line per input line. Any IOException is wrapped in a RuntimeIOException.
/// </summary>
public virtual void TranslateLines(BufferedReader br, BufferedWriter bw)
{
    try
    {
        for (string current = br.ReadLine(); current != null; current = br.ReadLine())
        {
            bw.Write(Apply(current));
            bw.NewLine();
        }
    }
    catch (IOException e)
    {
        throw new RuntimeIOException(e);
    }
}
/// <summary>
/// Records the best parse from a ShiftReduceParserQuery: writes either the
/// binarized or the debinarized tree (depending on the recorder's mode)
/// followed by a newline. IOExceptions are wrapped in RuntimeIOException;
/// a non-ShiftReduceParserQuery query or an unknown mode raises ArgumentException.
/// </summary>
public virtual void Evaluate(IParserQuery query, Tree gold, PrintWriter pw)
{
    if (!(query is ShiftReduceParserQuery))
    {
        throw new ArgumentException("This evaluator only works for the ShiftReduceParser");
    }
    ShiftReduceParserQuery srquery = (ShiftReduceParserQuery)query;
    try
    {
        if (mode == TreeRecorder.Mode.Binarized)
        {
            @out.Write(srquery.GetBestBinarizedParse().ToString());
        }
        else if (mode == TreeRecorder.Mode.Debinarized)
        {
            @out.Write(srquery.debinarized.ToString());
        }
        else
        {
            throw new ArgumentException("Unknown mode " + mode);
        }
        @out.NewLine();
    }
    catch (IOException e)
    {
        throw new RuntimeIOException(e);
    }
}
/// <summary>
/// Command line entry point: loads a LexicalizedParser (with its DVModel),
/// parses each sentence from -input, and writes one file per sentence
/// ("sentence&lt;n&gt;.txt" under -output) containing the sentence text, its best
/// deep tree, the word vectors, and the per-node matrices.
/// </summary>
/// <exception cref="System.IO.IOException"/>
public static void Main(string[] args)
{
    string modelPath = null;
    string outputPath = null;
    string inputPath = null;
    string testTreebankPath = null;
    IFileFilter testTreebankFilter = null;
    IList<string> unusedArgs = Generics.NewArrayList();
    // Argument parsing: recognized flags are consumed here; everything else is
    // collected and forwarded to LexicalizedParser.LoadModel below.
    for (int argIndex = 0; argIndex < args.Length;)
    {
        if (Sharpen.Runtime.EqualsIgnoreCase(args[argIndex], "-model"))
        {
            modelPath = args[argIndex + 1];
            argIndex += 2;
        }
        else
        {
            if (Sharpen.Runtime.EqualsIgnoreCase(args[argIndex], "-output"))
            {
                outputPath = args[argIndex + 1];
                argIndex += 2;
            }
            else
            {
                if (Sharpen.Runtime.EqualsIgnoreCase(args[argIndex], "-input"))
                {
                    inputPath = args[argIndex + 1];
                    argIndex += 2;
                }
                else
                {
                    if (Sharpen.Runtime.EqualsIgnoreCase(args[argIndex], "-testTreebank"))
                    {
                        Pair<string, IFileFilter> treebankDescription = ArgUtils.GetTreebankDescription(args, argIndex, "-testTreebank");
                        argIndex = argIndex + ArgUtils.NumSubArgs(args, argIndex) + 1;
                        testTreebankPath = treebankDescription.First();
                        testTreebankFilter = treebankDescription.Second();
                    }
                    else
                    {
                        unusedArgs.Add(args[argIndex++]);
                    }
                }
            }
        }
    }
    string[] newArgs = Sharpen.Collections.ToArray(unusedArgs, new string[unusedArgs.Count]);
    LexicalizedParser parser = ((LexicalizedParser)LexicalizedParser.LoadModel(modelPath, newArgs));
    DVModel model = DVParser.GetModelFromLexicalizedParser(parser);
    // The output directory must not exist yet; it is created here.
    File outputFile = new File(outputPath);
    FileSystem.CheckNotExistsOrFail(outputFile);
    FileSystem.MkdirOrFail(outputFile);
    int count = 0;
    if (inputPath != null)
    {
        // NOTE(review): this reader is never closed.
        Reader input = new BufferedReader(new FileReader(inputPath));
        DocumentPreprocessor processor = new DocumentPreprocessor(input);
        foreach (IList<IHasWord> sentence in processor)
        {
            count++;
            // index from 1
            IParserQuery pq = parser.ParserQuery();
            if (!(pq is RerankingParserQuery))
            {
                throw new ArgumentException("Expected a RerankingParserQuery");
            }
            RerankingParserQuery rpq = (RerankingParserQuery)pq;
            if (!rpq.Parse(sentence))
            {
                throw new Exception("Unparsable sentence: " + sentence);
            }
            IRerankerQuery reranker = rpq.RerankerQuery();
            if (!(reranker is DVModelReranker.Query))
            {
                throw new ArgumentException("Expected a DVModelReranker");
            }
            // Take the first (best) deep tree and dump its node vectors.
            DeepTree deepTree = ((DVModelReranker.Query)reranker).GetDeepTrees()[0];
            IdentityHashMap<Tree, SimpleMatrix> vectors = deepTree.GetVectors();
            foreach (KeyValuePair<Tree, SimpleMatrix> entry in vectors)
            {
                log.Info(entry.Key + " " + entry.Value);
            }
            FileWriter fout = new FileWriter(outputPath + File.separator + "sentence" + count + ".txt");
            BufferedWriter bout = new BufferedWriter(fout);
            bout.Write(SentenceUtils.ListToString(sentence));
            bout.NewLine();
            bout.Write(deepTree.GetTree().ToString());
            bout.NewLine();
            foreach (IHasWord word in sentence)
            {
                OutputMatrix(bout, model.GetWordVector(word.Word()));
            }
            Tree rootTree = FindRootTree(vectors);
            OutputTreeMatrices(bout, rootTree, vectors);
            // NOTE(review): bout is flushed but never closed; only the underlying
            // FileWriter is closed directly.
            bout.Flush();
            fout.Close();
        }
    }
}
/// <summary>A fast, rule-based tokenizer for Spanish based on AnCora.</summary>
/// <remarks>
/// A fast, rule-based tokenizer for Spanish based on AnCora.
/// Performs punctuation splitting and light tokenization by default.
/// <p>
/// Currently, this tokenizer does not do line splitting. It assumes that the input
/// file is delimited by the system line separator. The output will be equivalently
/// delimited.
/// </p>
/// </remarks>
/// <param name="args"/>
public static void Main(string[] args)
{
    Properties options = StringUtils.ArgsToProperties(args, ArgOptionDefs());
    if (options.Contains("help"))
    {
        log.Info(Usage());
        return;
    }
    // Lexer options
    ITokenizerFactory<CoreLabel> tf = SpanishTokenizer.CoreLabelFactory();
    string orthoOptions = options.Contains("ancora") ? AncoraOptions : string.Empty;
    if (options.Contains("options"))
    {
        // FIX: the original appended the whole Properties object
        // ("orthoOptions + ',' + options") instead of the value of the
        // "options" property, corrupting the tokenizer option string.
        orthoOptions = orthoOptions.IsEmpty() ? options.GetProperty("options") : orthoOptions + ',' + options.GetProperty("options");
    }
    bool tokens = PropertiesUtils.GetBool(options, "tokens", false);
    if (!tokens)
    {
        // Keep newline tokens so line boundaries can be reproduced below.
        orthoOptions = orthoOptions.IsEmpty() ? "tokenizeNLs" : orthoOptions + ",tokenizeNLs";
    }
    tf.SetOptions(orthoOptions);
    // Other options
    string encoding = options.GetProperty("encoding", "UTF-8");
    bool toLower = PropertiesUtils.GetBool(options, "lowerCase", false);
    Locale es = new Locale("es");
    bool onePerLine = PropertiesUtils.GetBool(options, "onePerLine", false);
    // Read the file from stdin
    int nLines = 0;
    int nTokens = 0;
    long startTime = Runtime.NanoTime();
    try
    {
        ITokenizer<CoreLabel> tokenizer = tf.GetTokenizer(new BufferedReader(new InputStreamReader(Runtime.@in, encoding)));
        BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(System.Console.Out, encoding));
        bool printSpace = false;
        while (tokenizer.MoveNext())
        {
            ++nTokens;
            string word = tokenizer.Current.Word();
            if (word.Equals(SpanishLexer.NewlineToken))
            {
                // Newline token: reproduce the line break (unless one-per-line
                // mode already emits a newline after every token).
                ++nLines;
                if (!onePerLine)
                {
                    writer.NewLine();
                    printSpace = false;
                }
            }
            else
            {
                string outputToken = toLower ? word.ToLower(es) : word;
                if (onePerLine)
                {
                    writer.Write(outputToken);
                    writer.NewLine();
                }
                else
                {
                    // Space-separate tokens within a line.
                    if (printSpace)
                    {
                        writer.Write(" ");
                    }
                    writer.Write(outputToken);
                    printSpace = true;
                }
            }
        }
        // FIX: flush the buffered stdout writer; the original never flushed or
        // closed it, so trailing buffered output could be lost.
        writer.Flush();
    }
    catch (UnsupportedEncodingException e)
    {
        throw new RuntimeIOException("Bad character encoding", e);
    }
    catch (IOException e)
    {
        throw new RuntimeIOException(e);
    }
    long elapsedTime = Runtime.NanoTime() - startTime;
    double linesPerSec = (double)nLines / (elapsedTime / 1e9);
    System.Console.Error.Printf("Done! Tokenized %d lines (%d tokens) at %.2f lines/sec%n", nLines, nTokens, linesPerSec);
}
/// <summary>
/// Prepares a rebase: determines the commits to cherry-pick, writes the rebase
/// meta files (ORIG_HEAD, head name, onto, git-rebase-todo, ...) and rewinds the
/// working tree to the upstream commit. Returns UP_TO_DATE_RESULT when there is
/// nothing to do, otherwise null after the rewind.
/// </summary>
/// <exception cref="NGit.Api.Errors.RefNotFoundException"></exception>
/// <exception cref="System.IO.IOException"></exception>
/// <exception cref="NGit.Api.Errors.NoHeadException"></exception>
/// <exception cref="NGit.Api.Errors.JGitInternalException"></exception>
private RebaseResult InitFilesAndRewind()
{
    // we need to store everything into files so that we can implement
    // --skip, --continue, and --abort
    // first of all, we determine the commits to be applied
    IList<RevCommit> cherryPickList = new AList<RevCommit>();
    Ref head = repo.GetRef(Constants.HEAD);
    if (head == null || head.GetObjectId() == null)
    {
        throw new RefNotFoundException(MessageFormat.Format(JGitText.Get().refNotResolved, Constants.HEAD));
    }
    string headName;
    if (head.IsSymbolic())
    {
        headName = head.GetTarget().GetName();
    }
    else
    {
        headName = "detached HEAD";
    }
    ObjectId headId = head.GetObjectId();
    if (headId == null)
    {
        throw new RefNotFoundException(MessageFormat.Format(JGitText.Get().refNotResolved, Constants.HEAD));
    }
    RevCommit headCommit = walk.LookupCommit(headId);
    monitor.BeginTask(JGitText.Get().obtainingCommitsForCherryPick, ProgressMonitor.UNKNOWN);
    // Commits in upstream..HEAD are the candidates to re-apply.
    LogCommand cmd = new Git(repo).Log().AddRange(upstreamCommit, headCommit);
    Iterable<RevCommit> commitsToUse = cmd.Call();
    foreach (RevCommit commit in commitsToUse)
    {
        cherryPickList.AddItem(commit);
    }
    // if the upstream commit is in a direct line to the current head,
    // the log command will not report any commits; in this case,
    // we create the cherry-pick list ourselves
    if (cherryPickList.IsEmpty())
    {
        Iterable<RevCommit> parents = new Git(repo).Log().Add(upstreamCommit).Call();
        foreach (RevCommit parent in parents)
        {
            if (parent.Equals(headCommit))
            {
                break;
            }
            if (parent.ParentCount != 1)
            {
                throw new JGitInternalException(JGitText.Get().canOnlyCherryPickCommitsWithOneParent);
            }
            cherryPickList.AddItem(parent);
        }
    }
    // nothing to do: return with UP_TO_DATE_RESULT
    if (cherryPickList.IsEmpty())
    {
        return (RebaseResult.UP_TO_DATE_RESULT);
    }
    // reverse the list so the todo file is written in application order
    Sharpen.Collections.Reverse(cherryPickList);
    // create the folder for the meta information
    FileUtils.Mkdir(rebaseDir);
    CreateFile(repo.Directory, Constants.ORIG_HEAD, headId.Name);
    CreateFile(rebaseDir, REBASE_HEAD, headId.Name);
    CreateFile(rebaseDir, HEAD_NAME, headName);
    CreateFile(rebaseDir, ONTO, upstreamCommit.Name);
    CreateFile(rebaseDir, INTERACTIVE, string.Empty);
    BufferedWriter fw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(new FilePath(rebaseDir, GIT_REBASE_TODO)), Constants.CHARACTER_ENCODING));
    // NOTE(review): the header write below happens before the try/finally, so a
    // failure on these two calls would leave fw unclosed.
    fw.Write("# Created by EGit: rebasing " + upstreamCommit.Name + " onto " + headId.Name);
    fw.NewLine();
    try
    {
        // one "pick <abbreviated-id> <short message>" line per commit
        StringBuilder sb = new StringBuilder();
        ObjectReader reader = walk.GetObjectReader();
        foreach (RevCommit commit_1 in cherryPickList)
        {
            sb.Length = 0;
            sb.Append(RebaseCommand.Action.PICK.ToToken());
            sb.Append(" ");
            sb.Append(reader.Abbreviate(commit_1).Name);
            sb.Append(" ");
            sb.Append(commit_1.GetShortMessage());
            fw.Write(sb.ToString());
            fw.NewLine();
        }
    }
    finally
    {
        fw.Close();
    }
    monitor.EndTask();
    // we rewind to the upstream commit
    monitor.BeginTask(MessageFormat.Format(JGitText.Get().rewinding, upstreamCommit.GetShortMessage()), ProgressMonitor.UNKNOWN);
    CheckoutCommit(upstreamCommit);
    monitor.EndTask();
    return (null);
}
/// <summary>
/// Removes the number of lines given in the parameter from the
/// <code>git-rebase-todo</code> file but preserves comments and other lines
/// that can not be parsed as steps
/// </summary>
/// <param name="numSteps">number of action steps to pop</param>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
private void PopSteps(int numSteps)
{
    if (numSteps == 0)
    {
        return;
    }
    IList<string> todoLines = new AList<string>();
    IList<string> poppedLines = new AList<string>();
    FilePath todoFile = new FilePath(rebaseDir, GIT_REBASE_TODO);
    FilePath doneFile = new FilePath(rebaseDir, DONE);
    BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(todoFile), Constants.CHARACTER_ENCODING));
    try
    {
        // check if the line starts with a action tag (pick, skip...)
        while (poppedLines.Count < numSteps)
        {
            string popCandidate = br.ReadLine();
            if (popCandidate == null)
            {
                break;
            }
            // FIX: guard against empty lines — the original indexed
            // popCandidate[0] unconditionally and threw
            // IndexOutOfRangeException on a blank line. Blank lines are now
            // skipped the same way comment lines are.
            if (popCandidate.Length == 0 || popCandidate[0] == '#')
            {
                continue;
            }
            int spaceIndex = popCandidate.IndexOf(' ');
            bool pop = false;
            if (spaceIndex >= 0)
            {
                string actionToken = Sharpen.Runtime.Substring(popCandidate, 0, spaceIndex);
                pop = RebaseCommand.Action.Parse(actionToken) != null;
            }
            if (pop)
            {
                poppedLines.AddItem(popCandidate);
            }
            else
            {
                todoLines.AddItem(popCandidate);
            }
        }
        // everything after the popped region is kept verbatim
        string readLine = br.ReadLine();
        while (readLine != null)
        {
            todoLines.AddItem(readLine);
            readLine = br.ReadLine();
        }
    }
    finally
    {
        br.Close();
    }
    // rewrite the todo file with the remaining lines
    BufferedWriter todoWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(todoFile), Constants.CHARACTER_ENCODING));
    try
    {
        foreach (string writeLine in todoLines)
        {
            todoWriter.Write(writeLine);
            todoWriter.NewLine();
        }
    }
    finally
    {
        todoWriter.Close();
    }
    if (poppedLines.Count > 0)
    {
        // append the popped steps to the "done" file
        BufferedWriter doneWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(doneFile, true), Constants.CHARACTER_ENCODING));
        try
        {
            foreach (string writeLine in poppedLines)
            {
                doneWriter.Write(writeLine);
                doneWriter.NewLine();
            }
        }
        finally
        {
            doneWriter.Close();
        }
    }
}
/// <summary>Copy a table in this database into a new delimited text file.</summary>
/// <remarks>Copy a table in this database into a new delimited text file.</remarks>
/// <param name="cursor">Cursor to export</param>
/// <param name="out">Writer to export to</param>
/// <param name="header">If <code>true</code> the first line contains the column names</param>
/// <param name="delim">The column delimiter, <code>null</code> for default (comma)</param>
/// <param name="quote">The quote character</param>
/// <param name="filter">valid export filter</param>
/// <exception cref="System.IO.IOException"></exception>
public static void ExportWriter(Cursor cursor, BufferedWriter @out, bool header, string delim, char quote, ExportFilter filter)
{
    string delimiter = (delim == null) ? DEFAULT_DELIMITER : delim;
    // create pattern which will indicate whether or not a value needs to be
    // quoted or not (contains delimiter, separator, or newline)
    Sharpen.Pattern needsQuotePattern = Sharpen.Pattern.Compile("(?:" + Sharpen.Pattern.Quote(delimiter) + ")|(?:" + Sharpen.Pattern.Quote(string.Empty + quote) + ")|(?:[\n\r])");
    IList<Column> origCols = cursor.GetTable().GetColumns();
    IList<Column> columns = new AList<Column>(origCols);
    columns = filter.FilterColumns(columns);
    ICollection<string> columnNames = null;
    if (!origCols.Equals(columns))
    {
        // columns have been filtered
        // restrict row fetches below to just the surviving columns
        columnNames = new HashSet<string>();
        foreach (Column c in columns)
        {
            columnNames.AddItem(c.GetName());
        }
    }
    // print the header row (if desired)
    if (header)
    {
        for (Iterator<Column> iter = columns.Iterator(); iter.HasNext();)
        {
            WriteValue(@out, iter.Next().GetName(), quote, needsQuotePattern);
            if (iter.HasNext())
            {
                // delimiter between names, but not after the last one
                @out.Write(delimiter);
            }
        }
        @out.NewLine();
    }
    // print the data rows
    IDictionary<string, object> row;
    object[] unfilteredRowData = new object[columns.Count];
    while ((row = cursor.GetNextRow(columnNames)) != null)
    {
        // fill raw row data in array
        for (int i = 0; i < columns.Count; i++)
        {
            unfilteredRowData[i] = row.Get(columns[i].GetName());
        }
        // apply filter
        object[] rowData = filter.FilterRow(unfilteredRowData);
        // print row
        for (int i_1 = 0; i_1 < columns.Count; i_1++)
        {
            object obj = rowData[i_1];
            if (obj != null)
            {
                string value = null;
                // binary values are exported as hex strings
                if (obj is byte[])
                {
                    value = ByteUtil.ToHexString((byte[])obj);
                }
                else
                {
                    value = rowData[i_1].ToString();
                }
                WriteValue(@out, value, quote, needsQuotePattern);
            }
            if (i_1 < columns.Count - 1)
            {
                @out.Write(delimiter);
            }
        }
        @out.NewLine();
    }
    // flush but do not close: the caller owns the writer
    @out.Flush();
}
/// <summary>
/// Command line entry point: loads a treebank from Input and randomly partitions
/// its trees into SplitNames.Length output files ("&lt;Output&gt;.&lt;name&gt;"),
/// choosing each tree's destination with probability proportional to the
/// normalized SplitWeights.
/// </summary>
/// <exception cref="System.IO.IOException"/>
public static void Main(string[] args)
{
    // Parse the arguments
    Properties props = StringUtils.ArgsToProperties(args);
    ArgumentParser.FillOptions(new Type[] { typeof(ArgumentParser), typeof(SplitTrainingSet) }, props);
    if (SplitNames.Length != SplitWeights.Length)
    {
        throw new ArgumentException("Name and weight arrays must be of the same length");
    }
    // Validate the weights and compute their total for normalization.
    double totalWeight = 0.0;
    foreach (double weight in SplitWeights)
    {
        totalWeight += weight;
        if (weight < 0.0)
        {
            throw new ArgumentException("Split weights cannot be negative");
        }
    }
    if (totalWeight <= 0.0)
    {
        throw new ArgumentException("Split weights must total to a positive weight");
    }
    // Normalized weights sum to 1.
    IList<double> splitWeights = new List<double>();
    foreach (double weight_1 in SplitWeights)
    {
        splitWeights.Add(weight_1 / totalWeight);
    }
    logger.Info("Splitting into " + splitWeights.Count + " lists with weights " + splitWeights);
    // Fall back to a time-based seed when none was given, and log it so the
    // split can be reproduced.
    if (Seed == 0L)
    {
        Seed = Runtime.NanoTime();
        logger.Info("Random seed not set by options, using " + Seed);
    }
    Random random = new Random(Seed);
    // One output bucket per split. NOTE(review): loop variable 'd' is unused;
    // the loop just runs once per split weight.
    IList<IList<Tree>> splits = new List<IList<Tree>>();
    foreach (double d in splitWeights)
    {
        splits.Add(new List<Tree>());
    }
    Treebank treebank = new MemoryTreebank(null);
    treebank.LoadPath(Input);
    logger.Info("Splitting " + treebank.Count + " trees");
    // Assign every tree to a weighted-random bucket.
    foreach (Tree tree in treebank)
    {
        int index = WeightedIndex(splitWeights, random);
        splits[index].Add(tree);
    }
    // Write each split, one tree per line.
    for (int i = 0; i < splits.Count; ++i)
    {
        string filename = Output + "." + SplitNames[i];
        IList<Tree> split = splits[i];
        logger.Info("Writing " + split.Count + " trees to " + filename);
        FileWriter fout = new FileWriter(filename);
        BufferedWriter bout = new BufferedWriter(fout);
        foreach (Tree tree_1 in split)
        {
            bout.Write(tree_1.ToString());
            bout.NewLine();
        }
        bout.Close();
        fout.Close();
    }
}
protected override void SetUp() { FilePath logFile = new FilePath(historyLog); if (!logFile.GetParentFile().Exists()) { if (!logFile.GetParentFile().Mkdirs()) { Log.Error("Cannot create dirs for history log file: " + historyLog); } } if (!logFile.CreateNewFile()) { Log.Error("Cannot create history log file: " + historyLog); } BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream (historyLog))); writer.Write("$!!FILE=file1.log!!"); writer.NewLine(); writer.Write("Meta VERSION=\"1\" ."); writer.NewLine(); writer.Write("Job JOBID=\"job_200903250600_0004\" JOBNAME=\"streamjob21364.jar\" USER=\"hadoop\" SUBMIT_TIME=\"1237962008012\" JOBCONF=\"hdfs:///job_200903250600_0004/job.xml\" ." ); writer.NewLine(); writer.Write("Job JOBID=\"job_200903250600_0004\" JOB_PRIORITY=\"NORMAL\" ."); writer.NewLine(); writer.Write("Job JOBID=\"job_200903250600_0004\" LAUNCH_TIME=\"1237962008712\" TOTAL_MAPS=\"2\" TOTAL_REDUCES=\"0\" JOB_STATUS=\"PREP\" ." ); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0004_m_000003\" TASK_TYPE=\"SETUP\" START_TIME=\"1237962008736\" SPLITS=\"\" ." ); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"SETUP\" TASKID=\"task_200903250600_0004_m_000003\" TASK_ATTEMPT_ID=\"attempt_200903250600_0004_m_000003_0\" START_TIME=\"1237962010929\" TRACKER_NAME=\"tracker_50445\" HTTP_PORT=\"50060\" ." ); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"SETUP\" TASKID=\"task_200903250600_0004_m_000003\" TASK_ATTEMPT_ID=\"attempt_200903250600_0004_m_000003_0\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237962012459\" HOSTNAME=\"host.com\" STATE_STRING=\"setup\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(SPILLED_RECORDS)(Spilled Records)(0)]}\" ." 
); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0004_m_000003\" TASK_TYPE=\"SETUP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237962023824\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(SPILLED_RECORDS)(Spilled Records)(0)]}\" ." ); writer.NewLine(); writer.Write("Job JOBID=\"job_200903250600_0004\" JOB_STATUS=\"RUNNING\" ."); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0004_m_000000\" TASK_TYPE=\"MAP\" START_TIME=\"1237962024049\" SPLITS=\"host1.com,host2.com,host3.com\" ." ); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0004_m_000001\" TASK_TYPE=\"MAP\" START_TIME=\"1237962024065\" SPLITS=\"host1.com,host2.com,host3.com\" ." ); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0004_m_000000\" TASK_ATTEMPT_ID=\"attempt_200903250600_0004_m_000000_0\" START_TIME=\"1237962026157\" TRACKER_NAME=\"tracker_50524\" HTTP_PORT=\"50060\" ." ); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0004_m_000000\" TASK_ATTEMPT_ID=\"attempt_200903250600_0004_m_000000_0\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237962041307\" HOSTNAME=\"host.com\" STATE_STRING=\"Records R/W=2681/1\" COUNTERS=\"{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_READ)(HDFS_BYTES_READ)(56630)][(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(28327)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(2681)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(28327)][(MAP_OUTPUT_RECORDS)(Map output records)(2681)]}\" ." 
); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0004_m_000000\" TASK_TYPE=\"MAP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237962054138\" COUNTERS=\"{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_READ)(HDFS_BYTES_READ)(56630)][(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(28327)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(2681)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(28327)][(MAP_OUTPUT_RECORDS)(Map output records)(2681)]}\" ." ); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0004_m_000001\" TASK_ATTEMPT_ID=\"attempt_200903250600_0004_m_000001_0\" START_TIME=\"1237962026077\" TRACKER_NAME=\"tracker_50162\" HTTP_PORT=\"50060\" ." ); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0004_m_000001\" TASK_ATTEMPT_ID=\"attempt_200903250600_0004_m_000001_0\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237962041030\" HOSTNAME=\"host.com\" STATE_STRING=\"Records R/W=2634/1\" COUNTERS=\"{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_READ)(HDFS_BYTES_READ)(28316)][(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(28303)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(2634)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(28303)][(MAP_OUTPUT_RECORDS)(Map output records)(2634)]}\" ." 
); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0004_m_000001\" TASK_TYPE=\"MAP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237962054187\" COUNTERS=\"{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_READ)(HDFS_BYTES_READ)(28316)][(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(28303)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(2634)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(28303)][(MAP_OUTPUT_RECORDS)(Map output records)(2634)]}\" ." ); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0004_m_000002\" TASK_TYPE=\"CLEANUP\" START_TIME=\"1237962054187\" SPLITS=\"\" ." ); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"CLEANUP\" TASKID=\"task_200903250600_0004_m_000002\" TASK_ATTEMPT_ID=\"attempt_200903250600_0004_m_000002_0\" START_TIME=\"1237962055578\" TRACKER_NAME=\"tracker_50162\" HTTP_PORT=\"50060\" ." ); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"CLEANUP\" TASKID=\"task_200903250600_0004_m_000002\" TASK_ATTEMPT_ID=\"attempt_200903250600_0004_m_000002_0\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237962056782\" HOSTNAME=\"host.com\" STATE_STRING=\"cleanup\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(SPILLED_RECORDS)(Spilled Records)(0)]}\" ." ); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0004_m_000002\" TASK_TYPE=\"CLEANUP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237962069193\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(SPILLED_RECORDS)(Spilled Records)(0)]}\" ." 
); writer.NewLine(); writer.Write("Job JOBID=\"job_200903250600_0004\" FINISH_TIME=\"1237962069193\" JOB_STATUS=\"SUCCESS\" FINISHED_MAPS=\"2\" FINISHED_REDUCES=\"0\" FAILED_MAPS=\"0\" FAILED_REDUCES=\"0\" COUNTERS=\"{(org.apache.hadoop.mapred.JobInProgress$Counter)(Job Counters )[(TOTAL_LAUNCHED_MAPS)(Launched map tasks)(2)]}{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_READ)(HDFS_BYTES_READ)(84946)][(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(56630)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(5315)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(56630)][(MAP_OUTPUT_RECORDS)(Map output records)(5315)]}\" ." ); writer.NewLine(); writer.Write("$!!FILE=file2.log!!"); writer.NewLine(); writer.Write("Meta VERSION=\"1\" ."); writer.NewLine(); writer.Write("Job JOBID=\"job_200903250600_0023\" JOBNAME=\"TestJob\" USER=\"hadoop2\" SUBMIT_TIME=\"1237964779799\" JOBCONF=\"hdfs:///job_200903250600_0023/job.xml\" ." ); writer.NewLine(); writer.Write("Job JOBID=\"job_200903250600_0023\" JOB_PRIORITY=\"NORMAL\" ."); writer.NewLine(); writer.Write("Job JOBID=\"job_200903250600_0023\" LAUNCH_TIME=\"1237964780928\" TOTAL_MAPS=\"2\" TOTAL_REDUCES=\"0\" JOB_STATUS=\"PREP\" ." ); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0023_r_000001\" TASK_TYPE=\"SETUP\" START_TIME=\"1237964780940\" SPLITS=\"\" ." ); writer.NewLine(); writer.Write("ReduceAttempt TASK_TYPE=\"SETUP\" TASKID=\"task_200903250600_0023_r_000001\" TASK_ATTEMPT_ID=\"attempt_200903250600_0023_r_000001_0\" START_TIME=\"1237964720322\" TRACKER_NAME=\"tracker_3065\" HTTP_PORT=\"50060\" ." 
); writer.NewLine(); writer.Write("ReduceAttempt TASK_TYPE=\"SETUP\" TASKID=\"task_200903250600_0023_r_000001\" TASK_ATTEMPT_ID=\"attempt_200903250600_0023_r_000001_0\" TASK_STATUS=\"SUCCESS\" SHUFFLE_FINISHED=\"1237964722118\" SORT_FINISHED=\"1237964722118\" FINISH_TIME=\"1237964722118\" HOSTNAME=\"host.com\" STATE_STRING=\"setup\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(REDUCE_INPUT_GROUPS)(Reduce input groups)(0)][(COMBINE_OUTPUT_RECORDS)(Combine output records)(0)][(REDUCE_SHUFFLE_BYTES)(Reduce shuffle bytes)(0)][(REDUCE_OUTPUT_RECORDS)(Reduce output records)(0)][(SPILLED_RECORDS)(Spilled Records)(0)][(REDUCE_INPUT_RECORDS)(Reduce input records)(0)]}\" ." ); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0023_r_000001\" TASK_TYPE=\"SETUP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237964796054\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(REDUCE_INPUT_GROUPS)(Reduce input groups)(0)][(COMBINE_OUTPUT_RECORDS)(Combine output records)(0)][(REDUCE_SHUFFLE_BYTES)(Reduce shuffle bytes)(0)][(REDUCE_OUTPUT_RECORDS)(Reduce output records)(0)][(SPILLED_RECORDS)(Spilled Records)(0)][(REDUCE_INPUT_RECORDS)(Reduce input records)(0)]}\" ." ); writer.NewLine(); writer.Write("Job JOBID=\"job_200903250600_0023\" JOB_STATUS=\"RUNNING\" ."); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0023_m_000000\" TASK_TYPE=\"MAP\" START_TIME=\"1237964796176\" SPLITS=\"\" ." ); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0023_m_000001\" TASK_TYPE=\"MAP\" START_TIME=\"1237964796176\" SPLITS=\"\" ." ); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0023_m_000000\" TASK_ATTEMPT_ID=\"attempt_200903250600_0023_m_000000_0\" START_TIME=\"1237964809765\" TRACKER_NAME=\"tracker_50459\" HTTP_PORT=\"50060\" ." 
); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0023_m_000000\" TASK_ATTEMPT_ID=\"attempt_200903250600_0023_m_000000_0\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237964911772\" HOSTNAME=\"host.com\" STATE_STRING=\"\" COUNTERS=\"{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(500000000)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(5000000)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(5000000)][(MAP_OUTPUT_RECORDS)(Map output records)(5000000)]}\" ." ); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0023_m_000000\" TASK_TYPE=\"MAP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237964916534\" COUNTERS=\"{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(500000000)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(5000000)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(5000000)][(MAP_OUTPUT_RECORDS)(Map output records)(5000000)]}\" ." ); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0023_m_000001\" TASK_ATTEMPT_ID=\"attempt_200903250600_0023_m_000001_0\" START_TIME=\"1237964798169\" TRACKER_NAME=\"tracker_1524\" HTTP_PORT=\"50060\" ." ); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0023_m_000001\" TASK_ATTEMPT_ID=\"attempt_200903250600_0023_m_000001_0\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237964962960\" HOSTNAME=\"host.com\" STATE_STRING=\"\" COUNTERS=\"{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(500000000)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(5000000)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(5000000)][(MAP_OUTPUT_RECORDS)(Map output records)(5000000)]}\" ." 
); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0023_m_000001\" TASK_TYPE=\"MAP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237964976870\" COUNTERS=\"{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(500000000)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(5000000)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(5000000)][(MAP_OUTPUT_RECORDS)(Map output records)(5000000)]}\" ." ); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0023_r_000000\" TASK_TYPE=\"CLEANUP\" START_TIME=\"1237964976871\" SPLITS=\"\" ." ); writer.NewLine(); writer.Write("ReduceAttempt TASK_TYPE=\"CLEANUP\" TASKID=\"task_200903250600_0023_r_000000\" TASK_ATTEMPT_ID=\"attempt_200903250600_0023_r_000000_0\" START_TIME=\"1237964977208\" TRACKER_NAME=\"tracker_1524\" HTTP_PORT=\"50060\" ." ); writer.NewLine(); writer.Write("ReduceAttempt TASK_TYPE=\"CLEANUP\" TASKID=\"task_200903250600_0023_r_000000\" TASK_ATTEMPT_ID=\"attempt_200903250600_0023_r_000000_0\" TASK_STATUS=\"SUCCESS\" SHUFFLE_FINISHED=\"1237964979031\" SORT_FINISHED=\"1237964979031\" FINISH_TIME=\"1237964979032\" HOSTNAME=\"host.com\" STATE_STRING=\"cleanup\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(REDUCE_INPUT_GROUPS)(Reduce input groups)(0)][(COMBINE_OUTPUT_RECORDS)(Combine output records)(0)][(REDUCE_SHUFFLE_BYTES)(Reduce shuffle bytes)(0)][(REDUCE_OUTPUT_RECORDS)(Reduce output records)(0)][(SPILLED_RECORDS)(Spilled Records)(0)][(REDUCE_INPUT_RECORDS)(Reduce input records)(0)]}\" ." 
); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0023_r_000000\" TASK_TYPE=\"CLEANUP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237964991879\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(REDUCE_INPUT_GROUPS)(Reduce input groups)(0)][(COMBINE_OUTPUT_RECORDS)(Combine output records)(0)][(REDUCE_SHUFFLE_BYTES)(Reduce shuffle bytes)(0)][(REDUCE_OUTPUT_RECORDS)(Reduce output records)(0)][(SPILLED_RECORDS)(Spilled Records)(0)][(REDUCE_INPUT_RECORDS)(Reduce input records)(0)]}\" ." ); writer.NewLine(); writer.Write("Job JOBID=\"job_200903250600_0023\" FINISH_TIME=\"1237964991879\" JOB_STATUS=\"SUCCESS\" FINISHED_MAPS=\"2\" FINISHED_REDUCES=\"0\" FAILED_MAPS=\"0\" FAILED_REDUCES=\"0\" COUNTERS=\"{(org.apache.hadoop.mapred.JobInProgress$Counter)(Job Counters )[(TOTAL_LAUNCHED_MAPS)(Launched map tasks)(2)]}{(FileSystemCounters)(FileSystemCounters)[(HDFS_BYTES_WRITTEN)(HDFS_BYTES_WRITTEN)(1000000000)]}{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(MAP_INPUT_RECORDS)(Map input records)(10000000)][(SPILLED_RECORDS)(Spilled Records)(0)][(MAP_INPUT_BYTES)(Map input bytes)(10000000)][(MAP_OUTPUT_RECORDS)(Map output records)(10000000)]}\" ." ); writer.NewLine(); writer.Write("$!!FILE=file3.log!!"); writer.NewLine(); writer.Write("Meta VERSION=\"1\" ."); writer.NewLine(); writer.Write("Job JOBID=\"job_200903250600_0034\" JOBNAME=\"TestJob\" USER=\"hadoop3\" SUBMIT_TIME=\"1237966370007\" JOBCONF=\"hdfs:///job_200903250600_0034/job.xml\" ." ); writer.NewLine(); writer.Write("Job JOBID=\"job_200903250600_0034\" JOB_PRIORITY=\"NORMAL\" ."); writer.NewLine(); writer.Write("Job JOBID=\"job_200903250600_0034\" LAUNCH_TIME=\"1237966371076\" TOTAL_MAPS=\"2\" TOTAL_REDUCES=\"0\" JOB_STATUS=\"PREP\" ." ); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0034_m_000003\" TASK_TYPE=\"SETUP\" START_TIME=\"1237966371093\" SPLITS=\"\" ." 
); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"SETUP\" TASKID=\"task_200903250600_0034_m_000003\" TASK_ATTEMPT_ID=\"attempt_200903250600_0034_m_000003_0\" START_TIME=\"1237966371524\" TRACKER_NAME=\"tracker_50118\" HTTP_PORT=\"50060\" ." ); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"SETUP\" TASKID=\"task_200903250600_0034_m_000003\" TASK_ATTEMPT_ID=\"attempt_200903250600_0034_m_000003_0\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237966373174\" HOSTNAME=\"host.com\" STATE_STRING=\"setup\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(SPILLED_RECORDS)(Spilled Records)(0)]}\" ." ); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0034_m_000003\" TASK_TYPE=\"SETUP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237966386098\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(SPILLED_RECORDS)(Spilled Records)(0)]}\" ." ); writer.NewLine(); writer.Write("Job JOBID=\"job_200903250600_0034\" JOB_STATUS=\"RUNNING\" ."); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0034_m_000000\" TASK_TYPE=\"MAP\" START_TIME=\"1237966386111\" SPLITS=\"\" ." ); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0034_m_000001\" TASK_TYPE=\"MAP\" START_TIME=\"1237966386124\" SPLITS=\"\" ." ); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"MAP\" TASKID=\"task_200903250600_0034_m_000001\" TASK_ATTEMPT_ID=\"attempt_200903250600_0034_m_000001_0\" TASK_STATUS=\"FAILED\" FINISH_TIME=\"1237967174546\" HOSTNAME=\"host.com\" ERROR=\"java.io.IOException: Task process exit with nonzero status of 15." ); writer.NewLine(); writer.Write(" at org.apache.hadoop.mapred.TaskRunner.run(TaskRunner.java:424)"); writer.NewLine(); writer.Write(",java.io.IOException: Task process exit with nonzero status of 15." 
); writer.NewLine(); writer.Write(" at org.apache.hadoop.mapred.TaskRunner.run(TaskRunner.java:424)"); writer.NewLine(); writer.Write("\" ."); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0034_m_000002\" TASK_TYPE=\"CLEANUP\" START_TIME=\"1237967170815\" SPLITS=\"\" ." ); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"CLEANUP\" TASKID=\"task_200903250600_0034_m_000002\" TASK_ATTEMPT_ID=\"attempt_200903250600_0034_m_000002_0\" START_TIME=\"1237967168653\" TRACKER_NAME=\"tracker_3105\" HTTP_PORT=\"50060\" ." ); writer.NewLine(); writer.Write("MapAttempt TASK_TYPE=\"CLEANUP\" TASKID=\"task_200903250600_0034_m_000002\" TASK_ATTEMPT_ID=\"attempt_200903250600_0034_m_000002_0\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237967171301\" HOSTNAME=\"host.com\" STATE_STRING=\"cleanup\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(SPILLED_RECORDS)(Spilled Records)(0)]}\" ." ); writer.NewLine(); writer.Write("Task TASKID=\"task_200903250600_0034_m_000002\" TASK_TYPE=\"CLEANUP\" TASK_STATUS=\"SUCCESS\" FINISH_TIME=\"1237967185818\" COUNTERS=\"{(org.apache.hadoop.mapred.Task$Counter)(Map-Reduce Framework)[(SPILLED_RECORDS)(Spilled Records)(0)]}\" ." ); writer.NewLine(); writer.Write("Job JOBID=\"job_200903250600_0034\" FINISH_TIME=\"1237967185818\" JOB_STATUS=\"KILLED\" FINISHED_MAPS=\"0\" FINISHED_REDUCES=\"0\" ." ); writer.NewLine(); writer.Close(); }
/// <summary>
/// Prepares the on-disk rebase state and rewinds the work tree to the upstream
/// commit. Resolves HEAD, collects the commits to cherry-pick (newest-first from
/// the log, then reversed to application order), writes the rebase meta files
/// (ORIG_HEAD, REBASE_HEAD, head-name, onto, interactive, git-rebase-todo), and
/// checks out the upstream commit, deleting the rebase directory on checkout
/// failure.
/// </summary>
/// <returns>
/// <c>UP_TO_DATE_RESULT</c> if upstream is already merged into HEAD,
/// <c>FAST_FORWARD_RESULT</c> if HEAD could simply be moved to upstream,
/// or <c>null</c> when the rebase state was set up and cherry-picking must proceed.
/// </returns>
/// <exception cref="NGit.Api.Errors.RefNotFoundException"></exception>
/// <exception cref="System.IO.IOException"></exception>
/// <exception cref="NGit.Api.Errors.NoHeadException"></exception>
/// <exception cref="NGit.Api.Errors.JGitInternalException"></exception>
private RebaseResult InitFilesAndRewind()
{
	// we need to store everything into files so that we can implement
	// --skip, --continue, and --abort
	Ref head = repo.GetRef(Constants.HEAD);
	if (head == null || head.GetObjectId() == null)
	{
		throw new RefNotFoundException(MessageFormat.Format(JGitText.Get().refNotResolved
			, Constants.HEAD));
	}
	// Record the branch name to restore later; a detached HEAD has no
	// symbolic target, so use the fixed marker string instead.
	string headName;
	if (head.IsSymbolic())
	{
		headName = head.GetTarget().GetName();
	}
	else
	{
		headName = "detached HEAD";
	}
	ObjectId headId = head.GetObjectId();
	if (headId == null)
	{
		throw new RefNotFoundException(MessageFormat.Format(JGitText.Get().refNotResolved
			, Constants.HEAD));
	}
	RevCommit headCommit = walk.LookupCommit(headId);
	RevCommit upstream = walk.LookupCommit(upstreamCommit.Id);
	// If upstream is an ancestor of HEAD there is nothing to do.
	if (walk.IsMergedInto(upstream, headCommit))
	{
		return (RebaseResult.UP_TO_DATE_RESULT);
	}
	else
	{
		if (walk.IsMergedInto(headCommit, upstream))
		{
			// head is already merged into upstream, fast-forward
			monitor.BeginTask(MessageFormat.Format(JGitText.Get().resettingHead, upstreamCommit
				.GetShortMessage()), ProgressMonitor.UNKNOWN);
			CheckoutCommit(upstreamCommit);
			monitor.EndTask();
			UpdateHead(headName, upstreamCommit);
			return (RebaseResult.FAST_FORWARD_RESULT);
		}
	}
	monitor.BeginTask(JGitText.Get().obtainingCommitsForCherryPick, ProgressMonitor.UNKNOWN
		);
	// determine the commits to be applied
	LogCommand cmd = new Git(repo).Log().AddRange(upstreamCommit, headCommit);
	Iterable<RevCommit> commitsToUse = cmd.Call();
	IList<RevCommit> cherryPickList = new AList<RevCommit>();
	foreach (RevCommit commit in commitsToUse)
	{
		// Merge commits (more than one parent) cannot be cherry-picked here.
		if (commit.ParentCount != 1)
		{
			throw new JGitInternalException(JGitText.Get().canOnlyCherryPickCommitsWithOneParent
				);
		}
		cherryPickList.AddItem(commit);
	}
	// Log returns newest-first; reverse so commits are applied oldest-first.
	Sharpen.Collections.Reverse(cherryPickList);
	// create the folder for the meta information
	FileUtils.Mkdir(rebaseDir);
	CreateFile(repo.Directory, Constants.ORIG_HEAD, headId.Name);
	CreateFile(rebaseDir, REBASE_HEAD, headId.Name);
	CreateFile(rebaseDir, HEAD_NAME, headName);
	CreateFile(rebaseDir, ONTO, upstreamCommit.Name);
	CreateFile(rebaseDir, INTERACTIVE, string.Empty);
	// Write the git-rebase-todo file: one "pick <abbrev-id> <subject>" line
	// per commit, preceded by a header comment.
	BufferedWriter fw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream
		(new FilePath(rebaseDir, GIT_REBASE_TODO)), Constants.CHARACTER_ENCODING));
	fw.Write("# Created by EGit: rebasing " + upstreamCommit.Name + " onto " + headId
		.Name);
	fw.NewLine();
	try
	{
		StringBuilder sb = new StringBuilder();
		ObjectReader reader = walk.GetObjectReader();
		foreach (RevCommit commit_1 in cherryPickList)
		{
			sb.Length = 0;
			sb.Append(RebaseCommand.Action.PICK.ToToken());
			sb.Append(" ");
			sb.Append(reader.Abbreviate(commit_1).Name);
			sb.Append(" ");
			sb.Append(commit_1.GetShortMessage());
			fw.Write(sb.ToString());
			fw.NewLine();
		}
	}
	finally
	{
		fw.Close();
	}
	monitor.EndTask();
	// we rewind to the upstream commit
	monitor.BeginTask(MessageFormat.Format(JGitText.Get().rewinding, upstreamCommit.GetShortMessage
		()), ProgressMonitor.UNKNOWN);
	bool checkoutOk = false;
	try
	{
		checkoutOk = CheckoutCommit(upstreamCommit);
	}
	finally
	{
		// If the checkout failed, remove the half-initialized rebase state
		// so the repository is not left mid-rebase.
		if (!checkoutOk)
		{
			FileUtils.Delete(rebaseDir, FileUtils.RECURSIVE);
		}
	}
	monitor.EndTask();
	return (null);
}
/// <summary>
/// Pumps tokens from <paramref name="r"/> through a PTBTokenizer and writes them
/// to <paramref name="writer"/>, returning the number of tokens seen.
/// </summary>
/// <param name="r">Source of raw text to tokenize.</param>
/// <param name="writer">Destination for the tokenized output.</param>
/// <param name="parseInsidePattern">
/// When non-null, output is suppressed until a token matches this pattern with an
/// empty group(1) (a start element) and suppressed again when group(1) is
/// non-empty (an end element). When null, everything is printed.
/// </param>
/// <param name="filterPattern">Tokens matching this pattern are dropped from the output.</param>
/// <param name="options">Tokenizer options string passed to PTBTokenizer.</param>
/// <param name="preserveLines">If true, newline tokens in the input become newlines in the output.</param>
/// <param name="oneLinePerElement">If true, each parseInside element is written on one line.</param>
/// <param name="dump">If true, each token is written via ToShorterString() instead of its text.</param>
/// <param name="lowerCase">If true, token text is lowercased (English locale) before output.</param>
/// <returns>The total number of tokens read, including suppressed/filtered ones.</returns>
/// <exception cref="System.IO.IOException"/>
private static int TokReader(Reader r, BufferedWriter writer, Pattern parseInsidePattern, Pattern filterPattern, string options, bool preserveLines, bool oneLinePerElement, bool dump, bool lowerCase)
{
	int numTokens = 0;
	// beginLine tracks whether the next printed token starts a line (so no
	// leading separator space is emitted before it).
	bool beginLine = true;
	bool printing = (parseInsidePattern == null);
	// start off printing, unless you're looking for a start entity
	Matcher m = null;
	if (parseInsidePattern != null)
	{
		m = parseInsidePattern.Matcher(string.Empty);
	}
	// create once as performance hack
	// System.err.printf("parseInsidePattern is: |%s|%n", parseInsidePattern);
	for (Edu.Stanford.Nlp.Process.PTBTokenizer<CoreLabel> tokenizer = new Edu.Stanford.Nlp.Process.PTBTokenizer<CoreLabel>(r, new CoreLabelTokenFactory(), options); tokenizer.MoveNext();)
	{
		CoreLabel obj = tokenizer.Current;
		// String origStr = obj.get(CoreAnnotations.TextAnnotation.class).replaceFirst("\n+$", ""); // DanC added this to fix a lexer bug, hopefully now corrected
		string origStr = obj.Get(typeof(CoreAnnotations.TextAnnotation));
		string str;
		if (lowerCase)
		{
			str = origStr.ToLower(Locale.English);
			obj.Set(typeof(CoreAnnotations.TextAnnotation), str);
		}
		else
		{
			str = origStr;
		}
		// Does this token toggle the printing state (start/end element)?
		// NOTE: matched against the original (pre-lowercasing) text.
		if (m != null && m.Reset(origStr).Matches())
		{
			printing = m.Group(1).IsEmpty();
			// turn on printing if no end element slash, turn it off if there is
			// System.err.printf("parseInsidePattern matched against: |%s|, printing is %b.%n", origStr, printing);
			if (!printing)
			{
				// true only if matched a stop
				beginLine = true;
				if (oneLinePerElement)
				{
					writer.NewLine();
				}
			}
		}
		else
		{
			if (printing)
			{
				if (dump)
				{
					// after having checked for tags, change str to be exhaustive
					str = obj.ToShorterString();
				}
				// Intentionally empty branch: tokens matching filterPattern are
				// skipped; everything else is written in the else below.
				if (filterPattern != null && filterPattern.Matcher(origStr).Matches())
				{
				}
				else
				{
					// skip
					if (preserveLines)
					{
						if (NewlineToken.Equals(origStr))
						{
							beginLine = true;
							writer.NewLine();
						}
						else
						{
							// Separate tokens on the same line with a single space.
							if (!beginLine)
							{
								writer.Write(' ');
							}
							else
							{
								beginLine = false;
							}
							// writer.write(str.replace("\n", ""));
							writer.Write(str);
						}
					}
					else
					{
						if (oneLinePerElement)
						{
							if (!beginLine)
							{
								writer.Write(' ');
							}
							else
							{
								beginLine = false;
							}
							writer.Write(str);
						}
						else
						{
							// Default mode: one token per output line.
							writer.Write(str);
							writer.NewLine();
						}
					}
				}
			}
		}
		// Counted even when the token was suppressed or filtered.
		numTokens++;
	}
	return (numTokens);
}
// main method only
/// <summary>
/// Command-line entry point: reads tagged sentences (as trees) from the input
/// files and writes each sentence as one line of text to the output file.
/// Flags (accepted with one or two leading dashes): -output FILE (required),
/// -tagSeparator SEP, -treeRange RANGE, -inputEncoding ENC, -outputEncoding ENC
/// (both default UTF-8), -treeFilter FILTER, -noTags, -noSpaces. Any other
/// argument is treated as an input filename. Exits with status 2 when no
/// output filename or no input files are given.
/// </summary>
/// <param name="args">Command-line arguments as described in the summary.</param>
/// <exception cref="System.IO.IOException"/>
public static void Main(string[] args)
{
	string outputFilename = string.Empty;
	string tagSeparator = string.Empty;
	string treeRange = string.Empty;
	string inputEncoding = "UTF-8";
	string outputEncoding = "UTF-8";
	string treeFilter = string.Empty;
	bool noTags = false;
	bool noSpaces = false;
	IList<string> inputFilenames = new List<string>();
	// Flattened from the converter-generated else{ if ... } pyramid: a flat
	// else-if chain with identical matching order and semantics. Value-taking
	// flags consume the following argument.
	for (int i = 0; i < args.Length; ++i)
	{
		if ((Sharpen.Runtime.EqualsIgnoreCase(args[i], "-output") || Sharpen.Runtime.EqualsIgnoreCase(args[i], "--output")) && (i + 1 < args.Length))
		{
			outputFilename = args[i + 1];
			i++;
		}
		else if ((Sharpen.Runtime.EqualsIgnoreCase(args[i], "-tagSeparator") || Sharpen.Runtime.EqualsIgnoreCase(args[i], "--tagSeparator")) && (i + 1 < args.Length))
		{
			tagSeparator = args[i + 1];
			i++;
		}
		else if ((Sharpen.Runtime.EqualsIgnoreCase(args[i], "-treeRange") || Sharpen.Runtime.EqualsIgnoreCase(args[i], "--treeRange")) && (i + 1 < args.Length))
		{
			treeRange = args[i + 1];
			i++;
		}
		else if ((Sharpen.Runtime.EqualsIgnoreCase(args[i], "-inputEncoding") || Sharpen.Runtime.EqualsIgnoreCase(args[i], "--inputEncoding")) && (i + 1 < args.Length))
		{
			inputEncoding = args[i + 1];
			i++;
		}
		else if ((Sharpen.Runtime.EqualsIgnoreCase(args[i], "-outputEncoding") || Sharpen.Runtime.EqualsIgnoreCase(args[i], "--outputEncoding")) && (i + 1 < args.Length))
		{
			outputEncoding = args[i + 1];
			i++;
		}
		else if ((Sharpen.Runtime.EqualsIgnoreCase(args[i], "-treeFilter") || Sharpen.Runtime.EqualsIgnoreCase(args[i], "--treeFilter")) && (i + 1 < args.Length))
		{
			treeFilter = args[i + 1];
			i++;
		}
		else if (Sharpen.Runtime.EqualsIgnoreCase(args[i], "-noTags") || Sharpen.Runtime.EqualsIgnoreCase(args[i], "--noTags"))
		{
			noTags = true;
		}
		else if (Sharpen.Runtime.EqualsIgnoreCase(args[i], "-noSpaces") || Sharpen.Runtime.EqualsIgnoreCase(args[i], "--noSpaces"))
		{
			noSpaces = true;
		}
		else
		{
			// Anything that is not a recognized flag is an input file.
			inputFilenames.Add(args[i]);
		}
	}
	if (outputFilename.Equals(string.Empty))
	{
		log.Info("Must specify an output filename, -output");
		System.Environment.Exit(2);
	}
	if (inputFilenames.Count == 0)
	{
		log.Info("Must specify one or more input filenames");
		System.Environment.Exit(2);
	}
	FileOutputStream fos = new FileOutputStream(outputFilename);
	OutputStreamWriter osw = new OutputStreamWriter(fos, outputEncoding);
	BufferedWriter bout = new BufferedWriter(osw);
	// try/finally fixes a resource leak in the original: if reading or
	// writing threw, the stream chain was never flushed or closed.
	try
	{
		Properties props = new Properties();
		foreach (string filename in inputFilenames)
		{
			// Build the TaggedFileRecord description string as
			// "key=value," pairs prepended in front of the filename.
			string description = TaggedFileRecord.Format + "=" + TaggedFileRecord.Format.Trees + "," + filename;
			if (!treeRange.IsEmpty())
			{
				description = TaggedFileRecord.TreeRange + "=" + treeRange + "," + description;
			}
			if (!treeFilter.IsEmpty())
			{
				description = TaggedFileRecord.TreeFilter + "=" + treeFilter + "," + description;
			}
			description = TaggedFileRecord.Encoding + "=" + inputEncoding + "," + description;
			TaggedFileRecord record = TaggedFileRecord.CreateRecord(props, description);
			foreach (IList<TaggedWord> sentence in record.Reader())
			{
				string output = SentenceUtils.ListToString(sentence, noTags, tagSeparator);
				if (noSpaces)
				{
					output = output.ReplaceAll(" ", string.Empty);
				}
				bout.Write(output);
				bout.NewLine();
			}
		}
		bout.Flush();
	}
	finally
	{
		// Close the whole chain, innermost last, matching the original's
		// explicit close order on the success path.
		bout.Close();
		osw.Close();
		fos.Close();
	}
}