// Verifies the ConfBlock web view: rendering without a job id prints an
// error message; rendering with a job id prints the job's configuration
// key/value pairs.
public virtual void TestConfigurationBlock()
{
    AppContext ctx = Org.Mockito.Mockito.Mock<AppContext>();
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = Org.Mockito.Mockito.Mock<Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job>();
    Path path = new Path("conf");
    Configuration configuration = new Configuration();
    configuration.Set("Key for test", "Value for test");
    Org.Mockito.Mockito.When(job.GetConfFile()).ThenReturn(path);
    Org.Mockito.Mockito.When(job.LoadConfFile()).ThenReturn(configuration);
    Org.Mockito.Mockito.When(ctx.GetJob(Any<JobId>())).ThenReturn(job);
    TestBlocks.ConfBlockForTest configurationBlock = new TestBlocks.ConfBlockForTest(this, ctx);
    // "data" is presumably a shared in-memory output stream field on the
    // test class — TODO confirm.
    PrintWriter pWriter = new PrintWriter(data);
    HtmlBlock.Block html = new BlockForTest(new TestBlocks.HtmlBlockForTest(this), pWriter, 0, false);
    // First render: no job id parameter set, so the block reports an error.
    configurationBlock.Render(html);
    pWriter.Flush();
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("Sorry, can't do anything without a JobID"));
    // Second render: with a job id the configuration entries must appear.
    configurationBlock.AddParameter(AMParams.JobId, "job_01_01");
    data.Reset();
    configurationBlock.Render(html);
    pWriter.Flush();
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("Key for test"));
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("Value for test"));
}
/// <summary>
/// Save the file to device.
/// </summary>
public void SaveFile()
{
    // Create the CSV task document
    string doc = TaskManager.CreateCSVDocument();
    // Confirm we're allowed to save to this device, ask for permission if not.
    ConfirmWritePermission();
    // Get the default location for the export.
    File exportFile = new File(Android.OS.Environment.ExternalStorageDirectory, "tasks.csv");
    // Now try to write the document to their device. The writer is closed in
    // all paths: the original flushed but never closed it, leaking the
    // underlying file handle.
    PrintWriter writer = null;
    try
    {
        writer = new PrintWriter(exportFile);
        writer.Append(doc);
        writer.Flush();
    }
    catch (IOException e)
    {
        Toast.MakeText(context, e.Message, ToastLength.Long).Show();
        return;
    }
    finally
    {
        if (writer != null)
        {
            writer.Close();
        }
    }
    // And try to open it. Will be blocked by MAM if necessary
    Toast.MakeText(context, context.GetString(Resource.String.save_success, exportFile.Path), ToastLength.Short).Show();
    OpenFile(exportFile);
}
// Annotates the given text with the pipeline and prints the top-level
// annotation plus per-token attributes (word, lemma, POS, NER, normalized
// NER) to the supplied writer, flushing at the end.
private static void RunPipeline(StanfordCoreNLP pipeline, string text, PrintWriter @out)
{
    Annotation document = new Annotation(text);
    pipeline.Annotate(document);
    // An Annotation is a Map; toShorterString() shows what analyses it holds.
    @out.Println();
    @out.Println("The top level annotation");
    @out.Println(document.ToShorterString());
    IList<ICoreMap> sentences = document.Get(typeof(CoreAnnotations.SentencesAnnotation));
    foreach (ICoreMap sentence in sentences)
    {
        // Dump the token-level annotations for every token in the sentence.
        foreach (CoreLabel token in sentence.Get(typeof(CoreAnnotations.TokensAnnotation)))
        {
            string word = token.Get(typeof(CoreAnnotations.TextAnnotation));
            string lemma = token.Get(typeof(CoreAnnotations.LemmaAnnotation));
            string pos = token.Get(typeof(CoreAnnotations.PartOfSpeechAnnotation));
            string ne = token.Get(typeof(CoreAnnotations.NamedEntityTagAnnotation));
            string normalized = token.Get(typeof(CoreAnnotations.NormalizedNamedEntityTagAnnotation));
            @out.Println("token: " + "word=" + word + ", lemma=" + lemma + ", pos=" + pos + ", ne=" + ne + ", normalized=" + normalized);
        }
    }
    @out.Flush();
}
// Verifies HsTasksBlock rendering for the reduce task type ("r"): the
// rendered output must contain the task id, state, and start/finish times.
public virtual void TestHsTasksBlock()
{
    Task task = GetTask(0);
    IDictionary<TaskId, Task> tasks = new Dictionary<TaskId, Task>();
    tasks[task.GetID()] = task;
    AppContext ctx = Org.Mockito.Mockito.Mock<AppContext>();
    AppForTest app = new AppForTest(ctx);
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = Org.Mockito.Mockito.Mock<Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job>();
    Org.Mockito.Mockito.When(job.GetTasks()).ThenReturn(tasks);
    app.SetJob(job);
    TestBlocks.HsTasksBlockForTest block = new TestBlocks.HsTasksBlockForTest(this, app);
    block.AddParameter(AMParams.TaskType, "r");
    PrintWriter pWriter = new PrintWriter(data);
    HtmlBlock.Block html = new BlockForTest(new TestBlocks.HtmlBlockForTest(this), pWriter, 0, false);
    block.Render(html);
    pWriter.Flush();
    // should be printed information about task
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("task_0_0001_r_000000"));
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("SUCCEEDED"));
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("100001"));
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("100011"));
    // NOTE(review): Contains(string.Empty) is always true, so this final
    // assertion can never fail — it looks like a leftover or garbled check;
    // confirm the intended expected substring.
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains(string.Empty));
}
/// <summary>Startup: start ZK.</summary>
/// <remarks>
/// Startup: start ZK. It is only after this that
/// the binding information is valid.
/// </remarks>
/// <exception cref="System.Exception"/>
protected override void ServiceStart()
{
    SetupSecurity();
    // Build an in-process ZooKeeper server using dataDir for both
    // transaction logs and snapshots.
    ZooKeeperServer zkServer = new ZooKeeperServer();
    FileTxnSnapLog ftxn = new FileTxnSnapLog(dataDir, dataDir);
    zkServer.SetTxnLogFactory(ftxn);
    zkServer.SetTickTime(tickTime);
    Log.Info("Starting Local Zookeeper service");
    // NOTE(review): -1 presumably means "no connection limit" — confirm
    // against the ServerCnxnFactory.Configure contract.
    factory = ServerCnxnFactory.CreateFactory();
    factory.Configure(GetAddress(port), -1);
    factory.Startup(zkServer);
    string connectString = GetConnectionString();
    Log.Info("In memory ZK started at {}\n", connectString);
    if (Log.IsDebugEnabled())
    {
        // Dump the server configuration only when debug logging is enabled.
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        zkServer.DumpConf(pw);
        pw.Flush();
        Log.Debug(sw.ToString());
    }
    // Publish binding information so clients can locate this ensemble.
    binding = new BindingInformation();
    binding.ensembleProvider = new FixedEnsembleProvider(connectString);
    binding.description = GetName() + " reachable at \"" + connectString + "\"";
    AddDiagnostics(binding.description);
    // finally: set the binding information in the config
    GetConfig().Set(KeyRegistryZkQuorum, connectString);
}
// Renders a small page with the Hamlet DSL against a mocked writer and
// verifies the expected HTML fragments were emitted. The bare ".()" calls
// are Sharpen's rendering of Hamlet's Java "_()" end-tag method — the <p>
// element is deliberately left unclosed (implied end tag), hence the final
// Never() verification.
public virtual void TestHamlet()
{
    Org.Apache.Hadoop.Yarn.Webapp.Hamlet.Hamlet h = NewHamlet().Title("test").H1("heading 1").P("#id.class").B("hello").Em("world!").().Div("#footer").("Brought to you by").A("http://hostname/", "Somebody").();
    PrintWriter @out = h.GetWriter();
    @out.Flush();
    // All opened elements must have been balanced back to nesting level 0.
    NUnit.Framework.Assert.AreEqual(0, h.nestLevel);
    Org.Mockito.Mockito.Verify(@out).Write("<title");
    Org.Mockito.Mockito.Verify(@out).Write("test");
    Org.Mockito.Mockito.Verify(@out).Write("</title>");
    Org.Mockito.Mockito.Verify(@out).Write("<h1");
    Org.Mockito.Mockito.Verify(@out).Write("heading 1");
    Org.Mockito.Mockito.Verify(@out).Write("</h1>");
    Org.Mockito.Mockito.Verify(@out).Write("<p");
    Org.Mockito.Mockito.Verify(@out).Write(" id=\"id\"");
    Org.Mockito.Mockito.Verify(@out).Write(" class=\"class\"");
    Org.Mockito.Mockito.Verify(@out).Write("<b");
    Org.Mockito.Mockito.Verify(@out).Write("hello");
    Org.Mockito.Mockito.Verify(@out).Write("</b>");
    Org.Mockito.Mockito.Verify(@out).Write("<em");
    Org.Mockito.Mockito.Verify(@out).Write("world!");
    Org.Mockito.Mockito.Verify(@out).Write("</em>");
    Org.Mockito.Mockito.Verify(@out).Write("<div");
    Org.Mockito.Mockito.Verify(@out).Write(" id=\"footer\"");
    Org.Mockito.Mockito.Verify(@out).Write("Brought to you by");
    Org.Mockito.Mockito.Verify(@out).Write("<a");
    Org.Mockito.Mockito.Verify(@out).Write(" href=\"http://hostname/\"");
    Org.Mockito.Mockito.Verify(@out).Write("Somebody");
    Org.Mockito.Mockito.Verify(@out).Write("</a>");
    Org.Mockito.Mockito.Verify(@out).Write("</div>");
    // <p> uses an implied closing tag, so "</p>" must never be written.
    Org.Mockito.Mockito.Verify(@out, Org.Mockito.Mockito.Never()).Write("</p>");
}
/// <summary>
/// Save the current tasks to the device.
/// </summary>
/// <remarks>
/// Example of MAM policy - allow saving to device.
/// A manual check of the current MAM policy must be performed to determine whether or not saving to the device is allowed.
/// NOTE: If the user's policy asks the app to encrypt files, the output of this process will also be encrypted.
/// </remarks>
/// <param name="doc">The formatted CSV string representation of the current tasks.</param>
public void Save(string doc)
{
    // MAM policy gate: bail out early if local save is not permitted.
    if (!MAMPolicyManager.GetPolicy(Application.Context).GetIsSaveToLocationAllowed(SaveLocation.Local, Authenticator.User))
    {
        Toast.MakeText(Application.Context, Resource.String.err_not_allowed, ToastLength.Long).Show();
        return;
    }
    // Confirm we're allowed to save to this device, ask for permission if not.
    ConfirmWritePermission();
    // Get the default location for the export.
    File exportFile = new File(Android.OS.Environment.ExternalStorageDirectory, "tasks.csv");
    // Now try to write the document to their device. The writer is closed in
    // all paths: the original flushed but never closed it, leaking the
    // underlying file handle.
    PrintWriter writer = null;
    try
    {
        writer = new PrintWriter(exportFile);
        writer.Append(doc);
        writer.Flush();
    }
    catch (IOException e)
    {
        Toast.MakeText(Application.Context, e.Message, ToastLength.Long).Show();
        return;
    }
    finally
    {
        if (writer != null)
        {
            writer.Close();
        }
    }
    // And try to open it. Will be blocked by MAM if necessary
    Toast.MakeText(Application.Context, Application.Context.GetString(Resource.String.save_success, exportFile.Path), ToastLength.Short).Show();
    OpenFile(exportFile);
}
/// <summary>Start a job to compute sigma</summary>
/// <exception cref="System.IO.IOException"/>
private void Compute(string name, Summation sigma)
{
    // A summation that already carries a value must not be recomputed.
    if (sigma.GetValue() != null)
    {
        throw new IOException("sigma.getValue() != null, sigma=" + sigma);
    }
    //setup remote directory
    FileSystem fs = FileSystem.Get(GetConf());
    Path dir = fs.MakeQualified(new Path(parameters.remoteDir, name));
    // If the directory already exists, another worker owns this summation;
    // skip it.
    if (!Org.Apache.Hadoop.Examples.PI.Util.CreateNonexistingDirectory(fs, dir))
    {
        return;
    }
    //setup a job
    Job job = CreateJob(name, sigma);
    Path outdir = new Path(dir, "out");
    FileOutputFormat.SetOutputPath(job, outdir);
    //start a map/reduce job
    string startmessage = "steps/parts = " + sigma.E.GetSteps() + "/" + parameters.nParts + " = " + Org.Apache.Hadoop.Examples.PI.Util.Long2string(sigma.E.GetSteps() / parameters.nParts);
    Org.Apache.Hadoop.Examples.PI.Util.RunJob(name, job, parameters.machine, startmessage, timer);
    IList<TaskResult> results = Org.Apache.Hadoop.Examples.PI.Util.ReadJobOutputs(fs, outdir);
    Org.Apache.Hadoop.Examples.PI.Util.WriteResults(name, results, fs, parameters.remoteDir);
    // Remote working directory is no longer needed once outputs are copied.
    fs.Delete(dir, true);
    //combine results
    IList<TaskResult> combined = Org.Apache.Hadoop.Examples.PI.Util.Combine(results);
    PrintWriter @out = Org.Apache.Hadoop.Examples.PI.Util.CreateWriter(parameters.localDir, name);
    try
    {
        foreach (TaskResult r in combined)
        {
            string s = TaskResult2string(name, r);
            @out.WriteLine(s);
            @out.Flush();
            // NOTE(review): the token below appears garbled in the source
            // (likely originally Util.@out.Println(s)) — confirm against the
            // upstream file.
            [email protected](s);
        }
    }
    finally
    {
        @out.Close();
    }
    // A single combined result that is mutually contained with sigma carries
    // the final value.
    if (combined.Count == 1)
    {
        Summation s = combined[0].GetElement();
        if (sigma.Contains(s) && s.Contains(sigma))
        {
            sigma.SetValue(s.GetValue());
        }
    }
}
/// <exception cref="System.IO.IOException"></exception>
/// <exception cref="NGit.Api.Errors.NoFilepatternException"></exception>
/// <exception cref="NGit.Api.Errors.NoHeadException"></exception>
/// <exception cref="NGit.Api.Errors.NoMessageException"></exception>
/// <exception cref="NGit.Api.Errors.ConcurrentRefUpdateException"></exception>
/// <exception cref="NGit.Api.Errors.JGitInternalException"></exception>
/// <exception cref="NGit.Api.Errors.WrongRepositoryStateException"></exception>
// Builds a test repository: an initial commit, a committed file a.txt, an
// uncommitted index modification of a.txt, and one untracked file.
public virtual void SetupRepository()
{
    // create initial commit
    git = new Git(db);
    initialCommit = git.Commit().SetMessage("initial commit").Call();
    // create file
    indexFile = new FilePath(db.WorkTree, "a.txt");
    FileUtils.CreateNewFile(indexFile);
    PrintWriter writer = new PrintWriter(indexFile);
    writer.Write("content");
    writer.Flush();
    // add file and commit it
    git.Add().AddFilepattern("a.txt").Call();
    git.Commit().SetMessage("adding a.txt").Call();
    // modify file and add to index
    // NOTE(review): the same writer is deliberately reused after the commit;
    // Close() flushes "new content" and releases the handle. The statement
    // order here is load-bearing for the repository state being built.
    writer.Write("new content");
    writer.Close();
    git.Add().AddFilepattern("a.txt").Call();
    // create a file not added to the index
    untrackedFile = new FilePath(db.WorkTree, "notAddedToIndex.txt");
    FileUtils.CreateNewFile(untrackedFile);
    PrintWriter writer2 = new PrintWriter(untrackedFile);
    writer2.Write("content");
    writer2.Close();
}
// Renders the labeled tokens into an in-memory buffer via PrintAnswers and
// returns the resulting text.
public virtual string GetAnswers(IList<In> l, PlainTextDocumentReaderAndWriter.OutputStyle outputStyle, bool preserveSpacing)
{
    StringWriter buffer = new StringWriter();
    PrintWriter printer = new PrintWriter(buffer);
    PrintAnswers(l, printer, outputStyle, preserveSpacing);
    // Flush so everything PrintAnswers wrote reaches the buffer.
    printer.Flush();
    return buffer.ToString();
}
// Verifies that content inside <pre> suppresses Hamlet's usual indentation:
// exactly 5 indent operations should have been counted after rendering.
// The bare ".()" calls are Sharpen's rendering of Hamlet's "_()" method.
public virtual void TestPreformatted()
{
    Org.Apache.Hadoop.Yarn.Webapp.Hamlet.Hamlet h = NewHamlet().Div().I("inline before pre").Pre().("pre text1\npre text2").I("inline in pre").("pre text after inline").().I("inline after pre").();
    PrintWriter @out = h.GetWriter();
    @out.Flush();
    NUnit.Framework.Assert.AreEqual(5, h.indents);
}
// Called at the end of the XML document: echoes any text still sitting in
// the transform buffer (escaped) and flushes the output writer.
public override void EndDocument()
{
    // Theoretically nothing should remain buffered after the last closing
    // tag, but echoing leftovers is safer than silently dropping them.
    string leftover = XMLUtils.EscapeXML(textToBeTransformed.ToString());
    outWriter.Print(leftover);
    // Flush explicitly — no other code path flushes this writer.
    outWriter.Flush();
}
/// <summary>Writes out data from this Object to the Writer w.</summary>
/// <param name="w">Where output is written</param>
/// <exception cref="System.IO.IOException">If data can't be written</exception>
public virtual void WriteData(TextWriter w)
{
    PrintWriter printer = new PrintWriter(w);
    // One binary rule per output line.
    foreach (BinaryRule rule in this)
    {
        printer.Println(rule.ToString(index));
    }
    printer.Flush();
}
// Dumps the effective tagger configuration, one "key = value" line per
// property. The file-related key printed depends on the current mode
// (train/dump vs tag vs test).
private void Dump(PrintWriter pw)
{
    pw.Println(" model = " + GetProperty("model"));
    pw.Println(" arch = " + GetProperty("arch"));
    pw.Println(" wordFunction = " + GetProperty("wordFunction"));
    if (this.GetMode() == TaggerConfig.Mode.Train || this.GetMode() == TaggerConfig.Mode.Dump)
    {
        pw.Println(" trainFile = " + GetProperty("file"));
    }
    else
    {
        if (this.GetMode() == TaggerConfig.Mode.Tag)
        {
            pw.Println(" textFile = " + GetProperty("file"));
        }
        else
        {
            if (this.GetMode() == TaggerConfig.Mode.Test)
            {
                pw.Println(" testFile = " + GetProperty("file"));
            }
        }
    }
    pw.Println(" closedClassTags = " + GetProperty("closedClassTags"));
    pw.Println(" closedClassTagThreshold = " + GetProperty("closedClassTagThreshold"));
    pw.Println(" curWordMinFeatureThresh = " + GetProperty("curWordMinFeatureThresh"));
    pw.Println(" debug = " + GetProperty("debug"));
    pw.Println(" debugPrefix = " + GetProperty("debugPrefix"));
    pw.Println(" " + TagSeparatorProperty + " = " + GetProperty(TagSeparatorProperty));
    pw.Println(" " + EncodingProperty + " = " + GetProperty(EncodingProperty));
    pw.Println(" iterations = " + GetProperty("iterations"));
    pw.Println(" lang = " + GetProperty("lang"));
    pw.Println(" learnClosedClassTags = " + GetProperty("learnClosedClassTags"));
    pw.Println(" minFeatureThresh = " + GetProperty("minFeatureThresh"));
    pw.Println(" openClassTags = " + GetProperty("openClassTags"));
    pw.Println("rareWordMinFeatureThresh = " + GetProperty("rareWordMinFeatureThresh"));
    pw.Println(" rareWordThresh = " + GetProperty("rareWordThresh"));
    pw.Println(" search = " + GetProperty("search"));
    pw.Println(" sgml = " + GetProperty("sgml"));
    pw.Println(" sigmaSquared = " + GetProperty("sigmaSquared"));
    pw.Println(" regL1 = " + GetProperty("regL1"));
    pw.Println(" tagInside = " + GetProperty("tagInside"));
    pw.Println(" tokenize = " + GetProperty("tokenize"));
    pw.Println(" tokenizerFactory = " + GetProperty("tokenizerFactory"));
    pw.Println(" tokenizerOptions = " + GetProperty("tokenizerOptions"));
    // NOTE(review): in the source file the literal below is split across a
    // physical line break — the original (upstream) padding inside the
    // "verbose" label was likely alignment whitespace. Confirm against the
    // upstream file before relying on exact output formatting.
    pw.Println(" 
verbose = " + GetProperty("verbose"));
    pw.Println(" verboseResults = " + GetProperty("verboseResults"));
    pw.Println(" veryCommonWordThresh = " + GetProperty("veryCommonWordThresh"));
    pw.Println(" xmlInput = " + GetProperty("xmlInput"));
    pw.Println(" outputFile = " + GetProperty("outputFile"));
    pw.Println(" outputFormat = " + GetProperty("outputFormat"));
    pw.Println(" outputFormatOptions = " + GetProperty("outputFormatOptions"));
    pw.Println(" nthreads = " + GetProperty("nthreads"));
    pw.Flush();
}
/// <summary>Writes out data from this Object.</summary>
/// <param name="w">Data is written to this Writer</param>
public virtual void WriteData(TextWriter w)
{
    PrintWriter printer = new PrintWriter(w);
    // all lines have one rule per line
    foreach (UnaryRule rule in this)
    {
        printer.Println(rule.ToString(index));
    }
    printer.Flush();
}
// Verifies that two <script> elements and one <style> element emit their
// type attributes the expected number of times on the mocked writer.
public virtual void TestScriptStyle()
{
    Org.Apache.Hadoop.Yarn.Webapp.Hamlet.Hamlet h =
        NewHamlet().Script("a.js").Script("b.js").Style("h1 { font-size: 1.2em }");
    PrintWriter writer = h.GetWriter();
    writer.Flush();
    // All elements must be balanced back to nesting level 0.
    NUnit.Framework.Assert.AreEqual(0, h.nestLevel);
    Org.Mockito.Mockito.Verify(writer, Org.Mockito.Mockito.Times(2)).Write(" type=\"text/javascript\"");
    Org.Mockito.Mockito.Verify(writer).Write(" type=\"text/css\"");
}
// Renders the MainView through a mock injector and verifies both sub-views
// wrote their text, plus the expected number of emitted newlines.
public virtual void TestSubView()
{
    Injector injector = WebAppTests.CreateMockInjector(this);
    injector.GetInstance<TestSubViews.MainView>().Render();
    PrintWriter @out = injector.GetInstance<HttpServletResponse>().GetWriter();
    @out.Flush();
    Org.Mockito.Mockito.Verify(@out).Write("sub1 text");
    Org.Mockito.Mockito.Verify(@out).Write("sub2 text");
    // 16 is the total newline count produced by the full rendered page.
    Org.Mockito.Mockito.Verify(@out, Org.Mockito.Mockito.Times(16)).WriteLine();
}
/// <summary>Print an Annotation to an output stream.</summary>
/// <remarks>
/// Print an Annotation to an output stream.
/// The target OutputStream is assumed to already by buffered.
/// </remarks>
/// <param name="doc"/>
/// <param name="target"/>
/// <param name="options"/>
/// <exception cref="System.IO.IOException"/>
public override void Print(Annotation doc, OutputStream target, AnnotationOutputter.Options options)
{
    PrintWriter writer = new PrintWriter(IOUtils.EncodedOutputStreamWriter(target, options.encoding));
    // vv A bunch of nonsense to get tokens vv
    if (doc.Get(typeof(CoreAnnotations.SentencesAnnotation)) != null)
    {
        foreach (ICoreMap sentence in doc.Get(typeof(CoreAnnotations.SentencesAnnotation)))
        {
            if (sentence.Get(typeof(CoreAnnotations.TokensAnnotation)) != null)
            {
                IList<CoreLabel> tokens = sentence.Get(typeof(CoreAnnotations.TokensAnnotation));
                SemanticGraph depTree = sentence.Get(typeof(SemanticGraphCoreAnnotations.BasicDependenciesAnnotation));
                for (int i = 0; i < tokens.Count; ++i)
                {
                    // ^^ end nonsense to get tokens ^^
                    // Try to get the incoming dependency edge
                    int head = -1;
                    string deprel = null;
                    if (depTree != null)
                    {
                        // NOTE(review): Map(null) is a Sharpen artifact of a
                        // Java lambda that was not converted — confirm the
                        // intended mapping (presumably root -> index).
                        ICollection<int> rootSet = depTree.GetRoots().Stream().Map(null).Collect(Collectors.ToSet());
                        // Dependency nodes are 1-indexed, hence i + 1.
                        IndexedWord node = depTree.GetNodeByIndexSafe(i + 1);
                        if (node != null)
                        {
                            IList<SemanticGraphEdge> edgeList = depTree.GetIncomingEdgesSorted(node);
                            if (!edgeList.IsEmpty())
                            {
                                // A basic-dependencies tree has exactly one governor per node.
                                System.Diagnostics.Debug.Assert(edgeList.Count == 1);
                                head = edgeList[0].GetGovernor().Index();
                                deprel = edgeList[0].GetRelation().ToString();
                            }
                            else
                            {
                                if (rootSet.Contains(i + 1))
                                {
                                    head = 0;
                                    deprel = "ROOT";
                                }
                            }
                        }
                    }
                    // Write the token
                    writer.Print(Line(i + 1, tokens[i], head, deprel));
                    writer.Println();
                }
            }
            writer.Println();
        }
    }
    // extra blank line at end of sentence
    writer.Flush();
}
// Verifies TasksBlock rendering for a map task: the page must contain the
// task id, progress, state, timings, and must HTML-escape the status
// string's newline (raw "\n*" absent, escaped "\\n*" present).
public virtual void TestTasksBlock()
{
    ApplicationId appId = ApplicationIdPBImpl.NewInstance(0, 1);
    JobId jobId = new JobIdPBImpl();
    jobId.SetId(0);
    jobId.SetAppId(appId);
    TaskId taskId = new TaskIdPBImpl();
    taskId.SetId(0);
    taskId.SetTaskType(TaskType.Map);
    taskId.SetJobId(jobId);
    Task task = Org.Mockito.Mockito.Mock<Task>();
    Org.Mockito.Mockito.When(task.GetID()).ThenReturn(taskId);
    TaskReport report = Org.Mockito.Mockito.Mock<TaskReport>();
    Org.Mockito.Mockito.When(report.GetProgress()).ThenReturn(0.7f);
    Org.Mockito.Mockito.When(report.GetTaskState()).ThenReturn(TaskState.Succeeded);
    Org.Mockito.Mockito.When(report.GetStartTime()).ThenReturn(100001L);
    Org.Mockito.Mockito.When(report.GetFinishTime()).ThenReturn(100011L);
    // Status deliberately contains a newline to exercise escaping below.
    Org.Mockito.Mockito.When(report.GetStatus()).ThenReturn("Dummy Status \n*");
    Org.Mockito.Mockito.When(task.GetReport()).ThenReturn(report);
    Org.Mockito.Mockito.When(task.GetType()).ThenReturn(TaskType.Map);
    IDictionary<TaskId, Task> tasks = new Dictionary<TaskId, Task>();
    tasks[taskId] = task;
    AppContext ctx = Org.Mockito.Mockito.Mock<AppContext>();
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = Org.Mockito.Mockito.Mock<Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job>();
    Org.Mockito.Mockito.When(job.GetTasks()).ThenReturn(tasks);
    Org.Apache.Hadoop.Mapreduce.V2.App.Webapp.App app = new Org.Apache.Hadoop.Mapreduce.V2.App.Webapp.App(ctx);
    app.SetJob(job);
    TasksBlockForTest taskBlock = new TasksBlockForTest(app);
    taskBlock.AddParameter(AMParams.TaskType, "m");
    PrintWriter pWriter = new PrintWriter(data);
    HtmlBlock.Block html = new BlockForTest(new TestBlocks.HtmlBlockForTest(this), pWriter, 0, false);
    taskBlock.Render(html);
    pWriter.Flush();
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("task_0_0001_m_000000"));
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("70.00"));
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("SUCCEEDED"));
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("100001"));
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("100011"));
    // The raw newline must not survive; the escaped form must be present.
    NUnit.Framework.Assert.IsFalse(data.ToString().Contains("Dummy Status \n*"));
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("Dummy Status \\n*"));
}
// Verifies that embedded sub-views render their class names into the page.
// The bare ".()" / ".(typeof(...))" calls are Sharpen's rendering of
// Hamlet's "_()" overloads.
public virtual void TestSubViews()
{
    Org.Apache.Hadoop.Yarn.Webapp.Hamlet.Hamlet h = NewHamlet().Title("test sub-views").Div("#view1").(typeof(TestHamlet.TestView1)).().Div("#view2").(typeof(TestHamlet.TestView2)).();
    PrintWriter @out = h.GetWriter();
    @out.Flush();
    NUnit.Framework.Assert.AreEqual(0, h.nestLevel);
    Org.Mockito.Mockito.Verify(@out).Write("[" + typeof(TestHamlet.TestView1).FullName + "]");
    Org.Mockito.Mockito.Verify(@out).Write("[" + typeof(TestHamlet.TestView2).FullName + "]");
}
// Runs BlockManager.MetaSave for the given NameNode, dumping to stderr.
// MetaSave must execute under the namesystem write lock, which is always
// released in the finally block.
private void DoMetasave(NameNode nn2)
{
    nn2.GetNamesystem().WriteLock();
    try
    {
        PrintWriter printer = new PrintWriter(System.Console.Error);
        nn2.GetNamesystem().GetBlockManager().MetaSave(printer);
        printer.Flush();
    }
    finally
    {
        nn2.GetNamesystem().WriteUnlock();
    }
}
// Verifies that enum-set attribute values ($media, $rel) are rendered as
// space/comma separated attribute strings. The bare ".()" calls are
// Sharpen's rendering of Hamlet's "_()" end-tag method.
public virtual void TestEnumAttrs()
{
    Org.Apache.Hadoop.Yarn.Webapp.Hamlet.Hamlet h = NewHamlet().Meta_http("Content-type", "text/html; charset=utf-8").Title("test enum attrs").Link().$rel("stylesheet").$media(EnumSet.Of(HamletSpec.Media.screen, HamletSpec.Media.print)).$type("text/css").$href("style.css").().Link().$rel(EnumSet.Of(HamletSpec.LinkType.index, HamletSpec.LinkType.start)).$href("index.html").();
    h.Div("#content").("content").();
    PrintWriter @out = h.GetWriter();
    @out.Flush();
    NUnit.Framework.Assert.AreEqual(0, h.nestLevel);
    Org.Mockito.Mockito.Verify(@out).Write(" media=\"screen, print\"");
    Org.Mockito.Mockito.Verify(@out).Write(" rel=\"start index\"");
}
// Writes the graph's DOT representation to dir/name.dot, creating the
// directory if necessary. Returns false (never throws) on any failure.
public static bool WriteFile(TransducerGraph graph, string dir, string name)
{
    try
    {
        File baseDir = new File(dir);
        // Make sure the target exists and really is a directory.
        if (baseDir.Exists())
        {
            if (!baseDir.IsDirectory())
            {
                return false;
            }
        }
        else if (!baseDir.Mkdirs())
        {
            return false;
        }
        File file = new File(baseDir, name + ".dot");
        try
        {
            PrintWriter w = new PrintWriter(new FileWriter(file));
            string dotString = graph.AsDOTString();
            w.Print(dotString);
            w.Flush();
            w.Close();
        }
        catch (FileNotFoundException)
        {
            log.Info("Failed to open file in writeToDOTfile: " + file);
            return false;
        }
        catch (IOException)
        {
            log.Info("Failed to open file in writeToDOTfile: " + file);
            return false;
        }
        return true;
    }
    catch (Exception e)
    {
        // Last-ditch guard: log the stack trace and report failure.
        Sharpen.Runtime.PrintStackTrace(e);
        return false;
    }
}
/// <summary>Lists the nodes matching the given node states</summary>
/// <param name="nodeStates"/>
/// <exception cref="Org.Apache.Hadoop.Yarn.Exceptions.YarnException"/>
/// <exception cref="System.IO.IOException"/>
private void ListClusterNodes(ICollection<NodeState> nodeStates)
{
    PrintWriter writer = new PrintWriter(new OutputStreamWriter(sysout, Sharpen.Extensions.GetEncoding("UTF-8")));
    IList<NodeReport> nodesReport = client.GetNodeReports(Sharpen.Collections.ToArray(nodeStates, new NodeState[0]));
    // Header, column titles, then one formatted row per node.
    writer.WriteLine("Total Nodes:" + nodesReport.Count);
    writer.Printf(NodesPattern, "Node-Id", "Node-State", "Node-Http-Address", "Number-of-Running-Containers");
    foreach (NodeReport report in nodesReport)
    {
        writer.Printf(NodesPattern, report.GetNodeId(), report.GetNodeState(), report.GetHttpAddress(), report.GetNumContainers());
    }
    writer.Flush();
}
/// <summary>Writes out data from this Object to the Writer w.</summary>
/// <remarks>
/// Writes out data from this Object to the Writer w. Rules are separated by
/// newline, and rule elements are delimited by \t.
/// </remarks>
/// <exception cref="System.IO.IOException"/>
public virtual void WriteData(TextWriter w)
{
    PrintWriter printer = new PrintWriter(w);
    // Seen-word counts first, then unseen-word-model counts, then the
    // smoothing parameters.
    foreach (IntTaggedWord seen in seenCounter.KeySet())
    {
        printer.Println(seen.ToLexicalEntry(wordIndex, tagIndex) + " SEEN " + seenCounter.GetCount(seen));
    }
    foreach (IntTaggedWord unseen in GetUnknownWordModel().UnSeenCounter().KeySet())
    {
        printer.Println(unseen.ToLexicalEntry(wordIndex, tagIndex) + " UNSEEN " + GetUnknownWordModel().UnSeenCounter().GetCount(unseen));
    }
    for (int i = 0; i < smooth.Length; i++)
    {
        printer.Println("smooth[" + i + "] = " + smooth[i]);
    }
    printer.Flush();
}
/// <exception cref="System.IO.IOException"/>
// Writes the given script text to the node-health-script file and sets its
// permissions (writable/readable always, executable per the flag).
private void WriteNodeHealthScriptFile(string scriptStr, bool setExecutable)
{
    PrintWriter pw = null;
    try
    {
        FileUtil.SetWritable(nodeHealthscriptFile, true);
        FileUtil.SetReadable(nodeHealthscriptFile, true);
        pw = new PrintWriter(new FileOutputStream(nodeHealthscriptFile));
        pw.WriteLine(scriptStr);
        pw.Flush();
    }
    finally
    {
        // Guard against pw being null when the stream/writer constructor
        // throws: the original unconditionally called pw.Close(), which
        // would raise a NullReferenceException in the finally block and
        // mask the real failure.
        if (pw != null)
        {
            pw.Close();
        }
    }
    FileUtil.SetExecutable(nodeHealthscriptFile, setExecutable);
}
/// <exception cref="System.IO.IOException"/>
// Writes each sentence in CoNLL-U form: the full semantic graph when one is
// available, otherwise POS-only annotations.
public override void Print(Annotation doc, OutputStream target, AnnotationOutputter.Options options)
{
    PrintWriter writer = new PrintWriter(IOUtils.EncodedOutputStreamWriter(target, options.encoding));
    IList<ICoreMap> sentences = doc.Get(typeof(CoreAnnotations.SentencesAnnotation));
    foreach (ICoreMap sentence in sentences)
    {
        SemanticGraph graph = sentence.Get(typeof(SemanticGraphCoreAnnotations.BasicDependenciesAnnotation));
        if (graph == null)
        {
            writer.Print(conllUWriter.PrintPOSAnnotations(sentence));
        }
        else
        {
            writer.Print(conllUWriter.PrintSemanticGraph(graph));
        }
    }
    writer.Flush();
}
/// <summary>Lists the application attempts matching the given applicationid</summary>
/// <param name="applicationId"/>
/// <exception cref="Org.Apache.Hadoop.Yarn.Exceptions.YarnException"/>
/// <exception cref="System.IO.IOException"/>
private void ListApplicationAttempts(string applicationId)
{
    PrintWriter writer = new PrintWriter(new OutputStreamWriter(sysout, Sharpen.Extensions.GetEncoding("UTF-8")));
    IList<ApplicationAttemptReport> appAttemptsReport = client.GetApplicationAttempts(ConverterUtils.ToApplicationId(applicationId));
    // Header, column titles, then one formatted row per attempt.
    writer.WriteLine("Total number of application attempts " + ":" + appAttemptsReport.Count);
    writer.Printf(ApplicationAttemptsPattern, "ApplicationAttempt-Id", "State", "AM-Container-Id", "Tracking-URL");
    foreach (ApplicationAttemptReport attempt in appAttemptsReport)
    {
        writer.Printf(ApplicationAttemptsPattern, attempt.GetApplicationAttemptId(), attempt.GetYarnApplicationAttemptState(), attempt.GetAMContainerId().ToString(), attempt.GetTrackingUrl());
    }
    writer.Flush();
}
// Renders a three-row table with the Hamlet DSL and verifies the table,
// cell, and row closing tags were written to the mocked writer.
public virtual void TestTable()
{
    Org.Apache.Hadoop.Yarn.Webapp.Hamlet.Hamlet h = NewHamlet().Title("test table").Link("style.css");
    HamletSpec.TABLE t = h.Table("#id");
    for (int i = 0; i < 3; ++i)
    {
        // ".()" is Sharpen's rendering of Hamlet's "_()" end-tag method.
        t.Tr().Td("1").Td("2").();
    }
    t.();
    PrintWriter @out = h.GetWriter();
    @out.Flush();
    NUnit.Framework.Assert.AreEqual(0, h.nestLevel);
    Org.Mockito.Mockito.Verify(@out).Write("<table");
    Org.Mockito.Mockito.Verify(@out).Write("</table>");
    Org.Mockito.Mockito.Verify(@out, Org.Mockito.Mockito.AtLeast(1)).Write("</td>");
    Org.Mockito.Mockito.Verify(@out, Org.Mockito.Mockito.AtLeast(1)).Write("</tr>");
}
/// <exception cref="System.IO.IOException"></exception>
/// <exception cref="NGit.Api.Errors.JGitInternalException"></exception>
/// <exception cref="NGit.Api.Errors.GitAPIException"></exception>
// Builds a test repository with an initial commit, a committed a.txt and
// nested dir/b.txt, uncommitted index modifications of both, and one
// untracked file. The writer reuse and statement order are load-bearing.
public virtual void SetupRepository()
{
    // create initial commit
    git = new Git(db);
    initialCommit = git.Commit().SetMessage("initial commit").Call();
    // create nested file
    FilePath dir = new FilePath(db.WorkTree, "dir");
    FileUtils.Mkdir(dir);
    FilePath nestedFile = new FilePath(dir, "b.txt");
    FileUtils.CreateNewFile(nestedFile);
    PrintWriter nesterFileWriter = new PrintWriter(nestedFile);
    nesterFileWriter.Write("content");
    nesterFileWriter.Flush();
    // create file
    indexFile = new FilePath(db.WorkTree, "a.txt");
    FileUtils.CreateNewFile(indexFile);
    PrintWriter writer = new PrintWriter(indexFile);
    writer.Write("content");
    writer.Flush();
    // add file and commit it
    git.Add().AddFilepattern("dir").AddFilepattern("a.txt").Call();
    secondCommit = git.Commit().SetMessage("adding a.txt and dir/b.txt").Call();
    // Capture the index entry for a.txt before the later modification.
    prestage = DirCache.Read(db.GetIndexFile(), db.FileSystem).GetEntry(indexFile.GetName());
    // modify file and add to index
    // NOTE(review): both writers are deliberately reused after the commit;
    // Close() flushes "new content" and releases the handles.
    writer.Write("new content");
    writer.Close();
    nesterFileWriter.Write("new content");
    nesterFileWriter.Close();
    git.Add().AddFilepattern("a.txt").AddFilepattern("dir").Call();
    // create a file not added to the index
    untrackedFile = new FilePath(db.WorkTree, "notAddedToIndex.txt");
    FileUtils.CreateNewFile(untrackedFile);
    PrintWriter writer2 = new PrintWriter(untrackedFile);
    writer2.Write("content");
    writer2.Close();
}
// Handy dandy for dumping an action list during debugging
// Parses a subset of the command-line flags (same syntax as main()) and
// renders the action list to a string via SwfxPrinter.
public static System.String actionListToString(ActionList al, System.String[] args)
{
    // cut and paste arg code from main() could be better but it works
    bool showActions = true;
    bool showOffset = false;
    bool showDebugSource = false;
    bool decompile = false;
    bool defunc = true;
    // NOTE(review): tabbedGlyphs defaults to true AND the "-tabbedGlyphs"
    // flag also sets it to true, so the flag is effectively a no-op here —
    // confirm whether the default was meant to be false.
    bool tabbedGlyphs = true;
    int index = 0;
    while (args != null && (index < args.Length) && (args[index].StartsWith("-")))
    {
        if (args[index].Equals("-decompile"))
        {
            decompile = true;
            ++index;
        }
        else if (args[index].Equals("-nofunctions"))
        {
            defunc = false;
            ++index;
        }
        else if (args[index].Equals("-asm"))
        {
            decompile = false;
            ++index;
        }
        else if (args[index].Equals("-noactions"))
        {
            showActions = false;
            ++index;
        }
        else if (args[index].Equals("-showoffset"))
        {
            showOffset = true;
            ++index;
        }
        else if (args[index].Equals("-showdebugsource"))
        {
            showDebugSource = true;
            ++index;
        }
        else if (args[index].ToUpper().Equals("-tabbedGlyphs".ToUpper()))
        {
            tabbedGlyphs = true;
            ++index;
        }
    }
    System.IO.StringWriter sw = new System.IO.StringWriter();
    //UPGRADE_ISSUE: Constructor 'java.io.PrintWriter.PrintWriter' was not converted. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1000_javaioPrintWriterPrintWriter_javaioWriter'"
    // NOTE(review): assigning a PrintWriter to a StreamWriter variable only
    // compiles if this port's PrintWriter derives from StreamWriter —
    // confirm against the converted PrintWriter type.
    System.IO.StreamWriter out_Renamed = new PrintWriter(sw);
    SwfxPrinter printer = new SwfxPrinter(out_Renamed);
    printer.showActions = showActions;
    printer.showOffset = showOffset;
    printer.showDebugSource = showDebugSource;
    printer.decompile = decompile;
    printer.defunc = defunc;
    printer.tabbedGlyphs = tabbedGlyphs;
    printer.printActions(al);
    out_Renamed.Flush();
    return sw.ToString();
}