/// <summary>
/// Verifies that a tab-separated string is split into its three tokens
/// using the tokenizer's default delimiters.
/// </summary>
public void StringIsTokenizedWithDefaultDelimiters()
{
    const string input = "First\tSecond\tThird";
    StringTokenizer tokenizer = new StringTokenizer(input);

    foreach (string expected in new[] { "First", "Second", "Third" })
    {
        Assert.IsTrue(tokenizer.HasMoreTokens());
        Assert.AreEqual(expected, tokenizer.NextToken());
    }
    Assert.IsFalse(tokenizer.HasMoreTokens());
}
/// <summary>
/// Verifies that passing a delimiter to NextToken() switches the delimiter
/// set mid-stream and that tokenizing continues correctly afterwards.
/// </summary>
public void ChangingDelimitersIsHandledCorrectly()
{
    const string input = "First,more\tSecond,Third";
    StringTokenizer tokenizer = new StringTokenizer(input);

    // Default (tab) delimiter keeps the comma inside the first token.
    Assert.IsTrue(tokenizer.HasMoreTokens());
    Assert.AreEqual("First,more", tokenizer.NextToken());

    // Switch to ',' for this call; the remaining text splits on the comma.
    Assert.IsTrue(tokenizer.HasMoreTokens());
    Assert.AreEqual("Second", tokenizer.NextToken(","));

    Assert.IsTrue(tokenizer.HasMoreTokens());
    Assert.AreEqual("Third", tokenizer.NextToken());
    Assert.IsFalse(tokenizer.HasMoreTokens());
}
/// <summary>
/// Verifies that tokens sharing a common prefix ("First"/"Firstly") are
/// kept distinct and returned whole.
/// </summary>
public void RepeatedStringIsTokenizedCorrectly()
{
    const string input = "First\tFirstly\tThird";
    StringTokenizer tokenizer = new StringTokenizer(input);

    foreach (string expected in new[] { "First", "Firstly", "Third" })
    {
        Assert.IsTrue(tokenizer.HasMoreTokens());
        Assert.AreEqual(expected, tokenizer.NextToken());
    }
    Assert.IsFalse(tokenizer.HasMoreTokens());
}
/// <summary>
/// Verifies that a delimiter supplied to the constructor (here: comma)
/// is used for tokenizing instead of the defaults.
/// </summary>
public void StringIsTokenizedWithSpecifiedDelimiters()
{
    const string input = "First,Second,Third";
    StringTokenizer tokenizer = new StringTokenizer(input, ",");

    foreach (string expected in new[] { "First", "Second", "Third" })
    {
        Assert.IsTrue(tokenizer.HasMoreTokens());
        Assert.AreEqual(expected, tokenizer.NextToken());
    }
    Assert.IsFalse(tokenizer.HasMoreTokens());
}
/// <summary>
/// Verifies that Count reflects the number of tokens still remaining and
/// that querying it does not consume or alter the tokens.
/// </summary>
public void CountIsCorrect()
{
    const string input = "First\tSecond\tThird";
    StringTokenizer tokenizer = new StringTokenizer(input);

    Assert.AreEqual(3, tokenizer.Count);
    tokenizer.NextToken();
    Assert.AreEqual(2, tokenizer.Count);
    tokenizer.NextToken();
    Assert.AreEqual(1, tokenizer.Count);

    string lastToken = tokenizer.NextToken();
    // Asking for the count must not have affected the tokens themselves.
    Assert.AreEqual("Third", lastToken);
    Assert.AreEqual(0, tokenizer.Count);
}
/// <summary>
/// Constructs a SID from its textual representation such as
/// <tt>S-1-5-21-1496946806-2192648263-3843101252-1029</tt>.
/// </summary>
/// <remarks>
/// The general form is S-R-I-S-S..., where R is the revision, I the
/// identifier authority (decimal, or hex when prefixed with "0x") and the
/// remaining components are sub-authorities.
/// </remarks>
/// <exception cref="WinrtCifs.Smb.SmbException">If the string is not a well-formed SID.</exception>
public Sid(string textual)
{
    StringTokenizer st = new StringTokenizer(textual, "-");
    // A SID needs at least the "S" marker, a revision and an identifier authority (S-N-M).
    if (st.CountTokens() < 3 || !st.NextToken().Equals("S"))
    {
        throw new SmbException("Bad textual SID format: " + textual);
    }
    Revision = byte.Parse(st.NextToken());
    string tmp = st.NextToken();
    long id = 0;
    if (tmp.StartsWith("0x"))
    {
        // Bug fix: the "0x" prefix marks a hexadecimal value, but the previous
        // code parsed the digits as decimal (the radix-16 call had been
        // commented out during porting), yielding wrong values or a
        // FormatException for digits a-f. Parse with radix 16 as intended.
        id = System.Convert.ToInt64(Runtime.Substring(tmp, 2), 16);
    }
    else
    {
        id = long.Parse(tmp);
    }
    // Store the 48-bit identifier authority big-endian in 6 bytes.
    IdentifierAuthority = new byte[6];
    for (int i = 5; id > 0; i--)
    {
        IdentifierAuthority[i] = unchecked((byte)(id % 256));
        id >>= 8;
    }
    // Every remaining token is a sub-authority (unsigned 32-bit value).
    SubAuthorityCount = unchecked((byte)st.CountTokens());
    if (SubAuthorityCount > 0)
    {
        SubAuthority = new int[SubAuthorityCount];
        for (int i1 = 0; i1 < SubAuthorityCount; i1++)
        {
            SubAuthority[i1] = (int)(long.Parse(st.NextToken()) & unchecked(0xFFFFFFFFL));
        }
    }
}
/// <summary>
/// Gets the field value.
/// </summary>
/// <param name="field">the fully qualified field name; hierarchy levels are separated by '.'</param>
/// <returns>the field value or null if not found</returns>
public string GetField(string field)
{
    var map = _fields;
    var tk = new StringTokenizer(field, ".");
    if (!tk.HasMoreTokens())
    {
        // Empty field name: nothing to look up.
        return (null);
    }
    // Walk down the nested hashtables one name segment at a time.
    while (true)
    {
        var s = tk.NextToken();
        var obj = map[s];
        if (obj == null)
        {
            return (null);
        }
        if (tk.HasMoreTokens())
        {
            // More segments remain: the current node must be an inner table.
            if (obj is Hashtable)
            {
                map = (Hashtable)obj;
            }
            else
            {
                return (null);
            }
        }
        else
        {
            // Last segment: the node must be a leaf value, not a table.
            if (obj is Hashtable)
            {
                return (null);
            }
            else
            {
                if (((PdfObject)obj).IsString())
                {
                    return (((PdfString)obj).ToUnicodeString());
                }
                else
                {
                    return (PdfName.DecodeName(obj.ToString()));
                }
            }
        }
    }
}
/// <summary>
/// Returns true when at least one line of <paramref name="actual"/> fully
/// matches the regular expression <paramref name="expected"/>.
/// </summary>
public override bool Compare(string actual, string expected)
{
    Pattern pattern = Pattern.Compile(expected);
    StringTokenizer lines = new StringTokenizer(actual, "\n\r");
    bool matched = false;
    // Stop at the first matching line.
    while (!matched && lines.HasMoreTokens())
    {
        Matcher matcher = pattern.Matcher(lines.NextToken());
        matched = matcher.Matches();
    }
    return matched;
}
/// <summary>
/// Compares two dotted numeric version strings component by component.
/// Missing trailing components count as equal only when the extra ones
/// are all zero (so "1.2" == "1.2.0").
/// </summary>
public virtual int Compare(string a, string b)
{
    StringTokenizer aTokens = new StringTokenizer(a, ".");
    StringTokenizer bTokens = new StringTokenizer(b, ".");
    while (aTokens.HasMoreTokens())
    {
        int aPart = Convert.ToInt32(aTokens.NextToken());
        if (!bTokens.HasMoreTokens())
        {
            // a has extra trailing components; any non-zero one makes a greater.
            if (aPart != 0)
            {
                return 1;
            }
            continue;
        }
        int bPart = Convert.ToInt32(bTokens.NextToken());
        if (aPart != bPart)
        {
            return aPart < bPart ? -1 : 1;
        }
    }
    // b has extra trailing components; any non-zero one makes a smaller.
    while (bTokens.HasMoreTokens())
    {
        if (Convert.ToInt32(bTokens.NextToken()) != 0)
        {
            return -1;
        }
    }
    return 0;
}
/// <summary>
/// Parses a sort option string (a subset of Unix sort(1) flags: -n, -r,
/// -nr and -k key specs) into key descriptions.
/// </summary>
/// <param name="option">option string such as "-k1,2 -n"; null or empty means default comparison</param>
public virtual void ParseOption(string option)
{
    if (option == null || option.Equals(string.Empty))
    {
        //we will have only default comparison
        return;
    }
    StringTokenizer args = new StringTokenizer(option);
    // Flags that are not attached to a -k spec accumulate here and are later
    // applied as defaults to key specs that set neither flag themselves.
    KeyFieldHelper.KeyDescription global = new KeyFieldHelper.KeyDescription();
    while (args.HasMoreTokens())
    {
        string arg = args.NextToken();
        if (arg.Equals("-n"))
        {
            global.numeric = true;
        }
        if (arg.Equals("-r"))
        {
            global.reverse = true;
        }
        if (arg.Equals("-nr"))
        {
            global.numeric = true;
            global.reverse = true;
        }
        if (arg.StartsWith("-k"))
        {
            // NOTE: ParseKey may consume additional tokens from args.
            KeyFieldHelper.KeyDescription k = ParseKey(arg, args);
            if (k != null)
            {
                allKeySpecs.AddItem(k);
                keySpecSeen = true;
            }
        }
    }
    foreach (KeyFieldHelper.KeyDescription key in allKeySpecs)
    {
        // Apply the global flags only to key specs that set neither of their own.
        if (!(key.reverse | key.numeric))
        {
            key.reverse = global.reverse;
            key.numeric = global.numeric;
        }
    }
    // No -k specs at all: sort the whole line using the global flags.
    if (allKeySpecs.Count == 0)
    {
        allKeySpecs.AddItem(global);
    }
}
/** Gets the field value.
 * @param field the fully qualified field name; hierarchy levels are separated by '.'
 * @return the field value or <CODE>null</CODE> if not found
 */
virtual public String GetField(String field)
{
    Dictionary<String, Object> map = fields;
    StringTokenizer tk = new StringTokenizer(field, ".");
    if (!tk.HasMoreTokens())
    {
        // Empty field name: nothing to look up.
        return (null);
    }
    // Walk down the nested dictionaries one name segment at a time.
    while (true)
    {
        String s = tk.NextToken();
        Object obj;
        map.TryGetValue(s, out obj);
        if (obj == null)
        {
            return (null);
        }
        if (tk.HasMoreTokens())
        {
            // More segments remain: the current node must be an inner dictionary.
            if (obj is Dictionary<String, Object>)
            {
                map = (Dictionary<String, Object>)obj;
            }
            else
            {
                return (null);
            }
        }
        else
        {
            // Last segment: the node must be a leaf value, not a dictionary.
            if (obj is Dictionary<String, Object>)
            {
                return (null);
            }
            else
            {
                if (((PdfObject)obj).IsString())
                {
                    return (((PdfString)obj).ToUnicodeString());
                }
                else
                {
                    return (PdfName.DecodeName(obj.ToString()));
                }
            }
        }
    }
}
/// <summary>
/// Extracts all "CN=" (Common Name) values from the certificate's subject
/// distinguished name.
/// </summary>
/// <param name="cert">certificate whose subject DN is scanned</param>
/// <returns>array of CN values, or null when the subject contains none</returns>
public static string[] GetCNs(X509Certificate cert)
{
    IList<string> cnList = new List<string>();
    /*
     * Sebastian Hauer's original StrictSSLProtocolSocketFactory used
     * getName() and had the following comment:
     *
     *      Parses a X.500 distinguished name for the value of the
     *     "Common Name" field.  This is done a bit sloppy right
     *     now and should probably be done a bit more according to
     *     <code>RFC 2253</code>.
     *
     * I've noticed that toString() seems to do a better job than
     * getName() on these X500Principal objects, so I'm hoping that
     * addresses Sebastian's concern.
     *
     * For example, getName() gives me this:
     * 1.2.840.113549.1.9.1=#16166a756c6975736461766965734063756263632e636f6d
     *
     * whereas toString() gives me this:
     * [email protected]
     *
     * Looks like toString() even works with non-ascii domain names!
     * I tested it with "花子.co.jp" and it worked fine.
     */
    string subjectPrincipal = cert.GetSubjectX500Principal().ToString();
    // NOTE(review): splitting on ',' does not honour commas escaped inside a
    // CN value (RFC 2253 quoting) — acknowledged as sloppy in the note above.
    StringTokenizer st = new StringTokenizer(subjectPrincipal, ",");
    while (st.HasMoreTokens())
    {
        string tok = st.NextToken();
        int x = tok.IndexOf("CN=");
        if (x >= 0)
        {
            // Everything after "CN=" in this RDN is the common name.
            cnList.AddItem(Runtime.Substring(tok, x + 3));
        }
    }
    if (!cnList.IsEmpty())
    {
        string[] cns = new string[cnList.Count];
        Collections.ToArray(cnList, cns);
        return (cns);
    }
    else
    {
        return (null);
    }
}
/// <summary>
/// This method parses a string with attributes and returns a Properties object.
/// </summary>
/// <param name="str">a string of this form: 'key1:"value1"; key2:"value2";... keyN:"valueN" '
/// (note: pairs are separated by ';', key and value by ':' — the previous doc
/// incorrectly showed '=')</param>
/// <returns>a Properties object; empty when <paramref name="str"/> is null</returns>
public static Properties ParseAttributes(string str)
{
    var result = new Properties();
    if (str == null)
    {
        return (result);
    }
    var keyValuePairs = new StringTokenizer(str, ";");
    while (keyValuePairs.HasMoreTokens())
    {
        // Each pair is "key: value"; malformed entries are skipped silently.
        var keyValuePair = new StringTokenizer(keyValuePairs.NextToken(), ":");
        if (!keyValuePair.HasMoreTokens())
        {
            continue;
        }
        // Fix: the original called Trim() twice; once is sufficient.
        string key = keyValuePair.NextToken().Trim();
        if (!keyValuePair.HasMoreTokens())
        {
            continue;
        }
        string value = keyValuePair.NextToken().Trim();
        // Strip surrounding double quotes, if present.
        if (value.StartsWith("\""))
        {
            value = value.Substring(1);
        }
        if (value.EndsWith("\""))
        {
            value = value.Substring(0, value.Length - 1);
        }
        // Keys are stored lower-cased for case-insensitive lookup.
        result.Add(key.ToLowerInvariant(), value);
    }
    return (result);
}
/// <summary>
/// Emits 3 key-value pairs for counting the word, its length, and the
/// squares of its length.
/// </summary>
/// <remarks>
/// Outputs are (Text, LongWritable): one (Length, len), one (Square, len^2)
/// and one (Count, One) record per token.
/// </remarks>
/// <param name="value">This will be a line of text coming in from our input file.</param>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
protected override void Map(object key, Text value, Mapper.Context context)
{
    StringTokenizer tokens = new StringTokenizer(value.ToString());
    while (tokens.HasMoreTokens())
    {
        string word = tokens.NextToken();
        this.wordLen.Set(word.Length);
        // the square of an integer is an integer...
        this.wordLenSq.Set((long)Math.Pow(word.Length, 2.0));
        context.Write(Length, this.wordLen);
        context.Write(Square, this.wordLenSq);
        context.Write(Count, One);
    }
}
/// <summary>
/// Reads <paramref name="length"/> lines from the file, parsing each into a
/// timestamp plus one scaled double per channel, and returns them as a ValueList.
/// </summary>
private ValueList readDoubleAsListScaled(int length)
{
    ValueList valueList = new ValueList();
    long[] timestamps = new long[length];
    double[][] data = new double[length][];
    for (int sample = 0; sample < length; sample++)
    {
        String line = file.ReadLine();
        StringTokenizer tokenizer = new StringTokenizer(line, separator);
        // First column is the sample timestamp.
        timestamps[sample] = long.Parse(tokenizer.NextToken().Trim());
        data[sample] = new double[channelCount];
        for (int channel = 0; channel < channelCount; channel++)
        {
            // Apply baseline offset and LSB scaling to the raw value.
            data[sample][channel] = (Double.Parse(tokenizer.NextToken().Trim()) + baseline) * lsbValue;
        }
        currentSample++;
    }
    valueList.setSamplestamps(timestamps);
    valueList.setData(data);
    return valueList;
}
/// <summary>
/// Reads <paramref name="length"/> lines from the file, parsing each into a
/// timestamp plus one int per channel, and returns them as a ValueList.
/// </summary>
private ValueList readIntAsList(int length)
{
    String line = "";
    StringTokenizer tokenizer;
    var valueList = new ValueList();
    var timestamps = new long[length];
    var data = new int[length][];
    for (int sampleNumber = 0; sampleNumber < length; sampleNumber++)
    {
        line = file.ReadLine();
        tokenizer = new StringTokenizer(line, separator);
        // Fix: timestamps is long[], but the original used int.Parse here
        // (unlike the sibling readDoubleAsListScaled, which uses long.Parse),
        // which overflows for timestamps beyond int range.
        timestamps[sampleNumber] = long.Parse(tokenizer.NextToken().Trim());
        data[sampleNumber] = new int[channelCount];
        for (int i = 0; i < channelCount; i++)
        {
            data[sampleNumber][i] = int.Parse(tokenizer.NextToken().Trim());
        }
        currentSample++;
    }
    valueList.setSamplestamps(timestamps);
    valueList.setData(data);
    return (valueList);
}
/// <summary>
/// Tokenizes the incoming line of text and emits a &lt;token, 1&gt; pair
/// for every whitespace-separated token.
/// </summary>
/// <exception cref="System.IO.IOException"/>
public virtual void Map(K key, Text value, OutputCollector<Text, LongWritable> output, Reporter reporter)
{
    // value is one line of input text
    string text = value.ToString();
    StringTokenizer tokens = new StringTokenizer(text);
    while (tokens.HasMoreTokens())
    {
        // output <token,1> pairs
        output.Collect(new Text(tokens.NextToken()), new LongWritable(1));
    }
}
/// <summary>
/// This will parse a composite part from the stream.
/// </summary>
/// <returns>The composite.</returns>
/// <exception cref="IOException">If there is an error parsing the composite.</exception>
private Composite ParseComposite()
{
    Composite composite = new Composite();
    string partData = ReadLine();
    // Tokens are separated by spaces and/or semicolons, e.g.
    // "CC <name> <count> ; PCC <part> <x> <y> ; ...".
    StringTokenizer tokenizer = new StringTokenizer(partData, " ;");
    string cc = tokenizer.NextToken();
    if (!cc.Equals(CC))
    {
        throw new IOException("Expected '" + CC + "' actual='" + cc + "'");
    }
    composite.Name = tokenizer.NextToken();
    int partCount;
    try
    {
        partCount = int.Parse(tokenizer.NextToken());
    }
    catch (FormatException e)
    {
        // Re-wrap so callers only need to handle IOException.
        throw new IOException("Error parsing AFM document:" + e);
    }
    for (int i = 0; i < partCount; i++)
    {
        CompositePart part = new CompositePart();
        String pcc = tokenizer.NextToken();
        if (!pcc.Equals(PCC))
        {
            throw new IOException("Expected '" + PCC + "' actual='" + pcc + "'");
        }
        string partName = tokenizer.NextToken();
        try
        {
            // x/y displacements of this part relative to the composite origin.
            int x = int.Parse(tokenizer.NextToken());
            int y = int.Parse(tokenizer.NextToken());
            part.Name = partName;
            part.XDisplacement = x;
            part.YDisplacement = y;
            composite.Parts.Add(part);
        }
        catch (FormatException e)
        {
            throw new IOException("Error parsing AFM document:" + e);
        }
    }
    return composite;
}
/// <summary>
/// This is used to verify that a semicolon is the next token in the stream.
/// </summary>
/// <param name="tokenizer">The tokenizer to read from.</param>
/// <exception cref="IOException">If the semicolon is missing.</exception>
private void VerifySemicolon(StringTokenizer tokenizer)
{
    // No token at all is also an error.
    if (!tokenizer.HasMoreTokens())
    {
        throw new IOException("CharMetrics is missing a semicolon after a command");
    }
    String token = tokenizer.NextToken();
    if (!";".Equals(token))
    {
        throw new IOException("Error: Expected semicolon in stream actual='" + token + "'");
    }
}
/// <summary>
/// Tests java.util.StringTokenizer.hasMoreTokens(): it must report true
/// for each of the five tokens and false once they are exhausted.
/// </summary>
public void test_hasMoreTokens()
{
    StringTokenizer st = new StringTokenizer("This is a test String");
    for (int i = 0; i < 5; i++)
    {
        Assertion.Assert(
            "StringTokenizer incorrectly reports it has no more tokens",
            st.HasMoreTokens());
        st.NextToken();
    }
    Assertion.Assert("StringTokenizer incorrectly reports it has more tokens",
                     !st.HasMoreTokens());
}
// Builds a PDF explicit-destination array from a textual description of the
// form "<page> [/<fit type> [up to four parameters]]". When only a page
// number is given, an /XYZ destination with a very large top offset is used
// so the viewer shows the top of the page.
internal static PdfArray CreateDestinationArray(string value, PdfWriter writer)
{
    var ar = new PdfArray();
    var tk = new StringTokenizer(value);
    // First token is always the page number.
    var n = int.Parse(tk.NextToken());
    ar.Add(writer.GetPageReference(n));
    if (!tk.HasMoreTokens())
    {
        // Default destination: /XYZ 0 10000 0 (top of page).
        ar.Add(PdfName.Xyz);
        ar.Add(new float[] { 0, 10000, 0 });
    }
    else
    {
        // Second token is the fit type; a leading '/' is optional.
        var fn = tk.NextToken();
        if (fn.StartsWith("/"))
        {
            fn = fn.Substring(1);
        }
        ar.Add(new PdfName(fn));
        // Up to four parameters; the literal "null" maps to a PDF null.
        for (var k = 0; k < 4 && tk.HasMoreTokens(); ++k)
        {
            fn = tk.NextToken();
            if (fn.Equals("null"))
            {
                ar.Add(PdfNull.Pdfnull);
            }
            else
            {
                ar.Add(new PdfNumber(fn));
            }
        }
    }
    return (ar);
}
/// <summary>
/// Applies a leading value to a paragraph. The value may contain one number
/// (fixed leading) or two (fixed and multiplied leading) separated by a
/// space or comma; null or malformed input falls back to multiplied 1.5.
/// </summary>
private static void SetParagraphLeading(Paragraph p, String leading)
{
    if (leading == null)
    {
        p.SetLeading(0, 1.5f);
        return;
    }
    try
    {
        StringTokenizer tk = new StringTokenizer(leading, " ,");
        String v = tk.NextToken();
        float fixedLeading = float.Parse(v, System.Globalization.NumberFormatInfo.InvariantInfo);
        if (!tk.HasMoreTokens())
        {
            // Only one value: fixed leading, no multiplier.
            p.SetLeading(fixedLeading, 0);
            return;
        }
        v = tk.NextToken();
        float multipliedLeading = float.Parse(v, System.Globalization.NumberFormatInfo.InvariantInfo);
        p.SetLeading(fixedLeading, multipliedLeading);
    }
    catch
    {
        // Malformed value: fall back to the default leading.
        p.SetLeading(0, 1.5f);
    }
}
/// <summary>A transformation attribute can encompass multiple transformation operation (e.g.</summary>
/// <remarks>
/// A transformation attribute can encompass multiple transformation operation (e.g. "translate(10,20) scale(30,40)".
/// This method splits the original transformation string into multiple strings so that they can be handled separately.
/// </remarks>
/// <param name="transform">the transformation value</param>
/// <returns>a list containing strings describing a single transformation operation</returns>
private static IList<String> SplitString(String transform)
{
    List<String> list = new List<String>();
    // Split on the closing parenthesis; the ')' is re-appended below.
    StringTokenizer tokenizer = new StringTokenizer(transform, ")", false);
    while (tokenizer.HasMoreTokens())
    {
        String trim = tokenizer.NextToken().Trim();
        // Fix: Trim() never returns null, so the original "trim != null"
        // check was dead code; only the empty check is needed.
        if (!String.IsNullOrEmpty(trim))
        {
            list.Add(trim + ")");
        }
    }
    return (list);
}
/// <summary>
/// Populates one table row from a ';'-separated line: header cells are
/// padded and borderless, body cells get a thin black border and an
/// alternating background colour per column.
/// </summary>
private static void PopulaTabela(Table table, string line, PdfFont font, bool isHeader)
{
    StringTokenizer tokenizer = new StringTokenizer(line, ";");
    int numeroColuna = 0;
    while (tokenizer.HasMoreTokens())
    {
        if (isHeader)
        {
            var headerText = new Paragraph(tokenizer.NextToken())
                .SetFont(font)
                .SetPadding(5)
                .SetBorder(null);
            table.AddHeaderCell(new Cell().Add(headerText));
        }
        else
        {
            numeroColuna++;
            var bodyText = new Paragraph(tokenizer.NextToken())
                .SetFont(font)
                .SetBorder(new SolidBorder(ColorConstants.BLACK, 0.5f));
            table.AddCell(new Cell().Add(bodyText)
                .SetBackgroundColor(backColor(numeroColuna)));
        }
    }
}
/// <summary>
/// Fills a copy of the source form with the data rows read from DATA:
/// one result page per CSV line, with the first page of the source
/// document stamped underneath every page as a background template.
/// </summary>
/// <param name="dest">path of the resulting PDF file</param>
protected void ManipulatePdf(String dest)
{
    PdfDocument srcDoc = new PdfDocument(new PdfReader(SRC));
    PdfAcroForm form = PdfAcroForm.GetAcroForm(srcDoc, true);

    // Create a map with fields from the acroform and their names
    Dictionary<String, Rectangle> positions = new Dictionary<String, Rectangle>();
    IDictionary<String, PdfFormField> fields = form.GetFormFields();
    foreach (PdfFormField field in fields.Values)
    {
        positions.Add(field.GetFieldName().GetValue(), field.GetWidgets()[0].GetRectangle().ToRectangle());
    }

    PdfDocument pdfDoc = new PdfDocument(new PdfWriter(dest));
    Document doc = new Document(pdfDoc);
    PdfFont font = PdfFontFactory.CreateFont(StandardFonts.HELVETICA);

    // Event handler copies content of the source pdf file on every page
    // of the result pdf file as template to fill in.
    pdfDoc.AddEventHandler(PdfDocumentEvent.END_PAGE,
        new PaginationEventHandler(srcDoc.GetFirstPage().CopyAsFormXObject(pdfDoc)));
    // The XObject copy above is the only thing needed from the source, so it
    // can be closed before the data rows are processed.
    srcDoc.Close();

    using (StreamReader streamReader = new StreamReader(DATA))
    {
        // Read first line with headers,
        // do nothing with current text line, because headers are already filled in form
        String line = streamReader.ReadLine();
        while ((line = streamReader.ReadLine()) != null)
        {
            int i = 0;
            // Tokens of each ';'-separated line correspond, by position,
            // to the field names in FIELDS[].
            StringTokenizer tokenizer = new StringTokenizer(line, ";");
            pdfDoc.AddNewPage();
            while (tokenizer.HasMoreTokens())
            {
                // Fill in current form field, got by the name from FIELDS[],
                // with content, read from the current token
                Process(doc, FIELDS[i++], tokenizer.NextToken(), font, positions);
            }
        }
    }
    doc.Close();
}
/// <summary> Converts a Namespace name to a directory path
/// </summary>
/// <param name="pack">The name of the .NET namespace
/// </param>
public static String ConvertPackageToDir(String pack)
{
    if (pack == null || pack.Equals("null"))
    {
        return "";
    }
    // Each '.'-separated segment is prefixed with the platform directory
    // separator, so the result starts with a separator.
    StringBuilder path = new StringBuilder();
    StringTokenizer segments = new StringTokenizer(pack, ".");
    while (segments.HasMoreTokens())
    {
        path.Append(Path.DirectorySeparatorChar.ToString());
        path.Append(segments.NextToken());
    }
    return path.ToString();
}
/// <summary>
/// Process table row for PDF export
/// </summary>
/// <param name="table"></param>
/// <param name="line"></param>
/// <param name="font"></param>
/// <param name="isHeader">true renders the comma-separated tokens as shaded header cells, false as plain body cells</param>
private static void process(Table table, String line, PdfFont font, Boolean isHeader)
{
    StringTokenizer tokenizer = new StringTokenizer(line, ",");
    while (tokenizer.HasMoreTokens())
    {
        if (isHeader)
        {
            var headerParagraph = new Paragraph(tokenizer.NextToken()).SetFont(font);
            table.AddHeaderCell(new Cell().SetBackgroundColor(WebColors.GetRGBColor("A6B8AE")).Add(headerParagraph));
        }
        else
        {
            var bodyParagraph = new iText.Layout.Element.Paragraph(tokenizer.NextToken()).SetFont(font);
            table.AddCell(new Cell().Add(bodyParagraph));
        }
    }
}
/// <summary>
/// Verifies that the Delimiters property can be changed mid-stream and
/// that Count reflects the tokens under the new delimiter set.
/// </summary>
public void DelimitersCustomTokenizingChangeCheck()
{
    StringTokenizer st = new StringTokenizer(StringSet_3);

    // state check: defaults are in effect before any change.
    Assert.AreEqual(DefaultDelimiters, st.Delimiters);
    Console.WriteLine("Token: {0}", st.NextToken());

    // change check: switch delimiters and confirm the new value took.
    st.Delimiters = "*";
    Assert.AreEqual("*", st.Delimiters);
    Console.WriteLine("Count: {0}", st.Count);

    // result check
    Assert.AreEqual(3, st.Count);
}
/// <summary>
/// Loads the footer text of the 'normal' print style from the database and
/// returns its first line.
/// </summary>
/// <returns>the first line of the configured footer</returns>
public static string CollectFooter()
{
    CCommonConstants oConstants = ConfigManager.GetConfig<CCommonConstants>();
    string tempConnectionString = oConstants.DBConnection;
    string sqlCommand = SqlQueries.GetQuery(Query.GetPrintStyles);
    // Fix: the adapter and dataset were never disposed; wrap them in using
    // blocks so the underlying connection resources are released.
    using (SqlDataAdapter tempSqlAdapter = new SqlDataAdapter(sqlCommand, tempConnectionString))
    using (DataSet tempDataSet = new DataSet())
    {
        tempSqlAdapter.Fill(tempDataSet, "PrintStyle");
        string HeaderContent = "\r\n\t" + tempDataSet.Tables["PrintStyle"].Select("style_name = 'normal'")[0]["footer"].ToString();
        // Only the first line of the footer content is wanted.
        StringTokenizer tempStringTokenizer = new StringTokenizer(HeaderContent, "\r\n");
        string footerString = tempStringTokenizer.NextToken();
        return (footerString);
    }
}
/// <summary>Returns a collection of strings.</summary>
/// <param name="str">String to parse</param>
/// <param name="delim">delimiter to separate the values</param>
/// <returns>Collection of parsed elements; empty when <c>str</c> is null.</returns>
public static ICollection<string> GetStringCollection(string str, string delim)
{
    IList<string> values = new AList<string>();
    if (str != null)
    {
        StringTokenizer tokenizer = new StringTokenizer(str, delim);
        while (tokenizer.HasMoreTokens())
        {
            values.AddItem(tokenizer.NextToken());
        }
    }
    return values;
}
/// <summary>
/// Executes the specified string command in a separate process with the
/// specified environment and working directory.
///
/// <para>This is a convenience method.  An invocation of the form
/// <tt>exec(command, envp, dir)</tt>
/// behaves in exactly the same way as the invocation
/// <tt><seealso cref="#exec(String[], String[], File) exec"/>(cmdarray, envp, dir)</tt>,
/// where <code>cmdarray</code> is an array of all the tokens in
/// <code>command</code>.
///
/// </para>
/// <para>More precisely, the <code>command</code> string is broken
/// into tokens using a <seealso cref="StringTokenizer"/> created by the call
/// <code>new <seealso cref="StringTokenizer"/>(command)</code> with no
/// further modification of the character categories.  The tokens
/// produced by the tokenizer are then placed in the new string
/// array <code>cmdarray</code>, in the same order.
///
/// </para>
/// </summary>
/// <param name="command"> a specified system command.
/// </param>
/// <param name="envp"> array of strings, each element of which
/// has environment variable settings in the format
/// <i>name</i>=<i>value</i>, or
/// <tt>null</tt> if the subprocess should inherit
/// the environment of the current process.
/// </param>
/// <param name="dir"> the working directory of the subprocess, or
/// <tt>null</tt> if the subprocess should inherit
/// the working directory of the current process.
/// </param>
/// <returns> A new <seealso cref="Process"/> object for managing the subprocess
/// </returns>
/// <exception cref="SecurityException">
/// If a security manager exists and its
/// <seealso cref="SecurityManager#checkExec checkExec"/>
/// method doesn't allow creation of the subprocess
/// </exception>
/// <exception cref="IOException">
/// If an I/O error occurs
/// </exception>
/// <exception cref="NullPointerException">
/// If <code>command</code> is <code>null</code>,
/// or one of the elements of <code>envp</code> is <code>null</code>
/// </exception>
/// <exception cref="IllegalArgumentException">
/// If <code>command</code> is empty
/// </exception>
/// <seealso cref= ProcessBuilder
/// @since 1.3 </seealso>
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: public Process exec(String command, String[] envp, File dir) throws IOException
public virtual Process Exec(String command, String[] envp, File dir)
{
    if (command.Length() == 0)
    {
        throw new IllegalArgumentException("Empty command");
    }
    // Split the command into whitespace-separated tokens and delegate to the
    // array-based overload.
    StringTokenizer st = new StringTokenizer(command);
    String[] cmdarray = new String[st.CountTokens()];
    for (int i = 0; st.HasMoreTokens(); i++)
    {
        cmdarray[i] = st.NextToken();
    }
    return Exec(cmdarray, envp, dir);
}
/// <summary> /// Break the prefix string into moves (a sequence of integer row ids that /// will be selected for each column in order). /// </summary> /// <remarks> /// Break the prefix string into moves (a sequence of integer row ids that /// will be selected for each column in order). Find all solutions with /// that prefix. /// </remarks> /// <exception cref="System.IO.IOException"/> protected override void Map <_T0>(WritableComparable <_T0> key, Text value, Mapper.Context context) { prefixString = value; StringTokenizer itr = new StringTokenizer(prefixString.ToString(), ","); int[] prefix = new int[depth]; int idx = 0; while (itr.HasMoreTokens()) { string num = itr.NextToken(); prefix[idx++] = System.Convert.ToInt32(num); } pent.Solve(prefix); }
/// <summary>
/// Reads every file under <paramref name="path"/>, accumulating word counts
/// and word-length statistics, and returns the standard deviation of the
/// word lengths.
/// </summary>
/// <param name="path">directory (or glob) whose files are read</param>
/// <returns>standard deviation of the lengths of all words read</returns>
/// <exception cref="System.IO.IOException"/>
public virtual double Read(string path)
{
    FileSystem fs = FileSystem.Get(new Configuration());
    FileStatus[] files = fs.ListStatus(new Path(path));
    foreach (FileStatus fileStat in files)
    {
        if (!fileStat.IsFile())
        {
            continue;
        }
        BufferedReader br = null;
        try
        {
            br = new BufferedReader(new InputStreamReader(fs.Open(fileStat.GetPath())));
            string line;
            while ((line = br.ReadLine()) != null)
            {
                StringTokenizer st = new StringTokenizer(line);
                while (st.HasMoreTokens())
                {
                    string word = st.NextToken();
                    this.wordsRead++;
                    this.wordLengthsRead += word.Length;
                    this.wordLengthsReadSquared += (long)Math.Pow(word.Length, 2.0);
                }
            }
        }
        catch (IOException)
        {
            System.Console.Out.WriteLine("Output could not be read!");
            throw;
        }
        finally
        {
            // Fix: br is null when opening the stream threw; the original
            // unconditionally called br.Close(), raising a
            // NullReferenceException that masked the real error.
            if (br != null)
            {
                br.Close();
            }
        }
    }
    // stddev = sqrt(E[len^2] - (E[len])^2)
    double mean = ((double)this.wordLengthsRead) / ((double)this.wordsRead);
    mean = Math.Pow(mean, 2.0);
    double term = ((double)this.wordLengthsReadSquared) / ((double)this.wordsRead);
    double stddev = Math.Sqrt(term - mean);
    return stddev;
}
// Stores value under the '.'-separated path "field", creating intermediate
// dictionary levels as needed. Returns false when the path is empty or when
// an existing entry blocks the path (a leaf where a level is needed, or an
// inner dictionary where the leaf should go).
internal bool SetField(String field, PdfObject value)
{
    Dictionary<String, Object> map = fields;
    StringTokenizer tk = new StringTokenizer(field, ".");
    if (!tk.HasMoreTokens())
    {
        return (false);
    }
    while (true)
    {
        String s = tk.NextToken();
        Object obj;
        map.TryGetValue(s, out obj);
        if (tk.HasMoreTokens())
        {
            if (obj == null)
            {
                // Intermediate level missing: create it and descend.
                obj = new Dictionary<String, Object>();
                map[s] = obj;
                map = (Dictionary<string, object>)obj;
                continue;
            }
            else if (obj is Dictionary<String, Object>)
            {
                map = (Dictionary<String, Object>)obj;
            }
            else
            {
                // A leaf value blocks the path: cannot descend further.
                return (false);
            }
        }
        else
        {
            // Last segment: store the value unless an inner map occupies the slot.
            if (!(obj is Dictionary<String, Object>))
            {
                map[s] = value;
                return (true);
            }
            else
            {
                return (false);
            }
        }
    }
}
/// <summary>
/// Parses a comma-delimited string of numbers into a list of doubles.
/// </summary>
/// <param name="Text">comma-delimited numeric values</param>
/// <returns>the parsed values, in order; empty when no tokens are present</returns>
public static List<double> tokString(String Text)
{
    List<double> list = new List<double>();
    StringTokenizer tok = new StringTokenizer(Text);
    tok.NewDelim(new char[] { ',' });
    // Fix: the original do-while called NextToken() before checking
    // HasMoreTokens(), which failed on input containing no tokens.
    while (tok.HasMoreTokens())
    {
        String token = tok.NextToken();
        list.Add(Convert.ToDouble(token));
    }
    return (list);
}
/// <summary>
/// Callback invoked for each incoming teletext text subtitle. Logs the
/// subtitle content for the active page, and — when text rendering is
/// enabled for that page — renders it to a bitmap, copies the bitmap into
/// a Direct3D texture and queues the result for display.
/// </summary>
public void OnTextSubtitle(ref TEXT_SUBTITLE sub)
{
    // First block: diagnostics only; failures here must not prevent rendering.
    try
    {
        if (sub.page == _activeSubPage)
        {
            Log.Debug("Page: " + sub.page);
            Log.Debug("Character table: " + sub.encoding);
            Log.Debug("Timeout: " + sub.timeOut);
            Log.Debug("Timestamp: " + sub.timeStamp);
            Log.Debug("Language: " + sub.language);
            String content = sub.text;
            if (content == null)
            {
                Log.Error("OnTextSubtitle: sub.txt == null!");
                return;
            }
            Log.Debug("Content: ");
            if (content.Trim().Length > 0) // debug log subtitles
            {
                StringTokenizer st = new StringTokenizer(content, new char[] {'\n'});
                while (st.HasMore)
                {
                    Log.Debug(st.NextToken());
                }
            }
            else
            {
                Log.Debug("Page: <BLANK PAGE>");
            }
        }
    }
    catch (Exception e)
    {
        Log.Error("Problem with TEXT_SUBTITLE");
        Log.Error(e);
    }
    try
    {
        // if we dont need the subtitle
        if (!_renderSubtitles || _useBitmap || (_activeSubPage != sub.page))
        {
            //
            //chemelli: too much logging. You can check if logs have:
            //          Log.Debug("Page: " + sub.page);  or Log.Debug("Page: <BLANK PAGE>");
            //          and
            //          Log.Debug("Text subtitle (page {0}) ACCEPTED: [...]
            //          to know the evaluation of this if block
            //
            //Log.Debug("Text subtitle (page {0}) discarded: useBitmap is {1} and activeSubPage is {2}", sub.page, useBitmap,
            //          activeSubPage);
            return;
        }
        Log.Debug("Text subtitle (page {0}) ACCEPTED: useBitmap is {1} and activeSubPage is {2}", sub.page,
                  _useBitmap, _activeSubPage);
        Subtitle subtitle = new Subtitle();
        // TODO - RenderText should directly draw to a D3D texture
        subtitle.subBitmap = RenderText(sub.lc);
        subtitle.timeOut = sub.timeOut;
        // 90 kHz PTS clock -> seconds, offset by the playback start position.
        subtitle.presentTime = sub.timeStamp / 90000.0f + _startPos;
        subtitle.height = 576;
        subtitle.width = 720;
        subtitle.screenHeight = 576;
        subtitle.screenWidth = 720;
        subtitle.firstScanLine = 0;
        subtitle.horizontalPosition = 0;
        Texture texture = null;
        try
        {
            // allocate new texture
            texture = new Texture(GUIGraphicsContext.DX9Device, subtitle.subBitmap.Width, subtitle.subBitmap.Height, 1,
                                  Usage.Dynamic, Format.A8R8G8B8, Pool.Default);
            int pitch;
            using (GraphicsStream a = texture.LockRectangle(0, LockFlags.Discard, out pitch))
            {
                BitmapData bd = subtitle.subBitmap.LockBits(new Rectangle(0, 0, subtitle.subBitmap.Width, subtitle.subBitmap.Height),
                                                            ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb);
                // Quick copy of content: row by row because the texture pitch
                // and the bitmap stride may differ.
                unsafe
                {
                    byte* to = (byte*)a.InternalDataPointer;
                    byte* from = (byte*)bd.Scan0.ToPointer();
                    for (int y = 0; y < bd.Height; ++y)
                    {
                        for (int x = 0; x < bd.Width * 4; ++x)
                        {
                            to[pitch * y + x] = from[y * bd.Stride + x];
                        }
                    }
                }
                texture.UnlockRectangle(0);
                subtitle.subBitmap.UnlockBits(bd);
                // The bitmap content now lives in the texture; release the bitmap.
                subtitle.subBitmap.SafeDispose();
                subtitle.subBitmap = null;
                subtitle.texture = texture;
                a.Close();
            }
        }
        catch (Exception e)
        {
            Log.Debug("SubtitleRenderer: Failed to create subtitle surface!");
            Log.Error(e);
            return;
        }
        AddSubtitle(subtitle);
    }
    catch (Exception e)
    {
        Log.Error("Problem processing text subtitle");
        Log.Error(e);
    }
}
/// <summary>
/// Parses 48 whitespace-separated colour codes from the string into a
/// 12x4 grid of brushes (via paraCor).
/// </summary>
private SolidBrush[,] paraBrush(String str)
{
    SolidBrush[,] grid = new SolidBrush[12, 4];
    StringTokenizer tokens = new StringTokenizer(str);
    for (int row = 0; row < 12; row++)
    {
        for (int col = 0; col < 4; col++)
        {
            String code = tokens.NextToken();
            grid[row, col] = paraCor(Convert.ToInt32(code));
        }
    }
    return grid;
}
// Parses an incoming protocol message: the first token is the command
// number, the rest of the string (from index 2 on) is the payload.
private void updateMessageLog(String str)
{
    // Process the message header (the command number).
    StringTokenizer st = new StringTokenizer(str);
    String temp = st.NextToken();
    numCom = Convert.ToInt32(temp);
    switch (numCom)
    {
        case 0:
        {
            // Command 0: payload is the username.
            username = str.Substring(2, str.Length - 2);
            recebeuUsername = true;
            fechou = false;
            break;
        }
        case 1:
        {
            // Command 1: payload is the secret code board.
            tabuleiro = paraBrush(str.Substring(2, str.Length - 2));
            codRecebido = true;
            break;
        }
        case 2:
        {
            // Command 2: payload is the classification (score) of the last move.
            tabScore = paraBrush(str.Substring(2, str.Length - 2));
            codRecebido = true;
            break;
        }
        case 3:
        {
            // Command 3: payload is the number of games.
            nJogos = Convert.ToInt32(str.Substring(2, str.Length - 2));
            jogosRecebidos = true;
            break;
        }
        default: break;
    }
}
/// <summary>
/// Builds the subtitle selector: reads the preferred subtitle languages
/// from the MediaPortal configuration, wires up the teletext page-info and
/// DVB subtitle-reset callbacks, and activates auto-selection when any
/// preferred languages are configured.
/// </summary>
public SubtitleSelector(ISubtitleStream dvbStreams, SubtitleRenderer subRender, TeletextSubtitleDecoder subDecoder)
{
    Log.Debug("SubtitleSelector ctor");
    if (subRender == null)
    {
        throw new Exception("Nullpointer input not allowed ( SubtitleRenderer)");
    }
    else
    {
        this.dvbStreams = dvbStreams;
        this.subRender = subRender;
    }

    // load preferences
    using (MediaPortal.Profile.Settings reader = new MediaPortal.Profile.Settings(MediaPortal.Configuration.Config.GetFile(MediaPortal.Configuration.Config.Dir.Config, "MediaPortal.xml")))
    {
        preferedLanguages = new List<string>();
        string languages = reader.GetValueAsString("tvservice", "preferredsublanguages", "");
        Log.Debug("SubtitleSelector: sublangs entry content: " + languages);
        // The setting is a ';'-separated list of three-letter language codes.
        StringTokenizer st = new StringTokenizer(languages, ";");
        while (st.HasMore)
        {
            string lang = st.NextToken();
            if (lang.Length != 3)
            {
                Log.Warn("Language {0} is not in the correct format!", lang);
            }
            else
            {
                preferedLanguages.Add(lang);
                Log.Info("Prefered language {0} is {1}", preferedLanguages.Count, lang);
            }
        }
    }

    pageEntries = new Dictionary<int, TeletextPageEntry>();
    bitmapSubtitleCache = new List<SubtitleOption>();

    lock (syncLock)
    {
        if (subDecoder != null)
        {
            subDecoder.SetPageInfoCallback(new MediaPortal.Player.Subtitles.TeletextSubtitleDecoder.PageInfoCallback(OnPageInfo));
        }
        if (dvbStreams != null)
        {
            RetrieveBitmapSubtitles();
            // The delegate is stored in a field so it stays reachable while
            // native code holds the function pointer obtained below.
            subStreamCallback = new SubtitleStreamEventCallback(OnSubtitleReset);
            IntPtr pSubStreamCallback = Marshal.GetFunctionPointerForDelegate(subStreamCallback);
            Log.Debug("Calling SetSubtitleStreamEventCallback");
            dvbStreams.SetSubtitleResetCallback(pSubStreamCallback);
        }
        if (preferedLanguages.Count > 0)
        {
            autoSelectOption = new SubtitleOption();
            autoSelectOption.language = "None";
            autoSelectOption.isAuto = true;
            autoSelectOption.type = SubtitleType.None;

            SetOption(0); // the autoselect mode will have index 0 (ugly)
        }
    }
    Log.Debug("End SubtitleSelector ctor");
}