private void InitializeTokenizer()
{
    InputStream modelIn = null;
    try
    {
        modelIn = new FileInputStream(TokenizerModel);
        TokenizerModel model = new TokenizerModel(modelIn);
        tokenizer = new TokenizerME(model);
    }
    catch (IOException ex)
    {
        tokenizer = null;
    }
    finally
    {
        if (modelIn != null)
        {
            try { modelIn.close(); } catch (IOException ex) { }
        }
    }
}
private void InitializePOSTagger()
{
    InputStream modelIn = null;
    try
    {
        modelIn = new FileInputStream(POSModel);
        POSModel model = new POSModel(modelIn);
        tagger = new POSTaggerME(model);
    }
    catch (IOException ex)
    {
        tagger = null;
    }
    finally
    {
        if (modelIn != null)
        {
            try { modelIn.close(); } catch (IOException ex) { }
        }
    }
}
public SkillState(Dictionary<string, AbstractStoragePool> poolByName, StringPool strings, StringType stringType,
                  Annotation annotationType, List<AbstractStoragePool> types, FileInputStream @in, Mode mode)
    : base(strings, @in.Path, mode, types, poolByName, stringType, annotationType)
{
    try
    {
        AbstractStoragePool p = null;

        poolByName.TryGetValue("a", out p);
        AsField = (null == p) ? (P0)Parser.newPool("a", null, types) : (P0)p;

        poolByName.TryGetValue("b", out p);
        BsField = (null == p) ? (P1)Parser.newPool("b", AsField, types) : (P1)p;

        poolByName.TryGetValue("c", out p);
        CsField = (null == p) ? (P2)Parser.newPool("c", BsField, types) : (P2)p;

        poolByName.TryGetValue("d", out p);
        DsField = (null == p) ? (P3)Parser.newPool("d", BsField, types) : (P3)p;

        poolByName.TryGetValue("noserializeddata", out p);
        NoSerializedDatasField = (null == p) ? (P4)Parser.newPool("noserializeddata", null, types) : (P4)p;
    }
    catch (System.InvalidCastException e)
    {
        throw new ParseException(@in, -1, e, "A super type does not match the specification; see cause for details.");
    }

    foreach (AbstractStoragePool t in types)
    {
        poolByName[t.Name] = t;
    }

    finalizePools(@in);
    @in.close();
}
private void InitializeChunker()
{
    InputStream modelIn = null;
    try
    {
        modelIn = new FileInputStream(ChunkerModel);
        ChunkerModel model = new ChunkerModel(modelIn);
        chunker = new ChunkerME(model);
    }
    catch (IOException ex)
    {
        chunker = null;
    }
    finally
    {
        if (modelIn != null)
        {
            try { modelIn.close(); } catch (IOException ex) { }
        }
    }
}
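// The three initializers above (tokenizer, POS tagger, chunker) repeat the same
// open-construct-close pattern. A minimal consolidation sketch, assuming the same
// IKVM-style java.io types used throughout this listing; the helper name and its
// generic shape are illustrative, not part of the original code (needs using System;
// for Func<,>).
private static T LoadOpenNlpModel<T>(string modelPath, Func<InputStream, T> build) where T : class
{
    InputStream modelIn = null;
    try
    {
        modelIn = new FileInputStream(modelPath);
        return build(modelIn);
    }
    catch (IOException)
    {
        // Mirror the originals: a failed load leaves the component null.
        return null;
    }
    finally
    {
        if (modelIn != null)
        {
            try { modelIn.close(); } catch (IOException) { }
        }
    }
}
// Example (hypothetical property holding the model path):
// tokenizer = LoadOpenNlpModel(TokenizerModelPath, s => new TokenizerME(new TokenizerModel(s)));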
public void Zip(string zipFileName, string[] sourceFile)
{
    FileOutputStream filOpStrm = new FileOutputStream(zipFileName);
    ZipOutputStream zipOpStrm = new ZipOutputStream(filOpStrm);
    foreach (string strFilName in sourceFile)
    {
        FileInputStream filIpStrm = new FileInputStream(strFilName);
        ZipEntry ze = new ZipEntry(Path.GetFileName(strFilName));
        zipOpStrm.putNextEntry(ze);
        sbyte[] buffer = new sbyte[1024];
        int len = 0;
        while ((len = filIpStrm.read(buffer)) >= 0)
        {
            zipOpStrm.write(buffer, 0, len);
        }
        // Close the current entry and its source stream before moving to the next file;
        // closing them only after the loop leaks every input stream but the last.
        zipOpStrm.closeEntry();
        filIpStrm.close();
    }
    zipOpStrm.close();
    filOpStrm.close();
}
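// A minimal usage sketch for Zip() above; the class name and file paths are illustrative
// only and not part of the original snippet.
// var zipper = new FileZipper();
// zipper.Zip(@"C:\out\reports.zip", new[] { @"C:\data\january.csv", @"C:\data\february.csv" });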
public SkillState(Dictionary<string, AbstractStoragePool> poolByName, StringPool strings, StringType stringType,
                  Annotation annotationType, List<AbstractStoragePool> types, FileInputStream @in, Mode mode)
    : base(strings, @in.Path, mode, types, poolByName, stringType, annotationType)
{
    try
    {
        AbstractStoragePool p = null;

        poolByName.TryGetValue("testenum", out p);
        TestEnumsField = (null == p) ? (P0)Parser.newPool("testenum", null, types) : (P0)p;

        poolByName.TryGetValue("testenum:default", out p);
        Testenum_defaultsField = (null == p) ? (P1)Parser.newPool("testenum:default", TestEnumsField, types) : (P1)p;

        poolByName.TryGetValue("testenum:second", out p);
        Testenum_secondsField = (null == p) ? (P2)Parser.newPool("testenum:second", TestEnumsField, types) : (P2)p;

        poolByName.TryGetValue("testenum:third", out p);
        Testenum_thirdsField = (null == p) ? (P3)Parser.newPool("testenum:third", TestEnumsField, types) : (P3)p;

        poolByName.TryGetValue("testenum:last", out p);
        Testenum_lastsField = (null == p) ? (P4)Parser.newPool("testenum:last", TestEnumsField, types) : (P4)p;
    }
    catch (System.InvalidCastException e)
    {
        throw new ParseException(@in, -1, e, "A super type does not match the specification; see cause for details.");
    }

    foreach (AbstractStoragePool t in types)
    {
        poolByName[t.Name] = t;
    }

    finalizePools(@in);
    @in.close();
}
// package-private for testing
Module deserialize(string sourcePath)
{
#if NEVER
    string cachePath = getCachePath(sourcePath);
    FileInputStream fis = null;
    ObjectInputStream ois = null;
    try
    {
        fis = new FileInputStream(cachePath);
        ois = new ObjectInputStream(fis);
        return (Module)ois.readObject();
    }
    catch (Exception e)
    {
        return null;
    }
    finally
    {
        try
        {
            if (ois != null)
            {
                ois.close();
            }
            else if (fis != null)
            {
                fis.close();
            }
        }
        catch (Exception e)
        {
        }
    }
#else
    // The cached-module path above is compiled out (NEVER is never defined); report no cached module.
    return null;
#endif
}
public static Lattice readSlf(string fileName)
{
    FileInputStream fileInputStream = new FileInputStream(fileName);
    Lattice result = Lattice.readSlf(fileInputStream);
    fileInputStream.close();
    return result;
}
public static AudioData readRawFile(string filename)
{
    FileInputStream fileInputStream = new FileInputStream(filename);
    // 8 kHz, 16-bit, mono, signed, little-endian raw samples.
    AudioFormat audioFormat = new AudioFormat(8000f, 16, 1, true, false);
    short[] data = RawReader.readAudioData(fileInputStream, audioFormat);
    fileInputStream.close();
    return new AudioData(data, 8000f);
}
internal CTLUtterance(BatchNISTRecognizer batchNISTRecognizer, string text, string text2)
{
    this_0 = batchNISTRecognizer;
    this.@ref = text2;
    string[] array = String.instancehelper_split(text, " ");
    if (array.Length != 4)
    {
        string text3 = new StringBuilder().append("CTL Syntax Error: ").append(text).toString();
        throw new BatchNISTRecognizer.CTLException(batchNISTRecognizer, text3);
    }
    this.startOffset = Integer.parseInt(array[1]);
    this.endOffset = Integer.parseInt(array[2]);
    this.name = array[3];
    this.data = new byte[(this.endOffset - this.startOffset) * batchNISTRecognizer.bytesPerFrame];
    int num = String.instancehelper_indexOf(array[0], 46);
    this.file = array[0];
    if (num >= 0)
    {
        this.file = String.instancehelper_substring(this.file, 0, num);
    }
    this.file = new StringBuilder().append(batchNISTRecognizer.dataDir).append('/').append(this.file).append(".raw").toString();
    try
    {
        FileInputStream fileInputStream = new FileInputStream(this.file);
        fileInputStream.skip((long)(this.startOffset * batchNISTRecognizer.bytesPerFrame));
        if (fileInputStream.read(this.data) != this.data.Length)
        {
            fileInputStream.close();
            string text4 = new StringBuilder().append("Unable to read ").append(this.data.Length).append(" bytes of utterance ").append(this.name).toString();
            throw new BatchNISTRecognizer.CTLException(batchNISTRecognizer, text4);
        }
        fileInputStream.close();
    }
    catch (IOException ex)
    {
        throw new BatchNISTRecognizer.CTLException(batchNISTRecognizer, new StringBuilder().append("Unable to read utterance ").append(this.name).append(": ").append(Throwable.instancehelper_getMessage(ex)).toString());
    }
}
public static byte[] getBytesFromFileJava(string filePath)
{
    FileInputStream fis = new FileInputStream(new java.io.File(filePath));
    BufferedInputStream bis = new BufferedInputStream(fis);
    int numByte = bis.available();
    byte[] buff = new byte[numByte];
    bis.read(buff, 0, numByte);
    bis.close();
    fis.close();
    return buff;
}
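// Note on the snippet above: available() is only an estimate of how many bytes can be read
// without blocking, not a guaranteed file length, and a single read() may return fewer bytes
// than requested, so the buffer can come back short for large files. A hedged alternative
// sketch that sizes the buffer from java.io.File.length() and loops until the buffer is full
// (the method name is illustrative; byte[] vs sbyte[] depends on the Java interop layer,
// both appear in this listing):
public static byte[] GetAllBytesFromFile(string filePath)
{
    var file = new java.io.File(filePath);
    var buff = new byte[(int)file.length()];
    FileInputStream fis = new FileInputStream(file);
    try
    {
        int off = 0;
        int n;
        // Keep reading until the whole file is buffered or the stream ends early.
        while (off < buff.Length && (n = fis.read(buff, off, buff.Length - off)) > 0)
        {
            off += n;
        }
    }
    finally
    {
        fis.close();
    }
    return buff;
}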
//JAVA TO C# CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
//ORIGINAL LINE: @SuppressWarnings("static-access") public void read(String fileName)
public virtual void read(string fileName)
{
    Hashtable hm = new Hashtable();
    File file = new File(fileName);
    System.Console.WriteLine("file = " + fileName + ". It will be filtered the values for the moment of the market opened");
    int counter = 0;
    try
    {
        FileInputStream fis = new FileInputStream(file);
        BufferedReader dis = new BufferedReader(new InputStreamReader(fis));
        string s;
        while ((s = dis.readLine()) != null)
        {
            //System.out.println(s);
            string[] s1 = s.Split(",".ToCharArray(), StringSplitOptions.RemoveEmptyEntries);
            string s00 = s1[0].Replace('\"', ' ').Trim();
            string s01 = s1[1].Replace('\"', ' ').Trim();
            hm[s00] = s.Replace('\"', ' ').Trim();
            //System.out.println(s00 + " " + s01);
            counter = counter + 1;
        }
        fis.close();
    }
    catch (IOException ioe)
    {
        System.Console.WriteLine("Oops- an IOException happened.");
        System.Console.WriteLine(ioe.ToString());
        System.Console.Write(ioe.StackTrace);
        Environment.Exit(1);
    }
    System.Console.WriteLine("full number of values = " + counter);
    ICollection sk = hm.Keys;
    IEnumerator i = sk.GetEnumerator();
    valuesRow = new string[this.MaxCounter];
    int n = 0;
    while (i.MoveNext())
    {
        string key = (string)i.Current;
        string value = (string)hm[key];
        //System.out.println(key + "->" + value);
        n = n + 1;
        if (counter - n < this.MaxCounter)
        {
            valuesRow[counter - n] = value;
            System.Console.WriteLine(counter + " " + n + " " + valuesRow[counter - n] + " " + (counter - n));
        }
    }
    System.Console.WriteLine("valuesRow.length=" + valuesRow.Length);
}
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: private static void copyFile(java.io.File src, java.io.File dst) throws java.io.IOException
private static void copyFile(File src, File dst)
{
    InputStream @in = new FileInputStream(src);
    OutputStream @out = new FileOutputStream(dst);
    int len;
    // copyBuffer is a shared byte[] field declared elsewhere in the class.
    while ((len = @in.read(copyBuffer)) > 0)
    {
        @out.write(copyBuffer, 0, len);
    }
    @in.close();
    @out.close();
}
public static Fst loadModel(string filename)
{
    long timeInMillis = Calendar.getInstance().getTimeInMillis();
    FileInputStream fileInputStream = new FileInputStream(filename);
    BufferedInputStream bufferedInputStream = new BufferedInputStream(fileInputStream);
    ObjectInputStream objectInputStream = new ObjectInputStream(bufferedInputStream);
    Fst result = Fst.readFst(objectInputStream);
    objectInputStream.close();
    bufferedInputStream.close();
    fileInputStream.close();
    java.lang.System.err.println(new StringBuilder().append("Load Time: ").append((double)(Calendar.getInstance().getTimeInMillis() - timeInMillis) / 1000.0).toString());
    return result;
}
// It's optional; passing null is allowed.
public static TrainingParameters loadTrainingParameters(string paramFile, bool supportSequenceTraining)
{
    TrainingParameters @params = null;
    if (paramFile != null)
    {
        checkInputFile("Training Parameter", new Jfile(paramFile));
        InputStream paramsIn = null;
        try
        {
            paramsIn = new FileInputStream(new Jfile(paramFile));
            @params = new TrainingParameters(paramsIn);
        }
        catch (IOException e)
        {
            throw new TerminateToolException(-1, "Error during parameters loading: " + e.Message, e);
        }
        finally
        {
            try
            {
                if (paramsIn != null)
                {
                    paramsIn.close();
                }
            }
            catch (IOException)
            {
                // sorry that this can fail
            }
        }
        if (!TrainUtil.isValid(@params.getSettings()))
        {
            throw new TerminateToolException(1, "Training parameters file '" + paramFile + "' is invalid!");
        }
        if (!supportSequenceTraining && TrainUtil.isSequenceTraining(@params.getSettings()))
        {
            throw new TerminateToolException(1, "Sequence training is not supported!");
        }
    }
    return @params;
}
/**
 * Check whether cacerts has an invalid certificate.
 */
public static string GetCurrentJavaProperty()
{
    File storeFile = null;
    FileInputStream fis = null;
    var props = new Dictionary<string, string>();
    string sep = File.separator;
    props.Add("trustStore", java.lang.System.getProperty("javax.net.ssl.trustStore"));
    props.Add("javaHome", java.lang.System.getProperty("java.home"));
    props.Add("trustStoreType", java.lang.System.getProperty("javax.net.ssl.trustStoreType", KeyStore.getDefaultType()));
    props.Add("trustStoreProvider", java.lang.System.getProperty("javax.net.ssl.trustStoreProvider", ""));
    props.Add("trustStorePasswd", java.lang.System.getProperty("javax.net.ssl.trustStorePassword", ""));

    /*
     * Try:
     *   javax.net.ssl.trustStore (if this variable exists, stop)
     *   jssecacerts
     *   cacerts
     *
     * If none exists, we use an empty keystore.
     */
    var storeFileName = props["trustStore"];
    if (storeFileName == null)
    {
        string javaHome = props["javaHome"];
        storeFileName = javaHome + sep + "lib" + sep + "security" + sep + "jssecacerts";
        if ((fis = GetFileInputStream(new File(storeFileName))) == null)
        {
            storeFileName = javaHome + sep + "lib" + sep + "security" + sep + "cacerts";
        }
        // Close the probe stream before returning; the original only closed it on a path
        // where it could never have been opened, leaking it when jssecacerts exists.
        if (fis != null)
        {
            fis.close();
        }
        return storeFileName;
    }
    return storeFileName;
}
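// GetFileInputStream is referenced above but not shown. A sketch of what it presumably does:
// return an open stream for the file, or null when it cannot be opened. This is an assumption,
// not the original implementation.
private static FileInputStream GetFileInputStream(File file)
{
    try
    {
        return file.exists() ? new FileInputStream(file) : null;
    }
    catch (IOException)
    {
        return null;
    }
}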
public SkillState(Dictionary<string, AbstractStoragePool> poolByName, StringPool strings, StringType stringType,
                  Annotation annotationType, List<AbstractStoragePool> types, FileInputStream @in, Mode mode)
    : base(strings, @in.Path, mode, types, poolByName, stringType, annotationType)
{
    try
    {
        AbstractStoragePool p = null;
    }
    catch (System.InvalidCastException e)
    {
        throw new ParseException(@in, -1, e, "A super type does not match the specification; see cause for details.");
    }

    foreach (AbstractStoragePool t in types)
    {
        poolByName[t.Name] = t;
    }

    finalizePools(@in);
    @in.close();
}
private static void AddEntries(ZipFile file, string[] newFiles)
{
    string fileName = file.getName();
    string tempFileName = Path.GetTempFileName();
    ZipOutputStream destination = new ZipOutputStream(new FileOutputStream(tempFileName));
    try
    {
        CopyEntries(file, destination);
        if (newFiles != null)
        {
            foreach (string f in newFiles)
            {
                ZipEntry z = new ZipEntry(f.Remove(0, Path.GetPathRoot(f).Length));
                z.setMethod(ZipEntry.DEFLATED);
                destination.putNextEntry(z);
                try
                {
                    FileInputStream s = new FileInputStream(f);
                    try
                    {
                        CopyStream(s, destination);
                    }
                    finally
                    {
                        s.close();
                    }
                }
                finally
                {
                    destination.closeEntry();
                }
            }
        }
    }
    finally
    {
        destination.close();
    }
    file.close();
    System.IO.File.Copy(tempFileName, fileName, true);
    System.IO.File.Delete(tempFileName);
}
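// A minimal usage sketch for AddEntries() above (it is private static, so the call would sit
// in the same class). The archive path and appended file are illustrative; note that the
// method itself closes the ZipFile and rewrites the archive in place via a temporary file.
// AddEntries(new ZipFile(@"C:\backups\archive.zip"), new[] { @"C:\backups\notes.txt" });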
public new static ImmutableFst loadModel(string filename)
{
    ImmutableFst result;
    try
    {
        FileInputStream fileInputStream = new FileInputStream(filename);
        BufferedInputStream bufferedInputStream = new BufferedInputStream(fileInputStream);
        ObjectInputStream objectInputStream = new ObjectInputStream(bufferedInputStream);
        result = ImmutableFst.readImmutableFst(objectInputStream);
        objectInputStream.close();
        bufferedInputStream.close();
        fileInputStream.close();
    }
    catch (FileNotFoundException ex)
    {
        Throwable.instancehelper_printStackTrace(ex);
        return null;
    }
    catch (IOException ex3)
    {
        Throwable.instancehelper_printStackTrace(ex3);
        return null;
    }
    catch (ClassNotFoundException ex5)
    {
        Throwable.instancehelper_printStackTrace(ex5);
        return null;
    }
    return result;
}
public void put(String src, String dst, SftpProgressMonitor monitor, int mode)
{
    //throws SftpException
    src = localAbsolutePath(src);
    dst = remoteAbsolutePath(dst);
    //System.err.println("src: "+src+", "+dst);
    try
    {
        Vector v = glob_remote(dst);
        int vsize = v.size();
        if (vsize != 1)
        {
            if (vsize == 0)
            {
                if (isPattern(dst))
                    throw new SftpException(SSH_FX_FAILURE, dst);
                else
                    dst = Util.unquote(dst);
            }
            throw new SftpException(SSH_FX_FAILURE, v.toString());
        }
        else
        {
            dst = (String)(v.elementAt(0));
        }
        //System.err.println("dst: "+dst);
        bool _isRemoteDir = isRemoteDir(dst);
        v = glob_local(src);
        //System.err.println("glob_local: "+v+" dst="+dst);
        vsize = v.size();
        StringBuffer dstsb = null;
        if (_isRemoteDir)
        {
            if (!dst.endsWith("/"))
            {
                dst += "/";
            }
            dstsb = new StringBuffer(dst);
        }
        else if (vsize > 1)
        {
            throw new SftpException(SSH_FX_FAILURE, "Copying multiple files, but destination is missing or a file.");
        }
        for (int j = 0; j < vsize; j++)
        {
            String _src = (String)(v.elementAt(j));
            String _dst = null;
            if (_isRemoteDir)
            {
                int i = _src.lastIndexOf(file_separatorc);
                if (i == -1)
                    dstsb.append(_src);
                else
                    dstsb.append(_src.substring(i + 1));
                _dst = dstsb.toString();
                dstsb.delete(dst.length(), _dst.length());
            }
            else
            {
                _dst = dst;
            }
            //System.err.println("_dst "+_dst);
            long size_of_dst = 0;
            if (mode == RESUME)
            {
                try
                {
                    SftpATTRS attr = _stat(_dst);
                    size_of_dst = attr.getSize();
                }
                catch (Exception eee)
                {
                    //System.err.println(eee);
                }
                long size_of_src = new File(_src).length();
                if (size_of_src < size_of_dst)
                {
                    throw new SftpException(SSH_FX_FAILURE, "failed to resume for " + _dst);
                }
                if (size_of_src == size_of_dst)
                {
                    return;
                }
            }
            if (monitor != null)
            {
                monitor.init(SftpProgressMonitor.PUT, _src, _dst, (new File(_src)).length());
                if (mode == RESUME)
                {
                    monitor.count(size_of_dst);
                }
            }
            FileInputStream fis = null;
            try
            {
                fis = new FileInputStream(_src);
                _put(fis, _dst, monitor, mode);
            }
            finally
            {
                if (fis != null)
                {
                    // try{
                    fis.close();
                    // }catch(Exception ee){};
                }
            }
        }
    }
    catch (Exception e)
    {
        if (e is SftpException)
            throw (SftpException)e;
        throw new SftpException(SSH_FX_FAILURE, e.toString());
    }
}
public int run(String[] arguments)
{
    sourceFiles.clear();
    if (!handleArguments(arguments))
    {
        return 1;
    }
    var t0 = System.nanoTime();
    try
    {
        var results = new Compiler().compileFromFiles(parameters, sourceFiles.toArray(new File[sourceFiles.size()]));
        var hasErrors = false;
        foreach (var error in results.Errors)
        {
            var filename = error.Filename;
            if (filename != null)
            {
                System.out.print(new File(error.Filename).getAbsolutePath());
            }
            else
            {
                System.out.print("Unknown source");
            }
            if (error.Line > 0)
            {
                System.out.print(" (");
                System.out.print(error.Line);
                if (error.Column > 0)
                {
                    System.out.print(", ");
                    System.out.print(error.Column);
                }
                System.out.print(")");
            }
            if (error.Level == 0)
            {
                hasErrors = true;
                System.out.print(" error ");
            }
            else
            {
                System.out.print(" warning ");
            }
            System.out.print(error.Id);
            System.out.print(": ");
            System.out.println(error.Message);
        }
        if (!hasErrors)
        {
            var outputFile = new File(outputPath);
            if (outputFile.isDirectory() || outputPath.endsWith("\\") || outputPath.endsWith("/"))
            {
                foreach (var e in results.ClassFiles.entrySet())
                {
                    var file = new File(outputFile, e.Key.replace('.', '/') + ".class");
                    var dir = file.getParentFile();
                    if (!dir.exists())
                    {
                        dir.mkdirs();
                    }
                    using (var s = new FileOutputStream(file))
                    {
                        s.write(e.Value);
                    }
                }
            }
            else
            {
                var destination = outputPath;
                if (PathHelper.getExtension(destination).length() == 0)
                {
                    destination += ".jar";
                }
                using (var zipStream = new ZipOutputStream(new FileOutputStream(destination)))
                {
                    if (manifestPath != null)
                    {
                        var zipEntry = new ZipEntry("META-INF/MANIFEST.MF");
                        zipStream.putNextEntry(zipEntry);
                        var buffer = new byte[4096];
                        var inputStream = new FileInputStream(manifestPath);
                        int read;
                        while ((read = inputStream.read(buffer)) != -1)
                        {
                            zipStream.write(buffer, 0, read);
                        }
                        inputStream.close();
                    }
                    if (resourcesPath != null)
                    {
                        var rootDir = new File(resourcesPath);
                        foreach (var content in rootDir.list())
                        {
                            var file = new File(rootDir, content);
                            if (file.isDirectory())
                            {
                                exploreDirectory(zipStream, "", file);
                            }
                            else
                            {
                                addEntry(zipStream, "", file);
                            }
                        }
                    }
                    foreach (var e in results.ClassFiles.entrySet())
                    {
                        var zipEntry = new ZipEntry(e.Key.replace('.', '/') + ".class");
                        zipStream.putNextEntry(zipEntry);
                        zipStream.write(e.Value);
                    }
                }
            }
            System.out.println();
            System.out.println(String.format("%d class(es) successfully generated in %.2fs",
                    results.classFiles.size(), (System.nanoTime() - t0) / 1e9));
            return 0;
        }
        else
        {
            System.out.println();
            System.out.println("Compilation failed");
            return 1;
        }
    }
    catch (TypeLoadException e)
    {
        System.out.println("Cannot find type " + e.TypeName + ". The class is missing from the classpath.");
        System.out.println("Compilation failed");
        return 1;
    }
}
public Dictionary<string, List<string>> Main(string line)
{
    //debug sentence
    // line = "Show me the sales of Kean Cola .25ltr Bottle in Nicosia from January 2017 to October 2017 as a line chart.";
    matchedWords?.Clear();
    nounPhrases?.Clear();
    nouns?.Clear();
    adjectivePhrases?.Clear();
    verbPhrases?.Clear();
    InputStream modelIn = new FileInputStream(HttpRuntime.AppDomainAppPath + "\\Models\\en-parser-chunking.bin");
    InputStream modelIn1 = new FileInputStream(HttpRuntime.AppDomainAppPath + "\\Models\\en-ner-date.bin");
    InputStream modelIn2 = new FileInputStream(HttpRuntime.AppDomainAppPath + "\\Models\\en-token.bin");
    ParserModel model = new ParserModel(modelIn);
    var myParser = ParserFactory.create(model);
    var topParses = ParserTool.parseLine(line, myParser, 1);
    foreach (var p in topParses)
    {
        GetSentenceParts(p);
    }
    try
    {
        TokenizerModel model1 = new TokenizerModel(modelIn2);
        TokenNameFinderModel model2 = new TokenNameFinderModel(modelIn1);
        Tokenizer tokenizer = new TokenizerME(model1);
        var nameFinder = new NameFinderME(model2);
        var tokens = tokenizer.tokenize(line);
        var nameSpans = nameFinder.find(tokens);
        var array = Span.spansToStrings(nameSpans, tokens);
        //
        // foreach (var v in array)
        // {
        //     System.Diagnostics.Debug.WriteLine(v);
        // }
        dates = new HashSet<string>(array);
        PrintSets();
        // System.Diagnostics.Debug.WriteLine("\nProcessing Presentation type");
        //
        // if (nouns.Contains("table"))
        // {
        //     matchedWords.Add(new Tuple<string, string>("PRESENTATION_TYPE", "table"));
        // }
        // if (nounPhrases.Contains("bar chart"))
        // {
        //     matchedWords.Add(new Tuple<string, string>("PRESENTATION_TYPE", "bar chart"));
        // }
        // if (nounPhrases.Contains("line chart"))
        // {
        //     matchedWords.Add(new Tuple<string, string>("PRESENTATION_TYPE", "line chart"));
        // }
        //TODO: if no presentation option is found, ask the user to supply one.
        //      Implement it in the web version soon.
        System.Diagnostics.Debug.WriteLine("\nProcessing Dates");
        if (dates.Count == 2)
        {
            if (dates.ElementAt(0).contains("from"))
            {
                var a = dates.ElementAt(0).replace("from", "");
                List<string> newList = new List<string>();
                newList.Add("START_PERIOD");
                matchedWords.Add(a, newList);
                newList = new List<string>();
                newList.Add("END_PERIOD");
                //todo fix when the date is the same here
                matchedWords.Add(dates.ElementAt(1), newList);
            }
            else
            {
                List<string> newList = new List<string>();
                newList.Add("START_PERIOD");
                matchedWords.Add(dates.ElementAt(0), newList);
                newList = new List<string>();
                newList.Add("END_PERIOD");
                //todo fix when the date is the same here
                matchedWords.Add(dates.ElementAt(1), newList);
            }
        }
        if (dates.Count == 1)
        {
            if (dates.ElementAt(0).contains("from"))
            {
                var a = dates.ElementAt(0).replace("from", "");
                var dts = a.Split(new[] { " to " }, StringSplitOptions.None);
                List<string> newList = new List<string>();
                newList.Add("START_PERIOD");
                matchedWords.Add(dts[0], newList);
                newList = new List<string>();
                newList.Add("END_PERIOD");
                //todo fix when the date is the same here
                matchedWords.Add(dts[1], newList);
            }
            else
            {
                List<string> newList = new List<string>();
                newList.Add("START_PERIOD");
                newList.Add("END_PERIOD");
                //todo fix when the date is the same here
                matchedWords.Add(dates.ElementAt(0), newList);
            }
        }
        System.Diagnostics.Debug.WriteLine("\nProcessing noun phrases");
        // var manager = new Manager();
        // var serializer = new XmlSerializer(typeof(Manager.language));
        // var loadStream = new FileStream("file2.xml", FileMode.Open, FileAccess.Read);
        // var loadedObject = (Manager.language) serializer.Deserialize(loadStream);
        var doc = new XmlDocument();
        // System.Diagnostics.Debug.WriteLine(HttpRuntime.AppDomainAppPath);
        doc.Load(HttpRuntime.AppDomainAppPath + "\\file2.xml");
        var root = doc.SelectSingleNode("*");
        FindMatchingNodesFromXml(root, nounPhrases);
        foreach (var item in nouns.ToList())
        {
            foreach (var VARIABLE in matchedWords)
            {
                if (VARIABLE.Key.Contains(item))
                {
                    nouns.Remove(item); //Will work!
                }
            }
        }
        FindMatchingNodesFromXml(root, verbPhrases);
        // FindMatchingNodesFromXml(root, nouns);
        System.Diagnostics.Debug.WriteLine("\nProcessing verb phrases ");
        System.Diagnostics.Debug.WriteLine("\nProcessing nouns ");
        // construct the dictionary object and open it
        var directory = Directory.GetCurrentDirectory() + "\\wordnet\\";
        foreach (var variable in matchedWords)
        {
            System.Diagnostics.Debug.WriteLine(variable.Value + "\t\t" + variable.Key);
        }
        foreach (var variable in matchedWords)
        {
            string a = variable.Key;
            if (line.Contains(a))
            {
                line = line.replace(a, "");
            }
        }
        foreach (var variable in stopWordsofwordnet)
        {
            string a = " " + variable.toLowerCase() + " ";
            if (line.Contains(a))
            {
                line = line.replace(a, " ");
            }
        }
        if (line.contains("."))
        {
            line = line.replace(".", "");
        }
        if (line.contains("-"))
        {
            line = line.replace("-", " ");
        }
        System.Diagnostics.Debug.WriteLine("/////////////");
        System.Diagnostics.Debug.WriteLine("SECOND PARSE STRING " + line);
        System.Diagnostics.Debug.WriteLine("/////////////");
        line = line.Trim();
        topParses = ParserTool.parseLine(line, myParser, 1);
        nounPhrases?.Clear();
        dates?.Clear();
        verbPhrases?.Clear();
        nouns?.Clear();
        foreach (var p in topParses)
        {
            //p.show();
            GetSentenceParts(p);
        }
        FindMatchingNodesFromXml(root, nounPhrases);
        foreach (var item in nouns.ToList())
        {
            foreach (var VARIABLE in matchedWords)
            {
                if (VARIABLE.Key.Contains(item))
                {
                    nouns.Remove(item); //Will work!
                }
            }
        }
        FindMatchingNodesFromXml(root, verbPhrases);
        FindMatchingNodesFromXml(root, nouns);
        tokens = tokenizer.tokenize(line);
        nameSpans = nameFinder.find(tokens);
        array = Span.spansToStrings(nameSpans, tokens);
        dates = new HashSet<string>(array);
        PrintSets();
        System.Diagnostics.Debug.WriteLine("\nProcessing Dates");
        if (dates.Count == 2)
        {
            if (dates.ElementAt(0).contains("from"))
            {
                var a = dates.ElementAt(0).replace("from", "");
                List<string> newList = new List<string>();
                newList.Add("START_PERIOD");
                matchedWords.Add(a, newList);
                newList = new List<string>();
                newList.Add("END_PERIOD");
                //todo fix when the date is the same here
                matchedWords.Add(dates.ElementAt(1), newList);
            }
            else
            {
                List<string> newList = new List<string>();
                newList.Add("START_PERIOD");
                matchedWords.Add(dates.ElementAt(0), newList);
                newList = new List<string>();
                newList.Add("END_PERIOD");
                //todo fix when the date is the same here
                matchedWords.Add(dates.ElementAt(1), newList);
            }
        }
        if (dates.Count == 1)
        {
            if (dates.ElementAt(0).contains("from"))
            {
                var a = dates.ElementAt(0).replace("from", "");
                var dts = a.Split(new[] { " to " }, StringSplitOptions.None);
                List<string> newList = new List<string>();
                newList.Add("START_PERIOD");
                matchedWords.Add(dts[0], newList);
                newList = new List<string>();
                newList.Add("END_PERIOD");
                //todo fix when the date is the same here
                matchedWords.Add(dts[1], newList);
            }
            else
            {
                List<string> newList = new List<string>();
                newList.Add("START_PERIOD");
                newList.Add("END_PERIOD");
                //todo fix when the date is the same here
                matchedWords.Add(dates.ElementAt(0), newList);
            }
        }
        System.Diagnostics.Debug.WriteLine("\nProcessing noun phrases");
        // var manager = new Manager();
        // var serializer = new XmlSerializer(typeof(Manager.language));
        // var loadStream = new FileStream("file2.xml", FileMode.Open, FileAccess.Read);
        // var loadedObject = (Manager.language) serializer.Deserialize(loadStream);
        FindMatchingNodesFromXml(root, nounPhrases);
        FindMatchingNodesFromXml(root, verbPhrases);
        FindMatchingNodesFromXml(root, nouns);
        foreach (var variable in matchedWords)
        {
            System.Diagnostics.Debug.WriteLine(variable.Value + "\t\t" + variable.Key);
        }
        doc = null;
        GC.Collect();
        GC.WaitForPendingFinalizers();
        //MATCHING WITH WORD NET
        System.Diagnostics.Debug.WriteLine(directory);
        // var wordNet = new WordNetEngine();
        //
        // wordNet.AddDataSource(new StreamReader(Path.Combine(directory, "data.adj")), PartOfSpeech.Adjective);
        // wordNet.AddDataSource(new StreamReader(Path.Combine(directory, "data.adv")), PartOfSpeech.Adverb);
        // wordNet.AddDataSource(new StreamReader(Path.Combine(directory, "data.noun")), PartOfSpeech.Noun);
        // wordNet.AddDataSource(new StreamReader(Path.Combine(directory, "data.verb")), PartOfSpeech.Verb);
        //
        // wordNet.AddIndexSource(new StreamReader(Path.Combine(directory, "index.adj")), PartOfSpeech.Adjective);
        // wordNet.AddIndexSource(new StreamReader(Path.Combine(directory, "index.adv")), PartOfSpeech.Adverb);
        // wordNet.AddIndexSource(new StreamReader(Path.Combine(directory, "index.noun")), PartOfSpeech.Noun);
        // wordNet.AddIndexSource(new StreamReader(Path.Combine(directory, "index.verb")), PartOfSpeech.Verb);
        //
        // System.Diagnostics.Debug.WriteLine("Loading database...");
        // wordNet.Load();
        // System.Diagnostics.Debug.WriteLine("Load completed.");
        // while (true)
        // {
        //     System.Diagnostics.Debug.WriteLine("\nType first word");
        //
        //     var word = System.Diagnostics.Debug.ReadLine();
        //     var synSetList = wordNet.GetSynSets(word);
        //
        //     if (synSetList.Count == 0) System.Diagnostics.Debug.WriteLine($"No SynSet found for '{word}'");
        //
        //     foreach (var synSet in synSetList)
        //     {
        //         var words = string.Join(", ", synSet.Words);
        //
        //         System.Diagnostics.Debug.WriteLine($"\nWords: {words}");
        //     }
        // }
    }
    catch (IOException e)
    {
        e.printStackTrace();
    }
    finally
    {
        // The parser model stream (modelIn) is opened above as well; close it here so it is
        // not leaked alongside the NER and tokenizer model streams.
        if (modelIn != null)
        {
            try { modelIn.close(); } catch (IOException e) { }
        }
        if (modelIn1 != null)
        {
            try { modelIn1.close(); } catch (IOException e) { }
        }
        if (modelIn2 != null)
        {
            try { modelIn2.close(); } catch (IOException e) { }
        }
        // truncateLists(ref nounPhrases);
        // truncateLists(ref nouns);
        // truncateLists(ref dates);
        // truncateLists(ref verbPhrases);
    }
    return matchedWords;
}
public void put(String src, String dst, SftpProgressMonitor monitor, int mode)
{
    src = localAbsolutePath(src);
    dst = remoteAbsolutePath(dst);
    try
    {
        ArrayList v = glob_remote(dst);
        int vsize = v.Count;
        if (vsize != 1)
        {
            if (vsize == 0)
            {
                if (isPattern(dst))
                    throw new SftpException(SSH_FX_FAILURE, dst);
                else
                    dst = Util.unquote(dst);
            }
            throw new SftpException(SSH_FX_FAILURE, v.ToString());
        }
        else
        {
            dst = (String)(v[0]);
        }
        bool _isRemoteDir = isRemoteDir(dst);
        v = glob_local(src);
        vsize = v.Count;
        StringBuilder dstsb = null;
        if (_isRemoteDir)
        {
            if (!dst.EndsWith("/"))
            {
                dst += "/";
            }
            dstsb = new StringBuilder(dst);
        }
        else if (vsize > 1)
        {
            throw new SftpException(SSH_FX_FAILURE, "Copying multiple files, but destination is missing or a file.");
        }
        for (int j = 0; j < vsize; j++)
        {
            String _src = (String)(v[j]);
            String _dst = null;
            if (_isRemoteDir)
            {
                int i = _src.LastIndexOf(file_separatorc);
                if (i == -1)
                    dstsb.Append(_src);
                else
                    dstsb.Append(_src.Substring(i + 1));
                _dst = dstsb.ToString();
                // StringBuffer.delete takes an end index, but StringBuilder.Remove takes a length;
                // remove only the appended file name, not _dst.Length characters.
                dstsb.Remove(dst.Length, _dst.Length - dst.Length);
            }
            else
            {
                _dst = dst;
            }
            long size_of_dst = 0;
            if (mode == RESUME)
            {
                try
                {
                    SftpATTRS attr = GetPathAttributes(_dst);
                    size_of_dst = attr.getSize();
                }
                catch (Exception)
                {
                }
                long size_of_src = new File(_src).Length();
                if (size_of_src < size_of_dst)
                {
                    throw new SftpException(SSH_FX_FAILURE, "failed to resume for " + _dst);
                }
                if (size_of_src == size_of_dst)
                {
                    return;
                }
            }
            if (monitor != null)
            {
                monitor.init(SftpProgressMonitor.PUT, _src, _dst, (new File(_src)).Length());
                if (mode == RESUME)
                {
                    monitor.count(size_of_dst);
                }
            }
            FileInputStream fis = null;
            try
            {
                fis = new FileInputStream(_src);
                _put(fis, _dst, monitor, mode);
            }
            finally
            {
                if (fis != null)
                {
                    fis.close();
                }
            }
        }
    }
    catch (Exception e)
    {
        if (e is SftpException)
            throw (SftpException)e;
        throw new SftpException(SSH_FX_FAILURE, e.Message);
    }
}
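// A minimal usage sketch for put() above, assuming a JSch-style SFTP channel class exposing
// this method and its mode constants (typically OVERWRITE, RESUME, APPEND). The class and
// variable names are assumptions, not part of the snippet.
static void UploadWithResume(ChannelSftp channel)
{
    // Resume an interrupted upload of a local file into a remote directory, with no progress monitor.
    channel.put(@"C:\data\big.iso", "/remote/backups/", null, ChannelSftp.RESUME);
}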