// Dependency parsing demo: sends `text` to the Baidu NLP DepParser endpoint
// and copies the top-level response fields (log_id, text, items) into a
// ResultSerialize. May throw network/JSON exceptions (e.g. connectivity
// failure, or NullReferenceException if a response key is absent) — callers
// should wrap this call in try/catch.
public static ResultSerialize DepParserDemo(string text)
{
    client = new Nlp(API_KEY, SECRET_KEY);

    JObject result = client.DepParser(text);

    ResultSerialize res = new ResultSerialize();
    res.log_id = result["log_id"].ToString();
    res.text = result["text"].ToString();
    res.items = (JArray)result["items"];

    // NOTE(review): these per-item objects are built and then discarded —
    // ResultSerialize only keeps the raw JArray above. The loop is retained
    // for parity with the original demo (its field accesses surface a
    // NullReferenceException early if any item is missing a key); consider
    // collecting the objects if ResultSerialize ever gains a typed list.
    for (int i = 0; i < res.items.Count; i++)
    {
        DepParserSerialize _gammar = new DepParserSerialize();
        _gammar.word = res.items[i]["word"].ToString();
        _gammar.postag = res.items[i]["postag"].ToString();
        _gammar.head = res.items[i]["head"].ToString();
        _gammar.deprel = res.items[i]["deprel"].ToString();
    }

    return res;

    // Fix: removed unreachable statements that followed the return (CS0162) —
    // an options dictionary ({ "mode", 1 }) and a second
    // client.DepParser(text, options) call were dead code and could never run.
}
// Lexical analysis demo: runs the Baidu NLP Lexer on `text` and copies the
// top-level response fields (log_id, text, items) into a ResultSerialize.
// Network/JSON exceptions propagate to the caller.
public static ResultSerialize LexerDemo(string text)
{
    client = new Nlp(API_KEY, SECRET_KEY);

    JObject result = client.Lexer(text);

    ResultSerialize res = new ResultSerialize
    {
        log_id = result["log_id"].ToString(),
        text = result["text"].ToString(),
        items = (JArray)result["items"]
    };

    // Walk every token returned by the lexer. NOTE(review): each `entry` is
    // built and then discarded — only the raw JArray above is kept; loop
    // retained for parity with the original demo.
    foreach (var token in res.items)
    {
        LexerSerialize entry = new LexerSerialize();
        // loc_details =   (commented fragment kept from original — TODO confirm intent)
        entry.byte_offset = int.Parse(token["byte_offset"].ToString());
        entry.uri = token["uri"].ToString();
        entry.pos = token["pos"].ToString();
        entry.ne = token["ne"].ToString();
        entry.item = token["item"].ToString();
        // basic_words =   (commented fragment kept from original — TODO confirm intent)
        entry.byte_length = int.Parse(token["byte_length"].ToString());
        entry.formal = token["formal"].ToString();
    }

    return res;
}