/// <summary>
/// Terminator transform: absorbs a trailing period, promotes a finished
/// leading fragment to the completed set, or splits the sentence here.
/// </summary>
/// <returns>True when the sentence was changed (or already complete).</returns>
public override bool Transform(Sentence sentence)
{
    // Nothing to do for an already-complete sentence.
    if (sentence.Complete())
        return true;

    KeyValuePair<string, string> neighbors = NeighborKinds(sentence);

    // A period immediately after us is absorbed into this phrase.
    if (neighbors.Value == ".")
    {
        sentence.AbsorbNext(this);
        return true;
    }

    // Absorbing may have completed the sentence indirectly; re-check.
    if (sentence.Complete())
        return false;

    string lastPart = constituents[constituents.Count - 1].Part;
    if (neighbors.Key == "" && lastPart == ".")
    {
        // We are the first phrase and already end in a period: done.
        sentence.AddFirstToCompletes();
    }
    else
    {
        // Otherwise cut the sentence at this phrase.
        sentence.Separate(this);
    }
    return true;
}
/// <summary>
/// Creates a child node covering [startPosition, endPosition] of the given
/// right-hand-side sentence.
/// </summary>
public ChildNode(Sentence rhs, int startPosition, int endPosition, string id, int rank) : this()
{
    Id = id;
    Rank = rank;
    Sentence = rhs;
    StartPosition = startPosition;
    EndPosition = endPosition;
}
// Note: see page 327.
/// <summary>
/// Renames every variable in <paramref name="aSentence"/> to a freshly
/// generated name so the result shares no variable names with other
/// sentences (standardizing apart).
/// </summary>
/// <param name="aSentence">The sentence whose variables are renamed.</param>
/// <param name="standardizeApartIndexical">Supplies the name prefix and a
/// running index for generating new variable names.</param>
/// <returns>The original sentence, the standardized sentence, and the
/// forward/reverse substitutions that map between them.</returns>
public StandardizeApartResult standardizeApart(Sentence aSentence,
        StandardizeApartIndexical standardizeApartIndexical)
{
    List<Variable> toRename = variableCollector
            .collectAllVariables(aSentence);
    Dictionary<Variable, Term> renameSubstitution = new Dictionary<Variable, Term>();
    Dictionary<Variable, Term> reverseSubstitution = new Dictionary<Variable, Term>();
    foreach (Variable var in toRename)
    {
        Variable v = null;
        do
        {
            // getNextIndex() advances the indexical on every call, so a
            // rejected candidate name is never produced again.
            v = new Variable(standardizeApartIndexical.getPrefix()
                    + standardizeApartIndexical.getNextIndex());
            // Ensure the new variable name is not already
            // accidentally used in the sentence
        } while (toRename.Contains(v));
        // NOTE(review): Dictionary.Add throws on duplicate keys, so this
        // assumes collectAllVariables yields each variable at most once —
        // confirm against the collector's contract.
        renameSubstitution.Add(var, v);
        reverseSubstitution.Add(v, var);
    }
    Sentence standardized = substVisitor.subst(renameSubstitution, aSentence);
    return new StandardizeApartResult(aSentence, standardized,
            renameSubstitution, reverseSubstitution);
}
/// <summary>
/// Splits a file's text into sentences and adds them to the article.
/// Words appear one per line; a blank line terminates a sentence.
/// </summary>
private static void addSentencesToArticle(string file, Article article)
{
    Sentence current = new Sentence();
    string buffer = null;
    foreach (string line in file.Split('\n'))
    {
        if (line == "")
        {
            // Blank line: flush the buffered words as one sentence.
            if (buffer != null)
            {
                addWordsToSentence(buffer, current);
                buffer = null;
                article.addSentence(current);
                current = new Sentence();
            }
        }
        else
        {
            // null + string yields the string, so no seed value is needed.
            buffer += line + '\n';
        }
    }
    // The input may not end with a blank line — flush the final sentence.
    if (buffer != null)
    {
        addWordsToSentence(buffer, current);
        article.addSentence(current);
    }
}
/// <summary>
/// Recursively expands a parse-forest family into concrete child node
/// lists, walking right-to-left from <paramref name="position"/> within
/// the right-hand side <paramref name="rhs"/>.
/// </summary>
private static void BuildChildrenHelper(Family family, List<ForestNode[]> startList, Sentence rhs, int position)
{
    // A family that carries its own production must occupy the last
    // RHS slot; anything else is a malformed forest.
    if (position + 1 != rhs.Count && family.Production != null)
    {
        throw new Exception();
    }
    if (family.Members.Count == 1)
    {
        // A single member is only valid at the leftmost slot.
        if (position != 0)
        {
            throw new Exception();
        }
        var onlyNode = family.Members[0];
        AddNode(onlyNode, startList, rhs, position);
    }
    else if (family.Members.Count == 2)
    {
        // Two members: the right child fills this slot; the intermediate
        // node on the left fans out into one expansion per sub-family.
        var rightNode = family.Members[1];
        AddNode(rightNode, startList, rhs, position);
        var intermediateNode = (IntermediateNode)family.Members[0];
        // Snapshot the partial lists so each sub-family expands its own
        // copy; results are accumulated back into startList.
        var firstCopy = startList.ToList();
        startList.Clear();
        foreach (var subfamily in intermediateNode.Families)
        {
            var listCopy = firstCopy.ToList();
            BuildChildrenHelper(subfamily, listCopy, rhs, position - 1);
            startList.AddRange(listCopy);
        }
    }
    else
    {
        // Families are binarized — more than two members is invalid.
        throw new Exception();
    }
}
/// <summary>
/// Trains the classifier on one labeled sentence: for every known
/// sentiment word, slot [1] counts presence and slot [0] counts absence
/// within the sentence's sentiment class.
/// </summary>
public void Learn(Sentence sentence)
{
    allCount++;
    if (sentence.Sentiment == Sentiment.Positive)
    {
        positiveSentances++;
        for (int idx = 0; idx < context.SentimentWords.Count; idx++)
        {
            var word = context.SentimentWords[idx];
            if (!positiveDict.ContainsKey(word))
                positiveDict.Add(word, new int[2]);
            // Index 1 = word occurs in the sentence, 0 = it does not.
            positiveDict[word][sentence.SentimentWords.Contains(idx) ? 1 : 0]++;
        }
    }
    else
    {
        negtiveSentances++;
        for (int idx = 0; idx < context.SentimentWords.Count; idx++)
        {
            var word = context.SentimentWords[idx];
            if (!negativeDict.ContainsKey(word))
                negativeDict.Add(word, new int[2]);
            negativeDict[word][sentence.SentimentWords.Contains(idx) ? 1 : 0]++;
        }
    }
}
/// <summary>
/// Converts a first-order-logic sentence to conjunctive normal form using
/// the standard pipeline: implications out, negations in, variable
/// standardization, quantifier removal (skolemization + dropping
/// universals), OR-over-AND distribution, then clause construction.
/// </summary>
public CNF convertToCNF(Sentence aSentence)
{
    // I) Implications Out
    Sentence noImplications = (Sentence)aSentence.accept(
            new ImplicationsOut(), null);

    // N) Negations In
    Sentence negationsInward = (Sentence)noImplications.accept(
            new NegationsIn(), null);

    // S) Standardize variables: sentences such as
    // (FORALL x P(x)) V (EXISTS x Q(x)) reuse a variable name,
    // so one occurrence is renamed.
    Sentence uniqueVariables = (Sentence)negationsInward.accept(
            new StandardizeQuantiferVariables(substVisitor),
            new List<Variable>());

    // E/A) Existentials and Alls Out: skolemize the existentials and
    // drop the explicit universal quantifiers.
    Sentence quantifierFree = (Sentence)uniqueVariables.accept(
            new RemoveQuantifiers(parser), new List<Variable>());

    // D) Distribute V over ^.
    Sentence distributed = (Sentence)quantifierFree.accept(
            new DistributeOrOverAnd(), null);

    // O) Operators Out — gather the final clause set.
    return (new CNFConstructor()).construct(distributed);
}
/// <summary>
/// Reacts to incoming knowledge: a third party declaring love for our
/// wife becomes a stalk-and-kill target.
/// </summary>
public override void AcceptKnowledge(Sentence sentence)
{
    base.AcceptKnowledge(sentence);
    bool rivalLovesWife = sentence.noun1 != this.gameObject
            && sentence.verb == Sentence.Verb.Love
            && sentence.noun2 == wife;
    if (rivalLovesWife)
    {
        AddTree(new StalkAndKill(this, "target", sentence.noun1), 20);
    }
}
/// <summary>
/// Determines whether two sentences duplicate each other with respect to
/// the given token pattern.
/// </summary>
/// <param name="lhs">First sentence to compare.</param>
/// <param name="rhs">Second sentence to compare.</param>
/// <param name="pattern">Token pattern; null entries act as wildcards.</param>
/// <returns>True when both sentences match the pattern token-for-token.</returns>
private static bool IsDuplicationSentence(Sentence lhs, Sentence rhs, object[] pattern)
{
    // Both sentences must be at least as long as the pattern.
    if (lhs.Tokens.Length < pattern.Length || rhs.Tokens.Length < pattern.Length)
    {
        return false;
    }
    for (int i = 0; i < pattern.Length; i++)
    {
        // A null slot matches any token.
        if (pattern[i] == null)
        {
            continue;
        }
        // Name tokens match any other name token at the same position.
        if (pattern[i] is TokenName && lhs.Tokens[i] is TokenName && rhs.Tokens[i] is TokenName)
        {
            continue;
        }
        // Otherwise both sides must equal the pattern token exactly.
        if (!lhs.Tokens[i].Equals(pattern[i]))
        {
            return false;
        }
        if (!rhs.Tokens[i].Equals(pattern[i]))
        {
            return false;
        }
    }
    return true;
}
// https://en.wikipedia.org/wiki/CYK_algorithm
//let the input be a string S consisting of n characters: a1 ... an.
//let the grammar contain r nonterminal symbols R1 ... Rr.
//This grammar contains the subset Rs which is the set of start symbols.
//let P[n,n,r] be an array of booleans. Initialize all elements of P to false.
//for each i = 1 to n
//  for each unit production Rj -> ai
//    set P[1,i,j] = true
//for each i = 2 to n -- Length of span
//  for each j = 1 to n-i+1 -- Start of span
//    for each k = 1 to i-1 -- Partition of span
//      for each production RA -> RB RC
//        if P[k,j,B] and P[i-k,j+k,C] then set P[i,j,A] = true
//if any of P[n,1,x] is true (x is iterated over the set s, where s are all the indices for Rs) then
//  S is member of language
//else
//  S is not member of language
/// <summary>
/// Returns the probability that this grammar generated the given sentence
/// </summary>
/// <param name="s"></param>
/// <returns></returns>
public override double ParseGetProbability(Sentence s)
{
    // The empty sentence has its own grammar-level probability.
    if (s.Count == 0)
    {
        return _grammar.ProbabilityNull;
    }
    var nonterminals_R = _grammar.GetNonterminals();
    // RToJ maps each nonterminal to its index along P's third axis.
    var RToJ = BuildRToJ(nonterminals_R);
    // Probabilistic CYK chart: unlike the boolean pseudocode above,
    // cells accumulate probabilities via +=.
    var P = new double[s.Count, s.Count, nonterminals_R.Count];
    var shouldPunt = CykFillInBase(s, P, RToJ);
    if (shouldPunt)
    {
        // Some terminal had no matching unit production: probability 0.
        return 0.0;
    }
    var localProductionList = BuildLocalCYKProductionList(RToJ);
    // Loop indices follow the 1-based pseudocode; every chart access
    // subtracts 1 to address the 0-based array.
    for (int i = 2; i <= s.Count; i++)
    {
        for (int j = 1; j <= s.Count - i + 1; j++)
        {
            for (int k = 1; k <= i - 1; k++)
            {
                foreach (var production in localProductionList)
                {
                    var A = production.A;
                    var B = production.B;
                    var C = production.C;
                    var probThis = production.Probability;
                    var pleft = P[k - 1, j - 1, B];
                    var pright = P[i - k - 1, j + k - 1, C];
                    P[i - 1, j - 1, A] += pleft * pright * probThis;
                }
            }
        }
    }
    // Full-span cell for the start symbol.
    return P[s.Count - 1, 0, RToJ[_grammar.Start]];
}
/// <summary>
/// Starts a cinematic of the given type: selects the conversation,
/// resets the typewriter state and shows the first sentence's text box.
/// </summary>
public void BeginCinematic(Type CinematicType)
{
    // Disable player input
    // Hide castle UI
    switch (CinematicType)
    {
        case Type.HoarderConversation:
            // NOTE (zesty): For special types, always use index 0, the
            // list is used for other types that have a randomization
            // feature.
            Conv = GameData.Cinematics[CinematicType][0];
            break;
        case Type.RandomExclamation:
        {
            // Pick a random entry from the pool for this type.
            var pool = GameData.Cinematics[CinematicType];
            Conv = pool[Random.Range(0, pool.Count)];
            break;
        }
    }

    // Reset the typewriter to the first letter of the first sentence.
    LetterDelay = Conv.LetterDelay;
    TimeToNextLetter = LetterDelay;
    SentenceIndex = 0;
    LetterIndex = 0;
    CurSentence = Conv.Sentences[SentenceIndex];
    CurTextBox = GetTextBox(CurSentence.OwningTextBox);
    SetTextBoxVisible(CurTextBox, true);
    bSentenceComplete = false;

    if (Conv.PauseGame)
    {
        App.inst.SpawnController.PauseEnemiesForCinematic();
    }
}
/// <summary>
/// Merges equal-kind neighbors into a single phrase of that kind, or lets
/// a preceding preposition absorb this token plus the following noun
/// phrase.
/// </summary>
/// <returns>True when the sentence was rewritten.</returns>
public override bool Transform(Sentence sentence)
{
    KeyValuePair<string, string> nks = NeighborKinds(sentence);

    // Identical kinds on both sides collapse into one phrase of that kind.
    if (nks.Key == nks.Value)
    {
        Phrase merged = null;
        switch (nks.Key)
        {
            case "NP": merged = new NounPhrase(); break;
            case "VP": merged = new VerbPhrase(); break;
            case "PP": merged = new PrepositionalPhrase(); break;
            case "S": merged = new SimpleDeclarativePhrase(); break;
        }
        if (merged != null)
        {
            sentence.Combine(Neighborhood(sentence), merged);
            return true;
        }
    }

    // Preposition before us, noun phrase after: the preposition swallows
    // both this token and the noun phrase.
    if (nks.Key == "PP" && nks.Value == "NP")
    {
        Phrase preposition = sentence.PhraseBefore(this);
        sentence.AbsorbNext(preposition); // absorb this
        sentence.AbsorbNext(preposition); // absorb noun
        return true;
    }

    return false;
}
private string[] traits; //Used to store the traits in a more convenient way.

#endregion Fields

#region Methods

/// <summary>
/// Reacts to a new piece of knowledge (only if it is not already known):
/// being assigned a hotel room triggers dropping bags, a love triangle
/// involving our significant other triggers stalking, and news of an
/// unseen murder triggers reporting the corpse.
/// </summary>
public override void AcceptKnowledge(Sentence sentence)
{
    if(!knowledgeBase.ContainsSentence(sentence)){
        // We are the subject: check for a room assignment.
        if(sentence.noun1 == this.gameObject){
            if(sentence.verb == Sentence.Verb.StayingIn){
                hotelRoom = sentence.noun2; //CHEAT, can look in knowledgebase to find out
                AddTree(new DropBags(this, "room", sentence.noun2), 7);
            }
        }
        // Love involving our SO but not us: target the other party.
        if(sentence.verb == Sentence.Verb.Love){
            if(sentence.noun1 == SO || sentence.noun2 == SO){
                if(sentence.noun1 != gameObject && sentence.noun2 != gameObject){
                    // Whichever noun is NOT the SO is the rival to stalk.
                    if(sentence.noun1 != SO){
                        AddTree(new StalkAndKill(this, "target", sentence.noun1), 30);
                    }
                    else{
                        AddTree(new StalkAndKill(this, "target", sentence.noun2), 30);
                    }
                }
            }
        }
        // A murder we did not commit and have not yet witnessed.
        if(sentence.verb == Sentence.Verb.Murder && sentence.noun1 != gameObject){
            if(!knowledgeBase.Murdered(gameObject, sentence.noun2)){
                AddTree(new FoundCorpse(this, receptionist, sentence), 20);
            }
        }
    }
    base.AcceptKnowledge (sentence);
}
/// <summary>
/// Decides whether a sentence is usable for the user's current topic.
/// Outside a pseudo topic every sentence is valid; inside one, the
/// sentence must exercise the weakness the pseudo topic targets.
/// </summary>
public bool IsSentenceValid(UserProfile profile, Sentence sentence)
{
    TopicHistoryItem topic = profile.CurrentState.CourseLocationInfo.CurrentTopic;

    // Deactivate the filter when not in a pseudo topic.
    if (!topic.IsPseudoTopic)
    {
        return true;
    }

    switch (topic.WeaknessForPseudoTopic.WeaknessType)
    {
        case WeaknessType.GenderAgreement:
            // Needs at least one conjugation with a known gender.
            return sentence.RoleConjugationPairs.Any(p => p.Item2.Gender != GenderRule.Unknown);
        case WeaknessType.NumberAgreement:
            return sentence.RoleConjugationPairs.Any(p => p.Item2.Number != NumberRule.Unknown);
        case WeaknessType.UmbrellaTopic:
            string umbrella = topic.WeaknessForPseudoTopic.UmbrellaTopicName;
            return sentence.Tags.Any(t => t.Equals(umbrella, StringComparison.OrdinalIgnoreCase));
        default:
            // Unrecognized weakness types reject the sentence.
            return false;
    }
}
/// <summary>
/// Wraps this lone phrase in a new prepositional phrase; always succeeds.
/// </summary>
public override bool Transform(Sentence sentence)
{
    sentence.Combine(new List<Phrase> { this }, new PrepositionalPhrase());
    return true;
}
/// <summary>
/// Test fixture setup: a sentence with two words, one of which carries an
/// explicit Id attribute.
/// </summary>
public void Setup()
{
    sentence = DomainMother.Sentence;
    var wordWithId = DomainMother.Word;
    wordWithId.Attributes.Single(a => a.Name == "Id").Value = "1";
    // NOTE(review): this assumes DomainMother.Word yields a fresh instance
    // on each access — if it caches, both list entries share the mutated Id.
    sentence.Words = new List<Word> { DomainMother.Word, wordWithId };
}
// Collects every variable the visitor reports, preserved in discovery
// order. (The result is a List, so duplicates are possible if the
// underlying visitor reports them.)
public List<Variable> collectAllVariables(Sentence sentence)
{
    var collected = new List<Variable>();
    sentence.accept(this, collected);
    return collected;
}
/// <summary>
/// Binary sentence joining two sub-sentences with a connector
/// (e.g. AND, OR, =>).
/// </summary>
public ConnectedSentence(String connector, Sentence first, Sentence second)
{
    this.first = first;
    this.second = second;
    this.connector = connector;
    // Both operands are also exposed through the generic argument list.
    args.Add(first);
    args.Add(second);
}
/// <summary>
/// Runs the initial Compreno parse of the input sentence, records the
/// parsing status on its index, and resets the applied-algorithms counter.
/// </summary>
private void _makeFirstParsing()
{
    // The out parameter is definitely assigned by the call, so no
    // initializer is needed.
    ComprenoParsingResult parsingStatus;
    resultSentence = Compreno.ComprenoApi.Instance.AnalyzeForResult(
            inputSentence.ModifiedText, out parsingStatus);
    _setParsingInfoForIndex(inputSentence.Index, parsingStatus);
    // Fresh parse: no correction algorithms applied yet.
    inputSentence.Index.Info5.SetAplliedAlgorithmsCount(0);
}
/// <summary>
/// Applies the answer score to the history of every verb conjugation in
/// the sentence. Uses an exact-type match, so subclasses of
/// VerbConjugation would be skipped — same as the original behavior.
/// </summary>
public static void UpdateVerbConjugationHistoryFromSentence(UserProfile profile, Sentence sentence, AnswerScore score)
{
    foreach (var pair in sentence.RoleConjugationPairs)
    {
        if (pair.Item2.GetType() == typeof(VerbConjugation))
        {
            UpdateVerbConjugationHistory(profile, (VerbConjugation)pair.Item2, score);
        }
    }
}
/// <summary>
/// A quantified FOL sentence, e.g. FORALL x,y P(x,y): a quantifier, its
/// bound variables, and the sentence in its scope.
/// </summary>
public QuantifiedSentence(String quantifier, List<Variable> variables, Sentence quantified)
{
    this.quantifier = quantifier;
    this.quantified = quantified;
    this.variables.AddRange(variables);
    // The argument list mirrors the variables followed by the scoped sentence.
    this.args.AddRange(variables);
    this.args.Add(quantified);
}
/// <summary>Builds a sentence containing every word in the list, in order.</summary>
private static Sentence AddWordsToSentence(List<Word> listWords)
{
    var result = new Sentence();
    listWords.ForEach(w => result.Add(w));
    return result;
}
/// <summary>
/// Behaviour-tree node that assembles a sentence from blackboard entries:
/// the nouns and result are addressed by key, the verb is given directly.
/// </summary>
public ConstructSentence(BehaviourTree tree, string noun1Key, Sentence.Verb verb, string noun2Key, string sentenceKey) : base(tree)
{
    this.verb = verb;
    this.noun1Key = noun1Key;
    this.noun2Key = noun2Key;
    this.sentenceKey = sentenceKey;
}
/// <summary>
/// Linking via Next is one-directional and After walks the chain from
/// the current sentence forward, inclusive.
/// </summary>
public void NextAndAfterTests()
{
    var first = new Sentence();
    var second = new Sentence();

    first.Next = second;

    Assert.AreSame(second, first.Next);
    Assert.IsNull(second.Next);
    CollectionAssert.AreEqual(new[] { first, second }, first.After.ToList());
}
/// <summary>
/// Linking via Previous and walking the chain backward via Before.
/// </summary>
public void PreviousAndBeforeTests()
{
    var s1 = new Sentence();
    var s2 = new Sentence();
    s1.Previous = s2;
    Assert.AreSame(s2, s1.Previous);
    // NOTE(review): the mirrored test (NextAndAfterTests) asserts the far
    // end of the chain (s2.Next); the symmetric check here would be
    // Assert.IsNull(s2.Previous) — confirm whether s1.Next is intentional.
    Assert.IsNull(s1.Next);
    CollectionAssert.AreEqual(new[] { s1, s2 }, s1.Before.ToList());
}
/// <summary>
/// A murder we have not yet heard about prompts a call to the police.
/// </summary>
public override void AcceptKnowledge(Sentence sentence)
{
    bool isNews = !knowledgeBase.ContainsSentence(sentence);
    if (isNews && sentence.verb == Sentence.Verb.Murder)
    {
        AddTree(new CallPolice(this, detective, sentence), 50);
    }
    base.AcceptKnowledge(sentence);
}
/// <summary>
/// Result of standardizing apart: the original and renamed sentences plus
/// the substitutions mapping between them in both directions.
/// </summary>
public StandardizeApartResult(Sentence originalSentence, Sentence standardized,
        Dictionary<Variable, Term> forwardSubstitution,
        Dictionary<Variable, Term> reverseSubstitution)
{
    this.standardized = standardized;
    this.originalSentence = originalSentence;
    this.reverseSubstitution = reverseSubstitution;
    this.forwardSubstitution = forwardSubstitution;
}
/// <summary>
/// A murder we have not yet heard about triggers an investigation of the
/// victim.
/// </summary>
public override void AcceptKnowledge(Sentence sentence)
{
    bool isNews = !knowledgeBase.ContainsSentence(sentence);
    if (isNews && sentence.verb == Sentence.Verb.Murder)
    {
        AddTree(new Investigate(this, sentence.noun2, receptionist, jail), 17);
    }
    base.AcceptKnowledge(sentence);
}
/// <summary>
/// Event fired when a corpse is found: stores the receptionist and the
/// murder sentence on the blackboard for later steps.
/// </summary>
public FoundCorpse(EventManager eventManager, GameObject receptionist, Sentence sentence) : base(eventManager)
{
    receptionistKey = "receptionist";
    // NOTE(review): the literal is "sentenceKey" while the receptionist
    // key is plain "receptionist" — verify consumers expect this string.
    sentenceKey = "sentenceKey";
    AddToBlackboard(receptionistKey, receptionist);
    AddToBlackboard(sentenceKey, sentence);
    // NOTE(review): the same object is stored twice, under "receptionist"
    // (via receptionistKey) and "reception" — confirm both keys are read.
    AddToBlackboard("reception", receptionist);
}
/// <summary>
/// Classifies the sentence by comparing its score under the positive and
/// negative models; ties classify as Negative (unchanged behavior).
/// </summary>
public void Analize(Sentence sentence)
{
    var positiveScore = GetDist(sentence, positiveDict, positiveSentances);
    var negativeScore = GetDist(sentence, negativeDict, negtiveSentances);
    sentence.Sentiment = positiveScore > negativeScore
            ? Sentiment.Positive
            : Sentiment.Negative;
}
/// <summary>Creates a mode bound to the sentence it operates on.</summary>
/// <param name="sentence">The sentence this mode works with.</param>
public SentenceMode(Sentence sentence) { _sentence = sentence; }
/// <summary>
/// Pushes a negation inward so NOT only ever applies to literals, by
/// repeatedly applying double-negation elimination, De Morgan's laws,
/// and the quantifier-negation equivalences.
/// </summary>
public Object visitNotSentence(NotSentence notSentence, Object arg)
{
    // CNF requires NOT (~) to appear only in literals, so we 'move ~
    // inwards' by repeated application of the following equivalences:
    Sentence negated = notSentence.getNegated();
    // ~(~alpha) equivalent to alpha (double negation elimination)
    if (negated is NotSentence)
    {
        return (((NotSentence)negated).getNegated().accept(this, arg));
    }
    if (negated is ConnectedSentence)
    {
        ConnectedSentence negConnected = (ConnectedSentence)negated;
        Sentence alpha = negConnected.getFirst();
        Sentence beta = negConnected.getSecond();
        // ~(alpha ^ beta) equivalent to (~alpha V ~beta) (De Morgan)
        if (Connectors.isAND(negConnected.getConnector()))
        {
            // I need to ensure the ~s are moved in deeper
            Sentence notAlpha = (Sentence)(new NotSentence(alpha)).accept(
                    this, arg);
            Sentence notBeta = (Sentence)(new NotSentence(beta)).accept(
                    this, arg);
            return (new ConnectedSentence(Connectors.OR, notAlpha, notBeta));
        }
        // ~(alpha V beta) equivalent to (~alpha ^ ~beta) (De Morgan)
        if (Connectors.isOR(negConnected.getConnector()))
        {
            // I need to ensure the ~s are moved in deeper
            Sentence notAlpha = (Sentence)(new NotSentence(alpha)).accept(
                    this, arg);
            Sentence notBeta = (Sentence)(new NotSentence(beta)).accept(
                    this, arg);
            return (new ConnectedSentence(Connectors.AND, notAlpha, notBeta));
        }
    }
    // in addition, rules for negated quantifiers:
    if (negated is QuantifiedSentence)
    {
        QuantifiedSentence negQuantified = (QuantifiedSentence)negated;
        // I need to ensure the ~ is moved in deeper
        Sentence notP = (Sentence)(new NotSentence(negQuantified
                .getQuantified())).accept(this, arg);
        // ~FORALL x p becomes EXISTS x ~p
        if (Quantifiers.isFORALL(negQuantified.getQuantifier()))
        {
            return (new QuantifiedSentence(Quantifiers.EXISTS, negQuantified
                    .getVariables(), notP));
        }
        // ~EXISTS x p becomes FORALL x ~p
        if (Quantifiers.isEXISTS(negQuantified.getQuantifier()))
        {
            return (new QuantifiedSentence(Quantifiers.FORALL, negQuantified
                    .getVariables(), notP));
        }
    }
    // Base case: negated is (or reduces to) a literal — recurse into it
    // and rebuild the NOT on top.
    return (new NotSentence((Sentence)negated.accept(this, arg)));
}
/// <summary>
/// "varun mlik" repeats no letter, so it must qualify as an isogram.
/// </summary>
public void Isogram_myname()
{
    const string phrase = "varun mlik";
    Assert.True(Sentence.IsogramSentence(phrase));
}
/// <summary>
/// If no word tile is dragged to the trash can then the sentence is cleared.
/// </summary>
/// <param name="eventData">Event data.</param>
public void OnPointerClick(PointerEventData eventData)
{
    // Locate the scene's Sentence component by its well-known GameObject
    // name and wipe its contents. (Fixed: stray empty statement ";;" and
    // spacing removed.)
    Sentence sentence = GameObject.Find(Sentence.sentenceGameObjectName).GetComponent<Sentence>();
    sentence.clear();
}
/// <summary>Wraps the sentence that this hypothesis asserts.</summary>
public Hypothesis(Sentence hypothesis) { this.hypothesis = hypothesis; }
/// <summary>
/// Activity startup: loads the sentence selected on the previous screen
/// (id passed via Intent extras), and wires up editing, re-saving, and
/// audio record/playback handlers.
/// </summary>
protected override async void OnCreate(Bundle bundle)
{
    // string path = "C:/Users/mof1/Documents/Visual Studio 2015/Projects/NDSSF/NDSSF/recordings/test.3gpp";
    System.Diagnostics.Debug.Write("------------------------->OnCreate()");
    base.OnCreate(bundle);
    // Id of the sentence to edit, passed from the caller activity.
    id = Intent.GetStringExtra("Id") ?? "Data not available";
    int sid = int.Parse(id);
    sList = (await DataRepository.Sentences.GetAll());
    string nlist = (from n in sList where n.Id == sid select n.Text).SingleOrDefault();
    SetContentView(Resource.Layout.STE2);
    EditText word_text2 = FindViewById <EditText>(Resource.Id.sentence_ID);
    word_text2.Click += delegate
    {
        Toast.MakeText(this, "Only Enter Alphabets", ToastLength.Short).Show();
    };
    ImageButton Save = FindViewById <ImageButton>(Resource.Id.imageButton5);
    word_text2.Text = nlist;
    // Saving replaces the stored sentence: delete the old row, add a new one.
    Save.Click += async delegate
    {
        Sentence s = new Sentence();
        s.Text = word_text2.Text;
        // NOTE(review): the recorder below writes to dir.Path while the
        // saved Audio path uses dir.AbsoluteFile — confirm both resolve
        // to the same file.
        s.Audio = dir.AbsoluteFile.ToString();
        await DataRepository.Sentences.Delete(sid);
        await DataRepository.Sentences.Add(s);
        Toast.MakeText(this, s.Text.ToString() + " Saved!", ToastLength.Short).Show();
        StartActivity(typeof(STE1));
    };
    String path = dir.Path;
    _start = FindViewById <ImageButton>(Resource.Id.imageButton1);
    _stop = FindViewById <ImageButton>(Resource.Id.imageButton2);
    // Start button: toggle both buttons and begin a 3GPP/AMR-NB recording.
    _start.Click += delegate
    {
        //audio recording
        _stop.Enabled = !_stop.Enabled;
        _start.Enabled = !_start.Enabled;
        _recorder.SetAudioSource(AudioSource.Mic);
        _recorder.SetOutputFormat(OutputFormat.ThreeGpp);
        _recorder.SetAudioEncoder(AudioEncoder.AmrNb);
        _recorder.SetAudioChannels(1);
        // _recorder.setAudioSamplingRate(8000);
        _recorder.SetOutputFile(dir.Path);
        _recorder.Prepare();
        _recorder.Start();
    };
    // Stop button: end the recording and immediately play it back.
    _stop.Click += delegate
    {
        _stop.Enabled = !_stop.Enabled;
        _recorder.Stop();
        _recorder.Reset();
        _player.SetDataSource(dir.Path);
        _player.Prepare();
        _player.Start();
    };
    // SetContentView(Resource.Layout.STE2);
}
/// <summary>
/// Exercises CNF conversion of sentences that use term equality, from
/// simple (in)equalities up to an instance of the induction axiom schema.
/// </summary>
public void testTermEquality()
{
    // Domain with the predicates, constants and the Plus function used below.
    FOLDomain domain = new FOLDomain();
    domain.addPredicate("P");
    domain.addPredicate("Q");
    domain.addPredicate("R");
    domain.addConstant("A");
    domain.addConstant("B");
    domain.addConstant("C");
    domain.addConstant("D");
    domain.addFunction("Plus");
    domain.addConstant("ONE");
    domain.addConstant("ZERO");

    FOLParser parser = new FOLParser(domain);
    CNFConverter cnfConv = new CNFConverter(parser);

    // x=y
    Sentence sent = parser.parse("x = y");
    CNF cnf = cnfConv.convertToCNF(sent);
    Assert.AreEqual("[x = y]", cnf.ToString());

    // x!=y
    sent = parser.parse("NOT(x = y)");
    cnf = cnfConv.convertToCNF(sent);
    Assert.AreEqual("[~x = y]", cnf.ToString());

    // A=B
    sent = parser.parse("A = B");
    cnf = cnfConv.convertToCNF(sent);
    Assert.AreEqual("[A = B]", cnf.ToString());

    // A!=B
    sent = parser.parse("NOT(A = B)");
    cnf = cnfConv.convertToCNF(sent);
    Assert.AreEqual("[~A = B]", cnf.ToString());

    // ~(((~A=B or ~D=C) => ~(A=B or D=C)) => A=D)
    sent = parser
        .parse("NOT(((((NOT(A = B) OR NOT(D = C))) => NOT((A = B OR D = C))) => A = D))");
    cnf = cnfConv.convertToCNF(sent);
    Assert
        .AreEqual(
            "[~A = B, A = B],[~A = B, D = C],[~D = C, A = B],[~D = C, D = C],[~A = D]",
            cnf.ToString());

    //
    // Induction Axiom Schema using Term Equality

    // Base Case (negated universals become Skolem constants SC0/SC1):
    sent = parser
        .parse("NOT(FORALL x (FORALL y (Plus(Plus(x,y),ZERO) = Plus(x,Plus(y,ZERO)))))");
    cnf = cnfConv.convertToCNF(sent);
    Assert.AreEqual(
        "[~Plus(Plus(SC0,SC1),ZERO) = Plus(SC0,Plus(SC1,ZERO))]", cnf
        .ToString());

    // Instance of Induction Axiom Schema
    sent = parser.parse("(("
            + "Plus(Plus(A,B),ZERO) = Plus(A,Plus(B,ZERO))"
            + " AND "
            + "(FORALL x (FORALL y (FORALL z("
            + "Plus(Plus(x,y),z) = Plus(x,Plus(y,z))"
            + " => "
            + "Plus(Plus(x,y),Plus(z,ONE)) = Plus(x,Plus(y,Plus(z,ONE)))"
            + "))))"
            + ")"
            + " => "
            + "FORALL x (FORALL y (FORALL z("
            + "Plus(Plus(x,y),z) = Plus(x,Plus(y,z))"
            + "))))");
    cnf = cnfConv.convertToCNF(sent);
    Assert
        .AreEqual(
            "[~Plus(Plus(A,B),ZERO) = Plus(A,Plus(B,ZERO)), Plus(Plus(q0,q1),q2) = Plus(q0,Plus(q1,q2)), Plus(Plus(SC2,SC3),SC4) = Plus(SC2,Plus(SC3,SC4))],[~Plus(Plus(A,B),ZERO) = Plus(A,Plus(B,ZERO)), ~Plus(Plus(SC2,SC3),Plus(SC4,ONE)) = Plus(SC2,Plus(SC3,Plus(SC4,ONE))), Plus(Plus(q0,q1),q2) = Plus(q0,Plus(q1,q2))]",
            cnf.ToString());

    // Goal
    sent = parser
        .parse("NOT(FORALL x (FORALL y (FORALL z (Plus(Plus(x,y),z) = Plus(x,Plus(y,z))))))");
    cnf = cnfConv.convertToCNF(sent);
    Assert.AreEqual(
        "[~Plus(Plus(SC5,SC6),SC7) = Plus(SC5,Plus(SC6,SC7))]", cnf
        .ToString());
}
/// <summary>
/// Tokenizes one sentence in place using unsafe pointer scans over the
/// shared character buffer (_BASE): words are accumulated via _startIndex
/// and _length and flushed with TryCreateWordAndPut2List(); pre-detected
/// URL spans are emitted as single tokens; the collected words are finally
/// handed to the outer callback delegate.
/// </summary>
private void SentSplitterProcessSentCallback(Sentence sent)
{
    _words.Clear();
    _startIndex = sent.StartIndex;
    _length = 0;
    _startPtr = _BASE + _startIndex;
    _endPtr = _startPtr + sent.Length - 1;

    var urls = sent.Urls;
    var urlIndex = 0;
    // Pointer to the start of the next URL span; MAX_PTR means "none".
    var startUrlPtr = (urls != null) ? (_BASE + urls[0].startIndex) : UnsafeConst.MAX_PTR;

    #region main
    // Trailing non-letter/non-digit characters are processed separately
    // in the "tail punctuation" pass below.
    var realyEndPtr = _endPtr;
    _endPtr = SkipNonLetterAndNonDigitToTheEnd();

    for (_ptr = _startPtr; _ptr <= _endPtr; _ptr++)
    {
        #region process allocated url's
        if (startUrlPtr <= _ptr)
        {
            // Flush the current word, emit the whole URL as one token,
            // then resume scanning right after it.
            TryCreateWordAndPut2List();
            var lenu = urls[urlIndex].length;
            _ptr = startUrlPtr + lenu - 1;
            urlIndex++;
            startUrlPtr = (urlIndex < urls.Count) ? (_BASE + urls[urlIndex].startIndex) : UnsafeConst.MAX_PTR;
            _startIndex = (int)(_ptr - _BASE + 1);
            _length = 0;
            continue;
        }
        #endregion

        var ch = *_ptr;
        var ct = *(_CTM + ch);
        #region whitespace
        if ((ct & CharType.IsWhiteSpace) == CharType.IsWhiteSpace)
        {
            TryCreateWordAndPut2List();
            _startIndex++;
            continue;
        }
        #endregion

        var pct = *(_CCTM + ch);
        #region dot
        // A dot followed by an uppercase letter terminates the token
        // (sentence-internal abbreviation boundary).
        if ((pct & CRFCharType.DotChar) == CRFCharType.DotChar && IsUpperNextChar())
        {
            _length++;
            TryCreateWordAndPut2List();
            continue;
        }
        #endregion

        #region between-letter-or-digit
        if ((pct & CRFCharType.BetweenLetterOrDigit) == CRFCharType.BetweenLetterOrDigit)
        {
            // E.g. hyphen/apostrophe: kept inside a word only when
            // flanked by letters or digits.
            if (IsBetweenLetterOrDigit())
            {
                _length++;
            }
            else
            {
                TryCreateWordAndPut2List();
                #region merge punctuation (with white-spaces)
                if (!MergePunctuation(ch))
                {
                    break;
                }
                #endregion
                //punctuation word
                TryCreateWordAndPut2List();
            }
            continue;
        }
        else if ((pct & CRFCharType.BetweenDigit) == CRFCharType.BetweenDigit)
        {
            // E.g. decimal point/thousands separator: kept only between digits.
            if (IsBetweenDigit())
            {
                _length++;
            }
            else
            {
                TryCreateWordAndPut2List();
                #region merge punctuation (with white-spaces)
                if (!MergePunctuation(ch))
                {
                    break;
                }
                #endregion
                TryCreateWordAndPut2List();
            }
            continue;
        }
        #endregion

        #region tokenize-different-separately
        if ((pct & CRFCharType.TokenizeDifferentSeparately) == CRFCharType.TokenizeDifferentSeparately)
        {
            TryCreateWordAndPut2List();
            #region merge punctuation (with white-spaces)
            if (!MergePunctuation(ch))
            {
                break;
            }
            #endregion
            TryCreateWordAndPut2List();
            continue;
        }
        #endregion

        #region interprete-as-whitespace
        if ((pct & CRFCharType.InterpreteAsWhitespace) == CRFCharType.InterpreteAsWhitespace)
        {
            TryCreateWordAndPut2List();
            _startIndex++;
            continue;
        }
        #endregion

        // Ordinary character: extend the current word.
        _length++;
    }
    #endregion
    TryCreateWordAndPut2List();

    #region tail punctuation
    // Re-scan from where the main loop stopped up to the real sentence
    // end, emitting the trailing punctuation as tokens.
    for (_endPtr = realyEndPtr; _ptr <= _endPtr; _ptr++)
    {
        var ch = *_ptr;
        var ct = *(_CTM + ch);
        #region whitespace
        if ((ct & CharType.IsWhiteSpace) == CharType.IsWhiteSpace)
        {
            TryCreateWordAndPut2List();
            _startIndex++;
            continue;
        }
        #endregion

        var nct = *(_CCTM + ch);
        #region tokenize-different-separately
        if ((nct & CRFCharType.TokenizeDifferentSeparately) == CRFCharType.TokenizeDifferentSeparately)
        {
            TryCreateWordAndPut2List();
            #region merge punctuation (with white-spaces)
            if (!MergePunctuation(ch))
            {
                break;
            }
            #endregion
            TryCreateWordAndPut2List();
            continue;
        }
        #endregion

        #region interprete-as-whitespace
        if ((nct & CRFCharType.InterpreteAsWhitespace) == CRFCharType.InterpreteAsWhitespace)
        {
            TryCreateWordAndPut2List();
            _startIndex++;
            continue;
        }
        #endregion

        _length++;
    }
    #endregion

    #region last punctuation
    TryCreateWordAndPut2List();
    #endregion

    // Hand the tokenized sentence to the outer consumer.
    _outerProcessSentCallbackDelegate(_words);
}
/// <summary>
/// Per-frame dialogue driver: Interact/Submit advances or submits, the
/// number keys 1-4 highlight an answer, and pressing a highlighted
/// answer's key again submits it.
/// </summary>
private void Update()
{
    if (!dialogueRunning)
    {
        return;
    }

    // Interact/Submit: advance along the dialogue graph.
    if (Input.GetButtonDown("Interact") || Input.GetButtonDown("Submit"))
    {
        if (currentSentence.answers == null)
        {
            // Plain sentence: follow its single successor.
            currentSentence = sentences[currentSentence.followingId];
        }
        else if (selected != 0)
        {
            // Submit the currently highlighted answer.
            currentSentence = sentences[currentSentence.answers[selected - 1].followSentenceId];
        }
        else
        {
            // Answers shown but none highlighted yet: nothing to submit.
            return;
        }
        selected = 0;
        writeSentence(currentSentence);
    }

    // Number keys only matter while answers are on screen.
    if (currentSentence.answers == null)
    {
        return;
    }

    int option = 100; // sentinel: no digit pressed this frame
    for (int digit = 1; digit <= 4; digit++)
    {
        if (Input.GetKeyDown(digit.ToString()))
        {
            option = digit;
        }
    }

    if (option > currentSentence.answers.Count)
    {
        return; // invalid (or no) selection
    }

    if (option == selected)
    {
        // Pressing the highlighted option again submits it.
        currentSentence = sentences[currentSentence.answers[selected - 1].followSentenceId];
        selected = 0;
    }
    else
    {
        // Highlight the newly chosen answer.
        selected = option;
    }
    writeSentence(currentSentence);
}
/// <summary>
/// Parses the dialogue text asset into the sentences dictionary. Line
/// format: "id next [actor]" header (or "id endInfo" for terminal
/// entries) followed by the sentence text on the next line; a '*' marker
/// introduces a blank-line-terminated block of answer pairs.
/// </summary>
private void loadDialogue(TextAsset dialogueText)
{
    sentences = new Dictionary <int, Sentence>();
    using (StringReader reader = new StringReader(dialogueText.text))
    {
        while (true)
        {
            string line = reader.ReadLine();
            if (line == null)
            {
                break; // end of asset
            }
            if (line == "")
            {
                continue; // blank separator between records
            }
            string[] sentenceInfo = line.Split(' ');
            Sentence sentence = new Sentence { id = int.Parse(sentenceInfo[0]) };
            // Two fields only: terminal sentence carrying end info.
            if (sentenceInfo.Length == 2)
            {
                sentence.endInfo = sentenceInfo[1];
                sentences[sentence.id] = sentence;
                continue;
            }
            // "_" means no actor for this line.
            sentence.actor = sentenceInfo[2] == "_" ? "" : sentenceInfo[2];
            sentence.text = reader.ReadLine();
            if (sentenceInfo[1][0] == '*')
            {
                // '*' marker: read answers until a blank line.
                sentence.answers = new List <Answer>();
                while (true)
                {
                    line = reader.ReadLine();
                    if (line == null)
                    {
                        // NOTE(review): EOF inside an answer block exits
                        // the whole method, discarding the in-progress
                        // sentence — confirm this is intended.
                        return;
                    }
                    if (line == "")
                    {
                        break;
                    }
                    string[] answerInfo = line.Split(' ');
                    // Each answer is a header line plus its text line.
                    Answer answer = new Answer { followSentenceId = int.Parse(answerInfo[1]), text = reader.ReadLine() };
                    sentence.answers.Add(answer);
                }
            }
            else
            {
                sentence.followingId = int.Parse(sentenceInfo[1]);
            }
            sentences[sentence.id] = sentence;
        }
    }
}
/// <summary>
/// Removes the quantifier from a quantified sentence: existentials are
/// skolemized (using the enclosing universal scope carried in
/// <paramref name="arg"/>), universals are dropped after extending that
/// scope for the recursive descent.
/// </summary>
public Object visitQuantifiedSentence(QuantifiedSentence sentence, Object arg)
{
    Sentence quantified = sentence.getQuantified();
    List <Variable> universalScope = (List <Variable>)arg;

    // Skolemize: Skolemization is the process of removing existential
    // quantifiers by elimination. This is done by introducing Skolem
    // functions. The general rule is that the arguments of the Skolem
    // function are all the universally quantified variables in whose
    // scope the existential quantifier appears.
    if (Quantifiers.isEXISTS(sentence.getQuantifier()))
    {
        Dictionary <Variable, Term> skolemSubst = new Dictionary <Variable, Term>();
        foreach (Variable eVar in sentence.getVariables())
        {
            if (universalScope.Count > 0)
            {
                // Replace with a Skolem Function
                String skolemFunctionName = parser.getFOLDomain()
                        .addSkolemFunction();
                skolemSubst.Add(eVar, new Function(skolemFunctionName,
                        new List <Term>(universalScope)));
            }
            else
            {
                // Replace with a Skolem Constant
                String skolemConstantName = parser.getFOLDomain()
                        .addSkolemConstant();
                skolemSubst.Add(eVar, new Constant(skolemConstantName));
            }
        }
        Sentence skolemized = substVisitor.subst(skolemSubst, quantified);
        return(skolemized.accept(this, arg));
    }

    // Drop universal quantifiers.
    if (Quantifiers.isFORALL(sentence.getQuantifier()))
    {
        // Add to the universal scope so that
        // existential skolemization may be done correctly
        universalScope.AddRange(sentence.getVariables());
        Sentence droppedUniversal = (Sentence)quantified.accept(this, arg);
        // Ensure my scope is removed before moving back up
        // the call stack when returning
        foreach (Variable s in sentence.getVariables())
        {
            universalScope.Remove(s);
        }
        return(droppedUniversal);
    }

    // Should not reach here as have already
    // handled the two quantifiers.
    throw new ApplicationException("Unhandled Quantifier:" + sentence.getQuantifier());
}
/// <summary>
/// Generates reply sentences for a well-formatted input (the sentence
/// itself must not be enclosed in double quotes, which would indicate a
/// quote rather than speech). At least one reply is guaranteed. May take
/// considerable time, so call from a separate thread.
/// </summary>
/// <param name="input">The sentence the instigator said.</param>
/// <param name="instigator">Who we are talking to; ideally constant across a session.</param>
/// <param name="themeContext">Optional context restricting the conversation scheme (currently unused).</param>
/// <returns>The list of reply sentences, never empty.</returns>
public List <string> Speak(string input, string instigator, string themeContext = null)
{
    // Stage 1: break the input into meaningful words.
    List<string> words = new List<string>();
    string[] tempWords = input.Split(new char[] { ' ', ',', '.', '?', '!' });
    foreach (string word in tempWords)
    {
        if (word != "") // Skip empties from consecutive separators
        {
            words.Add(word);
        }
    }

    // Stage 2: collect every sentence related to any input word and
    // reset its match counter. Unknown words are perfectly normal.
    List<Sentence> relatedSentences = new List<Sentence>();
    foreach (string word in words)
    {
        if (AiriMemory.Words.ContainsKey(word))
        {
            foreach (Sentence s in AiriMemory.Words[word].RelatedSentences)
            {
                relatedSentences.Add(s);
                s.MatchMarks = 0;
            }
        }
    }

    string reply;
    if (relatedSentences.Count == 0)
    {
        // FIX: the old code indexed [0] on a possibly-empty list and
        // threw. With nothing recognized we still guarantee a reply.
        reply = "Well that makes me think";
    }
    else
    {
        // Stage 3: similarity marking — one mark per input word the
        // sentence contains (duplicated candidates accumulate marks,
        // matching the original behavior).
        foreach (Sentence sentence in relatedSentences)
        {
            foreach (string word in words)
            {
                if (sentence.Content.Contains(word))
                {
                    sentence.MatchMarks++;
                }
            }
        }

        // Stage 4: pick a random sentence among the best-scoring ones.
        Sentence bestMatch = relatedSentences.OrderByDescending(x => x.MatchMarks).First();
        List<Sentence> allMatches = relatedSentences.Where(s => s.MatchMarks == bestMatch.MatchMarks).ToList();
        Random rnd = new Random();
        bestMatch = allMatches[rnd.Next(0, allMatches.Count)];

        // Stage 5: reply with the next line of the matched theme, if any.
        int index = bestMatch.Theme.Sentences.IndexOf(bestMatch);
        int nextIndex = index + 1;
        // FIX: guard the upper bound — the old code indexed past the end
        // of the list when the best match was the theme's last sentence.
        if (nextIndex < bestMatch.Theme.Sentences.Count && bestMatch.Theme.Sentences[nextIndex] != null)
        {
            reply = bestMatch.Theme.Sentences[nextIndex].Content;
        }
        else
        {
            reply = "Well that makes me think";
        }
    }

    // Stage 6: potentially generate a smooth multi-sentence conversation.
    List<string> speeches = new List<string> { reply };

    // Stage 7: learn from the input for the next conversation, creating
    // the instigator's theme on first contact.
    if (!AiriMemory.Themes.ContainsKey(instigator))
    {
        AiriMemory.Themes[instigator] = new Theme(instigator);
    }
    LearnSentence(input, AiriMemory.Themes[instigator]);

    return speeches;
}
/// <summary>
/// Creates a logical negation (NOT) wrapper around the given sentence.
/// The operand is stored both as the negated sentence and as the single argument.
/// </summary>
/// <param name="negated">The sentence being negated.</param>
public NotSentence(Sentence negated) { this.negated = negated; args.Add(negated); }
/// <summary>
/// Determines whether the given sentence matches this rule:
/// it verifies exactly when <c>VerifyInner</c> yields a direction.
/// </summary>
/// <param name="sentence">The sentence to check.</param>
/// <returns>true when the sentence matches; otherwise false.</returns>
public bool Verify(Sentence sentence) => VerifyInner(sentence).HasValue;
/// <summary>
/// Builds a <c>SentenceInfo</c> explaining how the sentence matches this tense:
/// its direction (affirmative/negative/interrogative), the tense name, and the
/// subject/predicate chunks located via the LEFT_/RIGHT_ marker symbols that the
/// mask injects into the transformed text. Returns null when <c>VerifyInner</c>
/// reports no match.
/// </summary>
public SentenceInfo Explain(Sentence sentence)
{
    var direction = VerifyInner(sentence);
    if (direction == null) { return(null); }
    var transformedSentence = sentence.Transform();
    string format;
    string template;
    // Select the regex template and the replacement format (which injects the
    // LEFT_/RIGHT_ subject and predicate markers) for the detected direction.
    switch (direction.Value) {
    case SentenceDirection.Affirmative:
        format = string.Format(AffirmativeMask, LEFT_SUBJECT, RIGHT_SUBJECT, LEFT_PREDICAT, RIGHT_PREDICAT);
        template = AffirmativeTemplate;
        break;
    case SentenceDirection.Negative:
        format = string.Format(NegativeMask, LEFT_SUBJECT, RIGHT_SUBJECT, LEFT_PREDICAT, RIGHT_PREDICAT);
        template = NegativeTemplate;
        // Group 18 gates an optional placeholder: when it contains neither "MV"
        // nor "B", the "<%$18%>" placeholder collapses to a plain "$18" back-reference.
        var m1 = Regex.Match(transformedSentence, template).Groups[18].Value;
        if (!m1.Contains("MV") && !m1.Contains("B")) { format = format.Replace("<%$18%>", "$18"); }
        break;
    case SentenceDirection.Interrogative:
        format = string.Format(InterrogativeMask, LEFT_SUBJECT, RIGHT_SUBJECT, LEFT_PREDICAT, RIGHT_PREDICAT);
        template = InterrogativeTemplate;
        // Same optional-placeholder trick as the negative case, but on group 9.
        var m2 = Regex.Match(transformedSentence, template).Groups[9].Value;
        if (!m2.Contains("MV") && !m2.Contains("B")) { format = format.Replace("<%$9%>", "$9"); }
        break;
    default:
        throw new NotSupportedException(direction.Value.ToString());
    }
    // Apply the mask, then map the marked-up text back into the original form.
    var replace = Regex.Replace(transformedSentence, template, format);
    var text = sentence.TransformBack(replace);
    var redundantSymbolsCount = 0;
    // l/r bracket the first marked span: l is the earliest opening marker, r the
    // earliest closing marker minus 2 (the apparent marker width — Substring below
    // compares 2 characters against LEFT_SUBJECT).
    var l = Math.Min(text.IndexOf(LEFT_SUBJECT, StringComparison.Ordinal), text.IndexOf(LEFT_PREDICAT, StringComparison.Ordinal));
    var r = Math.Min(text.IndexOf(RIGHT_SUBJECT, StringComparison.Ordinal), text.IndexOf(RIGHT_PREDICAT, StringComparison.Ordinal)) - 2;
    Chunk subjectChunk = null;
    var predicateChuncks = new List <Chunk>();
    // Walk every marked span. Each consumed LEFT/RIGHT marker pair adds 4 redundant
    // characters to `text` relative to sentence.Text, tracked in redundantSymbolsCount
    // so l/r stay expressed in original-text coordinates.
    while (l > -1) {
        if (text.Substring(l + redundantSymbolsCount, 2) == LEFT_SUBJECT) {
            subjectChunk = new Chunk(l, r, sentence.Text.Substring(l, r - l));
        } else {
            predicateChuncks.Add(new Chunk(l, r, sentence.Text.Substring(l, r - l)));
        }
        redundantSymbolsCount += 4;
        var ls = text.IndexOf(LEFT_SUBJECT, r + redundantSymbolsCount, StringComparison.Ordinal);
        var lp = text.IndexOf(LEFT_PREDICAT, r + redundantSymbolsCount, StringComparison.Ordinal);
        // When one marker kind is absent, Math.Max picks the one still present
        // (or -1, which terminates the loop).
        l = ls < 0 || lp < 0 ? Math.Max(ls, lp) - redundantSymbolsCount : Math.Min(ls, lp) - redundantSymbolsCount;
        if (l <= -1) { continue; }
        var rs = text.IndexOf(RIGHT_SUBJECT, l + redundantSymbolsCount, StringComparison.Ordinal);
        var rp = text.IndexOf(RIGHT_PREDICAT, l + redundantSymbolsCount, StringComparison.Ordinal);
        r = rs < 0 || rp < 0 ? Math.Max(rs, rp) - 2 - redundantSymbolsCount : Math.Min(rs, rp) - 2 - redundantSymbolsCount;
    }
    return(new SentenceInfo { Direction = direction.ToString(), Tense = Name, Subject = subjectChunk, Predicate = predicateChuncks.ToArray() });
}
/// <summary>
/// Parses the raw NMEA <c>Sentence</c> string and populates <c>CommandWord</c>,
/// <c>Words</c>, <c>CorrectChecksum</c>, <c>ExistingChecksum</c> and the validity
/// flag. Returns silently on malformed input (missing '$', missing comma, or a
/// negative data length).
/// </summary>
protected void SetPropertiesFromSentence()
{
    // Get the location of the dollar sign
    int dollarSignIndex = Sentence.IndexOf("$", StringComparison.Ordinal);
    if (dollarSignIndex == -1)
    {
        return; // If it's -1, this is invalid
    }

    // Get the location of the first comma. This will mark the end of the command word and the start of data.
    int firstCommaIndex = Sentence.IndexOf(",", StringComparison.Ordinal);
    if (firstCommaIndex == -1)
    {
        return; // If it's -1, this is invalid
    }

    // Determine if the data is not properly formatted. Improperly formatted data leads to a negative length.
    if (firstCommaIndex < dollarSignIndex)
    {
        return;
    }

    // The data starts just after the first comma
    int dataStartIndex = firstCommaIndex + 1;

    /* Extract the command word. This is the text between the dollar sign and the first comma.
     *
     * $GPRMC,
     * ^^^^^^
     * 0123456
     *
     * ... the string is interned, which allows us to compare the word by reference.
     */
    CommandWord = string.Intern(Sentence.Substring(dollarSignIndex, firstCommaIndex - dollarSignIndex));

    // Next, get the index of the asterisk
    int asteriskIndex = Sentence.IndexOf("*", StringComparison.Ordinal);
    int dataEndIndex = asteriskIndex == -1 ? Sentence.Length - 1 : asteriskIndex - 1; //dataEndIndex is before the asterisk if it exists, otherwise it's the last character

    // Determine if the data is properly formatted. Improperly formatted data leads to a negative length.
    if (dataEndIndex < dataStartIndex)
    {
        return;
    }

    /* Extract the words for the sentence, starting just after the first comma and ending
     * just before the asterisk or at the end of the string.
     *
     * $GPRMC, 11, 22, 33, 44, 55*CC   OR   $GPRMC, 11, 22, 33, 44, 55
     */
    Words = Sentence.Substring(dataStartIndex, dataEndIndex - dataStartIndex + 1).Split(',');

    // Calculate the checksum for this portion: XOR of every character between the '$' and the data end.
    byte checksum = (byte)Sentence[dollarSignIndex + 1];
    for (int index = dollarSignIndex + 2; index <= dataEndIndex; index++)
    {
        checksum ^= (byte)Sentence[index];
    }

    CorrectChecksum = checksum.ToString("X2", NmeaCultureInfo); // The checksum is the two-character hexadecimal value

    // Get existing checksum
    // The checksum is limited to two characters and we expect a \r\n to follow them
    if (asteriskIndex != -1 && Sentence.Length >= asteriskIndex + 1 + 2)
    {
        ExistingChecksum = Sentence.Substring(asteriskIndex + 1, 2);

        // If the existing checksum matches the current checksum, the sentence is valid
        _isValid = ExistingChecksum.Equals(CorrectChecksum, StringComparison.Ordinal);
    }
}
//void RecognizeResult(string content)
//{
//    voiceHint = false;
//    content = content.TrimEnd('.');
//    bool recognizeOK = false;
//    for (int i = 0; i < dialog.answerIds.Count; i++)
//    {
//        Sentence sentence = Sentence.Get(dialog.answerIds[i]);
//        if (sentence.en.ToUpper().Contains(content.ToUpper()))
//        {
//            Debug.Log(sentence.en + " contains " + content);
//            triggerId = dialog.answerIds[i];
//            answerIndex = i;
//            iPlay.Select(dialog.answerIds[i]);
//            if (dialog.transfer == 2)
//            {
//                index++;
//                StartCoroutine(Evaluating(0, SUCCESS));
//                recognizeOK = true;
//                break;
//            }
//            else
//            {
//                DisplayAnswer(answerIndex);
//                recognizeOK = true;
//                break;
//            }
//        }
//        else
//        {
//            Debug.Log(sentence.en + " not contains " + content);
//        }
//    }
//    if (recognizeOK == false)
//    {
//        StartCoroutine(ShowPrompt());
//    }
//}

//IEnumerator ShowPrompt()
//{
//    yield return new WaitForSeconds(1f);
//    float time = Audio.GetInstance().Play(AudioType.INTRO, Prompt.Get().Audio);
//    yield return new WaitForSeconds(time);
//    agent.StartRecognize(RecognizeResult);
//    voiceHint = true;
//    deltaTime = 0f;
//}

/// <summary>
/// Shows the customer's answer line at the given index: displays its item, fills
/// the EN/CN labels (CN is blanked in review mode), starts pronunciation evaluation
/// for the English text, arms the voice-hint timer, and records the line for review.
/// </summary>
/// <param name="aIndex">Index into the current dialog's answerIds.</param>
void DisplayAnswer(int aIndex)
{
    Debug.Log("DisplayAnswer:" + aIndex);
    iPlay.Display(Sentence.Get(dialog.answerIds[aIndex]).itemName);
    customer.Find("EN").GetComponent <Text>().text = Sentence.Get(dialog.answerIds[aIndex]).en;
    // In review mode the Chinese translation is hidden.
    customer.Find("CN").GetComponent <Text>().text = Role.currentRole.isReview ? "" : Sentence.Get(dialog.answerIds[aIndex]).cn;
    agent.StartEvaluator(ReceiveEvaluatorResult, Sentence.Get(dialog.answerIds[aIndex]).en);
    // Test stub (disabled)
    //Hint.GetInstance().Show("棒极了!", "Perfect!");
    //ShowStar(5);
    //index++;
    //StartCoroutine(Evaluating(0.5f, SUCCESS));
    voiceHint = true;
    deltaTime = 0f;
    Role.AddToReviewByDialog(dialog.answerIds[aIndex]);
}
/// <summary>
/// Indicates whether the current object is equal to another object of the same type.
/// </summary>
/// <param name="other">An object to compare with this object.</param>
/// <returns>true if the current object is equal to the <paramref name="other"/> parameter; otherwise, false.</returns>
public bool Equals(NmeaSentence other)
{
    // IEquatable<T>.Equals is expected to return false for a null argument;
    // the previous version dereferenced other.Sentence and threw
    // NullReferenceException instead.
    if (other == null)
    {
        return false;
    }

    // Sentences are raw NMEA strings, so compare ordinally (case-sensitive, culture-free).
    return Sentence.Equals(other.Sentence, StringComparison.Ordinal);
}
/// <summary>
/// Coroutine driving one dialog step: clears the UI, resolves trigger-gated dialogs,
/// optionally plays a scene transfer, shows and speaks the waiter's ask line, then
/// either displays the single/random answer or waits for keyword-driven voice input.
/// Finishes the curriculum when the dialog list is exhausted.
/// </summary>
IEnumerator ShowDialog()
{
    DisplayHints(null);
    customer.Find("EN").GetComponent <Text>().text = "";
    customer.Find("CN").GetComponent <Text>().text = "";
    Debug.Log("===" + index);
    if (index >= curriculum.dialogs.Count)//index is greater than the dialog count: finish
    {
        yield return(new WaitForSeconds(0.5f));
        Finish();
        yield break;
    }
    dialog = curriculum.dialogs[index];
    if (dialog.triggerId != 0)//if triggerId is not zero, advance to the dialog whose triggerId is zero or equals the current triggerId
    {
        for (; index < curriculum.dialogs.Count; index++)
        {
            if ((curriculum.dialogs[index].triggerId != 0 && curriculum.dialogs[index].triggerId == triggerId) || (curriculum.dialogs[index].triggerId == 0))
            {
                triggerId = 0;
                break;
            }
        }
    }
    dialog = curriculum.dialogs[index];//reset the dialog to the correct one
    if (dialog.transfer == 1)
    {
        // Scene transfer: pause, move, then settle before continuing.
        yield return(new WaitForSeconds(4f));
        iPlay.Transfer(dialog.transfer);
        yield return(new WaitForSeconds(1f));
    }
    if (dialog.askId >= 0)
    {
        // Show and voice the waiter's line; CN text is hidden in review mode.
        waiter.Find("EN").GetComponent <Text>().text = Sentence.Get(dialog.askId).en;
        waiter.Find("CN").GetComponent <Text>().text = Role.currentRole.isReview ? "" : Sentence.Get(dialog.askId).cn;
        float waiterTime = Audio.GetInstance().Play(AudioType.INTRO, Sentence.Get(dialog.askId).audio);
        iPlay.PlayAnimation(Sentence.Get(dialog.askId).anim);
        yield return(new WaitForSeconds(waiterTime));
        Role.AddToReviewByDialog(dialog.askId);
    }
    if (dialog.answerIds.Count == 1)//only one answer
    {
        answerIndex = 0;
        DisplayAnswer(answerIndex);
    }
    else if (dialog.answerIds.Count > 1) //multiple answers
    {
        if (string.IsNullOrEmpty(dialog.keyword)) //no keyword: randomly select one
        {
            answerIndex = UnityEngine.Random.Range(0, dialog.answerIds.Count);
            DisplayAnswer(answerIndex);
        }
        else//has a keyword: display items according to the dialog and wait for user voice input
        {
            iPlay.Process(dialog);
            //agent.StartRecognize(RecognizeResult);
            DisplayHints(dialog.answerIds);
            voiceHint = true;
            deltaTime = 0f;
        }
    }
    else//no customer dialog: finish or advance
    {
        if (index == curriculum.dialogs.Count - 1)
        {
            yield return(new WaitForSeconds(0.5f));
            Finish();
            yield return(null);
        }
        else
        {
            index += 1;
            yield return(StartCoroutine(ShowDialog()));
        }
    }
}
/// <summary>
/// Forwards the given sentence to the underlying sentence displayer.
/// </summary>
/// <param name="currentSentence">The sentence to render.</param>
public void DisplaySentence(Sentence currentSentence) => sentenceDisplayer.DisplaySentence(currentSentence);
/// <summary>
/// Reorders the "timer" units of a sentence paragraph: when timer units exist they
/// are moved before a past-tense Vb/Vba unit or a DG unit found to the left, or else
/// re-emitted in serial-number order in place; the sentence breaker is re-appended
/// and trailing blank space trimmed. Returns the paragraph unchanged when the
/// sentence contains no timer unit.
/// </summary>
public Paragraph ShuffleSentenceUnit(Paragraph xmlSentenceElement)
{
    _sentence = new Sentence(xmlSentenceElement);
    Text[] sentenceArray = _sentence.SentenceArray;
    // No timer unit anywhere in the sentence: nothing to shuffle.
    if (_sentence.UnitNotFoundInSentence(
            sentenceArray, element => element.InnerText.IsTimer()))
    {
        return(xmlSentenceElement);
    }
    _sentence.TimerUnitCount = 0;
    var timerUnits = GetTimerUnits(
        sentenceArray).ToArray <IMoveableUnit>();
    // NOTE(review): TimerUnitCount is reset to 0 above and then used as an index
    // below — presumably GetTimerUnits increments it as a side effect; confirm,
    // otherwise this index would be -1.
    _sentence.UnderlineJoinedSentenceUnit(
        sentenceArray, timerUnits[0].StartPosition,
        timerUnits[_sentence.TimerUnitCount - 1].EndPosition);
    Text[] timerUnitsInSerialNumberOrder = _sentence.GetMoveableUnitsInSerialNumberDescendingOrder(
        _sentence.TimerUnitCount, timerUnits, sentenceArray);
    Text[] newSentence;
    if (_sentence.HasVbVbaPastToTheLeft(
            sentenceArray, timerUnits[0].StartPosition))
    {
        // A past-tense Vb/Vba unit precedes the timer: move the timer before it.
        newSentence = MoveTimerUnitBeforeVbVbaPast(
            sentenceArray, timerUnitsInSerialNumberOrder, timerUnits[0].StartPosition);
    }
    else if (_sentence.HasDGToTheLeft(
                 sentenceArray, timerUnits[0].StartPosition))
    {
        // Otherwise anchor on the first DG unit, if one precedes the timer.
        int dGIndexPosition = Array.FindIndex(sentenceArray, i => i.IsDG());
        newSentence = MoveTimerUnitBeforeDGUnit(
            sentenceArray, dGIndexPosition, timerUnitsInSerialNumberOrder);
    }
    else
    {
        // No anchor to the left: keep the prefix and append the reordered timer units.
        var beforeTimer = sentenceArray.Take(timerUnits[0].StartPosition);
        newSentence = beforeTimer.Concat(timerUnitsInSerialNumberOrder).ToArray();
    }
    // Re-attach the sentence breaker and trim trailing blank space.
    newSentence = _sentence.RemoveAnyBlankSpaceFromEndOfUnit
                      (newSentence
                      .Concat(
                           _sentence.GetSentenceBreaker(sentenceArray)).ToArray());
    return(new Paragraph(
               OpenXmlHelper.BuildWordsIntoOpenXmlElement(
                   newSentence)));
}
// "six-year-old" must be accepted: the repeated hyphens do not break the isogram check.
public void Isogram_sixyearold() => Assert.True(Sentence.IsogramSentence("six-year-old"));
// Edge case: the empty string is accepted as an isogram.
public void Isogram_Empty() => Assert.True(Sentence.IsogramSentence(string.Empty));
/// <summary>
/// Accumulates every variable of the sentence into <paramref name="vars"/> by
/// dispatching the shared collector visitor over it.
/// </summary>
private static void collectAllVariables(Sentence s, List <Variable> vars) => s.accept(_collectAllVariables, vars);
//override public string generateSummary(DocsStatistics docStats, Document newDoc)
/// <summary>
/// Centroid-based multi-document summarizer: scores every sentence by centroid
/// value (C), position (P) and title/first-sentence overlap (F), then iteratively
/// applies a redundancy penalty and re-summarizes until the generated summary
/// stabilizes at the requested compression ratio.
/// </summary>
override public string generateSummary(ArrayList docs, double compressionRatio)
{
    ArrayList allTitles = new ArrayList();
    ArrayList allFirstSents = new ArrayList();
    ArrayList allSents = new ArrayList();
    // Collect titles, first sentences and all sentences across the document set.
    foreach (Document doc in docs) {
        allTitles.Add(doc.title);
        if (doc.sentences.Count >= 1) { allFirstSents.Add(doc.sentences[0]); }
        allSents.AddRange(doc.sentences);
    }
    double[] cTotal = new double[allSents.Count]; // centroid score per sentence (C)
    double[] pTotal = new double[allSents.Count]; // positional score per sentence (P)
    double[] fTotal = new double[allSents.Count]; // title/first-sentence overlap score (F)
    double cMax = double.MinValue;
    // Lazily load the centroid clusters on first use.
    if (this.centroidClusters == null) { this.centroidClusters = CentroidCluster.fromFolder(this.clustersDir, this.idfThreshold, this.keepWords); }
    for (int i = 0; i < allSents.Count; i++) {
        Sentence currSent = (Sentence)allSents[i];
        // Calculate C: sum of the centroid values of the sentence's words.
        cTotal[i] = 0;
        foreach (string word in currSent.words) { cTotal[i] += getCentroidValue(this.centroidClusters, word); }
        if (cTotal[i] > cMax) { cMax = cTotal[i]; }
        // Calculate F: co-occurrence of the sentence's words with titles and first sentences.
        fTotal[i] = 0;
        foreach (string word in currSent.words) {
            int wordOccurence = 0;
            // NOTE(review): allTitles holds doc.title values and is enumerated as
            // Sentence — presumably titles are stored as Sentence objects; confirm
            // against the Document type.
            foreach (Sentence title in allTitles) { if (title.wordsCount[word] != null) { wordOccurence += ((int)title.wordsCount[word]); } }
            foreach (Sentence firstSent in allFirstSents) { if (firstSent.wordsCount[word] != null) { wordOccurence += ((int)firstSent.wordsCount[word]); } }
            fTotal[i] += (wordOccurence * ((int)currSent.wordsCount[word]));
        }
    }
    // Calculate P: earlier sentences in a document score closer to cMax.
    int pIndex = 0;
    foreach (Document doc in docs) {
        for (int i = 0; i < doc.sentences.Count; i++) {
            // Remove + 1 as arrays are zero based.
            pTotal[pIndex++] = ((doc.sentences.Count - i) * cMax) / doc.sentences.Count;
        }
    }
    double maxScore = double.MinValue;
    // Combine the three weighted components into each sentence's weight.
    for (int i = 0; i < allSents.Count; i++) {
        double currWeight = (this.centroidWeight * cTotal[i]) + (this.positionalWeight * pTotal[i]) + (this.firstSentenceWeight * fTotal[i]);
        ((Sentence)allSents[i]).weight = currWeight;
        if (currWeight > maxScore) { maxScore = currWeight; }
    }
    string genSummary = null;
    string prevgenSummary = null;
    // Iterate: penalize redundancy between sentence pairs, re-summarize, and stop
    // once the generated summary no longer changes between iterations.
    do {
        for (int i = 0; i < allSents.Count; i++) {
            for (int j = 0; j < allSents.Count; j++) {
                if (i >= j) { continue; }
                double redundancy = redundancyPenalty((Sentence)allSents[i], (Sentence)allSents[j]);
                ((Sentence)allSents[j]).weight -= (maxScore * redundancy);
            }
        }
        // Recompute the maximum weight after the penalty pass.
        maxScore = double.MinValue;
        for (int i = 0; i < allSents.Count; i++) { if (((Sentence)allSents[i]).weight > maxScore) { maxScore = ((Sentence)allSents[i]).weight; } }
        Sentence[] sents = (Sentence[])allSents.ToArray(typeof(Sentence));
        prevgenSummary = genSummary;
        genSummary = SummaryUtil.SummarizeByCompressionRatio(sents, compressionRatio);
    } while (!genSummary.Equals(prevgenSummary));
    return(genSummary);
}
/// <summary>
/// Recursively walks a script object via reflection and materializes a Sentence
/// node for each element of every generic collection found, registering each new
/// Sentence with the Page's id map. KeyValuePair elements are split into key/value
/// parts; struct elements are expanded recursively.
/// </summary>
/// <param name="sentence">Parent sentence whose child list is populated.</param>
/// <param name="VariableType">Declared type of the value being expanded.</param>
/// <param name="Page">Page that owns the sentences; supplies and consumes sentence ids.</param>
/// <param name="Script">The object instance whose members are read.</param>
/// <param name="Info">Optional single field to expand instead of scanning all fields.</param>
/// <param name="PInfo">Optional single property to expand (used when <paramref name="Info"/> is null).</param>
public static void GenerateSentenceValuesforSentence(Sentence sentence, Type VariableType, Page Page, object Script, FieldInfo Info = null, PropertyInfo PInfo = null)
{
    if (Info == null && PInfo == null) {
        // No specific member requested: scan every field of the type.
        foreach (FieldInfo method in VariableType.GetFields(BindingFlags.Public | BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Static)) {
            // Skip members marked [Obsolete].
            if (method.GetCustomAttributes(typeof(ObsoleteAttribute), true).Length == 0) {
                if (method.FieldType.IsGenericType) {
                    IEnumerable list = method.GetValue(Script) as IEnumerable;
                    if (sentence.Sentences == null) { sentence.Sentences = new List <Sentence>(); }
                    uint count = 0;
                    if (list != null) {
                        foreach (var c in list) {
                            // One Sentence per collection element, registered with the page.
                            Sentence _sentence = new Sentence();
                            _sentence.ValueVariable = c;
                            _sentence.OnPageID = Page.ID;
                            _sentence.PagePosition = count;
                            _sentence.ValueVariableType = c.GetType();
                            _sentence.SentenceID = Page.ASentenceID;
                            Page.ASentenceID++;
                            Page.IDtoSentence[_sentence.SentenceID] = _sentence;
                            Type valueType = c.GetType();
                            // KeyValuePair elements are split into key and value parts.
                            if (valueType.IsGenericType) {
                                Type baseType = valueType.GetGenericTypeDefinition();
                                if (baseType == typeof(KeyValuePair <,>)) {
                                    _sentence.KeyVariable = valueType.GetProperty("Key").GetValue(c, null);
                                    _sentence.ValueVariable = valueType.GetProperty("Value").GetValue(c, null);
                                    _sentence.ValueVariableType = valueType.GetGenericArguments()[1];
                                    _sentence.KeyVariableType = valueType.GetGenericArguments()[0];
                                }
                            }
                            // Value types (structs) are expanded recursively.
                            if (!valueType.IsClass) { GenerateSentenceValuesforSentence(_sentence, c.GetType(), Page, c); }
                            count++;
                            sentence.Sentences.Add(_sentence);
                        }
                    }
                }
            }
        }
    } else {
        // A specific field or property was supplied: expand just that member.
        if (VariableType.IsGenericType) {
            IEnumerable list;
            Type TType;
            if (Info == null) {
                list = PInfo.GetValue(Script) as IEnumerable; //icollection<keyvaluepair>
                TType = PInfo.PropertyType;
            } else {
                list = Info.GetValue(Script) as IEnumerable; //
                TType = Info.FieldType;
            }
            sentence.Sentences = new List <Sentence>();
            uint count = 0;
            if (list != null) {
                foreach (object c in list) {
                    Sentence _sentence = new Sentence();
                    _sentence.ValueVariable = c;
                    _sentence.OnPageID = Page.ID;
                    _sentence.PagePosition = count;
                    _sentence.ValueVariableType = c.GetType();
                    _sentence.SentenceID = Page.ASentenceID;
                    Page.ASentenceID++;
                    Page.IDtoSentence[_sentence.SentenceID] = _sentence;
                    Type valueType = c.GetType();
                    if (valueType.IsGenericType) {
                        Type baseType = valueType.GetGenericTypeDefinition();
                        if (baseType == typeof(KeyValuePair <,>)) {
                            _sentence.KeyVariable = valueType.GetProperty("Key").GetValue(c, null);
                            _sentence.ValueVariable = valueType.GetProperty("Value").GetValue(c, null);
                            _sentence.ValueVariableType = valueType.GetGenericArguments()[1];
                            _sentence.KeyVariableType = valueType.GetGenericArguments()[0];
                        }
                    }
                    if (!valueType.IsClass) { GenerateSentenceValuesforSentence(_sentence, c.GetType(), Page, c); }
                    count++;
                    // NOTE(review): this branch appends to Page.Sentences.Sentences while the
                    // field-scan branch above appends to the parent's own list — confirm intended.
                    Page.Sentences.Sentences.Add(_sentence);
                }
            }
        }
    }
}
/// <summary>
/// Splits a sentence into noun-group segments. For each item that is neither a verb
/// nor a delimiter, it scans left for the governing verb (BeforeVerb), walks right
/// collecting the run of nouns/adverbs together with their comma/and/or connectors,
/// then scans right past the segment for a trailing verb (AfterVerb). Finally
/// CreateLinks is invoked on every segment.
/// </summary>
public static List <NGSegment> CreateSegments(Sentence s)
{
    List <NGSegment> res = new List <NGSegment>();
    for (int i = 0; i < s.Items.Count; i++) {
        SentItem it = s.Items[i];
        // Segments start only at non-verb, non-delimiter items.
        if (it.Typ == SentItemType.Verb || it.Typ == SentItemType.Delim) { continue; }
        NGSegment seg = new NGSegment();
        NGItem nit = new NGItem() { Source = it };
        // Scan left for the governing verb, stopping at delimiters, conjunctions or
        // nouns, and recording any comma/and/or connector encountered on the way.
        for (int j = i - 1; j >= 0; j--) {
            it = s.Items[j];
            if (it.Typ == SentItemType.Verb) { seg.BeforeVerb = it.Source as Pullenti.Ner.Core.VerbPhraseToken; break; }
            if (it.Typ == SentItemType.Delim) { break; }
            if (it.CanBeCommaEnd) {
                if ((it.Source as Pullenti.Ner.Core.ConjunctionToken).Typ == Pullenti.Ner.Core.ConjunctionType.Comma) { nit.CommaBefore = true; } else { nit.AndBefore = true; if ((it.Source as Pullenti.Ner.Core.ConjunctionToken).Typ == Pullenti.Ner.Core.ConjunctionType.Or) { nit.OrBefore = true; } }
            }
            if (it.Typ == SentItemType.Conj || it.CanBeNoun) { break; }
        }
        bool comma = false; bool and = false; bool or = false;
        // Walk right, collecting this segment's noun/adverb items and the connectors
        // between them; stop at a verb, conjunction or delimiter.
        for (; i < s.Items.Count; i++) {
            it = s.Items[i];
            if (it.CanBeCommaEnd) {
                comma = false; and = false; or = false;
                if ((it.Source as Pullenti.Ner.Core.ConjunctionToken).Typ == Pullenti.Ner.Core.ConjunctionType.Comma) { comma = true; } else { and = true; if ((it.Source as Pullenti.Ner.Core.ConjunctionToken).Typ == Pullenti.Ner.Core.ConjunctionType.Or) { or = true; } }
                // Mirror the connector onto the previous item as an "after" flag.
                if (seg.Items.Count > 0) { if (comma) { seg.Items[seg.Items.Count - 1].CommaAfter = true; } else { seg.Items[seg.Items.Count - 1].AndAfter = true; if (or) { seg.Items[seg.Items.Count - 1].OrAfter = true; } } }
                continue;
            }
            if (it.CanBeNoun || it.Typ == SentItemType.Adverb) {
                // New segment member carrying the pending connector flags.
                nit = new NGItem() { Source = it, CommaBefore = comma, AndBefore = and, OrBefore = or };
                seg.Items.Add(nit);
                comma = false; and = false; or = false;
            } else if (it.Typ == SentItemType.Verb || it.Typ == SentItemType.Conj || it.Typ == SentItemType.Delim) { break; }
        }
        // Scan right past the segment for a trailing verb (AfterVerb).
        for (int j = i; j < s.Items.Count; j++) {
            it = s.Items[j];
            if (it.Typ == SentItemType.Verb) { seg.AfterVerb = it.Source as Pullenti.Ner.Core.VerbPhraseToken; break; }
            if ((it.Typ == SentItemType.Conj || it.CanBeNoun || it.Typ == SentItemType.Delim) || it.Typ == SentItemType.Adverb) { break; }
        }
        res.Add(seg);
    }
    // Resolve intra-segment links once all segments are built.
    foreach (NGSegment ss in res) { ss.CreateLinks(false); }
    return(res);
}
/// <summary>
/// Verifies the CNF conversions of the "loves animal" examples from AIMA 2e, pg. 299:
/// each FOL sentence A–G is parsed and converted, and the rendered CNF is compared
/// against the expected clause form.
/// </summary>
public void testExamplesPg299AIMA2e()
{
    FOLDomain domain = DomainFactory.lovesAnimalDomain();
    FOLParser parser = new FOLParser(domain);
    CNFConverter converter = new CNFConverter(parser);

    // FOL A. -> CNF A1. and A2.
    CNF cnf = converter.convertToCNF(parser.parse("FORALL x (FORALL y (Animal(y) => Loves(x, y)) => EXISTS y Loves(y, x))"));
    Assert.AreEqual(
        "[Animal(SF0(x)), Loves(SF1(x),x)],[~Loves(x,SF0(x)), Loves(SF1(x),x)]",
        cnf.ToString());

    // FOL B. -> CNF B.
    cnf = converter.convertToCNF(parser.parse("FORALL x (EXISTS y (Animal(y) AND Kills(x, y)) => FORALL z NOT(Loves(z, x)))"));
    Assert.AreEqual("[~Animal(y), ~Kills(x,y), ~Loves(z,x)]", cnf.ToString());

    // FOL C. -> CNF C.
    cnf = converter.convertToCNF(parser.parse("FORALL x (Animal(x) => Loves(Jack, x))"));
    Assert.AreEqual("[~Animal(x), Loves(Jack,x)]", cnf.ToString());

    // FOL D. -> CNF D.
    cnf = converter.convertToCNF(parser.parse("(Kills(Jack, Tuna) OR Kills(Curiosity, Tuna))"));
    Assert.AreEqual("[Kills(Curiosity,Tuna), Kills(Jack,Tuna)]", cnf.ToString());

    // FOL E. -> CNF E.
    cnf = converter.convertToCNF(parser.parse("Cat(Tuna)"));
    Assert.AreEqual("[Cat(Tuna)]", cnf.ToString());

    // FOL F. -> CNF F.
    cnf = converter.convertToCNF(parser.parse("FORALL x (Cat(x) => Animal(x))"));
    Assert.AreEqual("[~Cat(x), Animal(x)]", cnf.ToString());

    // FOL G. -> CNF G.
    cnf = converter.convertToCNF(parser.parse("NOT(Kills(Curiosity, Tuna))"));
    Assert.AreEqual("[~Kills(Curiosity,Tuna)]", cnf.ToString());
}
// START-InferenceProcedure

/**
 * FOL-FC-ASK returns a substitution or false.
 *
 * @param KB
 *            the knowledge base, a set of first order definite clauses
 * @param query
 *            the query, an atomic sentence
 *
 * @return a substitution or false
 */
public InferenceResult ask(FOLKnowledgeBase KB, Sentence query)
{
    // Assertions on the type of queries this Inference procedure supports.
    if (!(query is AtomicSentence)) { throw new IllegalArgumentException("Only Atomic Queries are supported."); }
    FCAskAnswerHandler ansHandler = new FCAskAnswerHandler();
    Literal alpha = new Literal((AtomicSentence)query);
    // local variables: new, the new sentences inferred on each iteration
    ICollection <Literal> newSentences = CollectionFactory.CreateQueue <Literal>();
    // Ensure query is not already a known fact before attempting forward chaining.
    ISet <IMap <Variable, Term> > answers = KB.fetch(alpha);
    if (answers.Size() > 0) { ansHandler.addProofStep(new ProofStepFoChAlreadyAFact(alpha)); ansHandler.setAnswers(answers); return(ansHandler); }
    // repeat until new is empty
    do {
        // new <- {}
        newSentences.Clear();
        // for each rule in KB do
        // (p1 ^ ... ^ pn => q) <-STANDARDIZE-VARIABLES(rule)
        foreach (Clause implIter in KB.getAllDefiniteClauseImplications()) {
            Clause impl = implIter;
            impl = KB.standardizeApart(impl);
            // for each theta such that SUBST(theta, p1 ^ ... ^ pn) =
            // SUBST(theta, p'1 ^ ... ^ p'n)
            // --- for some p'1,...,p'n in KB
            foreach (IMap <Variable, Term> thetaIter in KB.fetch(invert(impl.getNegativeLiterals()))) {
                IMap <Variable, Term> theta = thetaIter;
                // q' <- SUBST(theta, q)
                Literal qDelta = KB.subst(theta, impl.getPositiveLiterals().Get(0));
                // if q' does not unify with some sentence already in KB or new then do
                if (!KB.isRenaming(qDelta) && !KB.isRenaming(qDelta, newSentences)) {
                    // add q' to new
                    newSentences.Add(qDelta);
                    ansHandler.addProofStep(impl, qDelta, theta);
                    // theta <- UNIFY(q', alpha)
                    theta = KB.unify(qDelta.getAtomicSentence(), alpha.getAtomicSentence());
                    // if theta is not fail then return theta
                    if (null != theta) {
                        // Tell the KB the inferred sentences first so the final
                        // fetch of alpha can see them.
                        foreach (Literal l in newSentences) {
                            Sentence s = null;
                            if (l.isPositiveLiteral()) { s = l.getAtomicSentence(); } else { s = new NotSentence(l.getAtomicSentence()); }
                            KB.tell(s);
                        }
                        ansHandler.setAnswers(KB.fetch(alpha));
                        return(ansHandler);
                    }
                }
            }
        }
        // add new to KB
        foreach (Literal l in newSentences) {
            Sentence s = null;
            if (l.isPositiveLiteral()) { s = l.getAtomicSentence(); } else { s = new NotSentence(l.getAtomicSentence()); }
            KB.tell(s);
        }
    } while (newSentences.Size() > 0);
    // return false
    return(ansHandler);
}
/// <summary>
/// Visits a NOT sentence: runs this visitor over its operand and rebuilds the
/// negation around the rewritten operand.
/// </summary>
public object visitNotSentence(NotSentence notSentence, object arg)
{
    var rewrittenOperand = (Sentence)notSentence.getNegated().accept(this, arg);
    return new NotSentence(rewrittenOperand);
}