// Cache sibling components and tint this creature's renderer with its species colour.
void Awake()
{
    body = GetComponent<Body>();
    nav = GetComponent<Navigator>();
    senses = GetComponent<Senses>();
    // Species is assigned to the material colour — presumably a per-species Color; confirm field type.
    renderer.material.color = Species;
}
// Line-of-sight test: true when 'character' lies inside the registrant's sight
// range and viewing cone AND a ray cast from the registrant's eye height reaches
// a collider tagged as the player. 'direction' always receives the offset from
// registrant to character, even when the method returns false.
private static bool CanSeeCharacter(Transform registrant, Transform character, out Vector3 direction)
{
    Senses senses = registrant.GetComponent<Senses>();
    float registrantHeight = registrant.GetComponent<CharacterController>().height;

    direction = character.position - registrant.position;

    // Too far away to be seen at all.
    float distanceToTarget = Vector3.Distance(registrant.position, character.position);
    if (senses.sightRange <= distanceToTarget)
    {
        return false;
    }

    // Outside the viewing cone (half the viewing angle each side of forward).
    float angle = System.Math.Abs(Vector3.Angle(direction, registrant.forward));
    if (angle >= (senses.viewingAngle / 2))
    {
        return false;
    }

    // Ray mask: player (layer 8), cover (layer 9) and AI (layer 10), so that
    // cover between the two can intercept the ray.
    LayerMask playerMask = 1 << 8;
    LayerMask coverMask = 1 << 9;
    LayerMask aiMask = 1 << 10;
    LayerMask mask = coverMask | playerMask | aiMask;

    // Cast from the registrant's eye height towards the character's root position.
    //float targetHeight = (character.GetComponent<CharacterController>().height);
    Vector3 registrantEyePosition = new Vector3(registrant.position.x, registrant.position.y + registrantHeight, registrant.position.z);
    Vector3 targetBodyPosition = new Vector3(character.transform.position.x, character.transform.position.y, character.transform.position.z);
    Vector3 rayDirection = (targetBodyPosition - registrantEyePosition).normalized;

    RaycastHit hitData;
    bool hit = Physics.Raycast(registrantEyePosition, rayDirection, out hitData, senses.sightRange, mask.value);
    Debug.DrawRay(registrantEyePosition, rayDirection * senses.sightRange, Color.magenta);

    // Anything other than the player blocking the ray (e.g. cover) means "not seen".
    return hit && hitData.collider.tag == Tags.Player;
}
// Use this for initialization: build the neural net, cache required
// components (asserting they exist) and remember the spawn position.
private void Start()
{
    // Ann(2, 2, 1, 6, 0.2) — presumably inputs/outputs/hidden layers/neurons/learning rate; confirm against the Ann constructor.
    _ann = new Ann(2, 2, 1, 6, 0.2);

    _statsTexts = _stats.GetComponentsInChildren<Text>();
    Assert.IsNotNull(_statsTexts);

    _senses = GetComponent<Senses>();
    Assert.IsNotNull(_senses);

    _rigidbody2D = GetComponent<Rigidbody2D>();
    Assert.IsNotNull(_rigidbody2D);

    // Remember where the agent spawned so it can be reset later.
    _startPosition = transform.position;

    Time.timeScale = _timeScale;
}
// Spawns a bullet at the muzzle, stamps it with damage/ownership data,
// propels it forward and — unless silenced — raises a sound alert and
// plays the firing audio.
public void Fire()
{
    var go = Instantiate(bullet) as GameObject;

    Bullet bulletComponent = go.GetComponent<Bullet>();
    bulletComponent.damagePacket = new DamagePacket(_entity, (int)damage, damageType);
    bulletComponent.owner = _entity;

    go.transform.position = SpawnPoint.position;
    go.transform.forward = SpawnPoint.forward;
    go.GetComponent<Rigidbody>()?.AddForce(go.transform.forward * bulletComponent.speed, ForceMode.Impulse);

    if (!isSilent)
    {
        // Firing noise can draw zombies toward this position.
        Senses.TriggerSoundAlert(transform.position, zombieTargetPriority);
        audioFire.Play();
    }
}
// Update is called once per frame. While the alarm is active, pulses the
// light, keeps broadcasting a sound alert, and shuts everything down once
// the alarm duration elapses.
void Update()
{
    if (!_alarmActivated)
    {
        return;
    }

    // Pulse the light intensity between 0 and 2.5 while the alarm runs.
    _internalTimer += Time.deltaTime * LightPulseSpeed;
    light.intensity = Mathf.PingPong(_internalTimer, 2.5f);

    _durationTimer += Time.deltaTime;
    Senses.TriggerSoundAlert(transform.position, zombieTargetPriority);

    if (_durationTimer >= alarmDuration)
    {
        // Alarm expired: reset timers, silence the siren and shrink the
        // light back to its pre-alarm range.
        _durationTimer = 0;
        _alarmActivated = false;
        GetComponent<AudioSource>().Stop();
        light.range /= lightRangeMultiplier;
    }
}
/// <summary>
/// used by SILCAWL list.
/// Returns the gloss of the first sense for the given writing system,
/// falling back to its definition, or a placeholder when neither exists.
/// </summary>
public string GetSomeMeaningToUseInAbsenseOfHeadWord(string writingSystemId)
{
    var firstSense = Senses.FirstOrDefault();
    if (firstSense == null)
    {
        return "?NoMeaning?";
    }

    // Prefer the gloss; fall back to the definition when the gloss is empty.
    var gloss = firstSense.Gloss.GetExactAlternative(writingSystemId);
    if (!string.IsNullOrEmpty(gloss))
    {
        return gloss;
    }

    var def = firstSense.Definition.GetExactAlternative(writingSystemId);
    return string.IsNullOrEmpty(def) ? "?NoGlossOrDef?" : def;
}
// Sight check: true when 'Character' is within this AI's sight range and
// viewing cone AND a ray from the AI's eye position reaches a collider
// tagged "Bear" without being blocked by cover. 'direction' is always set
// to the offset from the AI to the character, even on failure.
private static bool CanSeeCharacter(Transform reg, Transform Character, out Vector3 direction)
{
    Senses senses = reg.GetComponent<Senses>();
    float regHeight = reg.GetComponent<CharacterController>().height;

    direction = Character.position - reg.position;

    float distToTarget = Vector3.Distance(reg.position, Character.position);
    if (senses.sightRange <= distToTarget)
    {
        return false; // out of sight range
    }

    float angle = System.Math.Abs(Vector3.Angle(direction, reg.forward));
    if (angle >= (senses.viewingAngle / 2))
    {
        return false; // outside the viewing cone
    }

    // Layers the sight ray can hit: player (8), cover (9) and AI (10), so
    // cover between the two can block the view.
    LayerMask playerMask = 1 << 8;
    LayerMask coverMask = 1 << 9;
    LayerMask aiMask = 1 << 10;
    LayerMask mask = coverMask | playerMask | aiMask;

    // Aim from the AI's eye height to a point partway up the target's body.
    float targetHeight = (Character.GetComponent<CharacterController>().height / 1.25f);
    Vector3 regEyePos = new Vector3(reg.position.x, reg.position.y + regHeight, reg.position.z);
    Vector3 targetBodyPos = new Vector3(Character.transform.position.x, Character.transform.position.y + targetHeight, Character.transform.position.z);
    Vector3 rayDirection = (targetBodyPos - regEyePos).normalized;

    RaycastHit HitData;
    bool hit = Physics.Raycast(regEyePos, rayDirection, out HitData, senses.sightRange, mask.value);
    Debug.DrawRay(regEyePos, rayDirection * senses.sightRange, Color.cyan);

    // Only a direct, unobstructed hit on the bear counts as "seen".
    return hit && HitData.collider.tag == "Bear";
}
/// <summary>
/// Returns the first sense whose meaning (gloss or definition, depending on
/// the GlossMeaning compile symbol) already contains a form matching
/// <paramref name="meaning"/>; otherwise appends a new sense seeded with
/// <paramref name="meaning"/> and returns it.
/// </summary>
public LexSense GetOrCreateSenseWithMeaning(MultiText meaning) //Switch to meaning
{
    foreach (LexSense sense in Senses)
    {
#if GlossMeaning
        if (meaning.HasFormWithSameContent(sense.Gloss))
#else
        if (meaning.HasFormWithSameContent(sense.Definition))
#endif
        {
            return (sense);
        }
    }
    // No existing sense matched: create one and merge the meaning into it.
    LexSense newSense = new LexSense();
    Senses.Add(newSense);
#if GlossMeaning
    newSense.Gloss.MergeIn(meaning);
#else
    newSense.Definition.MergeIn(meaning);
#endif
    return (newSense);
}
/// <summary>
/// Renders this lookup result as a minimal self-contained HTML document.
/// On success, emits the requested word followed by its etymologies, senses
/// and subsenses (each list shows "No data" when absent); on failure, an
/// error heading and the error message. The request date is always appended.
/// </summary>
public string GenerateHtmlString()
{
    var sb = new StringBuilder();
    sb.Append("<!DOCTYPE html><html><head><meta charset='utf-8'></head><body>");
    if (Correct)
    {
        // NOTE(review): "</br>" is not a valid HTML tag ("<br/>" was
        // presumably intended); browsers tolerate it, so it is left as-is.
        sb.Append($"<h2>Requested word: {Word}</h2></br>");
        sb.Append("<label style='color: gray;'>Etymologies</label>");
        if (Etymologies?.Any() ?? false)
        {
            sb.Append("<ul>");
            foreach (var etymologie in Etymologies)
            {
                if (etymologie != null)
                {
                    sb.Append($"<li>{etymologie}</li>");
                }
            }
            sb.Append("</ul>");
        }
        else
        {
            sb.Append("<p>No data</p>");
        }
        sb.Append("<label style='color: gray;'>Senses</label>");
        if (Senses?.Any() ?? false)
        {
            sb.Append("<ul>");
            foreach (var sense in Senses)
            {
                if (sense != null)
                {
                    sb.Append($"<li>{sense}</li>");
                }
            }
            sb.Append("</ul>");
        }
        else
        {
            sb.Append("<p>No data</p>");
        }
        sb.Append("<label style='color: gray;'>Subsenses</label>");
        if (Subsenses?.Any() ?? false)
        {
            sb.Append("<ul>");
            foreach (var subsense in Subsenses)
            {
                if (subsense != null)
                {
                    sb.Append($"<li>{subsense}</li>");
                }
            }
            sb.Append("</ul>");
        }
        else
        {
            sb.Append("<p>No data</p>");
        }
    }
    else
    {
        // Distinguish "empty request" from "lookup failed for this word".
        sb.Append(String.IsNullOrEmpty(Word)
            ? "<h2 style='color: red;'>You requested empty string</h2>"
            : $"<h2 style='color: red;'>Request failed with word: {Word}</h2>");
        sb.Append($"<p><strong>Error message: </strong>{ErrorMessage}</p>");
    }
    sb.Append($"<h4>Requested date: {DateTime.Now.ToShortDateString()}</h4>");
    sb.Append("</body></html>");
    return (sb.ToString());
}
/// <summary>
/// Wires up this AI agent: caches component references, builds the tactical
/// finite state machine (Goto / Animate / UseSmartObject, fully connected),
/// and declares the GOAP actions, initial world state and goals used by the
/// planner.
/// </summary>
protected void Awake()
{
    // Build the Finite State Machine
    characterController = this.GetComponent<CharacterController>();
    //target = GameObject.FindGameObjectWithTag(Tags.FriendlyAI);
    ttm = this.gameObject.GetComponent<TargetTrackingManager>();
    senses = this.gameObject.GetComponent<Senses>();
    animator = this.GetComponent<Animator>();
    aiGunLogic = AIGun.GetComponent<AIGunLogic>();
    health = this.GetComponent<Health>();
    imageBehaviour = this.GetComponent<ImageBehaviour>();
    alertImage = imageBehaviour.alertImage.gameObject;
    chaseImage = imageBehaviour.chaseImage.gameObject;

    // Tactical FSM: every state can transition to every state (including itself).
    tacticalStateMachine = new FSM<TacticalStates>(displayFSMTransitions);
    tacticalStateMachine.AddState(new Goto<TacticalStates>(TacticalStates.Goto, this, 0f));
    tacticalStateMachine.AddState(new Animate<TacticalStates>(TacticalStates.Animate, this, 0f));
    tacticalStateMachine.AddState(new UseSmartObject<TacticalStates>(TacticalStates.UseSmartObject, this, 0f));
    tacticalStateMachine.AddTransition(TacticalStates.Goto, TacticalStates.Animate);
    tacticalStateMachine.AddTransition(TacticalStates.Goto, TacticalStates.UseSmartObject);
    tacticalStateMachine.AddTransition(TacticalStates.Goto, TacticalStates.Goto);
    tacticalStateMachine.AddTransition(TacticalStates.Animate, TacticalStates.Goto);
    tacticalStateMachine.AddTransition(TacticalStates.Animate, TacticalStates.UseSmartObject);
    tacticalStateMachine.AddTransition(TacticalStates.Animate, TacticalStates.Animate);
    tacticalStateMachine.AddTransition(TacticalStates.UseSmartObject, TacticalStates.Goto);
    tacticalStateMachine.AddTransition(TacticalStates.UseSmartObject, TacticalStates.Animate);
    tacticalStateMachine.AddTransition(TacticalStates.UseSmartObject, TacticalStates.UseSmartObject);

    // GOAP actions available to the planner.
    actions = new List<Action>();
    Action idle = new Idle("idle", 1, this, TacticalStates.Goto);
    idle.SetEffect(Atom.OnGuard, true);
    idle.destination = patrolPoints[destinationPoint];
    actions.Add(idle);
    Action GetInRangeOfPlayer = new getInRangeOfPlayer("Get in range of the player", 1, this, TacticalStates.Goto);
    GetInRangeOfPlayer.SetPreCondition(Atom.KnowledgeOfPlayer, true);
    GetInRangeOfPlayer.SetEffect(Atom.CanSeePlayer, true);
    GetInRangeOfPlayer.SetEffect(Atom.InRange, true);
    GetInRangeOfPlayer.destination = GameObject.FindGameObjectWithTag(Tags.Player).transform;
    actions.Add(GetInRangeOfPlayer);
    Action attackWithGun = new AttackWithGun("Attack with gun", 1, this, TacticalStates.Goto);
    attackWithGun.SetPreCondition(Atom.InRange, true);
    attackWithGun.SetPreCondition(Atom.CanSeePlayer, true);
    attackWithGun.SetEffect(Atom.PlayerDead, true);
    attackWithGun.destination = searchPos.transform;
    actions.Add(attackWithGun);

    // set the current world state
    startWS = new WorldState();
    // what the ai knows about the enviornment
    startWS.SetValue(Atom.KnowledgeOfPlayer, false);
    startWS.SetValue(Atom.CanSeePlayer, false);

    // Goals, weighted by priority (combat outranks idling).
    goals = new List<Goal>();
    Goal idleGoal = new Goal(1);
    idleGoal.condition.SetValue(Atom.OnGuard, true);
    goals.Add(idleGoal);
    Goal combatGoal = new Goal(10);
    //combatGoal.condition.SetValue(Atom.HaveGun, true);
    //combatGoal.condition.SetValue(Atom.HaveAmmo, true);
    combatGoal.condition.SetValue(Atom.PlayerDead, true);
    goals.Add(combatGoal);
    // Goal getInRangeGoal = new Goal(20);
    // getInRangeGoal.condition.SetValue(Atom.InRange, true);
    // goals.Add(getInRangeGoal);
    /*Goal findTargetGoal = new Goal(5);
     * //combatGoal.condition.SetValue(Atom.HaveGun, true);
     * //combatGoal.condition.SetValue(Atom.HaveAmmo, true);
     * findTargetGoal.condition.SetValue(Atom.CanSeePlayer, true);
     * goals.Add(findTargetGoal);*/
}
// Cache perception, navigation and metabolism components on wake-up.
void Awake()
{
    senses = GetComponent<Senses>();
    nav = GetComponent<Navigator>();
    metabolism = GetComponent<Body>();
}
// Cache the Animator and Senses components this behaviour drives.
public void Awake()
{
    senses = GetComponent<Senses>();
    animator = GetComponent<Animator>();
}
/// <summary>
/// End-to-end keyphrase extraction over a text segment: (3.1) chunk the
/// text, merge/rectify NP chunks and collect noun/adjective vectors, then
/// disambiguate each term against WordNet (and optional KB plug-in drivers);
/// (3.2) build a pairwise Wu-Palmer similarity matrix, cluster it with
/// affinity propagation and interactively prune clusters via the console;
/// (3.3) expand the surviving seeds into candidate phrases and print them
/// together with unmapped (MISC) named entities. No-op on null/empty input.
/// </summary>
public void Extract(string text_segment)
{
    if (!string.IsNullOrEmpty(text_segment))
    {
        #region Local Variables
        int i = 0;
        int j;
        int k;
        int d;
        int l;
        int chunkLength;
        int chunksLength;
        string curToken;
        List<SynSet> Senses, tmpSenses;
        SynSet tmpSense;
        List<SentenceChunk> Chunks = new List<SentenceChunk>(); // This list of all chunks
        List<SentenceChunk> tmpChunks = new List<SentenceChunk>(); // This list of all chunks
        Dictionary<string, SynSet> CachedConcepts = new Dictionary<string, SynSet>();
        TextVectors = new List<TaggedWord>(); // The list that will hold all mappable terms with thier information
        List<string> MiscTerms = new List<string>(); // The list of unmapped terms in the text
        string[] tokens;
        string[] sentences = _sentenceDetector.SentenceDetect(text_segment);
        #endregion

        #region Section 3.1.
        // Extract all chunks from the given text segment
        for (k = 0; k < sentences.Length; k++)
        {
            tokens = _tokenizer.Tokenize(sentences[k]);
            tmpChunks = _chunker.GetChunks(tokens, _posTagger.Tag(tokens));
            tmpChunks.RemoveAll(predicate => predicate.TaggedWords.Count == 0);
            Chunks.AddRange(tmpChunks);
        }
        tmpChunks = null;
        tokens = null;
        sentences = null;
        // Extract elements that will be used for Similarity Matrix Generation as the input of clustering
        chunksLength = Chunks.Count;
        while (i < chunksLength)
        {
            // Look only inside NP chunks
            if (Chunks[i].Tag == "NP")
            {
                #region Rectify NP Chunks
                if (i + 1 < chunksLength)
                {
                    if (Chunks[i + 1].Tag == "NP")
                    {
                        if (Chunks[i + 1].TaggedWords[0].Tag.StartsWith("NNP") || AllowedDTList.ContainsKey(Chunks[i + 1].TaggedWords[0].Word))
                        {
                            // Merge the following NP chunk into the current one.
                            int length = Chunks[i].TaggedWords.Count;
                            foreach (TaggedWord w in Chunks[i + 1].TaggedWords)
                            {
                                w.Index = length;
                                Chunks[i].TaggedWords.Add(w);
                                length++;
                            }
                            Chunks.RemoveRange(i + 1, 1);
                            chunksLength = chunksLength - 1;
                        }
                    }
                    else if (Chunks[i + 1].Tag == "PP" && i + 2 < chunksLength)
                    {
                        if (Chunks[i + 2].TaggedWords[0].Tag.StartsWith("NNP") || AllowedDTList.ContainsKey(Chunks[i + 1].TaggedWords[0].Word))
                        {
                            // Merge "NP + PP + NP" into a single NP chunk.
                            int length = Chunks[i].TaggedWords.Count;
                            Chunks[i + 1].TaggedWords[0].Index = length;
                            Chunks[i].TaggedWords.Add(Chunks[i + 1].TaggedWords[0]);
                            length++;
                            foreach (TaggedWord w in Chunks[i + 2].TaggedWords)
                            {
                                w.Index = length;
                                length++;
                                Chunks[i].TaggedWords.Add(w);
                            }
                            Chunks.RemoveRange(i + 1, 2);
                            chunksLength = chunksLength - 2;
                        }
                    }
                }
                #endregion
                #region Find N-Gram NNPs
                // This part is very important:
                // 1- Rectify any linguistic errors generated as side effect of the previous step (such as "Belly the")
                // 2- Eliminate any syntactic errors such as Texas Rangers (sports) --> Texas Ranger (Police)
                // since we don't alter the value of a NNP(s)
                chunkLength = Chunks[i].TaggedWords.Count;
                j = 0;
                // Loop through all chunk words
                while (j < chunkLength)
                {
                    if (Chunks[i].TaggedWords[j].Tag[0] == 'N')
                    {
                        // Needed for fast access to the last element in SemanticElements
                        d = TextVectors.Count() - 1;
                        // Check the probability of merging N-gram Named Entities (NNP(S)* || NNP(S)*|DT*|NNP(S)*)
                        if (Chunks[i].TaggedWords[j].Tag.StartsWith("NNP"))
                        {
                            k = 0;
                            // First scan to see if the pattern is satisfied
                            for (l = j + 1; l < chunkLength; l++)
                            {
                                // Here to define any patterns the user may wish to apply
                                if (
                                    Chunks[i].TaggedWords[l].Tag.StartsWith("NNP") || // allow N-Gram NNP
                                    AllowedDTList.ContainsKey(Chunks[i].TaggedWords[l].Word) || // allow adding stop words inside the NNP
                                    Chunks[i].TaggedWords[l].Tag == "CD" // allow adding numbers inside NNP
                                    )
                                {
                                    k++;
                                }
                                else
                                {
                                    break;
                                }
                            }
                            // k-value changing means a pattern has been found
                            // if k is changed and the scanned pattern does not end with a stop word
                            if (k > 0 && !AllowedDTList.ContainsKey(Chunks[i].TaggedWords[j + k].Word))
                            {
                                // Concatenate all the pattern parts ans store them in temp variable
                                curToken = Chunks[i].TaggedWords[j].Word;
                                for (l = j + 1; l <= j + k; l++)
                                {
                                    curToken = curToken + " " + Chunks[i].TaggedWords[l].Word;
                                }
                                // Delete all the parts added in temp
                                Chunks[i].TaggedWords.RemoveRange(j + 1, k);
                                // rectify the sequence length after deletion
                                chunkLength = chunkLength - k;
                                // Check if the perv token is a capitalized JJ
                                if (d > -1 && j > 0 && TextVectors[d].Tag == "JJ" && char.IsUpper(TextVectors[d].Word[0]))
                                {
                                    // Replace current j with its previous j-1 word, and allocate special tag NNP*J
                                    Chunks[i].TaggedWords[j - 1].Tag = Chunks[i].TaggedWords[j].Tag + "J";
                                    Chunks[i].TaggedWords[j - 1].Word = TextVectors[d].Word + " " + curToken;
                                    // Remove the previous word from all lists
                                    TextVectors.RemoveAt(d);
                                    Chunks[i].TaggedWords.RemoveRange(j, 1);
                                    chunkLength--;
                                    j--;
                                }
                                else
                                {
                                    // Only update the current word
                                    Chunks[i].TaggedWords[j].Word = curToken;
                                }
                                TextVectors.Add(Chunks[i].TaggedWords[j]);
                                // Skip the loop by k steps
                                j = j + k;
                            }
                            else
                            {
                                // If there is no pattern match --> add singular NNP(S)
                                // Before addition check JJ pattern
                                if (d > -1 && j > 0 && TextVectors[d].Tag == "JJ" && char.IsUpper(TextVectors[d].Word[0]))
                                {
                                    // Replace current j with its previous j-1 word, and allocate special tag NNP*J
                                    Chunks[i].TaggedWords[j - 1].Tag = Chunks[i].TaggedWords[j].Tag + "J";
                                    Chunks[i].TaggedWords[j - 1].Word = TextVectors[d].Word + " " + Chunks[i].TaggedWords[j].Word;
                                    // Remove the previous word from all lists
                                    TextVectors.RemoveAt(d);
                                    Chunks[i].TaggedWords.RemoveRange(j, 1);
                                    chunkLength--;
                                    j--;
                                }
                                TextVectors.Add(Chunks[i].TaggedWords[j]);
                                j++;
                            }
                        }
                        else
                        {
                            // If the current word is NN(S)
                            if (Chunks[i].TaggedWords[j].Tag == "NNS")
                            {
                                Chunks[i].TaggedWords[j].Word = _wn.Lemmatize(Chunks[i].TaggedWords[j].Word, "noun");
                            }
                            // Find if the current token forms bigram WordNet concept with the previous token
                            if (j > 0)
                            {
                                if (Chunks[i].TaggedWords[j - 1].Tag == "NN" || Chunks[i].TaggedWords[j - 1].Tag == "NNS" || Chunks[i].TaggedWords[j - 1].Tag == "JJ")
                                {
                                    if (_wn.GetSynSets(Chunks[i].TaggedWords[j - 1].Word + "_" + Chunks[i].TaggedWords[j].Word, "noun").Count > 0)
                                    {
                                        // Fuse the bigram into a single underscore-joined token.
                                        Chunks[i].TaggedWords[j].Word = Chunks[i].TaggedWords[j - 1].Word + "_" + Chunks[i].TaggedWords[j].Word;
                                        Chunks[i].TaggedWords[j].Index = Chunks[i].TaggedWords[j - 1].Index;
                                        Chunks[i].TaggedWords.RemoveRange(j - 1, 1);
                                        TextVectors.RemoveAt(d);
                                        j--;
                                        chunkLength--;
                                    }
                                }
                            }
                            TextVectors.Add(Chunks[i].TaggedWords[j]);
                            j++;
                        }
                    }
                    else
                    {
                        if (Chunks[i].TaggedWords[j].Tag[0] == 'J')
                        {
                            // We add adjectives to increase the disambiguation accuracy
                            TextVectors.Add(Chunks[i].TaggedWords[j]);
                        }
                        // Skip any chunk element that is not NNP(S),NN(S), or JJ(*)
                        j++;
                    }
                }
                #endregion
                i++;
            }
            else
            {
                // Remove the current Chunk since it was checked during rectification phase of the previous step
                // Keeping only NPs is for efficiency reason during the last step of the algorithm
                Chunks.RemoveRange(i, 1);
                chunksLength--;
            }
        }
        #region Disambiguatation
        d = TextVectors.Count;
        // Normalize NNP* vectors before the actual disambiguatation
        // Performing normalization after disambiguatation may affects caching the concepts since the keys may change
        for (i = 0; i < d; i++)
        {
            if (TextVectors[i].Tag.StartsWith("NNP"))
            {
                for (j = 0; j < d; j++)
                {
                    if (TextVectors[j].Tag.StartsWith("NNP"))
                    {
                        // Propagate the longer NNP form to any vector it subsumes.
                        if (TextVectors[i].Word.Contains(TextVectors[j].Word))
                        {
                            TextVectors[j].Word = TextVectors[i].Word;
                            TextVectors[j].Tag = TextVectors[i].Tag;
                        }
                        else if (TextVectors[j].Word.Contains(TextVectors[i].Word))
                        {
                            TextVectors[i].Word = TextVectors[j].Word;
                            TextVectors[i].Tag = TextVectors[j].Tag;
                        }
                    }
                }
            }
        }
        for (i = 0; i < d; i++)
        {
            // For limiting access to the list -- Efficiency
            curToken = TextVectors[i].Word;
            if (TextVectors[i].Tag == "NN" || TextVectors[i].Tag == "NNS")
            {
                if (CachedConcepts.ContainsKey(curToken))
                {
                    TextVectors[i].Sense = CachedConcepts[curToken];
                }
                else
                {
                    // Check availability in WordNet
                    Senses = _wn.GetSynSets(curToken, false, WordNetEngine.POS.Noun);
                    if (Senses.Count > 0)
                    {
                        tmpSense = Disambiguate(Senses, GenerateContextWindow(i, d));
                        CachedConcepts.Add(curToken, tmpSense);
                        TextVectors[i].Sense = CachedConcepts[curToken];
                    }
                }
            }
            else if (TextVectors[i].Tag.StartsWith("NNP"))
            {
                if (CachedConcepts.ContainsKey(curToken))
                {
                    TextVectors[i].Sense = CachedConcepts[curToken];
                    continue;
                }
                Senses = _wn.GetSynSets(curToken.Replace(" ", "_"), false, WordNetEngine.POS.Noun);
                if (Senses.Count > 0)
                {
                    tmpSense = Disambiguate(Senses, GenerateContextWindow(i, d));
                    CachedConcepts.Add(curToken, tmpSense);
                    TextVectors[i].Sense = CachedConcepts[curToken];
                    continue;
                }
                // WordNet failed: fall back to any configured knowledge-base plug-ins.
                if (PlugInsNumber > 0)
                {
                    Senses.Clear();
                    for (l = 0; l < PlugInsNumber; l++)
                    {
                        KBDriverQueryArgs[1] = curToken;
                        tmpSenses = KBDriversQueryPointers[l].Invoke(KBDrivers[l], KBDriverQueryArgs) as List<SynSet>;
                        if (tmpSenses != null)
                        {
                            Senses.AddRange(tmpSenses);
                        }
                    }
                    if (Senses.Count > 0)
                    {
                        tmpSense = Disambiguate(Senses, GenerateContextWindow(i, d));
                        CachedConcepts.Add(curToken, tmpSense);
                        TextVectors[i].Sense = CachedConcepts[curToken];
                        continue;
                    }
                }
                // Still unresolved NNP*J: strip the leading adjective and retry this index.
                if (TextVectors[i].Tag.EndsWith("J"))
                {
                    TextVectors[i].Word = curToken.Substring(curToken.IndexOf(" ") + 1);
                    TextVectors[i].Tag = TextVectors[i].Tag.Substring(0, TextVectors[i].Tag.Length - 1);
                    i--;
                    continue;
                }
            }
        }
        // Prepare the vectors for semantic similarity measurement
        // hence, any vector does not hold valid sense must be excluded from the list in temp list
        i = 0;
        while (i < d)
        {
            if (TextVectors[i].Sense == null)
            {
                if (TextVectors[i].Tag.StartsWith("NNP") && !MiscTerms.Contains(TextVectors[i].Word))
                {
                    MiscTerms.Add(TextVectors[i].Word);
                }
                TextVectors.RemoveAt(i);
                d--;
            }
            else
            {
                i++;
            }
        }
        #endregion
        // [Implicit-Dispose]
        tmpSense = null;
        tmpSenses = null;
        Senses = null;
        #endregion
        #region Section 3.2.
        // Row * Col - Diagonal / 2 (above or under the Diagonal)
        double[] S = new double[((d * d) - d) / 2];
        // Dummy counter
        k = 0;
        for (i = 0; i < d; i++)
        {
            for (j = i + 1; j < d; j++)
            {
                S[k] = Math.Round(wupMeasure(TextVectors[i].Sense, TextVectors[j].Sense), 4);
                k++;
            }
        }
        // Perform clustering on S
        int[] res = ap.Run(S, d, 1, 0.9, 1000, 50);
        // Optimized Clustering information collection
        // We collect clustering information and at the same time filter out all terms that are not close to their exemplars
        Dictionary<int, List<int>> ClusRes = new Dictionary<int, List<int>>();
        // ===================================
        for (i = 0; i < res.Length; i++)
        {
            if (!ClusRes.ContainsKey(res[i]))
            {
                ClusRes.Add(res[i], new List<int>());
            }
            if (i == res[i])
            {
                // An exemplar always belongs to its own cluster.
                ClusRes[res[i]].Add(i);
                continue;
            }
            if (Math.Round(wupMeasure(TextVectors[res[i]].Sense, TextVectors[i].Sense), 4) >= ClosenessToCentroid)
            {
                ClusRes[res[i]].Add(i);
            }
        }
        Console.WriteLine("-> Clustering Information:\n");
        foreach (KeyValuePair<int, List<int>> kv in ClusRes)
        {
            Console.Write("\t[" + TextVectors[kv.Key].Word + "] " + TextVectors[kv.Key].Sense.ID + " : ");
            foreach (var item in kv.Value)
            {
                Console.Write(TextVectors[item].Word + ",");
            }
            Console.WriteLine();
            Console.WriteLine();
        }
        // Manual averaging of exemplars (Sec. 3.2)
        // Interactive console loop: entering a seed word deletes its cluster; "$" ends the loop.
        Console.WriteLine("-> Remove unimportant clusters:");
        bool delFlag;
        while (true)
        {
            delFlag = false;
            Console.Write("\tEnter Seed:");
            curToken = Console.ReadLine();
            if (curToken == "$")
            {
                break;
            }
            foreach (var key in ClusRes.Keys)
            {
                if (TextVectors[key].Word == curToken)
                {
                    delFlag = ClusRes.Remove(key);
                    break;
                }
            }
            if (delFlag)
            {
                Console.WriteLine("\tCluster deleted");
            }
            else
            {
                Console.WriteLine("\tSeed is not found");
            }
            Console.WriteLine();
        }
        // ESA-Based averaging of exemplars
        // Insert here local server API
        #endregion
        #region Section 3.3.
        // Flatten ClusRes into List
        List<int> Seeds = ClusRes.Values
                                 .SelectMany(x => x) // Flatten
                                 .ToList();
        // Final seeds list must be sorted in case of using candidate phrase selection from a window
        //Seeds.Sort();
        List<string> CandidatePhrases = new List<string>();
        List<string> CandidatePhraseSeed = new List<string>();
        SelectionWindowSize = Chunks.Count;
        for (i = 0; i < Chunks.Count; i++)
        {
            if (Chunks[i].Tag == "NP")
            {
                d = Chunks[i].TaggedWords.Count;
                for (l = 0; l < Seeds.Count; l++)
                {
                    for (j = 0; j < d; j++)
                    {
                        if (Chunks[i].TaggedWords[j].Word == TextVectors[Seeds[l]].Word)
                        {
                            if (TextVectors[Seeds[l]].Tag.StartsWith("NNP") && !CandidatePhrases.Contains(TextVectors[Seeds[l]].Word) && i < SelectionWindowSize)
                            {
                                // NNP seeds are taken verbatim as candidate phrases.
                                CandidatePhrases.Add(TextVectors[Seeds[l]].Word);
                                if (TextVectors[Seeds[l]].Sense.URI != null)
                                {
                                    CandidatePhraseSeed.Add(TextVectors[Seeds[l]].Sense.URI);
                                }
                                else
                                {
                                    CandidatePhraseSeed.Add("http://www.pdl.io/core_onto/" + TextVectors[Seeds[l]].Sense.ID);
                                }
                            }
                            else if (TextVectors[Seeds[l]].Tag == "NN" || TextVectors[Seeds[l]].Tag == "NNS")
                            {
                                // Common-noun seeds are expanded with a preceding JJ and following NN run.
                                curToken = TextVectors[Seeds[l]].Word;
                                if (j > 0 && Chunks[i].TaggedWords[j - 1].Tag == "JJ")
                                {
                                    curToken = Chunks[i].TaggedWords[j - 1].Word + " " + curToken;
                                }
                                for (k = j + 1; k < d; k++)
                                {
                                    if (Chunks[i].TaggedWords[k].Tag != "NN")
                                    {
                                        break;
                                    }
                                    else
                                    {
                                        curToken = curToken + " " + Chunks[i].TaggedWords[k].Word;
                                    }
                                }
                                // Only multi-word expansions qualify as candidate phrases.
                                if (curToken.Contains(" ") || curToken.Contains("_"))
                                {
                                    if (!CandidatePhrases.Contains(curToken))
                                    {
                                        CandidatePhrases.Add(curToken);
                                        if (TextVectors[Seeds[l]].Sense.URI != null)
                                        {
                                            CandidatePhraseSeed.Add(TextVectors[Seeds[l]].Sense.URI);
                                        }
                                        else
                                        {
                                            CandidatePhraseSeed.Add("http://www.pdl.io/core_onto/" + TextVectors[Seeds[l]].Sense.ID);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        #endregion
        // Print results
        Console.WriteLine("\n-> Candidate Keyphrases:\n");
        for (i = 0; i < CandidatePhrases.Count; i++)
        {
            Console.WriteLine("\t" + CandidatePhrases[i].Replace("_", " ") + " , URI:" + CandidatePhraseSeed[i]);
        }
        Console.WriteLine("\n-> MISC Entities:\n");
        for (i = 0; i < MiscTerms.Count; i++)
        {
            Console.WriteLine("\t" + MiscTerms[i]);
        }
    }
}
/// <summary>
/// Load the senses of the entity: one entry per sensory channel, each with
/// the same default working range, registered in a fixed order.
/// </summary>
private void LoadSenses()
{
    // Same six senses, in the same order, as the original switch produced:
    // hearing, sight, smell, touch, taste, debug — all enabled.
    var allSenses = new[]
    {
        new Sense { Name = SensoryType.Hearing, MessagePrefix = "[HEARING]", Measurement = SensoryTypeMeasurement.Decibel, Enabled = true },
        new Sense { Name = SensoryType.Sight, MessagePrefix = "[SIGHT]", Measurement = SensoryTypeMeasurement.Lumen, Enabled = true },
        new Sense { Name = SensoryType.Smell, MessagePrefix = "[SMELL]", Measurement = SensoryTypeMeasurement.PartsPerMillion, Enabled = true },
        new Sense { Name = SensoryType.Touch, MessagePrefix = "[TOUCH]", Measurement = SensoryTypeMeasurement.PoundsPerSquareInch, Enabled = true },
        new Sense { Name = SensoryType.Taste, MessagePrefix = "[TASTE]", Measurement = SensoryTypeMeasurement.PartsPerMillion, Enabled = true },
        new Sense { Name = SensoryType.Debug, MessagePrefix = "[DEBUG]", Measurement = SensoryTypeMeasurement.Debug, Enabled = true },
    };

    foreach (var currSense in allSenses)
    {
        // Every sense starts with the same default threshold range.
        currSense.LowThreshold = 0;
        currSense.HighThreshold = 100;
        Senses.AddSense(currSense);
    }
}
// Use this for initialization: cache frequently used sibling components.
void Start()
{
    audioSource = this.GetComponent<AudioSource>();
    senses = this.GetComponent<Senses>();
}
/// <summary>
/// Joins the distinct translation texts of all senses for the given
/// language code into a single comma-separated string.
/// </summary>
public string Summary(string languageCode)
{
    var translationTexts = Senses
        .SelectMany(s => s.Translations
            .Where(t => t.Language == languageCode)
            .Select(t => t.Text))
        .Distinct();
    return string.Join(", ", translationTexts);
}
/// <summary>
/// Trains the Word2Sense model: scans all documents in parallel, counting
/// n-gram hash combinations of filtered tokens (adjectives/nouns/proper
/// nouns, non-stopword, non-entity, non-ordinal), then keeps the n-grams
/// whose document frequency falls between <paramref name="tooRare"/> and
/// <paramref name="tooCommon"/> (as fractions of the corpus). Optionally
/// resumes from, and writes back to, <paramref name="trainingData"/>.
/// </summary>
public void TrainWord2Sense(IEnumerable<IDocument> documents, ParallelOptions parallelOptions, int ngrams = 3, double tooRare = 1E-5, double tooCommon = 0.1, Word2SenseTrainingData trainingData = null)
{
    var HashCount = new ConcurrentDictionary<ulong, int>();
    var Senses = new ConcurrentDictionary<ulong, ulong[]>();
    var Words = new ConcurrentDictionary<ulong, string>();
    if (trainingData is object)
    {
        // Resume from previously accumulated training state.
        HashCount = new ConcurrentDictionary<ulong, int>(trainingData.HashCount);
        Senses = new ConcurrentDictionary<ulong, ulong[]>(trainingData.Senses);
        Words = new ConcurrentDictionary<ulong, string>(trainingData.Words);
    }
    bool ignoreCase = Data.IgnoreCase;
    bool ignoreOnlyNumeric = Data.IgnoreOnlyNumeric;
    var stopwords = new HashSet<ulong>(StopWords.Spacy.For(Language).Select(w => ignoreCase ? IgnoreCaseHash64(w.AsSpan()) : Hash64(w.AsSpan())).ToArray());
    int docCount = 0, tkCount = 0;
    var sw = Stopwatch.StartNew();
    TrainLock.EnterWriteLock();
    try
    {
        Parallel.ForEach(documents, parallelOptions, doc =>
        {
            try
            {
                // Sliding window of the last <= ngrams accepted token hashes.
                var stack = new Queue<ulong>(ngrams);
                if (doc.TokensCount < ngrams) { return; } //Ignore too small documents
                Interlocked.Add(ref tkCount, doc.TokensCount);
                foreach (var span in doc)
                {
                    var tokens = span.GetCapturedTokens().ToArray();
                    for (int i = 0; i < tokens.Length; i++)
                    {
                        var tk = tokens[i];
                        var hash = ignoreCase ? IgnoreCaseHash64(tk.ValueAsSpan) : Hash64(tk.ValueAsSpan);
                        bool filterPartOfSpeech = !(tk.POS == PartOfSpeech.ADJ || tk.POS == PartOfSpeech.NOUN || tk.POS == PartOfSpeech.PROPN);
                        bool skipIfHasUpperCase = (!ignoreCase && !tk.ValueAsSpan.IsAllLowerCase());
                        bool skipIfTooSmall = (tk.Length < 3);
                        bool skipIfNotAllLetterOrDigit = !(tk.ValueAsSpan.IsAllLetterOrDigit());
                        bool skipIfStopWordOrEntity = stopwords.Contains(hash) || tk.EntityTypes.Any();
                        //Heuristic for ordinal numbers (i.e. 1st, 2nd, 33rd, etc)
                        bool skipIfMaybeOrdinal = (tk.ValueAsSpan.IndexOfAny(new char[] { '1', '2', '3', '4', '5', '6', '7', '8', '9', '0' }, 0) >= 0 &&
                                                   tk.ValueAsSpan.IndexOfAny(new char[] { 't', 'h', 's', 't', 'r', 'd' }, 0) >= 0 &&
                                                   tk.ValueAsSpan.IndexOfAny(new char[] { 'a', 'b', 'c', 'e', 'f', 'g', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'u', 'v', 'w', 'x', 'y', 'z' }, 0) < 0);
                        bool skipIfOnlyNumeric = ignoreOnlyNumeric ? tk.ValueAsSpan.IsLetter() : false;
                        //Only filter for POS if language != any, as otherwise we won't have the POS information
                        bool skipThisToken = (filterPartOfSpeech && Language != Language.Any) || skipIfHasUpperCase || skipIfTooSmall || skipIfNotAllLetterOrDigit || skipIfStopWordOrEntity || skipIfMaybeOrdinal || skipIfOnlyNumeric;
                        // A rejected token breaks the n-gram window entirely.
                        if (skipThisToken) { stack.Clear(); continue; }
                        if (!Words.ContainsKey(hash)) { Words[hash] = ignoreCase ? tk.Value.ToLowerInvariant() : tk.Value; }
                        stack.Enqueue(hash);
                        // Count every n-gram (length 2..window) ending at this token.
                        ulong combined = stack.ElementAt(0);
                        for (int j = 1; j < stack.Count; j++)
                        {
                            combined = HashCombine64(combined, stack.ElementAt(j));
                            // NOTE(review): ContainsKey + indexer increment is not atomic
                            // under Parallel.ForEach — concurrent updates to the same key
                            // can lose counts; consider AddOrUpdate. Left unchanged here.
                            if (HashCount.ContainsKey(combined)) { HashCount[combined]++; }
                            else { Senses[combined] = stack.Take(j + 1).ToArray(); HashCount[combined] = 1; }
                        }
                        if (stack.Count > ngrams) { stack.Dequeue(); }
                    }
                }
                int count = Interlocked.Increment(ref docCount);
                if (count % 1000 == 0)
                {
                    Logger.LogInformation("Training Word2Sense model - at {DOCCOUNT} documents, {TKCOUNT} tokens - elapsed {ELAPSED} seconds at {KTKS} kTk/s)", docCount, tkCount, sw.Elapsed.TotalSeconds, (tkCount / sw.ElapsedMilliseconds));
                }
            }
            catch (Exception E)
            {
                Logger.LogError(E, "Error during training Word2Sense model");
            }
        });
    }
    catch (OperationCanceledException)
    {
        return;
    }
    finally
    {
        TrainLock.ExitWriteLock();
    }
    Logger.LogInformation("Finish parsing documents for Word2Sense model");
    // Keep only n-grams that are neither too rare nor too common across the corpus.
    int thresholdRare = (int)Math.Floor(tooRare * docCount);
    int thresholdCommon = (int)Math.Floor(tooCommon * docCount);
    var toKeep = HashCount.Where(kv => kv.Value >= thresholdRare && kv.Value <= thresholdCommon).OrderByDescending(kv => kv.Value)
                          .Select(kv => kv.Key).ToArray();
    foreach (var key in toKeep)
    {
        if (Senses.TryGetValue(key, out var hashes) && HashCount.TryGetValue(key, out var count))
        {
            Data.Hashes.Add(key);
            // MultiGramHashes[i] collects the i-th component hash of every kept n-gram.
            for (int i = 0; i < hashes.Length; i++)
            {
                if (Data.MultiGramHashes.Count <= i)
                {
                    Data.MultiGramHashes.Add(new HashSet<ulong>());
                }
                Data.MultiGramHashes[i].Add(hashes[i]);
            }
        }
    }
    if (trainingData is object)
    {
        // Persist the accumulated state back into the caller's training data.
        trainingData.HashCount = new Dictionary<ulong, int>(HashCount);
        trainingData.Senses = new Dictionary<ulong, ulong[]>(Senses);
        trainingData.Words = new Dictionary<ulong, string>(Words);
        foreach (var word in trainingData.Words.Values)
        {
            AddToGazeteer(word);
        }
    }
    Logger.LogInformation("Finish training Word2Sense model");
}
// Ready when the senses can supply a salvage target and the action's own
// validity check passes.
public override bool IsReadyToUse()
{
    _target = Senses.GetSalvageTarget();
    return ActionIsValid();
}
// Registers a sense with its associated value.
public void AddSense(Sense _toAdd, int val) => Senses.Add(_toAdd, val);
/// <summary>
/// Formats this word entry as a plain-text message: definition, phonetic
/// spelling, etymologies, senses, subsenses and audio link, each section
/// separated by a horizontal rule and emitted only when it has data.
/// Logs start/end of generation.
/// </summary>
internal string GenerateMessage()
{
    Logger.LogInformation(Name, $"generating message for word {{{Definition}}}", Logger.Method.Start);
    var sb = new StringBuilder();
    // 39-dash horizontal rule used between sections.
    var hr = new string('-', 39);
    sb.Append($"{hr}\n");
    if (Definition != null)
    {
        sb.Append($"\nNEW WORD\n{Definition}\n\n");
        sb.Append($"{hr}\n");
    }
    if (PhoneticSpelling != null)
    {
        sb.Append($"\nPHONETIC SPELLING\n{PhoneticSpelling}\n\n");
        sb.Append($"{hr}\n");
    }
    if (Etymologies?.Any() ?? false)
    {
        sb.Append("\nETYMOLOGIES\n");
        foreach (var etymology in Etymologies)
        {
            // \u2022 renders as a bullet point.
            sb.Append($"\u2022 {etymology}\n\n");
        }
        sb.Append($"{hr}\n");
    }
    if (Senses?.Any() ?? false)
    {
        sb.Append("\nSENSES\n");
        foreach (var sense in Senses)
        {
            sb.Append($"\u2022 {sense}\n\n");
        }
        sb.Append($"{hr}\n");
    }
    if (SubSenses?.Any() ?? false)
    {
        sb.Append("\nSUBSENSES\n");
        foreach (var subSense in SubSenses)
        {
            sb.Append($"\u2022 {subSense}\n\n");
        }
        sb.Append($"{hr}\n");
    }
    if (AudioFile != null)
    {
        sb.Append($"\nAUDIO\n{AudioFile}\n\n");
        sb.Append($"{hr}\n");
    }
    Logger.LogInformation(Name, $"generating message for word {{{Definition}}}", Logger.Method.End);
    return (sb.ToString());
}
// Cache perception and navigation components on wake-up.
void Awake()
{
    senses = GetComponent<Senses>();
    nav = GetComponent<Navigator>();
}