/// <summary>
/// Defines (or redefines) a vocabulary word in the Forth dictionary and
/// records it as the most recently compiled word.
/// </summary>
/// <param name="name">Dictionary key for the word being defined.</param>
/// <param name="vocabulary">Vocabulary wrapped by the new entry.</param>
private void Define(string name, Vocabulary vocabulary)
{
    Dictionary<string, ForthDictionaryEntry> dict = definitions.Dict;
    // Indexer assignment replaces the original ContainsKey/Remove/Add
    // sequence: one hash lookup, identical replace-if-present semantics.
    dict[name] = new ForthDictionaryEntry(vocabulary);
    lastWordCompiled = name;
}
/// <summary>
/// Builds the vocabulary from the configured corpus path, persists it to
/// disk, and reports collection/vocabulary statistics on the console.
/// </summary>
private static void CreateVocabulary()
{
    Console.WriteLine("Creating vocabulary...");

    var vocabulary = new Vocabulary(path);
    vocabulary.Create();
    vocabulary.SaveToFile();

    Console.WriteLine("Vocabulary has been created.\n");
    Console.WriteLine("Size of collection is {0} kB.", vocabulary.DocParser.CollectionSize / 1024);
    Console.WriteLine("Number of words in collection: {0}", vocabulary.WordsCounter);
    Console.WriteLine("Number of terms in vocabulary: {0}\n", vocabulary.TermsCounter);
}
/// <summary>
/// Sets up the trainer form for a vocabulary: shows the language pair,
/// focuses the start button, and resets the per-word "correct" flags so
/// every session starts from a clean slate.
/// </summary>
/// <param name="v">Vocabulary to train on.</param>
public Trainer(Vocabulary v)
{
    InitializeComponent();

    vocab = v;
    currentVocab = v;

    label_1lang.Text = vocab.fromLang;
    label_2lang.Text = vocab.toLang;
    this.ActiveControl = button_start;

    // Reset progress tracking for all words before training begins.
    foreach (VocWord w in currentVocab.words)
    {
        w.correct = false;
    }
}
/// <summary>
/// GetKnownPrefixes should expose the full prefix table from the embedded
/// JSON resource, including well-known entries such as "dc" (Dublin Core).
/// </summary>
public void GetCommonPrefixes_WithEmbeddedJsonFile_ReturnsPrefixesDictionary()
{
    // Arrange
    var sut = new Vocabulary();

    // Act
    var prefixes = sut.GetKnownPrefixes();

    // Assert
    Assert.IsNotNull(prefixes);
    Assert.IsInstanceOf<Dictionary<string, string>>(prefixes);
    Assert.AreEqual(5105, prefixes.Count);
    Assert.IsTrue(prefixes.ContainsKey("dc"));
    Assert.AreEqual("http://purl.org/dc/elements/1.1/", prefixes["dc"]);
}
/// <summary>
/// Processes an IAM element: reads its attributes, then initialises either
/// a URI-based vocabulary (BASE attribute), a custom per-presentation
/// vocabulary (ID attribute), or just records the SOURCE reference.
/// </summary>
/// <param name="n">The XML node to inspect.</param>
public void Process(XmlNode n)
{
    //TODO: add exception handling (original note: "Exception toevoegen")
    if (n.Name != IAM)
    {
        return;
    }

    base.ReadAttributes(n);
    XmlAttributeCollection attr = n.Attributes;

    XmlNode baseAttr = attr.GetNamedItem(BASE);
    XmlNode idAttr = attr.GetNamedItem(ID);
    XmlNode sourceAttr = attr.GetNamedItem(SOURCE);

    if (baseAttr != null)
    {
        // The presentation is loaded from a URI.
        m_voc = new Vocabulary(baseAttr.Value);
        m_base = baseAttr.Value;
    }
    else if (idAttr != null)
    {
        // Make a custom vocabulary for this presentation.
        m_identifier = idAttr.Value;
        m_voc = new CustomVocabulary(m_identifier, n);
    }
    else if (sourceAttr != null)
    {
        m_source = sourceAttr.Value;
    }
}
/// <summary>
/// Resolves lookup values for a field. A configured concept-id column wins
/// outright; otherwise the first matching concept-id mapper translates the
/// source key, falling back to the field's default concept id when no
/// mapper applies, or to an empty result when the key is missing.
/// </summary>
public List<LookupValue> GetValues(Vocabulary vocabulary, Field field, IDataReader reader)
{
    // Direct concept-id column short-circuits any mapping.
    if (!string.IsNullOrEmpty(field.ConceptId))
    {
        var directId = reader.GetInt(field.ConceptId);
        return new List<LookupValue> { new LookupValue { ConceptId = directId } };
    }

    var mapper = FindMapper(ConceptIdMappers, reader);
    if (mapper == null)
    {
        // No mapper applies to this row; use the field's default concept.
        return new List<LookupValue> { new LookupValue { ConceptId = field.DefaultConceptId } };
    }

    var sourceKey = reader.GetString(field.Key);
    if (sourceKey == null)
    {
        // Nothing to map for this row.
        return new List<LookupValue>();
    }

    return mapper.Map(vocabulary, field.Key, sourceKey, reader.GetDateTime(field.EventDate));
}
/// <summary>
/// Builds the default English inflection vocabulary: regex-based
/// pluralization and singularization rules, irregular noun pairs, and
/// uncountable words that never inflect.
/// </summary>
/// <returns>The fully populated default <see cref="Vocabulary"/>.</returns>
private static Vocabulary BuildDefault()
{
    var _default = new Vocabulary();

    // --- Pluralization rules: regex pattern -> replacement ---
    _default.AddPlural("$", "s");
    _default.AddPlural("s$", "s");
    _default.AddPlural("(ax|test)is$", "$1es");
    _default.AddPlural("(octop|vir|alumn|fung|cact|foc|hippopotam|radi|stimul|syllab|nucle)us$", "$1i");
    _default.AddPlural("(alias|bias|iris|status|campus|apparatus|virus|walrus|trellis)$", "$1es");
    _default.AddPlural("(buffal|tomat|volcan|ech|embarg|her|mosquit|potat|torped|vet)o$", "$1oes");
    _default.AddPlural("([dti])um$", "$1a");
    _default.AddPlural("sis$", "ses");
    _default.AddPlural("(?:([^f])fe|([lr])f)$", "$1$2ves");
    _default.AddPlural("(hive)$", "$1s");
    _default.AddPlural("([^aeiouy]|qu)y$", "$1ies");
    _default.AddPlural("(x|ch|ss|sh)$", "$1es");
    _default.AddPlural("(matr|vert|ind|d)ix|ex$", "$1ices");
    _default.AddPlural("([m|l])ouse$", "$1ice");
    _default.AddPlural("^(ox)$", "$1en");
    _default.AddPlural("(quiz)$", "$1zes");
    _default.AddPlural("(buz|blit|walt)z$", "$1zes");
    _default.AddPlural("(hoo|lea|loa|thie)f$", "$1ves");
    _default.AddPlural("(alumn|alg|larv|vertebr)a$", "$1ae");
    _default.AddPlural("(criteri|phenomen)on$", "$1a");

    // --- Singularization rules: regex pattern -> replacement ---
    _default.AddSingular("s$", "");
    _default.AddSingular("(n)ews$", "$1ews");
    _default.AddSingular("([dti])a$", "$1um");
    _default.AddSingular("(analy|ba|diagno|parenthe|progno|synop|the|ellip|empha|neuro|oa|paraly)ses$", "$1sis");
    _default.AddSingular("([^f])ves$", "$1fe");
    _default.AddSingular("(hive)s$", "$1");
    _default.AddSingular("(tive)s$", "$1");
    _default.AddSingular("([lr]|hoo|lea|loa|thie)ves$", "$1f");
    _default.AddSingular("(^zomb)?([^aeiouy]|qu)ies$", "$2y");
    _default.AddSingular("(s)eries$", "$1eries");
    _default.AddSingular("(m)ovies$", "$1ovie");
    _default.AddSingular("(x|ch|ss|sh)es$", "$1");
    _default.AddSingular("([m|l])ice$", "$1ouse");
    _default.AddSingular("(o)es$", "$1");
    _default.AddSingular("(shoe)s$", "$1");
    _default.AddSingular("(cris|ax|test)es$", "$1is");
    _default.AddSingular("(octop|vir|alumn|fung|cact|foc|hippopotam|radi|stimul|syllab|nucle)i$", "$1us");
    _default.AddSingular("(alias|bias|iris|status|campus|apparatus|virus|walrus|trellis)es$", "$1");
    _default.AddSingular("^(ox)en", "$1");
    _default.AddSingular("(matr|d)ices$", "$1ix");
    _default.AddSingular("(vert|ind)ices$", "$1ex");
    _default.AddSingular("(quiz)zes$", "$1");
    _default.AddSingular("(buz|blit|walt)zes$", "$1z");
    _default.AddSingular("(alumn|alg|larv|vertebr)ae$", "$1a");
    _default.AddSingular("(criteri|phenomen)a$", "$1on");

    // --- Irregular singular/plural pairs (not expressible as suffix rules) ---
    _default.AddIrregular("person", "people");
    _default.AddIrregular("man", "men");
    _default.AddIrregular("child", "children");
    _default.AddIrregular("sex", "sexes");
    _default.AddIrregular("move", "moves");
    _default.AddIrregular("goose", "geese");
    _default.AddIrregular("wave", "waves");
    _default.AddIrregular("die", "dice");
    _default.AddIrregular("foot", "feet");
    _default.AddIrregular("tooth", "teeth");
    _default.AddIrregular("curriculum", "curricula");
    _default.AddIrregular("database", "databases");
    _default.AddIrregular("zombie", "zombies");
    // matchEnding: false — these must match the whole word, not a suffix.
    _default.AddIrregular("is", "are", matchEnding: false);
    _default.AddIrregular("that", "those", matchEnding: false);
    _default.AddIrregular("this", "these", matchEnding: false);
    _default.AddIrregular("bus", "buses", matchEnding: false);

    // --- Words that are the same in singular and plural ---
    _default.AddUncountable("equipment");
    _default.AddUncountable("information");
    _default.AddUncountable("rice");
    _default.AddUncountable("money");
    _default.AddUncountable("species");
    _default.AddUncountable("series");
    _default.AddUncountable("fish");
    _default.AddUncountable("sheep");
    _default.AddUncountable("deer");
    _default.AddUncountable("aircraft");
    _default.AddUncountable("oz");
    _default.AddUncountable("tsp");
    _default.AddUncountable("tbsp");
    _default.AddUncountable("ml");
    _default.AddUncountable("l");
    _default.AddUncountable("water");
    _default.AddUncountable("waters");
    _default.AddUncountable("s***n");
    _default.AddUncountable("sperm");
    _default.AddUncountable("bison");
    _default.AddUncountable("grass");
    _default.AddUncountable("hair");
    _default.AddUncountable("mud");
    _default.AddUncountable("elk");
    _default.AddUncountable("luggage");
    _default.AddUncountable("moose");
    _default.AddUncountable("offspring");
    _default.AddUncountable("salmon");
    _default.AddUncountable("shrimp");
    _default.AddUncountable("someone");
    _default.AddUncountable("swine");
    _default.AddUncountable("trout");
    _default.AddUncountable("tuna");
    _default.AddUncountable("corps");
    _default.AddUncountable("scissors");
    _default.AddUncountable("means");

    return (_default);
}
/// <summary>
/// Yields a single Provider entity for the current reader row. When no
/// concept definition is supplied, the provider is built from raw columns;
/// otherwise the specialty concept id is resolved through the vocabulary
/// mapping with a fallback default concept id.
/// </summary>
/// <param name="concept">Specialty concept definition, or null for the raw-column path.</param>
/// <param name="reader">Current source data row.</param>
/// <param name="keyOffset">Key/offset manager (not used in this override).</param>
public override IEnumerable<IEntity> GetConcepts(Concept concept, IDataRecord reader, KeyMasterOffsetManager keyOffset)
{
    // Gender: look up by source value only when no gender concept-id column
    // is configured; otherwise read the column directly.
    var genderConceptId = 0;
    if (string.IsNullOrEmpty(GenderConceptId) && Vocabulary != null)
    {
        genderConceptId = Vocabulary.LookupGender(GenderSourceValue) ?? 0;
    }
    else if (reader.GetInt(GenderConceptId).HasValue)
    {
        genderConceptId = reader.GetInt(GenderConceptId).Value;
    }

    if (concept == null)
    {
        // No concept mapping configured: populate directly from the row.
        var prov = new Provider
        {
            CareSiteId = reader.GetInt(CareSiteId) ?? 0,
            ProviderSourceValue = reader.GetString(ProviderSourceValue),
            SourceValue = reader.GetString(SpecialtySourceValue),
            Npi = reader.GetString(NPI),
            Dea = reader.GetString(DEA),
            Name = reader.GetString(Name),
            YearOfBirth = reader.GetInt(YearOfBirth),
            GenderConceptId = genderConceptId,
            GenderSourceValue = reader.GetString(GenderSourceValue),
            GenderSourceConceptId = reader.GetInt(GenderSourceConceptId) ?? 0,
            SpecialtySourceConceptId = reader.GetInt(SpecialtySourceConceptId) ?? 0
        };
        // Synthesize an id from the entity key unless an explicit Id column exists.
        prov.Id = string.IsNullOrEmpty(Id) ? Entity.GetId(prov.GetKey()) : reader.GetLong(Id).Value;
        yield return (prov);
    }
    else
    {
        var conceptField = concept.Fields[0];
        // Prefer the mapped key, fall back to the source key; normalize
        // empty strings to null.
        var source = reader.GetString(conceptField.Key) ?? reader.GetString(conceptField.SourceKey);
        if (source != null && source.Length == 0)
        {
            source = null;
        }

        var specialtyConceptIds = concept.GetConceptIdValues(Vocabulary, conceptField, reader).ToList();

        int? specialtyConcept = null;
        //(Unknown Physician Specialty)
        var defaultConceptId = 38004514;
        if (conceptField.DefaultConceptId.HasValue)
        {
            defaultConceptId = conceptField.DefaultConceptId.Value;
        }

        // Only the first mapped value is used; 0 is treated as "unmapped".
        if (specialtyConceptIds.Count > 0 && specialtyConceptIds[0].ConceptId != 0)
        {
            specialtyConcept = specialtyConceptIds[0].ConceptId;
        }

        var prov = new Provider
        {
            CareSiteId = reader.GetInt(CareSiteId) ?? 0,
            ConceptId = specialtyConcept ?? defaultConceptId,
            ProviderSourceValue = reader.GetString(ProviderSourceValue),
            SourceValue = source,
            Name = reader.GetString(Name),
            YearOfBirth = reader.GetInt(YearOfBirth),
            GenderConceptId = genderConceptId,
            GenderSourceValue = reader.GetString(GenderSourceValue),
            Npi = reader.GetString(NPI),
            Dea = reader.GetString(DEA),
            GenderSourceConceptId = reader.GetInt(GenderSourceConceptId) ?? 0,
            SpecialtySourceConceptId = reader.GetInt(SpecialtySourceConceptId) ?? 0
        };
        prov.Id = string.IsNullOrEmpty(Id) ? Entity.GetId(prov.GetKey()) : reader.GetLong(Id).Value;
        yield return (prov);
    }
}
public void EnumerateTest1() { Vocabulary context = new Vocabulary(new string[] { "TEST" }, 1, 1); (string s, float f)[] expected = new(string, float)[]
/// <summary>
/// A Vocabulary created through its private constructor should hash to 1.
/// </summary>
public void GetHashCodeTest1()
{
    Vocabulary vocab = Helpers.CreateClassWithPrivateConstructor<Vocabulary>();

    Assert.AreEqual(1, vocab.GetHashCode());
}
/// <summary>
/// Creates a dictionary entry that, when executed, makes the wrapped
/// vocabulary the top of the Forth search order. The entry is named after
/// the vocabulary and is not immediate.
/// </summary>
public ForthDictionaryEntry(Vocabulary vocabulary)
{
    this.vocabulary = vocabulary;
    this.name = vocabulary.Name;
    this.isImmediate = false;
    // The delegate reads the vocabulary field at invocation time, so the
    // order of these assignments does not affect its behaviour.
    this.proc = delegate(Forth f) { f.searchOrder.Top = this.vocabulary; };
}
/// <summary>
/// Gets the scope type with the given id, or null when none matches.
/// </summary>
/// <param name="voc">The vocabulary (extension receiver; not otherwise used).</param>
/// <param name="scopeTypeId">The scope type id to look up.</param>
/// <returns>The matching scope type, or null.</returns>
internal static ScopeType GetScopeType(this Vocabulary voc, int scopeTypeId)
{
    IScopeTypeController controller = Util.GetScopeTypeController();
    // Predicate overload of SingleOrDefault keeps the original contract:
    // null when absent, exception when the id is duplicated.
    return controller.GetScopeTypes().SingleOrDefault(s => s.ScopeTypeId == scopeTypeId);
}
/// <summary>
/// Imports taxonomy vocabularies from an export job: existing vocabularies
/// are ignored or overwritten according to the collision policy, new ones
/// are added, and the vocabulary cache is cleared if anything changed.
/// </summary>
private void ProcessVocabularies(ExportImportJob importJob, ImportDto importDto, IList<TaxonomyScopeType> otherScopeTypes, IEnumerable<TaxonomyVocabulary> otherVocabularies)
{
    var changed = false;
    var dataService = Util.GetDataService();
    // NOTE(review): the one-year-ahead cutoff presumably means "include
    // every local vocabulary" — confirm against GetTaxonomyVocabularies.
    var localVocabularies = GetTaxonomyVocabularies(importDto.PortalId, DateUtils.GetDatabaseUtcTime().AddYears(1), null);
    foreach (var other in otherVocabularies)
    {
        // Map exported user identities onto local user ids.
        var createdBy = Common.Util.GetUserIdByName(importJob, other.CreatedByUserID, other.CreatedByUserName);
        var modifiedBy = Common.Util.GetUserIdByName(importJob, other.LastModifiedByUserID, other.LastModifiedByUserName);
        var local = localVocabularies.FirstOrDefault(t => t.Name == other.Name);
        var scope = otherScopeTypes.FirstOrDefault(s => s.ScopeTypeID == other.ScopeTypeID);

        // Application scope gets no scope id; portal scope is re-pointed
        // at the importing portal.
        var scopeId = other.ScopeID ?? Null.NullInteger;
        if (scope != null && scope.ScopeType.Equals("Application", StringComparison.InvariantCultureIgnoreCase))
        {
            scopeId = Null.NullInteger;
        }
        else if (scope != null && scope.ScopeType.Equals("Portal", StringComparison.InvariantCultureIgnoreCase))
        {
            scopeId = importDto.PortalId;
        }

        if (local != null)
        {
            // Name collision with an existing local vocabulary.
            other.LocalId = local.VocabularyID;
            switch (importDto.CollisionResolution)
            {
                case CollisionResolution.Ignore:
                    Result.AddLogEntry("Ignored vocabulary", other.Name);
                    break;
                case CollisionResolution.Overwrite:
                    var vocabulary = new Vocabulary(other.Name, other.Description)
                    {
                        IsSystem = other.IsSystem,
                        Weight = other.Weight,
                        ScopeId = scopeId,
                        ScopeTypeId = scope?.LocalId ?? other.ScopeTypeID,
                    };
                    dataService.UpdateVocabulary(vocabulary, modifiedBy);
                    Result.AddLogEntry("Updated vocabulary", other.Name);
                    changed = true;
                    break;
                default:
                    throw new ArgumentOutOfRangeException(importDto.CollisionResolution.ToString());
            }
        }
        else
        {
            // No local counterpart: create a new vocabulary.
            var vocabulary = new Vocabulary(other.Name, other.Description, (VocabularyType)other.VocabularyTypeID)
            {
                IsSystem = other.IsSystem,
                Weight = other.Weight,
                ScopeId = scopeId,
                ScopeTypeId = scope?.LocalId ?? other.ScopeTypeID,
            };
            other.LocalId = dataService.AddVocabulary(vocabulary, createdBy);
            Result.AddLogEntry("Added vocabulary", other.Name);
            changed = true;
        }
    }
    if (changed)
    {
        // Invalidate the cached vocabulary list so readers see the import.
        DataCache.ClearCache(DataCache.VocabularyCacheKey);
    }
}
/// <summary>
/// Initializes the vocabulary and builds the entity lookup, running inside
/// the shared PerformAction wrapper.
/// </summary>
public void CreateLookup()
{
    PerformAction(() =>
    {
        var vocabulary = new Vocabulary();
        vocabulary.Initialize();
        vocabulary.CreateEntityLookup();
    });
}
/// <summary>
/// Creates a plain (non-vocabulary) dictionary entry: a named word backed
/// by an execution token, with an explicit immediate flag.
/// </summary>
public ForthDictionaryEntry(string name, ExecutionToken proc, bool isImmediate)
{
    this.vocabulary = null;
    this.name = name;
    this.proc = proc;
    this.isImmediate = isImmediate;
}
//Hide Result
// Clears the displayed results, strips every meaning from the shared
// vocabulary list, then rebinds the stripped list back to the list view.
public void HideResults()
{
    lvVocab.Items.Clear();
    Vocabulary.DelAllMean(Variables.vocabList);
    Function.ArrayListToListView(Variables.vocabList, lvVocab);
}
/// <summary>
/// Binds the vocabulary into the edit control and points the cancel link
/// back at the taxonomy home page.
/// </summary>
/// <param name="vocabulary">Vocabulary to edit.</param>
/// <param name="showScope">Whether the scope selector is shown.</param>
public void BindVocabulary(Vocabulary vocabulary, bool showScope)
{
    // The two statements are independent; order does not matter.
    cancelCreate.NavigateUrl = Model.TaxonomyHomeUrl;
    editVocabularyControl.BindVocabulary(vocabulary, true, showScope);
}
/// <summary>
/// Adds an emotion vocabulary to this document.
/// </summary>
/// <param name="vocabulary">The defined emotion vocabulary to register.</param>
public void addVocabulary(Vocabulary vocabulary)
{
    vocabularies.Add(vocabulary);
}
/// <summary>
/// Resolves the dictionary property for <paramref name="prop"/> on the
/// part's class, preferring the getter form and falling back to the
/// setter form when no getter is registered.
/// </summary>
public static DProperty FromProperty(Property prop, Part part, Vocabulary voc)
{
    try
    {
        return voc.GetDPropertyGetter(prop.Name, part.Class);
    }
    catch
    {
        // Getter lookup failed; fall back to the setter variant.
        return voc.GetDPropertySetter(prop.Name, part.Class);
    }
}
/// <summary>
/// Resolves the display name of element <paramref name="a"/>, wrapping the
/// legacy token-name array in a vocabulary before delegating.
/// </summary>
protected internal virtual string ElementName(string[] tokenNames, int a)
{
    var vocabulary = Vocabulary.FromTokenNames(tokenNames);
    return ElementName(vocabulary, a);
}
/// <summary>
/// Creates a serializer for <paramref name="dfa"/>, deriving the token
/// vocabulary from the legacy token-name array and delegating to the main
/// constructor with no additional context (nulls).
/// </summary>
public DFASerializer(DFA dfa, string[] tokenNames) : this(dfa, Vocabulary.FromTokenNames(tokenNames), null, null) { }
/// <summary>
/// Trains a two-layer LSTM language model: downloads the train/valid/test
/// corpora, runs truncated BPTT with gradient clipping, reports validation
/// perplexity each epoch (with learning-rate decay after epoch 6), and
/// finally evaluates test perplexity.
/// </summary>
public static void Run()
{
    Console.WriteLine("Build Vocabulary.");

    Vocabulary vocabulary = new Vocabulary();
    string trainPath = InternetFileDownloader.Donwload(DOWNLOAD_URL + TRAIN_FILE, TRAIN_FILE, TRAIN_FILE_HASH);
    string validPath = InternetFileDownloader.Donwload(DOWNLOAD_URL + VALID_FILE, VALID_FILE, VALID_FILE_HASH);
    string testPath = InternetFileDownloader.Donwload(DOWNLOAD_URL + TEST_FILE, TEST_FILE, TEST_FILE_HASH);

    int[] trainData = vocabulary.LoadData(trainPath);
    int[] validData = vocabulary.LoadData(validPath);
    int[] testData = vocabulary.LoadData(testPath);

    int nVocab = vocabulary.Length;

    Console.WriteLine("Network Initilizing.");
    // Embedding -> LSTM -> LSTM -> projection back to vocabulary size,
    // with dropout between layers.
    FunctionStack model = new FunctionStack(
        new EmbedID(nVocab, N_UNITS, name: "l1 EmbedID"),
        new Dropout(),
        new LSTM(N_UNITS, N_UNITS, name: "l2 LSTM"),
        new Dropout(),
        new LSTM(N_UNITS, N_UNITS, name: "l3 LSTM"),
        new Dropout(),
        new Linear(N_UNITS, nVocab, name: "l4 Linear")
    );

    // (translated) Rather than hard-capping at the given threshold, the
    // correction rate is derived from the L2 norm of all parameters.
    GradientClipping gradientClipping = new GradientClipping(threshold: GRAD_CLIP);
    SGD sgd = new SGD(learningRate: 1);
    model.SetOptimizer(gradientClipping, sgd);

    Real wholeLen = trainData.Length;
    int jump = (int)Math.Floor(wholeLen / BATCH_SIZE);
    int epoch = 0;
    // Buffer of forward results, consumed newest-first during truncated BPTT.
    Stack<NdArray[]> backNdArrays = new Stack<NdArray[]>();

    Console.WriteLine("Train Start.");

    for (int i = 0; i < jump * N_EPOCH; i++)
    {
        // Build a batch: each lane j reads the token stream at its own offset;
        // the target t is the following token (next-word prediction).
        NdArray x = new NdArray(new[] { 1 }, BATCH_SIZE);
        NdArray t = new NdArray(new[] { 1 }, BATCH_SIZE);

        for (int j = 0; j < BATCH_SIZE; j++)
        {
            x.Data[j] = trainData[(int)((jump * j + i) % wholeLen)];
            t.Data[j] = trainData[(int)((jump * j + i + 1) % wholeLen)];
        }

        NdArray[] result = model.Forward(x);
        Real sumLoss = new SoftmaxCrossEntropy().Evaluate(result, t);
        backNdArrays.Push(result);
        // NOTE(review): denominator shown is jump, but i runs to jump * N_EPOCH — confirm intended.
        Console.WriteLine("[{0}/{1}] Loss: {2}", i + 1, jump, sumLoss);

        //Run truncated BPTT
        if ((i + 1) % BPROP_LEN == 0)
        {
            // Backpropagate through all buffered forward results, then
            // apply the update and reset the recurrent state.
            for (int j = 0; backNdArrays.Count > 0; j++)
            {
                Console.WriteLine("backward" + backNdArrays.Count);
                model.Backward(backNdArrays.Pop());
            }

            model.Update();
            model.ResetState();
        }

        // End of an epoch: evaluate, and decay the learning rate after epoch 6.
        if ((i + 1) % jump == 0)
        {
            epoch++;
            Console.WriteLine("evaluate");
            Console.WriteLine("validation perplexity: {0}", Evaluate(model, validData));

            if (epoch >= 6)
            {
                sgd.LearningRate /= 1.2;
                Console.WriteLine("learning rate =" + sgd.LearningRate);
            }
        }
    }

    Console.WriteLine("test start");
    Console.WriteLine("test perplexity:" + Evaluate(model, testData));
}
/// <summary>
/// Builds a silent reminder-style toast for a vocabulary word (no audio
/// playback button): shows the word, its IPA and translation, plus
/// Next / View / Skip actions.
/// </summary>
/// <param name="_item">The vocabulary entry to display.</param>
/// <returns>The fully assembled toast content.</returns>
private static ToastContent getToastContentWithoutPlay(Vocabulary _item)
{
    ToastContent content = new ToastContent()
    {
        Launch = "vocabulary-reminder",
        // Silent: no notification sound for reminders.
        Audio = new ToastAudio() { Silent = true },
        Visual = new ToastVisual()
        {
            BindingGeneric = new ToastBindingGeneric()
            {
                // Word (single line), pronunciation, translation.
                Children =
                {
                    new AdaptiveText() { Text = _item.Word, HintMaxLines = 1 },
                    new AdaptiveText() { Text = _item.Ipa, },
                    new AdaptiveText() { Text = _item.Translate }
                },
                HeroImage = new ToastGenericHeroImage()
                {
                    Source = "https://picsum.photos/364/180?image=1043"
                },
            }
        },
        Scenario = ToastScenario.Reminder,
        Actions = new ToastActionsCustom()
        {
            Buttons =
            {
                // "Next": background-activated, toast stays up pending update.
                new ToastButton("Next", new QueryString() { { "WordId", _item.Id.ToString() }, { "action", "next" }, }.ToString())
                {
                    ActivationType = ToastActivationType.Background,
                    ActivationOptions = new ToastActivationOptions() { AfterActivationBehavior = ToastAfterActivationBehavior.PendingUpdate }
                },
                // "View": foreground activation opening the online dictionary.
                new ToastButton("View", new QueryString() { { "action", "view" }, { "url", viewDicOnlineUrl + _item.Word } }.ToString()),
                // "Skip": background dismiss.
                new ToastButton("Skip", "dismiss") { ActivationType = ToastActivationType.Background },
            }
        },
    };
    return (content);
}
/// <summary>
/// Builds an ID3 learner over the given training example set and
/// vocabulary, starting with an empty word-gain list.
/// </summary>
public ID3(ExampleSet t_set, Vocabulary voc)
{
    m_t_set = t_set;
    m_voc = voc;
    m_wordGainList = new List<WordGainPair>();
}
/// <summary>
/// Parses every "vocabulary" node from the response's info section into a
/// Vocabulary instance and returns them as a read-only collection.
/// </summary>
/// <param name="methodNSSuffix">Namespace suffix used to locate the info node.</param>
/// <param name="response">The service response to read from.</param>
private static ReadOnlyCollection<Vocabulary> CreateVocabulariesFromResponse(
    string methodNSSuffix,
    HealthServiceResponseData response)
{
    XPathNavigator info = response.InfoNavigator.SelectSingleNode(
        Vocabulary.GetInfoXPathExpression(methodNSSuffix, response.InfoNavigator));

    var result = new List<Vocabulary>();
    foreach (XPathNavigator node in info.Select("vocabulary"))
    {
        var entry = new Vocabulary();
        entry.PopulateFromXml(node);
        result.Add(entry);
    }

    return new ReadOnlyCollection<Vocabulary>(result);
}
// Initialization
// Shared fixture for this test class: a vocabulary using learned word
// vectors with the configured number of dimensions.
public VocabularyTests() { _vocabulary = new Vocabulary(NumberOfWordVectorDimensions, Vocabulary.WordVectorType.Learned); }
/// <summary>
/// A single-word vocabulary ("TEST", min repeat 1, max repeat 2) should
/// hash to 6.
/// </summary>
public void GetHashCodeTest0()
{
    var vocab = new Vocabulary(new[] { "TEST" }, 1, 2);

    Assert.AreEqual(6, vocab.GetHashCode());
}
/// <summary>
/// Deletes the given vocabulary through the vocabulary controller.
/// </summary>
public void DeleteVocabulary(Vocabulary vocabulary)
{
    _vocabularyController.DeleteVocabulary(vocabulary);
}
/// <summary>
/// Walks the context-state chain for the single word "AB" with a max
/// repeat count of 2, asserting the character, repeat count, and
/// word-end flags/probabilities at every step. Word-end is only signalled
/// while another repeat is still allowed (minRepeatCount &lt; MaxRepeatCount).
/// </summary>
public void NextStatesTest2()
{
    const int MaxRepeatCount = 2;
    for (int minRepeatCount = 1; minRepeatCount <= MaxRepeatCount; minRepeatCount++)
    {
        Vocabulary context = new Vocabulary(new string[] { "AB" }, minRepeatCount, MaxRepeatCount);

        // Repeat 1, 'A': never a word end.
        State[] states = context.InitialState.NextStates().Values.ToArray();
        Assert.AreEqual(1, states.Length);
        Context.ContextState state = states[0] as Context.ContextState;
        Assert.AreEqual('A', state.Char);
        Assert.IsFalse(state.WordEnd);
        Assert.IsFalse(state.ContextWordEnd);
        Assert.AreEqual(1, state.RepeatCount);
        Assert.AreEqual(1.0f, state.CharProbability);
        Assert.AreEqual(0.0f, state.WordEndProbability);

        // Repeat 1, 'B': word end only when a further repeat may follow.
        states = state.NextStates().Values.ToArray();
        Assert.AreEqual(1, states.Length);
        state = states[0] as Context.ContextState;
        Assert.AreEqual('B', state.Char);
        Assert.AreEqual(minRepeatCount < MaxRepeatCount, state.WordEnd);
        Assert.AreEqual(minRepeatCount < MaxRepeatCount, state.ContextWordEnd);
        Assert.AreEqual(1, state.RepeatCount);
        Assert.AreEqual(1.0f, state.CharProbability);
        Assert.AreEqual(1.0f, state.WordEndProbability);

        // Space separator before the second repeat (RepeatCount advances to 2).
        states = state.NextStates().Values.ToArray();
        Assert.AreEqual(1, states.Length);
        state = states[0] as Context.ContextState;
        Assert.AreEqual(' ', state.Char);
        Assert.IsFalse(state.WordEnd);
        Assert.IsFalse(state.ContextWordEnd);
        Assert.AreEqual(2, state.RepeatCount);
        Assert.AreEqual(1.0f, state.CharProbability);
        Assert.AreEqual(0.0f, state.WordEndProbability);

        // Repeat 2, 'A'.
        states = state.NextStates().Values.ToArray();
        Assert.AreEqual(1, states.Length);
        state = states[0] as Context.ContextState;
        Assert.AreEqual('A', state.Char);
        Assert.IsFalse(state.WordEnd);
        Assert.IsFalse(state.ContextWordEnd);
        Assert.AreEqual(2, state.RepeatCount);
        Assert.AreEqual(1.0f, state.CharProbability);
        Assert.AreEqual(0.0f, state.WordEndProbability);

        // Repeat 2, final 'B': definite word end; the chain terminates here.
        states = state.NextStates().Values.ToArray();
        Assert.AreEqual(1, states.Length);
        state = states[0] as Context.ContextState;
        Assert.AreEqual('B', state.Char);
        Assert.IsTrue(state.WordEnd);
        Assert.IsTrue(state.ContextWordEnd);
        Assert.AreEqual(2, state.RepeatCount);
        Assert.AreEqual(1.0f, state.CharProbability);
        Assert.AreEqual(1.0f, state.WordEndProbability);
        Assert.IsNull(state.NextStates());
    }
}
/// <summary>
/// Demo driver: round-trips variable-byte encoding on random numbers,
/// builds compressed and plain vocabularies/indexes from the input files,
/// writes all four artefacts to disk, then serves an interactive search
/// loop (never returns; terminate with Ctrl+C).
/// </summary>
static void Main(string[] args)
{
    //File path array
    string[] pathArray = new[]
    {
        //Enter path to file(s) from which you want to compose a vocabulary/index
        "D:\\avidreaders.ru__animal-farm-a-fairy-story.fb2",
        "D:\\meta.fb2",
    };

    CompressedReverseIndex cri = new CompressedReverseIndex(); //Init CompressedReverseIndex

    Console.WriteLine("TEST VIRTUAL BYTE ENCODING AND DECODING WITH RANDOM NUMBERS");
    var rand = new Random();
    for (int i = 0; i < 10; i++)
    {
        int before = rand.Next(0, 1000); //Get random in 0-1000 range
        int after = cri.VBDecode(cri.VBEncode(before)); //Encode this random number and decode result
        Console.WriteLine("BEFORE ENCODING: " + before + " | AFTER ENCODING: " + after);
    }

    Index idx = new Index(); //Init Index
    HashSet<string> set = new HashSet<string>();

    //Read every document from paths
    for (int i = 0; i < pathArray.Length; i++)
    {
        string file_content = System.IO.File.ReadAllText(pathArray[i]);
        // NOTE(review): "\\s+<*>\\s+" matches a run of '<' followed by '>',
        // not an arbitrary XML tag — confirm tags are really stripped as the
        // original comment claims; non-letters are removed by the alternation.
        string[] contentStrs = Regex.Split(file_content, "\\s+<*>\\s+|[^a-zA-Z]+"); //Regex which tokenizes text, removes XML-tags and numbers
        foreach (string str in contentStrs)
        {
            if (!String.IsNullOrWhiteSpace(str)) //Check if token is not empty string or contains only whitespaces
            {
                /* Add word token to HastSet, this is needed to add words in
                 * compressed vocabulary later (because it doesn`t support adding one word) */
                if (!set.Contains(str))
                {
                    set.Add(str);
                }
                cri.Add(str, i); //Add word to CompressedReverseIndex
                idx.Add(str, i); //Add word to Index
            }
        }
    }

    Vocabulary vcb = new Vocabulary(); //Init vocabulary
    CompressedVocabulary cv = new CompressedVocabulary(4); //Init CompressedVocabulary

    string[] arr = new string[set.Count];
    set.CopyTo(arr); // Move values from HashSet of lexemes to array
    cv.Add(arr);
    vcb.AddRange(arr); //Add array to compressed and non-compressed vocabulary

    //Write all vocabularies and indexes somewhere (in this case, folder on disc D) to make sure that compression actually works
    WriteFile<CompressedVocabulary>("D:\\CompressedVsUsualIndex\\COMPRESSED_VOCAB.txt", cv);
    WriteFile<Vocabulary>("D:\\CompressedVsUsualIndex\\NON_COMPRESSED_VOCAB.txt", vcb);
    WriteFile<CompressedReverseIndex>("D:\\CompressedVsUsualIndex\\COMPRESSED_INDEX.txt", cri);
    WriteFile<Index>("D:\\CompressedVsUsualIndex\\NON_COMPRESSED_INDEX.txt", idx);

    //Search in compressed index/vocabulary — infinite interactive loop.
    while (true)
    {
        Console.Write("Enter word: ");
        string query = Console.ReadLine();
        Console.WriteLine("VOCABULARY SEARCH: " + cv.Search(query));
        Console.WriteLine("INDEX SEARCH\n" + cri.StringifySearchResult(query));
    }
}
/// <summary>
/// Lookup-creation entry point. Reads configuration from pseudo-XML tag
/// arguments (&lt;cs&gt;…&lt;/cs&gt; etc.) or falls back to the "Builder"
/// connection string, loads the vocabulary, fills location / care-site /
/// provider lookup lists (with stub fallbacks for the first two), and
/// saves them to the destination database.
/// </summary>
private static void Main(string[] inputArgs)
{
    Console.WriteLine("Lookup creation in progress, please do not close the window...");
    try
    {
        if (inputArgs.Length > 0)
        {
            // Arguments arrive as one string of <tag>value</tag> pairs;
            // each lookbehind/lookahead regex extracts one value.
            var args = string.Join(" ", inputArgs);
            var builderConnectionString = Regex.Match(args, @"(?s)(?<=\<cs\>).*?(?=\<\/cs\>)", RegexOptions.IgnoreCase).Value;
            var s3awsAccessKeyId = Regex.Match(args, @"(?s)(?<=\<s3keyid\>).*?(?=\<\/s3keyid\>)", RegexOptions.IgnoreCase).Value;
            var s3awsSecretAccessKey = Regex.Match(args, @"(?s)(?<=\<s3accesskey\>).*?(?=\<\/s3accesskey\>)", RegexOptions.IgnoreCase).Value;
            var ec2awsAccessKeyId = Regex.Match(args, @"(?s)(?<=\<ec2keyid\>).*?(?=\<\/ec2keyid\>)", RegexOptions.IgnoreCase).Value;
            var ec2awsSecretAccessKey = Regex.Match(args, @"(?s)(?<=\<ec2accesskey\>).*?(?=\<\/ec2accesskey\>)", RegexOptions.IgnoreCase).Value;
            var bucket = Regex.Match(args, @"(?s)(?<=\<bucket\>).*?(?=\<\/bucket\>)", RegexOptions.IgnoreCase).Value;
            var cdmFolder = Regex.Match(args, @"(?s)(?<=\<cdmFolder\>).*?(?=\<\/cdmFolder\>)", RegexOptions.IgnoreCase).Value;
            var saveOnlyToS3 = Regex.Match(args, @"(?s)(?<=\<saveOnlyToS3\>).*?(?=\<\/saveOnlyToS3\>)", RegexOptions.IgnoreCase).Value;
            var storageType = Regex.Match(args, @"(?s)(?<=\<storageType\>).*?(?=\<\/storageType\>)", RegexOptions.IgnoreCase).Value;

            Settings.Initialize(builderConnectionString, Environment.MachineName);
            Settings.Current.S3AwsAccessKeyId = s3awsAccessKeyId;
            Settings.Current.S3AwsSecretAccessKey = s3awsSecretAccessKey;
            Settings.Current.Ec2AwsAccessKeyId = ec2awsAccessKeyId;
            Settings.Current.Ec2AwsSecretAccessKey = ec2awsSecretAccessKey;
            Settings.Current.Bucket = bucket;
            Settings.Current.CDMFolder = cdmFolder;
            Settings.Current.SaveOnlyToS3 = Parse(saveOnlyToS3);
            // Storage type is optional; keep the current value if unparseable.
            if (Enum.TryParse(storageType, out S3StorageType type))
            {
                Settings.Current.StorageType = type;
            }

            Console.WriteLine($"Bucket ={Settings.Current.Bucket};CDMFolder={Settings.Current.CDMFolder}");
            Console.WriteLine($"SaveOnlyToS3={Settings.Current.SaveOnlyToS3}; StorageType={Settings.Current.StorageType};");
        }
        else
        {
            // No CLI args: fall back to the app.config connection string.
            Settings.Initialize(ConfigurationManager.ConnectionStrings["Builder"].ConnectionString, Environment.MachineName);
        }

        Console.WriteLine("Settings initialized");
        Console.WriteLine($"Vendor={Settings.Current.Building.Vendor}");

        Console.WriteLine("Loading vocabulary...");
        var vocabulary = new Vocabulary();
        vocabulary.Fill(true);
        Console.WriteLine("Vocabulary was loaded");

        var locationConcepts = new List<Location>();
        var careSiteConcepts = new List<CareSite>();
        var providerConcepts = new List<Provider>();

        // Locations: first suitable query definition, else a single stub row.
        Console.WriteLine("Loading locations...");
        var location = Settings.Current.Building.SourceQueryDefinitions.FirstOrDefault(qd => qd.Locations != null && qd.IsSuitable(qd.Query.Database, Settings.Current.Building.Vendor));
        if (location != null)
        {
            FillList<Location>(locationConcepts, location, location.Locations[0]);
        }
        if (locationConcepts.Count == 0)
        {
            locationConcepts.Add(new Location { Id = Entity.GetId(null) });
        }
        Console.WriteLine("Locations was loaded");

        // Care sites: same pattern, with a zeroed stub fallback.
        Console.WriteLine("Loading care sites...");
        var careSite = Settings.Current.Building.SourceQueryDefinitions.FirstOrDefault(qd => qd.CareSites != null && qd.IsSuitable(qd.Query.Database, Settings.Current.Building.Vendor));
        if (careSite != null)
        {
            FillList<CareSite>(careSiteConcepts, careSite, careSite.CareSites[0]);
        }
        if (careSiteConcepts.Count == 0)
        {
            careSiteConcepts.Add(new CareSite { Id = 0, LocationId = 0, OrganizationId = 0, PlaceOfSvcSourceValue = null });
        }
        Console.WriteLine("Care sites was loaded");

        // Providers: no stub fallback — an empty list is allowed.
        Console.WriteLine("Loading providers...");
        var provider = Settings.Current.Building.SourceQueryDefinitions.FirstOrDefault(qd => qd.Providers != null && qd.IsSuitable(qd.Query.Database, Settings.Current.Building.Vendor));
        if (provider != null)
        {
            FillList<Provider>(providerConcepts, provider, provider.Providers[0]);
        }
        Console.WriteLine("Providers was loaded");

        Console.WriteLine("Saving lookups...");
        var saver = Settings.Current.Building.DestinationEngine.GetSaver();
        using (saver.Create(Settings.Current.Building.DestinationConnectionString))
        {
            saver.SaveEntityLookup(locationConcepts, careSiteConcepts, providerConcepts);
        }
        Console.WriteLine("Lookups was saved " + Settings.Current.Building.DestinationEngine.Database);
    }
    catch (Exception e)
    {
        // Report and hold the console open so the operator can read the error.
        Console.WriteLine(e.Message);
        Console.WriteLine(e.StackTrace);
        Console.ReadLine();
    }
}
/// <summary>
/// Scrapes the online dictionary page for a word: fills the item's
/// definition, up to two examples, up to two IPA pronunciations, and up to
/// two audio URLs, then persists the item only when a play URL was found.
/// </summary>
/// <param name="item">The vocabulary entry to enrich (mutated in place).</param>
public static async Task goGetPlayURLAsync(Vocabulary item)
{
    // NOTE(review): creating a new HttpClient per call risks socket
    // exhaustion under load — consider a shared static instance.
    using (HttpClient httpClient = new HttpClient())
    {
        //try
        //{
        string _wordUrl = mainGetPlayUrl + item.Word;
        HttpResponseMessage response = await httpClient.GetAsync(_wordUrl);
        HttpContent content = response.Content;
        HtmlDocument document = new HtmlDocument();
        document.LoadHtml(await content.ReadAsStringAsync());

        // Get Title
        // First definition span on the page, if any.
        var DefineNode = document.DocumentNode.SelectSingleNode("(//span[@class='def'])[1]");
        if (DefineNode != null)
        {
            item.Define = DefineNode.InnerText;
        }

        // Up to two usage examples (XPath position()<3).
        var ExampleNodes = document.DocumentNode.SelectNodes("(//ul[@class='examples']//span[@class='x'])[position()<3]");
        if (ExampleNodes != null && ExampleNodes.Count > 0)
        {
            int _count = 0;
            foreach (var node in ExampleNodes)
            {
                _count++;
                if (_count == 1)
                {
                    item.Example = (node != null) ? node.InnerText : "";
                }
                else if (_count == 2)
                {
                    item.Example2 = (node != null) ? node.InnerText : "";
                }
            }
        }

        // Up to two IPA pronunciations from the phonetics section.
        var IpaNodes = document.DocumentNode.SelectNodes("//span[@class='phonetics']//div[contains(@class, 'phon')]");
        if (IpaNodes != null && IpaNodes.Count > 0)
        {
            int _count = 0;
            foreach (var node in IpaNodes)
            {
                _count++;
                if (_count == 1)
                {
                    item.Ipa = (node != null) ? node.InnerText : "";
                }
                else if (_count == 2)
                {
                    item.Ipa2 = (node != null) ? node.InnerText : "";
                }
            }
        }

        // Audio URLs carried in the data-src-mp3 attribute of "sound" divs.
        var soundNodes = document.DocumentNode.SelectNodes("//span[@class='phonetics']/div/div[contains(@class, 'sound')][1]");
        if (soundNodes != null && soundNodes.Count > 0)
        {
            int _count = 0;
            foreach (var node in soundNodes)
            {
                _count++;
                if (_count == 1)
                {
                    item.PlayURL = (node != null) ? node.GetAttributeValue("data-src-mp3", "") : "";
                }
                else if (_count == 2)
                {
                    item.PlayURL2 = (node != null) ? node.GetAttributeValue("data-src-mp3", "") : "";
                }
            }
        }

        // Persist only when an audio URL was actually found.
        if (!String.IsNullOrEmpty(item.PlayURL))
        {
            DataAccess.UpdatePlayURL(item);
        }
        //} catch (Exception ex)
        //{
        //    Debug.WriteLine(ex.Message);
        //}
    }
}
/// <summary>
/// Trains a simple recurrent language model (embedding + manually wired
/// Linear/Tanh recurrence) sentence-by-sentence up to each EOS token, then
/// estimates test-set perplexity (2^(sum/wnum)) over the first 1000 tokens,
/// skipping sentences that contain unknown words.
/// </summary>
public static void Run()
{
    RILogManager.Default?.SendDebug("Building Vocabulary.");

    Vocabulary vocabulary = new Vocabulary();

    string trainPath = InternetFileDownloader.Download(DOWNLOAD_URL + TRAIN_FILE, TRAIN_FILE);
    string testPath = InternetFileDownloader.Download(DOWNLOAD_URL + TEST_FILE, TEST_FILE);

    int[] trainData = vocabulary.LoadData(trainPath);
    int[] testData = vocabulary.LoadData(testPath);

    int nVocab = vocabulary.Length;

    RILogManager.Default?.SendDebug("Network Initializing.");
    FunctionStack model = new FunctionStack("Test9",
        new EmbedID(nVocab, N_UNITS, name: "l1 EmbedID"),
        new Linear(true, N_UNITS, N_UNITS, name: "l2 Linear"),
        new Tanh("l2 Tanh"),
        new Linear(true, N_UNITS, nVocab, name: "l3 Linear"),
        new Softmax("l3 Softmax")
    );
    model.SetOptimizer(new Adam());

    // Current sentence buffer, flushed at each EOS token.
    List<int> s = new List<int>();

    RILogManager.Default?.SendDebug("Train Start.");
    SoftmaxCrossEntropy softmaxCrossEntropy = new SoftmaxCrossEntropy();

    for (int epoch = 0; epoch < TRAINING_EPOCHS; epoch++)
    {
        for (int pos = 0; pos < trainData.Length; pos++)
        {
            // Fresh hidden state per token position; recurrence is carried
            // through h inside the per-sentence loop below.
            NdArray h = new NdArray(new Real[N_UNITS]);

            int id = trainData[pos];
            s.Add(id);

            if (id == vocabulary.EosID)
            {
                // Unroll the sentence: predict the next token at each step
                // (EOS as the target for the last position).
                Real accumloss = 0;
                Stack<NdArray> tmp = new Stack<NdArray>();

                for (int i = 0; i < s.Count; i++)
                {
                    int tx = i == s.Count - 1 ? vocabulary.EosID : s[i + 1];

                    //l1 EmbedID
                    NdArray l1 = model.Functions[0].Forward(true, s[i])[0];

                    //l2 Linear
                    NdArray l2 = model.Functions[1].Forward(true, h)[0];

                    //Add
                    NdArray xK = l1 + l2;

                    //l2 Tanh
                    h = model.Functions[2].Forward(true, xK)[0];

                    //l3 Linear
                    NdArray h2 = model.Functions[3].Forward(true, h)[0];

                    Real loss = softmaxCrossEntropy.Evaluate(h2, tx);
                    tmp.Push(h2);
                    accumloss += loss;
                }

                RILogManager.Default?.SendDebug(accumloss.ToString());

                // Backprop through the buffered outputs, newest first.
                for (int i = 0; i < s.Count; i++)
                {
                    model.Backward(true, tmp.Pop());
                }

                model.Update();
                s.Clear();
            }

            if (pos % 100 == 0)
            {
                RILogManager.Default?.SendDebug(pos + "/" + trainData.Length + " finished");
            }
        }
    }

    RILogManager.Default?.SendDebug("Test Start.");

    Real sum = 0;
    int wnum = 0;
    List<int> ts = new List<int>();
    bool unkWord = false;

    for (int pos = 0; pos < 1000; pos++)
    {
        int id = testData[pos];
        ts.Add(id);

        // NOTE(review): this compares the token id against the training
        // data LENGTH, not the vocabulary size — confirm the intended
        // unknown-word condition.
        if (id > trainData.Length)
        {
            unkWord = true;
        }

        if (id == vocabulary.EosID)
        {
            if (!unkWord)
            {
                RILogManager.Default?.SendDebug("pos: " + pos);
                RILogManager.Default?.SendDebug("tsLen: " + ts.Count);
                RILogManager.Default?.SendDebug("sum: " + sum);
                RILogManager.Default?.SendDebug("wnum: " + wnum);
                RILogManager.Default?.ViewerSendWatch("pos", pos);
                RILogManager.Default?.ViewerSendWatch("tsLen", ts.Count);
                RILogManager.Default?.ViewerSendWatch("sum", sum);
                RILogManager.Default?.ViewerSendWatch("wnum", wnum);

                sum += CalPs(model, ts);
                wnum += ts.Count - 1;
            }
            else
            {
                // Sentence contained an unknown word: skip it and reset.
                unkWord = false;
            }

            ts.Clear();
        }
    }

    RILogManager.Default?.SendDebug(Math.Pow(2.0, sum / wnum).ToString());
}
/// <summary>
/// Entry point for the chunk builder. Reads settings either from tag-style
/// command-line arguments (&lt;cs&gt;...&lt;/cs&gt;, &lt;bucket&gt;...&lt;/bucket&gt;, ...) or,
/// when no arguments are given, from the "Builder" connection string in the
/// app config. Creates the CDM chunks and, for Redshift sources, uploads
/// the vocabulary to S3.
/// </summary>
static void Main(string[] inputArgs)
{
    Console.WriteLine("Chunks creation in progress, please do not close the window...");

    try
    {
        bool restart = false;
        if (inputArgs.Length > 0)
        {
            var args = string.Join(" ", inputArgs);

            // Every argument value is wrapped in a pseudo-XML tag; extract
            // each one with the shared helper instead of repeating the regex.
            var builderConnectionString = GetTagValue(args, "cs");
            var s3awsAccessKeyId = GetTagValue(args, "s3keyid");
            var s3awsSecretAccessKey = GetTagValue(args, "s3accesskey");
            var ec2awsAccessKeyId = GetTagValue(args, "ec2keyid");
            var ec2awsSecretAccessKey = GetTagValue(args, "ec2accesskey");
            var bucket = GetTagValue(args, "bucket");
            restart = Parse(GetTagValue(args, "restart"));
            var cdmFolder = GetTagValue(args, "cdmFolder");
            var saveOnlyToS3 = GetTagValue(args, "saveOnlyToS3");
            var storageType = GetTagValue(args, "storageType");

            Settings.Initialize(builderConnectionString, Environment.MachineName);
            Settings.Current.S3AwsAccessKeyId = s3awsAccessKeyId;
            Settings.Current.S3AwsSecretAccessKey = s3awsSecretAccessKey;
            Settings.Current.Ec2AwsAccessKeyId = ec2awsAccessKeyId;
            Settings.Current.Ec2AwsSecretAccessKey = ec2awsSecretAccessKey;
            Settings.Current.Bucket = bucket;
            Settings.Current.CDMFolder = cdmFolder;
            Settings.Current.SaveOnlyToS3 = Parse(saveOnlyToS3);

            // Storage type is optional; keep the default when unparsable.
            if (Enum.TryParse(storageType, out S3StorageType type))
            {
                Settings.Current.StorageType = type;
            }

            Console.WriteLine($"Bucket ={Settings.Current.Bucket};CDMFolder={Settings.Current.CDMFolder}");
            Console.WriteLine($"SaveOnlyToS3={Settings.Current.SaveOnlyToS3}; StorageType={Settings.Current.StorageType};");
        }
        else
        {
            // No arguments: fall back to the app.config connection string.
            Settings.Initialize(ConfigurationManager.ConnectionStrings["Builder"].ConnectionString, Environment.MachineName);
        }

        Console.WriteLine("Settings initialized");
        Console.WriteLine($"Vendor={Settings.Current.Building.Vendor}; restart={restart}");
        Console.WriteLine($"BuildingId={Settings.Current.Building.Id}; Chunk size={Settings.Current.Building.BatchSize}");

        var chunkController = new ChunkController();
        chunkController.CreateChunks(restart);

        // Redshift builds read the vocabulary from S3, so push it up now.
        if (Settings.Current.Building.SourceEngine.Database == Database.Redshift)
        {
            Console.WriteLine("Saving vocabulary to S3...");
            var vocabulary = new Vocabulary();
            vocabulary.SaveToS3();
            Console.WriteLine("Vocabulary was saved to S3");
        }

        Console.WriteLine("DONE.");
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
        Console.WriteLine(e.StackTrace);
        // Keep the window open so the operator can read the failure.
        Console.ReadLine();
    }
}

/// <summary>
/// Extracts the text between &lt;tag&gt; and &lt;/tag&gt; in the argument string
/// (case-insensitive, value may span lines); empty string when absent.
/// </summary>
private static string GetTagValue(string args, string tag)
{
    return Regex.Match(args, $@"(?s)(?<=\<{tag}\>).*?(?=\<\/{tag}\>)", RegexOptions.IgnoreCase).Value;
}
public virtual string ToString(string[] tokenNames) { return(ToString(Vocabulary.FromTokenNames(tokenNames))); }
/// <summary>
/// Unity immediate-mode GUI handler. Draws, in order: the main selection
/// menu (language/course/level/unit/lesson/activity popups), the activity
/// type menu, a loading overlay, and finally triggers the level load.
/// </summary>
void OnGUI ()
{
    #region MainMenu
    //Calls for Pop up script
    if ( hideMenuGUI == false )
    {
        // Six Popup.List calls lay out a 3x2-ish grid of selection lists,
        // each a third of the screen wide/tall.
        //Language
        Popup.List( // 1
            new Rect( 0, 0, BaseSceneClass.ourScreen.width / 3, BaseSceneClass.ourScreen.height / 3 ),
            ref languageSelectedBool, ref showListLanguage, ref languageUsable, ref languageSelected,
            new GUIContent( "Language" ), languageContentForPopup.ToArray(), theListStyle );
        //Course
        Popup.List( // 2
            new Rect( 0, BaseSceneClass.ourScreen.height / 3, BaseSceneClass.ourScreen.width / 3, BaseSceneClass.ourScreen.height / 3 ),
            ref courseSelectedBool, ref showListCourse, ref courseUsable, ref courseSelected,
            new GUIContent( "Course" ), courseContentForPopup.ToArray(), theListStyle );
        //Level
        Popup.List( // 3
            new Rect( 0, BaseSceneClass.ourScreen.height - BaseSceneClass.ourScreen.height / 3, BaseSceneClass.ourScreen.width / 3, BaseSceneClass.ourScreen.height / 3 ),
            ref levelSelectedBool, ref showListLevel, ref levelUsable, ref levelSelected,
            new GUIContent( "Level" ), levelContentForPopup.ToArray(), theListStyle );
        //Unit
        Popup.List( // 4
            new Rect( BaseSceneClass.ourScreen.width / 3, 0, BaseSceneClass.ourScreen.width / 3, BaseSceneClass.ourScreen.height / 3 ),
            ref unitSelectedBool, ref showListUnit, ref unitUsable, ref unitSelected,
            new GUIContent( "Unit" ), unitContentForPopup.ToArray(), theListStyle );
        //Lesson
        Popup.List( // 5
            new Rect( BaseSceneClass.ourScreen.width / 3, BaseSceneClass.ourScreen.height / 3, BaseSceneClass.ourScreen.width / 3, BaseSceneClass.ourScreen.height / 3 ),
            ref lessonSelectedBool, ref showListLesson, ref lessonUsable, ref lessonSelected,
            new GUIContent( "Lesson" ), lessonContentForPopup.ToArray(), theListStyle );
        //Activity
        Popup.List( // 6
            new Rect( BaseSceneClass.ourScreen.width / 3, BaseSceneClass.ourScreen.height - BaseSceneClass.ourScreen.height / 3, BaseSceneClass.ourScreen.width / 3, BaseSceneClass.ourScreen.height / 3 ),
            ref activitySelectedBool, ref showListActivity, ref activityUsable, ref activitySelected,
            new GUIContent( "Activity" ), activityContentForPopup.ToArray(), theListStyle );

        /* Load Activity Button will need to be adjusted for use with
         * other types of activities currently good for use with Vocabulary*/
        if ( activitySelected != -1 )
        {
            // Only show the load button once an activity has been picked.
            if ( GUI.Button( new Rect( BaseSceneClass.ourScreen.width - BaseSceneClass.ourScreen.width / 3, BaseSceneClass.ourScreen.height / 3, BaseSceneClass.ourScreen.width / 3, BaseSceneClass.ourScreen.height / 3 ), "Load Activity" ) )
            {
                Event.current.Use();
                theScene.initialized = false;
                // Swap from the main menu to the activity-type menu.
                hideMenuGUI = true;
                hideActivityMenuGui = false;
            }
        }
    }
    #endregion

    #region ActivityMenu
    // Activity Menu, will need adjusting to work with more then just vocabulary
    if ( hideActivityMenuGui == false )
    {
        //Used to draw the activity menu
        GUI.DrawTexture( new Rect( 0, 0, Screen.width, Screen.height ), VocabMenuTex );

        // Using the guiContains function, make invisible buttons over the textures prompting for each Vocab type and well as a back button
        if ( Input.GetMouseButtonUp( 0 ) && delay <= 0)
        {
            // Top-left: Learn.
            if ( Popup.guiContains( Input.mousePosition, new Rect( 0, 0, BaseSceneClass.ourScreen.width * 0.43f, BaseSceneClass.ourScreen.height / 3 ) ) )
            {
                Fill( "learn", BaseSceneClass.VocabType.Learn );
            }
            // Top-right: Practice.
            if ( Popup.guiContains( Input.mousePosition, new Rect( BaseSceneClass.ourScreen.width - BaseSceneClass.ourScreen.width * 0.43f, 0, BaseSceneClass.ourScreen.width * 0.43f, BaseSceneClass.ourScreen.height / 3 ) ) )
            {
                Fill( "Practice", BaseSceneClass.VocabType.Practice );
            }
            // Bottom-left: Review.
            if ( Popup.guiContains( Input.mousePosition, new Rect( 0, BaseSceneClass.ourScreen.height - BaseSceneClass.ourScreen.height / 3, BaseSceneClass.ourScreen.width * 0.43f, BaseSceneClass.ourScreen.height / 3 ) ) )
            {
                Fill( "Review", BaseSceneClass.VocabType.Review );
            }
            // Bottom-right: Quiz.
            if ( Popup.guiContains( Input.mousePosition, new Rect( BaseSceneClass.ourScreen.width - BaseSceneClass.ourScreen.width * 0.43f, BaseSceneClass.ourScreen.height - BaseSceneClass.ourScreen.height / 3, BaseSceneClass.ourScreen.width * 0.43f, BaseSceneClass.ourScreen.height / 3 ) ) )
            {
                Fill( "Quiz", BaseSceneClass.VocabType.Quiz );
            }
            // Center: back to the main menu.
            if ( Popup.guiContains( Input.mousePosition, new Rect( BaseSceneClass.ourScreen.width / 3, BaseSceneClass.ourScreen.height / 3, BaseSceneClass.ourScreen.width / 3, BaseSceneClass.ourScreen.height / 3 ) ) )
            {
                Back();
            }
        }
    }
    #endregion

    #region Finalize
    //Loading screen code section
    //This is here since loading on the same GUI update would not render the loading dialog
    if ( selectedVocabType )
    {
        GUI.DrawTexture( new Rect(
            BaseSceneClass.ourScreen.width / 3, //starting point
            BaseSceneClass.ourScreen.height / 3,//Starting point
            BaseSceneClass.ourScreen.width / 3, //Width
            BaseSceneClass.ourScreen.height / 3 //Height
            ), loading );
        // Count down a few frames so the loading texture is actually drawn
        // before the (blocking) level load starts.
        if ( loader > 0 )
            loader--;
        if ( loader == 0 )
        {
            selectedVocabType = false;
            load = true;
        }
    }
    #endregion

    #region Load
    //Activity Loading code section
    if ( load )
    {
        if ( lastActivity != activitySelected )
        {
            //Only pull the words and meaning from the database if we haven't already.
            //This will always run the first time and whenever the selected vocab changes
            selectedVocab = dataBase.sql.QueryDatabaseForVocab( activityList [ activitySelected ] );
            lastActivity = activitySelected;
        }
        load = false;
        Application.LoadLevel( level );
        theScene.Type = type;
        initialized = true;
    }
    #endregion
}
/// <summary>
/// Routes each entity to the CDM table matching its (possibly re-mapped)
/// domain and appends it to the current chunk. Condition and Drug rows are
/// additionally collected for later era building; Procedure/Observation
/// rows whose domain was re-mapped are also written back to their original
/// table with ConceptId 0 (HIX-823).
/// </summary>
public override void AddToChunk(string domain, IEnumerable <IEntity> entities)
{
    foreach (var entity in entities)
    {
        // The effective domain may differ from the caller-supplied one.
        var entityDomain = GetDomain(domain, entity.Domain);

        switch (entityDomain)
        {
        case "Condition":
            var obs = entity as Observation;
            // Plain (non-Observation) rows, or observation-shaped rows whose
            // numeric value is 1, become condition occurrences.
            if (obs == null || obs.ValueAsNumber == 1)
            {
                var cond = entity as ConditionOccurrence ??
                           new ConditionOccurrence(entity)
                           {
                               Id = Offset.GetKeyOffset(entity.PersonId).ConditionOccurrenceId
                           };
                ConditionForEra.Add(cond);
                ChunkData.AddData(cond);
            }
            break;

        case "Measurement":
            // NOTE(review): rows with type concept 38000246 are deliberately
            // skipped here and for Observation — confirm the intent of this
            // specific type concept id.
            if (entity.TypeConceptId != 38000246)
            {
                var mes = entity as Measurement ??
                          new Measurement(entity)
                          {
                              Id = Offset.GetKeyOffset(entity.PersonId).MeasurementId
                          };

                //var result = Vocabulary.Lookup(mes.SourceValue, "JMDC-ICD10-MapsToValue", DateTime.MinValue, false);
                var result = Vocabulary.Lookup(mes.SourceValue, "JMDC-ICD10-MapsToValue", DateTime.MinValue);

                // Fall back to 4181412 when the lookup yields no usable concept.
                int conceptId = 4181412;
                if (result.Any() && result[0].ConceptId.HasValue && result[0].ConceptId > 0)
                {
                    conceptId = result[0].ConceptId.Value;
                }

                // Only fill the value concept when it was not already set.
                if (mes.ValueAsConceptId == 0)
                {
                    mes.ValueAsConceptId = conceptId;
                }

                ChunkData.AddData(mes);
            }
            break;

        case "Meas Value":
            ChunkData.AddData(entity as Measurement ??
                              new Measurement(entity)
                              {
                                  Id = Offset.GetKeyOffset(entity.PersonId).MeasurementId
                              });
            break;

        case "Observation":
            // Mirrors the Measurement branch: same skip rule, same lookup
            // source and fallback concept.
            if (entity.TypeConceptId != 38000246)
            {
                var obser = entity as Observation ??
                            new Observation(entity)
                            {
                                Id = Offset.GetKeyOffset(entity.PersonId).ObservationId
                            };

                //var result = Vocabulary.Lookup(obser.SourceValue, "JMDC-ICD10-MapsToValue", DateTime.MinValue, false);
                var result = Vocabulary.Lookup(obser.SourceValue, "JMDC-ICD10-MapsToValue", DateTime.MinValue);

                int conceptId = 4181412;
                if (result.Any() && result[0].ConceptId.HasValue && result[0].ConceptId > 0)
                {
                    conceptId = result[0].ConceptId.Value;
                }

                if (obser.ValueAsConceptId == 0)
                {
                    obser.ValueAsConceptId = conceptId;
                }

                ChunkData.AddData(obser);
            }
            break;

        case "Procedure":
            var p = entity as ProcedureOccurrence ??
                    new ProcedureOccurrence(entity)
                    {
                        Id = Offset.GetKeyOffset(entity.PersonId)
                             .ProcedureOccurrenceId
                    };
            ChunkData.AddData(p);
            break;

        case "Device":
            ChunkData.AddData(entity as DeviceExposure ??
                              new DeviceExposure(entity)
                              {
                                  Id = Offset.GetKeyOffset(entity.PersonId).DeviceExposureId
                              });
            break;

        case "Drug":
            var drg = entity as DrugExposure ??
                      new DrugExposure(entity)
                      {
                          Id = Offset.GetKeyOffset(entity.PersonId).DrugExposureId
                      };
            DrugForEra.Add(drg);
            ChunkData.AddData(drg);
            break;
        }

        //HIX-823
        // When the domain was re-mapped away from the caller's table, also
        // keep a zero-concept copy in the original table. NOTE(review): the
        // direct casts assume the entity really is of the original type —
        // confirm upstream guarantees this.
        if (domain == "Procedure" && entityDomain != "Procedure")
        {
            var po = (ProcedureOccurrence)entity;
            po.ConceptId = 0;
            ChunkData.AddData(po);
        }

        if (domain == "Observation" && entityDomain != "Observation")
        {
            var o = (Observation)entity;
            o.ConceptId = 0;
            ChunkData.AddData(o);
        }
    }
}
/// <summary> /// parses <vocabulary/> area to Vocabulary /// </summary> /// <param name="vocabularyNode">XML node of <vocabulary/></param> /// <returns>vocabulary object</returns> protected Vocabulary parseVocabulary(XmlNode vocabularyNode) { Vocabulary vocabulary = null; string id = vocabularyNode.Attributes["id"].Value; string type = vocabularyNode.Attributes["type"].Value; XmlNodeList items = vocabularyNode.SelectNodes("./emo:item", nsManager); if (items.Count == 0) { throw new EmotionMLException("Each vocabulary must have at least one item."); } //parse items foreach (XmlNode itemNode in items) { string name = itemNode.Attributes["name"].Value; Item newItem = new Item(name); //parse item XmlNodeList infoNodes = itemNode.SelectNodes("./emo:info", nsManager); if (infoNodes.Count > 1) { throw new EmotionMLException("Only maximum one instance of <info/> is allowed. " + infoNodes.Count + " given."); } else if (infoNodes.Count == 1) { newItem.Info = parseInfo(infoNodes.Item(0)); } //add item to vocabulary if (vocabulary == null) { vocabulary = new Vocabulary(type, id, newItem); } else { vocabulary.addItem(newItem); } } //search for info element XmlNodeList infos = vocabularyNode.SelectNodes("./emo:info", nsManager); if (infos.Count > 1) { throw new EmotionMLException("Only maximum one instance of <info/> is allowed. " + infos.Count + " given."); } else if (infos.Count == 1) { vocabulary.Info = parseInfo(infos.Item(0)); } return vocabulary; }
public DFASerializer(DFA dfa, string[] tokenNames, string[] ruleNames, ATN atn) : this(dfa, Vocabulary.FromTokenNames(tokenNames), ruleNames, atn) { }
/// <summary> /// Query the database for the Vocab we've selected, and fill it's contents /// </summary> /// <param name="lesson"></param> /// <returns></returns> public Vocabulary QueryDatabaseForVocab ( Vocabulary Vocab ) { //Fill the Vocabs words nad meanings Vocab.words = QueryDatabaseForWords( Vocab.ID ); Vocab.wordMeanings = QueryDatabaseForMeanings( Vocab.ID ); return Vocab; }
/// <summary>
/// Builds a Person entity (with its observation-period fields) from the
/// current reader row. Yields nothing when the person id column is null.
/// </summary>
public override IEnumerable <IEntity> GetConcepts(Concept concept, IDataRecord reader, KeyMasterOffsetManager keyOffset)
{
    var locationSourceValue = reader.GetString(Location);
    long? locationId = null;
    // NOTE(review): this guards on the Location column NAME being configured,
    // not on the value read from the row — confirm whether
    // locationSourceValue was intended here.
    if (!string.IsNullOrEmpty(Location))
    {
        locationId = Entity.GetId(locationSourceValue);
    }

    var genderSource = reader.GetString(Gender);
    var genderConceptId = Vocabulary.LookupGender(genderSource);
    var personId = reader.GetLong(PersonId);

    if (personId.HasValue)
    {
        // Copy any configured pass-through columns verbatim (case-insensitive keys).
        Dictionary <string, string> additionalFields = null;
        if (AdditionalFields != null)
        {
            additionalFields = new Dictionary <string, string>(AdditionalFields.Length, StringComparer.OrdinalIgnoreCase);
            foreach (var additionalField in AdditionalFields)
            {
                additionalFields.Add(additionalField, reader.GetString(additionalField));
            }
        }

        var startDate = reader.GetDateTime(StartDate);
        var endDate = reader.GetDateTime(EndDate);

        // Fall back to the date's own time component when no explicit time
        // column is present.
        var startTime = reader.GetTime(StartTime) ?? startDate.ToString("HH:mm:ss", CultureInfo.InvariantCulture);
        var endTime = reader.GetTime(EndTime) ?? endDate.ToString("HH:mm:ss", CultureInfo.InvariantCulture);

        yield return(new Person
        {
            // Default gap of 32 when the column is absent/null.
            ObservationPeriodGap = reader.GetInt(ObservationPeriodGap) ?? 32,
            AdditionalFields = additionalFields,
            PersonId = personId.Value,
            StartDate = startDate,
            EndDate = endDate,
            StartTime = startTime,
            EndTime = endTime,
            PersonSourceValue = reader.GetString(PersonSourceValue),
            GenderSourceValue = genderSource,
            GenderConceptId = genderConceptId ?? 0,
            LocationId = locationId,
            YearOfBirth = reader.GetInt(YearOfBirth),
            MonthOfBirth = reader.GetInt(MonthOfBirth),
            DayOfBirth = reader.GetInt(DayOfBirth),
            LocationSourceValue = locationSourceValue,
            CareSiteId = reader.GetInt(CareSiteId),
            EthnicitySourceValue = reader.GetString(Ethnicity),
            EthnicityConceptId = reader.GetInt(EthnicityConceptId) ?? 0,
            RaceSourceValue = reader.GetString(Race),
            RaceConceptId = reader.GetInt(RaceConceptId) ?? 0,
            ProviderId = reader.GetInt(ProviderId),
            TimeOfBirth = reader.GetTime(TimeOfBirth),
            GenderSourceConceptId = reader.GetInt(GenderSourceConceptId) ?? 0, // CCAE
            RaceSourceConceptId = reader.GetInt(RaceSourceConceptId) ?? 0,
            EthnicitySourceConceptId = reader.GetInt(EthnicitySourceConceptId) ?? 0,
            TypeConceptId = reader.GetInt(PeriodTypeConceptId),
            PotentialChildId = reader.GetLong(PotentialChildId),
            PotentialChildBirthDate = reader.GetDateTime(PotentialChildBirthDate)
        });
    }
}
public IActionResult VocabularyUpdateAjax(Vocabulary vocabulary) { _VocabularyManager.Update(vocabulary); return(Json(new { status = true, message = "Add new vocabulary success" })); }
/// <summary>
/// Builds the reminder toast for one vocabulary entry: definition and
/// examples on top, the word with its IPA and translation below, plus
/// buttons to play either pronunciation, advance to the next word, or open
/// the word in an online dictionary.
/// </summary>
private static ToastContent getToastContent(Vocabulary _item)
{
    // Show both IPA variants when they differ; otherwise just one.
    string _Ipa = _item.Ipa;
    if (_item.Ipa != _item.Ipa2)
    {
        _Ipa = _item.Ipa + " " + _item.Ipa2;
    }

    ToastContent content = new ToastContent()
    {
        Duration = ToastDuration.Long,
        Launch = "vocabulary-reminder",
        // Reminder toasts would normally play a sound; keep them silent.
        Audio = new ToastAudio() { Silent = true },
        Visual = new ToastVisual()
        {
            BindingGeneric = new ToastBindingGeneric()
            {
                // Word type (noun/verb/...) shown as attribution text.
                Attribution = new ToastGenericAttributionText() { Text = _item.Type },
                Children =
                {
                    new AdaptiveText() { Text = _item.Define, },
                    new AdaptiveText() { Text = _item.Example, },
                    new AdaptiveText() { Text = _item.Example2, },
                    new AdaptiveGroup()
                    {
                        Children =
                        {
                            new AdaptiveSubgroup()
                            {
                                Children =
                                {
                                    new AdaptiveText()
                                    {
                                        Text = _item.Word + " " + _Ipa,
                                        HintStyle = AdaptiveTextStyle.Subtitle,
                                    },
                                    new AdaptiveText()
                                    {
                                        Text = _item.Translate,
                                        HintStyle = AdaptiveTextStyle.Base,
                                    },
                                    new AdaptiveText()
                                    {
                                        Text = _item.Related,
                                        HintStyle = AdaptiveTextStyle.CaptionSubtle
                                    }
                                }
                            },
                        }
                    }
                },
                HeroImage = new ToastGenericHeroImage()
                {
                    Source = "https://picsum.photos/364/180?image=1043"
                },
            }
        },
        Scenario = ToastScenario.Reminder,
        Actions = new ToastActionsCustom()
        {
            ContextMenuItems =
            {
                new ToastContextMenuItem("Reload", "action=reload&WordId=" + _item.Id.ToString())
            },
            Buttons =
            {
                // "Play" (filled triangle): first pronunciation audio.
                // Background activation + PendingUpdate keeps the toast open.
                new ToastButton("\u25B6", new QueryString()
                {
                    { "action", "play" },
                    { "WordId", _item.Id.ToString() },
                    { "PlayId", "1" },
                    { "PlayUrl", _item.PlayURL },
                }.ToString())
                {
                    ActivationType = ToastActivationType.Background,
                    ActivationOptions = new ToastActivationOptions()
                    {
                        AfterActivationBehavior = ToastAfterActivationBehavior.PendingUpdate
                    }
                },
                // "Play" (outlined triangle): second pronunciation audio.
                new ToastButton("\u25B7", new QueryString()
                {
                    { "action", "play" },
                    { "WordId", _item.Id.ToString() },
                    { "PlayId", "2" },
                    { "PlayUrl", _item.PlayURL2 },
                }.ToString())
                {
                    ActivationType = ToastActivationType.Background,
                    ActivationOptions = new ToastActivationOptions()
                    {
                        AfterActivationBehavior = ToastAfterActivationBehavior.PendingUpdate
                    }
                },
                // Advance to the next vocabulary word in place.
                new ToastButton("Next", new QueryString()
                {
                    { "action", "next" },
                    { "WordId", _item.Id.ToString() },
                }.ToString())
                {
                    ActivationType = ToastActivationType.Background,
                    ActivationOptions = new ToastActivationOptions()
                    {
                        AfterActivationBehavior = ToastAfterActivationBehavior.PendingUpdate
                    }
                },
                // Open the word in the online dictionary (foreground).
                new ToastButton("View", new QueryString()
                {
                    { "action", "view" },
                    { "url", viewDicOnlineUrl + _item.Word }
                }.ToString()),
                //new ToastButton("Close", "dismiss")
                //{
                //    ActivationType = ToastActivationType.Background
                //},
            }
        },
    };

    return(content);
}
/// <summary>
/// Builds the default vocabulary: English pluralization/singularization
/// regex rules, irregular word pairs, and uncountable words. Rule order is
/// significant and must not be changed.
/// </summary>
private static void BuildDefault()
{
    _default = new Vocabulary();

    _default.AddPlural("$", "s");
    _default.AddPlural("s$", "s");
    _default.AddPlural("(ax|test)is$", "$1es");
    _default.AddPlural("(octop|vir|alumn|fung)us$", "$1i");
    _default.AddPlural("(alias|status)$", "$1es");
    _default.AddPlural("(bu)s$", "$1ses");
    _default.AddPlural("(buffal|tomat|volcan)o$", "$1oes");
    _default.AddPlural("([ti])um$", "$1a");
    _default.AddPlural("sis$", "ses");
    _default.AddPlural("(?:([^f])fe|([lr])f)$", "$1$2ves");
    _default.AddPlural("(hive)$", "$1s");
    _default.AddPlural("([^aeiouy]|qu)y$", "$1ies");
    _default.AddPlural("(x|ch|ss|sh)$", "$1es");
    // FIX: was "(matr|vert|ind)ix|ex$" — the alternation bound to the whole
    // pattern, so ANY word ending in "ex" (e.g. "complex") matched with an
    // empty $1. Grouping the suffix requires one of the listed prefixes.
    _default.AddPlural("(matr|vert|ind)(ix|ex)$", "$1ices");
    // FIX: was "([m|l])ouse$" — inside a character class '|' is a literal,
    // so the class also matched the '|' character. "[ml]" is the intent.
    _default.AddPlural("([ml])ouse$", "$1ice");
    _default.AddPlural("^(ox)$", "$1en");
    _default.AddPlural("(quiz)$", "$1zes");
    _default.AddPlural("(campus)$", "$1es");
    _default.AddPlural("^is$", "are");

    _default.AddSingular("s$", "");
    _default.AddSingular("(n)ews$", "$1ews");
    _default.AddSingular("([ti])a$", "$1um");
    _default.AddSingular("((a)naly|(b)a|(d)iagno|(p)arenthe|(p)rogno|(s)ynop|(t)he)ses$", "$1$2sis");
    _default.AddSingular("(^analy)ses$", "$1sis");
    _default.AddSingular("([^f])ves$", "$1fe");
    _default.AddSingular("(hive)s$", "$1");
    _default.AddSingular("(tive)s$", "$1");
    _default.AddSingular("([lr])ves$", "$1f");
    _default.AddSingular("([^aeiouy]|qu)ies$", "$1y");
    _default.AddSingular("(s)eries$", "$1eries");
    _default.AddSingular("(m)ovies$", "$1ovie");
    _default.AddSingular("(x|ch|ss|sh)es$", "$1");
    // FIX: "[m|l]" -> "[ml]" (same character-class issue as the plural rule).
    _default.AddSingular("([ml])ice$", "$1ouse");
    _default.AddSingular("(bus)es$", "$1");
    _default.AddSingular("(o)es$", "$1");
    _default.AddSingular("(shoe)s$", "$1");
    _default.AddSingular("(cris|ax|test)es$", "$1is");
    _default.AddSingular("(octop|vir|alumn|fung)i$", "$1us");
    _default.AddSingular("(alias|status)es$", "$1");
    _default.AddSingular("^(ox)en", "$1");
    _default.AddSingular("(vert|ind)ices$", "$1ex");
    _default.AddSingular("(matr)ices$", "$1ix");
    _default.AddSingular("(quiz)zes$", "$1");
    _default.AddSingular("(campus)es$", "$1");
    _default.AddSingular("^are$", "is");

    _default.AddIrregular("person", "people");
    _default.AddIrregular("man", "men");
    _default.AddIrregular("child", "children");
    _default.AddIrregular("sex", "sexes");
    _default.AddIrregular("move", "moves");
    _default.AddIrregular("goose", "geese");
    _default.AddIrregular("alumna", "alumnae");
    _default.AddIrregular("criterion", "criteria");
    _default.AddIrregular("wave", "waves");

    _default.AddUncountable("equipment");
    _default.AddUncountable("information");
    _default.AddUncountable("rice");
    _default.AddUncountable("money");
    _default.AddUncountable("species");
    _default.AddUncountable("series");
    _default.AddUncountable("fish");
    _default.AddUncountable("sheep");
    _default.AddUncountable("deer");
    _default.AddUncountable("aircraft");
    _default.AddUncountable("oz");
    _default.AddUncountable("tsp");
    _default.AddUncountable("tbsp");
    _default.AddUncountable("ml");
    _default.AddUncountable("l");
}
/// <summary> /// Gets the terms by vocabularyId. /// </summary> /// <param name="voc">The voc.</param> /// <param name="vocabularyId">The vocabulary id.</param> /// <returns>term collection.</returns> internal static List <Term> GetTerms(this Vocabulary voc, int vocabularyId) { ITermController ctl = Util.GetTermController(); return(ctl.GetTermsByVocabulary(vocabularyId).ToList()); }
/// <summary> /// Deletes the vocabulary. /// </summary> /// <param name="vocabulary">The vocabulary.</param> public void DeleteVocabulary(Vocabulary vocabulary) { _provider.ExecuteNonQuery("DeleteVocabulary", vocabulary.VocabularyId); }
public async Task DeleteVocabulary(Vocabulary vocabulary) { _unitOfWork.Vocabularies.Remove(vocabulary); await _unitOfWork.CommitAsync(); }
public Forth() { dStack = new ForthStack<object>(); aStack = new ForthStack<object>(); rStack = new ForthStack<int>(); searchOrder = new ForthStack<Vocabulary>(); definitions = new Vocabulary("forth"); definitions.Dict.Add("forth", new ForthDictionaryEntry(definitions)); searchOrder.Push(definitions); compileStack = new ForthStack<CompileState>(); loopStack = new ForthStack<LoopDeDoo>(); numericBase = 10; memory = new object[1024]; here = 0; isCompiling = false; // populate initial dictionary Dictionary<string, ForthDictionaryEntry> dict = definitions.Dict; foreach (MethodInfo mi in typeof(Forth).GetMethods(BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Static)) { object[] obj = mi.GetCustomAttributes(typeof(ForthWordAttribute), false); if (obj == null || obj.Length != 1 || !(obj[0] is ForthWordAttribute)) continue; ParameterInfo[] pInfo = mi.GetParameters(); if (pInfo.Length != 1) continue; if (pInfo[0].ParameterType != typeof(Forth)) continue; if (mi.ReturnType != typeof(void)) continue; ForthWordAttribute fwa = obj[0] as ForthWordAttribute; ForthDictionaryEntry fde = new ForthDictionaryEntry ( fwa.Name, Delegate.CreateDelegate ( typeof(ExecutionToken), mi ) as ExecutionToken, fwa.IsImmediate ); dict.Add(fde.Name, fde); } }