public void StoreConfig()
{
    TermData T1 = new TermData(DTT1S.Value, DTT1E.Value, NuSFT1Grant.Value, NuSFT1Loan.Value, DTSFP1.Value, NUBT1.Value, DTBT1.Value);
    TermData T2 = new TermData(DTT2S.Value, DTT2E.Value, NuSFT2Grant.Value, NuSFT2Loan.Value, DTSFP2.Value, NUBT2.Value, DTBT2.Value);
    TermData T3 = new TermData(DTT3S.Value, DTT3E.Value, NuSFT3Grant.Value, NuSFT3Loan.Value, DTSFP3.Value, NUBT3.Value, DTBT3.Value);
    YearInfo YI = new YearInfo(CBYII.Checked, T1, T2, T3);

    int year = (int)NUYear.Value - 1; // zero-based index
    if (year >= 0 && year < YearRecords.Count)
    {
        YearRecords[year] = YI; // year already recorded: overwrite the existing entry
    }
    else
    {
        YearRecords.Add(YI); // populate empty | append
    }

    RC = new Configuration(NUYear.Value, YearRecords);

    XmlSerializer XSR = new XmlSerializer(typeof(Configuration));
    using (FileStream ConfigStream = new FileStream("Finances.rc", FileMode.Create))
    {
        try
        {
            XSR.Serialize(ConfigStream, RC);
        }
        catch (Exception Ex)
        {
            MessageBox.Show(Ex.GetBaseException().ToString());
        }
    }

    LoadConfig();
    MessageBox.Show("Configurations recorded");
}
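// The LoadConfig() invoked above isn't shown in this excerpt. A minimal sketch of the
// matching deserialization, assuming Configuration round-trips through XmlSerializer
// exactly as StoreConfig writes it (the RC field and the "Finances.rc" file name come
// from the code above; everything else is an assumption):
public void LoadConfig()
{
    if (!File.Exists("Finances.rc"))
    {
        return; // nothing saved yet
    }
    XmlSerializer serializer = new XmlSerializer(typeof(Configuration));
    using (FileStream configStream = new FileStream("Finances.rc", FileMode.Open))
    {
        try
        {
            RC = (Configuration)serializer.Deserialize(configStream);
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.GetBaseException().ToString());
        }
    }
}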
public virtual void _run()
{
    for (int iter = 0; iter < NUM_TEST_ITER; iter++)
    {
        FieldData field = Fields[Random.Next(Fields.Length)];
        TermsEnum termsEnum = TermsDict.GetTerms(field.FieldInfo.Name).GetIterator(null);
#pragma warning disable 612, 618
        if (Si.Codec is Lucene3xCodec)
#pragma warning restore 612, 618
        {
            // code below expects unicode sort order
            continue;
        }

        int upto = 0;

        // Test straight enum of the terms:
        while (true)
        {
            BytesRef term = termsEnum.Next();
            if (term == null)
            {
                break;
            }
            BytesRef expected = new BytesRef(field.Terms[upto++].Text2);
            Assert.IsTrue(expected.BytesEquals(term), "expected=" + expected + " vs actual " + term);
        }
        Assert.AreEqual(upto, field.Terms.Length);

        // Test random seek:
        TermData term2 = field.Terms[Random.Next(field.Terms.Length)];
        TermsEnum.SeekStatus status = termsEnum.SeekCeil(new BytesRef(term2.Text2));
        Assert.AreEqual(status, TermsEnum.SeekStatus.FOUND);
        Assert.AreEqual(term2.Docs.Length, termsEnum.DocFreq);
        if (field.OmitTF)
        {
            this.VerifyDocs(term2.Docs, term2.Positions, TestUtil.Docs(Random, termsEnum, null, null, DocsFlags.NONE), false);
        }
        else
        {
            this.VerifyDocs(term2.Docs, term2.Positions, termsEnum.DocsAndPositions(null, null), true);
        }

        // Test random seek by ord:
        int idx = Random.Next(field.Terms.Length);
        term2 = field.Terms[idx];
        bool success = false;
        try
        {
            termsEnum.SeekExact(idx);
            success = true;
        }
        catch (System.NotSupportedException)
        {
            // ok -- skip it; the codec doesn't support seek-by-ord
        }
        if (success)
        {
            Assert.AreEqual(status, TermsEnum.SeekStatus.FOUND);
            Assert.IsTrue(termsEnum.Term.BytesEquals(new BytesRef(term2.Text2)));
            Assert.AreEqual(term2.Docs.Length, termsEnum.DocFreq);
            if (field.OmitTF)
            {
                this.VerifyDocs(term2.Docs, term2.Positions, TestUtil.Docs(Random, termsEnum, null, null, DocsFlags.NONE), false);
            }
            else
            {
                this.VerifyDocs(term2.Docs, term2.Positions, termsEnum.DocsAndPositions(null, null), true);
            }
        }

        // Test seek to non-existent terms:
        if (VERBOSE)
        {
            Console.WriteLine("TEST: seek non-exist terms");
        }
        for (int i = 0; i < 100; i++)
        {
            string text2 = TestUtil.RandomUnicodeString(Random) + ".";
            status = termsEnum.SeekCeil(new BytesRef(text2));
            Assert.IsTrue(status == TermsEnum.SeekStatus.NOT_FOUND || status == TermsEnum.SeekStatus.END);
        }

        // Seek to each term, backwards:
        if (VERBOSE)
        {
            Console.WriteLine("TEST: seek terms backwards");
        }
        for (int i = field.Terms.Length - 1; i >= 0; i--)
        {
            Assert.AreEqual(TermsEnum.SeekStatus.FOUND, termsEnum.SeekCeil(new BytesRef(field.Terms[i].Text2)), Thread.CurrentThread.Name + ": field=" + field.FieldInfo.Name + " term=" + field.Terms[i].Text2);
            Assert.AreEqual(field.Terms[i].Docs.Length, termsEnum.DocFreq);
        }

        // Seek to each term by ord, backwards:
        for (int i = field.Terms.Length - 1; i >= 0; i--)
        {
            try
            {
                termsEnum.SeekExact(i);
                Assert.AreEqual(field.Terms[i].Docs.Length, termsEnum.DocFreq);
                Assert.IsTrue(termsEnum.Term.BytesEquals(new BytesRef(field.Terms[i].Text2)));
            }
            catch (System.NotSupportedException)
            {
                // ok -- the codec doesn't support seek-by-ord
            }
        }

        // Seek to non-existent empty-string term:
        status = termsEnum.SeekCeil(new BytesRef(""));
        Assert.IsNotNull(status);
        //Assert.AreEqual(TermsEnum.SeekStatus.NOT_FOUND, status);

        // Make sure we're now pointing to first term:
        Assert.IsTrue(termsEnum.Term.BytesEquals(new BytesRef(field.Terms[0].Text2)));

        // Test docs enum:
        termsEnum.SeekCeil(new BytesRef(""));
        upto = 0;
        do
        {
            term2 = field.Terms[upto];
            if (Random.Next(3) == 1)
            {
                DocsEnum docs;
                DocsEnum docsAndFreqs;
                DocsAndPositionsEnum postings;
                if (!field.OmitTF)
                {
                    postings = termsEnum.DocsAndPositions(null, null);
                    if (postings != null)
                    {
                        docs = docsAndFreqs = postings;
                    }
                    else
                    {
                        docs = docsAndFreqs = TestUtil.Docs(Random, termsEnum, null, null, DocsFlags.FREQS);
                    }
                }
                else
                {
                    postings = null;
                    docsAndFreqs = null;
                    docs = TestUtil.Docs(Random, termsEnum, null, null, DocsFlags.NONE);
                }
                Assert.IsNotNull(docs);
                int upto2 = -1;
                bool ended = false;
                while (upto2 < term2.Docs.Length - 1)
                {
                    // Maybe skip:
                    int left = term2.Docs.Length - upto2;
                    int doc;
                    if (Random.Next(3) == 1 && left >= 1)
                    {
                        int inc = 1 + Random.Next(left - 1);
                        upto2 += inc;
                        if (Random.Next(2) == 1)
                        {
                            doc = docs.Advance(term2.Docs[upto2]);
                            Assert.AreEqual(term2.Docs[upto2], doc);
                        }
                        else
                        {
                            doc = docs.Advance(1 + term2.Docs[upto2]);
                            if (doc == DocIdSetIterator.NO_MORE_DOCS)
                            {
                                // skipped past last doc
                                Debug.Assert(upto2 == term2.Docs.Length - 1);
                                ended = true;
                                break;
                            }
                            else
                            {
                                // skipped to next doc
                                Debug.Assert(upto2 < term2.Docs.Length - 1);
                                if (doc >= term2.Docs[1 + upto2])
                                {
                                    upto2++;
                                }
                            }
                        }
                    }
                    else
                    {
                        doc = docs.NextDoc();
                        Assert.IsTrue(doc != -1);
                        upto2++;
                    }
                    Assert.AreEqual(term2.Docs[upto2], doc);
                    if (!field.OmitTF)
                    {
                        Assert.AreEqual(term2.Positions[upto2].Length, postings.Freq);
                        if (Random.Next(2) == 1)
                        {
                            this.VerifyPositions(term2.Positions[upto2], postings);
                        }
                    }
                }
                if (!ended)
                {
                    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docs.NextDoc());
                }
            }
            upto++;
        } while (termsEnum.Next() != null);
        Assert.AreEqual(upto, field.Terms.Length);
    }
}
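// The VerifyDocs helper used above isn't shown in this excerpt. Based on how it's
// called (expected docs, per-doc positions, an enum to check, and a doPos flag),
// a sketch of what it asserts could look like this; treat the body as an
// assumption rather than the actual test helper:
private void VerifyDocs(int[] docs, PositionData[][] positions, DocsEnum docsEnum, bool doPos)
{
    for (int i = 0; i < docs.Length; i++)
    {
        int doc = docsEnum.NextDoc();
        Assert.IsTrue(doc != DocIdSetIterator.NO_MORE_DOCS);
        Assert.AreEqual(docs[i], doc);
        if (doPos)
        {
            // positions[i] holds the expected positions for this doc
            this.VerifyPositions(positions[i], (DocsAndPositionsEnum)docsEnum);
        }
    }
    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docsEnum.NextDoc());
}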
public virtual void TestFixedPostings()
{
    const int NUM_TERMS = 100;
    TermData[] terms = new TermData[NUM_TERMS];
    for (int i = 0; i < NUM_TERMS; i++)
    {
        int[] docs = new int[] { i };
        string text = Convert.ToString(i);
        terms[i] = new TermData(this, text, docs, null);
    }

    FieldInfos.Builder builder = new FieldInfos.Builder();
    FieldData field = new FieldData(this, "field", builder, terms, true, false);
    FieldData[] fields = new FieldData[] { field };
    FieldInfos fieldInfos = builder.Finish();

    // LUCENENET specific - BUG: we must wrap this in a using block in case anything in the below loop throws
    using (Directory dir = NewDirectory())
    {
        this.Write(fieldInfos, dir, fields, true);
        Codec codec = Codec.Default;
        SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null);

        // LUCENENET specific - BUG: we must wrap this in a using block in case anything in the below loop throws
        using (FieldsProducer reader = codec.PostingsFormat.FieldsProducer(new SegmentReadState(dir, si, fieldInfos, NewIOContext(Random), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR)))
        {
            IEnumerator<string> fieldsEnum = reader.GetEnumerator();
            fieldsEnum.MoveNext();
            string fieldName = fieldsEnum.Current;
            Assert.IsNotNull(fieldName);
            Terms terms2 = reader.GetTerms(fieldName);
            Assert.IsNotNull(terms2);

            TermsEnum termsEnum = terms2.GetIterator(null);
            DocsEnum docsEnum = null;
            for (int i = 0; i < NUM_TERMS; i++)
            {
                BytesRef term = termsEnum.Next();
                Assert.IsNotNull(term);
                Assert.AreEqual(terms[i].Text2, term.Utf8ToString());

                // do this twice to stress test the codec's reuse, ie,
                // make sure it properly fully resets (rewinds) its
                // internal state:
                for (int iter = 0; iter < 2; iter++)
                {
                    docsEnum = TestUtil.Docs(Random, termsEnum, null, docsEnum, DocsFlags.NONE);
                    Assert.AreEqual(terms[i].Docs[0], docsEnum.NextDoc());
                    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docsEnum.NextDoc());
                }
            }
            Assert.IsNull(termsEnum.Next());

            for (int i = 0; i < NUM_TERMS; i++)
            {
                Assert.AreEqual(termsEnum.SeekCeil(new BytesRef(terms[i].Text2)), TermsEnum.SeekStatus.FOUND);
            }

            Assert.IsFalse(fieldsEnum.MoveNext());
        }
    }
}
internal virtual TermData[] MakeRandomTerms(bool omitTF, bool storePayloads)
{
    int numTerms = 1 + Random.Next(NUM_TERMS_RAND);
    //final int numTerms = 2;
    TermData[] terms = new TermData[numTerms];
    ISet<string> termsSeen = new JCG.HashSet<string>();

    for (int i = 0; i < numTerms; i++)
    {
        // Make term text: unique, and not ending in "." (that suffix is reserved
        // for the non-existent-term seeks in _run):
        string text2;
        while (true)
        {
            text2 = TestUtil.RandomUnicodeString(Random);
            if (!termsSeen.Contains(text2) && !text2.EndsWith(".", StringComparison.Ordinal))
            {
                termsSeen.Add(text2);
                break;
            }
        }

        int docFreq = 1 + Random.Next(DOC_FREQ_RAND);
        int[] docs = new int[docFreq];
        PositionData[][] positions = omitTF ? null : new PositionData[docFreq][];

        int docID = 0;
        for (int j = 0; j < docFreq; j++)
        {
            docID += TestUtil.NextInt32(Random, 1, 10);
            docs[j] = docID;

            if (!omitTF)
            {
                int termFreq = 1 + Random.Next(TERM_DOC_FREQ_RAND);
                positions[j] = new PositionData[termFreq];
                int position = 0;
                for (int k = 0; k < termFreq; k++)
                {
                    position += TestUtil.NextInt32(Random, 1, 10);

                    BytesRef payload;
                    if (storePayloads && Random.Next(4) == 0)
                    {
                        var bytes = new byte[1 + Random.Next(5)];
                        for (int l = 0; l < bytes.Length; l++)
                        {
                            bytes[l] = (byte)Random.Next(255);
                        }
                        payload = new BytesRef(bytes);
                    }
                    else
                    {
                        payload = null;
                    }

                    positions[j][k] = new PositionData(this, position, payload);
                }
            }
        }

        terms[i] = new TermData(this, text2, docs, positions);
    }
    return terms;
}
private static bool StatsEqual(TermData t1, TermData t2)
{
    return t1.docFreq == t2.docFreq && t1.totalTermFreq == t2.totalTermFreq;
}
public override string OutputToString(TermData data)
{
    return data.ToString();
}
internal static bool StatsEqual(TermData t1, TermData t2)
{
    return t1.DOC_FREQ == t2.DOC_FREQ && t1.TOTAL_TERM_FREQ == t2.TOTAL_TERM_FREQ;
}
public YearInfo(bool YearInIndustry, TermData T1, TermData T2, TermData T3)
{
    this.YearInIndustry = YearInIndustry;
    this.T1 = T1;
    this.T2 = T2;
    this.T3 = T3;
}
/// <summary>
/// Main entry method for transforming terms
/// </summary>
public TermData Transform(TermData inputSourceTerm)
{
    // Design:
    //  This has two modes:
    //    Default mode: work out the terms from source to destination based on identical IDs or term paths
    //    Mapping file: override default mode by specifically mapping a source term to a destination term
    // Scenarios:
    //  Term IDs or term names
    //  Source or target term ID/name may not be found

    // Default Mode
    if (!this.skipTermStoreMapping && !_baseTransformationInformation.IsCrossFarmTransformation)
    {
        var resolvedInputMapping = ResolveTermInCache(this._sourceContext, inputSourceTerm.TermGuid);
        if (resolvedInputMapping.IsTermResolved)
        {
            // Check if the source term ID exists in the target, then map.
            var resolvedInputMappingInTarget = ResolveTermInCache(this._targetContext, inputSourceTerm.TermGuid);
            if (resolvedInputMappingInTarget.IsTermResolved && !resolvedInputMapping.IsSourceTerm)
            {
                inputSourceTerm.IsTermResolved = true; // happy that the term ID is the same as source
                inputSourceTerm.TermLabel = resolvedInputMappingInTarget.TermLabel; // just in case the IDs are the same and the labels are not
                return inputSourceTerm;
            }

            // Check if the term labels are the same while the IDs may differ - in this
            // scenario, validate whether the term paths are the same; if so, auto-map.
            resolvedInputMappingInTarget = ResolveTermInCache(this._targetContext, resolvedInputMapping.TermPath);
            if (resolvedInputMappingInTarget.IsTermResolved && !resolvedInputMapping.IsSourceTerm)
            {
                inputSourceTerm.IsTermResolved = true; // happy that the term path is the same as source
                inputSourceTerm.TermGuid = resolvedInputMappingInTarget.TermGuid; // just in case the labels are the same and the IDs are not
                return inputSourceTerm;
            }
        }
    }

    // Mapping Mode
    if (termMappings != null)
    {
        var resolvedInputMapping = ResolveTermInCache(this._sourceContext, inputSourceTerm.TermGuid);

        // Check source mappings
        foreach (var mapping in termMappings)
        {
            // Simple check: if the mapping contains the | delimiter, treat it as a term path
            if (mapping.SourceTerm.Contains("|"))
            {
                // Term path - if found, validate against the term cache
                if (resolvedInputMapping.TermPath == mapping.SourceTerm)
                {
                    var resolvedTargetMapping = ResolveTermInCache(this._targetContext, mapping.TargetTerm);
                    if (resolvedTargetMapping != default)
                    {
                        return resolvedTargetMapping;
                    }
                    else
                    {
                        // Log failure in resolving the target mapping
                        LogWarning(string.Format(LogStrings.Warning_TermMappingFailedResolveTarget, mapping.TargetTerm), LogStrings.Heading_TermMapping);
                    }
                }
            }
            else
            {
                // GUID
                if (Guid.TryParse(mapping.SourceTerm, out Guid mappingSourceTermId))
                {
                    // Found
                    if (resolvedInputMapping.TermGuid == mappingSourceTermId)
                    {
                        if (Guid.TryParse(mapping.TargetTerm, out Guid mappingTargetTermId))
                        {
                            var resolvedTargetMapping = ResolveTermInCache(this._targetContext, mappingTargetTermId);
                            if (resolvedTargetMapping != default)
                            {
                                return resolvedTargetMapping;
                            }
                            else
                            {
                                // Log failure in resolving the target mapping
                                LogWarning(string.Format(LogStrings.Warning_TermMappingFailedResolveTarget, mapping.TargetTerm), LogStrings.Heading_TermMapping);
                            }
                        }
                        else
                        {
                            // Target is not a GUID; resolve it as a term path instead
                            var resolvedTargetMapping = ResolveTermInCache(this._targetContext, mapping.TargetTerm);
                            if (resolvedTargetMapping != default)
                            {
                                return resolvedTargetMapping;
                            }
                            else
                            {
                                // Log failure in resolving the target mapping
                                LogWarning(string.Format(LogStrings.Warning_TermMappingFailedResolveTarget, mapping.TargetTerm), LogStrings.Heading_TermMapping);
                            }
                        }
                    }
                }
                else
                {
                    // Failure in parsing the source term ID; skip this mapping
                }
            }
        }

        // Log failure in mapping
        LogWarning(string.Format(LogStrings.Warning_TermMappingFailedMapping, inputSourceTerm.TermGuid, inputSourceTerm.TermLabel), LogStrings.Heading_TermMapping);
    }

    return inputSourceTerm; // pass-through
}
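// Hypothetical call site for Transform, shown for context only. TermMapping with
// SourceTerm/TargetTerm string properties is inferred from the loop above; the
// GUIDs, the mappings list, and the "transformator" instance are placeholders.
var mappings = new List<TermMapping>
{
    new TermMapping
    {
        SourceTerm = "11111111-1111-1111-1111-111111111111", // source term ID
        TargetTerm = "22222222-2222-2222-2222-222222222222"  // target term ID in the destination term store
    }
};
var input = new TermData { TermGuid = new Guid("11111111-1111-1111-1111-111111111111") };
TermData result = transformator.Transform(input); // unresolved terms fall back to pass-through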
private bool IsTermUpdated(Term dbTerm, TermData termData)
{
    return dbTerm.Text != termData.Text
        || dbTerm.Position != termData.Position
        || dbTerm.TermCategory.Name != termData.TermCategoryName;
}
public static void ProcessAllPrefabs()
{
    int languageIndex = GetEnglishLanguageIndex();
    if (languageIndex == -1)
    {
        return;
    }

    List<GameObject> allPrefabs = PrefabLoader.LoadAllPrefabs("Assets/Prefabs");
    foreach (GameObject prefab in allPrefabs)
    {
        MonoBehaviour[] monoBehaviours = prefab.GetComponentsInChildren<MonoBehaviour>();
        foreach (MonoBehaviour mb in monoBehaviours)
        {
            if (mb == null)
            {
                Debug.Log(prefab.name + " referencing dead component", prefab);
                continue;
            }

            string chunks = string.Empty;
            SerializedObject mbSO = new SerializedObject(mb);
            SerializedProperty propIter = mbSO.GetIterator();
            propIter.Next(true); // step past the root so the loop starts at the first real property
            bool anyChanged = false;
            while (propIter.Next(true))
            {
                if (propIter.type == typeof(DialogueChunk).ToString() && propIter.isArray)
                {
                    for (int j = 0; j < propIter.arraySize; j++)
                    {
                        SerializedProperty chunkProp = propIter.GetArrayElementAtIndex(j);
                        SerializedProperty dialogueTermProp = chunkProp.FindPropertyRelative("dialogueTerm");

                        // Build a readable term name from the property path,
                        // e.g. "_chunks.Array.data[0]" becomes "Chunks-0"
                        string displayPropPath = chunkProp.propertyPath
                            .Replace("_", "")
                            .Replace(".Array.data", "")
                            .Replace("[", "-")
                            .Replace("]", "");
                        displayPropPath = char.ToUpper(displayPropPath[0]) + displayPropPath.Substring(1);
                        string termName = string.Format("Dialogue/{0}_{1}", prefab.name, displayPropPath);

                        TermData termData = LocalizationManager.GetTermData(termName);
                        if (termData == null)
                        {
                            termData = LocalizationManager.Sources[0].AddTerm(termName);
                            chunks += termName + "\n";
                        }

                        string prevTranslation = termData.GetTranslation(languageIndex);
                        string newTranslation = chunkProp.FindPropertyRelative("dialogue").stringValue;
                        if (prevTranslation == string.Empty || newTranslation != prevTranslation || dialogueTermProp.stringValue != termName)
                        {
                            termData.SetTranslation(languageIndex, newTranslation);
                            dialogueTermProp.stringValue = termName;
                            anyChanged = true;
                        }
                    }
                }
            }

            if (anyChanged)
            {
                mbSO.ApplyModifiedProperties();
                EditorUtility.SetDirty(LocalizationManager.Sources[0]);
            }
            if (chunks != string.Empty)
            {
                Debug.Log(prefab.name + ":\n" + chunks);
            }
        }
    }
    AssetDatabase.SaveAssets();
}
public static void FixBrokenTermPopups()
{
    // Iterate over all prefabs, find TermsPopup-attributed properties, check whether
    // each holds a valid term; if not, try to fix it by matching against the term
    // list with the category ignored (e.g. a term saved without its category).
    int languageIndex = GetEnglishLanguageIndex();
    if (languageIndex == -1)
    {
        return;
    }

    string fixedProperties = string.Empty;
    List<GameObject> allPrefabs = PrefabLoader.LoadAllPrefabs("Assets/Prefabs");
    foreach (GameObject prefab in allPrefabs)
    {
        MonoBehaviour[] monoBehaviours = prefab.GetComponentsInChildren<MonoBehaviour>();
        foreach (MonoBehaviour mb in monoBehaviours)
        {
            if (mb == null)
            {
                Debug.Log(prefab.name + " referencing dead component", prefab);
                continue;
            }

            SerializedObject mbSO = new SerializedObject(mb);
            bool anyChanged = false;
            FieldInfo[] fieldInfos = mb.GetType().GetFields(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic);
            foreach (FieldInfo fieldInfo in fieldInfos)
            {
                object[] attributes = fieldInfo.GetCustomAttributes(true);
                foreach (object attribute in attributes)
                {
                    TermsPopup termsPopup = attribute as TermsPopup;
                    if (termsPopup != null)
                    {
                        SerializedProperty termProp = mbSO.FindProperty(fieldInfo.Name);
                        string termName = termProp.stringValue;
                        TermData termData = LocalizationManager.GetTermData(termName);
                        if (termData == null)
                        {
                            // Term not found as-is: search the source dictionary for a
                            // term that matches when the category is ignored.
                            foreach (var kvp in LocalizationManager.Sources[0].mDictionary)
                            {
                                if (kvp.Value.IsTerm(termName, true))
                                {
                                    termData = kvp.Value;
                                    termProp.stringValue = termData.Term;
                                    fixedProperties += termProp.propertyPath + "\n";
                                    anyChanged = true;
                                    break;
                                }
                            }
                            break;
                        }
                    }
                }
            }

            if (anyChanged)
            {
                mbSO.ApplyModifiedProperties();
            }
        }
    }

    if (fixedProperties != string.Empty)
    {
        Debug.Log("Fixed Properties:\n" + fixedProperties);
        AssetDatabase.SaveAssets();
    }
}
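// Both editor utilities above call GetEnglishLanguageIndex(), which isn't shown in
// this excerpt. A plausible sketch against I2 Localization's language-source API,
// assuming the first source holds the language list (treat the exact call as an
// assumption):
private static int GetEnglishLanguageIndex()
{
    int languageIndex = LocalizationManager.Sources[0].GetLanguageIndex("English");
    if (languageIndex == -1)
    {
        Debug.LogError("English language not found in LocalizationManager.Sources[0]");
    }
    return languageIndex;
}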