public JsonResult GetCategorySingleContent(InformationContentPostModel model)
{
    InfrastructureModel<InformationContentSingleDataModel> response = new InfrastructureModel<InformationContentSingleDataModel>();
    InformationContentSingleDataModel resultModel = new InformationContentSingleDataModel();
    try
    {
        InformationContent informationContext = informationContentOperation.GetCategorySingleInformationContent(model.AppUserId, model.CategoryId);
        resultModel.AuthorFullName = informationContext.Author.Name + " " + informationContext.Author.Surname;
        resultModel.Explanation = informationContext.Explanation;
        resultModel.ImagePath = informationContext.PostImagePath;
        resultModel.LikeCount = informationContext.LikeCount.ToString();
        resultModel.Title = informationContext.Title;
        response.ResultModel = resultModel;
        response.ResultStatus = true;
    }
    catch (Exception)
    {
        response.ResultStatus = false;
        throw; // rethrow without resetting the stack trace
    }
    return new JsonResult(response);
}
/// <summary>
/// Method for collecting candidates for replacing target words (at the sentence level). Candidates are retrieved as the most similar words obtained via word embeddings (cosine similarity between embedding vectors).
/// </summary>
/// <param name="sentence">Sentence to be simplified</param>
/// <param name="substCandidates">List of all substitution candidates</param>
/// <param name="icTreshold">The information content threshold</param>
/// <param name="word">Target word to be replaced (if not provided, all content words are considered for replacement)</param>
public void CollectCandidates(SentenceAnnotation sentence, List<Tuple<TokenAnnotation, List<Tuple<string, double>>>> substCandidates, double icTreshold, string word = null)
{
    List<string> sides = new List<string> { "north", "west", "south", "east" };
    // content words only: skip conjunctions, named entities, and compass directions
    var contentTokens = sentence.Tokens.Where(t => t.IsContent() && !t.POSTag.StartsWith("C") && string.IsNullOrEmpty(t.NamedEntity) && !sides.Any(si => t.Text.ToLower().Contains(si))).ToList();
    contentTokens.ForEach(ct =>
    {
        var ic = InformationContent.GetRelativeInformationContent(string.IsNullOrEmpty(ct.Lemma) ? ct.Text.ToLower() : ct.Lemma.ToLower());
        // only sufficiently informative words; mid-sentence capitalized tokens (likely proper nouns) are skipped
        if (ic > icTreshold && (!char.IsUpper(ct.Text[0]) || sentence.Tokens.IndexOf(ct) == 0))
        {
            if (word == null || ct.Text.ToLower() == word.ToLower())
            {
                var candidates = new List<Tuple<string, double>>();
                var cands1 = VectorSpace.GetMostSimilar(ct.Text.ToLower(), 30);
                if (cands1 != null)
                {
                    candidates.AddRange(cands1);
                }
                if (!string.IsNullOrEmpty(ct.Lemma))
                {
                    var cands2 = VectorSpace.GetMostSimilar(ct.Lemma.ToLower(), 30);
                    if (cands2 != null)
                    {
                        candidates.AddRange(cands2);
                    }
                }
                candidates = candidates.OrderByDescending(x => x.Item2).ToList();
                substCandidates.Add(new Tuple<TokenAnnotation, List<Tuple<string, double>>>(ct, candidates));
            }
        }
    });
}
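A minimal usage sketch for CollectCandidates, assuming it lives on a LexicalSimplifier whose InformationContent and VectorSpace properties are already loaded and that `document` has been run through the annotator chain (as in Main below); the threshold value is illustrative only.

// Hedged sketch: gather replacement candidates for every eligible content word
// in the first sentence; each entry pairs a token with its (word, similarity) list.
var collected = new List<Tuple<TokenAnnotation, List<Tuple<string, double>>>>();
simplifier.CollectCandidates(document.Sentences[0], collected, icTreshold: 0.5);
foreach (var pair in collected)
{
    var top = string.Join(", ", pair.Item2.Take(5).Select(c => c.Item1 + " (" + c.Item2.ToString("F2") + ")"));
    Console.WriteLine(pair.Item1.Text + " -> " + top);
}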
public InformationContent GetSingleInformationContent(int appUserId = 0)
{
    List<int> readList = informationReadLogOperation.GetReadedInformationContentByAppUserId(appUserId);
    InformationContent informationContent = new InformationContent();
    if (appUserId != 0)
    {
        informationContent = context.InformationContent
            .Include(s => s.Author)
            .Include(a => a.Category)
            .Where(s => s.IsActive && !s.IsDeleted && !readList.Contains(s.InformationContentId))
            .OrderBy(s => Guid.NewGuid())
            .Take(1)
            .SingleOrDefault();
        if (informationContent != null)
        {
            DATA.Models.InformationReadLog informationReadLog = new DATA.Models.InformationReadLog()
            {
                AppUserId = appUserId,
                CreateDate = DateTime.Now,
                InformationContentId = informationContent.InformationContentId,
                IsActive = true,
                IsDeleted = false,
            };
            informationReadLogOperation.InsertInformationContentReadList(informationReadLog);
        }
    }
    else
    {
        informationContent = context.InformationContent
            .Where(s => s.IsActive && !s.IsDeleted)
            .OrderBy(s => Guid.NewGuid())
            .Take(1)
            .SingleOrDefault();
    }
    return informationContent;
}
public InformationContent GetCategorySingleInformationContent(int appUserId = 0, int categoryId = 0)
{
    List<int> readList = informationReadLogOperation.GetReadedInformationContentByAppUserId(appUserId);
    InformationContent informationContent = new InformationContent();
    if (readList != null)
    {
        informationContent = context.InformationContent
            .Include(s => s.Author)
            .Include(a => a.Category)
            .Where(s => s.IsActive && !s.IsDeleted && !readList.Contains(s.InformationContentId) && s.CategoryId == categoryId)
            .OrderBy(s => Guid.NewGuid())
            .Take(1)
            .SingleOrDefault();
    }
    else
    {
        informationContent = context.InformationContent
            .Where(s => s.IsActive && !s.IsDeleted && s.CategoryId == categoryId)
            .OrderBy(s => Guid.NewGuid())
            .Take(1)
            .SingleOrDefault();
    }
    return informationContent;
}
public void DoRandomWalkThread(int start, int end, Landscape landscape, ResearchParameters parameters, IOperator op, StringBuilder dataBuilder, Action<string, float> callback, string connectionId, float step)
{
    for (int j = start; j < end; ++j)
    {
        var rwResult = landscape.RandomWalk(parameters.RandomWalkSteps, op);
        float ac = Autocorrelation.Run(rwResult);
        float ic = InformationContent.Run(rwResult, parameters.Sensitivity);
        float pic = PartialInformationContent.Run(rwResult, parameters.Sensitivity);
        float dbi = DensityBasinInformation.Run(rwResult, parameters.Sensitivity);
        string line = (float.IsNaN(ac) ? FLOAT_PATTERN : ac.ToString(FLOAT_PATTERN)) + SEPARATOR +
                      (float.IsNaN(ic) ? FLOAT_PATTERN : ic.ToString(FLOAT_PATTERN)) + SEPARATOR +
                      (float.IsNaN(pic) ? FLOAT_PATTERN : pic.ToString(FLOAT_PATTERN)) + SEPARATOR +
                      (float.IsNaN(dbi) ? FLOAT_PATTERN : dbi.ToString(FLOAT_PATTERN));
        dataBuilder.AppendLine(line);
        callback(connectionId, step);
    }
}
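A hedged sketch of how this worker could be driven from a few threads; `totalWalks`, `landscape`, `parameters`, `op`, `callback`, `connectionId` and `step` are assumed to exist as in the method above, and nothing here is taken from the original caller.

// Hypothetical driver: partitions the walks across worker threads.
// Note: StringBuilder is not thread-safe, so a real caller would need one
// builder per thread (or a lock around AppendLine) before merging results.
int threadCount = 4;
int walksPerThread = totalWalks / threadCount;
var dataBuilder = new StringBuilder();
var threads = new List<Thread>();
for (int t = 0; t < threadCount; t++)
{
    int start = t * walksPerThread;
    int end = (t == threadCount - 1) ? totalWalks : start + walksPerThread;
    var worker = new Thread(() => DoRandomWalkThread(start, end, landscape, parameters, op, dataBuilder, callback, connectionId, step));
    threads.Add(worker);
    worker.Start();
}
threads.ForEach(t => t.Join());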
/// <summary>
/// A place for experimenting/running code
/// </summary>
/// <param name="args">Command line arguments</param>
public static void Main(string[] args)
{
    Console.WriteLine("Loading resources...");
    var tmp = Console.Out;
    Console.SetOut(TextWriter.Null);
    // setting the culture to en-US (e.g., decimal point is used instead of decimal comma)
    Thread.CurrentThread.CurrentCulture = new System.Globalization.CultureInfo("en-US");
    #region Usage example code
    // Loading the information contents based on word frequencies from a large corpus
    InformationContent ic = new InformationContent(ConfigurationManager.AppSettings["other-resources"] + "\\word-freqs.txt");
    // Loading the GloVe embeddings into an instance of WordVectorSpace class
    WordVectorSpace wvs = new WordVectorSpace();
    wvs.Load(ConfigurationManager.AppSettings["other-resources"] + "\\glove-vectors-6b-200d.txt", null);
    // Preprocessing tools required for preprocessing the free-text document
    AnnotatorChain ac = new AnnotatorChain(TakeLabCore.NLP.Language.English, new List<AnnotatorType> { AnnotatorType.SentenceSplitter, AnnotatorType.POSTagger, AnnotatorType.Morphology, AnnotatorType.NamedEntities });
    // Instantiating the lexical simplifier
    LexicalSimplifier simplifier = new LexicalSimplifier();
    // setting the information content and word vector space properties
    simplifier.InformationContent = ic;
    simplifier.VectorSpace = wvs;
    simplifier.CandidateInPoSLookup = TakeLab.Utilities.IO.StringLoader.LoadDictionaryStrings(ConfigurationManager.AppSettings["other-resources"] + "\\candidate-in-pos-lookup.txt");
    // dummy simplification call to force loading of language model resources
    var dLoad = new Document { Text = "want wish" };
    dLoad.Annotate(ac);
    simplifier.Simplify(dLoad, 5, 0.6, 0.5);
    Console.SetOut(tmp);
    Console.WriteLine("Loading resources done!
\n\n [Light-LS v.0.9 Copyright TakeLab]\n Enter the text to simplify...\n\n"); while(true) { try { var line = Console.ReadLine(); if (string.IsNullOrEmpty(line)) break; var document = new Document { Text = line }; document.Annotate(ac); var dSimple = simplifier.Simplify(document, 5, 0.6, 0.5); var substitutions = simplifier.LastSubstitutions; foreach (var v in substitutions) { Console.WriteLine(v.Item1.Text + " -> " + v.Item2); } Console.WriteLine("Simplified text: \n"); Console.WriteLine(dSimple.Text); Console.WriteLine("\n\n [Light-LS v.0.9 Copyright TakeLab]\n Enter the text to simplify...\n\n"); } catch { Console.WriteLine("Something went wrong, check your text for oddities and try again..."); } } #endregion #region Evaluations Code (whole code commented) //// Evaluating systems via human-assigned scores of grammaticality, simplicity, and meaning preservation //#region Human evaluation //Console.WriteLine("Grammaticality"); ////var evalResSanja = HumanEvaluation.EvaluateHuman(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\human-evaluation\sanja\grammaticality.txt", @"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\codings.txt"); ////var sanjaGrammaticality = HumanEvaluation.AllAnnotatorScores; ////var evalResGoran = HumanEvaluation.EvaluateHuman(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\human-evaluation\goran\grammaticality.txt", @"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\codings.txt"); ////var goranGrammaticality = HumanEvaluation.AllAnnotatorScores; ////Console.WriteLine("\nSanja"); ////evalResSanja.ToList().ForEach(er => Console.WriteLine(er.Key + " " + er.Value)); ////Console.WriteLine("\nGoran"); ////evalResGoran.ToList().ForEach(er => Console.WriteLine(er.Key + " " + er.Value)); ////Console.WriteLine("\nAgreement: " + Pearson.Correlation(sanjaGrammaticality, goranGrammaticality)); //var evalAvg = HumanEvaluation.EvaluateHumanDual(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\human-evaluation\sanja\grammaticality.txt", @"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\human-evaluation\goran\grammaticality.txt", @"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\codings.txt"); //Console.WriteLine("\nAveraged"); //evalAvg.ToList().ForEach(er => Console.WriteLine(er.Key + " " + er.Value)); ////Console.WriteLine(); //Console.WriteLine("\nSimplicity"); ////evalResSanja = HumanEvaluation.EvaluateHuman(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\human-evaluation\sanja\simplicity.txt", @"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\codings.txt"); ////var sanjaSimplicity = HumanEvaluation.AllAnnotatorScores; ////evalResGoran = HumanEvaluation.EvaluateHuman(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\human-evaluation\goran\simplicity.txt", @"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\codings.txt"); ////var goranSimplicity = HumanEvaluation.AllAnnotatorScores; ////Console.WriteLine("\nSanja"); ////evalResSanja.ToList().ForEach(er => Console.WriteLine(er.Key + " " + er.Value)); ////Console.WriteLine("\nGoran"); ////evalResGoran.ToList().ForEach(er => Console.WriteLine(er.Key + " " + er.Value)); ////Console.WriteLine("\nAgreement: " + Pearson.Correlation(sanjaSimplicity, goranSimplicity)); //evalAvg = HumanEvaluation.EvaluateHumanDual(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\human-evaluation\sanja\simplicity.txt", @"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\human-evaluation\goran\simplicity.txt", @"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\codings.txt"); //Console.WriteLine("\nAveraged"); //evalAvg.ToList().ForEach(er => Console.WriteLine(er.Key + " " + er.Value)); ////Console.WriteLine(); ////Console.WriteLine("Meaning preservation"); ////evalResSanja = 
HumanEvaluation.EvaluateHuman(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\human-evaluation\sanja\meaning-preservation.txt", @"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\codings.txt", true); ////var sanjaMeaningPreservation = HumanEvaluation.AllAnnotatorScores; ////evalResGoran = HumanEvaluation.EvaluateHuman(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\human-evaluation\goran\meaning-preservation.txt", @"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\codings.txt", true); ////var goranMeaningPreservation = HumanEvaluation.AllAnnotatorScores; ////Console.WriteLine("\nSanja"); ////evalResSanja.ToList().ForEach(er => Console.WriteLine(er.Key + " " + er.Value)); ////Console.WriteLine("\nGoran"); ////evalResGoran.ToList().ForEach(er => Console.WriteLine(er.Key + " " + er.Value)); ////Console.WriteLine("\nAgreement: " + Pearson.Correlation(sanjaMeaningPreservation, goranMeaningPreservation)); //evalAvg = HumanEvaluation.EvaluateHumanDual(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\human-evaluation\sanja\meaning-preservation.txt", @"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\human-evaluation\goran\meaning-preservation.txt", @"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\codings.txt", true); //Console.WriteLine("\nAveraged"); //evalAvg.ToList().ForEach(er => Console.WriteLine(er.Key + " " + er.Value)); //Console.WriteLine(); ////Console.WriteLine("Changed"); ////var evalResCh = HumanEvaluation.EvaluateHuman(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\human-evaluation\goran\changed.txt", @"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\codings.txt", true); ////evalResCh.ToList().ForEach(er => Console.WriteLine(er.Key + " " + er.Value)); ////Console.WriteLine("Done!"); ////Console.ReadLine(); //#endregion //// Loading the information contents based on word frequencies from a large corpus //InformationContent ic = new InformationContent(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Corpora\word-freqs.txt"); //// Loading the GloVe embeddings into an instance of WordVectorSpace class //WordVectorSpace wvs = new WordVectorSpace(); //wvs.Load(@"C:\Goran\Korpusi\GloVe-Vectors\glove-vectors-6b-200d\glove-vectors-6b-200d.txt", null); //// Instantiating the lexical simplifier //LexicalSimplifier simplifier = new LexicalSimplifier(); //// setting the information content and word vector space properties //simplifier.InformationContent = ic; //simplifier.VectorSpace = wvs; //simplifier.CandidateInPoSLookup = TakeLab.Utilities.IO.StringLoader.LoadDictionaryStrings(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\candidate-in-pos-lookup.txt"); //// Evaluating systems via human-assigned scores of grammaticality, simplicity, and meaning preservation //#region Human Evaluation Task //#region Preparing files for evaluation ////var dirPath = @"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\"; ////var originalLines = (new StreamReader(dirPath + "HA-Wiki-original.txt")).ReadToEnd().Split(new char[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries).ToList(); ////var humanLines = (new StreamReader(dirPath + "HA-Wiki-manualSimplification.txt")).ReadToEnd().Split(new char[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries).ToList(); ////var embeSimpLines = (new StreamReader(dirPath + "HA-Wiki-EmbeSimp.txt")).ReadToEnd().Split(new char[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries).ToList(); ////var biranLines = (new StreamReader(dirPath + "HA-Wiki-Biran-simplified.txt")).ReadToEnd().Split(new char[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries).ToList(); ////var gramSimpWriter = new StreamWriter(dirPath + "simplification-grades.txt"); ////var meanPresWriter = new 
StreamWriter(dirPath + "meaning-preservation-grades.txt"); ////Dictionary<Guid, string> codings = new Dictionary<Guid, string>(); ////for (int i = 0; i < originalLines.Count; i++) ////{ //// List<AnnotationEntry> entries = new List<AnnotationEntry>(); //// var entry = new AnnotationEntry { ID = Guid.NewGuid(), Sentence = originalLines[i], Grade = 0 }; //// entries.Add(entry); //// codings.Add(entry.ID, "original"); //// entry = new AnnotationEntry { ID = Guid.NewGuid(), Sentence = humanLines[i], Grade = 0 }; //// entries.Add(entry); //// codings.Add(entry.ID, "manual"); //// entry = new AnnotationEntry { ID = Guid.NewGuid(), Sentence = biranLines[i], Grade = 0 }; //// entries.Add(entry); //// codings.Add(entry.ID, "biran"); //// entry = new AnnotationEntry { ID = Guid.NewGuid(), Sentence = embeSimpLines[i], Grade = 0 }; //// entries.Add(entry); //// codings.Add(entry.ID, "embesimp"); //// entries = entries.OrderBy(x => x.ID).ToList(); //// entries.ForEach(x => gramSimpWriter.WriteLine(x.ID + " " + x.Grade + " \"" + x.Sentence + "\"")); //// gramSimpWriter.WriteLine(); //// var original = entries.Where(x => codings[x.ID] == "original").Single(); //// var other = entries.Where(x => codings[x.ID] != "original").ToList(); //// other.ForEach(x => //// { //// meanPresWriter.WriteLine("Original: " + "\"" + original.Sentence + "\""); //// meanPresWriter.WriteLine("Simplified: " + x.ID + " " + x.Grade + " \"" + x.Sentence + "\""); //// meanPresWriter.WriteLine(); //// }); //// meanPresWriter.WriteLine(); //// meanPresWriter.WriteLine(); ////} ////gramSimpWriter.Close(); ////meanPresWriter.Close(); ////StreamWriter codingsWriter = new StreamWriter(dirPath + "codings.txt"); ////codings.ToList().ForEach(c => ////{ //// codingsWriter.WriteLine(c.Key.ToString() + " " + c.Value); ////}); ////codingsWriter.Close(); ////Console.WriteLine("Done!"); ////Console.ReadLine(); //#endregion ////var wikiSentences = TakeLab.Utilities.IO.StringLoader.LoadList(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\HA-Wiki-original.txt"); ////var wikiWriter = new StreamWriter(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\HA-Wiki-EmbeSimp.txt"); ////AnnotatorChain annChain = new AnnotatorChain(TakeLabCore.NLP.Language.English, new List<AnnotatorType> { AnnotatorType.SentenceSplitter, AnnotatorType.POSTagger, AnnotatorType.Morphology, AnnotatorType.NamedEntities }); //////wikiSentences = wikiSentences.OrderBy(x => Guid.NewGuid()).ToList(); ////for (int i = 0; i < wikiSentences.Count; i++) ////{ //// var document = new Document { Text = wikiSentences[i] }; //// document.Annotate(annChain); //// var dSimple = simplifier.Simplify(document, 5, 0.55, 0.55); //// wikiWriter.WriteLine(dSimple.Text); //// Console.WriteLine(document.Text); //// Console.WriteLine(dSimple.Text); //// Console.WriteLine(); ////} ////wikiWriter.Close(); ////Console.WriteLine("Done!"); ////Console.ReadLine(); //#endregion //// Automated evaluation on the ranking lexical simplification task from SemEval 2012 //#region Ranking task ////StreamWriter rankingsWriter = new StreamWriter(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\ranking-task-semeval\test-dataset\gorankings.txt"); ////var datasetRanking = DatasetLoader.LoadRankingTasksDataset(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\ranking-task-semeval\test-dataset\contexts.xml", @"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\ranking-task-semeval\test-dataset\substitutions.gold-rankings"); ////datasetRanking = datasetRanking.OrderBy(x => x.ID).ToList(); ////datasetRanking.ForEach(rt => { //// Console.WriteLine("Processing example: " + 
datasetRanking.IndexOf(rt)); //// var candidateRanking = simplifier.RankSimplificationCandidates(rt, 5); //// rankingsWriter.Write("Sentence " + rt.ID.ToString() + " rankings: "); //// for (int i = 0; i < candidateRanking.Count; i++) //// { //// rankingsWriter.Write("{" + candidateRanking[i] + "} "); //// //Console.WriteLine(candidateRanking[i]); //// } //// rankingsWriter.WriteLine(); //// //Console.WriteLine(); //// //Console.ReadLine(); ////}); ////rankingsWriter.Close(); ////Console.WriteLine("Done!"); ////Console.ReadLine(); //#endregion //// Automated evaluation on the crowdsourced replacement dataset (Kavuhcuk et al., ACL 2014) //#region Replacement task //AnnotatorChain ac = new AnnotatorChain(TakeLabCore.NLP.Language.English, new List<AnnotatorType> { AnnotatorType.SentenceSplitter, AnnotatorType.POSTagger, AnnotatorType.Morphology, AnnotatorType.NamedEntities }); //var dataset = DatasetLoader.LoadMechanicalTurkTDataset(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\lex.mturk.txt");//.OrderBy(x => Guid.NewGuid()).ToList(); //List<string> systemReplacements = new List<string>(); //List<List<string>> systemReplacementCandidates = new List<List<string>>(); //StreamWriter replacementOutputWriter = new StreamWriter(@"C:\Goran\Repos\EmbeSimp\EmbeSimp\Output\replacements.txt"); //for(int i = 0; i < dataset.Count; i++) //{ // Console.WriteLine("Simplifying sentence {0}: ", i); // Document d = new Document { Text = dataset[i].Item1 }; // ac.Apply(d); // var dSimple = simplifier.Simplify(d, 5, 0, 0, dataset[i].Item2); // Console.WriteLine("Original: " + d.Text); // Console.WriteLine("Simplified: " + dSimple.Text); // Console.WriteLine(); // var targetSimplification = simplifier.LastSubstitutions.Where(s => s.Item1.Text.ToLower() == dataset[i].Item2.ToLower()).FirstOrDefault(); // if (targetSimplification == null) // { // systemReplacements.Add(string.Empty); // systemReplacementCandidates.Add(new List<string>()); // } // else // { // systemReplacements.Add(targetSimplification.Item2); // systemReplacementCandidates.Add(simplifier.LastSubstitutionCandidates[0].Item2); // } // Console.WriteLine("Change: {0} -> {1}", dataset[i].Item2, targetSimplification != null ? targetSimplification.Item2 : "<empty>"); // //Console.ReadLine(); // replacementOutputWriter.WriteLine(d.Text); // replacementOutputWriter.WriteLine("Target: " + dataset[i].Item2); // replacementOutputWriter.WriteLine("Simplification: " + ((targetSimplification != null) ? 
targetSimplification.Item2 : "<empty>")); // replacementOutputWriter.WriteLine("All candidates: "); // simplifier.LastSubstitutionCandidates[0].Item2.ForEach(x => replacementOutputWriter.WriteLine(x)); // replacementOutputWriter.WriteLine(); //} //replacementOutputWriter.Close(); //var evaluator = new ReplacementEvaluator(); //var precision = evaluator.EvaluatePrecision(dataset, systemReplacements); //var accuracy = evaluator.EvaluateAccuracy(dataset, systemReplacements); //var changed = evaluator.EvaluateChanged(dataset, systemReplacements); //var precisionCandidate = evaluator.EvaluateCandidatePrecision(dataset, systemReplacementCandidates); //var accuracyCandidate = evaluator.EvaluateCandidateAccuracy(dataset, systemReplacementCandidates); //Console.WriteLine("Precision: {0}\nAccuracy: {1}\nChanged: {2}\n", precision, accuracy, changed); //Console.WriteLine("Soft precision: {0}\nSoft accuracy: {1}\n Changed: {2}\n ", precisionCandidate, accuracyCandidate, changed); //TakeLab.Utilities.IO.StringWriter<string>.WriteDictionary<string>(simplifier.CandidateInPoSLookup, @"C:\Goran\Repos\EmbeSimp\EmbeSimp\Data\candidate-in-pos-lookup.txt"); //#endregion //#region Wiki preprocessing ////WikipediaPreprocessing wpp = new WikipediaPreprocessing(); ////wpp.Preprocess(@"C:\Goran\Korpusi\Wikipedia\preprocessed"); ////TakeLabCore.NLP.Annotations.Document doc = new TakeLabCore.NLP.Annotations.Document{ Text = "he doctor who pulled Harrison Ford from the wreck of a plane crash has described how he feared a fireball from the aircraft's leaking fuel. Spine surgeon Sanjay Khurana was playing golf in Los Angeles when the American actor's vintage plane \"belly-flopped\" down on to the eighth hole." }; ////(new TakeLabCore.NLP.Annotators.English.EngNER()).Annotate(doc); ////doc.NamedEntities.ForEach(ne => Console.WriteLine(ne.Text + " " + ne.NEType + " " + ne.StartPosition)); ////var simnet = new SimilarityNet(); ////simnet.SimpleLoad(@"C:\Goran\Korpusi\GloVe-Vectors\glove-vectors-6b-200d\glove-vectors-6b-200d.txt"); ////while (true) ////{ //// var line = Console.ReadLine(); //// if (string.IsNullOrEmpty(line)) break; //// else //// { //// var mostSimilar = simnet.GetMostSimilar(line.Trim(), 20); //// if (mostSimilar != null) mostSimilar.ForEach(ms => Console.WriteLine(ms.Item1 + " " + ms.Item2)); //// else Console.WriteLine("Word not found in vocabulary, try again!"); //// Console.WriteLine(); //// } ////} //#endregion #endregion }
/// <summary>
/// Ordering the candidates for the ranking task, when the candidates are given. The ordering differs somewhat from the one used for full simplification.
/// </summary>
/// <param name="document">The document whose text is to be simplified</param>
/// <param name="substitutionCandidates">Substitution candidates</param>
/// <param name="target">Target word</param>
/// <param name="contextSize">The size of the context of the target word to be compared semantically with candidate replacements</param>
/// <returns>The given candidates, ordered by average rank (best first)</returns>
public List<string> OrderGivenSubstitutionCandidates(Document document, List<string> substitutionCandidates, string target, int contextSize)
{
    EngMorphology morph = new EngMorphology();
    List<Tuple<TokenAnnotation, string>> substitutions = new List<Tuple<TokenAnnotation, string>>();
    //List<string> metrics = new List<string> { "sim", "ic-diff", "context-sim", "length", "lm-bigram-pre", "lm-bigram-post", "lm-trigram-pre", "lm-trigram-post" };
    List<string> metrics = new List<string> { "context-sim" /*, "lm-bigram-pre", "lm-bigram-post"*/, "ic-diff" /*, "lm-trigram-pre", "lm-trigram-post"*/ };
    //List<string> metrics = new List<string> { "ic-diff" };
    Dictionary<string, string> candidateChanges = new Dictionary<string, string>();
    var targetToken = document.AllTokens.Where(t => t.Text == target).Last();
    var preceedingSentencePart = document.Text.Substring(0, targetToken.StartPositionSentence);
    var followingSentencePart = document.Text.Substring(targetToken.StartPositionSentence + targetToken.Text.Length);
    var targetLemmaIC = InformationContent.GetRelativeInformationContent(targetToken.Lemma.ToLower());
    var targetWordIC = InformationContent.GetRelativeInformationContent(targetToken.Text.ToLower());
    var targetContextTokens = document.AllTokens.Where(t => Math.Abs(document.AllTokens.IndexOf(t) - targetToken.SentenceIndex) > 0 && Math.Abs(document.AllTokens.IndexOf(t) - targetToken.SentenceIndex) <= contextSize && t.IsContent()).ToList();
    Dictionary<string, Dictionary<string, double>> scores = new Dictionary<string, Dictionary<string, double>>();
    substitutionCandidates.ForEach(candidate =>
    {
        var candidateText = candidate;
        // comma-separated candidate variants: keep the one with the highest information content
        if (candidateText.Contains(","))
        {
            var splitCand = candidateText.Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries).ToList();
            string selected = string.Empty;
            double maxIC = double.MinValue;
            splitCand.ForEach(sc =>
            {
                var scic = InformationContent.GetRelativeInformationContent(sc.Trim().ToLower());
                if (scic > maxIC)
                {
                    selected = sc.Trim();
                    maxIC = scic;
                }
            });
            candidateText = selected;
        }
        // multi-word candidates: keep the last content token (or the first token if none is a content word)
        if (candidateText.Trim().Contains(" "))
        {
            var tokens = (new EngPOSTagger()).Annotate(candidateText.Trim()).ToList();
            var contentTokens = tokens.Where(x => ((TokenAnnotation)x).IsContent()).ToList();
            var change = contentTokens.Count > 0 ? ((TokenAnnotation)(contentTokens.Last())).Text.Trim() : ((TokenAnnotation)(tokens.First())).Text.Trim();
            candidateText = change;
        }
        var candidateLemmaIC = InformationContent.GetRelativeInformationContent(candidateText.ToLower());
        var candidateWordIC = !string.IsNullOrEmpty(candidateText) ? InformationContent.GetRelativeInformationContent(candidateText.ToLower()) : 1;
        var candidateIC = candidateWordIC == 1 ? candidateLemmaIC : candidateWordIC;
        var targetIC = targetWordIC == 1 ? targetLemmaIC : targetWordIC;
        var candidateContextSimilarities = targetContextTokens.Select(x => VectorSpace.Similarity(x.Lemma.ToLower(), candidateText.ToLower())).Where(x => x >= -1).ToList();
        var candidateContextSimilarity = candidateContextSimilarities.Count > 0 ? candidateContextSimilarities.Average() : 0;
        scores.Add(candidate, new Dictionary<string, double>());
        var sim = VectorSpace.Similarity(targetToken.Text.ToLower().Trim(), candidateText.ToLower().Trim());
        if (sim < 1)
        {
            scores[candidate].Add("sim", sim);
        }
        scores[candidate].Add("ic-diff", candidateIC);
        scores[candidate].Add("context-sim", candidateContextSimilarity);
        scores[candidate].Add("length", candidateText.Length);
        var tokenIndex = document.AllTokens.IndexOf(targetToken);
        // bigram LM
        if (tokenIndex > 0)
        {
            var lmScore = EnglishLanguageModel.Instance.GetBigramLMScore(document.AllTokens[document.AllTokens.IndexOf(targetToken) - 1].Text.ToLower(), candidateText);
            scores[candidate].Add("lm-bigram-pre", lmScore.HasValue ? lmScore.Value : -100);
        }
        else
        {
            scores[candidate].Add("lm-bigram-pre", 0);
        }
        if (tokenIndex < document.AllTokens.Count - 1)
        {
            var lmScore = EnglishLanguageModel.Instance.GetBigramLMScore(candidateText, document.AllTokens[document.AllTokens.IndexOf(targetToken) + 1].Text.ToLower());
            scores[candidate].Add("lm-bigram-post", lmScore.HasValue ? lmScore.Value : -100);
        }
        else
        {
            scores[candidate].Add("lm-bigram-post", 0);
        }
        // trigram LM
        if (tokenIndex > 1)
        {
            var lmScore = EnglishLanguageModel.Instance.GetTrigramLMScore(document.AllTokens[document.AllTokens.IndexOf(targetToken) - 2].Text.ToLower(), document.AllTokens[document.AllTokens.IndexOf(targetToken) - 1].Text.ToLower(), candidateText);
            scores[candidate].Add("lm-trigram-pre", lmScore.HasValue ? lmScore.Value : -100);
        }
        else
        {
            scores[candidate].Add("lm-trigram-pre", 0);
        }
        if (tokenIndex < document.AllTokens.Count - 2)
        {
            var lmScore = EnglishLanguageModel.Instance.GetTrigramLMScore(candidateText, document.AllTokens[document.AllTokens.IndexOf(targetToken) + 1].Text.ToLower(), document.AllTokens[document.AllTokens.IndexOf(targetToken) + 2].Text.ToLower());
            scores[candidate].Add("lm-trigram-post", lmScore.HasValue ? lmScore.Value : -100);
        }
        else
        {
            scores[candidate].Add("lm-trigram-post", 0);
        }
    });
    LastSubstitutionCandidates = new List<Tuple<TokenAnnotation, List<string>>>();
    LastSubstitutionCandidates.Add(new Tuple<TokenAnnotation, List<string>>(targetToken, scores.Select(x => x.Key).ToList()));
    var allRanks = new List<Dictionary<string, int>>();
    metrics.ForEach(m =>
    {
        var featDict = scores.Where(x => x.Value.ContainsKey(m)).ToDictionary(x => x.Key, x => x.Value[m]);
        allRanks.Add(TrainingExample.RankExamplesByNumericFeature(featDict, m == "length" || m == "ic-diff"));
    });
    var allCandidates = scores.Select(x => x.Key).ToList();
    Dictionary<string, double> averageRankings = allCandidates.ToDictionary(x => x, x => allRanks.Where(y => y.ContainsKey(x)).Select(r => r[x]).Average());
    return averageRankings.OrderBy(r => r.Value).Select(x => x.Key).ToList();
}
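An illustrative, hedged call to the ranking method above, assuming an AnnotatorChain `ac` and a configured `simplifier` as in Main; the sentence, target and candidate strings are made up for the example.

// Ranks the given candidates for the target word by the average of their
// context-similarity and information-content ranks (lower average = better).
var doc = new Document { Text = "She looked perplexed by the sudden question." };
doc.Annotate(ac);
var given = new List<string> { "confused", "puzzled", "baffled" };
List<string> ranked = simplifier.OrderGivenSubstitutionCandidates(doc, given, "perplexed", 5);
ranked.ForEach(Console.WriteLine);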
/// <summary>
/// Choosing the substitutions for target words. This means we rank the candidates according to several features (similarity with target word, information content reduction, similarity with context words, ...)
/// </summary>
/// <param name="document">The document whose text is to be simplified</param>
/// <param name="substCandidates">All the pairs of target words and collected candidate replacements</param>
/// <param name="contextSize">The size of the context of the target word to be considered for measuring the similarity between candidate words and target word context</param>
/// <param name="noSubstitutionWords">Stopwords, never to be considered for simplification</param>
/// <param name="similarityTreshold">The threshold for semantic similarity between target word and candidate replacement</param>
/// <param name="icReplacementCandidateTreshold">Information content threshold for replacing the target word</param>
/// <param name="word">Optional target word; when provided, only this word is considered for substitution</param>
/// <returns>The list of substitutions (tuple of target token and candidate replacement word)</returns>
public List<Tuple<TokenAnnotation, string>> GetSubstitutions(Document document, List<Tuple<TokenAnnotation, List<Tuple<string, double>>>> substCandidates, int contextSize, List<string> noSubstitutionWords, double similarityTreshold, double icReplacementCandidateTreshold, string word = null)
{
    EngMorphology morph = new EngMorphology();
    List<Tuple<TokenAnnotation, string>> substitutions = new List<Tuple<TokenAnnotation, string>>();
    List<string> metrics = new List<string> { "sim", "ic-diff", "context-sim", "lm-bigram-pre", "lm-bigram-post", "lm-trigram-pre", "lm-trigram-post" };
    //List<string> metrics = new List<string> { "sim", "lm-bigram-pre", "lm-bigram-post", /*"ic-diff", "lm-trigram-pre", "lm-trigram-post"*/ };
    substCandidates.ForEach(sc =>
    {
        if (!noSubstitutionWords.Contains(sc.Item1.Text.ToLower()))
        {
            if (word == null || sc.Item1.Text.ToLower() == word)
            {
                Dictionary<string, Dictionary<string, double>> scores = new Dictionary<string, Dictionary<string, double>>();
                var targetToken = sc.Item1;
                var sentence = document.Sentences.Where(s => s.Tokens.Any(t => t.StartPosition == targetToken.StartPosition && t.Text == targetToken.Text)).Single();
                var targetTokenCopy = sentence.Tokens.Where(t => t.StartPosition == targetToken.StartPosition && t.Text == targetToken.Text).Single();
                var preceedingSentencePart = sentence.Text.Substring(0, targetTokenCopy.StartPositionSentence);
                var followingSentencePart = sentence.Text.Substring(targetTokenCopy.StartPositionSentence + targetTokenCopy.Text.Length);
                var targetLemmaIC = InformationContent.GetRelativeInformationContent(targetToken.Lemma.ToLower());
                var targetWordIC = InformationContent.GetRelativeInformationContent(targetToken.Text.ToLower());
                var targetContextTokens = sentence.Tokens.Where(t => Math.Abs(sentence.Tokens.IndexOf(t) - targetTokenCopy.SentenceIndex) > 0 && Math.Abs(sentence.Tokens.IndexOf(t) - targetTokenCopy.SentenceIndex) <= contextSize && t.IsContent()).ToList();
                var targetCtxtSimilarities = targetContextTokens.Select(x => VectorSpace.Similarity(x.Lemma.ToLower(), targetToken.Lemma.ToLower())).Where(x => x >= -1).ToList();
                var targetContextSimilarity = targetCtxtSimilarities.Count > 0 ?
targetCtxtSimilarities.Average() : 0; if (sc.Item2 != null) { sc.Item2.ForEach(candidate => { try { var candidateLemmaIC = InformationContent.GetRelativeInformationContent(candidate.Item1.ToLower()); string key = candidate.Item1 + "<->" + targetToken.POSTag; //var candidateInPOS = EngMorphology.GetForm(candidate.Item1, targetToken.POSTag); //if (!CandidateInPoSLookup.ContainsKey(key)) CandidateInPoSLookup.Add(key, candidateInPOS); var candidateInPOS = CandidateInPoSLookup.ContainsKey(key) ? CandidateInPoSLookup[key] : candidate.Item1; var candidateWordIC = !string.IsNullOrEmpty(candidateInPOS) ? InformationContent.GetRelativeInformationContent(candidateInPOS.ToLower()) : 1; var candidateIC = candidateWordIC == 1 ? candidateLemmaIC : candidateWordIC; var targetIC = targetWordIC == 1 ? targetLemmaIC : targetWordIC; if (!string.IsNullOrEmpty(candidateInPOS) && targetLemmaIC > icReplacementCandidateTreshold && (candidateIC < targetIC /*|| Math.Abs(targetIC - candidateIC) < 0.05*/)) { var artificialSentence = preceedingSentencePart + candidateInPOS + followingSentencePart; var artTokens = (new EngPOSTagger()).Annotate(artificialSentence).Select(x => (TokenAnnotation)x).ToList(); morph.AnnotateMorphology(artTokens); var candidateToken = artTokens.Where(x => x.StartPositionSentence == targetTokenCopy.StartPositionSentence /*&& x.Text == candidateInPOS*/).Single(); var candidateContextSimilarities = targetContextTokens.Select(x => VectorSpace.Similarity(x.Lemma.ToLower(), candidateToken.Lemma.ToLower())).Where(x => x >= -1).ToList(); var candidateContextSimilarity = candidateContextSimilarities.Count > 0 ? candidateContextSimilarities.Average() : targetContextSimilarity; // POS-tag compatibility is a second prerequisite bool sameWord = candidate.Item1.Contains(targetToken.Text) || targetToken.Text.Contains(candidate.Item1) || candidate.Item1.Contains(targetToken.Lemma) || targetToken.Lemma.Contains(candidate.Item1) || candidateInPOS.Contains(targetToken.Text) || targetToken.Text.Contains(candidateInPOS) || candidateInPOS.Contains(targetToken.Lemma) || targetToken.Lemma.Contains(candidateInPOS); bool sameAsContext = targetContextTokens.Any(ct => candidate.Item1.Contains(ct.Text) || ct.Text.Contains(candidate.Item1) || candidate.Item1.Contains(ct.Lemma) || ct.Lemma.Contains(candidate.Item1) || candidateInPOS.Contains(ct.Text) || ct.Text.Contains(candidateInPOS) || candidateInPOS.Contains(ct.Lemma) || ct.Lemma.Contains(candidateInPOS)); if (candidate.Item2 >= similarityTreshold && (candidateToken.POSTag == targetToken.POSTag) && !sameWord && !sameAsContext) { if (!scores.ContainsKey(candidateInPOS)) { scores.Add(candidateInPOS, new Dictionary <string, double>()); scores[candidateInPOS].Add("sim", candidate.Item2); scores[candidateInPOS].Add("ic-diff", targetIC - candidateIC); scores[candidateInPOS].Add("context-sim", candidateContextSimilarity); scores[candidateInPOS].Add("length", candidateInPOS.Length); var tokenIndex = sentence.Tokens.IndexOf(targetTokenCopy); // bigram LM if (tokenIndex > 0) { var lmScore = EnglishLanguageModel.Instance.GetBigramLMScore(sentence.Tokens[sentence.Tokens.IndexOf(targetTokenCopy) - 1].Text.ToLower(), candidateInPOS); scores[candidateInPOS].Add("lm-bigram-pre", lmScore.HasValue ? 
lmScore.Value : -100); } else { scores[candidateInPOS].Add("lm-bigram-pre", 0); } if (tokenIndex < sentence.Tokens.Count - 1) { var lmScore = EnglishLanguageModel.Instance.GetBigramLMScore(candidateInPOS, sentence.Tokens[sentence.Tokens.IndexOf(targetTokenCopy) + 1].Text.ToLower()); scores[candidateInPOS].Add("lm-bigram-post", lmScore.HasValue ? lmScore.Value : -100); } else { scores[candidateInPOS].Add("lm-bigram-post", 0); } // trigram LM if (tokenIndex > 1) { var lmScore = EnglishLanguageModel.Instance.GetTrigramLMScore(sentence.Tokens[sentence.Tokens.IndexOf(targetTokenCopy) - 2].Text.ToLower(), sentence.Tokens[sentence.Tokens.IndexOf(targetTokenCopy) - 1].Text.ToLower(), candidateInPOS); scores[candidateInPOS].Add("lm-trigram-pre", lmScore.HasValue ? lmScore.Value : -100); } else { scores[candidateInPOS].Add("lm-trigram-pre", 0); } if (tokenIndex < sentence.Tokens.Count - 2) { var lmScore = EnglishLanguageModel.Instance.GetTrigramLMScore(candidateInPOS, sentence.Tokens[sentence.Tokens.IndexOf(targetTokenCopy) + 1].Text.ToLower(), sentence.Tokens[sentence.Tokens.IndexOf(targetTokenCopy) + 2].Text.ToLower()); scores[candidateInPOS].Add("lm-trigram-post", lmScore.HasValue ? lmScore.Value : -100); } else { scores[candidateInPOS].Add("lm-trigram-post", 0); } } } } } catch { } }); } LastSubstitutionCandidates = new List <Tuple <TokenAnnotation, List <string> > >(); LastSubstitutionCandidates.Add(new Tuple <TokenAnnotation, List <string> >(targetToken, scores.Select(x => x.Key).ToList())); if (scores.Count > 0) { var allRanks = new List <Dictionary <string, int> >(); metrics.ForEach(m => { var featDict = scores.ToDictionary(x => x.Key, x => x.Value[m]); allRanks.Add(TrainingExample.RankExamplesByNumericFeature(featDict, m == "length")); }); var allCandidates = scores.Select(x => x.Key).ToList(); Dictionary <string, double> averageRankings = allCandidates.ToDictionary(x => x, x => allRanks.Select(r => r[x]).Average()); var finalRanking = averageRankings.OrderBy(r => r.Value).ToList(); double topScore = finalRanking[0].Value; var equal = new List <string>(); finalRanking.ForEach(fr => { if (fr.Value == topScore) { equal.Add(fr.Key); } }); var finalChoice = equal.Where(eq => equal.Where(eq2 => eq2 != eq).All(eq2 => scores[eq]["sim"] >= scores[eq2]["sim"])).First(); substitutions.Add(new Tuple <TokenAnnotation, string>(targetToken, finalChoice)); } } } }); return(substitutions); }
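GetSubstitutions only selects the replacements; applying them to the text happens elsewhere (presumably inside Simplify). Below is a hedged sketch of such a step, assuming TokenAnnotation.StartPosition is a character offset into the original text; ApplySubstitutions is a hypothetical helper, not part of the original sources.

// Hypothetical helper: splice the chosen replacement words into the raw text.
// Substitutions are applied right-to-left so earlier offsets remain valid.
static string ApplySubstitutions(string text, List<Tuple<TokenAnnotation, string>> substitutions)
{
    foreach (var sub in substitutions.OrderByDescending(s => s.Item1.StartPosition))
    {
        text = text.Remove(sub.Item1.StartPosition, sub.Item1.Text.Length)
                   .Insert(sub.Item1.StartPosition, sub.Item2);
    }
    return text;
}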
public void Insert(InformationContent entity)
{
    context.InformationContent.Add(entity);
    context.SaveChanges();
}
public void PlaceObjectInInfoUI(InformationContent contentId)
{
    PlaceObjectInInfoUI((int)contentId);
}
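The cast to int suggests that InformationContent is used here as an enum of UI content identifiers rather than the entity class above; a one-line, hypothetical usage (the member name is invented for illustration):

PlaceObjectInInfoUI(InformationContent.Tutorial); // forwards to the int-based overload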
// GET: Information
public ActionResult ListPaging(int pageNum = 1, string p_priority = "ALL", string p_read_yn = "N", string p_query_type = "999", string p_data = "N")
{
    try
    {
        //int memberID = 1;
        CMember member = Session[CDictionary.welcome] as CMember;
        //modify by Jony 109-12-15
        int memberID = 0;
        if (member != null)
        {
            memberID = member.fMemberId;
        }
        int pageSize = 5;
        int currentPage = pageNum < 1 ? 1 : pageNum;
        ViewBag.Read_YN = p_read_yn; // pass the partial view's query data on to the main view
        ViewBag.Priority = p_priority;
        ViewBag.Query_Type = p_query_type;
        ViewBag.Query_Data = p_data;
        Func<Information, bool> myWhere = null;
        SingleApartmentEntities db = new SingleApartmentEntities();
        IEnumerable<Information> table = null;
        //myWhere = p => p.Status != "User_Deleted";
        //todo: ok, multiple LINQ where conditions
        //if (string.IsNullOrEmpty(read_yn))
        // p_priority == "ALL": show all records
        if (p_query_type == "999" && p_priority == "ALL")
        {
            myWhere = p => p.MemberID == memberID && p.Status != "User_Deleted" && p.Status != "Admin_Deleted";
        }
        else if (p_query_type == "999" && p_priority == "Null")
        {
            // p_priority == "Null": show read or unread records
            // removed p.Priority == p_priority
            myWhere = p => p.MemberID == memberID && p.Status != "User_Deleted" && p.Status != "Admin_Deleted" && p.Read_YN == p_read_yn;
        }
        else if (p_query_type == "999" && p_read_yn == "Null")
        {
            // p_read_yn == "Null": show records for a priority level
            // removed p.Read_YN == p_read_yn
            myWhere = p => p.MemberID == memberID && p.Status != "User_Deleted" && p.Status != "Admin_Deleted" && p.Priority == p_priority;
        }
        //<option value="999">please choose a query type</option>
        //<option value="100">system category</option>
        //<option value="200">personal category</option>
        //<option value="300">keyword</option>
        else if (p_query_type == "100")
        { // 100 = system category
            myWhere = p => p.MemberID == memberID && p.Status != "User_Deleted" && p.Status != "Admin_Deleted" && p.InformationCategoryID == Convert.ToInt32(p_data);
        }
        else if (p_query_type == "200")
        { // 200 = personal category
            if (p_data == "999")
            {
                myWhere = p => p.MemberID == memberID && p.Status != "User_Deleted" && p.Status != "Admin_Deleted" && p.MemberCategoryID == null;
            }
            else
            {
                myWhere = p => p.MemberID == memberID && p.Status != "User_Deleted" && p.Status != "Admin_Deleted" && p.MemberCategoryID == Convert.ToInt32(p_data);
            }
        }
        else if (p_query_type == "300")
        { // 300 = keyword
            myWhere = p => p.MemberID == memberID && p.Status != "User_Deleted" && p.Status != "Admin_Deleted" && p.InformationContent.Contains(p_data);
        }
        else if (p_query_type == "400")
        { // 400 = by message source category
            myWhere = p => p.MemberID == memberID && p.Status != "User_Deleted" && p.Status != "Admin_Deleted" && p.InformationSource == Convert.ToInt32(p_data);
        }
        else if (p_query_type == "500")
        { // 500 = today's new messages
            if (p_data == "N")
            {
                myWhere = p => p.MemberID == memberID && p.Status != "User_Deleted" && p.Status != "Admin_Deleted" && p.InformationDate.ToString("yyyy-MM-dd") == DateTime.Today.ToString("yyyy-MM-dd");
            }
            else
            {
                myWhere = p => p.MemberID == memberID && p.Status != "User_Deleted" && p.Status != "Admin_Deleted" && p.InformationDate.ToString("yyyy-MM-dd") == DateTime.Today.ToString("yyyy-MM-dd") && p.InformationSource == Convert.ToInt32(p_data);
            }
        }
        else
        {
            //no use
            //myWhere = p => p.MemberID == memberID && p.Status != "User_Deleted" && p.Status != "Admin_Deleted" && p.Read_YN == p_read_yn && p.Priority == p_priority;
        }
        //modify by Jony 1091208: added .OrderBy(a => a.InformationID)
        //modify by Jony 1091218: added .OrderByDescending(a => a.InformationID)
        table = db.Information.Where(myWhere).OrderByDescending(a => a.InformationID);
        // myWhere = p => p.InformationContent.Contains(keyword) && p.Status != "User_Deleted" && p.Status != "Admin_Deleted";
        // table = db.Information.Where(myWhere);
        // //table = from p in db.Information
        // //        where p.InformationContent.Contains(keyword) && p.Status != "User_Deleted" && p.Status != "Admin_Deleted"
        // //        select p;
        //}
        List<CInformation> list = new List<CInformation>();
        foreach (Information item in table)
        {
            //list.Add(new CInformation()
            //{
            //    information_entity = item,
            //    InformationCategoryName = item.InformationCategory.InformationCategoryName,
            //    // ternary operator
            //    UserCategoryName = item.MemberCategoryID == null ? "未分類" : item.MemberInformationCategory.MemberCategoryName,
            //});
            CInformation x = new CInformation();
            x.information_entity = item;
            x.InformationCategoryName = item.InformationCategory.InformationCategoryName;
            x.UserCategoryName = item.MemberCategoryID == null ? "未分類" : item.MemberInformationCategory.MemberCategoryName;
            x.InformationSourceName = "";
            if (x.InformationSource != null)
            {
                // no relation is configured, so query directly to obtain the name (same idea as UserCategoryName = MemberInformationCategory.MemberCategoryName)
                InformationContent c = db.InformationContent.Where(p => p.ContentID == item.InformationSource).FirstOrDefault();
                if (c != null)
                {
                    x.InformationSourceName = c.ContentName; // get the message source name
                }
            }
            list.Add(x);
        }
        //return View(list);
        var pagedlist = list.ToPagedList(currentPage, pageSize);
        //return View(pagedlist); // original paging version; used by this cshtml
        return PartialView(pagedlist); // render as a partial view
    }
    catch
    {
        return View();
    }
}
public void ActivateInformationContent(InformationContent _info) { infoHolder.PlaceObjectInInfoUI(_info); }
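Because the ListPaging action above declares its filter as Func&lt;Information, bool&gt;, db.Information.Where(myWhere) binds to Enumerable.Where, so every non-deleted row of the Information table is pulled into memory before the predicate runs. A minimal sketch of the Expression&lt;Func&lt;...&gt;&gt; alternative that lets Entity Framework 6 (assumed from SingleApartmentEntities) translate the same filter to SQL; DbFunctions.TruncateTime stands in for the ToString("yyyy-MM-dd") comparison, which EF cannot translate, and the entity and context names are simply reused from the snippet for illustration:

using System;
using System.Data.Entity;   // EF6 assumed; provides DbFunctions
using System.Linq;
using System.Linq.Expressions;

public static class InformationQuerySketch
{
    // Building the "today's new messages" filter as an expression tree lets the
    // WHERE clause and ordering run in the database instead of in memory.
    public static IQueryable<Information> TodaysMessages(SingleApartmentEntities db, int memberId)
    {
        DateTime today = DateTime.Today; // hoisted so EF sees a simple constant

        Expression<Func<Information, bool>> filter =
            p => p.MemberID == memberId
                 && p.Status != "User_Deleted"
                 && p.Status != "Admin_Deleted"
                 && DbFunctions.TruncateTime(p.InformationDate) == today;

        return db.Information.Where(filter).OrderByDescending(p => p.InformationID);
    }
}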
public JsonResult GetSingleContent(InformationContentPostModel informationContentPostModel)
{
    InfrastructureModel<InformationContentSingleDataModel> response = new InfrastructureModel<InformationContentSingleDataModel>();
    InformationContentSingleDataModel resultModel = new InformationContentSingleDataModel();
    try
    {
        InformationContent informationContext = informationContentOperation.GetSingleInformationContent(informationContentPostModel.AppUserId);
        if (informationContext != null)
        {
            resultModel.AuthorFullName = informationContext.Author != null
                ? informationContext.Author.Name + " " + informationContext.Author.Surname
                : "Eski Sevgilim :)"; // fallback author label, roughly "My ex :)"
            resultModel.Explanation = informationContext.Explanation;
            resultModel.ImagePath = UrlHelper.InformationPhotoPath + "/" + informationContext.PostImagePath;
            resultModel.LikeCount = informationContext.LikeCount.ToString();
            resultModel.Title = informationContext.Title;
            // Reuse one Random instance; constructing a new one per iteration reseeds
            // from the clock and yields four identical colors.
            Random rnd = new Random();
            string[] colorCodes = new string[4];
            for (int i = 0; i < colorCodes.Length; i++)
            {
                Color randomColor = Color.FromArgb(rnd.Next(256), rnd.Next(256), rnd.Next(256));
                colorCodes[i] = "#" + randomColor.R.ToString("X2") + randomColor.G.ToString("X2") + randomColor.B.ToString("X2");
            }
            resultModel.ColorCode1 = colorCodes[0];
            resultModel.ColorCode2 = colorCodes[1];
            resultModel.ColorCode3 = colorCodes[2];
            resultModel.ColorCode4 = colorCodes[3];
            response.ResultModel = resultModel;
            response.ResultModel.IsAllView = false;
            response.ResultStatus = true;
        }
        else
        {
            response.ResultModel = resultModel;
            response.ResultModel.IsAllView = true;
            resultModel.ImagePath = UrlHelper.InformationPhotoPath + "/" + "33350e5ae-56d8-4707-ac40-8b4c60ddb115.jpg";
            resultModel.Title = "Tüm İçerikleri Gördünüz"; // "You have seen all the content"
            resultModel.Explanation = "Bunları biliyor musunuz ? takipçileri içerik bittiğinde görüyorsa bir yöneticiye mail gitmiştir ve sizin için yeni içerik oluşturma talebi oluşur"; // roughly: "Did you know? If followers see this because the content has run out, an administrator has been e-mailed and a request to create new content for you has been opened."
            response.ResultStatus = true;
        }
    }
    catch (Exception)
    {
        response.ResultStatus = false;
        throw; // rethrow without resetting the stack trace
    }
    return new JsonResult(response);
}
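The endpoint above attaches four freshly randomized hex colors on every call. If the colors should instead be stable for a given piece of content, one option is to seed the Random from the content id. This helper is purely illustrative and not part of the original project:

using System;
using System.Drawing;

public static class ColorCodeHelper
{
    // Hypothetical helper: derives a repeatable four-color palette from a content id,
    // so the same content is always rendered with the same accent colors.
    public static string[] PaletteFor(int contentId, int count = 4)
    {
        var rnd = new Random(contentId); // deterministic seed -> repeatable palette
        var codes = new string[count];
        for (int i = 0; i < count; i++)
        {
            Color c = Color.FromArgb(rnd.Next(256), rnd.Next(256), rnd.Next(256));
            codes[i] = "#" + c.R.ToString("X2") + c.G.ToString("X2") + c.B.ToString("X2");
        }
        return codes;
    }
}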
public JsonResult InsertInformationContent([FromForm] InformationApiContentCRUDModel model)
{
    InfrastructureModel response;
    try
    {
        if (model != null)
        {
            InformationContent informationContent = new InformationContent()
            {
                CategoryId = model.CategoryId,
                AuthorId = model.AuthorId,
                CreateDate = DateTime.Now,
                IsActive = true,
                IsDeleted = false,
                Explanation = model.Explanation,
                LikeCount = model.LikeCount,
                Title = model.Title,
            };
            // Guard against a missing upload before touching any of its properties.
            if (model.PostImageFile != null)
            {
                string filePath = Path.Combine(_env.WebRootPath, "Content", "Information");
                string imagePath = string.Empty;
                string fileExtension = Path.GetExtension(model.PostImageFile.FileName);
                string fileName = Guid.NewGuid() + fileExtension;
                if (model.PostImageFile.Length > 0)
                {
                    // Buffer the upload, round-trip it through Base64, then write the bytes to disk.
                    using (var ms = new MemoryStream())
                    {
                        model.PostImageFile.CopyTo(ms);
                        var fileBytes = ms.ToArray();
                        imagePath = Convert.ToBase64String(fileBytes);
                    }
                    var bytes = Convert.FromBase64String(imagePath);
                    using (var imageFile = new FileStream(Path.Combine(filePath, fileName), FileMode.Create))
                    {
                        imageFile.Write(bytes, 0, bytes.Length);
                        imageFile.Flush();
                    }
                }
                informationContent.PostImagePath = fileName;
            }
            informationContentOperation.Insert(informationContent);
            response = new InfrastructureModel() { ResultStatus = true };
        }
        else
        {
            response = new InfrastructureModel() { ResultStatus = false };
        }
    }
    catch (Exception)
    {
        throw;
    }
    return Json(response);
}
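Buffering the upload and round-tripping it through Base64 works, but the same file can be streamed straight to disk. A minimal sketch assuming ASP.NET Core's IFormFile (suggested by the [FromForm] binding and _env.WebRootPath); the SaveUploadedImage helper and its name are mine, not part of the project:

using System;
using System.IO;
using Microsoft.AspNetCore.Http;

public static class UploadHelpers
{
    // Hypothetical helper: streams an uploaded image directly into the target folder
    // and returns the generated file name, avoiding the in-memory Base64 round trip.
    public static string SaveUploadedImage(IFormFile file, string targetFolder)
    {
        if (file == null || file.Length == 0)
        {
            return null;
        }

        string fileName = Guid.NewGuid() + Path.GetExtension(file.FileName);
        Directory.CreateDirectory(targetFolder); // no-op if the folder already exists

        using (var stream = new FileStream(Path.Combine(targetFolder, fileName), FileMode.Create))
        {
            file.CopyTo(stream); // stream straight to disk
        }
        return fileName;
    }
}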
public void Test1()
{
    AbstractChromosomeFactory factory = new SolutionFactory();
    int[] routeWeights = new int[] { 20000, 50000, 120000, 200000, 350000 };
    int distanceWeight = 1;
    string[] customerTypes = new string[] { "C1", "C2", "R1", "R2", "RC1", "RC2" };
    Dictionary<string, int> customerNumbers = new Dictionary<string, int>()
    {
        { "2", 20000 }, { "4", 50000 }, { "6", 120000 }, { "8", 200000 }, { "10", 350000 }
    };
    string[] customerInstances = new string[] { "1", "2", "3", "4", "5", "6", "7", "8", "9", "10" };
    CrossoverOperator[] crossoverOps = new CrossoverOperator[] { new OrderCrossover(), new PartiallyMatchedCrossover(), new CycleCrossover(), new UniformBasedOrderCrossover() };
    MutationOperator[] mutationOps = new MutationOperator[] { new SwapOperator(), new InsertionOperator(), new InversionOperator(), new DisplacementOperator() };
    int randomWalkNumber = 2000, randomWalkSteps = 5000;
    string floatPattern = "0.000", separator = ",";
    float epsilon = 0.05f;
    foreach (var type in customerTypes)
    {
        foreach (var number in customerNumbers)
        {
            foreach (var instance in customerInstances)
            {
                string instanceId = type + '_' + number.Key + '_' + instance;
                VrptwProblem problem = reader.ReadFromFile(FILE_PATH + @"\" + instanceId + ".txt");
                FitnessFunction ff = new FitnessFunction(number.Value, distanceWeight);
                Landscape landscape = new Landscape(problem, factory, ff);
                foreach (var op in crossoverOps)
                {
                    string path = RESULT_PATH + @"\" + instanceId + "_" + op.GetId() + ".csv";
                    if (!File.Exists(path))
                    {
                        // StreamWriter creates the results file; write one row of metrics per random walk.
                        using (TextWriter tw = new StreamWriter(path))
                        {
                            tw.WriteLine("AC, IC, PIC, DBI");
                            for (int i = 0; i < randomWalkNumber; ++i)
                            {
                                // Autocorrelation, information content, partial information content
                                // and density-basin information of one random walk.
                                var rwResult = landscape.RandomWalk(randomWalkSteps, op);
                                float ac = Autocorrelation.Run(rwResult);
                                float ic = InformationContent.Run(rwResult, epsilon);
                                float pic = PartialInformationContent.Run(rwResult, epsilon);
                                float dbi = DensityBasinInformation.Run(rwResult, epsilon);
                                string line = ac.ToString(floatPattern) + separator + ic.ToString(floatPattern) + separator + pic.ToString(floatPattern) + separator + dbi.ToString(floatPattern);
                                tw.WriteLine(line);
                            }
                        }
                    }
                }
            }
        }
    }
}
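Autocorrelation, InformationContent, PartialInformationContent and DensityBasinInformation are the project's own landscape-analysis classes and their implementations are not shown here. For orientation only, a sketch of one common formulation (after Vassilev's landscape information-content measure) of H(ε) over a random-walk fitness series; the IReadOnlyList&lt;double&gt; input type and the exact normalization are assumptions, not the library's actual contract:

using System;
using System.Collections.Generic;
using System.Linq;

public static class InformationContentSketch
{
    // fitnessWalk: fitness values recorded along a random walk; epsilon: the sensitivity
    // threshold below which a fitness change is treated as "flat".
    public static double Run(IReadOnlyList<double> fitnessWalk, double epsilon)
    {
        if (fitnessWalk == null || fitnessWalk.Count < 3) { return 0.0; } // too short to form any pair

        // Encode each consecutive fitness difference as -1 (down), 0 (flat) or 1 (up).
        var symbols = new int[fitnessWalk.Count - 1];
        for (int i = 0; i < symbols.Length; i++)
        {
            double diff = fitnessWalk[i + 1] - fitnessWalk[i];
            symbols[i] = Math.Abs(diff) <= epsilon ? 0 : Math.Sign(diff);
        }

        // Count the six kinds of unequal neighbouring symbol pairs (p, q), p != q.
        var pairCounts = new Dictionary<(int, int), int>();
        for (int i = 0; i < symbols.Length - 1; i++)
        {
            if (symbols[i] == symbols[i + 1]) { continue; }
            var key = (symbols[i], symbols[i + 1]);
            pairCounts[key] = pairCounts.TryGetValue(key, out int c) ? c + 1 : 1;
        }

        // Entropy of the pair distribution with base-6 logarithms, so H(epsilon) lies in [0, 1].
        double totalPairs = symbols.Length - 1;
        return -pairCounts.Values.Sum(c => (c / totalPairs) * Math.Log(c / totalPairs, 6));
    }
}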