/// <summary>
/// Solves part 1: for every character column, tallies the characters appearing in
/// that column across all input lines and concatenates each column's most frequent
/// character into the answer code.
/// </summary>
public override string P1()
{
    // One frequency table per column; all lines are assumed to have equal length.
    var columnTables = new FrequencyTable <char>[Input[0].Length];
    for (int column = 0; column < columnTables.Length; column++)
    {
        columnTables[column] = new();
    }

    // Tally every character of every line into its column's table.
    Input.ForEach(line =>
    {
        for (int column = 0; column < columnTables.Length; column++)
        {
            columnTables[column].Add(line[column]);
        }
    });

    // Assemble the code from each column's winner.
    string answer = "";
    foreach (var table in columnTables)
    {
        answer += table.GetMostFrequent();
    }
    return(answer);
}
/// <summary>
/// Tests that GetItemsWithPriorityLessThan() returns the correct number of items,
/// in descending priority order, bounded by the specified priority.
/// </summary>
public void GetItemsWithPriorityLessThan()
{
    EnqueueTestData(testPriorityQueue);

    // Request the 4 highest-priority items whose priority is below 6.0.
    var result = new List <KeyValuePair <Double, Char> >(testPriorityQueue.GetItemsWithPriorityLessThan(6.0, 4));

    // (The original asserted result.Count == 4 twice; the duplicate is removed.)
    Assert.AreEqual(4, result.Count);
    Assert.AreEqual(5.0, result[0].Key);
    Assert.AreEqual('A', result[0].Value);
    Assert.AreEqual(4.0, result[1].Key);
    Assert.AreEqual(4.0, result[2].Key);
    Assert.AreEqual(4.0, result[3].Key);
    // Order of values with priority 4.0 is not deterministic, so put all into a FrequencyTable
    // to check that they exist irrespective of order
    var equalPriorityCharacterCounts = new FrequencyTable <Char>();
    equalPriorityCharacterCounts.Increment(result[1].Value);
    equalPriorityCharacterCounts.Increment(result[2].Value);
    equalPriorityCharacterCounts.Increment(result[3].Value);
    Assert.AreEqual(1, equalPriorityCharacterCounts.GetFrequency('B'));
    Assert.AreEqual(2, equalPriorityCharacterCounts.GetFrequency('D'));

    // Requesting a single item below 4.0 should return only the 3.0/'C' entry.
    result = new List <KeyValuePair <Double, Char> >(testPriorityQueue.GetItemsWithPriorityLessThan(4.0, 1));

    Assert.AreEqual(1, result.Count);
    Assert.AreEqual(3.0, result[0].Key);
    Assert.AreEqual('C', result[0].Value);
}
/// <summary>
/// Reset previous calculation results: clears the chances label, the frequency
/// table, the case counter, and the combination list, then refreshes the layout.
/// </summary>
private void ResetResults()
{
    L_Chances.Content = "";
    CaseCount = 0;
    FrequencyTable.Clear();
    Combinations.Clear();
    UpdateLayout();
}
/// <summary>
/// Click handler: computes the value of the second input at octave 3 and shows
/// it in the output box. Does nothing when the input box is empty.
/// </summary>
private void Button_Go2_Click(object sender, RoutedEventArgs e)
{
    string text = TextBox_Input2.Text;
    if (string.IsNullOrEmpty(text))
    {
        return;
    }

    var table = new FrequencyTable();
    TextBox_Output2.Text = table.GetAtOctave(text, 3).ToString();
}
/// <summary>
/// Smoke test: builds a FrequencyTable from a generated byte stream file and
/// prints its string representation.
/// </summary>
public void FrequencyTableTest1()
{
    GenerateByteStream();

    // File.ReadAllBytes fully reads and disposes the stream; the original leaked
    // the FileStream and ignored the return value of Stream.Read.
    var bytes = File.ReadAllBytes("FrequencyTableTest.in");

    var frequencyTable = new FrequencyTable(bytes);
    Console.WriteLine(frequencyTable.ToString());
}
// Submits the review; if a rating for this patient/doctor pair already exists, updates it instead.
public string SubmitReview(FrequencyTable frequency)
{
    try
    {
        // Look for an existing rating by the same patient for the same doctor.
        var filteredResult = from f in ratingDB.frequencies
                             where f.PatientId == frequency.PatientId && f.DoctorId == frequency.DoctorId
                             select f;
        var filteredData = filteredResult.FirstOrDefault();
        if (filteredData == null)
        {
            // No previous rating: insert a new record.
            ratingDB.frequencies.Add(frequency);
            // SaveChanges returns the number of affected rows; exactly one is expected here.
            var result = ratingDB.SaveChanges();
            if (result == 1)
            {
                return("Record Has been inserted.");
            }
            else
            {
                return("record Not Inserted.");
            }
        }
        else
        {
            try
            {
                // Existing rating found: overwrite the score and mark the entity as modified.
                filteredData.Rating = frequency.Rating;
                ratingDB.Entry(filteredData).State = Microsoft.EntityFrameworkCore.EntityState.Modified;
                // ratingDB.Update(frequency);
                var result = ratingDB.SaveChanges();
                if (result == 1)
                {
                    return("rating updated");
                }
                else
                {
                    return("rating which is exsist has not updated.");
                }
            }
            catch (Exception e)
            {
                // NOTE(review): returning the raw exception message to the caller may leak
                // implementation details; consider logging and returning a generic error.
                return(e.Message);
            }
        }
    }
    catch (Exception e)
    {
        return(e.Message);
    }
}
/// <summary>
/// Initialises a new instance of the Algorithms.CandidateWordPriorityCalculator class.
/// </summary>
/// <param name="maximumSourceWordToCandidateWordDistance">The maximum possible distance between any two words in the graph.</param>
/// <param name="sourceWordToCandidateWordDistanceWeight">The weight which should be applied when calculating the overall priority, to the distance from the source word to the candidate word (the 'g(n)' score in the A* algorithm).</param>
/// <param name="numberOfCharactersMatchingDestinationWeight">The weight which should be applied when calculating the overall priority, to the number of matching characters between the candidate word and the destination word.</param>
/// <param name="popularityOfChangeToCharacterWeight">The weight which should be applied when calculating the overall priority, to the frequency that the character changed to in the candidate word is the 'changed from' character in a substitution between adjacent words.</param>
/// <param name="popularityOfCharacterChangeWeight">The weight which should be applied when calculating the overall priority, to the frequency/popularity of the character substitution (i.e. character changed from current word to candidate word).</param>
/// <param name="allWordsTrieRoot">The root of a character trie containing all the words in the graph.</param>
/// <param name="fromCharacterFrequencies">A FrequencyTable containing the number of times each character is the 'from' character in a substitution between adjacent words (i.e. represented by an edge of the graph).</param>
/// <param name="characterSubstitutionFrequencies">A FrequencyTable containing the number of times each pair of characters in a substitution between adjacent words (i.e. represented by an edge of the graph) occurs.</param>
/// <exception cref="System.ArgumentException">Parameter 'maximumSourceWordToCandidateWordDistance' is less than 1.</exception>
/// <exception cref="System.ArgumentException">Parameter 'sourceWordToCandidateWordDistanceWeight' is less than 0.</exception>
/// <exception cref="System.ArgumentException">Parameter 'numberOfCharactersMatchingDestinationWeight' is less than 0.</exception>
/// <exception cref="System.ArgumentException">Parameter 'popularityOfChangeToCharacterWeight' is less than 0.</exception>
/// <exception cref="System.ArgumentException">Parameter 'popularityOfCharacterChangeWeight' is less than 0.</exception>
/// <exception cref="System.ArgumentException">At least one of parameters 'sourceWordToCandidateWordDistanceWeight', 'numberOfCharactersMatchingDestinationWeight', 'popularityOfChangeToCharacterWeight', and 'popularityOfCharacterChangeWeight' must be greater than 0.</exception>
public CandidateWordPriorityCalculator(Int32 maximumSourceWordToCandidateWordDistance, Int32 sourceWordToCandidateWordDistanceWeight, Int32 numberOfCharactersMatchingDestinationWeight, Int32 popularityOfChangeToCharacterWeight, Int32 popularityOfCharacterChangeWeight, Dictionary <Char, TrieNode <Char> > allWordsTrieRoot, FrequencyTable <Char> fromCharacterFrequencies, FrequencyTable <CharacterSubstitution> characterSubstitutionFrequencies)
{
    // Validate each argument individually before any state is assigned.
    if (maximumSourceWordToCandidateWordDistance < 1)
    {
        throw new ArgumentException("Parameter 'maximumSourceWordToCandidateWordDistance' must be greater than or equal to 1.", "maximumSourceWordToCandidateWordDistance");
    }
    if (sourceWordToCandidateWordDistanceWeight < 0)
    {
        throw new ArgumentException("Parameter 'sourceWordToCandidateWordDistanceWeight' must be greater than or equal to 0.", "sourceWordToCandidateWordDistanceWeight");
    }
    if (numberOfCharactersMatchingDestinationWeight < 0)
    {
        throw new ArgumentException("Parameter 'numberOfCharactersMatchingDestinationWeight' must be greater than or equal to 0.", "numberOfCharactersMatchingDestinationWeight");
    }
    if (popularityOfChangeToCharacterWeight < 0)
    {
        throw new ArgumentException("Parameter 'popularityOfChangeToCharacterWeight' must be greater than or equal to 0.", "popularityOfChangeToCharacterWeight");
    }
    if (popularityOfCharacterChangeWeight < 0)
    {
        throw new ArgumentException("Parameter 'popularityOfCharacterChangeWeight' must be greater than or equal to 0.", "popularityOfCharacterChangeWeight");
    }
    // All weights being 0 would make every candidate's overall priority 0.
    if (sourceWordToCandidateWordDistanceWeight == 0 && numberOfCharactersMatchingDestinationWeight == 0 && popularityOfChangeToCharacterWeight == 0 && popularityOfCharacterChangeWeight == 0)
    {
        throw new ArgumentException("At least one of parameters 'sourceWordToCandidateWordDistanceWeight', 'numberOfCharactersMatchingDestinationWeight', 'popularityOfChangeToCharacterWeight', and 'popularityOfCharacterChangeWeight' must be greater than 0.");
    }
    // Initialize priority functions and weights.
    // Each priority function is registered alongside its weight at the same list index,
    // and the running weight total is accumulated as an Int64 to avoid overflow.
    priorityFunctions = new List <Func <String, String, String, Int32, Double> >();
    priorityFunctionWeights = new List <Int32>();
    functionWeightsTotal = 0;
    priorityFunctionWeights.Add(sourceWordToCandidateWordDistanceWeight);
    priorityFunctions.Add(CalculateSourceWordToCandidateWordDistancePriority);
    functionWeightsTotal += Convert.ToInt64(sourceWordToCandidateWordDistanceWeight);
    priorityFunctionWeights.Add(numberOfCharactersMatchingDestinationWeight);
    priorityFunctions.Add(CalculateNumberOfCharactersMatchingDestinationPriority);
    functionWeightsTotal += Convert.ToInt64(numberOfCharactersMatchingDestinationWeight);
    priorityFunctionWeights.Add(popularityOfChangeToCharacterWeight);
    priorityFunctions.Add(CalculatePopularityOfChangeToCharacterPriority);
    functionWeightsTotal += Convert.ToInt64(popularityOfChangeToCharacterWeight);
    priorityFunctionWeights.Add(popularityOfCharacterChangeWeight);
    priorityFunctions.Add(CalculatePopularityOfCharacterChangePriority);
    functionWeightsTotal += Convert.ToInt64(popularityOfCharacterChangeWeight);
    // Store remaining dependencies and pre-compute maximum-frequency members.
    this.maximumSourceWordToCandidateWordDistance = maximumSourceWordToCandidateWordDistance;
    this.allWordsTrieRoot = allWordsTrieRoot;
    this.fromCharacterFrequencies = fromCharacterFrequencies;
    this.characterSubstitutionFrequencies = characterSubstitutionFrequencies;
    wordUtilities = new WordUtilities();
    PopulateMaximumFrequencyMembers();
}
/// <summary>
/// Builds a FrequencyTable from a test input file and serializes it to disk.
/// </summary>
public void TestMethod1()
{
    // File.ReadAllBytes fully reads and disposes the stream; the original leaked
    // the input FileStream and ignored the return value of Stream.Read.
    var bytes = File.ReadAllBytes("FrequencyTableTest.in");
    var frequencyTable = new FrequencyTable(bytes);

    // Dispose the output stream so the serialized data is flushed to disk.
    using (var fileStream = new FileStream("serializedFrequencyTable.out", FileMode.Create, FileAccess.Write))
    {
        // NOTE(review): BinaryFormatter is obsolete and insecure (removed in .NET 9);
        // consider migrating this serialization to a safer format.
        var binaryFormatter = new BinaryFormatter();
        binaryFormatter.Serialize(fileStream, frequencyTable);
    }
}
/// <summary>
/// Button "Calculate Chances" click event.
/// Enumerates every possible completion of the user's partially-known hand,
/// then prints either the single known combination or the chance of each
/// combination type over all enumerated cases.
/// </summary>
private void CalculateChances_Click(object sender, RoutedEventArgs e)
{
    ResetResults();
    // restCards holds the full deck with the user's known cards blanked out
    // (default(Card) marks a slot as unavailable/unknown).
    Card[] restCards = new Card[AllCardCount];
    Card[] userCardsSorted = (Card[])_userCards.Clone();
    Array.Sort(userCardsSorted);
    CardDeck.CopyTo(restCards, 0);
    for (int c = 0; c < AllCardCount; c++)
    {
        // Remove cards the user already holds from the remaining deck.
        if (_userCards.Contains((c + 1).ToCard()))
        {
            restCards[c] = default(Card);
        }
    }
    // Unknown slots in the user's hand are those equal to default(Card).
    int unknownCardsCount = userCardsSorted.Count(card => card == default(Card));
    _allKnown = unknownCardsCount == 0;
    // Recursively enumerate all completions; populates FrequencyTable and CaseCount.
    CheckPossibleSets(userCardsSorted, restCards, unknownCardsCount);
    #region Print Results
    if (_allKnown)
    {
        // With a fully known hand there is exactly one case; show its combination name.
        foreach (var comb in FrequencyTable)
        {
            L_Chances.Content = CombinationNames[(int)comb.Key];
            return;
        }
    }
    float overallChance = 0;
    foreach (Combination comb in CombTypes)
    {
        if (!FrequencyTable.ContainsKey(comb.Type))
        {
            continue;
        }
        // Chance of a combination = its frequency over all enumerated cases, as a percentage.
        int freq = FrequencyTable[comb.Type];
        float chance = freq / (float)CaseCount * 100;
        overallChance += chance;
        L_Chances.Content += (CombinationNames[(int)comb.Type] + " = " + chance + " || " + overallChance + "%" + "\r\n");
    }
    #endregion
}
/// <summary>
/// Builds the Huffman tree for the file at <paramref name="path"/>: counts byte
/// frequencies, converts them to probability-weighted leaf nodes, and repeatedly
/// merges the two least-probable nodes until a single root remains (stored in Root).
/// </summary>
/// <param name="path">Path of the file to analyse.</param>
private static void HuffmanTree(string path)
{
    // Pass over the file counting how often each byte value occurs.
    using (var file = new FileStream(path, FileMode.Open))
    {
        using var reader = new BinaryReader(file);
        TotalData = reader.BaseStream.Length;
        while (reader.BaseStream.Position != reader.BaseStream.Length)
        {
            byte[] buffer = reader.ReadBytes(BufferLenght);
            foreach (var item in buffer)
            {
                // Single dictionary lookup; the original used Keys.Contains (O(n))
                // followed by an indexer access.
                if (FrequencyTable.TryGetValue(item, out int count))
                {
                    FrequencyTable[item] = count + 1;
                }
                else
                {
                    FrequencyTable.Add(item, 1);
                }
            }
        }
    }

    // Turn each (byte, count) pair into a leaf node weighted by its probability.
    List <HuffmanNode> frequencyList = new List <HuffmanNode>();
    foreach (KeyValuePair <byte, int> node in FrequencyTable)
    {
        frequencyList.Add(new HuffmanNode(node.Key, Convert.ToDecimal(node.Value) / TotalData));
    }

    // Merge the two least-probable nodes until only the root is left.
    // (The original 'if (Count == 1) break' inside this loop was unreachable and is removed.)
    while (frequencyList.Count > 1)
    {
        frequencyList = frequencyList.OrderBy(x => x.Probability).ToList();
        HuffmanNode union = LinkNodes(frequencyList[1], frequencyList[0]);
        frequencyList.RemoveRange(0, 2);
        frequencyList.Add(union);
    }
    // NOTE(review): an empty input file leaves frequencyList empty and this throws;
    // same behavior as the original - confirm whether empty files are possible.
    Root = frequencyList[0];
}
/// <summary>
/// Builds an EncodingTable from a generated byte stream via a FrequencyTable and
/// HuffmanTree, and prints the resulting table.
/// </summary>
public void EncodingTest3()
{
    GenerateByteStream("EncodingTest3", 15, 16, false);

    // File.ReadAllBytes fully reads and disposes the stream; the original leaked
    // the FileStream and ignored the return value of Stream.Read.
    var bytes = File.ReadAllBytes("EncodingTest3");

    var frequencyTable = new FrequencyTable(bytes);
    var huffmanTree = new HuffmanTree(frequencyTable);
    var encodingTable = new EncodingTable(huffmanTree);
    Console.WriteLine(encodingTable.ToString());
}
/// <summary>
/// Checks all possible sets of cards from an array in which some cards are
/// already known and others aren't. A card is unknown if it is equal to
/// "default(Card)". For each fully-assigned hand the highest combination type
/// is tallied into FrequencyTable and CaseCount is incremented.
/// This method is recursive (backtracking over the remaining deck).
/// </summary>
private void CheckPossibleSets(Card[] userCards, Card[] restCards, int unknownCardsCount)
{
    #region BaseCase
    if (unknownCardsCount == 0)
    {
        // Hand fully known: record its highest combination type.
        CaseCount++;
        CardSet set = new CardSet(userCards);
        if (!FrequencyTable.ContainsKey(set.HighestCombination.Type))
        {
            FrequencyTable.Add(set.HighestCombination.Type, 1);
        }
        else
        {
            FrequencyTable[set.HighestCombination.Type]++;
        }
        return;
    }
    #endregion
    int index = 0;
    while (userCards[index] != default(Card))
    {
        index++; // locate the first unknown card's position
    }
    unknownCardsCount--; // temporarily decrease unknown cards count
    for (int c = 0; c < restCards.Length; c++)
    {
        // Skip deck slots already consumed (marked default(Card)).
        if (restCards[c] == default(Card))
        {
            continue;
        }
        Card temp = restCards[c];
        userCards[index] = restCards[c];
        restCards[c] = default(Card);
        CheckPossibleSets(userCards, restCards, unknownCardsCount); // recursive step
        restCards[c] = temp; // backtracking
    }
    userCards[index] = default(Card); // backtracking
}
/// <summary>
/// Constructor. Wires each calculation type to its display label, restores the
/// persisted form size/location, and hooks the closing handler.
/// </summary>
public GridCalculator()
{
    InitializeComponent();

    _frequencyTable = new FrequencyTable<decimal>();

    // Map each calculation kind to the label control that displays its value.
    _calcValueLabels[(int)Calculations.SelectedValues] = lblNumValues;
    _calcValueLabels[(int)Calculations.Sum] = lblSum;
    _calcValueLabels[(int)Calculations.Average] = lblAverage;
    _calcValueLabels[(int)Calculations.Median] = lblMedian;
    _calcValueLabels[(int)Calculations.Max] = lblMaximum;
    _calcValueLabels[(int)Calculations.Min] = lblMinimum;
    _calcValueLabels[(int)Calculations.StdDev] = lblStdDev;

    UserAccount.Settings.GetFormSizeLoc(this);
    FormClosing += GridCalculator_FormClosing;
}
/// <summary>
/// Starts vital-signs monitoring: resets the frequency table, starts/enables the
/// timer, and makes a best-effort attempt to clear stale data buffered in the
/// vital-signs service.
/// </summary>
public void startVitalSignsMonitoring()
{
    frequencyTable = new FrequencyTable();
    vitalSignsTimer.Start();
    vitalSignsTimer.Enabled = true;

    try
    {
        IVitalSignsService service = ServiceManager.getOneVitalSignsService();
        service.cleanBuffer();
        ServiceManager.closeService(service);
    }
    catch (Exception ex)
    {
        // Buffer cleanup is best-effort; log the failure and keep monitoring.
        Console.WriteLine(ex.ToString());
    }
}
/// <summary>
/// Constructor. Builds a frequency table of word groups, ordered by descending
/// count, and binds it to the word cloud control via a WordCloudData context.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    var rows = new List <FrequencyTableRow <WordGroup> >();
    var totalCount = 0;
    // Most frequent words first; totalCount accumulates the sum of all counts.
    foreach (var pair in _words.OrderByDescending(entry => entry.Value))
    {
        rows.Add(new FrequencyTableRow <WordGroup>(new WordGroup(pair.Key), pair.Value));
        totalCount += pair.Value;
    }

    Words = new FrequencyTable <WordGroup>(rows, totalCount);
    _wordcloudControlDataContext = new WordCloudData(Words);
    WordCloudControl.DataContext = _wordcloudControlDataContext;
}
/// <summary>
/// Constructor. Builds the message body: byte 0 is reserved (0), byte 1 encodes
/// the auto-set flag, and any remaining bytes are the frequency table entries.
/// </summary>
public MsgFrequencyConfig(FrequencyTable param)
{
    // A missing table contributes zero payload bytes beyond the 2-byte header.
    int tableLength = param.FreqTable?.Length ?? 0;
    msgBody = new byte[2 + tableLength];
    msgBody[0] = 0;
    msgBody[1] = param.IsAutoSet ? (byte)1 : (byte)0;
    for (int i = 0; i < tableLength; i++)
    {
        msgBody[i + 2] = param.FreqTable[i];
    }
}
/// <summary>
/// Tests that the FrequencyTable constructor taking initial item/count pairs
/// records every pair and the correct item and frequency totals.
/// </summary>
public void Constructor_InitialiseWithCounts()
{
    var initialData = new List <KeyValuePair <Char, Int32> >
    {
        new KeyValuePair <Char, Int32>('c', 5),
        new KeyValuePair <Char, Int32>('a', 1),
        new KeyValuePair <Char, Int32>('d', 4),
        new KeyValuePair <Char, Int32>('e', 3),
        new KeyValuePair <Char, Int32>('b', 2)
    };

    testFrequencyTable = new FrequencyTable <Char>(initialData);

    // 5 distinct items; frequencies sum to 15.
    Assert.AreEqual(5, testFrequencyTable.ItemCount);
    Assert.AreEqual(15, testFrequencyTable.FrequencyCount);
    // Each initial pair must be retrievable with its original count.
    foreach (var pair in initialData)
    {
        Assert.AreEqual(pair.Value, testFrequencyTable.GetFrequency(pair.Key));
    }
}
// Huffman Coding: builds a frequency table for the file, derives per-symbol binary
// codes from a Huffman tree, dumps the code table to the console, and returns the
// compressed output produced by handleHuffmanFile.
public string DoCompression()
{
    // Count symbol occurrences in the file being encoded.
    FrequencyTable frequency = new FrequencyTable();
    Dictionary <string, int> frequencyTable = frequency.BuildFrequencyTable(encodedFile);
    // Build the Huffman tree and assign a binary code to every symbol.
    HuffmanTree tree = new HuffmanTree();
    HuffmanNodes fullTree = tree.BuildTree(frequencyTable, frequency);
    IDictionary <string, string> binaryValues = tree.AssignCode();
    // NOTE(review): forcing the first entry's code to "0" looks like a workaround
    // for AssignCode dropping/garbling the first value - confirm AssignCode behaviour.
    binaryValues[binaryValues.First().Key] = "0"; //Stops eliminating first value
    // Debug dump of the code table; "\n" acts as the EOF marker symbol.
    foreach (KeyValuePair <string, string> kvp in binaryValues)
    {
        Console.WriteLine((kvp.Key == "\n" ? "EOF" : kvp.Key.ToString()) + ":\t" + kvp.Value);
    }
    string final = handleHuffmanFile(binaryValues);
    return(final);
}
/// <summary>
/// Verifies that PopulateAdjacentWordDataStructures() disposes the stream reader
/// even when reading a line throws: the mock expects exactly one ReadLine (which
/// throws) followed by exactly one Dispose call.
/// </summary>
public void PopulateAdjacentWordDataStructures_DisposeCalledOnException()
{
    IStreamReader mockStreamReader = mockery.NewMock <IStreamReader>();
    ICharacterTrieBuilder mockTrieBuilder = mockery.NewMock <ICharacterTrieBuilder>();
    HashSet <String> allWords = new HashSet <String>();
    FrequencyTable <Char> fromCharacterFrequencies = new FrequencyTable <Char>();
    FrequencyTable <CharacterSubstitution> characterSubstitutionFrequencies = new FrequencyTable <CharacterSubstitution>();
    // Accept every word; filtering is not under test here.
    Func <String, Boolean> wordFilterFunction = new Func <String, Boolean>((inputString) => { return(true); });
    using (mockery.Ordered)
    {
        Expect.Once.On(mockStreamReader).GetProperty("EndOfStream").Will(Return.Value(false));
        Expect.Once.On(mockStreamReader).Method("ReadLine").WithNoArguments().Will(Throw.Exception(new Exception("Test Exception")));
        // Dispose must still be called despite the thrown exception.
        Expect.Once.On(mockStreamReader).Method("Dispose").WithNoArguments();
    }
    Exception e = Assert.ThrowsException <Exception>(() =>
    {
        testDataStructureUtilities.PopulateAdjacentWordDataStructures(mockStreamReader, mockTrieBuilder, wordFilterFunction, trieRoot, allWords, fromCharacterFrequencies, characterSubstitutionFrequencies);
    });
    mockery.VerifyAllExpectationsHaveBeenMet();
}
/// <summary>
/// Test setup: feeds a fixed word list through a mocked stream reader to populate
/// the trie, word set, and frequency tables, then constructs the
/// CandidateWordPriorityCalculator under test with equal weights of 1.
/// </summary>
public void SetUp()
{
    mockery = new Mockery();
    mockStreamReader = mockery.NewMock <IStreamReader>();
    allWordsTrieRoot = new Dictionary <Char, TrieNode <Char> >();
    allWords = new HashSet <String>();
    fromCharacterFrequencies = new FrequencyTable <Char>();
    characterSubstitutionFrequencies = new FrequencyTable <CharacterSubstitution>();
    List <String> testWords = new List <String>() { "read", "bead", "fail", "dead", "road", "reed", "calm", "real", "rear" };
    // Accept every word; filtering is not under test here.
    Func <String, Boolean> wordFilterFunction = new Func <String, Boolean>((inputString) => { return(true); });
    CharacterTrieBuilder characterTrieBuilder = new CharacterTrieBuilder();
    using (mockery.Ordered)
    {
        // Script the reader: one EndOfStream=false + ReadLine per word,
        // then EndOfStream=true and a final Dispose.
        foreach (String currentTestWord in testWords)
        {
            Expect.Once.On(mockStreamReader).GetProperty("EndOfStream").Will(Return.Value(false));
            Expect.Once.On(mockStreamReader).Method("ReadLine").WithNoArguments().Will(Return.Value(currentTestWord));
        }
        Expect.Once.On(mockStreamReader).GetProperty("EndOfStream").Will(Return.Value(true));
        Expect.Once.On(mockStreamReader).Method("Dispose").WithNoArguments();
    }
    dataStructureUtilities.PopulateAdjacentWordDataStructures(mockStreamReader, characterTrieBuilder, wordFilterFunction, allWordsTrieRoot, allWords, fromCharacterFrequencies, characterSubstitutionFrequencies);
    // Clear the reader expectations so they don't interfere with individual tests.
    mockery.ClearExpectation(mockStreamReader);
    testCandidateWordPriorityCalculator = new CandidateWordPriorityCalculator(20, 1, 1, 1, 1, allWordsTrieRoot, fromCharacterFrequencies, characterSubstitutionFrequencies);
}
/// <summary>
/// Computes descriptive statistics for the data the user loaded, either from a
/// file or from the editor, and publishes them as grouped rows for display.
/// </summary>
private void LoadValuesFromUserData()
{
    try
    {
        // Convert the user's string input into an array of doubles (populates userData).
        this.StringToDoubleArray(this.UserDataString);
        // Build the frequency table that backs all statistics below.
        DataDescriptive = new ObservableCollection <GroupedData <string, ItemDescriptive> >();
        FrequencyTable <double> data = new FrequencyTable <double>(userData);
        // NOTE(review): 'new' never returns null, so this condition is always true;
        // it probably intended to test whether userData contained any values - verify.
        if (data != null)
        {
            EmptyData = true;
        }
        // Central tendency measures (labels are user-facing Spanish strings).
        DataDescriptive.Add(
            new GroupedData <string, ItemDescriptive>(new ItemDescriptive[] {
            new ItemDescriptive() { Detail = "Media Aritmética", Value = data.Mean },
            new ItemDescriptive() { Detail = "Media Geométrica - No implementado", Value = 0 },
            new ItemDescriptive() { Detail = "Media Armónica - No implementado", Value = 0 },
            new ItemDescriptive() { Detail = "Media Cuadrática - No implementado", Value = 0 },
            new ItemDescriptive() { Detail = "Mediana", Value = data.Median },
            new ItemDescriptive() { Detail = "Moda", Value = data.Mode },
        }) { Type = "Medidas de tendencia central" });
        // Dispersion measures.
        // NOTE(review): "Desviación Media" (mean deviation) is populated from
        // StandardDevSample, same as "Desviación Estándar" - confirm this is intended.
        DataDescriptive.Add(
            new GroupedData <string, ItemDescriptive>(new ItemDescriptive[] {
            new ItemDescriptive() { Detail = "Rango", Value = data.Range },
            new ItemDescriptive() { Detail = "Desviación Media", Value = data.StandardDevSample },
            new ItemDescriptive() { Detail = "Varianza", Value = data.VarianceSample },
            new ItemDescriptive() { Detail = "Desviación Estándar", Value = data.StandardDevSample },
            new ItemDescriptive() { Detail = "Coeficiente de Variación", Value = data.StandardDevSample / data.Mean }
        }) { Type = "Medidas de dispersión" });
        // Shape measures.
        DataDescriptive.Add(
            new GroupedData <string, ItemDescriptive>(
                new ItemDescriptive[] {
            new ItemDescriptive() { Detail = "Asimetría", Value = data.Skewness },
            new ItemDescriptive() { Detail = "Curtosis", Value = data.Kurtosis }
        }) { Type = "Medidas de forma" });
    }
    catch (FormatException e)
    {
        // Surface parse failures back through the input field.
        UserDataString = e.Message;
    }
}
/// <summary>
/// Generates a Huffman Tree from the <paramref name="frequencyTable" />, storing
/// the resulting root node and the tree's height.
/// </summary>
/// <param name="frequencyTable">The frequency table to generate a Huffman tree from.</param>
public HuffmanTree(FrequencyTable frequencyTable)
{
    // The generator reports the tree height through an out parameter.
    Root = HuffmanTreeGenerator.GenerateHuffmanTree(frequencyTable, out var treeHeight);
    Height = treeHeight;
}
/// <summary>
/// Constructor. Wraps the supplied word-group frequency table as the data context's word source.
/// </summary>
/// <param name="words">The frequency table of word groups to expose.</param>
public WordCloudData(FrequencyTable <WordGroup> words) => Words = words;
/// <summary>
/// Accepts a rating payload from the request body, delegates to the rating
/// service, and returns its status message.
/// </summary>
public string Post([FromBody] FrequencyTable frequency) => ratingService.SubmitReview(frequency);
/// <summary>
/// Decodes a Huffman-encoded stream: reads the code table and length byte from
/// inStream, rebuilds the Huffman tree from the codewords, then walks the tree
/// bit-by-bit over the payload and writes decoded bytes to outStream.
/// </summary>
public static void DecodeStream(Stream inStream, Stream outStream)
{
    int i = 0; // counter
    // streamLength is the original length modulo 256, written by the encoder;
    // readCharacters counts decoded bytes so trailing padding bits are ignored.
    byte streamLength = 0, readCharacters = 0;
    // the root of Huffman Tree (Key == -1 marks an internal node)
    TreeNode<int, byte> huffmanTreeRoot = new TreeNode<int, byte>(-1, 0);
    TreeNode<int, byte> currentNode;
    frequencyTable = new FrequencyTable();
    readHuffmanTreeFromStream(inStream);
    streamLength = (byte)inStream.ReadByte();
    i = 0;
    // construct a Huffman Tree: insert each used byte value's codeword path
    // ('0' = left, '1' = right), creating internal nodes on demand.
    for (i = 0; i < frequencyTable.MaxTableSize; i++)
    {
        if (frequencyTable.Hits[i] != 0)
        {
            currentNode = huffmanTreeRoot;
            foreach (char c in frequencyTable.Codewords[i])
            {
                if (c == '0')
                {
                    if (currentNode.Left == null)
                        currentNode.Left = new TreeNode<int, byte>(-1, 0);
                    currentNode = currentNode.Left;
                }
                else if (c == '1')
                {
                    if (currentNode.Right == null)
                        currentNode.Right = new TreeNode<int, byte>(-1, 0);
                    currentNode = currentNode.Right;
                }
            }
            currentNode.Key = 1; // this node contains a character (is a leaf)
            currentNode.Value = (byte)i;
        }
    }
    // Tree is ready, start decoding. One byte of lookahead (nextNextByte) is kept
    // so the end of the stream can be detected while processing the current byte.
    string buffer = "", bits = "";
    int nextByte = 0, nextNextByte;
    i = 0;
    nextNextByte = inStream.ReadByte();
    while (nextByte != -1)
    {
        nextByte = nextNextByte;
        nextNextByte = inStream.ReadByte();
        // Expand the byte to its bit string, right-padded with '0' to 8 bits.
        bits = decodeByteToBitString((byte)nextByte);
        if (bits.Length < 8)
            bits += new string('0', 8 - bits.Length);
        buffer += bits;
        currentNode = huffmanTreeRoot; // start from Tree root
        for (i = 0; i < buffer.Length; i++)
        {
            if (nextNextByte == -1) // end of stream
            {
                // Stop once the expected count (mod 256) is reached, skipping pad bits.
                if (readCharacters == streamLength)
                    break;
            }
            if (buffer[i] == '0')
                currentNode = currentNode.Left;
            else
                currentNode = currentNode.Right;
            if (currentNode.Key == 1) // character found
            {
                ++readCharacters;
                outStream.WriteByte(currentNode.Value);
                // Drop consumed bits and restart the walk from the root.
                buffer = buffer.Substring(i + 1);
                i = -1;
                currentNode = huffmanTreeRoot;
            }
        }
    }
}
/// <summary>
/// Rebuilds the Huffman tree from a compressed file's header. The header (after
/// the first line, presumably the original file name) holds the data length, a
/// separator character ('|', 'ÿ', or 'ß'), and separator-delimited
/// (byte, frequency) pairs terminated by a double separator. The recovered
/// frequencies are merged into a Huffman tree stored in Root.
/// </summary>
static void HuffmanTree(string path)
{
    // Skip past the first line; its length fixes where the header starts.
    using var nameJumper = new StreamReader(path);
    var position = nameJumper.ReadLine().Length;
    nameJumper.Close();
    using (var File = new FileStream(path, FileMode.Open))
    {
        File.Position = position + 1;
        // separator1: 0 = reading data length, 1 = reading freq pairs, 2 = header done.
        int separator1 = 0;
        var buffer = new byte[BufferLenght];
        string Data_Lenght1 = "";
        string frequency = "";
        string Datamount = "";
        // final: 1 = just closed a pair; a second consecutive separator ends the header.
        int final = 0;
        byte bit = new byte();
        using (var reader = new BinaryReader(File))
        {
            while (reader.BaseStream.Position != reader.BaseStream.Length)
            {
                buffer = reader.ReadBytes(BufferLenght);
                foreach (var item in buffer)
                {
                    if (separator1 == 0)
                    {
                        // Phase 0: accumulate digits of the data length until the
                        // first separator character identifies which separator is in use.
                        if (Convert.ToChar(item) == '|' || Convert.ToChar(item) == 'ÿ' || Convert.ToChar(item) == 'ß')
                        {
                            separator1 = 1;
                            if (Convert.ToChar(item) == '|')
                            {
                                Separator = '|';
                            }
                            else if (Convert.ToChar(item) == 'ÿ')
                            {
                                Separator = 'ÿ';
                            }
                            else
                            {
                                Separator = 'ß';
                            }
                        }
                        else
                        {
                            Data_Lenght1 += Convert.ToChar(item).ToString();
                        }
                    }
                    else if (separator1 == 2)
                    {
                        break; // header fully parsed; ignore the payload
                    }
                    else
                    {
                        // Phase 1: parse "<byte><digits><sep>" pairs; two separators in a
                        // row (final == 1 then another separator) terminate the table.
                        if (final == 1 && Convert.ToChar(item) == Separator)
                        {
                            final = 2;
                            separator1 = 2;
                        }
                        else
                        {
                            final = 0;
                        }
                        if (Datamount == "")
                        {
                            // First byte of a pair is the symbol itself.
                            Datamount = Convert.ToChar(item).ToString();
                            bit = item;
                        }
                        else if (Convert.ToChar(item) == Separator && final == 0)
                        {
                            // Separator closes the pair: record symbol -> frequency.
                            FrequencyTable.Add(bit, Convert.ToInt32(frequency));
                            Datamount = "";
                            frequency = "";
                            final = 1;
                        }
                        else
                        {
                            frequency += Convert.ToChar(item).ToString();
                        }
                    }
                }
            }
        }
        DataLenght = Convert.ToDecimal(Data_Lenght1);
    }
    // Convert recovered counts to probability-weighted leaves and merge the two
    // least-probable nodes until only the root remains.
    List <HuffmanNode> FrequencyList = new List <HuffmanNode>();
    foreach (KeyValuePair <byte, int> Nodes in FrequencyTable)
    {
        FrequencyList.Add(new HuffmanNode(Nodes.Key, Convert.ToDecimal(Nodes.Value) / DataLenght));
    }
    FrequencyList = FrequencyList.OrderBy(x => x.Probability).ToList();
    while (FrequencyList.Count > 1)
    {
        FrequencyList = FrequencyList.OrderBy(x => x.Probability).ToList();
        HuffmanNode Link = LinkNodes(FrequencyList[1], FrequencyList[0]);
        FrequencyList.RemoveRange(0, 2);
        FrequencyList.Add(Link);
    }
    Root = FrequencyList[0];
}
/// <summary>
/// Reads a set of words from a stream, and adds the words and statistics pertaining to the words to a set of data structures.
/// </summary>
/// <param name="reader">The stream reader used to read the set of words (allows specifying a mocked reader for unit testing).</param>
/// <param name="trieBuilder">The trie builder to use to add the words to the trie (allows specifying a mocked builder for unit testing).</param>
/// <param name="wordFilterFunction">A Func to filter whether or not the specified word should be added to the trie. Accepts the word as a parameter, and returns a boolean indicating whether that word should be added to the trie.</param>
/// <param name="allWordsTrieRoot">The root of a character trie to populate with the words.</param>
/// <param name="allWords">A HashSet to populate with the words.</param>
/// <param name="fromCharacterFrequencies">A FrequencyTable to populate with the number of times each character is the 'from' character in a substitution.</param>
/// <param name="characterSubstitutionFrequencies">A FrequencyTable to populate with the number of times each pair of characters in a substitution occur.</param>
public void PopulateAdjacentWordDataStructures(IStreamReader reader, ICharacterTrieBuilder trieBuilder, Func <String, Boolean> wordFilterFunction, Dictionary <Char, TrieNode <Char> > allWordsTrieRoot, HashSet <String> allWords, FrequencyTable <Char> fromCharacterFrequencies, FrequencyTable <CharacterSubstitution> characterSubstitutionFrequencies)
{
    // Read all words and add them to the HashSet and trie.
    // The 'using' guarantees the reader is disposed even if reading throws.
    using (reader)
    {
        while (reader.EndOfStream == false)
        {
            String currentWord = reader.ReadLine();
            if (wordFilterFunction.Invoke(currentWord) == true)
            {
                // Skip duplicates so each word is added to the trie only once.
                if (allWords.Contains(currentWord) == false)
                {
                    allWords.Add(currentWord);
                    trieBuilder.AddWord(allWordsTrieRoot, currentWord, true);
                }
            }
        }
    }
    // Populate the frequency tables from every pair of adjacent words.
    CharacterTrieUtilities trieUtilities = new CharacterTrieUtilities();
    WordUtilities wordUtilities = new WordUtilities();
    foreach (String currentWord in allWords)
    {
        foreach (String adjacentWord in trieUtilities.FindAdjacentWords(allWordsTrieRoot, currentWord))
        {
            // Find the character which was substituted between the word and the adjacent word.
            Tuple <Char, Char> differingCharacters = wordUtilities.FindDifferingCharacters(currentWord, adjacentWord);
            Char fromCharacter = differingCharacters.Item1, toCharacter = differingCharacters.Item2;
            // Increment the data structures.
            fromCharacterFrequencies.Increment(fromCharacter);
            characterSubstitutionFrequencies.Increment(new CharacterSubstitution(fromCharacter, toCharacter));
        }
    }
}
/// <summary>
/// Huffman-encodes inStream into outStream: counts byte frequencies, builds the
/// Huffman tree by repeatedly merging the two least-frequent nodes, derives
/// codewords, writes the code table plus a length byte (original length mod 256),
/// then re-reads the input and emits the bit-packed codewords.
/// </summary>
public static void EncodeStream(Stream inStream, Stream outStream)
{
    int i = 0; // counter
    int newFreq;
    // the list of all Huffman Tree nodes (the working "forest")
    List<TreeNode<int, byte>> huffmanTree = new List<TreeNode<int, byte>>();
    // the root of Huffman Tree
    TreeNode<int, byte> huffmanTreeRoot = new TreeNode<int, byte>(0, 0);
    frequencyTable = new FrequencyTable();
    /*frequencyTable = new FrequencyEntry[256 ]; for (i = 0; i < frequencyTable.Length; ++i) frequencyTable[i] = new FrequencyEntry();*/
    calculateFrequency(inStream);
    i = 0;
    // construct a forest: one leaf node per byte value that occurs in the input
    for (i = 0; i < frequencyTable.MaxTableSize; i++)
    {
        if (frequencyTable.Hits[i] != 0)
            // add a new node to Huffman Tree, where character hits is the key
            // and the character itself is the value
            huffmanTree.Add(new TreeNode<int, byte>(frequencyTable.Hits[i], (byte)i));
    }
    // link the nodes: repeatedly merge the two lowest-frequency nodes until one remains
    while (huffmanTree.Count > 1)
    {
        huffmanTree.Sort(compareTreeNodes);
        newFreq = huffmanTree[0].Key + huffmanTree[1].Key;
        TreeNode<int, byte> node = new TreeNode<int, byte>(newFreq, 0);
        node.Left = huffmanTree[0];
        node.Right = huffmanTree[1];
        node.Left.Parent = node;
        node.Right.Parent = node;
        huffmanTree.RemoveAt(0);
        huffmanTree.RemoveAt(0);
        huffmanTree.Add(node);
        huffmanTreeRoot = node;
    }
    // calculate codewords by traversing the finished tree
    traverseTree(huffmanTreeRoot, "");
    // write table
    writeHuffmanTreeToStream(outStream);
    // Length byte lets the decoder ignore trailing padding bits (length mod 256).
    outStream.WriteByte((byte)(inStream.Length % 256));
    // write encoded stream: rewind the input and emit each byte's codeword bits.
    //string buffer = "", bits = "";
    //string pooledBuffer = "";
    BitBuffer bitBuffer = new BitBuffer(1, outStream);
    int nextChar = 0;
    inStream.Seek(0, SeekOrigin.Begin);
    nextChar = inStream.ReadByte();
    while (nextChar != -1)
    {
        bitBuffer.AddToBuffer(frequencyTable.Codewords[nextChar]);
        //pooledBuffer += frequencyTable.Codewords[nextChar];
        /*if (buffer.Length >= 8) { bits = buffer.Substring(0, 8); buffer = buffer.Substring(8); outStream.WriteByte(encodeBitString(bits)); }*/
        nextChar = inStream.ReadByte();
    }
    // Flush any partial byte remaining in the bit buffer.
    bitBuffer.Flush();
    //if (buffer.Length > 0)
    //    outStream.WriteByte(encodeBitString(buffer));
}
/// <summary>
/// Exercises the FrequencyTable class: Add/GetTableAsArray with string data in every
/// sort order, construction from an integer array, and text analysis of a string,
/// writing each resulting table to the console/log.
/// </summary>
public void FrequencyTableTest()
{
    /* Test the Add command and the GetTableAsArray command with strings */
    var table = new FrequencyTable<string>();
    // Same sample values, in the same insertion order as before, one Add per character
    const string sampleValues = "ZZZZZZZZZZZZZZZZBAAAABBACCCZZZrttZZZZZrZZCCCtZZZZZtCdZZCCCeeeCCCCCCeCC";
    foreach (char sampleValue in sampleValues)
    {
        table.Add(sampleValue.ToString());
    }

    var r = table.GetTableAsArray(FrequencyTableSortOrder.None);
    var r1 = table.GetTableAsArray(FrequencyTableSortOrder.Value_Ascending);
    var r2 = table.GetTableAsArray(FrequencyTableSortOrder.Value_Descending);
    var r3 = table.GetTableAsArray(FrequencyTableSortOrder.Frequency_Ascending);
    var r4 = table.GetTableAsArray(FrequencyTableSortOrder.Frequency_Descending);

    const string divider = "***************************************************";
    // The first four sections share an identical layout, so drive them from a table
    var orderedSections = new[]
    {
        Tuple.Create("Table unsorted:", r),
        Tuple.Create("Table sorted by value - ascending:", r1),
        Tuple.Create("Table sorted by value - descending:", r2),
        Tuple.Create("Table sorted by frequency - ascending:", r3)
    };
    for (int sectionIndex = 0; sectionIndex < orderedSections.Length; sectionIndex++)
    {
        Console.WriteLine(orderedSections[sectionIndex].Item1);
        Console.WriteLine(divider);
        foreach (FrequencyTableEntry<string> entry in orderedSections[sectionIndex].Item2)
        {
            log.Debug("{0} {1} {2} {3}", entry.Value, entry.AbsoluteFrequency, entry.RelativeFrequency, Math.Round(entry.Percentage, 2));
        }
        Console.WriteLine(divider);
        if (sectionIndex == orderedSections.Length - 1)
        {
            // The frequency-ascending section is followed by the extreme-value summary
            log.Debug("Scarcest Value:\t{0}\tFrequency: {1}", table.ScarcestValue, table.SmallestFrequency);
            log.Debug("Mode:\t\t{0}\tFrequency: {1}", table.Mode, table.HighestFrequency);
        }
        Console.WriteLine("");
    }

    Console.WriteLine("Table sorted by frequency - descending:");
    Console.WriteLine(divider);
    foreach (FrequencyTableEntry<string> entry in r4)
    {
        log.Debug("{0} {1} {2} {3}", entry.Value, entry.AbsoluteFrequency, entry.RelativeFrequency, Math.Round(entry.Percentage, 2));
    }
    Console.WriteLine(divider);
    Console.WriteLine("");

    /* now test the class with integers
     * initialize a new frequency table using an integer array */
    var test = new[] { 1, 1, 1, 2, 3, 3, 2, 2, 1, 1, 1, 2, 3, 4, 23, 1, 1, 1 };
    var table1 = new FrequencyTable<int>(test);
    Console.WriteLine("");
    Console.WriteLine("Integer table unsorted:");
    Console.WriteLine(divider);
    foreach (FrequencyTableEntry<int> integerEntry in table1)
    {
        log.Debug("{0} {1} {2} {3}", integerEntry.Value, integerEntry.AbsoluteFrequency, integerEntry.RelativeFrequency, Math.Round(integerEntry.Percentage, 2));
    }

    /* now test the class using a string */
    var testString = new FrequencyTable<string>("NON NOBIS DOMINE, NON NOBIS, SED NOMINI TUO DA GLORIAM", TextAnalyzeMode.LettersOnly);
    var stringArray = testString.GetTableAsArray(FrequencyTableSortOrder.Frequency_Descending);
    Console.WriteLine("");
    Console.WriteLine("Character table sorted by frequency - descending:");
    Console.WriteLine(divider);
    foreach (FrequencyTableEntry<string> characterEntry in stringArray)
    {
        log.Debug("{0} {1} {2} {3}", characterEntry.Value, characterEntry.AbsoluteFrequency, characterEntry.RelativeFrequency, Math.Round(characterEntry.Percentage, 2));
    }
    Console.Write("Press any key to exit");
}
protected void SetUp() { testFrequencyTable = new FrequencyTable <Char>(); }
/// <summary>
/// Tests that PopulateAdjacentWordDataStructures() reads every word from the stream in order,
/// adds each to the trie via the builder, and accumulates the expected character and
/// character-substitution frequencies, before disposing the reader.
/// </summary>
public void PopulateAdjacentWordDataStructures()
{
    // Mocked dependencies: the word source stream and the trie builder
    IStreamReader mockStreamReader = mockery.NewMock<IStreamReader>();
    ICharacterTrieBuilder mockTrieBuilder = mockery.NewMock<ICharacterTrieBuilder>();
    // Output data structures populated by the method under test
    HashSet<String> allWords = new HashSet<String>();
    FrequencyTable<Char> fromCharacterFrequencies = new FrequencyTable<Char>();
    FrequencyTable<CharacterSubstitution> characterSubstitutionFrequencies = new FrequencyTable<CharacterSubstitution>();
    List<String> testWords = new List<String>() { "read", "bead", "fail", "dead", "road", "reed", "calm", "real", "rear" };
    // Filter accepts only 4-character words (all of the test words qualify)
    Func<String, Boolean> wordFilterFunction = new Func<String, Boolean>((inputString) =>
    {
        if (inputString.Length == 4)
        {
            return (true);
        }
        else
        {
            return (false);
        }
    });
    // Expectations are strictly ordered: for each word, an end-of-stream check returning false,
    // a ReadLine returning the word, and an AddWord call; then a final end-of-stream check
    // returning true, followed by disposal of the reader
    using (mockery.Ordered)
    {
        foreach (String currentTestWord in testWords)
        {
            Expect.Once.On(mockStreamReader).GetProperty("EndOfStream").Will(Return.Value(false));
            Expect.Once.On(mockStreamReader).Method("ReadLine").WithNoArguments().Will(Return.Value(currentTestWord));
            // Invoking of action AddWordToCharacterTrieActionAction adds the test word to the trie (since this will not be done by the 'mockTrieBuilder')
            Expect.Once.On(mockTrieBuilder).Method("AddWord").With(trieRoot, currentTestWord, true).Will(new AddWordToCharacterTrieActionAction(new CharacterTrieBuilder(), trieRoot, currentTestWord));
        }
        Expect.Once.On(mockStreamReader).GetProperty("EndOfStream").Will(Return.Value(true));
        Expect.Once.On(mockStreamReader).Method("Dispose").WithNoArguments();
    }
    testDataStructureUtilities.PopulateAdjacentWordDataStructures(mockStreamReader, mockTrieBuilder, wordFilterFunction, trieRoot, allWords, fromCharacterFrequencies, characterSubstitutionFrequencies);
    // Verify the recorded character-substitution frequencies
    Assert.AreEqual(1, characterSubstitutionFrequencies.GetFrequency(new CharacterSubstitution('a', 'e')));
    Assert.AreEqual(1, characterSubstitutionFrequencies.GetFrequency(new CharacterSubstitution('b', 'd')));
    Assert.AreEqual(1, characterSubstitutionFrequencies.GetFrequency(new CharacterSubstitution('b', 'r')));
    Assert.AreEqual(1, characterSubstitutionFrequencies.GetFrequency(new CharacterSubstitution('d', 'b')));
    Assert.AreEqual(1, characterSubstitutionFrequencies.GetFrequency(new CharacterSubstitution('d', 'l')));
    Assert.AreEqual(2, characterSubstitutionFrequencies.GetFrequency(new CharacterSubstitution('d', 'r')));
    Assert.AreEqual(1, characterSubstitutionFrequencies.GetFrequency(new CharacterSubstitution('e', 'a')));
    Assert.AreEqual(1, characterSubstitutionFrequencies.GetFrequency(new CharacterSubstitution('e', 'o')));
    Assert.AreEqual(1, characterSubstitutionFrequencies.GetFrequency(new CharacterSubstitution('l', 'd')));
    Assert.AreEqual(1, characterSubstitutionFrequencies.GetFrequency(new CharacterSubstitution('l', 'r')));
    Assert.AreEqual(1, characterSubstitutionFrequencies.GetFrequency(new CharacterSubstitution('o', 'e')));
    Assert.AreEqual(1, characterSubstitutionFrequencies.GetFrequency(new CharacterSubstitution('r', 'b')));
    Assert.AreEqual(2, characterSubstitutionFrequencies.GetFrequency(new CharacterSubstitution('r', 'd')));
    Assert.AreEqual(1, characterSubstitutionFrequencies.GetFrequency(new CharacterSubstitution('r', 'l')));
    Assert.AreEqual(14, characterSubstitutionFrequencies.ItemCount);
    // Verify the recorded 'from'-character frequencies
    Assert.AreEqual(1, fromCharacterFrequencies.GetFrequency('a'));
    Assert.AreEqual(2, fromCharacterFrequencies.GetFrequency('b'));
    Assert.AreEqual(4, fromCharacterFrequencies.GetFrequency('d'));
    Assert.AreEqual(2, fromCharacterFrequencies.GetFrequency('e'));
    Assert.AreEqual(2, fromCharacterFrequencies.GetFrequency('l'));
    Assert.AreEqual(1, fromCharacterFrequencies.GetFrequency('o'));
    Assert.AreEqual(4, fromCharacterFrequencies.GetFrequency('r'));
    Assert.AreEqual(7, fromCharacterFrequencies.ItemCount);
    mockery.VerifyAllExpectationsHaveBeenMet();
}
/// <summary>
/// Runs the graph traversal comparison: for several word pairs, finds paths using a
/// breadth-first-equivalent A* search, Dijkstra's algorithm, bidirectional breadth-first
/// search, and several A* g(n)/h(n) weightings, reporting the edges explored by each.
/// </summary>
/// <remarks>
/// NOTE(review): a large region of unreachable test/utility code that previously followed
/// an unconditional 'return' at the end of this method (compiler warning CS0162) has been
/// removed.
/// </remarks>
public void Run()
{
    // The path to a file containing a dictionary of words
    const String dictionaryFilePath = @"C:\Temp\words2.txt";
    // The assumed maximum distance from a source word to a candidate word (used in weighting of g(n) and h(n) scores)
    const Int32 maximumSourceWordToCandidateWordDistance = 30;

    // Setup the word dictionary tree and other supporting data structures
    Dictionary<Char, TrieNode<Char>> allWordsTrieRoot = new Dictionary<Char, TrieNode<Char>>();
    HashSet<String> allWords = new HashSet<String>();
    FrequencyTable<Char> fromCharacterFrequencies = new FrequencyTable<Char>();
    FrequencyTable<CharacterSubstitution> characterSubstitutionFrequencies = new FrequencyTable<CharacterSubstitution>();

    // Populate the word dictionary tree and other supporting data structures
    System.IO.StreamReader underlyingReader = new System.IO.StreamReader(dictionaryFilePath);
    Algorithms.StreamReader reader = new Algorithms.StreamReader(underlyingReader);
    CharacterTrieBuilder trieBuilder = new CharacterTrieBuilder();
    // Accept only 4-letter words consisting entirely of letters
    Func<String, Boolean> wordFilterFunction = new Func<String, Boolean>((inputString) =>
    {
        foreach (Char currentCharacter in inputString)
        {
            if (Char.IsLetter(currentCharacter) == false)
            {
                return (false);
            }
        }
        if (inputString.Length == 4)
        {
            return (true);
        }
        else
        {
            return (false);
        }
    });
    DataStructureUtilities dataStructureUtils = new DataStructureUtilities();
    dataStructureUtils.PopulateAdjacentWordDataStructures(reader, trieBuilder, wordFilterFunction, allWordsTrieRoot, allWords, fromCharacterFrequencies, characterSubstitutionFrequencies);
    CharacterTrieUtilities trieUtilities = new CharacterTrieUtilities();

    // Setup the test data (word pairs to find paths between)
    List<Tuple<String, String>> testData = new List<Tuple<String, String>>()
    {
        new Tuple<String, String>("role", "band"),
        new Tuple<String, String>("pack", "sill"),
        new Tuple<String, String>("debt", "tyre"),
        new Tuple<String, String>("duct", "grid")
    };

    // Find paths
    foreach (Tuple<String, String> currentWordPair in testData)
    {
        // Setup priority calculator and graph path finder weighted purely by source-word
        // distance (used by the first three searches below)
        Int32 sourceWordToCandidateWordDistanceWeight = 1;
        Int32 numberOfCharactersMatchingDestinationWeight = 0;
        Int32 popularityOfChangeToCharacterWeight = 0;
        Int32 popularityOfCharacterChangeWeight = 0;
        CandidateWordPriorityCalculator priorityCalculator = new CandidateWordPriorityCalculator(maximumSourceWordToCandidateWordDistance, sourceWordToCandidateWordDistanceWeight, numberOfCharactersMatchingDestinationWeight, popularityOfChangeToCharacterWeight, popularityOfCharacterChangeWeight, allWordsTrieRoot, fromCharacterFrequencies, characterSubstitutionFrequencies);
        AdjacentWordGraphPathFinder pathFinder = new AdjacentWordGraphPathFinder(priorityCalculator, trieUtilities, allWordsTrieRoot);

        Console.WriteLine("-----------------------------------------------");
        Console.WriteLine(" Finding paths for strings '{0}' and '{1}'", currentWordPair.Item1, currentWordPair.Item2);
        Console.WriteLine("-----------------------------------------------");

        // Find a path via FindPathViaAStar() with distance-only weighting (h(n) weights are
        // all zero, so the search expands like breadth-first search — hence the label)
        Console.WriteLine(" Using breadth-first search...");
        Int32 numberOfEdgesExplored = 0;
        LinkedList<String> path = pathFinder.FindPathViaAStar(currentWordPair.Item1, currentWordPair.Item2, ref numberOfEdgesExplored);
        Console.Write(" Path: ");
        WritePathToConsole(path);
        Console.WriteLine(" Edges explored: {0}", numberOfEdgesExplored);
        Console.WriteLine();

        // Find the shortest path using Dijkstra's algorithm
        Console.WriteLine(" Using Dijkstra's algorithm...");
        numberOfEdgesExplored = 0;
        path = pathFinder.FindShortestPathViaDijkstrasAlgorithm(currentWordPair.Item1, currentWordPair.Item2, ref numberOfEdgesExplored);
        Console.Write(" Path: ");
        WritePathToConsole(path);
        Console.WriteLine(" Edges explored: {0}", numberOfEdgesExplored);
        Console.WriteLine();

        // Find a path using bidirectional breadth-first search
        Console.WriteLine(" Using bidirectional breadth-first search...");
        numberOfEdgesExplored = 0;
        path = pathFinder.FindPathViaBidirectionalBreadthFirstSearch(currentWordPair.Item1, currentWordPair.Item2, ref numberOfEdgesExplored);
        Console.Write(" Path: ");
        WritePathToConsole(path);
        Console.WriteLine(" Edges explored: {0}", numberOfEdgesExplored);
        Console.WriteLine();

        // Find paths using A* with various weightings of g(n) and h(n)
        // (the previously duplicated setup/run/report code lives in RunWeightedAStarSearch())
        RunWeightedAStarSearch(" Using A* ( 50% g(n) and 50% h(n) )...", 3, 1, 1, 1, maximumSourceWordToCandidateWordDistance, currentWordPair, allWordsTrieRoot, fromCharacterFrequencies, characterSubstitutionFrequencies, trieUtilities);
        RunWeightedAStarSearch(" Using A* ( 0% g(n) and 100% h(n) )...", 0, 1, 1, 1, maximumSourceWordToCandidateWordDistance, currentWordPair, allWordsTrieRoot, fromCharacterFrequencies, characterSubstitutionFrequencies, trieUtilities);
        RunWeightedAStarSearch(" Using A* ( 25% g(n) and 75% h(n) )...", 1, 1, 1, 1, maximumSourceWordToCandidateWordDistance, currentWordPair, allWordsTrieRoot, fromCharacterFrequencies, characterSubstitutionFrequencies, trieUtilities);
        RunWeightedAStarSearch(" Using A* ( 25% g(n) and 75% h(n) with custom h(n) weighting )...", 1, 2, 1, 0, maximumSourceWordToCandidateWordDistance, currentWordPair, allWordsTrieRoot, fromCharacterFrequencies, characterSubstitutionFrequencies, trieUtilities);
        Console.WriteLine();
    }
}

/// <summary>
/// Runs a single A* search between the specified word pair using the specified g(n)/h(n)
/// weights, writing the description line, the resulting path, and the number of edges
/// explored to the console (followed by a blank line).
/// </summary>
private void RunWeightedAStarSearch(String description, Int32 sourceWordToCandidateWordDistanceWeight, Int32 numberOfCharactersMatchingDestinationWeight, Int32 popularityOfChangeToCharacterWeight, Int32 popularityOfCharacterChangeWeight, Int32 maximumSourceWordToCandidateWordDistance, Tuple<String, String> wordPair, Dictionary<Char, TrieNode<Char>> allWordsTrieRoot, FrequencyTable<Char> fromCharacterFrequencies, FrequencyTable<CharacterSubstitution> characterSubstitutionFrequencies, CharacterTrieUtilities trieUtilities)
{
    Console.WriteLine(description);
    CandidateWordPriorityCalculator priorityCalculator = new CandidateWordPriorityCalculator(maximumSourceWordToCandidateWordDistance, sourceWordToCandidateWordDistanceWeight, numberOfCharactersMatchingDestinationWeight, popularityOfChangeToCharacterWeight, popularityOfCharacterChangeWeight, allWordsTrieRoot, fromCharacterFrequencies, characterSubstitutionFrequencies);
    AdjacentWordGraphPathFinder pathFinder = new AdjacentWordGraphPathFinder(priorityCalculator, trieUtilities, allWordsTrieRoot);
    Int32 numberOfEdgesExplored = 0;
    LinkedList<String> path = pathFinder.FindPathViaAStar(wordPair.Item1, wordPair.Item2, ref numberOfEdgesExplored);
    Console.Write(" Path: ");
    WritePathToConsole(path);
    Console.WriteLine(" Edges explored: {0}", numberOfEdgesExplored);
    Console.WriteLine();
}
public string SubmitReview(FrequencyTable frequency) { return(ratingRepo.SubmitReview(frequency)); }
/// <summary> /// Fired when the 'use quantity column value as frequency' checkbox is ticked/unticked /// This will clear the master frequency table and re-add data from all bound grids. /// The results will then be recalculated. /// </summary> /// <param name="sender"></param> /// <param name="e"></param> private void chkQuantityAsFrequency_CheckedChanged(object sender, EventArgs e) { // Clear all current data. lock (_frequencyTable) { _frequencyTable = new FrequencyTable<decimal>(); } // Get all bound grid's selected values Dictionary<DataGridView, GridData>.Enumerator enumerator = _bindings.GetEnumerator(); while (enumerator.MoveNext()) { DataGridView grid = enumerator.Current.Key; // If this grid is not bound then don't worry about it. if (_bindings.ContainsKey(grid) && _bindings[grid].Bound) { AddSelectedData(grid); } } Recalculate(); }
/// <summary>
/// Demonstration driver for FrequencyTable: verifies input-order data retrieval,
/// prints descriptive statistics for a double sample, lists the table in every sort
/// order, and shows cumulative frequency tables for double, int and string data.
/// </summary>
static void Main(string[] args)
{
    int[] rint = new int[10000];
    Console.WriteLine("Initializing pseudorandom integer array - 10000 values");
    // Fixed seed so the run is reproducible
    Random ra = new Random(1001);
    for (int i = 0; i < rint.Length; i++)
    {
        rint[i] = (int)(ra.NextDouble() * 10.0);
    }
    Console.WriteLine("Initializing frequency table");
    FrequencyTable<int> rtable = new FrequencyTable<int>(rint);
    Console.WriteLine("Get data in input-order");
    int[] br = rtable.GetData(true);
    for (int i = 0; i < br.Length; i++)
    {
        // BUG FIX: the format string previously used '{1}' for both placeholders,
        // so the input value was never displayed.
        Console.Write("Input array: {0} Output array: {1}", rint[i].ToString(), br[i].ToString());
        if (br[i] != rint[i])
        {
            Console.WriteLine(" --> NOT EQUAL");
        }
        else
        {
            Console.WriteLine(" --> EQUAL");
        }
    }
    #region data
    double[] ddd = new double[] { 5, 15, 15, 15, 25, 25, 25, 25, 25, 25, 25, 25, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 75, 75, 75, 75, 75, 75, 75, 75, 95 };
    #endregion
    FrequencyTable<double> table = new FrequencyTable<double>(ddd);
    // display descriptive statistics
    Console.WriteLine("Sample size = {0}", table.SampleSize);
    Console.WriteLine("Mean = {0}", table.Mean);
    Console.WriteLine("Median = {0}", table.Median);
    Console.WriteLine("Sample variance = {0}", table.VarianceSample);
    Console.WriteLine("Population variance = {0}", table.VariancePop);
    Console.WriteLine("Standard deviation (Sample) = {0}", table.StandardDevSample);
    Console.WriteLine("Standard deviation (Population) = {0}", table.StandardDevPop);
    Console.WriteLine("Standard error of the mean = {0}", table.StandardError);
    Console.WriteLine("Minimum = {0}", table.Minimum);
    Console.WriteLine("Maximum = {0}", table.Maximum);
    Console.WriteLine("Range = {0}", table.Range);
    Console.WriteLine("Skewness = {0}", table.Skewness);
    Console.WriteLine("Kurtosis = {0}", table.Kurtosis);
    Console.WriteLine("Kurtosis excess = {0}", table.KurtosisExcess);
    Console.WriteLine("Highest Frequency = {0}", table.HighestFrequency);
    Console.WriteLine("Mode = {0}", table.Mode);
    Console.WriteLine("Smallest frequency = {0}", table.SmallestFrequency);
    Console.WriteLine("Scarcest value = {0}", table.ScarcestValue);
    Console.WriteLine("Sum over all values = {0}", table.Sum);
    if (table.IsGaussian)
    {
        // TYPO FIX: messages previously read "Date are ..."
        Console.WriteLine("Data are normally distributed. p = {0}", table.P_Value);
    }
    else
    {
        Console.WriteLine("Data are NOT normally distributed. p = {0}", table.P_Value);
    }
    Console.WriteLine("Descriptive: Done");
    Console.WriteLine("Press ENTER to continue");
    Console.ReadLine();
    // (An unused duplicate call to GetCumulativeFrequencyTable(), whose result 'sorted'
    // was never read, has been removed — 'cf' below performs the same call.)
    FrequencyTableEntry<double>[] r = table.GetTableAsArray(FrequencyTableSortOrder.None);
    FrequencyTableEntry<double>[] r1 = table.GetTableAsArray(FrequencyTableSortOrder.Value_Ascending);
    FrequencyTableEntry<double>[] r2 = table.GetTableAsArray(FrequencyTableSortOrder.Value_Descending);
    FrequencyTableEntry<double>[] r3 = table.GetTableAsArray(FrequencyTableSortOrder.Frequency_Ascending);
    FrequencyTableEntry<double>[] r4 = table.GetTableAsArray(FrequencyTableSortOrder.Frequency_Descending);
    Console.Clear();
    WriteTable("Table unsorted:", r);
    Console.WriteLine();
    WriteTable("Table sorted by value - ascending:", r1);
    Console.WriteLine();
    WriteTable("Table sorted by value - descending:", r2);
    Console.WriteLine();
    WriteTable("Table sorted by frequency - ascending:", r3);
    Console.WriteLine("Scarcest Value:\t{0}\tFrequency: {1}", table.ScarcestValue, table.SmallestFrequency);
    Console.WriteLine("Mode:\t\t{0}\tFrequency: {1}", table.Mode, table.HighestFrequency);
    Console.WriteLine();
    WriteTable("Table sorted by frequency - descending:", r4);
    Console.WriteLine("Press ENTER to display cumulative frequencies!");
    Console.ReadLine();
    CumulativeFrequencyTableEntry<double>[] cf = table.GetCumulativeFrequencyTable(CumulativeFrequencyTableFormat.EachDatapoint);
    Console.WriteLine("Cumulative frequencies - the cumulative density function:");
    Console.WriteLine("***************************************************");
    foreach (CumulativeFrequencyTableEntry<double> f in cf)
    {
        Console.WriteLine("{0} {1} {2}", f.Value, f.CumulativeAbsoluteFrequency, f.CumulativeRelativeFrequency);
    }
    Console.WriteLine("***************************************************");
    /* now test the class with integers
     * initialize a new frequency table using an integer array*/
    int[] test = new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 };
    FrequencyTable<int> table1 = new FrequencyTable<int>(test);
    Console.WriteLine();
    Console.WriteLine("Integer table unsorted:");
    Console.WriteLine("***************************************************");
    foreach (FrequencyTableEntry<int> f in table1)
    {
        Console.WriteLine("{0} {1} {2} {3}", f.Value, f.AbsoluteFreq, f.RelativeFreq, Math.Round(f.Percentage, 2));
    }
    /* now test the class using a string */
    FrequencyTable<string> testString = new FrequencyTable<string>("NON NOBIS DOMINE, NON NOBIS, SED NOMINI TUO DA GLORIAM", TextAnalyzeMode.LettersOnly);
    FrequencyTableEntry<string>[] stringArray = testString.GetTableAsArray(FrequencyTableSortOrder.Frequency_Descending);
    Console.WriteLine();
    Console.WriteLine("Character table sorted by frequency - descending:");
    Console.WriteLine("***************************************************");
    foreach (FrequencyTableEntry<string> f in stringArray)
    {
        Console.WriteLine("{0} {1} {2} {3}", f.Value, f.AbsoluteFreq, f.RelativeFreq, Math.Round(f.Percentage, 2));
    }
    CumulativeFrequencyTableEntry<string>[] scf = testString.GetCumulativeFrequencyTable(CumulativeFrequencyTableFormat.EachDatapoint);
    Console.WriteLine("Cumulative frequencies - the cumulative density function:");
    Console.WriteLine("***************************************************");
    foreach (CumulativeFrequencyTableEntry<string> f in scf)
    {
        Console.WriteLine("{0} {1} {2}", f.Value, f.CumulativeAbsoluteFrequency, f.CumulativeRelativeFrequency);
    }
    Console.WriteLine("***************************************************");
    Console.Write("Press any key to exit");
    Console.ReadKey();
}

/// <summary>
/// Writes a titled listing of frequency table entries (value, absolute frequency,
/// relative frequency, rounded percentage) framed by divider lines. Extracted from
/// Main, where this passage was repeated five times.
/// </summary>
private static void WriteTable(string title, FrequencyTableEntry<double>[] entries)
{
    Console.WriteLine(title);
    Console.WriteLine("***************************************************");
    foreach (FrequencyTableEntry<double> f in entries)
    {
        Console.WriteLine("{0} {1} {2} {3}", f.Value, f.AbsoluteFreq, f.RelativeFreq, Math.Round(f.Percentage, 2));
    }
    Console.WriteLine("***************************************************");
}