/// <summary>
/// Exploratory end-to-end demo: stores a web page as a links sequence.
/// Downloads a Wikipedia page (via <c>GetPageContents</c>), encodes it through the
/// unicode map, precompresses it with <c>Data.Core.Sequences.Compressor</c>, builds a
/// balanced sequence variant, verifies the round-trip back to the original string,
/// and finally probes full/partial sequence matching for the substring "div".
/// Blocks on <c>Console.ReadKey()</c> before returning.
/// The commented-out sections are earlier timing experiments kept for reference.
/// NOTE(review): <c>Global.Trash</c> appears to be a sink that keeps values alive
/// (e.g. to defeat dead-code elimination) — confirm its intent before removing.
/// </summary>
public static void Test()
{
    // Start from a fresh store file on every run.
    File.Delete("web.links");

    using (var memoryManager = new UInt64LinksMemoryManager("web.links", 8 * 1024 * 1024))
    using (var links = new UInt64Links(memoryManager))
    {
        var syncLinks = new SynchronizedLinks<ulong>(links);
        // Seeds the store with links for the unicode character map.
        UnicodeMap.InitNew(syncLinks);
        var sequences = new Sequences(syncLinks);

        // Get content
        const string url = "https://en.wikipedia.org/wiki/Main_Page";
        var pageContents = GetPageContents(url);

        var totalChars = url.Length + pageContents.Length;

        Global.Trash = totalChars;

        // Store the URL itself as a sequence so it can later point at the page content.
        var urlLink = sequences.CreateBalancedVariant(UnicodeMap.FromStringToLinkArray(url));

        var responseSourceArray = UnicodeMap.FromStringToLinkArray(pageContents);

        // Experiment: timing of the uncompressed balanced variant.
        //for (var i = 0; i < 1; i++)
        //{
        //    var sw01 = Stopwatch.StartNew();
        //    var responseLink = sequences.CreateBalancedVariant(responseSourceArray);
        //    sw01.Stop();
        //    Console.WriteLine(sw01.Elapsed);
        //}

        // Experiment: grouping characters before building the sequence.
        //var sw0 = Stopwatch.StartNew();
        //var groups = UnicodeMap.FromStringToLinkArrayGroups(response);
        //var responseLink = sequences.CreateBalancedVariant(groups); sw0.Stop();

        // Experiments: alternative precompression strategies.
        //var sw1 = Stopwatch.StartNew();
        //var responseCompressedArray1 = links.PrecompressSequence1(responseSourceArray); sw1.Stop();

        //var sw2 = Stopwatch.StartNew();
        //var responseCompressedArray2 = links.PrecompressSequence2(responseSourceArray); sw2.Stop();

        // [+] One could search not for the maximal pair, but for the first one occurring
        //     at least twice — slow, high quality, but not the highest.
        // [+] Or use a global dictionary instead of a local one (i.e. count once, then
        //     only perform replacements) — fast, but the quality is low.

        // Precompress0 — best speed/quality ratio.
        // (Same as Data.Core.Sequences.Compressor.Precompress.)
        ulong[] responseCompressedArray3 = null;
        for (var i = 0; i < 1; i++)
        {
            var sw3 = Stopwatch.StartNew();
            var compressor = new Data.Core.Sequences.Compressor(syncLinks, sequences, 1);
            responseCompressedArray3 = compressor.Precompress(responseSourceArray);
            sw3.Stop();
            Console.WriteLine(sw3.Elapsed);
        }

        // Combine Groups and Compression (first Compression, then Groups) (DONE)
        // However you group after compression, no further compression is gained
        // (strange, but a fact).
        //var groups = UnicodeMap.FromLinkArrayToLinkArrayGroups(responseCompressedArray3);
        //var responseLink2 = sequences.CreateBalancedVariant(groups); // Equal to `var responseLink2 = sequences.CreateBalancedVariant(responseCompressedArray3);`

        // Debug probe: compare the two alternative precompression outputs element-wise.
        //for (int i = 0; i < responseCompressedArray1.Length; i++)
        //{
        //    if (responseCompressedArray1[i] != responseCompressedArray2[i])
        //    {
        //    }
        //}

        //var responseLink1 = sequences.CreateBalancedVariant(responseCompressedArray1);
        var responseLink2 = sequences.CreateBalancedVariant(responseCompressedArray3);

        //var decompress1 = sequences.FormatSequence(responseLink1);
        var decompress2 = sequences.FormatSequence(responseLink2);

        Global.Trash = decompress2;

        // Debug probe: compare the two formatted decompressions element-wise.
        //for (int i = 0; i < decompress1.Length; i++)
        //{
        //    if (decompress1[i] != decompress2[i])
        //    {
        //    }
        //}

        // Round-trip check: decode the stored sequence back to a string and
        // compare it with the original page contents.
        var unpack = UnicodeMap.FromSequenceLinkToString(responseLink2, syncLinks);

        Global.Trash = (unpack == pageContents);

        // Links created for the page itself, excluding the fixed unicode-map links.
        var totalLinks = syncLinks.Count() - UnicodeMap.MapSize;
        Console.WriteLine(totalLinks);
        Global.Trash = totalLinks;

        // Associate the URL sequence with the page-content sequence.
        syncLinks.CreateAndUpdate(urlLink, responseLink2);

        // Query demo: find sequences that (fully / partially) match "div".
        var divLinksArray = UnicodeMap.FromStringToLinkArray("div");

        var fullyMatched = sequences.GetAllMatchingSequences1(divLinksArray);
        var partiallyMatched = sequences.GetAllPartiallyMatchingSequences1(divLinksArray);

        // NOTE(review): this LINQ query is never enumerated, so it performs no work
        // here — presumably kept for inspection under a debugger; confirm.
        var intersection = fullyMatched.Intersect(partiallyMatched);
    }

    Console.ReadKey();
}
/// <summary>
/// Measurement harness for sequence storage: fetches one Wikipedia page
/// (via <c>GetPageContents</c>), then prints timings and link counts for
/// (1) three baseline runs of the plain balanced-variant build, and
/// (2) a sweep of the compressor's minimum-frequency threshold.
/// Each run starts from a fresh "stats.links" store file.
/// Blocks on <c>Console.ReadKey()</c> before returning.
/// </summary>
public static void Stats()
{
    // Get content
    const string url = "https://en.wikipedia.org/wiki/Main_Page";
    var pageContents = GetPageContents(url);
    // Encode the page once; every measurement below reuses this same input array.
    var responseSourceArray = UnicodeMap.FromStringToLinkArray(pageContents);

    // Baseline: balanced variant without precompression, three runs on fresh stores.
    for (var run = 0; run < 3; run++)
    {
        File.Delete("stats.links");
        using (var memoryManager = new UInt64LinksMemoryManager("stats.links", 8 * 1024 * 1024))
        using (var links = new UInt64Links(memoryManager))
        {
            var syncLinks = new SynchronizedLinks<ulong>(links);
            UnicodeMap.InitNew(syncLinks);
            var sequences = new Sequences(syncLinks);

            var stopwatch = Stopwatch.StartNew();
            sequences.CreateBalancedVariant(responseSourceArray);
            stopwatch.Stop();

            // Links created for the page itself, excluding the fixed unicode-map links.
            var totalLinks = syncLinks.Count() - UnicodeMap.MapSize;
            Console.WriteLine($"Balanced Variant: {stopwatch.Elapsed}, {responseSourceArray.Length}, {totalLinks}");
        }
    }

    // Sweep: raise the compressor's minimum-frequency threshold each step
    // (the increment grows logarithmically with the step index) and measure
    // precompression + balanced-variant build on a fresh store each time.
    var minFrequency = 0UL;
    for (var step = 1; step < 200; step++)
    {
        minFrequency += (ulong)(1 + Math.Log(step));

        File.Delete("stats.links");
        using (var memoryManager = new UInt64LinksMemoryManager("stats.links", 8 * 1024 * 1024))
        using (var links = new UInt64Links(memoryManager))
        {
            var syncLinks = new SynchronizedLinks<ulong>(links);
            UnicodeMap.InitNew(syncLinks);
            var sequences = new Sequences(syncLinks);

            var stopwatch = Stopwatch.StartNew();
            var compressor = new Data.Core.Sequences.Compressor(syncLinks, sequences, minFrequency);
            var responseCompressedArray3 = compressor.Precompress(responseSourceArray);
            sequences.CreateBalancedVariant(responseCompressedArray3);
            stopwatch.Stop();

            var totalLinks = syncLinks.Count() - UnicodeMap.MapSize;
            Console.WriteLine($"{stopwatch.Elapsed}, {minFrequency}, {responseSourceArray.Length}, {totalLinks}");
        }
    }

    Console.ReadKey();
}