// Main entry point: times a pipeline run over a fixed number of frames,
// then validates the simulated output "file" (frame count and ordering).
static void Main(string[] args)
{
    // Parameters.
    long totalframes = 1000;
    var vreader = new VReader();

    // Time the whole run.
    var stopwatch = Stopwatch.StartNew();

    // Try both - notice the difference in performance.
    //RunSequential(vreader, totalframes);
    RunPipeline(vreader, totalframes).Wait();

    stopwatch.Stop();
    Console.WriteLine("Elapsed ms: {0}.", stopwatch.ElapsedMilliseconds);

    // "File" validation: count.
    Console.WriteLine(FrameFile.LongCount() == totalframes
        ? "The count is correct."
        : "The count is incorrect.");

    // "File" validation: frame order (the file must already be sorted).
    var orderedFrames = FrameFile.OrderBy(i => i).ToArray();
    Console.WriteLine(orderedFrames.SequenceEqual(FrameFile)
        ? "Frame order in the file is correct."
        : "Frame order in the file is incorrect.");

    Console.ReadLine();
}
// Main entry point: times a pipeline run, then checks that the output
// "file" holds exactly the frames 1..totalframes in ascending order.
private static void Main(string[] args)
{
    // Arguments.
    long totalframes = 1000;
    var vreader = new VReader();

    // Time the whole run.
    var timer = Stopwatch.StartNew();

    // Try both for comparison.
    //RunSequential(vreader, totalframes);
    RunPipeline(vreader, totalframes).Wait();

    timer.Stop();
    Console.WriteLine("Elapsed ms: {0}.", timer.ElapsedMilliseconds);

    // Validation: count, order and contents in a single comparison
    // against the expected sequence 1..totalframes.
    var expectedFrames = Range(1, totalframes);
    Console.WriteLine(expectedFrames.SequenceEqual(FrameFile)
        ? "Frame count, order and contents in the file is CORRECT."
        : "Frame count, order and contents in the file is INCORRECT.");

    Console.ReadLine();
}
// This is a modification of Stephen Toub's Pipelines
// example from Patterns Of Parallel Programming.
//
// Three-stage producer/consumer pipeline over BlockingCollections:
// stage 1 reads frames, stage 2 processes them in parallel batches
// (batch size = processor count), stage 3 records each frame's ID in
// FrameFile and disposes the bitmap. Stages 1 and 2 call
// CompleteAdding in a finally block so downstream stages terminate
// even when an upstream stage throws.
private static async Task RunPipeline(VReader vreader, long totalframes)
{
    var rawFrames = new BlockingCollection<Bitmap>();
    var processedFrames = new BlockingCollection<Bitmap>();

    // Stage 1: read raw frames.
    var readTask = Task.Run(() =>
    {
        try
        {
            for (long n = 0; n < totalframes; n++)
            {
                rawFrames.Add(vreader.ReadVideoFrame());
            }
        }
        finally
        {
            // Signal completion even on failure so stage 2 can drain and exit.
            rawFrames.CompleteAdding();
        }
    });

    // Stage 2: process frames in parallel.
    var processTask = Task.Run(async () =>
    {
        try
        {
            var degreesOfParallellism = Environment.ProcessorCount;
            var consumingEnumerable = rawFrames.GetConsumingEnumerable();

            // Start our parallel tasks.
            // Each pass takes one batch off the consuming enumerable (each
            // Take(...) enumeration removes items from the collection,
            // blocking until items arrive or adding completes), processes
            // the batch concurrently, then publishes the results in batch
            // order so the downstream stage sees frames in read order.
            while (true)
            {
                var tasks = consumingEnumerable
                    .Take(degreesOfParallellism)
                    .Select(frame => Task.Run(() => { Process(frame); return (frame); }))
                    .ToArray();

                // An empty batch means the producer has completed and the
                // collection is drained.
                if (tasks.Length == 0)
                {
                    break;
                }

                await Task.WhenAll(tasks);

                foreach (var t in tasks)
                {
                    processedFrames.Add(t.Result);
                }
            }
        }
        finally
        {
            // Signal completion even on failure so stage 3 can drain and exit.
            processedFrames.CompleteAdding();
        }
    });

    // Stage 3: write results to file and dispose of the frame.
    var writeTask = Task.Run(() =>
    {
        foreach (var processedFrame in processedFrames.GetConsumingEnumerable())
        {
            FrameFile.Add(processedFrame.ID);
            processedFrame.Dispose();
        }
    });

    // Propagate the first exception (if any) from any stage to the caller.
    await Task.WhenAll(readTask, processTask, writeTask);
}
// Sequential baseline (for comparison with the pipeline): read, process
// and write one frame at a time, disposing each frame when done.
private static void RunSequential(VReader vreader, long totalframes)
{
    long framesRemaining = totalframes;
    while (framesRemaining-- > 0)
    {
        using (var frame = vreader.ReadVideoFrame())
        {
            Process(frame);
            WriteToFile(frame);
        }
    }
}
// Wires up the stores and clients this manager depends on; identity
// plumbing (principal, user store, mapper) is delegated to the base class.
public TrainSetManager(IPrincipal principal, ApiUserStore userStore, IMapper mapper,
                       ITrainSetStore trainSetStore,
                       ITwitterCollectionsStore twitterCollectionsStore,
                       ITwitterSourcesStore twitterSourcesStore,
                       IUserSocialsStore userSocialsStore,
                       TwitterClient twitterClient, VReader vReader,
                       StorageBlobClient storageBlobClient)
    : base(principal, userStore, mapper)
{
    // External clients.
    _twitterClient = twitterClient;
    _vReader = vReader;
    _storageBlobClient = storageBlobClient;

    // Data stores.
    _trainSets = trainSetStore;
    _twitterCollectionsStore = twitterCollectionsStore;
    _twitterSourcesStore = twitterSourcesStore;
    _userSocialsStore = userSocialsStore;
}
// Round-trip check: take the vector for "Hello", wrap it in a fresh
// Representation, and verify the vocabulary maps it back to a word by
// nearest-neighbour lookup (result is traced, not asserted).
public void CryptDecryptWord()
{
    var vocabReader = new VReader(Consts.VOCAB_PATH);
    vocabReader.UploadBinary();

    var original = vocabReader.Vocab.GetRepresentationFor("Hello");
    var copy = new Representation(original.NumericVector);

    var nearest = vocabReader.Vocab.Distance(copy, 1).FirstOrDefault();
    var recoveredWord = nearest?.Representation.WordOrNull;
    Trace.WriteLine(recoveredWord);
}
// This is a modification of Stephen Toub's Pipelines
// example from Patterns Of Parallel Programming.
//
// Synchronous variant: three stages connected by BlockingCollections,
// blocking the caller until every stage has finished.
private static void RunPipeline(VReader vreader, long totalframes)
{
    var rawFrames = new BlockingCollection<Bitmap>();
    var processedFrames = new BlockingCollection<Bitmap>();

    // Stage 1: produce raw frames until the requested count is reached.
    var readTask = Task.Run(() =>
    {
        try
        {
            for (long produced = 0; produced < totalframes; produced++)
            {
                rawFrames.Add(vreader.ReadVideoFrame());
            }
        }
        finally
        {
            // Unblock stage 2 even if reading fails part-way.
            rawFrames.CompleteAdding();
        }
    });

    // Stage 2: transform frames; two interchangeable implementations.
    var processTask = Task.Run(() =>
    {
        try
        {
            // Try both - see which performs better in your scenario.
            Step2WithParallelTasks(rawFrames, processedFrames);
            //Step2WithPLinq(rawFrames, processedFrames);
        }
        finally
        {
            // Unblock stage 3 even if processing fails part-way.
            processedFrames.CompleteAdding();
        }
    });

    // Stage 3: persist each processed frame, then release its bitmap.
    var writeTask = Task.Run(() =>
    {
        foreach (var frame in processedFrames.GetConsumingEnumerable())
        {
            WriteToFile(frame);
            frame.Dispose();
        }
    });

    // Block until all three stages complete (faults surface here).
    Task.WaitAll(readTask, processTask, writeTask);
}
// This is a modification of Stephen Toub's Pipelines
// example from Patterns Of Parallel Programming.
//
// Three-stage pipeline over BlockingCollections: stage 1 reads frames,
// stage 2 processes them one at a time, stage 3 records each frame's ID
// in FrameFile. Stages 1 and 2 call CompleteAdding in a finally block so
// downstream stages terminate even when an upstream stage throws.
private static async Task RunPipeline(VReader vreader, long totalframes)
{
    var rawFrames = new BlockingCollection<Bitmap>();
    var processedFrames = new BlockingCollection<Bitmap>();

    // Stage 1: read raw frames.
    var readTask = Task.Run(() =>
    {
        try
        {
            for (long n = 0; n < totalframes; n++)
            {
                rawFrames.Add(vreader.ReadVideoFrame());
            }
        }
        finally
        {
            rawFrames.CompleteAdding();
        }
    });

    // Stage 2: process each frame as it arrives.
    var processTask = Task.Run(() =>
    {
        try
        {
            foreach (var frame in rawFrames.GetConsumingEnumerable())
            {
                Process(frame);
                processedFrames.Add(frame);
            }
        }
        finally
        {
            processedFrames.CompleteAdding();
        }
    });

    // Stage 3: record each frame's ID in the "file".
    // FIX: the original AddRange(...Select(f => f.ID)) never disposed the
    // processed frames, leaking whatever resources each Bitmap holds; the
    // other pipeline variants in this codebase dispose here, so this one
    // now does too once the ID has been recorded.
    var writeTask = Task.Run(() =>
    {
        foreach (var processedFrame in processedFrames.GetConsumingEnumerable())
        {
            FrameFile.Add(processedFrame.ID);
            processedFrame.Dispose();
        }
    });

    // Propagate the first exception (if any) from any stage to the caller.
    await Task.WhenAll(readTask, processTask, writeTask);
}
// End-to-end word-embedding experiment: loads the Game Of Thrones text,
// builds 4-word training windows from the word2vec vocabulary (first
// three word vectors concatenated as input, fourth word's vector as the
// target), trains each candidate network at several learning rates, and
// traces the predicted vs. expected fourth word for a held-out set.
public async Task NeuralWordsTest()
{
    // get GAME OF THRONES
    string regexText = string.Empty;
    try
    {
        var fullText = await File.ReadAllTextAsync(Consts.GAME_OF_THRONES_PATH);
        // Strip "Page NNN" artifacts left over from the text extraction.
        regexText = new Regex("Page [0-9]+").Replace(fullText, string.Empty);
    }
    catch (Exception ex)
    {
        // FIX: the original swallowed the exception into an unused local
        // (var e = ex;). Keep the best-effort behavior — an empty text
        // simply yields an empty training set — but make the failure visible.
        Trace.WriteLine($"Failed to load source text: {ex}");
    }

    var vReader = new VReader(Consts.VOCAB_PATH);
    vReader.UploadBinary();

    var bag = MRWordBag.CreateToWords(regexText, 4);

    // Create training vectors.
    var allSet = new List<Tuple<double[], double[]>>();
    foreach (var step in bag.Read())
    {
        // Skip any window containing a word missing from the vocabulary.
        // FIX: the original tested the identical ContainsWord condition
        // twice with ||; a single check is equivalent.
        bool isValid = true;
        foreach (var v in step)
        {
            if (!vReader.Vocab.ContainsWord(v))
            {
                isValid = false;
                break;
            }
        }
        if (!isValid)
        {
            continue;
        }

        // Input: the first three word vectors, concatenated.
        var forInput = step.Take(3);
        List<double> input = new List<double>();
        foreach (var i in forInput)
        {
            input.AddRange(vReader.Vocab.GetRepresentationFor(i).NumericVector.Select(x => (double)x).ToList());
        }

        // Target: the last word's vector.
        var forOut = step.Last();
        double[] output = vReader.Vocab.GetRepresentationFor(forOut).NumericVector.Select(x => (double)x).ToArray();
        allSet.Add(new Tuple<double[], double[]>(input.ToArray(), output));
    }

    // Hold out the final 10 samples for the eyeball check below.
    var trainSet = allSet.Take(allSet.Count - 10).ToArray();
    var checkSet = allSet.TakeLast(10).ToArray();

    var trainRates = new double[] { 0.00005d, 0.00001d };
    foreach (var rate in trainRates)
    {
        foreach (var net in NetsWordTest)
        {
            Trace.WriteLine($"Train net: layers: {net.HiddenLayersCount} | neurons: {net.Hidden.First().NeuronsCount}\tRate: {rate}");
            var trainer = new NeuralNetTrainer(net, trainSet, 500, 1, rate, 1, Consts.TraceLog);
            var trainResult = trainer.SimpleTrain();

            Trace.WriteLine("-- check net --");
            foreach (var s in checkSet)
            {
                var response = net.Activate(s.Item1);
                var responseR = new Representation(response.Select(x => (float)x).ToArray());
                // Nearest vocabulary word to the net's output, and to the target.
                var responseWord = vReader.Vocab.Distance(responseR, 1)?.FirstOrDefault()?.Representation;
                var correct = vReader.Vocab.Distance(new Representation(s.Item2.Select(x => (float)x).ToArray()), 1)?.FirstOrDefault()?.Representation;
                Trace.WriteLine($"Correct: {correct.WordOrNull}\tResponse: {responseWord.WordOrNull}");
            }

            // Persist the trained net under a name encoding its shape and results.
            // NOTE(review): hardcoded "d://" output path — consider making configurable.
            var name = $"Neural net ({net.HiddenLayersCount}-{net.Hidden.First().NeuronsCount}-epochs-{trainResult.EpochFinished}-error-{trainResult.ResultError}-time-{trainResult.TotalTimeMs})";
            await MRSerializer.ToFile($"d://{name}.txt", net, true);
        }
    }
}