static void Main(string[] args)
{
    var crawler = new WorkerFactory().GetWorker(args);
    if (crawler != null)
    {
        crawler.DoWork();
    }
    else
    {
        Console.WriteLine("Error: unknown job type");
    }
}
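// A minimal sketch of the factory dispatch used by Main, assuming GetWorker maps the
// first command-line argument to a job type. The PackWorker/UnpackWorker class names
// and the exact shape of the returned worker are illustrative assumptions based on
// the DoWork() call above, not the project's actual API.
public class WorkerFactory
{
    public IWorkable GetWorker(string[] args)
    {
        // No arguments: Main prints the "unknown job type" error for a null worker.
        if (args == null || args.Length == 0)
        {
            return null;
        }
        switch (args[0].ToLowerInvariant())
        {
            case "pack":   return new PackWorker(args);   // hypothetical job class
            case "unpack": return new UnpackWorker(args); // hypothetical job class
            default:       return null;
        }
    }
}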
static ExitStatus Run(BinaryReader sourceReader, BinaryWriter destinationWriter, IPackerEngine packer,
                      PackerMode packerMode, int blockLength, int parallelismDegree,
                      CancellationTokenSource token, ILoggable logger)
{
    int blocksNumber = (int)Math.Ceiling((double)sourceReader.BaseStream.Length / blockLength);
    Stopwatch watcher = new Stopwatch();
    watcher.Start();
    logger?.LogMessage("Parallel Packer started:");
    logger?.LogMessage($"    {packerMode.ToString().ToLower()}ing...");
    try
    {
        if (packerMode == PackerMode.Unpack)
        {
            // The block info is stored in the packed file because the blockLength
            // parameter can be changed between the pack and unpack runs.
            (blocksNumber, blockLength) = BinaryBlockReader.ReadBlockInfo(sourceReader);
        }
        var commonSourceConveyer = new LockableConveyer<Block>();
        var commonDestinationConveyer = new LockableConveyer<Block>();
        var workers = new List<IWorkable>
        {
            WorkerFactory.CreateSourceWorker(sourceReader, blocksNumber, blockLength, packerMode, commonSourceConveyer, logger),
            WorkerFactory.CreateDestinationWorker(destinationWriter, blocksNumber, blockLength, packerMode, commonDestinationConveyer, logger)
        };
        for (int index = 1; index <= parallelismDegree; ++index)
        {
            workers.Add(WorkerFactory.CreatePackerWorker(index, packerMode, packer, commonSourceConveyer, commonDestinationConveyer, logger));
        }
        WorkerFactory.DoWork(workers, token);
        watcher.Stop();
        if (token.IsCancellationRequested)
        {
            logger?.LogMessage($"{packerMode}ing has been cancelled by user '{Environment.UserName}' after {watcher.Elapsed}");
            return ExitStatus.CANCEL;
        }
        else
        {
            logger?.LogMessage($"{packerMode}ing finished successfully in {watcher.Elapsed}:");
            logger?.LogMessage($"    total blocks number: {blocksNumber}");
            logger?.LogMessage($"    raw block length: {blockLength}");
            return ExitStatus.SUCCESS;
        }
    }
    catch (Exception e)
    {
        watcher.Stop();
        logger?.LogError($"{packerMode}ing finished with ERRORS in {watcher.Elapsed}:", e);
        return ExitStatus.ERROR;
    }
}
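// A minimal sketch of a LockableConveyer<T>-style blocking queue, assuming the
// conveyer hands blocks from the source worker to the packer workers and on to the
// destination worker via Put/Get plus a Stop signal. The Put/Get/Stop names are
// assumptions inferred from the (out bool stopped) delegate used in the test below,
// not the project's actual API.
using System.Collections.Generic;
using System.Threading;

public class BlockingConveyerSketch<T> where T : class
{
    private readonly Queue<T> items = new Queue<T>();
    private bool stopped;

    public void Put(T item)
    {
        lock (items)
        {
            items.Enqueue(item);
            Monitor.Pulse(items); // wake one waiting consumer
        }
    }

    public T Get(out bool isStopped)
    {
        lock (items)
        {
            // Block until an item arrives or the producer signals completion.
            while (items.Count == 0 && !stopped)
            {
                Monitor.Wait(items);
            }
            isStopped = stopped && items.Count == 0;
            return isStopped ? null : items.Dequeue();
        }
    }

    public void Stop()
    {
        lock (items)
        {
            stopped = true;
            Monitor.PulseAll(items); // release every waiting consumer
        }
    }
}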
public void CommonConveyerWorkersLogic_Test()
{
    var commonSourceConveyer = new LockableConveyer<SourceItem>();
    var commonDestinationConveyer = new LockableConveyer<DestinationItem>();
    byte[] sourceRawData = { 1, 2, 3, 4, 5 };
    int sourceRawDataIndex = 0;
    var destination = new List<DestinationItem>();

    // Feeds one SourceItem per raw byte and signals "stopped" once the data is exhausted.
    IGettableConveyer<SourceItem> gettableConveyer = new GetOnlyConveyer<SourceItem>((out bool stopped) =>
    {
        stopped = sourceRawDataIndex == sourceRawData.Length;
        return stopped ? null : new SourceItem() { Data = sourceRawData[sourceRawDataIndex++] };
    });
    // Collects the processed items so the result can be asserted on.
    IPuttableConveyer<DestinationItem> puttableConveyer = new PutOnlyConveyer<DestinationItem>((DestinationItem item) =>
    {
        destination.Add(item);
    });

    var workers = new List<IWorkable>
    {
        new Worker<SourceItem, SourceItem>("Source", gettableConveyer, commonSourceConveyer, null, item => item),
        new Worker<DestinationItem, DestinationItem>("Destination", commonDestinationConveyer, puttableConveyer, null, item => item)
    };
    for (int index = 1; index <= Environment.ProcessorCount; ++index)
    {
        workers.Add(new Worker<SourceItem, DestinationItem>($"Worker #{index}", commonSourceConveyer, commonDestinationConveyer, null,
            (SourceItem item) => new DestinationItem() { Data = item.Data }));
    }

    // The test never cancels; the token source just satisfies DoWork's signature.
    var token = new CancellationTokenSource();
    WorkerFactory.DoWork(workers, token);
    Assert.Equal(sourceRawData.Length, destination.Count);
}
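// A sketch of how the delegate-backed test conveyers above could be implemented,
// assuming IGettableConveyer<T>/IPuttableConveyer<T> each declare a single method
// matching the lambdas in the test. The GetItem/PutItem method names and the
// GetItemHandler delegate are illustrative assumptions, not the project's API.
public delegate T GetItemHandler<T>(out bool stopped);

public interface IGettableConveyer<T> { T GetItem(out bool stopped); }
public interface IPuttableConveyer<T> { void PutItem(T item); }

public class GetOnlyConveyer<T> : IGettableConveyer<T>
{
    private readonly GetItemHandler<T> getItem;
    public GetOnlyConveyer(GetItemHandler<T> getItem) => this.getItem = getItem;

    // Pulls the next item from the wrapped delegate; stopped turns true at end of data.
    public T GetItem(out bool stopped) => getItem(out stopped);
}

public class PutOnlyConveyer<T> : IPuttableConveyer<T>
{
    private readonly Action<T> putItem;
    public PutOnlyConveyer(Action<T> putItem) => this.putItem = putItem;

    // Forwards each finished item to the wrapped delegate (the test's destination list).
    public void PutItem(T item) => putItem(item);
}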