/// <summary>
/// Restores the generating polynomial of the (n, k) code with designed distance d
/// from the polynomial h, then places noise into the given samples and decodes
/// them in parallel.
/// </summary>
/// <param name="n">Code length.</param>
/// <param name="k">Code dimension.</param>
/// <param name="d">Designed code distance.</param>
/// <param name="h">Polynomial from which the generating polynomial is restored.</param>
/// <param name="samples">Samples to analyze.</param>
private static void AnalyzeSamples(int n, int k, int d, Polynomial h, params AnalyzingSample[] samples)
{
    var gaussSolver = new GaussSolver();
    var polynomialBuilder = new LiftingSchemeBasedBuilder(new GcdBasedBuilder(new RecursiveGcdFinder()), gaussSolver);
    var decoder = new GsBasedDecoder(
        new GsDecoder(new KotterAlgorithmBasedBuilder(new PascalsTriangleBasedCalcualtor()), new RrFactorizator()),
        gaussSolver)
    {
        TelemetryCollector = new GsBasedDecoderTelemetryCollectorForGsBasedDecoder()
    };

    var generatingPolynomial = polynomialBuilder.Build(n, d, h);
    _logger.LogInformation("Generating polynomial was restored");

    _logger.LogInformation("Samples analysis was started");
    // Cap the degree of parallelism at the sample count so no workers sit idle.
    var degreeOfParallelism = Math.Min((int)(Environment.ProcessorCount * 1.5d), samples.Length);
    Parallel.ForEach(
        samples,
        new ParallelOptions { MaxDegreeOfParallelism = degreeOfParallelism },
        sample => PlaceNoiseIntoSamplesAndDecodeWrapperForParallelRunning(sample, n, k, d, generatingPolynomial, decoder));
}
/// <summary>
/// Initializes the builder under test with its production collaborators.
/// </summary>
public LiftingSchemeBasedBuilderTests()
{
    var gcdFinder = new RecursiveGcdFinder();
    _builder = new LiftingSchemeBasedBuilder(new GcdBasedBuilder(gcdFinder), new GaussSolver());
}
/// <summary>
/// Runs a full noise analysis for the (n, k) code with designed distance d:
/// validates the requested errors/correct-values configuration, restores the
/// generating polynomial, generates samples and error positions, and decodes
/// every sample/error-pattern combination in parallel.
/// </summary>
/// <param name="n">Code length.</param>
/// <param name="k">Code dimension.</param>
/// <param name="d">Designed code distance.</param>
/// <param name="h">Polynomial from which the generating polynomial is restored.</param>
/// <param name="placedErrorsCount">Errors to place; defaults to the maximum correctable count.</param>
/// <param name="minCorrectValuesCount">Minimum correct values required; defaults to n minus the errors count.</param>
/// <param name="samplesCount">Number of samples to generate; null lets the generator decide.</param>
/// <param name="decodingThreadsCount">Decoding parallelism; defaults to 1.5x the processor count.</param>
/// <exception cref="ArgumentException">Thrown when the requested configuration is not decodable.</exception>
private static void AnalyzeCode(int n, int k, int d, Polynomial h, int? placedErrorsCount = null, int? minCorrectValuesCount = null, int? samplesCount = null, int? decodingThreadsCount = null)
{
    // Maximum number of correctable errors for the list decoder (Johnson-type radius).
    var maxErrorsCount = (int)Math.Floor(n - Math.Sqrt(n * (n - d)));
    var errorsCount = placedErrorsCount ?? maxErrorsCount;
    if (errorsCount > maxErrorsCount)
    {
        throw new ArgumentException("Errors count is too large");
    }
    if (errorsCount < d - maxErrorsCount)
    {
        throw new ArgumentException("Errors count is too small");
    }

    var correctValuesCount = minCorrectValuesCount ?? n - errorsCount;
    if (correctValuesCount * correctValuesCount <= n * (n - d))
    {
        throw new ArgumentException("Correct values count is too small for decoding");
    }
    if (correctValuesCount > n - errorsCount)
    {
        throw new ArgumentException($"Correct values count can't be larger than {n - errorsCount} for errors count {errorsCount}");
    }
    if (correctValuesCount >= n - (d - 1) / 2)
    {
        throw new ArgumentException("List size will be always equal to 1");
    }

    var gaussSolver = new GaussSolver();
    var polynomialBuilder = new LiftingSchemeBasedBuilder(new GcdBasedBuilder(new RecursiveGcdFinder()), gaussSolver);
    var decoder = new GsBasedDecoder(
        new GsDecoder(new KotterAlgorithmBasedBuilder(new PascalsTriangleBasedCalcualtor()), new RrFactorizator()),
        gaussSolver)
    {
        TelemetryCollector = new GsBasedDecoderTelemetryCollectorForGsBasedDecoder()
    };
    var generatingPolynomial = polynomialBuilder.Build(n, d, h);

    _logger.LogInformation("Start samples generation");
    var samplesGenerationTimer = Stopwatch.StartNew();
    var samples = GenerateSamples(n, k, generatingPolynomial, samplesCount).ToArray();
    samplesGenerationTimer.Stop();
    _logger.LogInformation("Samples were generated in {0} seconds", samplesGenerationTimer.Elapsed.TotalSeconds);

    _logger.LogInformation("Start errors positions generation");
    var errorsPositionsGenerationTimer = Stopwatch.StartNew();
    var errorsPositions = GenerateErrorsPositions(n, errorsCount).ToArray();
    errorsPositionsGenerationTimer.Stop();
    _logger.LogInformation("Errors positions were generated in {0} seconds", errorsPositionsGenerationTimer.Elapsed.TotalSeconds);

    _logger.LogInformation("Start noise decoding");
    var noiseDecodingTimer = Stopwatch.StartNew();
    // Expand to the cartesian product: every sample paired with every error pattern.
    samples = samples
        .SelectMany(sample => errorsPositions.Select(positions =>
            new AnalyzingSample(sample)
            {
                ErrorPositions = positions,
                CurrentNoiseValue = Enumerable.Repeat(1, errorsCount).ToArray(),
                CorrectValuesCount = correctValuesCount
            }))
        .ToArray();
    Parallel.ForEach(
        samples,
        new ParallelOptions { MaxDegreeOfParallelism = decodingThreadsCount ?? (int)(Environment.ProcessorCount * 1.5d) },
        sample => PlaceNoiseIntoSamplesAndDecodeWrapperForParallelRunning(sample, n, k, d, generatingPolynomial, decoder));
    noiseDecodingTimer.Stop();
    _logger.LogInformation("Noise decoding was performed in {0} seconds", noiseDecodingTimer.Elapsed.TotalSeconds);
}