public async Task<PipeReadResult> ReadAsync(PipeReadParameters parameters, CancellationToken cancellation)
{
    await ThreadingUtils.ContinueAtThreadPull(cancellation);

    var buffer = new byte[parameters.BytesExpected];
    uint bytesRead = 0;
    var status = _ftdi.Read(buffer, parameters.BytesExpected.ToUInt32(), ref bytesRead);
    if (status == FT_STATUS.FT_OK)
    {
        if (bytesRead < parameters.BytesExpected)
        {
            return new PipeReadResult(ReadStatus.PATIALLY_DONE, bytesRead.ToInt32(), buffer.Take(bytesRead.ToInt32()));
        }
        else if (bytesRead == parameters.BytesExpected)
        {
            return new PipeReadResult(ReadStatus.DONE, bytesRead.ToInt32(), buffer);
        }
        else
        {
            throw new InvalidOperationException();
        }
    }
    else
    {
        return new PipeReadResult(ReadStatus.UNKNOWN_ERROR);
    }
}
/// <summary>
/// Reads rows from the given range, decimating (evenly skipping rows) when the range holds more than <c>_maxRowsCount</c> rows.
/// </summary>
/// <param name="from">Index of the first row of the range.</param>
/// <param name="count">Number of rows in the range.</param>
/// <param name="operationInfo">The current async operation's info.</param>
/// <returns>The selected rows, decimated if necessary.</returns>
public async Task<IPointsRow[]> GetDecimatedRangeAsync(int from, int count, AsyncOperationInfo operationInfo)
{
#warning what if underlying collection is not thread safe? Or it decreases its size?!

    await ThreadingUtils.ContinueAtThreadPull(operationInfo);

    IPointsRow[] result;
    if (count > _maxRowsCount) // Decimation required
    {
        var coefficient = (double)count / _maxRowsCount;
        var sourceRows = _maxRowsCount
            .Range()
            .AsParallel()
            .AsOrdered()
            .Select(i => (i * coefficient + from).Round())
            .Where(i => i < Source.RowsCount) // To handle possible rounding error
            .ToArray();

        return await Source.ReadRowsAsync(from, sourceRows, operationInfo);
    }
    else
    {
        result = await Source.ReadRowsAsync(from, count, operationInfo);
    }

    return result;
}
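// A minimal, self-contained sketch of the decimation mapping used above, assuming the intent is
// to pick at most maxRows evenly spaced rows out of [from, from + count). The names below
// (BuildDecimatedIndexes, maxRows, sourceRowsCount) are illustrative and not part of the API above.
// using System; using System.Linq;
static int[] BuildDecimatedIndexes(int from, int count, int maxRows, int sourceRowsCount)
{
    var coefficient = (double)count / maxRows;                // > 1 whenever decimation is required
    return Enumerable.Range(0, maxRows)
        .Select(i => (int)Math.Round(i * coefficient + from)) // spread maxRows picks across the range
        .Where(i => i < sourceRowsCount)                      // guard against rounding past the end
        .ToArray();
}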
public async Task<PipeReadResult> ReadAsync(PipeReadParameters parameters, CancellationToken cancellation)
{
    await ThreadingUtils.ContinueAtThreadPull();

    var buffer = new byte[parameters.BytesExpected];
    for (int i = 0; i < buffer.Length; i++)
    {
        var data = _port.ReadByte();
        if (data == -1) // No data available yet, wait and retry this position
        {
            await Task.Delay(1, cancellation);
            i--;
        }
        else
        {
            buffer[i] = data.ToByte();
        }

        cancellation.ThrowIfCancellationRequested();
    }

    return new PipeReadResult(ReadStatus.DONE, buffer.Length, buffer);
}
public async Task<StatusReadResult> TryReadStatusAsync(DeviceOperationScope scope, AsyncOperationInfo cancellation)
{
    await ThreadingUtils.ContinueAtThreadPull(cancellation);

    using (Logger.Indent)
    {
        Logger.LogInfo(null, $"Reading status of device \"{Name}\"...");

        var result = await ReadAsync(Command.STATUS, scope, cancellation);
        if (result.Status == ReadStatus.OK)
        {
            var flagsEntity = result.Entities.ElementAt(0);
            var numOfStatusBits = flagsEntity.Descriptor.Length.Length * 8;
            var flags = (uint)(dynamic)flagsEntity.Value;
            var serial = (ushort)result.Entities.ElementAt(1).Value;
            var status = new DeviceStatusInfo.StatusInfo(numOfStatusBits, flags);
            Logger.LogInfo(null, $"Status: {status.ToBinString()}");

            return new StatusReadResult(result, new DeviceStatusInfo(Id, serial, status));
        }
        else
        {
            return new StatusReadResult(result, null);
        }
    }
}
public async Task<PipeWriteResult> WriteAsync(PipeWriteParameters parameters, CancellationToken cancellation)
{
    await ThreadingUtils.ContinueAtThreadPull();

    var buffer = parameters.Buffer.ToArray();
    _port.Write(buffer, 0, buffer.Length);

    return new PipeWriteResult(WriteStatus.DONE, buffer.Length);
}
public async Task<IPointsRow[]> ReadRowsAsync(int firstRowIndex, int rowsCount, AsyncOperationInfo operationInfo)
{
    await ThreadingUtils.ContinueAtThreadPull(operationInfo);

    var indexes = rowsCount
        .Range()
        .Select(i => i + firstRowIndex)
        .ToArray();

    return await readRowsAsync(firstRowIndex, indexes, true, operationInfo);
}
public static async Task LogRequestAsync(IList<byte> requestBytes)
{
    var time = DateTime.Now;

    await ThreadingUtils.ContinueAtThreadPull();

    using (await _ioLogStream.Locker.AcquireAsync())
    {
        var data = $"{time.ToString(Logging.Logger.TIME_FORMAT)} Request[{requestBytes.Count}] >> ".PadRight(40, ' ')
            + requestBytes.Select(b => b.ToString("X2")).Aggregate(" ");
        _ioLogStream.Object.WriteLine(data);
        _ioLogStream.Object.Flush();
    }
}
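// Note: Aggregate(" ") above is a codebase extension (standard LINQ Aggregate takes a function);
// it appears to join the hex strings with a separator. A self-contained, BCL-only equivalent of
// the hex dump, for illustration (ToHexDump is a hypothetical helper name):
// using System.Collections.Generic; using System.Linq;
static string ToHexDump(IEnumerable<byte> bytes) =>
    string.Join(" ", bytes.Select(b => b.ToString("X2"))); // e.g. { 0x0A, 0xFF } -> "0A FF"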
public async Task<TResponse> DeserializeResponseAsync(IResponseFuture inputStream, AsyncOperationInfo operationInfo)
{
    var sw = Stopwatch.StartNew();

    await ThreadingUtils.ContinueAtThreadPull();

    var loggedResponseFuture = new LoggingResponseFutureDecorator(inputStream, MAX_AMOUNT_OF_BYTES_INSIDE_LOG_ENTRY);
    TResponse response = null;
    using (Logger.Indent)
    {
        Logger.LogInfo(null, "Reading the response...");

        try
        {
            response = await deserializeResponseAsync(loggedResponseFuture, operationInfo);
            logDataRead();
        }
        catch (TimeoutException ex)
        {
            logError(ex);
            response = BuildErrorResponse(RequestStatus.READ_TIMEOUT);
        }
        catch (Exception ex)
        {
            logError(ex);
            response = BuildErrorResponse(RequestStatus.DESERIALIZATION_ERROR);
        }
    }

    return response;

    void logDataRead()
    {
        Logger.LogInfo(null, $"The response packet was read successfully{Global.NL}Total length: {loggedResponseFuture.ReadCount}, read duration: {sw.Elapsed.TotalMilliseconds.ToString("F2")} ms{getBufferRepresentation()}");
    }
    void logError(Exception ex)
    {
        Logger.LogError(null, $"Error while reading/deserializing the packet. Bytes read: {loggedResponseFuture.ReadCount}, read duration: {sw.Elapsed.TotalMilliseconds.ToString("F2")} ms{getBufferRepresentation()}", ex);
    }
    string getBufferRepresentation()
    {
        var tooManyData = loggedResponseFuture.StorageCount < loggedResponseFuture.ReadCount;

        return $"{Global.NL}First {loggedResponseFuture.Capacity} bytes of {loggedResponseFuture.ReadCount}".IfOrDefault(tooManyData)
            + $"{Global.NL}Data<HEX>:{loggedResponseFuture.Storage.Select(b => b.ToString("X2").PadLeft(3)).Aggregate(" ")}"
            + $"{Global.NL}Data<DEC>:{loggedResponseFuture.Storage.Select(b => b.ToString("D3").PadLeft(3)).Aggregate(" ")}";
    }
}
public override async Task<IEnumerable<CalibrationFileEntity>> GenerateCalibrationCoefficientsAsync()
{
    await ThreadingUtils.ContinueAtThreadPull();

    if (CallibrationCanBeGenerated)
    {
        var calibrator = new CalibratorApplication(Results, Constants);
        var coefficients = await calibrator.CalculateCoefficientsAsync();

        return await generateCalibrationCoefficients(coefficients);
    }
    else
    {
        throw new InvalidOperationException("The required measurements have not been performed");
    }
}
public async Task<string> SanitizePostBodyAsync(string body)
{
    await ThreadingUtils.ContinueAtThreadPull();

    var intermediate = _allowAllButNotExecutable.Sanitize(body);
    var document = new HtmlDocument();
    document.LoadHtml(intermediate);
    var nodes = document.DocumentNode.SelectNodes("//img");
    foreach (var node in nodes.NullToEmpty())
    {
        var srcAttr = node.Attributes.Single(a => a.Name == "src");
        var src = srcAttr.Value;
        src = await trySanitizeImage(src);
        if (src == null)
        {
            node.Remove();
        }
        else
        {
            srcAttr.Value = src;
            const string ATTRIBUTE = "class";
            if (node.Attributes.NotContains(a => a.Name == ATTRIBUTE))
            {
                node.Attributes.Add(ATTRIBUTE, "");
            }
            node.Attributes[ATTRIBUTE].Value += " rounded img-fluid";
        }
    }

    var ms = new MemoryStream();
    var writer = new StreamWriter(ms);
    document.Save(writer);
    writer.Flush();
    ms.Position = 0;
    var sanitized = new StreamReader(ms)
        .ReadAllText()
        .Aggregate();

    return sanitized;
}
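// Design note: the MemoryStream/StreamWriter round-trip above rebuilds the sanitized markup as a
// string. HtmlAgilityPack also exposes the serialized markup directly via DocumentNode.OuterHtml;
// a hedged sketch of that alternative (ToHtmlString is a hypothetical helper, not part of the code above):
static string ToHtmlString(HtmlAgilityPack.HtmlDocument document) =>
    document.DocumentNode.OuterHtml; // essentially the same markup the stream round-trip produces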
async Task dispatcherLoopAsync()
{
    await ThreadingUtils.ContinueAtDedicatedThread();

    while (true)
    {
        try
        {
            var message = _consumer.Consume();
            var scope = _scopeBuilder
                .CreateScope();
            var controller = scope.ServiceProvider
                .GetRequiredService<IStatisticServiceAPI>();
            var parameter = message.Value;
            var handler = getHandler();
            executeAsync();

            /////////////////////////////////////////////

            Func<Task> getHandler()
            {
                return message.Value switch
                {
                    CommentaryNotification cn => () => controller.OnCommentaryActionAsync(cn),
                    PostNotification pn => () => controller.OnPostActionAsync(pn),
                    SeenNotification sn => () => controller.OnSeenAsync(sn),
                    UserNotification un => () => controller.OnUserActionAsync(un),
                    _ => throw new NotSupportedException()
                };
            }
            async void executeAsync()
            {
                using (scope)
                {
                    await ThreadingUtils.ContinueAtThreadPull();

                    await handler();

                    _logger.LogInformation($"Consumed message '{message.Value}' at: '{message.TopicPartitionOffset}'.");
                }
            }
        }
        catch (Exception ex) // Assumed error handling: log the failure and keep the dispatcher loop alive
        {
            _logger.LogError(ex, "Error while consuming or dispatching a message.");
        }
    }
}
public async Task SaveCalibrationFileAsync(FileType fileType, IEnumerable<IDataEntity> dataEntities)
{
    await ThreadingUtils.ContinueAtThreadPull();

    var serialized = new FileStringSerializer().Serialize(dataEntities);
    if (serialized != null)
    {
        var extensionInfo = _extensionFactory.GetExtension(_deviceId, fileType);
        var path = IOUtils.RequestFileSavingPath(extensionInfo.FileExtensionFilter);
        if (path != null)
        {
            try
            {
                File.WriteAllText(path, serialized);
            }
            catch (Exception ex)
            {
                Logger.LogErrorEverywhere("Failed to save the file to disk", ex);
            }
        }
    }
}
public static async Task<MeasureResult> DeserializeAsync(string fileName, IEnumerable<byte> content)
{
    await ThreadingUtils.ContinueAtThreadPull();

    var eb = new ExceptionBuilder();
    try
    {
        eb.SetException("Failed to read the calibration parameters from the file name. The file name is invalid.");
#warning move to attribute
        var mode = EnumUtils.GetValues<IncTCalAngle>().First(v => fileName.Contains(v.ToAnglesString()));
        var date = DateTime.ParseExact(fileName.Split(" InGK ").FirstElement(), TIME_FORMAT, null);

        eb.SetException("Failed to read the file. The file format is invalid.");
        var reader = new StreamReader(content.ToMemoryStream(), Encoding.GetEncoding(CSV_ENCODING));
        var columns = reader.ReadAllLines().FirstItem().Split(CSV_SEPARATOR);
        var rows = parseRows(reader).ToArray();

        return new MeasureResult(mode, date, columns, rows);
    }
    catch (Exception ex)
    {
        throw eb.InstantiateException(ex);
    }

    IEnumerable<double[]> parseRows(StreamReader reader)
    {
        foreach (var line in reader.ReadAllLines())
        {
            yield return line
                .Split(CSV_SEPARATOR)
                .Select(v => v.ParseToDoubleInvariant())
                .ToArray();
        }
    }
}
public async Task<PipeWriteResult> WriteAsync(PipeWriteParameters parameters, CancellationToken cancellation)
{
    await ThreadingUtils.ContinueAtThreadPull(cancellation);

    var buffer = parameters.Buffer.ToArray();
    uint bytesWritten = 0;
    var status = _ftdi.Write(buffer, buffer.Length.ToUInt32(), ref bytesWritten);
    if (status != FT_STATUS.FT_OK)
    {
        return new PipeWriteResult(WriteStatus.UNKNOWN_ERROR);
    }
    else if (bytesWritten != buffer.Length)
    {
        return new PipeWriteResult(WriteStatus.PATIALLY_DONE, bytesWritten.ToInt32());
    }
    else
    {
        return new PipeWriteResult(WriteStatus.DONE, bytesWritten.ToInt32());
    }
}
public static async Task<FileStream> OpenNewReadOnlyStreamAsync(this FileStream stream)
{
    await ThreadingUtils.ContinueAtThreadPull();

    return new FileStream(stream.Name, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
}
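// A hedged usage sketch: assuming `writeStream` is a FileStream that another component keeps open
// (with read sharing allowed), the extension opens an independent read-only view, so reading it
// does not disturb the writer's position. MeasureLengthAsync is an illustrative name only.
static async Task<long> MeasureLengthAsync(FileStream writeStream)
{
    using (var readOnlyView = await writeStream.OpenNewReadOnlyStreamAsync())
    {
        return readOnlyView.Length; // independent handle onto the same file
    }
}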
public static async Task<MeasureResult> DeserializeAsync(string fileName, IEnumerable<byte> content)
{
    await ThreadingUtils.ContinueAtThreadPull();

    var eb = new ExceptionBuilder();
    try
    {
        var date = CommonUtils.TryOrDefault(
            () => DateTime.ParseExact(fileName.Split(" InGK ").FirstElement(), TIME_FORMAT, null));

        eb.SetException("Failed to read the file. The file format is invalid.");
        var reader = new StreamReader(content.ToMemoryStream(), Encoding.GetEncoding(CSV_ENCODING));
        var columns = reader
            .ReadAllLines()
            .FirstItem()
            .Split(CSV_SEPARATOR);
        var rows = parsePositionMeasures(reader).ToArray();

        return new MeasureResult(date, columns, rows);
    }
    catch (Exception ex)
    {
        throw eb.InstantiateException(ex);
    }

    IEnumerable<PositionMeasureResult> parsePositionMeasures(StreamReader reader)
    {
        var lines = reader.ReadAllLines().ToArray().StartEnumeration();
        lines.AdvanceOrThrow();
        while (true)
        {
            var one = parseOne();
            if (one == null)
            {
                yield break;
            }
            else
            {
                yield return one;
            }
        }

        PositionMeasureResult parseOne()
        {
            if (lines.IsFinished)
            {
                return null;
            }

            var line = lines.Current;
            if (line == null)
            {
                return null;
            }
            else if (!line.StartsWith("Position"))
            {
                throw new Exception("Unexpected file format: a \"Position\" line was expected.");
            }

            var serializedPosition = line
                .Split(CSV_SEPARATOR)
                .Skip(2)
                .Take(3)
                .Select(v => v.ParseToDoubleInvariant())
                .ToArray();
            var p = new V3(serializedPosition[0], serializedPosition[1], serializedPosition[2]);
            var positionInfo = (from pos in EnumUtils.GetValues<Position>()
                                let angles = pos.GetAngles()
                                let v3 = new V3(angles.Inc, angles.Azi, angles.GTF)
                                let dV = v3 - p
                                select (DDistance: dV, Position: pos))
                .OrderBy(v => v.DDistance.Mag)
                .FirstItem().Position;
            var position = PrecisePosition.FromAbsolute(
                positionInfo,
                p.X,
                p.Y,
                p.Z);
            var positionMeasure = new PositionMeasureResult(
                position,
                new List<IEnumerable<double>>());
            foreach (var l in lines.AdvanceRange())
            {
                if (l.StartsWith("Position"))
                {
                    break;
                }

                var measures = l
                    .Split(CSV_SEPARATOR)
                    .Where(v => v.IsNotNullOrWhiteSpace())
                    .Select(v => v.ParseToDoubleInvariant())
                    .ToArray();
                positionMeasure.Result.Add(measures);
            }

            return positionMeasure;
        }
    }
}
public async Task<CalibratedPoint[][]> CalculateErrorsAsync(bool useTestAngle = true)
{
    await ThreadingUtils.ContinueAtThreadPull();

    // It creates the calibration file in the calibrator folder
    await CalculateCoefficientsAsync(useTestAngle);
    var correctionResults = await setupAsync();
    run();
    switch (_calibrationType)
    {
        case CalibrationType.TEMPERATURE:
        {
            var curves = new List<CalibratedPoint[]>();
            foreach (var result in correctionResults)
            {
                var corrFileContent = File.ReadAllBytes(result.CorrFilePath);
                var corrFile = await InclinometrTemperatureCalibrator.MeasureResult
                    .DeserializeAsync(Path.GetFileName(result.CorrFilePath), corrFileContent);
                var angles = ((InclinometrTemperatureCalibrator.MeasureResult)result.Measure).Rows
                    .Select(row => new V3(row[1], row[2], row[3])) // INC, AZI, GTF
                    .ToArray();
                var accelerometr = ((InclinometrTemperatureCalibrator.MeasureResult)result.Measure).Rows
                    .Select(row => new V3(row[4], row[5], row[6])) // GX Y Z
                    .ToArray();
                var magnitometr = ((InclinometrTemperatureCalibrator.MeasureResult)result.Measure).Rows
                    .Select(row => new V3(row[7], row[8], row[9])) // BX Y Z
                    .ToArray();
                var accelerometrTemperaturtes = ((InclinometrTemperatureCalibrator.MeasureResult)result.Measure).Rows
                    .Select(row => new V3(row[13], row[14], row[15])) // TempGx y z
                    .ToArray();
                var magnitometrTemperature = ((InclinometrTemperatureCalibrator.MeasureResult)result.Measure).Rows
                    .Select(row => new V3(row[16], row[16], row[16]))
                    .ToArray();
                var correctedAngles = corrFile.Rows
                    .Select(row => new V3(row[1], row[2], row[3])) // INC, AZI, GTF
                    .ToArray();
                var correctedAccelerometr = corrFile.Rows
                    .Select(row => new V3(row[4], row[5], row[6])) // GX Y Z
                    .ToArray();
                var correctedMagnitometr = corrFile.Rows
                    .Select(row => new V3(row[7], row[8], row[9])) // BX Y Z
                    .ToArray();
                var temperatures = corrFile.Rows.Select(row => row.TakeFromEnd(2).Average()).ToArray();
                var angle = corrFile.Mode.GetAngles();
                var curve = angles.Length.Range()
                    .Select(i => new CalibratedPoint(
                        angle,
                        angles[i],
                        accelerometr[i],
                        magnitometr[i],
                        correctedAngles[i],
                        correctedAccelerometr[i],
                        correctedMagnitometr[i],
                        accelerometrTemperaturtes[i],
                        magnitometrTemperature[i],
                        temperatures[i]))
                    .ToArray();
                curves.Add(curve);
            }

            return curves.ToArray();
        }

        case CalibrationType.ANGULAR:
        {
            var curves = new List<CalibratedPoint[]>();
            var result = correctionResults.Single();
            var corrFileContent = File.ReadAllBytes(result.CorrFilePath);
            var corrFile = await InclinometrAngularCalibrator.MeasureResult
                .DeserializeAsync(Path.GetFileName(result.CorrFilePath), corrFileContent);
            var points = new CalibratedPoint[corrFile.Positions.Count()][];
            var i = 0;
            foreach (var position in corrFile.Positions)
            {
                var angles = ((InclinometrAngularCalibrator.MeasureResult)result.Measure).Positions
                    .First(p => p.Position.Position == position.Position.Position)
                    .Result.Select(r => r.ToArray())
                    .Select(row => new V3(row[0], row[1], row[2])) // INC, AZI, GTF
                    .ToArray();
                var correctedAngles = position.Result
                    .Select(r => r.ToArray())
                    .Select(row => new V3(row[0], row[1], row[2])) // INC, AZI, GTF
                    .ToArray();
                var expectedAngles = position.Position.Position.GetAngles();
                var expectedAnglesV3 = new V3(expectedAngles.Inc, expectedAngles.Azi, expectedAngles.GTF);
                points[i] = angles.Length.Range()
                    .Select(k => new CalibratedPoint(
                        expectedAnglesV3,
                        angles[k],
                        V3.Zero,
                        V3.Zero,
                        correctedAngles[k],
                        V3.Zero,
                        V3.Zero,
                        V3.Zero,
                        V3.Zero,
                        0))
                    .ToArray();
                i++;
            }

            return points;
        }

        default:
            throw new NotSupportedException();
    }

    async Task<(MeasureResultBase Measure, string CorrFilePath)[]> setupAsync()
async Task<IPointsRow[]> readRowsAsync(int firstRowIndex, IList<int> rowsIndexes, bool isIndexesSequential, AsyncOperationInfo operationInfo)
{
    await ThreadingUtils.ContinueAtThreadPull(operationInfo);

    int DEGREE_OF_PARALLELISM = isIndexesSequential
        ? READER_DEGREE_OF_PARALLELISM
        : RANDOM_READER_DEGREE_OF_PARALLELISM;
    var futures = new Task[DEGREE_OF_PARALLELISM];
    var workGroups = rowsIndexes.SplitOnGroups(DEGREE_OF_PARALLELISM);
    var offset = 0;
    var rows = new IPointsRow[rowsIndexes.Count];
    for (int i = 0; i < DEGREE_OF_PARALLELISM; i++)
    {
        var work = workGroups[i];
        futures[i] = readRowsTo(offset, work);
        offset += work.Length;
    }
    await Task.WhenAll(futures);

    return rows;

    async Task readRowsTo(int rowsArrayOffset, IList<int> indexes)
    {
        await ThreadingUtils.ContinueAtThreadPull(operationInfo);

        var reader = await _streams.AquireAsync(operationInfo);
        try
        {
            var rowBuffer = new byte[_numOfPointsInsideRow * sizeof(double)];
            if (isIndexesSequential && indexes.Count > 0)
            {
                reader.BaseStream.Position = indexes[0] * _numOfPointsInsideRow * sizeof(double);
            }
            for (int i = 0; i < indexes.Count; i++)
            {
                if (!isIndexesSequential)
                {
                    var index = indexes[i];
                    reader.BaseStream.Position = index * _numOfPointsInsideRow * sizeof(double);
                }

                var row = new double[_numOfPointsInsideRow];
                reader.BaseStream.Read(rowBuffer, 0, rowBuffer.Length); // Assumes the stream returns the whole row in a single read
                for (int k = 0; k < _numOfPointsInsideRow; k++)
                {
                    row[k] = readDoubleFast(rowBuffer, k * sizeof(double));
                    //row[k] = reader.ReadDouble(); // Bottleneck (15%)
                }
                rows[rowsArrayOffset + i] = new PointsRow(row);
            }
        }
        finally
        {
            await _streams.ReleaseAsync(reader, operationInfo);
        }
    }
}
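// readDoubleFast is referenced above but not shown here; a plausible, self-contained sketch under
// that name (an assumption, not the original implementation) decodes a double directly from the
// row buffer, avoiding a BinaryReader call per value:
static double readDoubleFast(byte[] buffer, int offset)
{
    return BitConverter.ToDouble(buffer, offset); // reads 8 bytes at the offset in platform byte order
}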
public override async Task DeactivateDeviceAsync(DeviceOperationScope scope, AsyncOperationInfo cancellation)
{
    await ThreadingUtils.ContinueAtThreadPull(cancellation);

    await _base.DeactivateDeviceAsync(scope, cancellation);
}
public override async Task<ReadResult> ReadAsync(Command request, DeviceOperationScope scope, AsyncOperationInfo cancellation)
{
    await ThreadingUtils.ContinueAtThreadPull(cancellation);

    return await _base.ReadAsync(request, scope, cancellation);
}
public async Task<IEnumerable<Curve>> CalculateCoefficientsAsync(bool useTestAngle = true)
{
    await ThreadingUtils.ContinueAtThreadPull();

    await setup();
    run();

    return getResults().MakeCached();

    async Task setup()
    {
        IOUtils.RecreateDirectory(MEASURE_RESULTS_PATH);
        var measureResultsPaths = new List<string>();
        var mr = !useTestAngle && _calibrationType == CalibrationType.TEMPERATURE
            ? _results
                .Cast<InclinometrTemperatureCalibrator.MeasureResult>()
                .Where(r => r.Mode.GetAttribute<TestAngleAttribute>() == null)
            : _results;
        foreach (var measureResult in mr)
        {
            var serialized = await measureResult.SerializeAsync();
            var path = Path.Combine(MEASURE_RESULTS_PATH, serialized.FileName);
            measureResultsPaths.Add(path);
            IOUtils.CreateFile(path).WriteAndDispose(serialized.Content);
        }
        IOUtils.CreateFile(CONFIG_FILE_PATH)
            .ToStreamWriter(Encoding.ASCII)
            .WriteAndDispose(generateConfig());
        IOUtils.CreateFile(CONSTANTS_FILE_PATH)
            .ToStreamWriter(Encoding.ASCII)
            .WriteAndDispose(_constants.GenerateFile());

        string generateConfig()
        {
            switch (_calibrationType)
            {
                case CalibrationType.TEMPERATURE:
                    return ConfigBuilder.CreateTemperatureCalibrationConfig(
                        Path.GetFileName(TEMPERATURE_CALIBRATION_FILE_PATH),
                        measureResultsPaths);

                case CalibrationType.ANGULAR:
                    return ConfigBuilder.CreateAngularCalibrationConfig(
                        Path.GetFileName(ACCELEROMETR_ANGLULAR_CALIBRATION_FILE_PATH),
                        Path.GetFileName(MAGNITOMETR_ANGLULAR_CALIBRATION_FILE_PATH),
                        measureResultsPaths.Single());

                default:
                    throw new NotSupportedException();
            }
        }
    }

    IEnumerable<Curve> getResults()
    {
        switch (_calibrationType)
        {
            case CalibrationType.TEMPERATURE:
            {
                const string CURVE_SEPARATOR = ";";
                var cells = File.ReadAllLines(TEMPERATURE_CALIBRATION_FILE_PATH)
                    .Select(l => l.Split(CURVE_SEPARATOR))
                    .To2DArray();
                for (int curveI = 0; curveI < cells.GetColumnsLength(); curveI++)
                {
                    var points = new List<double>();
                    var curve = new Curve(cells[curveI, 0], points);
                    for (int pointI = 1; pointI < cells.GetRowsLength(); pointI++)
                    {
                        points.Add(cells[curveI, pointI].ParseToDoubleInvariant());
                    }
                    yield return curve;
                }
            }
            break;

            case CalibrationType.ANGULAR:
            {
                var kXkYxZ = parse(ACCELEROMETR_ANGLULAR_CALIBRATION_FILE_PATH);
                yield return new Curve("Kx", kXkYxZ[0]);
                yield return new Curve("Ky", kXkYxZ[1]);
                yield return new Curve("Kz", kXkYxZ[2]);
                var kM = parse(MAGNITOMETR_ANGLULAR_CALIBRATION_FILE_PATH);
                yield return new Curve("MRow0", kM[0]);
                yield return new Curve("MRow1", kM[1]);
                yield return new Curve("MRow2", kM[2]);

                IEnumerable<double>[] parse(string filePath)
                {
                    return File.ReadAllLines(filePath)
                        .Skip(1)
                        .Select(l => l.Split(" ").ParseToDoubleInvariant())
                        .ToArray();
                }
            }
            break;

            default:
                throw new NotSupportedException();
        }
    }
}
public override async Task<StatusReadResult> TryReadStatusAsync(DeviceOperationScope scope, AsyncOperationInfo cancellation)
{
    await ThreadingUtils.ContinueAtThreadPull(cancellation);

    return await _base.TryReadStatusAsync(scope, cancellation);
}
public override async Task<BurnResult> BurnAsync(Command request, IEnumerable<IDataEntity> entities, DeviceOperationScope scope, AsyncOperationInfo cancellation)
{
    await ThreadingUtils.ContinueAtThreadPull(cancellation);

    return await _base.BurnAsync(request, entities, scope, cancellation);
}