Example #1
        async Task daemon()
        {
            await ThreadingUtils.ContinueAtDedicatedThread();

            while (true)
            {
                try
                {
                    using var scope = ScopeFactory.CreateScope();
                    var db = scope.ServiceProvider.GetRequiredService <RunnerContext>();

                    var toUpdate = await db.RunResults
                                   .AsNoTracking()
                                   .IncludeGroup(API.Models.EntityGroups.ALL, db)
                                   .Where(r => !r.ResultBase.State.IsFinal && r.ResultBase.State.NextStateUpdate != null && r.ResultBase.State.NextStateUpdate.Value < DateTime.UtcNow)
                                   .ToArrayAsync();

                    foreach (var r in toUpdate)
                    {
                        Producer.FireUpdateTestResultState(new UpdateTestResultStateMessage(r.ResultBase.TestId, r.Id, r.ResultBase.SourceId, r.ResultBase.State));
                    }
                }
                catch (Exception ex)
                {
                    // Swallow and retry after the delay; Debugger.Break() only has an effect when a debugger is attached
                    Debugger.Break();
                }

                await Task.Delay(10 * 1000);
            }
        }
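All of the loops in these examples begin by awaiting ThreadingUtils.ContinueAtDedicatedThread(), which moves the remainder of the async method off the thread pool and onto its own long-running thread. The helper itself is not shown on this page; the sketch below is only an assumption of how such an awaitable could be built (a custom awaiter that resumes the continuation on a new background thread), not the library's actual code.

        // Minimal sketch (assumption): awaiting this type resumes the async method
        // on a freshly started background thread, so an infinite polling loop never
        // occupies a thread-pool worker between iterations.
        using System;
        using System.Runtime.CompilerServices;
        using System.Threading;

        public static class ThreadingUtilsSketch
        {
            public static DedicatedThreadAwaitable ContinueAtDedicatedThread(CancellationToken cancellation = default)
                => new DedicatedThreadAwaitable(cancellation);
        }

        public readonly struct DedicatedThreadAwaitable
        {
            readonly CancellationToken _cancellation;
            public DedicatedThreadAwaitable(CancellationToken cancellation) => _cancellation = cancellation;

            public Awaiter GetAwaiter() => new Awaiter(_cancellation);

            public readonly struct Awaiter : INotifyCompletion
            {
                readonly CancellationToken _cancellation;
                public Awaiter(CancellationToken cancellation) => _cancellation = cancellation;

                public bool IsCompleted => false; // never complete synchronously: always switch threads

                public void OnCompleted(Action continuation) =>
                    new Thread(() => continuation()) { IsBackground = true }.Start();

                public void GetResult() => _cancellation.ThrowIfCancellationRequested();
            }
        }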
Example #2
        async void openStateCheckingAsyncLoop()
        {
            await ThreadingUtils.ContinueAtDedicatedThread(CancellationToken.None);

            var period = new PeriodDelay(1000);

            while (true)
            {
                if (_openedDevice != null)
                {
                    try
                    {
                        var devices = getAllDevices();
                        var exists  = devices.FirstOrDefault(d => d.LocId == _openedDevice.LocId &&
                                                             d.SerialNumber == _openedDevice.SerialNumber &&
                                                             d.Description == _openedDevice.Description) != null;
                        if (!exists)
                        {
                            Close();
                        }
                    }
                    catch (Exception ex)
                    {
                        Logger.LogError(null, "Ошибка ", ex);
                    }
                }

                await period.WaitTimeLeftAsync();
            }
        }
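PeriodDelay is another small helper these loops depend on: WaitTimeLeftAsync waits only for the part of the fixed period that the loop body has not already used, so each iteration starts roughly every N milliseconds. A possible implementation, given purely as an illustration (the real class may differ):

        // Hypothetical PeriodDelay: keeps a loop on a fixed cadence regardless of
        // how long each iteration body takes.
        using System;
        using System.Threading;
        using System.Threading.Tasks;

        public class PeriodDelay
        {
            readonly int _periodMs;
            DateTime _periodStart = DateTime.UtcNow;

            public PeriodDelay(int periodMs) => _periodMs = periodMs;

            public async Task WaitTimeLeftAsync(CancellationToken cancellation = default)
            {
                // Wait only for whatever is left of the current period, then start a new one.
                var elapsedMs = (int)(DateTime.UtcNow - _periodStart).TotalMilliseconds;
                await Task.Delay(Math.Max(0, _periodMs - elapsedMs), cancellation);
                _periodStart = DateTime.UtcNow;
            }
        }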
Example #3
        async Task daemon()
        {
            await ThreadingUtils.ContinueAtDedicatedThread();

            while (true)
            {
                try
                {
                    using var scope = ScopeFactory.CreateScope();
                    using var db    = scope.ServiceProvider.GetRequiredService <RunnerContext>();

                    var hang = await db.RunResults
                               .IncludeGroup(API.Models.EntityGroups.ALL, db)
                               .AsNoTracking()
                               .ToArrayAsync();

                    hang = hang
                           .Where(d => d.ResultBase.Result == API.Models.RunResult.Running &&
                                  DateTime.UtcNow - d.ResultBase.StartTime > TimeSpan.FromMinutes(3))
                           .ToArray();
                    db.RunResults.RemoveRange(hang);
                    await db.SaveChangesAsync();
                }
                catch (Exception ex)
                {
                    Debugger.Break();
                }

                await Task.Delay(60 * 1000);
            }
        }
Example #4
        async Task updateAwailablePortsLoop(CancellationToken cancellation)
        {
            await ThreadingUtils.ContinueAtDedicatedThread(cancellation);

            var period = new PeriodDelay(1000);

            while (true)
            {
                try
                {
                    using (await Locker.AcquireAsync())
                    {
                        var devices = getAllDevices();
                        PortNames = devices.Length.Range().Select(i => PORT_PREFIX + i);
                    }
                }
                catch (OperationCanceledException)
                {
                    throw;
                }
                catch (Exception ex)
                {
                    Logger.LogError(null, $"Ошибка обновления списка устройств", ex);
                }

                await period.WaitTimeLeftAsync(cancellation);
            }
        }
Example #5
        async Task daemon()
        {
            await ThreadingUtils.ContinueAtDedicatedThread();

            while (true)
            {
                try
                {
                    using var scope = ScopeFactory.CreateScope();
                    using var db    = scope.ServiceProvider.GetRequiredService <TestsContext>();
                    var threshold = DateTime.UtcNow.AddDays(-3);
                    var legacy    = await db.Cases
                                    .Where(c => c.State == TestCaseState.RecordedButNotSaved && c.CreationDate < threshold)
                                    .ToArrayAsync();

                    foreach (var e in legacy)
                    {
                        e.IsDeleted = true;
                    }
                    await db.SaveChangesAsync();

                    Logger.LogInformation($"{legacy.Length} cases have been deleted");
                }
                catch (Exception ex)
                {
                    Logger.LogError(ex, "Could not cleanup");
                }

                await Task.Delay(3600 * 1000);
            }
        }
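The daemons in this and the previous example share the same shape: create a DI scope, resolve a DbContext, process a batch, then sleep. In an ASP.NET Core host the same cleanup loop could be written as a BackgroundService instead of a hand-started method. The adaptation below is a hedged sketch: the hosting class and its name are assumptions, while TestsContext, Cases, TestCaseState and the three-day threshold come from the example above.

        // Sketch: the cleanup loop above rehosted as a BackgroundService.
        // StaleCaseCleanupService is a hypothetical name; the query mirrors the example.
        using System;
        using System.Linq;
        using System.Threading;
        using System.Threading.Tasks;
        using Microsoft.EntityFrameworkCore;
        using Microsoft.Extensions.DependencyInjection;
        using Microsoft.Extensions.Hosting;
        using Microsoft.Extensions.Logging;

        public class StaleCaseCleanupService : BackgroundService
        {
            readonly IServiceScopeFactory _scopeFactory;
            readonly ILogger<StaleCaseCleanupService> _logger;

            public StaleCaseCleanupService(IServiceScopeFactory scopeFactory, ILogger<StaleCaseCleanupService> logger)
            {
                _scopeFactory = scopeFactory;
                _logger = logger;
            }

            protected override async Task ExecuteAsync(CancellationToken stoppingToken)
            {
                while (!stoppingToken.IsCancellationRequested)
                {
                    try
                    {
                        using var scope = _scopeFactory.CreateScope();
                        var db = scope.ServiceProvider.GetRequiredService<TestsContext>();
                        var threshold = DateTime.UtcNow.AddDays(-3);
                        var legacy = await db.Cases
                                             .Where(c => c.State == TestCaseState.RecordedButNotSaved && c.CreationDate < threshold)
                                             .ToArrayAsync(stoppingToken);

                        foreach (var c in legacy)
                        {
                            c.IsDeleted = true;
                        }
                        await db.SaveChangesAsync(stoppingToken);

                        _logger.LogInformation($"{legacy.Length} cases have been deleted");
                    }
                    catch (Exception ex)
                    {
                        _logger.LogError(ex, "Could not cleanup");
                    }

                    await Task.Delay(TimeSpan.FromHours(1), stoppingToken);
                }
            }
        }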
Example #6
        async void accquireDaemon()
        {
            await ThreadingUtils.ContinueAtDedicatedThread();

            while (true)
            {
                var parameters = "<Operation>";
                for (int i = 0; i < Global.Random.Next(1, 4); i++)
                {
                    parameters += $"<Step{i}>";

                    for (int k = 0; k < Global.Random.Next(2, 10); k++)
                    {
                        parameters += $"<p name=\"{Global.Random.NextRUWord()}\">{Global.Random.NextObjFrom(Global.Random.NextDateTime(default, DateTime.UtcNow).ToString(), Global.Random.NextDouble(0, 10000).ToString(), Global.Random.Next(-10000, 10000).ToString(), Global.Random.NextRUWord().ToString(), Global.Random.NextENWord().ToString())}</p>";
Example #7
        async void cleanLoop()
        {
            await ThreadingUtils.ContinueAtDedicatedThread();

            while (true)
            {
                try
                {
                    CacheEntry[] entries;
                    using (await _storage.LockAsync())
                    {
                        entries = _storage.Entries.ToArray();
                    }

                    var entriesToRemove = new List <CacheEntry>();
                    var actualSize      = entries.Sum(e => e.ApproximateSizeInMemory);
                    if (actualSize > _maxCacheSize)
                    {
                        var delta = actualSize - _maxCacheSize; // amount of memory that has to be freed
                        foreach (var entry in entries.Shake(new Random()))
                        {
                            delta -= entry.ApproximateSizeInMemory;
                            entriesToRemove.Add(entry);

                            if (delta <= 0)
                            {
                                break;
                            }
                        }
                    }

                    foreach (var entry in entriesToRemove)
                    {
                        await _storage.TryRemoveAsync(entry);
                    }
                }
                catch { }
                finally
                {
                    await Task.Delay(1 * 60 * 1000);
                }
            }
        }
Example #8
        async Task dispatcherLoopAsync()
        {
            await ThreadingUtils.ContinueAtDedicatedThread();

            while (true)
            {
                try
                {
                    var message = _consumer.Consume();
                    var scope   = _scopeBuilder
                                  .CreateScope();
                    var controller = scope.ServiceProvider
                                     .GetRequiredService <IStatisticServiceAPI>();
                    var parameter = message.Value;
                    var handler   = getHandler();
                    executeAsync();

                    /////////////////////////////////////////////

                    Func <Task> getHandler()
                    {
                        return(message.Value switch
                        {
                            CommentaryNotification cn => () => controller.OnCommentaryActionAsync(cn),
                            PostNotification pn => () => controller.OnPostActionAsync(pn),
                            SeenNotification sn => () => controller.OnSeenAsync(sn),
                            UserNotification un => () => controller.OnUserActionAsync(un),
                            _ => throw new NotSupportedException()
                        });
                    }

                    async void executeAsync()
                    {
                        using (scope)
                        {
                            await ThreadingUtils.ContinueAtThreadPull();

                            await handler();

                            _logger.LogInformation($"Consumed message '{message.Value}' at: '{message.TopicPartitionOffset}'.");
                        }
                    }
                }
Example #9
        async void consumeDaemon <T>(IConsumer <Ignore, T> consumer, string topic, Func <T, Task> fireEventAsync)
        {
            await Task.Delay(3000); // To avoid a deadlock

            await ThreadingUtils.ContinueAtDedicatedThread();

            consumer.Subscribe(topic);
            while (true)
            {
                try
                {
                    var cr = consumer.Consume();
                    _logger.LogTrace($"Consumed message '{cr.Message.Value?.ToString()}' at: '{cr.TopicPartitionOffset}'.");
                    await fireEventAsync(cr.Message.Value);
                }
                catch (Exception e)
                {
                    _logger.LogError(e, "Error occurred");
                }
            }
        }
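For context, the consumer handed to consumeDaemon would typically come from Confluent.Kafka's ConsumerBuilder. The wiring below is only a hypothetical call site; the bootstrap servers, group id, topic name and handler are placeholders, not values from the original project.

        // Hypothetical call site for consumeDaemon (assumes Confluent.Kafka).
        var config = new ConsumerConfig
        {
            BootstrapServers = "localhost:9092",     // placeholder
            GroupId          = "statistics-service", // placeholder
            AutoOffsetReset  = AutoOffsetReset.Earliest
        };

        var consumer = new ConsumerBuilder<Ignore, string>(config).Build();

        // T is inferred as string here; fireNotificationEventAsync is a placeholder Func<string, Task>.
        consumeDaemon(consumer, "notifications", value => fireNotificationEventAsync(value));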
Example #10
        async void unbanLoopAsync()
        {
            await ThreadingUtils.ContinueAtDedicatedThread();

            while (true)
            {
                using (var scope = _scopeFactory.CreateScope())
                {
                    var db = scope.ServiceProvider.GetService <BlogContext>();
                    foreach (var user in db.Users.Where(u => u.Status.State == ProfileState.BANNED))
                    {
                        if (user.Status.BannedTill < DateTime.UtcNow)
                        {
                            _log.LogInformation($"Trying to unban {user.UserName}");

                            user.Status.State = user.EmailConfirmed
                                ? ProfileState.ACTIVE
                                : ProfileState.RESTRICTED;
                            user.Status.BannedTill  = null;
                            user.Status.StateReason = null;
                        }
                    }

                    try
                    {
                        await db.SaveChangesAsync();
                    }
                    catch (Exception ex)
                    {
                        _log.LogError(ex, "Can't unban");
                    }
                }

                await Task.Delay(UPDATE_INTERVAL);
            }
        }
Example #11
        public static async Task <FlashDumpDataParser> CreateParserAsync(IEnumerable <OpenStreamAsyncDelegate> rawDumpParts, IDataPacketParser rowParser, AsyncOperationInfo operationInfo)
        {
            Logger.LogInfoEverywhere("Начат парсинг дампа Flash памяти");
            var sw = Stopwatch.StartNew();
            await ThreadingUtils.ContinueAtDedicatedThread(operationInfo);

            var resultsStreamPath     = Storaging.GetTempFilePath();
            int parsedRowsCount       = 0; // Accessed from multiple threads!
            int skippedRowsCount      = 0; // Accessed from multiple threads!
            var dumpLengthInMegabytes = 0D;

            using (var resultsStream = new FileStream(resultsStreamPath, FileMode.Create, FileAccess.Write, FileShare.Read))
            {
                foreach (var part in rawDumpParts)
                {
                    var partLength = await getDumpLengthInMegabytesAsync(part);

                    Logger.LogInfo(null, $"Парсинг части дампа длиной: {partLength:F2} Мб");
                    dumpLengthInMegabytes += partLength;

                    var allRowsStartIndexes = await getRowsIndexes(part);
                    await parseRowsAsync(allRowsStartIndexes, part, resultsStream);
                }
            }

            Logger.LogOK($"Парсинг дампа завершен-NLСтрок считано: {parsedRowsCount}-NLСтрок пропущено: {skippedRowsCount}", $"-MSG-NLСредняя скорость чтения: {dumpLengthInMegabytes / sw.Elapsed.TotalSeconds:F1} Мб/с");

            return(new FlashDumpDataParser(
                       rowParser.Curves.Length,
                       parsedRowsCount,
                       oi => Task.Run(() => (Stream) new FileStream(resultsStreamPath, FileMode.Open, FileAccess.Read, FileShare.Read))));

            async Task <List <long> > getRowsIndexes(OpenStreamAsyncDelegate streamsFactory)
            {
                using (var mainStream = await streamsFactory(new StreamParameters(READ_BUFFER_LENGTH), operationInfo))
                {
                    var indexerEndPositions = new long[INDEXER_DEGREE_OF_PARALLELISM];
                    var indexerChunkSize    = mainStream.Length / INDEXER_DEGREE_OF_PARALLELISM;
                    Logger.LogInfo(null, "Разбиение файла перед индексацией...");
                    for (int i = 0; i < INDEXER_DEGREE_OF_PARALLELISM - 1; i++)
                    {
                        mainStream.Position = indexerChunkSize * (i + 1);
                        var beginningOfTheMarker = findAllRowIndexes(mainStream, 0).FirstOrDefault(-1);
                        if (beginningOfTheMarker == -1)
                        {
                            Logger.LogWarning(null, $"Не удалось найти начало строки после позиции: {mainStream.Position}. Данные после данной позиции (если есть) будут проигнорированы");

                            break;
                        }
                        else
                        {
                            indexerEndPositions[i] = beginningOfTheMarker;
                        }
                        operationInfo.CancellationToken.ThrowIfCancellationRequested();
                    }
                    indexerEndPositions[INDEXER_DEGREE_OF_PARALLELISM - 1] = mainStream.Length;

                    Logger.LogInfo(null, "Индексация...");
                    var indexersFuture = new Task <long[]> [INDEXER_DEGREE_OF_PARALLELISM];
                    var range          = new DisplaceCollection <long>(2);
                    range.Add(0);
                    for (int i = 0; i < indexerEndPositions.Length; i++)
                    {
                        range.Add(indexerEndPositions[i]);

                        var from       = range.FirstElement();
                        var to         = range.LastElement();
                        var rootStream = await streamsFactory(new StreamParameters(READ_BUFFER_LENGTH), operationInfo);

                        rootStream.Position = from;
                        var section = new SectionedStreamProxy(rootStream, to - from);
                        indexersFuture[i] = findAllRowIndexesAsync(section, from);

                        Logger.LogInfo(null, $"Запущен поток индексации в интервале: {from} : {to}");
                    }

                    var result = new List <long>((mainStream.Length / 200).ToInt32()); // Estimated size (don't want the list to grow its buffer too much)
                    foreach (var future in indexersFuture)
                    {
                        try
                        {
                            var indexes = await future;
                            result.AddRange(indexes);
                        }
                        catch (OperationCanceledException) // Can't throw here
                        {
                            Logger.LogInfo(null, "Чтение дампа отменено");
                        }
                        catch (Exception ex)
                        {
                            Logger.LogError(null, "Ошибка индексации. Большой объем данных может быть потерян", ex);
                        }
                    }

                    operationInfo.CancellationToken.ThrowIfCancellationRequested();
                    Logger.LogInfo(null, $"Индексация завершена. Найдено строк: {result.Count}");

                    return(result);
                }

                async Task <long[]> findAllRowIndexesAsync(Stream section, long from)
                {
                    await ThreadingUtils.ContinueAtDedicatedThread(operationInfo);

                    return(findAllRowIndexes(section, from).ToArray());
                }
            }

            async Task parseRowsAsync(IList <long> allRowsStartIndexes, OpenStreamAsyncDelegate rawDataStreamAsyncFactory, Stream resultDestinationStream)
            {
                var rowsIndexes   = getRowsIndexesForParsing();
                var parsersFuture = new Task <Stream> [rowsIndexes.Length];

                for (int i = 0; i < rowsIndexes.Length; i++)
                {
                    var indexes = rowsIndexes[i];
                    parsersFuture[i] = parseRowsRangeAsync(indexes);
                }

                foreach (var future in parsersFuture)
                {
                    Stream parsedRowsStream = null;
                    try
                    {
                        using (parsedRowsStream = await future)
                        {
                            parsedRowsStream.Position = 0;
                            await parsedRowsStream.CopyToAsync(resultDestinationStream, 81920, operationInfo);

                            parsedRowsStream.SetLength(0); // Delete file
                        }
                    }
                    catch (OperationCanceledException)
                    {
                        Logger.LogInfo(null, "Чтение дампа отменено");
                    }
                    catch (Exception ex)
                    {
                        Logger.LogError(null, "Ошибка индексации. Большой объем данных может быть потерян", ex);
                    }
                    finally
                    {
                        parsedRowsStream?.Dispose();
                    }
                }
                operationInfo.CancellationToken.ThrowIfCancellationRequested();

                IEnumerable <long>[] getRowsIndexesForParsing()
                {
                    var chunks     = new IEnumerable <long> [PARSER_DEGREE_OF_PARALLELISM];
                    var chunkRange = new DisplaceCollection <int>(2);

                    chunkRange.Add(0);
                    var chunkSize = allRowsStartIndexes.Count / PARSER_DEGREE_OF_PARALLELISM;

                    if (chunkSize == 0)
                    {
                        Logger.LogError(null, "Слишком мало данных для обработки");

                        chunks.SetAll(new long[0]);
                    }
                    for (int i = 0; i < PARSER_DEGREE_OF_PARALLELISM; i++)
                    {
                        chunkRange.Add((i + 1) * chunkSize);
                        var from = chunkRange.FirstElement();
                        from = from == 0
                            ? from
                            : from - 1; // We should create overlap for 1 element, otherwise one row will be lost
                        var to = chunkRange.LastElement();
                        chunks[i] = allRowsStartIndexes.GetRangeTill(from, to);
                    }

                    return(chunks);
                }

                async Task <Stream> parseRowsRangeAsync(IEnumerable <long> rowsStarts)
                {
                    await ThreadingUtils.ContinueAtDedicatedThread(operationInfo);

                    using (var sourceFile = await rawDataStreamAsyncFactory(new StreamParameters(READ_BUFFER_LENGTH), operationInfo))
                    {
                        var resultFile       = getTempFileStream().ToBinaryWriter();
                        var rowPositionRange = new DisplaceCollection <long>(2);
                        rowPositionRange.Add(rowsStarts.FirstOrDefault());
                        var rowBuffer = new byte[rowParser.RowLength.To];
                        foreach (var rowStart in rowsStarts.Skip(1))
                        {
                            operationInfo.CancellationToken.ThrowIfCancellationRequested();
                            rowPositionRange.Add(rowStart);
                            var rowDataAreaStart = rowPositionRange.FirstElement() + ROW_START_MARKER.Length + ROW_DATA_START_OFFSET;
                            var rowDataAreaEnd   = rowPositionRange.LastElement() - ROW_DATA_END_OFFSET;
                            var actualRowLength  = rowDataAreaEnd - rowDataAreaStart;
                            if (actualRowLength < rowParser.RowLength.From)
                            {
                                Logger.LogWarning(null, $"Строка пропущена из-за недостаточной длины. Позиция: {rowDataAreaStart}, длина: {actualRowLength}, требуемая длина: {rowParser.RowLength.ToString()}");

                                Interlocked.Increment(ref skippedRowsCount);
                            }
                            else
                            {
                                var rowLength = (int)Math.Min(rowBuffer.Length, actualRowLength);
                                if (actualRowLength > rowParser.RowLength.To) // row exceeds the expected maximum and will be truncated to the buffer size
                                {
                                    Logger.LogWarning(null, $"Строка строка имеет слишком большую длину. Позиция: {rowDataAreaStart}, длина: {actualRowLength}, требуемая длина: {rowParser.RowLength.ToString()}");
                                }

                                sourceFile.Position = rowDataAreaStart;
                                sourceFile.Read(rowBuffer, 0, rowLength);
                                var row = rowParser.ParseRow(rowBuffer);
                                foreach (var point in row.Points)
                                {
                                    resultFile.Write(point);
                                }

                                Interlocked.Increment(ref parsedRowsCount);
                            }
                        }
                        resultFile.Flush();

                        return(resultFile.BaseStream);
                    }
                }
            }

            FileStream getTempFileStream()
            {
                return(new FileStream(Storaging.GetTempFilePath(), FileMode.Create, FileAccess.ReadWrite, FileShare.Read));
            }

            async Task <double> getDumpLengthInMegabytesAsync(OpenStreamAsyncDelegate rawDataStreamAsyncFactory)
            {
                using (var stream = await rawDataStreamAsyncFactory(new StreamParameters(1000), operationInfo)) // any buffer size will do, only the length is needed
                {
                    return(stream.Length / (1024D * 1024));
                }
            }

            // Returns indexes of the first byte of the marker. Made as robust as possible
            IEnumerable <long> findAllRowIndexes(Stream section, long sectionOffset)
            {
                using (section)
                {
                    var buffer = new byte[MAX_ROW_LENGTH + ROW_START_MARKER.Length];
                    while (true)
                    {
                        operationInfo.CancellationToken.ThrowIfCancellationRequested();

                        var readCount             = section.Read(buffer, 0, buffer.Length);
                        var endOfStreamWasReached = readCount < buffer.Length;
                        if (readCount == 0)
                        {
                            yield break;
                        }
                        else if (endOfStreamWasReached)
                        {
                            // Populate the rest of the buffer with data that won't cause a false detection
                            buffer.Set((byte)~ROW_START_MARKER.FirstElement(), readCount, buffer.Length - 1);
                        }

                        var bufferStartPosition  = section.Position - readCount;
                        var bufferEndPosition    = section.Position;
                        var lastFoundMarkerIndex = -1L; // Relative to the section start
                        for (int i = 0; i < readCount - ROW_START_MARKER.Length; i++)
                        {
                            var found = true;
                            for (int k = 0; k < ROW_START_MARKER.Length; k++)
                            {
                                if (buffer[i + k] != ROW_START_MARKER[k])
                                {
                                    found = false;
                                    break;
                                }
                            }

                            if (found)
                            {
                                lastFoundMarkerIndex = bufferStartPosition + i;
                                yield return(bufferStartPosition + i + sectionOffset);
                            }
                        }

                        if (lastFoundMarkerIndex == -1)
                        {
                            Logger.LogWarning(null, $"Ничего не найдено в диапазоне индексов: {bufferStartPosition} : {bufferEndPosition}");
                        }

                        if (endOfStreamWasReached)
                        {
                            Logger.LogInfo(null, $"Парсинг секции завершен");

                            break;
                        }
                        else
                        {
                            // Isn't setting position slow?
                            section.Position = lastFoundMarkerIndex == bufferEndPosition - ROW_START_MARKER.Length // if we found marker exactly at the end of buffer
                                ? bufferEndPosition                                                                // continue reading where we stopped
                                : bufferEndPosition - ROW_START_MARKER.Length;                                     // take a bit of the old buffer so as not to lose a marker
                        }
                    }
                }
            }
        }
Example #12
        public async Task SaveToAsync(Stream stream, IList <string> curveNames, IEnumerable <double[]> curveRows, int rowsCount, AsyncOperationInfo asyncOperationInfo)
        {
            await ThreadingUtils.ContinueAtDedicatedThread(asyncOperationInfo);

            //curveRows = curveRows.MakeCached();
            var rowFormat = getRowFormat();

            using (var sw = new StreamWriter(stream))
            {
                sw.WriteLine(HEADER_START.Format(rowsCount));
                foreach (var curveName in curveNames)
                {
                    sw.WriteLine(CURVE_NAME_FORMAT.Format(curveName + "."));
                }
                sw.WriteLine(END_OF_HEADER);

                var stringRows = curveRows
                                 .AsParallel()
                                 .AsOrdered()
                                 .WithDegreeOfParallelism(Environment.ProcessorCount)
                                 .WithExecutionMode(ParallelExecutionMode.ForceParallelism)
                                 .WithCancellation(asyncOperationInfo)
                                 .Select((row, i) => string.Format(rowFormat,
                                                                   (i + 1D).ToSequence()
                                                                   .Concat(row)
                                                                   .Select(v => (object)v.Exchange(double.NaN, NULL))
                                                                   .ToArray()))
                                 .Select(row => row.Replace(",", "."));
                foreach (var row in stringRows)
                {
                    asyncOperationInfo.CancellationToken.ThrowIfCancellationRequested();

                    sw.WriteLine(row);

                    asyncOperationInfo.Progress.AddProgress(1D / (rowsCount - 1), 0.3);
                }
            }

            string getRowFormat()
            {
                var valueRanges = new Interval[(curveRows.FirstOrDefault()?.Length ?? 0) + 1];
                var rowI        = 0;

                foreach (var row in curveRows.SkipNulls())
                {
                    asyncOperationInfo.CancellationToken.ThrowIfCancellationRequested();
                    asyncOperationInfo.Progress.AddProgress(1D / (rowsCount - 1), 0.7);

                    foreach (var pI in row.Length.Range())
                    {
                        var point = row[pI].Exchange(double.NaN, NULL);
                        valueRanges[pI + 1] = valueRanges[pI + 1].ExpandToContain(point);
                    }
                    rowI++;
                }
                valueRanges[0] = new Interval(0, rowsCount);
                return(valueRanges
                       .Select(r => Math.Max(r.From.ToStringInvariant(POINT_FORMAT).Length, r.To.ToStringInvariant(POINT_FORMAT).Length) + 1)
                       .Select((w, i) => i == 0 ? $"{{0,{w}}}" : $"{{{i},{w}:{POINT_FORMAT}}}")
                       .Aggregate(""));
            }
        }