/// <summary>
/// Serves a single storage task against the local file system: either reads the
/// requested segment into the task's buffer, or (for non-segment requests) fills
/// in the file length. Calls <c>task.Done()</c> on success; on failure the error
/// is logged and, for a missing file, <c>OnFileGone</c> is raised.
/// NOTE: async void is kept to preserve the existing fire-and-forget signature;
/// all exceptions are handled locally so none can escape unobserved.
/// </summary>
public async void EnqueueTask(HyperServerTask task)
{
    var path = Path.Combine(SystemPath, task.Path);
    try
    {
        Interlocked.Increment(ref _activeOperations);
        if (task.IsSegmentRequest)
        {
            using (var fs = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read,
                task.Length, FileOptions.Asynchronous))
            {
                fs.Position = task.Offset;

                // FileStream.ReadAsync may legitimately return fewer bytes than
                // requested in a single call; loop until the full segment is
                // read or EOF is reached. The previous single-call version
                // misreported valid partial reads as errors.
                var total = 0;
                while (total < task.Length)
                {
                    var read = await fs.ReadAsync(task.Buffer.Object, total, task.Length - total)
                        .ConfigureAwait(false);
                    if (read == 0)
                    {
                        break; // end of file before the segment was complete
                    }
                    total += read;
                }

                if (total != task.Length)
                {
                    Logger.Error("Can't read all bytes {0}/{1} {2}", total, task.Length, task.Path);
                    return;
                }

                SegmentService.Update(task.SinceCreatedMs);
                SegmentsPerSecond.Update(1);
            }
        }
        else
        {
            // Length-only request: no data is transferred.
            task.FileLength = new FileInfo(path).Length;
        }
        task.Done();
    }
    catch (Exception x)
    {
        if (x is FileNotFoundException)
        {
            OnFileGone(new FileGoneEventArgs { RelativePath = task.Path, SystemPath = path });
        }
        Logger.Error($"Error when reading the file {task.Path} {x.Message}");
    }
    finally
    {
        Interlocked.Decrement(ref _activeOperations);
    }
}
/// <summary>
/// Dedicated worker loop: drains the task queue while the storage is enabled.
/// Segment tasks (Length > 0) are read synchronously from disk into the task
/// buffer; length tasks resolve the file size (-1 when the file is missing).
/// Yields the thread between queue polls to avoid a hard spin.
/// </summary>
private void Worker()
{
    while (IsEnabled)
    {
        while (_tasks.TryDequeue(out var task))
        {
            var path = Path.Combine(SystemPath, task.Path);
            try
            {
                if (task.Length > 0)
                {
                    using (var fs = new FileStream(path, FileMode.Open, FileAccess.Read,
                        FileShare.ReadWrite, task.Length))
                    {
                        fs.Position = task.Offset;

                        // Stream.Read may return fewer bytes than requested even
                        // mid-file; loop until the segment is complete or EOF.
                        // The previous single-call version treated valid partial
                        // reads as failures.
                        var total = 0;
                        while (total < task.Length)
                        {
                            var read = fs.Read(task.Buffer.Object, total, task.Length - total);
                            if (read == 0)
                            {
                                break; // end of file before the segment was complete
                            }
                            total += read;
                        }

                        if (total != task.Length)
                        {
                            Logger.Error("Can't read all bytes {0}/{1} {2}", total, task.Length, task.Path);
                            continue; // task intentionally left un-Done, as before
                        }

                        SegmentService.Update(task.SinceCreatedMs);
                        SegmentsPerSecond.Update(1);
                    }
                }
                else
                {
                    // Length-only request: -1 signals a missing file to the caller.
                    var fi = new FileInfo(path);
                    task.FileLength = fi.Exists ? fi.Length : -1;
                }
                task.Done();
            }
            catch (Exception x)
            {
                if (x is FileNotFoundException)
                {
                    OnFileGone(new FileGoneEventArgs { RelativePath = task.Path, SystemPath = path });
                }
                Logger.Error("Error during reading of data {0} {1}", task.Path, x.Message);
            }
        }
        Thread.Yield();
    }
}
/// <summary>
/// Builds a one-line, human-readable status summary for this storage:
/// system path, average segment service time, segments-per-second rate,
/// and the current queue size.
/// </summary>
public string DebugLine()
{
    var serviceAvg = (int)SegmentService.GetAverage();
    var segmentRate = (int)SegmentsPerSecond.GetSpeed();
    return $"Storage {SystemPath} SVC: {serviceAvg} RPS: {segmentRate} Q: {QueueSize}";
}
/// <summary>
/// Serves a storage task from the hyper/proxy share: resolves the task path to a
/// TTH via the share index, then satisfies segment requests from the local cache
/// or, on a miss, by downloading from a hyp:// peer or an http:// origin.
/// Non-segment requests resolve the file length from the share magnet.
/// <c>task.Done()</c> is always invoked from <c>finally</c>, success or failure.
/// NOTE: async void is kept to preserve the existing fire-and-forget signature;
/// all exceptions are handled locally so none can escape unobserved.
/// </summary>
public async void EnqueueTask(HyperServerTask task)
{
    try
    {
        Interlocked.Increment(ref _activeOperations);

        // Index keys use backslashes and no leading separator.
        // BUG FIX: the leading-separator check previously ran against "/" AFTER
        // every '/' had already been replaced with '\', so it could never match
        // and paths like "/dir/file" were never found in the index.
        var normalizedPath = task.Path.Replace("/", "\\");
        if (normalizedPath.StartsWith("\\"))
        {
            normalizedPath = normalizedPath.Remove(0, 1);
        }

        string tth;
        lock (_pathIndex)
        {
            if (!_pathIndex.TryGetValue(normalizedPath, out tth))
            {
                Logger.Error($"Error when requesting hyper segment {normalizedPath} No such file in share index yet");
                return;
            }
        }

        var ci = _share.SearchByTth(tth);
        if (ci == null)
        {
            Logger.Error($"TTH {tth} not found in share");
            return;
        }

        if (task.IsSegmentRequest)
        {
            if (!_cacheManager.ReadCacheSegment(tth, task.Offset, task.Length, task.Buffer.Object, 0))
            {
                // Cache miss: fetch the segment from its upstream source.
                if (ci.Value.SystemPath.StartsWith("hyp://"))
                {
                    var segment = await _manager.DownloadSegment(task.Path, task.Offset, task.Length)
                        .ConfigureAwait(false);
                    // The downloaded segment replaces the task's own buffer.
                    task.Buffer.Dispose();
                    task.Buffer = segment;
                }
                else if (ci.Value.SystemPath.StartsWith("http://"))
                {
                    using (var stream = await HttpHelper.GetHttpChunkAsync(ci.Value.SystemPath, task.Offset, task.Length)
                        .ConfigureAwait(false))
                    using (var ms = new MemoryStream(task.Buffer.Object))
                    {
                        // Async copy: the source is a network stream, so avoid
                        // blocking a thread on a synchronous CopyTo.
                        await stream.CopyToAsync(ms).ConfigureAwait(false);
                    }
                }
                else
                {
                    throw new InvalidOperationException("Not supported source type " + ci.Value.SystemPath);
                }
                SegmentServiceProxy.Update(task.SinceCreatedMs);
                SegmentsPerSecondProxy.Update(1);
            }
            else
            {
                SegmentServiceCached.Update(task.SinceCreatedMs);
                SegmentsPerSecondCached.Update(1);
            }
            // Aggregate counters cover both the cached and proxied paths.
            SegmentService.Update(task.SinceCreatedMs);
            SegmentsPerSecond.Update(1);
        }
        else
        {
            // Length-only request: answered from share metadata, no transfer.
            task.FileLength = ci.Value.Magnet.Size;
        }
    }
    catch (Exception x)
    {
        Logger.Error($"Error when requesting hyper segment {task.Path} {x.Message}");
    }
    finally
    {
        Interlocked.Decrement(ref _activeOperations);
        task.Done();
    }
}