/// <summary>
/// Wires the color pipeline up to the control service: allocates LED/sector
/// color buffers from the persisted system data, subscribes to all control
/// events, and loads the downstream services.
/// </summary>
/// <param name="controlService">Hub that brokers events between services; this
/// instance registers itself on it.</param>
public ColorService(ControlService controlService) {
    // Register ourselves so other components can reach the color service.
    controlService.ColorService = this;
    _watch = new Stopwatch();
    _streamTokenSource = new CancellationTokenSource();
    _targetTokenSource = new CancellationTokenSource();
    _sDevices = Array.Empty<IColorTarget>();
    _systemData = DataUtil.GetSystemData();
    // Size the color buffers from persisted configuration.
    LedColors = new Color[_systemData.LedCount];
    // Fixed: removed a stray unary '+' ("+_systemData.SectorCount") that was
    // inconsistent with the LedColors allocation above (harmless but confusing).
    SectorColors = new Color[_systemData.SectorCount];
    _enableAutoDisable = _systemData.EnableAutoDisable;
    _streams = new Dictionary<string, ColorSource>();
    ControlService = controlService;
    Counter = new FrameCounter(this);
    // Subscribe to every control event we respond to.
    // NOTE(review): these handlers are never unsubscribed; acceptable only if
    // this service lives as long as the ControlService — confirm.
    ControlService.SetModeEvent += Mode;
    ControlService.DeviceReloadEvent += RefreshDeviceData;
    ControlService.RefreshLedEvent += ReloadLedData;
    ControlService.RefreshSystemEvent += ReloadSystemData;
    ControlService.TestLedEvent += LedTest;
    ControlService.FlashDeviceEvent += FlashDevice;
    ControlService.FlashSectorEvent += FlashSector;
    ControlService.DemoLedEvent += Demo;
    _splitter = new FrameSplitter(this);
    LoadServices();
}
/// <summary>
/// Feeds the splitter a synthetic frame (weight + trigger + weight) and runs
/// it through <c>SplitAndProcess</c>.
/// </summary>
public void TestDataSplitter() {
    // Arrange: compose a frame from two weight segments around a trigger.
    var frame = new List<byte>();
    frame.AddRange(GetWeightFrame());
    frame.AddRange(GetTriggerFrame());
    frame.AddRange(GetWeightFrame());
    var splitter = new FrameSplitter();
    // Act. Fixed: the result was previously stored in an unused local;
    // discard it explicitly instead.
    // TODO(review): this test asserts nothing about the split output —
    // add assertions on the expected segment count/content.
    _ = splitter.SplitAndProcess(frame.ToArray());
}
/// <summary>
/// Prepares the ambient-scene stream: loads all scene definitions from disk,
/// initializes empty color buffers, and subscribes to system refreshes.
/// </summary>
/// <param name="colorService">Owning color service; provides the control
/// service used for event subscription.</param>
public AmbientStream(ColorService colorService) {
    _watch = new Stopwatch();
    _random = new Random();
    // Default ambient color is plain white.
    _ambientColor = "#FFFFFF";
    // Color buffers start empty; they are populated once a scene is active.
    _currentColors = Array.Empty<Color>();
    _nextColors = Array.Empty<Color>();
    _sceneColors = Array.Empty<Color>();
    // Pull every ambient scene definition from the "ambientScenes" store.
    _loader = new JsonLoader("ambientScenes");
    _scenes = _loader.LoadFiles<AmbientScene>();
    _splitter = new FrameSplitter(colorService);
    colorService.ControlService.RefreshSystemEvent += RefreshSystem;
}
/// <summary>
/// Sets up the DreamScreen stream: attaches to the Dream agent (when one is
/// registered), builds a 3/3/5/5 frame layout, and subscribes to system
/// refreshes before running an initial refresh.
/// </summary>
/// <param name="colorService">Owning color service.</param>
public DreamScreenStream(ColorService colorService) {
    _cs = colorService;
    // Attach to the DreamScreen agent if the control service has one.
    var agent = _cs.ControlService.GetAgent("DreamAgent");
    if (agent != null) {
        _client = agent;
        _client.CommandReceived += ProcessCommand;
    }
    // Frame layout: 3 top, 3 bottom, 5 left, 5 right sectors.
    var dimensions = new[] { 3, 3, 5, 5 };
    _builder = new FrameBuilder(dimensions, true);
    _splitter = new FrameSplitter(colorService);
    _cs.ControlService.RefreshSystemEvent += RefreshSystem;
    RefreshSystem();
}
/// <summary>
/// Starts the USB video stream and keeps it alive until cancellation.
/// Returns early (stream not started) when no video device is available
/// after <c>Refresh</c>.
/// </summary>
/// <param name="splitter">Splitter that receives grabbed frames via
/// <c>SetFrame</c>.</param>
/// <param name="ct">Token that ends the stream when cancelled.</param>
public async Task Start(FrameSplitter splitter, CancellationToken ct) {
    Log.Debug("Starting USB Stream...");
    _splitter = splitter;
    await Refresh();
    if (_video == null) {
        return;
    }
    _video.ImageGrabbed += SetFrame;
    _video.Start();
    Log.Debug("USB Stream started.");
    try {
        // Fixed: the previous loop busy-polled ct with a 1 ms delay, waking
        // ~1000 times per second just to check the token. A single infinite,
        // cancellable delay is equivalent (frames arrive via the ImageGrabbed
        // event) but stays idle until shutdown.
        await Task.Delay(Timeout.Infinite, ct);
    } catch (OperationCanceledException) {
        // Expected on shutdown; nothing to do.
        // NOTE(review): _video is not stopped here — presumably handled by the
        // caller or a Stop method; confirm.
    }
}
/// <summary>
/// Begins screen capture on a background task. Completes immediately when no
/// screen is detected or when setup throws; in both cases a completed task is
/// returned instead of a capture loop.
/// </summary>
/// <param name="splitter">Splitter that will receive captured frames.</param>
/// <param name="ct">Token observed by the capture loop.</param>
public Task Start(FrameSplitter splitter, CancellationToken ct) {
    _splitter = splitter;
    try {
        SetDimensions();
        // Bail out when either dimension is unset — there is nothing to grab.
        var noScreen = _width == 0 || _height == 0;
        if (noScreen) {
            Log.Information("We have no screen, returning.");
            return Task.CompletedTask;
        }

        _capturing = true;
        // Run the capture loop off-thread; the loop itself honors ct.
        return Task.Run(() => CaptureScreen(ct), CancellationToken.None);
    } catch (Exception e) {
        Log.Warning("Exception, can't start screen cap: " + e.Message);
        _capturing = false;
        return Task.CompletedTask;
    }
}
/// <summary>
/// Configures the Pi camera via MMALSharp and streams frames until the token
/// is cancelled. Frames are delivered through the capture handler's
/// MyEmguEvent, handled by ProcessFrame.
/// </summary>
/// <param name="frameSplitter">Splitter that receives processed frames.</param>
/// <param name="ct">Token that ends camera processing when cancelled.</param>
public async Task Start(FrameSplitter frameSplitter, CancellationToken ct) {
    _splitter = frameSplitter;
    // Global camera configuration must be set before ConfigureCameraSettings().
    MMALCameraConfig.VideoStabilisation = false;
    MMALCameraConfig.SensorMode = MMALSensorMode.Mode1;
    MMALCameraConfig.ExposureMode = MMAL_PARAM_EXPOSUREMODE_T.MMAL_PARAM_EXPOSUREMODE_BACKLIGHT;
    MMALCameraConfig.VideoResolution = new Resolution(CapWidth, CapHeight);
    // 60/1 rational => 60 fps capture.
    MMALCameraConfig.VideoFramerate = new MMAL_RATIONAL_T(60, 1);
    using var vidCaptureHandler = new EmguInMemoryCaptureHandler();
    using var splitter = new MMALSplitterComponent();
    using var renderer = new MMALNullSinkComponent();
    _cam.ConfigureCameraSettings();
    Log.Debug("Cam mode is " + MMALCameraConfig.SensorMode);
    // Register to the event so each grabbed frame reaches ProcessFrame.
    vidCaptureHandler.MyEmguEvent += ProcessFrame;
    // We are instructing the splitter to do a format conversion to BGR24.
    var splitterPortConfig =
        new MMALPortConfig(MMALEncoding.BGR24, MMALEncoding.BGR24, CapWidth, CapHeight, null);
    // By default in MMALSharp, the Video port outputs using proprietary communication
    // (Opaque) with a YUV420 pixel format. Changes to this are done via
    // MMALCameraConfig.VideoEncoding and MMALCameraConfig.VideoSub format.
    splitter.ConfigureInputPort(
        new MMALPortConfig(MMALEncoding.OPAQUE, MMALEncoding.I420, CapWidth, CapHeight, null),
        _cam.Camera.VideoPort, null);
    // We then use the splitter config object we constructed earlier. We then tell this
    // output port to use our capture handler to record data.
    splitter.ConfigureOutputPort<SplitterVideoPort>(0, splitterPortConfig, vidCaptureHandler);
    // Pipeline wiring: preview -> null sink (required by MMAL), video -> splitter.
    _cam.Camera.PreviewPort.ConnectTo(renderer);
    _cam.Camera.VideoPort.ConnectTo(splitter);
    // Brief warm-up before processing starts — presumably to let the sensor
    // settle (auto-exposure/AWB); confirm the 2 s value is needed.
    await Task.Delay(2000, ct);
    await _cam.ProcessAsync(_cam.Camera.VideoPort, ct).ConfigureAwait(false);
    Log.Debug("Camera closed.");
}
/// <summary>
/// Creates the UDP color stream: subscribes to control events, opens a
/// non-blocking UDP socket on port 21324 (WLED-compatible), ensures the
/// device has a host name persisted, and launches the background listener.
/// </summary>
/// <param name="cs">Owning color service; its control service is retained.</param>
public UdpStream(ColorService cs) {
    _cs = cs.ControlService;
    _cs.RefreshSystemEvent += RefreshSystem;
    _cs.SetModeEvent += Mode;
    _cs.StartStreamEvent += StartStream;
    _splitter = new FrameSplitter(cs);
    // Port 21324 with a small receive buffer and limited TTL.
    _uc = new UdpClient(21324) { Ttl = 5, Client = { ReceiveBufferSize = 2000 } };
    _uc.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);
    _uc.Client.Blocking = false;
    // DontFragment is not supported on macOS sockets.
    if (!RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) {
        _uc.DontFragment = true;
    }
    var sd = DataUtil.GetSystemData();
    _devMode = sd.DeviceMode;
    _sd = sd;
    _hostName = _sd.DeviceName;
    // First run: fall back to the machine's DNS name and persist it.
    if (string.IsNullOrEmpty(_hostName)) {
        _hostName = Dns.GetHostName();
        _sd.DeviceName = _hostName;
        DataUtil.SetSystemData(_sd);
    }
    RefreshSystem();
    _timeOutWatch = new Stopwatch();
    _cts = new CancellationTokenSource();
    _listenToken = _cts.Token;
    // Fixed: Listen was previously started TWICE (duplicated line), creating
    // two concurrent receive loops on the same socket. Start exactly one
    // fire-and-forget listener.
    Task.Run(Listen, _listenToken);
}