/// <summary>
/// Begins receiving frames from the client using the specified quality and display.
/// </summary>
/// <param name="quality">The quality of the remote desktop frames.</param>
/// <param name="display">The display to receive frames from.</param>
public void BeginReceiveFrames(int quality, int display)
{
    lock (_syncLock)
    {
        IsStarted = true;
        _codec?.Dispose();
        _codec = null;
        _client.Send(new GetDesktop { CreateNew = true, Quality = quality, DisplayIndex = display });
    }
}
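// Usage sketch (illustrative, not part of the original source): a caller might start a session on
// the primary display at a medium quality. The _remoteDesktopHandler field, DefaultQuality constant
// and StartSession method are assumptions; only BeginReceiveFrames itself is shown above.
private const int DefaultQuality = 75;

private void StartSession()
{
    // Display index 0 is assumed to be the primary monitor.
    _remoteDesktopHandler.BeginReceiveFrames(DefaultQuality, display: 0);
}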
public override void Dispose()
{
    lock (_framesLock)
    {
        _lastFrame?.Dispose();
        _lastFrame = null;
    }

    _videoCaptureDevice?.Stop();
    _unsafeStreamCodec?.Dispose();
    _unsafeStreamCodec = null;
    _videoCaptureDevice = null;
}
public override void Dispose()
{
    base.Dispose();

    // Important: dispose the codec on a background thread. This method runs on the UI thread,
    // and the lock can be held by code that invokes back into the UI thread, so acquiring it
    // here directly would deadlock.
    Task.Run(() =>
    {
        lock (_unsafeStreamLock)
        {
            _unsafeStreamCodec?.Dispose();
            _unsafeStreamCodec = null;
        }
    });
}
private void Execute(ISender client, GetWebcam message)
{
    if (message.Destroy)
    {
        _webcamHelper.StopRunningVideo();
        OnReport("Remote webcam session stopped");
        return;
    }

    _webcamHelper.Init(message.DisplayIndex);

    var resolution = new Resolution
    {
        Height = _webcamHelper._resolution.Height,
        Width = _webcamHelper._resolution.Width
    };

    if (_streamCodec == null)
    {
        _streamCodec = new UnsafeStreamCodec(message.Quality, message.DisplayIndex, resolution);
    }

    if (message.CreateNew)
    {
        // A new session was requested: recreate both the video source and the codec.
        _streamCodec?.Dispose();
        _webcamHelper.NewVideoSource(message.DisplayIndex);
        _streamCodec = new UnsafeStreamCodec(message.Quality, message.DisplayIndex, resolution);
        OnReport("Remote webcam session started");
    }

    if (_streamCodec.ImageQuality != message.Quality || _streamCodec.Monitor != message.DisplayIndex ||
        _streamCodec.Resolution != resolution)
    {
        // The requested quality, device or resolution changed: recreate the codec to match.
        _streamCodec?.Dispose();
        _webcamHelper.NewVideoSource(message.DisplayIndex);
        _streamCodec = new UnsafeStreamCodec(message.Quality, message.DisplayIndex, resolution);
    }

    if (_webcamHelper._currentFrame == null)
    {
        // No frame has been captured yet; send a blank placeholder frame instead.
        Bitmap emptyFrame = new Bitmap(1920, 1080, PixelFormat.Format32bppPArgb);
        SendFrame(client, emptyFrame, resolution);
    }
    else
    {
        // Copy the current frame so the capture thread can keep writing to the original.
        Bitmap webcamFrame = new Bitmap(_webcamHelper._currentFrame);
        SendFrame(client, webcamFrame, resolution);
    }
}
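// Hypothetical sketch (not in the original source) of what SendFrame could look like, mirroring the
// desktop-capture path further below: lock the bitmap, run it through the codec and send the encoded
// buffer. GetWebcamResponse and its members are assumptions; only SendFrame's call sites are shown above.
private void SendFrame(ISender client, Bitmap frame, Resolution resolution)
{
    BitmapData frameData = null;
    try
    {
        frameData = frame.LockBits(new Rectangle(0, 0, frame.Width, frame.Height),
            ImageLockMode.ReadOnly, frame.PixelFormat);

        using (MemoryStream stream = new MemoryStream())
        {
            _streamCodec.CodeImage(frameData.Scan0,
                new Rectangle(0, 0, frame.Width, frame.Height),
                new Size(frame.Width, frame.Height),
                frameData.PixelFormat, stream);

            client.Send(new GetWebcamResponse
            {
                Image = stream.ToArray(),
                Quality = _streamCodec.ImageQuality,
                Monitor = _streamCodec.Monitor,
                Resolution = resolution
            });
        }
    }
    finally
    {
        if (frameData != null)
            frame.UnlockBits(frameData);
        frame.Dispose();
    }
}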
private unsafe void ProcessImage(byte[] data, int index)
{
    if (!IsStreaming)
    {
        return;
    }

    lock (_unsafeStreamLock)
    {
        if (!IsStreaming)
        {
            return;
        }

        // Recreate the codec when the selected device or resolution has changed.
        if (_unsafeStreamCodec != null &&
            (_currentDevice != _webcamSettings.MonikerString || _currentResolution != _webcamSettings.Resolution))
        {
            _unsafeStreamCodec.Dispose();
            _unsafeStreamCodec = null;
        }

        if (_unsafeStreamCodec == null)
        {
            _currentResolution = _webcamSettings.Resolution;
            _currentDevice = _webcamSettings.MonikerString;
            _unsafeStreamCodec = new UnsafeStreamCodec(UnsafeStreamCodecParameters.None);
        }

        WriteableBitmap writeableBitmap;
        fixed (byte* dataPtr = data)
            writeableBitmap = _unsafeStreamCodec.DecodeData(dataPtr + index, (uint)(data.Length - index),
                Application.Current.Dispatcher);

        _framesReceived++;

        if (_writeableBitmap != writeableBitmap)
        {
            _writeableBitmap = writeableBitmap;
            RefreshWriteableBitmap?.Invoke(this, writeableBitmap);
        }
    }

    if (IsStreaming)
    {
        // Immediately request the next frame.
        GetWebcamImage();
    }

    if (FramesPerSecond == 0 && _framesReceived == 0)
    {
        _frameTimestamp = DateTime.UtcNow;
    }
    else if (DateTime.UtcNow - _frameTimestamp > TimeSpan.FromSeconds(1))
    {
        // Roll the counter over once per second to compute frames per second.
        FramesPerSecond = _framesReceived;
        _framesReceived = 0;
        _frameTimestamp = DateTime.UtcNow;
    }
}
public unsafe void UpdateImage(byte[] data, int index, uint length)
{
    if (_unsafeStreamCodec == null || _codecHeight != _height || _codecWidth != _width)
    {
        _unsafeStreamCodec?.Dispose();
        _unsafeStreamCodec = new UnsafeStreamCodec(UnsafeStreamCodecParameters.None);
        _codecHeight = _height;
        _codecWidth = _width;
    }

    fixed (byte* dataPtr = data)
        Image = _unsafeStreamCodec.DecodeData(dataPtr + index, length, Application.Current.Dispatcher);

    LastUpdateUtc = DateTime.UtcNow;
}
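// Usage sketch (illustrative, not part of the original source): the index/length parameters allow
// decoding straight out of a received buffer without copying the payload first. The header size and
// the OnFrameReceived signature are assumptions made for the example.
private const int HeaderSize = 4; // hypothetical packet header preceding the frame data

private void OnFrameReceived(byte[] receiveBuffer, int bytesRead)
{
    // Skip the header and decode only the frame payload.
    UpdateImage(receiveBuffer, HeaderSize, (uint)(bytesRead - HeaderSize));
}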
public override void ProcessCommand(byte[] parameter, IConnectionInfo connectionInfo)
{
    switch ((WebcamCommunication)parameter[0])
    {
        case WebcamCommunication.Start:
            _webcamSettings = new Serializer(typeof(WebcamSettings)).Deserialize<WebcamSettings>(parameter, 1);

            // Switch to a different capture device if another one was selected.
            if (_videoCaptureDevice != null && _videoCaptureDevice.Source != _webcamSettings.MonikerString)
            {
                _videoCaptureDevice.Stop();
                _videoCaptureDevice = null;
            }

            if (_videoCaptureDevice == null)
            {
                _videoCaptureDevice = new VideoCaptureDevice(_webcamSettings.MonikerString);
            }

            try
            {
                _videoCaptureDevice.VideoResolution = _videoCaptureDevice.VideoCapabilities[_webcamSettings.Resolution];
            }
            catch (Exception)
            {
                ResponseByte((byte)WebcamCommunication.ResponseResolutionNotFoundUsingDefault, connectionInfo);
            }

            _isRunning = true;
            _videoCaptureDevice.NewFrame += _videoCaptureDevice_NewFrame;
            _videoCaptureDevice.Start();
            ResponseByte((byte)WebcamCommunication.ResponseStarted, connectionInfo);
            break;
        case WebcamCommunication.Stop:
            if (_videoCaptureDevice != null)
            {
                _videoCaptureDevice.NewFrame -= _videoCaptureDevice_NewFrame;
                _videoCaptureDevice.Stop();
                _isRunning = false;

                lock (_unsafeStreamCodecLock)
                {
                    _unsafeStreamCodec?.Dispose();
                    _unsafeStreamCodec = null;
                }

                _videoCaptureDevice = null;

                lock (_framesLock)
                {
                    _lastFrame?.Dispose();
                    _lastFrame = null;
                }

                ResponseByte((byte)WebcamCommunication.ResponseStopped, connectionInfo);
            }
            break;
        case WebcamCommunication.GetImage:
            if (_lastFrame == null)
            {
                if (!_isRunning)
                {
                    return;
                }

                // Wait up to 10 seconds for the first frame to arrive.
                _screenWaitEvent?.Close();
                _screenWaitEvent = new AutoResetEvent(false);
                if (!_screenWaitEvent.WaitOne(10000, false))
                {
                    if (!_isRunning)
                    {
                        return;
                    }

                    ResponseByte((byte)WebcamCommunication.ResponseNoFrameReceived, connectionInfo);
                    _videoCaptureDevice.NewFrame -= _videoCaptureDevice_NewFrame;
                    _videoCaptureDevice.Stop();
                    _unsafeStreamCodec?.Dispose();
                    _unsafeStreamCodec = null;
                    _videoCaptureDevice = null;
                    return;
                }
            }

            if (!_isRunning)
            {
                return;
            }

            lock (_unsafeStreamCodecLock)
            {
                if (!_isRunning)
                {
                    return;
                }

                // Recreate the codec when the selected resolution or device has changed.
                if (_unsafeStreamCodec != null &&
                    (_currentResolution != _webcamSettings.Resolution || _currentDevice != _webcamSettings.MonikerString))
                {
                    _unsafeStreamCodec.Dispose();
                    _unsafeStreamCodec = null;
                }

                if (_unsafeStreamCodec == null)
                {
                    _currentResolution = _webcamSettings.Resolution;
                    _currentDevice = _webcamSettings.MonikerString;
                    _unsafeStreamCodec = new UnsafeStreamCodec(UnsafeStreamCodecParameters.None)
                    {
                        ImageQuality = parameter[1]
                    };
                }

                IDataInfo dataInfo;
                lock (_framesLock)
                {
                    var webcamData = _lastFrame.LockBits(new Rectangle(0, 0, _lastFrame.Width, _lastFrame.Height),
                        ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb);
                    dataInfo = _unsafeStreamCodec.CodeImage(webcamData.Scan0,
                        new Rectangle(0, 0, webcamData.Width, webcamData.Height),
                        new Size(_lastFrame.Width, _lastFrame.Height), webcamData.PixelFormat);
                    _lastFrame.UnlockBits(webcamData);
                }

                connectionInfo.UnsafeResponse(this, dataInfo.Length + 1, writer =>
                {
                    writer.Write((byte)WebcamCommunication.ResponseFrame);
                    dataInfo.WriteIntoStream(writer.BaseStream);
                });
            }
            break;
        case WebcamCommunication.GetWebcams:
            if (CoreHelper.RunningOnVistaOrGreater)
            {
                var webcams = new FilterInfoCollection(FilterCategory.VideoInputDevice).OfType<FilterInfo>()
                    .Select(x => new WebcamInfo
                    {
                        MonikerString = x.MonikerString,
                        Name = x.Name,
                        AvailableResolutions =
                            new VideoCaptureDevice(x.MonikerString).VideoCapabilities.Select(
                                y => new WebcamResolution
                                {
                                    Width = y.FrameSize.Width,
                                    Heigth = y.FrameSize.Height
                                }).ToList()
                    })
                    .ToList();

                ResponseBytes((byte)WebcamCommunication.ResponseWebcams,
                    new Serializer(typeof(List<WebcamInfo>)).Serialize(webcams), connectionInfo);
            }
            else
            {
                ResponseByte((byte)WebcamCommunication.ResponseNotSupported, connectionInfo);
            }
            break;
        default:
            throw new ArgumentOutOfRangeException();
    }
}
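// Illustrative sketch (not part of the original source) of how the administration side might build
// the Start packet this handler expects: one command byte followed by the serialized WebcamSettings,
// matching the Deserialize(parameter, 1) call above. BuildStartPacket itself is an assumption; the
// Serializer usage mirrors the calls in the handler.
private static byte[] BuildStartPacket(WebcamSettings settings)
{
    byte[] payload = new Serializer(typeof(WebcamSettings)).Serialize(settings);
    byte[] packet = new byte[payload.Length + 1];
    packet[0] = (byte)WebcamCommunication.Start;
    Buffer.BlockCopy(payload, 0, packet, 1, payload.Length);
    return packet;
}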
public void Dispose()
{
    _unsafeStreamCodec?.Dispose();
    Image?.Dispose();
}
private void Execute(ISender client, GetDesktop message)
{
    // TODO: Switch to streaming mode without request-response once switched from Windows Forms
    // TODO: Capture mouse in frames: https://stackoverflow.com/questions/6750056/how-to-capture-the-screen-and-mouse-pointer-using-windows-apis
    var monitorBounds = ScreenHelper.GetBounds(message.DisplayIndex);
    var resolution = new Resolution { Height = monitorBounds.Height, Width = monitorBounds.Width };

    if (_streamCodec == null)
    {
        _streamCodec = new UnsafeStreamCodec(message.Quality, message.DisplayIndex, resolution);
    }

    if (message.CreateNew || _streamCodec.ImageQuality != message.Quality ||
        _streamCodec.Monitor != message.DisplayIndex || _streamCodec.Resolution != resolution)
    {
        // A new session was requested or the settings changed: recreate the codec.
        _streamCodec?.Dispose();
        _streamCodec = new UnsafeStreamCodec(message.Quality, message.DisplayIndex, resolution);
    }

    BitmapData desktopData = null;
    Bitmap desktop = null;
    try
    {
        desktop = ScreenHelper.CaptureScreen(message.DisplayIndex);
        desktopData = desktop.LockBits(new Rectangle(0, 0, desktop.Width, desktop.Height),
            ImageLockMode.ReadWrite, desktop.PixelFormat);

        using (MemoryStream stream = new MemoryStream())
        {
            if (_streamCodec == null)
            {
                throw new Exception("StreamCodec cannot be null.");
            }

            _streamCodec.CodeImage(desktopData.Scan0,
                new Rectangle(0, 0, desktop.Width, desktop.Height),
                new Size(desktop.Width, desktop.Height),
                desktop.PixelFormat, stream);

            client.Send(new GetDesktopResponse
            {
                Image = stream.ToArray(),
                Quality = _streamCodec.ImageQuality,
                Monitor = _streamCodec.Monitor,
                Resolution = _streamCodec.Resolution
            });
        }
    }
    catch (Exception)
    {
        // On failure, notify the client with an empty frame and reset the codec so the next
        // request starts from a clean state.
        if (_streamCodec != null)
        {
            client.Send(new GetDesktopResponse
            {
                Image = null,
                Quality = _streamCodec.ImageQuality,
                Monitor = _streamCodec.Monitor,
                Resolution = _streamCodec.Resolution
            });
        }

        _streamCodec = null;
    }
    finally
    {
        if (desktop != null)
        {
            if (desktopData != null)
            {
                try
                {
                    desktop.UnlockBits(desktopData);
                }
                catch
                {
                }
            }

            desktop.Dispose();
        }
    }
}
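// Illustrative sketch (not part of the original source) of the request-response loop the first TODO
// above refers to: the administration side renders each GetDesktopResponse and immediately requests
// the next frame with the same settings. RenderFrame and this handler's shape are assumptions; the
// GetDesktop/GetDesktopResponse fields mirror the ones used above.
private void Execute(ISender client, GetDesktopResponse message)
{
    if (message.Image != null)
    {
        RenderFrame(message.Image, message.Resolution);
    }

    // Keep CreateNew false so the server reuses its existing codec between frames.
    client.Send(new GetDesktop
    {
        CreateNew = false,
        Quality = message.Quality,
        DisplayIndex = message.Monitor
    });
}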