/// <summary>
/// Asynchronously receives one frame from the RSI adapter, validates it
/// against the configured limits, publishes it via FrameReceived and returns
/// its IPOC timestamp.
/// </summary>
/// <returns>The IPOC value of the received frame.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown (after uninitializing the robot) when the axis-position or
/// workspace limit is exceeded.
/// </exception>
private async Task<long> ReceiveDataAsync() {
    InputFrame receivedFrame = await rsiAdapter.ReceiveDataAsync();

    // Validate before publishing anything; a limit violation shuts us down.
    if (!Limits.CheckAxisPosition(receivedFrame.AxisPosition)) {
        Uninitialize();
        throw new InvalidOperationException("Axis position limit has been exceeded:" +
            $"{Environment.NewLine}{receivedFrame.AxisPosition}");
    }

    if (!Limits.CheckPosition(receivedFrame.Position)) {
        Uninitialize();
        throw new InvalidOperationException("Available workspace limit has been exceeded:" +
            $"{Environment.NewLine}{receivedFrame.Position}");
    }

    // Writers and readers of the cached pose share receivedDataSyncLock.
    lock (receivedDataSyncLock) {
        position = receivedFrame.Position;
        axisPosition = receivedFrame.AxisPosition;
    }

    FrameReceived?.Invoke(this, new FrameReceivedEventArgs {
        ReceivedFrame = receivedFrame
    });

    return receivedFrame.IPOC;
}
/// <summary>
/// Dispatches a protocol message from the server to the matching event
/// (close, frameSent, frameReceived, socketError).
/// </summary>
internal override void OnMessage(string method, JsonElement? serverParams)
{
    // Built lazily: with ?.Invoke the argument is only evaluated when a
    // handler is actually attached, matching the original behavior.
    WebSocketFrame ParseFrame() => new WebSocketFrame(
        serverParams?.GetProperty("data").ToObject<string>(),
        serverParams?.GetProperty("opcode").ToObject<int>() == OpcodeBase64);

    switch (method)
    {
        case "close":
            Close?.Invoke(this, EventArgs.Empty);
            break;
        case "frameSent":
            FrameSent?.Invoke(this, ParseFrame());
            break;
        case "frameReceived":
            FrameReceived?.Invoke(this, ParseFrame());
            break;
        case "socketError":
            SocketError?.Invoke(this, serverParams?.GetProperty("error").ToObject<string>());
            break;
    }
}
/// <summary>
/// Connects to the OptiTrack (Motive) NatNet server and starts frame
/// streaming. Safe to call repeatedly; subsequent calls are no-ops.
/// </summary>
/// <param name="localAddress">Local interface address; defaults to loopback for a local Motive instance.</param>
/// <param name="serverAddress">Motive server address; defaults to loopback.</param>
/// <exception cref="InvalidOperationException">
/// NatNet initialization or server-description retrieval failed (Motive not running?).
/// </exception>
public void Initialize(string localAddress = "127.0.0.1", string serverAddress = "127.0.0.1") {
    if (isInitialized) {
        return;
    }

    int status = natNetClient.Initialize(localAddress, serverAddress);

    if (status != 0) {
        throw new InvalidOperationException("OptiTrack system initialization failed. Is Motive application running?");
    }

    status = natNetClient.GetServerDescription(serverDescription);

    if (status != 0) {
        throw new InvalidOperationException("Connection failed. Is Motive application running?");
    }

    isInitialized = true;
    Initialized?.Invoke();

    // NOTE(review): the frame handler is attached after Initialized fires, so
    // a frame arriving in between is dropped — confirm this ordering is intended.
    natNetClient.OnFrameReady += (data, client) => {
        FrameReceived?.Invoke(new InputFrame(data));
    };
}
// Forwards a received Intergas line to FrameReceived subscribers, but only
// when the line is a response frame; other frame kinds are ignored.
private void ProtoOnLineReceived(object sender, LineTransmitEventArgs <IntergasFrame> e) { if (e.Line is IntergasResponseFrame frame) { FrameReceived?.Invoke(this, new IntergasResponseFrameEventArgs(frame)); } }
/// <summary>
/// Creates a named pipe server and asynchronously pumps raw frames
/// (Width * Height * 3 bytes each, i.e. 24-bit pixels) from it, raising
/// FrameReceived once per complete frame.
/// </summary>
/// <exception cref="FileNotFoundException">FFmpeg was not found.</exception>
public void CreatePipe() {
    if (!toolExists) {
        throw new FileNotFoundException("Failed: FFmpeg not found.");
    }

    if (pipeCreated) {
        Console.WriteLine("Failed: Duplicated pipe.");
        return;
    }

    Task.Run(async () => {
        using (NamedPipeServerStream ps = new NamedPipeServerStream(pipeName, PipeDirection.In, 1,
            PipeTransmissionMode.Byte, PipeOptions.Asynchronous, int.MaxValue, int.MaxValue)) {
            Debug.WriteLine($"Success: Pipe '{pipeName}' created.");
            pipeCreated = true;
            await ps.WaitForConnectionAsync();

            int frameSize = Width * Height * 3;

            while (true) {
                // Fresh buffer per frame: the original reused one array, so a
                // handler still holding it saw the next read clobber its data.
                byte[] data = new byte[frameSize];
                int filled = 0;

                // A single Read may return fewer bytes than a full frame; loop
                // until the frame is complete or the writer closes the pipe.
                // Also use ReadAsync instead of blocking Read in this async lambda.
                while (filled < frameSize) {
                    int read = await ps.ReadAsync(data, filled, frameSize - filled);

                    if (read == 0) {
                        break;
                    }

                    filled += read;
                }

                if (filled < frameSize) {
                    // Writer closed mid-frame (or cleanly at a boundary); stop.
                    break;
                }

                FrameReceived?.Invoke(this, new FrameReceivedEventArgs() { FrameData = data });
            }

            ps.Disconnect();
            Debug.WriteLine($"Info: No data remaining. Pipe disconnected.");
        }
    });
}
/// <summary>
/// Completes an asynchronous pipe WaitForConnection, immediately re-arms the
/// listener for the next client, then reads frames from the connected client
/// until it disconnects or this instance is disposed.
/// </summary>
private void WaitForConnectionCallback(IAsyncResult ar) {
    if (_disposed) {
        return;
    }

    var stream = (NamedPipeServerStream)ar.AsyncState;

    try {
        // Originally outside the try: this throws (e.g. ObjectDisposedException)
        // if the pipe is torn down while the wait is pending, crashing the
        // callback thread. Treat it like any other lost connection instead.
        stream.EndWaitForConnection(ar);

        // Start listening for the next client before servicing this one.
        _stream = null;
        CreateStream();

        using (var bufferedStream = new BufferedStream(stream)) {
            while (!_disposed) {
                var frameBytes = bufferedStream.TakeBuffer();
                var frame = new Frame(frameBytes);
                var context = new InputContext(bufferedStream, frame);
                FrameReceived?.Invoke(this, context);
            }
        }
    }
    catch (Exception) {
        // Hide all exceptions for unexpected connection lost with the client
    }
}
/// <summary>
/// Decodes an incoming raw audio frame into a reusable output buffer and
/// republishes it; non-audio frames and undecodable frames are ignored.
/// </summary>
private void OnFrameReceived(object sender, RawFrame rawFrame)
{
    var rawAudioFrame = rawFrame as RawAudioFrame;

    if (rawAudioFrame == null)
    {
        return;
    }

    FFmpegAudioDecoder decoder = GetDecoderForFrame(rawAudioFrame);

    int decodedFrameSize;

    if (!decoder.TryDecode(rawAudioFrame, out decodedFrameSize))
    {
        return;
    }

    // Grow (never shrink) the reusable output buffer to fit this frame.
    if (_decodedFrameBuffer.Length < decodedFrameSize)
    {
        _decodedFrameBuffer = new byte[decodedFrameSize];
    }

    var bufferSegment = new ArraySegment<byte>(_decodedFrameBuffer, 0, decodedFrameSize);
    FrameReceived?.Invoke(this, decoder.GetDecodedFrame(bufferSegment));
}
/// <summary>
/// Receives data (IPOC, cartesian and axis position) from the robot asynchronously,
/// validates it against the correction, axis-position and workspace limits, updates
/// the cached state and raises the <see cref="KUKARobot.FrameReceived">FrameReceived</see> event.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown (after uninitializing the robot) when any limit is exceeded.
/// </exception>
private async Task ReceiveDataAsync() { InputFrame receivedFrame = await rsiAdapter.ReceiveDataAsync(); RobotVector correction = receivedFrame.Position - position; if (!Limits.CheckCorrection(correction)) { Uninitialize(); throw new InvalidOperationException("Correction limit has been exceeded:" + $"{Environment.NewLine}{correction}"); } if (!Limits.CheckAxisPosition(receivedFrame.AxisPosition)) { Uninitialize(); throw new InvalidOperationException("Axis position limit has been exceeded:" + $"{Environment.NewLine}{receivedFrame.AxisPosition}"); } if (!Limits.CheckPosition(receivedFrame.Position)) { Uninitialize(); throw new InvalidOperationException("Available workspace limit has been exceeded:" + $"{Environment.NewLine}{receivedFrame.Position}"); } lock (receivedDataSyncLock) { IPOC = receivedFrame.IPOC; position = receivedFrame.Position; axisPosition = receivedFrame.AxisPosition; } FrameReceived?.Invoke(receivedFrame); }
// Worker loop: pulls H.264 payloads from m_payloadQueue, feeds them to the
// payload parser, and — under m_decoderLock — decodes each generated frame,
// caching the last decoded frame and raising FrameReceived. Runs until
// m_CancelToken is cancelled (Receive throws OperationCanceledException).
// NOTE(review): every parsed frame gets the same TimeSpan(1) timestamp and
// markerBit=true — presumably timing is handled elsewhere; confirm.
private void DecodeTask() { H264VideoPayloadParser parser = new H264VideoPayloadParser(new RtspClientSharp.Codecs.Video.H264CodecInfo()); FFmpegVideoDecoder decoder = FFmpegVideoDecoder.CreateDecoder(FFmpegVideoCodecId.H264); parser.FrameGenerated += (frame) => { lock (m_decoderLock) { IDecodedVideoFrame decodedFrame = decoder.TryDecode((RawVideoFrame)frame); if (decodedFrame != null) { m_lastDecodedFrame = decodedFrame; FrameReceived?.Invoke(this, decodedFrame); } else { Log.e(TAG, "Failed to decode frame"); } } }; while (!m_CancelToken.Token.IsCancellationRequested) { try { Payload nextFramePayload = m_payloadQueue.Receive(m_CancelToken.Token); parser.Parse(new TimeSpan(1), new ArraySegment <byte>(nextFramePayload.Data.GetBuffer(), (int)nextFramePayload.Data.Position, (int)nextFramePayload.Data.Remaining()), true); } catch (OperationCanceledException) { break; } } }
/// <summary>
/// Reads framed messages from a connected TCP client until disposal or
/// connection loss, raising FrameReceived per frame; always closes the
/// client socket on exit if it is still connected.
/// </summary>
private void Process(TcpClient tcpClient)
{
    try
    {
        using (var bufferedStream = new BufferedStream(tcpClient.GetStream()))
        {
            while (!_disposed)
            {
                var frameBytes = bufferedStream.TakeBuffer();
                FrameReceived?.Invoke(this, new InputContext(bufferedStream, new Frame(frameBytes)));
            }
        }
    }
    catch (Exception)
    {
        // Hide all exceptions for unexpected connection lost with the client
    }
    finally
    {
        if (tcpClient.Connected)
        {
            tcpClient.Close();
        }
    }
}
/// <summary>
/// Raises FrameReceived for a media frame, computing whether metadata must
/// accompany it. Metadata is rate-limited per stream (audio/video) by
/// _metadataFrequency; for H.264, only an I-frame refreshes the video
/// metadata timestamp, so metadata waits for the next keyframe.
/// </summary>
/// <param name="sender">Sender object.</param>
/// <param name="mediaFrame">Media frame; null is ignored.</param>
/// <remarks>
/// Case order is load-bearing: RawH264Frame must be matched before the more
/// general RawVideoFrame pattern, otherwise H.264 frames would take the
/// generic video branch.
/// </remarks>
private void OnFrameReceived(object?sender, RawFrame mediaFrame) { var metadataRequired = false; var now = DateTime.UtcNow; switch (mediaFrame) { case RawAudioFrame _ when now >= _audioMetadataTime + _metadataFrequency: metadataRequired = true; _audioMetadataTime = now; break; case RawH264Frame _ when now >= _videoMetadataTime + _metadataFrequency: metadataRequired = mediaFrame is RawH264IFrame; _videoMetadataTime = metadataRequired ? now : _videoMetadataTime; break; case RawVideoFrame _ when now >= _videoMetadataTime + _metadataFrequency: metadataRequired = true; _videoMetadataTime = now; break; case null: return; } Interlocked.Increment(ref _totalFramesReceived); FrameReceived?.Invoke(this, mediaFrame, metadataRequired); }
/// <summary>
/// Decodes a raw video frame to BGR24 at its native size into a reusable
/// buffer and republishes it as a decoded frame. Non-video frames and
/// frames the decoder cannot handle are ignored.
/// </summary>
private void OnFrameReceived(object sender, RawFrame rawFrame)
{
    if (!(rawFrame is RawVideoFrame rawVideoFrame))
    {
        return;
    }

    FFmpegVideoDecoder decoder = GetDecoderForFrame(rawVideoFrame);

    if (!decoder.TryDecode(rawVideoFrame, out DecodedVideoFrameParameters decodedFrameParameters))
    {
        return;
    }

    // Output at the decoded frame's native size; the (removed) commented-out
    // client-driven rescaling via _desiredSize was dead code.
    int targetWidth = decodedFrameParameters.Width;
    int targetHeight = decodedFrameParameters.Height;
    int bufferSize = decodedFrameParameters.Height *
                     ImageUtils.GetStride(decodedFrameParameters.Width, PixelFormat.Bgr24);

    // Reallocate the shared output buffer only when the geometry changes.
    if (_decodedFrameBuffer.Length != bufferSize)
    {
        _decodedFrameBuffer = new byte[bufferSize];
    }

    var bufferSegment = new ArraySegment<byte>(_decodedFrameBuffer);

    var postVideoDecodingParameters = new PostVideoDecodingParameters(RectangleF.Empty,
        new Size(targetWidth, targetHeight),
        ScalingPolicy.Stretch, PixelFormat.Bgr24, ScalingQuality.Bicubic);

    IDecodedVideoFrame decodedFrame = decoder.GetDecodedFrame(bufferSegment, postVideoDecodingParameters);

    FrameReceived?.Invoke(this, decodedFrame);
}
// Completes an asynchronous pipe read. Under streamLock it finishes the read
// (bailing out silently on any of the several ways the pipe can die mid-read),
// then parses the received bytes into a PipeFrame: a successfully parsed frame
// is enqueued (frameQueue is its own lock) and FrameReceived is raised; a
// parse failure closes the connection. Finally re-arms the next read unless
// the connection has been closed.
private void EndReadStream(IAsyncResult callback) { int bytes = 0; try { lock (streamLock) { if (stream == null || !stream.IsConnected) { return; } bytes = stream.EndRead(callback); } } catch (IOException) { // The pipe has been closed return; } catch (NullReferenceException) { return; } catch (ObjectDisposedException) { return; } catch { // Unknown error return; } if (bytes > 0) { using (MemoryStream memory = new MemoryStream(buffer, 0, bytes)) { try { PipeFrame frame = new PipeFrame(); if (frame.ReadStream(memory)) { lock (frameQueue) { frameQueue.Enqueue(frame); } FrameReceived?.Invoke(); } else { Close(); } } catch { Close(); } } } if (!isClosed && IsConnected) { BeginReadStream(); } }
// Factory for the internal RTSP client: hooks its FrameReceived callback so
// each frame first flags _anyFrameReceived (read elsewhere for liveness
// checks) and is then republished through this instance's FrameReceived event.
private RtspClientInternal CreateRtspClientInternal(ConnectionParameters connectionParameters, Func <IRtspTransportClient> transportClientProvider) { return(new RtspClientInternal(connectionParameters, transportClientProvider) { FrameReceived = frame => { Volatile.Write(ref _anyFrameReceived, true); FrameReceived?.Invoke(this, frame); } }); }
/// <summary>
/// Unmarshals the accumulated stream bytes into an XBee frame, resets the
/// expected packet length and raises FrameReceived.
/// </summary>
/// <exception cref="XBeeException">The packet could not be unmarshaled.</exception>
protected virtual void ProcessReceivedData()
{
    try
    {
        var frame = XBeePacketUnmarshaler.Unmarshal(Stream.ToArray());
        packetLength = 0;
        FrameReceived?.Invoke(this, new FrameReceivedArgs(frame));
    }
    catch (XBeeFrameException ex)
    {
        throw new XBeeException("Unable to unmarshal packet.", ex);
    }
}
/************************************************************************/
#region Public methods
/// <summary>
/// Asynchonously begins receiving data.
/// </summary>
/// <param name="token">Cancellation token</param>
/// <returns>A task that represents the asynchronous receive operation.</returns>
/// <exception cref="OperationCanceledException">The token was cancelled.</exception>
public async Task ReceiveAsync(CancellationToken token)
{
    while (true)
    {
        token.ThrowIfCancellationRequested();
        ArraySegment<byte> data = await GetNextPartAsync(token).ConfigureAwait(false);

        // Fix: ArraySegment<byte> is a struct, so the original `data != null`
        // (a lifted comparison against a null Nullable) was always true.
        // A "missing" part is the default segment, whose Array is null.
        if (data.Array != null)
        {
            FrameReceived?.Invoke(this, new RawJpegFrame(DateTime.Now, data));
        }
    }
}
// Constructs the WebSocket wrapper around its protocol channel and wires the
// channel's events through to this instance's public events; the close
// handler also flips IsClosed before notifying subscribers.
internal WebSocket(IChannelOwner parent, string guid, WebSocketInitializer initializer) : base(parent, guid) { _channel = new(guid, parent.Connection, this); _initializer = initializer; _channel.Close += (_, _) => { IsClosed = true; Close?.Invoke(this, this); }; _channel.FrameReceived += (_, e) => FrameReceived?.Invoke(this, e); _channel.FrameSent += (_, e) => FrameSent?.Invoke(this, e); _channel.SocketError += (_, e) => SocketError?.Invoke(this, e); }
/// <summary>
/// Converts a NatNet motion-capture frame into an InputFrame (carrying the
/// time delta since the previous frame) and raises FrameReceived with both
/// the new frame and the previous ball position.
/// </summary>
private void ProcessFrame(FrameOfMocapData data, NatNetClientML client)
{
    // Seconds elapsed between consecutive NatNet timestamps.
    double frameDeltaTime = data.fTimestamp - frameTimestamp;
    frameTimestamp = data.fTimestamp;

    var frame = new InputFrame(data, frameDeltaTime);

    var eventArgs = new FrameReceivedEventArgs
    {
        ReceivedFrame = frame,
        PrevBallPosition = ballPosition
    };

    // Field is updated before the event fires, exactly as in the original.
    ballPosition = frame.BallPosition;

    FrameReceived?.Invoke(this, eventArgs);
}
/// <summary>
/// Decodes an incoming raw video frame and republishes it; non-video frames
/// and frames the decoder cannot handle are ignored.
/// </summary>
private void OnFrameReceived(object sender, RawFrame rawFrame)
{
    var rawVideoFrame = rawFrame as RawVideoFrame;

    if (rawVideoFrame == null)
    {
        return;
    }

    IDecodedVideoFrame decodedFrame = GetDecoderForFrame(rawVideoFrame).TryDecode(rawVideoFrame);

    if (decodedFrame == null)
    {
        return;
    }

    FrameReceived?.Invoke(this, decodedFrame);
}
/// <summary>
/// Dispatches a protocol message from the server to the matching event.
/// frameSent/frameReceived are only raised for text (opcode 1) and binary
/// (opcode 2) frames; other opcodes are filtered out.
/// </summary>
internal override void OnMessage(string method, JsonElement? serverParams)
{
    // Fix: the original local function was named IsTextOrBinaryFrame but
    // returned true for everything EXCEPT text/binary frames. The predicate
    // is now truthful and the call sites negate it; behavior is unchanged.
    bool IsTextOrBinaryFrame(out int opcode)
    {
        opcode = serverParams?.GetProperty("opcode").ToObject<int>() ?? 0;
        return opcode == 1 || opcode == 2;
    }

    int opcode;

    switch (method)
    {
        case "close":
            Close?.Invoke(this, EventArgs.Empty);
            break;
        case "frameSent":
            if (!IsTextOrBinaryFrame(out opcode))
            {
                break;
            }

            FrameSent?.Invoke(
                this,
                new WebSocketFrame(
                    serverParams?.GetProperty("data").ToObject<string>(),
                    opcode == OpcodeBase64));
            break;
        case "frameReceived":
            if (!IsTextOrBinaryFrame(out opcode))
            {
                break;
            }

            FrameReceived?.Invoke(
                this,
                new WebSocketFrame(
                    serverParams?.GetProperty("data").ToObject<string>(),
                    opcode == OpcodeBase64));
            break;
        case "socketError":
            SocketError?.Invoke(this, serverParams?.GetProperty("error").ToObject<string>());
            break;
    }
}
/// <summary>
/// Decodes an incoming raw video frame and republishes it, tracing the
/// runtime types of each received frame; non-video frames and undecodable
/// frames are ignored.
/// </summary>
private void OnFrameReceived(object sender, RawFrame rawFrame)
{
    var rawVideoFrame = rawFrame as RawVideoFrame;

    if (rawVideoFrame == null)
    {
        return;
    }

    // Diagnostic trace of the incoming frame's runtime types.
    Console.WriteLine($"OnFrameReceived sender : {sender.GetType()}, rawFrame : {rawFrame.GetType()}");

    IDecodedVideoFrame decodedFrame = GetDecoderForFrame(rawVideoFrame).TryDecode(rawVideoFrame);

    if (decodedFrame != null)
    {
        FrameReceived?.Invoke(this, decodedFrame);
    }
}
/// <summary>
/// Simulated RSI exchange: queues the pending correction, applies the one
/// issued 8 cycles earlier to the virtual position, validates limits, raises
/// FrameReceived and returns the frame's IPOC (always 0 in simulation).
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown (after uninitializing) when an axis or workspace limit is exceeded.
/// </exception>
private long ReceiveDataAsync() {
    // Model the robot's response latency: a correction takes effect 8 cycles later.
    correctionBuffor.Add(correction);

    RobotVector delayedCorrection = RobotVector.Zero;

    if (correctionBuffor.Count == 8) {
        delayedCorrection = correctionBuffor[0];
        correctionBuffor.RemoveAt(0);
    }

    InputFrame receivedFrame = new InputFrame {
        Position = position + delayedCorrection,
        AxisPosition = RobotAxisVector.Zero,
        IPOC = 0
    };

    if (!Limits.CheckAxisPosition(receivedFrame.AxisPosition)) {
        Uninitialize();
        throw new InvalidOperationException("Axis position limit has been exceeded:" +
            $"{Environment.NewLine}{receivedFrame.AxisPosition}");
    }

    if (!Limits.CheckPosition(receivedFrame.Position)) {
        Uninitialize();
        throw new InvalidOperationException("Available workspace limit has been exceeded:" +
            $"{Environment.NewLine}{receivedFrame.Position}");
    }

    lock (receivedDataSyncLock) {
        position = receivedFrame.Position;
        axisPosition = receivedFrame.AxisPosition;
    }

    FrameReceived?.Invoke(this, new FrameReceivedEventArgs { ReceivedFrame = receivedFrame });

    return receivedFrame.IPOC;
}
/// <summary>
/// Decodes a raw video frame to BGR24 at its native size into a reusable
/// buffer, reusing cached post-decoding parameters while the frame geometry
/// is stable, then republishes the decoded frame.
/// </summary>
private void OnFrameReceived(object sender, RawFrame rawFrame)
{
    var rawVideoFrame = rawFrame as RawVideoFrame;

    if (rawVideoFrame == null)
    {
        return;
    }

    FFmpegVideoDecoder decoder = GetDecoderForFrame(rawVideoFrame);

    DecodedVideoFrameParameters decodedFrameParameters;

    if (!decoder.TryDecode(rawVideoFrame, out decodedFrameParameters))
    {
        return;
    }

    int targetWidth = decodedFrameParameters.Width;
    int targetHeight = decodedFrameParameters.Height;
    int stride = ImageUtils.GetStride(targetWidth, PixelFormat.Bgr24);
    int bufferSize = targetHeight * stride;

    // Reallocate the shared output buffer only when the geometry changes.
    if (_decodedFrameBuffer.Length != bufferSize)
    {
        _decodedFrameBuffer = new byte[bufferSize];
    }

    // Rebuild the cached parameters only on a size change.
    bool sizeChanged = _postVideoDecodingParameters.TargetFrameSize.Width != targetWidth ||
                       _postVideoDecodingParameters.TargetFrameSize.Height != targetHeight;

    if (sizeChanged)
    {
        _postVideoDecodingParameters = new PostVideoDecodingParameters(RectangleF.Empty,
            new Size(targetWidth, targetHeight),
            ScalingPolicy.Stretch, PixelFormat.Bgr24, ScalingQuality.Nearest);
    }

    IDecodedVideoFrame decodedFrame = decoder.GetDecodedFrame(
        new ArraySegment<byte>(_decodedFrameBuffer), _postVideoDecodingParameters);

    FrameReceived?.Invoke(this, decodedFrame);
}
/// <summary>
/// Decodes a raw video frame, scaling to the client-requested size when one
/// is set (width/height packed into the high/low 32 bits of _desiredSize,
/// read atomically), otherwise using the frame's native size, then
/// republishes the decoded frame.
/// </summary>
private void OnFrameReceived(object sender, RawFrame rawFrame)
{
    var rawVideoFrame = rawFrame as RawVideoFrame;

    if (rawVideoFrame == null)
    {
        return;
    }

    FFmpegVideoDecoder decoder = GetDecoderForFrame(rawVideoFrame);

    DecodedVideoFrameParameters decodedFrameParameters;

    if (!decoder.TryDecode(rawVideoFrame, out decodedFrameParameters))
    {
        return;
    }

    // 0 means "no client-requested size": fall back to the native geometry.
    long desiredSize = Interlocked.Read(ref _desiredSize);

    int targetWidth = desiredSize == 0 ? decodedFrameParameters.Width : (int)(desiredSize >> 32);
    int targetHeight = desiredSize == 0 ? decodedFrameParameters.Height : (int)desiredSize;

    var postVideoDecodingParameters = new PostVideoDecodingParameters(RectangleF.Empty,
        new Size(targetWidth, targetHeight),
        ScalingPolicy.Stretch, PixelFormat.Bgr24, ScalingQuality.Bicubic);

    IDecodedVideoFrame decodedFrame = decoder.GetDecodedFrame(postVideoDecodingParameters);

    FrameReceived?.Invoke(this, decodedFrame);
}
/// <summary>
/// Decodes an incoming raw audio frame, converting to 16 bits per sample,
/// and republishes it; non-audio and undecodable frames are ignored.
/// </summary>
private void OnFrameReceived(object sender, RawFrame rawFrame)
{
    var rawAudioFrame = rawFrame as RawAudioFrame;

    if (rawAudioFrame == null)
    {
        return;
    }

    FFmpegAudioDecoder decoder = GetDecoderForFrame(rawAudioFrame);

    if (!decoder.TryDecode(rawAudioFrame))
    {
        return;
    }

    var conversionParameters = new AudioConversionParameters() { OutBitsPerSample = 16 };
    IDecodedAudioFrame decodedFrame = decoder.GetDecodedFrame(conversionParameters);
    FrameReceived?.Invoke(this, decodedFrame);
}
/// <summary>
/// Dispatches a protocol message from the server to the matching event
/// (close, frameSent, frameReceived, error).
/// </summary>
internal override void OnMessage(string method, JsonElement? serverParams)
{
    // Built lazily: with ?.Invoke the argument is only evaluated when a
    // handler is attached, matching the original behavior.
    string Payload() => serverParams?.GetProperty("data").ToObject<string>();

    switch (method)
    {
        case "close":
            Close?.Invoke(this, EventArgs.Empty);
            break;
        case "frameSent":
            FrameSent?.Invoke(this, new WebSocketFrameEventArgs { Payload = Payload() });
            break;
        case "frameReceived":
            FrameReceived?.Invoke(this, new WebSocketFrameEventArgs { Payload = Payload() });
            break;
        case "error":
            SocketError?.Invoke(this, new WebSocketErrorEventArgs
            {
                ErrorMessage = serverParams?.GetProperty("error").ToObject<string>(),
            });
            break;
    }
}
// Relays raw frames from the inner RTSP client straight through to this
// instance's FrameReceived subscribers.
private void RtspClientOnFrameReceived(object sender, RawFrame rawFrame) { FrameReceived?.Invoke(this, rawFrame); }
/************************************************************************/
#region Private methods
// Relays raw JPEG frames from the multipart stream straight through to this
// instance's FrameReceived subscribers.
private void MpStreamFrameReceived(object sender, RawJpegFrame e) { FrameReceived?.Invoke(this, e); }
// Publishes a received frame to subscribers, then disposes it: the frame is
// released as soon as the handlers return, so handlers must process it
// synchronously rather than retain a reference.
private void OnFrameReceived(FrameReceivedEventArgs e) { FrameReceived?.Invoke(this, e); DisposeFrame(e.Frame); }
/// <summary>
/// Camera receive loop: reads frame headers and compressed JPEG payloads from
/// the camera socket until a stop is requested, queues each complete frame
/// for processing on the worker queue and acknowledges it to the camera server.
/// </summary>
private void CameraReceiverMethod()
{
    while (!RequestedStop)
    {
        try
        {
            // A fresh frame object per iteration: the original reused a single
            // instance across the loop AND the queued lambda, so the next
            // receive could overwrite a frame before the worker processed it.
            var responseCameraFrame = new ResponseCameraFrame();
            var responseBytes = new byte[responseCameraFrame.GetResponseLength()];

            // Receive the first part of the frame. This part contains the informations like height, width or length
            Receive(_socket, responseBytes, responseBytes.Length);

            try
            {
                responseCameraFrame.FromByteArray(responseBytes);
            }
            catch (InvalidOperationException) // Error while receiving one frame. Close camera server
            {
                RequestedStop = true;
                TxtCommunication.SendCommand(new CommandStopCamera(), new ResponseStopCamera());
                DisconnectFromCameraServerMethod();
                break;
            }

            int frameSize = responseCameraFrame.FrameSizeCompressed;

            // Exactly-sized buffer: payload plus the 2-byte EOI tag.
            var framedata = new byte[frameSize + 2];

            // Receive the second part of the frame. This part contains the compressed JPEG data
            Receive(_socket, framedata, frameSize);

            // Add the missing EOI (End of image) tag directly after the payload.
            // The original wrote it at the END of a possibly larger reused
            // buffer, leaving garbage between the payload and the marker.
            framedata[frameSize] = 0xFF;
            framedata[frameSize + 1] = 0xD9;

            // Store the received frame in the responseCameraFrame object
            responseCameraFrame.FrameData = framedata;

            // Process the received frame in another thread queue so that we can continue receiving frames
            ReceivedFrames.Enqueue(responseCameraFrame);
            _frameProcessingTaskQueue.DoWorkInQueue(() =>
            {
                if (!ReceivedFrames.IsEmpty)
                {
                    ResponseCameraFrame frame;

                    if (ReceivedFrames.TryDequeue(out frame) && !RequestedStop)
                    {
                        // Use the dequeued frame's own data — not the receive
                        // loop's variables, which belong to a newer frame.
                        var eventArgs = new FrameReceivedEventArgs(frame.FrameData, frame.FrameData.Length);
                        FrameReceived?.Invoke(this, eventArgs);
                    }
                }
            }, false);

            // Send an acknowledge
            _socket.Send(BitConverter.GetBytes(TxtInterface.AcknowledgeIdCameraOnlineFrame));
        }
        catch (Exception)
        {
            // Best-effort: keep receiving on transient socket errors, as before.
        }
    }
}