/// <summary>
/// Serializes the connection manager's current measured data as a UTF-8 metadata frame.
/// Returns false when no data is available (or serialization yields nothing); on success,
/// populates a <c>MetadataHeader</c> stamped with the measurement's own timestamp.
/// </summary>
protected override bool GetLiveFrameInternal(TimeSpan timeout, out BaseDataHeader header, out byte[] data)
{
    header = null;
    data = null;

    var current = _connectionManager?.CurrentData;
    if (current == null)
    {
        return false;
    }

    data = Encoding.UTF8.GetBytes(current.Serialize());
    if (data == null || data.Length == 0)
    {
        return false;
    }

    header = new MetadataHeader
    {
        Length = (ulong)data.Length,
        SequenceNumber = _sequence++,
        Timestamp = current.Time // use the device-side measurement time, not the fetch time
    };
    return true;
}
/// <summary>
/// Video live-frame skeleton. Until the device fetch below is implemented, <paramref name="data"/>
/// stays null and this method always returns false.
/// </summary>
protected override bool GetLiveFrameInternal(TimeSpan timeout, out BaseDataHeader header, out byte[] data)
{
    data = null;
    header = null;

    // TODO: Implement request for fetching data from device
    if (data == null || data.Length == 0)
    {
        return false;
    }

    // TODO: If a timestamp is provided by device, use that instead
    DateTime now = DateTime.UtcNow;

    // TODO: Update to reflect actual data
    header = new VideoHeader
    {
        CodecType = VideoCodecType.JPEG,
        Length = (ulong)data.Length,
        SequenceNumber = _sequence++,
        SyncFrame = true,
        TimestampSync = now,
        TimestampFrame = now
    };
    return true;
}
/// <summary>
/// Fetches a demo metadata frame (bounding boxes), honoring the stored bounding-box color
/// setting: a value of "C" selects colored boxes, anything else monochrome. Returns false
/// when no frame is available.
/// </summary>
protected override bool GetLiveFrameInternal(TimeSpan timeout, out BaseDataHeader header, out byte[] data)
{
    data = null;
    header = null;

    // Default to colored boxes unless the stored setting says otherwise.
    bool colored = true;
    MetadataSetting md = _settingsManager.GetSetting(new MetadataSetting(Constants.BoundingBoxColor, _deviceId, _streamId, MetadataType.BoundingBoxTypeId, ""));
    if (md != null)
    {
        colored = md.Value == "C";
    }

    data = _demoConnectionManager.GetLiveFrame(Channel, colored);
    // Fix: guard against null as well — GetLiveFrame can return null (the audio path
    // checks for this), and the original would have thrown a NullReferenceException here.
    if (data == null || data.Length == 0)
    {
        return false;
    }

    DateTime dt = DateTime.UtcNow;
    header = new MetadataHeader()
    {
        Length = (ulong)data.Length,
        SequenceNumber = _sequence++,
        Timestamp = dt
    };
    return true;
}
/// <summary>
/// Fetches a demo JPEG video frame and wraps it in a <c>VideoHeader</c>. Returns false when
/// no frame is available.
/// </summary>
protected override bool GetLiveFrameInternal(TimeSpan timeout, out BaseDataHeader header, out byte[] data)
{
    header = null;

    data = _demoConnectionManager.GetLiveFrame(Channel, false);
    // Fix: guard against null as well — GetLiveFrame can return null (the audio path
    // checks for this), and the original would have thrown a NullReferenceException here.
    if (data == null || data.Length == 0)
    {
        return false;
    }

    DateTime dt = DateTime.UtcNow;
    var codec = VideoCodecType.JPEG;
    // If you wanted to support multiple codecs, the commented code below demonstrates how this
    // could be done (and also how to get stream settings for any other purpose)
    // var setting = _settingsManager.GetSetting(new StreamSetting(Constants.Codec, _deviceId, _streamId, ""));
    // if (setting.Value == VideoCodecType.H264.ToString())
    // {
    //     codec = VideoCodecType.H264;
    // }

    // For video codec types other than JPEG, an important thing to note here is that we always
    // transfer the data out of the GetLiveFrame call in single frame chunks. That is to say,
    // regardless of what kind of frame it might be (P-frame, I-frame, etc.) it is sent as
    // one frame per message.
    header = new VideoHeader()
    {
        CodecType = codec,
        Length = (ulong)data.Length,
        SequenceNumber = _sequence++,
        SyncFrame = true, // For codecs other than MJPEG, this should only be true for key frames.
        TimestampSync = dt, // If the video codec is e.g. H.264, this should be the time stamp of the most recent keyframe. On a keyframe, this will be the same as TimestampFrame.
        TimestampFrame = dt
    };
    return true;
}
/// <summary>
/// Template-method entry point for live frame retrieval: delegates to
/// <c>GetLiveFrameInternal</c> and converts any thrown exception into a
/// <c>ConnectionLostException</c> after logging it.
/// </summary>
/// <param name="timeout">Passed through to <c>GetLiveFrameInternal</c>.</param>
/// <param name="header">Frame header produced by the subclass, or null on failure.</param>
/// <param name="data">Frame payload produced by the subclass, or null on failure.</param>
/// <returns>True when a frame was retrieved; false when none is currently available.</returns>
/// <exception cref="ConnectionLostException">Thrown when the subclass implementation throws.</exception>
public sealed override bool GetLiveFrame(TimeSpan timeout, out BaseDataHeader header, out byte[] data)
{
    try
    {
        return(GetLiveFrameInternal(timeout, out header, out data));
    }
    catch (Exception ex)
    {
        // NOTE(review): wrapping here discards the original exception object — only its
        // message and stack trace survive as concatenated text. If ConnectionLostException
        // exposes an (string, Exception) constructor, passing ex as the inner exception
        // would preserve the full chain — confirm against the framework's API.
        Toolbox.Log.LogError(GetType().Name, "{0}, Channel {1}: {2}", nameof(GetLiveFrame), Channel, ex.Message + ex.StackTrace);
        throw new ConnectionLostException(ex.Message + ex.StackTrace);
    }
}
/// <summary>
/// Hands out the buffered speaker frame, if one is pending. The data buffer is cleared after
/// delivery so each frame is returned exactly once; the header field is left in place.
/// </summary>
protected override bool GetLiveFrameInternal(TimeSpan timeout, out BaseDataHeader header, out byte[] data)
{
    header = null;
    data = null;

    // Nothing buffered yet — report no frame available.
    if (_currentSpeakerData == null || _currentSpeakerHeader == null)
    {
        return false;
    }

    header = _currentSpeakerHeader.Clone();
    data = _currentSpeakerData;
    _currentSpeakerData = null; // consume the buffered frame so it is delivered only once
    return true;
}
/// <summary>
/// Fetches a demo audio frame and updates the reusable audio header in place (length,
/// timestamp, sequence number, sample count) before handing out a clone of it.
/// </summary>
protected override bool GetLiveFrameInternal(TimeSpan timeout, out BaseDataHeader header, out byte[] data)
{
    header = null;
    data = null;

    byte[] buffer = _demoConnectionManager.GetLiveFrame(Channel, false);
    if (buffer == null)
    {
        return false;
    }

    _currentAudioHeader.Length = (ulong)buffer.Length;
    _currentAudioHeader.Timestamp = DateTime.UtcNow;
    _currentAudioHeader.SequenceNumber++;
    _currentAudioHeader.SampleCount = buffer.Length / 2; // Assume 16 bits per sample

    header = _currentAudioHeader.Clone();
    data = buffer;
    return true;
}
/// <summary>
/// Audio live-frame skeleton. Until the device fetch below is implemented, <c>frame</c> stays
/// null and this method always returns false.
/// </summary>
protected override bool GetLiveFrameInternal(TimeSpan timeout, out BaseDataHeader header, out byte[] data)
{
    data = null;
    header = null;

    byte[] frame = null;
    // TODO: Implement request for fetching frame data from device
    if (frame == null)
    {
        return false;
    }

    _currentAudioHeader.Length = (ulong)frame.Length;
    _currentAudioHeader.Timestamp = DateTime.UtcNow;
    _currentAudioHeader.SequenceNumber++;
    _currentAudioHeader.SampleCount = frame.Length / 2; // Assume 16 bits per sample

    header = _currentAudioHeader.Clone();
    data = frame;
    return true;
}
/// <summary>
/// Metadata live-frame skeleton. Until the device fetch below is implemented,
/// <paramref name="data"/> stays null and this method always returns false.
/// </summary>
protected override bool GetLiveFrameInternal(TimeSpan timeout, out BaseDataHeader header, out byte[] data)
{
    data = null;
    header = null;

    // TODO: Implement request for fetching data from device
    // Fix: the original checked only data.Length — with data still null that dereferences
    // null and threw a NullReferenceException on every call (the sibling video template
    // uses the same null-first guard).
    if (data == null || data.Length == 0)
    {
        return false;
    }

    DateTime dt = DateTime.UtcNow; // TODO: If a timestamp is provided by device, use that instead
    header = new MetadataHeader()
    {
        Length = (ulong)data.Length,
        SequenceNumber = _sequence++,
        Timestamp = dt
    };
    return true;
}
/// <summary>
/// Pops the next buffered JPEG frame from the connection manager and wraps it in a
/// <c>VideoHeader</c>. Returns false when the buffer is empty.
/// </summary>
protected override bool GetLiveFrameInternal(TimeSpan timeout, out BaseDataHeader header, out byte[] data)
{
    header = null;

    data = _connectionManager.PopFrame();
    if (data == null || data.Length == 0)
    {
        return false;
    }

    DateTime now = DateTime.UtcNow;
    header = new VideoHeader
    {
        CodecType = VideoCodecType.JPEG,
        Length = (ulong)data.Length,
        SequenceNumber = _sequence++,
        SyncFrame = true, // every MJPEG frame is self-contained
        TimestampSync = now,
        TimestampFrame = now
    };
    return true;
}
/// <summary>
/// Subclass hook for fetching a single live frame; called by the sealed <c>GetLiveFrame</c>
/// wrapper, which handles logging and exception translation.
/// </summary>
/// <param name="timeout">Maximum time the implementation may spend retrieving a frame.</param>
/// <param name="header">Frame header describing the payload, or null when no frame is returned.</param>
/// <param name="data">Frame payload bytes, or null when no frame is returned.</param>
/// <returns>True when a frame was produced; false when none is currently available.</returns>
protected abstract bool GetLiveFrameInternal(TimeSpan timeout, out BaseDataHeader header, out byte[] data);