private static void FillMediaPacket(IntPtr handle, MediaPacket mediaPacket)
{
    Debug.Assert(handle != IntPtr.Zero);

    if (mediaPacket == null)
    {
        throw new ArgumentNullException(nameof(mediaPacket));
    }

    InteropSource.FillMediaPacket(handle, mediaPacket.GetHandle()).
        Validate("Failed to fill media packet");
}
/// <summary>
/// Pushes an elementary stream to decode audio or video.
/// </summary>
/// <remarks>
/// This source must be set as a source to a WebRTC and the WebRTC must be in the
/// <see cref="WebRTCState.Negotiating"/> or <see cref="WebRTCState.Playing"/> state.
/// </remarks>
/// <param name="packet">The <see cref="MediaPacket"/> to decode.</param>
/// <exception cref="InvalidOperationException">
/// This source is not set as a source to a WebRTC.<br/>
/// -or-<br/>
/// The WebRTC is not in a valid state.
/// </exception>
/// <exception cref="ArgumentNullException"><paramref name="packet"/> is null.</exception>
/// <exception cref="ObjectDisposedException"><paramref name="packet"/> has been disposed.</exception>
/// <exception cref="ArgumentException">
/// <paramref name="packet"/> is neither a video nor an audio type.<br/>
/// -or-<br/>
/// The format of <paramref name="packet"/> does not match the format specified in the constructor.
/// </exception>
/// <seealso cref="WebRTC.AddSource"/>
/// <seealso cref="WebRTC.AddSources"/>
/// <seealso cref="MediaPacket"/>
/// <since_tizen> 9 </since_tizen>
public void Push(MediaPacket packet)
{
    if (WebRtc == null)
    {
        Log.Error(WebRTCLog.Tag, "The source is not set as a source to a WebRTC yet.");
        throw new InvalidOperationException("The source is not set as a source to a WebRTC yet.");
    }

    if (packet == null)
    {
        Log.Error(WebRTCLog.Tag, "packet is null");
        throw new ArgumentNullException(nameof(packet));
    }

    if (packet.IsDisposed)
    {
        Log.Error(WebRTCLog.Tag, "packet is disposed");
        throw new ObjectDisposedException(nameof(packet));
    }

    if (packet.Format.Type == MediaFormatType.Text || packet.Format.Type == MediaFormatType.Container)
    {
        Log.Error(WebRTCLog.Tag, "The format of the packet is invalid : " + packet.Format.Type);
        throw new ArgumentException($"The format of the packet is invalid : {packet.Format.Type}.");
    }

    if (!packet.Format.Equals(_audioMediaFormat) && !packet.Format.Equals(_videoMediaFormat))
    {
        Log.Error(WebRTCLog.Tag, "The format of the packet is invalid : Unmatched format.");
        throw new ArgumentException("The format of the packet is invalid : Unmatched format.");
    }

    if (packet.Format.Type == MediaFormatType.Video && _videoMediaFormat == null)
    {
        Log.Error(WebRTCLog.Tag, "Video is not configured with the current source.");
        throw new ArgumentException("Video is not configured with the current source.");
    }

    if (packet.Format.Type == MediaFormatType.Audio && _audioMediaFormat == null)
    {
        Log.Error(WebRTCLog.Tag, "Audio is not configured with the current source.");
        throw new ArgumentException("Audio is not configured with the current source.");
    }

    WebRtc.ValidateWebRTCState(WebRTCState.Negotiating, WebRTCState.Playing);

    NativeWebRTC.PushMediaPacket(WebRtc.Handle, SourceId.Value, packet.GetHandle()).
        ThrowIfFailed("Failed to push the packet to the WebRTC");
}
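// Illustrative usage sketch (not part of this file): feeding encoded frames to a
// MediaPacketSource via Push(). The codec choice, format parameters, and the frame-copy
// step below are assumptions for demonstration only; a real application derives them
// from its encoder or demuxer and keeps pushing while the WebRTC is negotiating or playing.
//
//     var format = new VideoMediaFormat(MediaFormatVideoMimeType.H264SP, 640, 480);
//     var source = new MediaPacketSource(format);
//     webRtc.AddSource(source);
//
//     // Once the WebRTC has reached the Negotiating or Playing state:
//     var packet = MediaPacket.Create(format);
//     // ... copy one encoded frame into packet.Buffer and set packet.BufferWrittenLength ...
//     source.Push(packet);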
internal async Task<MediaPacket> RunAsync(TransformHandle handle, MediaPacket source)
{
    Debug.Assert(source.Format is VideoMediaFormat);

    ValidateFormat(source.Format as VideoMediaFormat);

    var tcs = new TaskCompletionSource<MediaPacket>();

    // Keep the completion callback alive until the native transform invokes it.
    using (var cbKeeper = ObjectKeeper.Get(GetCallback(tcs, source)))
    {
        var result = NativeTransform.Run(handle, source.GetHandle(), cbKeeper.Target);

        if (result == ImageUtilError.NotSupportedFormat)
        {
            throw new NotSupportedException(
                GenerateNotSupportedErrorMessage(source.Format as VideoMediaFormat));
        }

        result.ThrowIfFailed("Failed to transform given packet with " + GetType());

        return await tcs.Task;
    }
}
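// Illustrative usage sketch (not part of this file): this internal RunAsync is normally
// reached through a public transform wrapper rather than called directly. The
// ImageTransformer and ResizeTransform names below are assumptions for demonstration;
// the contract shown by this method is only "video-format MediaPacket in, transformed
// MediaPacket out, asynchronously".
//
//     var resize = new ResizeTransform(new Size(320, 240));
//
//     using (var transformer = new ImageTransformer())
//     {
//         MediaPacket result = await transformer.TransformAsync(sourcePacket, resize);
//         // 'result' holds the transformed frame; dispose it when no longer needed.
//     }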