// Example no. 1 (scraped snippet separator)
        /// <summary>
        /// Initializes a new instance of the <see cref="MediaElement"/> class,
        /// wiring the rendering surface and all core media event handlers.
        /// </summary>
        /// <param name="imageView">The native image view used as the render target.</param>
        public MediaElement(NSImageView imageView)
        {
            this.ImageView        = imageView;
            this.mediaElementCore = new MediaElementCore(this, false);

            // RoutedEvent event bindings (placeholders; no UI routing on this platform yet)
            mediaElementCore.MediaOpening     += (s, e) => { };
            mediaElementCore.MediaOpened      += (s, e) => { };
            mediaElementCore.MediaClosed      += (s, e) => { };
            mediaElementCore.MediaFailed      += (s, e) => { };
            mediaElementCore.MediaEnded       += (s, e) => { };
            mediaElementCore.BufferingStarted += (s, e) => { };
            mediaElementCore.BufferingEnded   += (s, e) => { };
            mediaElementCore.SeekingStarted   += (s, e) => { };
            mediaElementCore.SeekingEnded     += (s, e) => { };

            // Non-RoutedEvent event bindings
            mediaElementCore.MessageLogged += (s, e) =>
            {
                // TODO: This is incomplete
                // Skip trace-level noise; echo everything else to the console.
                if (e.MessageType != MediaLogMessageType.Trace)
                {
                    Console.WriteLine($"{e.MessageType,10} - {e.Message}");
                }
            };
            mediaElementCore.PositionChanged += (s, e) => { };

            // INotifyPropertyChanged PropertyChanged Event binding (placeholder)
            mediaElementCore.PropertyChanged += (s, e) => { };
        }
        /// <summary>
        /// Starts live playback for a single camera channel.
        /// </summary>
        /// <param name="iCH">Channel number (0~15)</param>
        /// <param name="iAudio">Audio transmission:
        /// 0: do not send audio data
        /// 1: send audio data, if any
        /// </param>
        /// <param name="iStream">Stream selection:
        /// 0: use primary stream
        /// 1: try to use secondary stream
        /// </param>
        /// <returns>1 on success, 0 on failure (failure reason stored in _ErrDesc).</returns>
        public int Device_RealPlay(int iCH, int iAudio, int iStream)
        {
            int rtnVal = 0;

            try
            {
                // Tear down any previous session before starting a new one.
                Device_Logout();
                channel = iCH;

                apv = new VideoMediaElementCore.MediaElementCore(iCH + 1, 0);
                wpfmain.Children.Add(apv);

                // BUGFIX: unsubscribe before subscribing so that repeated calls
                // do not stack duplicate MouseLeftButtonUp handlers on wpfmain
                // (removing a handler that was never added is a safe no-op).
                wpfmain.MouseLeftButtonUp -= wpfmain_MouseLeftButtonUp;
                wpfmain.MouseLeftButtonUp += wpfmain_MouseLeftButtonUp;

                apv.set_size(elementHost1.Width, elementHost1.Height);
                //apv.set_aes_key("1234567890123456");
                Console.WriteLine("Ip = {0} Port = {1}", IP, PORT);
                apv.set_source(0, IP, PORT, iCH, (byte)iAudio, (byte)iStream, 1);
                apv.set_print_msg(1);

                // Read mode:
                // 0 => send request to get one frame
                // 1 => send request to get one I-frame
                // 2 => play continuous stream
                apv.set_read_mode(2);
                apv.play();

                rtnVal = 1;
            }
            catch (Exception ex)
            {
                // Best-effort: record the failure reason; caller checks the return value.
                _ErrDesc = ex.Message;
            }

            return rtnVal;
        }
 /// <summary>
 /// Stops playback of recorded video and releases the player instance.
 /// </summary>
 public void Device_StopPlayBack()
 {
     if (apv == null)
     {
         return;
     }

     apv.stop();
     apv = null;
 }
 /// <summary>
 /// Stops live playback and releases the player instance.
 /// </summary>
 public void Device_StopRealPlay()
 {
     // Stop the active player (if any) and drop the reference.
     apv?.stop();
     apv = null;
 }
        /// <summary>
        /// Logs the user out: stops any active player and clears the hosted video controls.
        /// </summary>
        public void Device_Logout()
        {
            // Stop and release the active player, if any.
            apv?.stop();
            apv = null;

            // Remove all hosted video controls from the WPF container.
            wpfmain.Children.Clear();
            //Status = lsStatus.Logout;
            //elementHost1.Child = null;
        }
// Example no. 6 (scraped snippet separator)
 /// <summary>
 /// Handles the Tick event of the LogOutputter timer: drains the pending
 /// log queue and dispatches each queued message.
 /// </summary>
 /// <param name="sender">The source of the event.</param>
 /// <param name="e">The <see cref="EventArgs"/> instance containing the event data.</param>
 private static void LogOutputter_Tick(object sender, EventArgs e)
 {
     // Drain everything that was queued since the last tick.
     while (LogQueue.TryDequeue(out MediaLogMessagEventArgs eventArgs))
     {
         // Sourceless messages are raised as global FFmpeg messages;
         // otherwise raise on the originating element.
         if (eventArgs.Source == null)
         {
             MediaElementCore.RaiseFFmpegMessageLogged(eventArgs);
         }
         else
         {
             eventArgs.Source.RaiseMessageLogged(eventArgs);
         }
     }
 }
        /// <summary>
        /// Starts playback of recorded video from a given start time.
        /// </summary>
        /// <param name="iCH">Channel number.</param>
        /// <param name="iAudio">0: no audio, 1: send audio data.</param>
        /// <param name="sStartTime">Parseable start date/time string.</param>
        /// <param name="is_utc">Intended UTC flag — see NOTE below; currently has no effect.</param>
        /// <returns>1 on success, 0 on failure (failure reason stored in _ErrDesc).</returns>
        private int _PlayBackByTime(int iCH, int iAudio, string sStartTime, int is_utc)
        {
            int rtnVal = 0;

            try
            {
                // Tear down any previous session before starting a new one.
                Device_Logout();
                channel = iCH;

                apv = new VideoMediaElementCore.MediaElementCore(iCH + 1, 1);
                wpfmain.Children.Add(apv);

                // BUGFIX: unsubscribe before subscribing so that repeated calls
                // do not stack duplicate MouseLeftButtonUp handlers on wpfmain.
                wpfmain.MouseLeftButtonUp -= wpfmain_MouseLeftButtonUp;
                wpfmain.MouseLeftButtonUp += wpfmain_MouseLeftButtonUp;

                apv.set_size(elementHost1.Width, elementHost1.Height);
                //apv.set_aes_key("1234567890123456");

                // NOTE(review): the original is_utc == 0 / else branches issued the
                // exact same call, so is_utc had no effect. Collapsed to one call
                // (behavior unchanged); TODO confirm whether is_utc should select a
                // different timestamp conversion.
                apv.set_source_l(1, IP, PORT, iCH, (byte)iAudio, ConvertToTimestamp(DateTime.Parse(sStartTime)));
                //apv.set_playback_cmd(1, 1); //2014/07/21_CTS
                apv.set_print_msg(1);

                apv.play();
                // Give the stream a moment to start before issuing the playback command.
                System.Threading.Thread.Sleep(1000);
                apv.set_playback_cmd(1, 1);

                rtnVal = 1;
            }
            catch (Exception ex)
            {
                // Best-effort: record the failure reason; caller checks the return value.
                _ErrDesc = ex.Message;
            }

            return rtnVal;
        }
// Example no. 8 (scraped snippet separator)
        /// <summary>
        /// Initializes a new instance of the <see cref="AudioRenderer"/> class.
        /// </summary>
        /// <param name="mediaElementCore">The core media element.</param>
        /// <exception cref="NotSupportedException">When the wave format is not 16-bit, 2-channel.</exception>
        public AudioRenderer(MediaElementCore mediaElementCore)
        {
            MediaElementCore = mediaElementCore;

            // This renderer only supports 16-bit stereo output.
            m_Format = new WaveFormat(AudioParams.Output.SampleRate, AudioParams.OutputBitsPerSample, AudioParams.Output.ChannelCount);
            var formatIsSupported = WaveFormat.BitsPerSample == 16 && WaveFormat.Channels == 2;
            if (formatIsSupported == false)
            {
                throw new NotSupportedException("Wave Format has to be 16-bit and 2-channel.");
            }

            // Only spin up the audio device when there is an audio stream to render.
            if (MediaElement.HasAudio)
            {
                Initialize();
            }

            // Hook application shutdown on the dispatcher so the device is released cleanly.
            if (Application.Current != null)
            {
                WPFUtils.UIInvoke(DispatcherPriority.Normal, () => Application.Current.Exit += OnApplicationExit);
            }
        }
// Example no. 9 (scraped snippet separator)
        /// <summary>
        /// Logs a block rendering operation as a Trace Message
        /// if the debugger is attached.
        /// </summary>
        /// <param name="element">The media element.</param>
        /// <param name="block">The block.</param>
        /// <param name="clockPosition">The clock position.</param>
        /// <param name="renderIndex">Index of the render.</param>
        internal static void LogRenderBlock(this MediaElementCore element, MediaBlock block, TimeSpan clockPosition, int renderIndex)
        {
            // Trace output is only useful (and only emitted) in debug sessions.
            if (IsInDebugMode == false || element == null)
            {
                return;
            }

            try
            {
                // Drift = how far the wall clock is ahead of the block's start time.
                var drift = TimeSpan.FromTicks(clockPosition.Ticks - block.StartTime.Ticks);
                var message =
                    $"{block.MediaType.ToString().Substring(0, 1)} "
                    + $"BLK: {block.StartTime.Format()} | "
                    + $"CLK: {clockPosition.Format()} | "
                    + $"DFT: {drift.TotalMilliseconds,4:0} | "
                    + $"IX: {renderIndex,3} | "
                    + $"PQ: {element.Container?.Components[block.MediaType]?.PacketBufferLength / 1024d,7:0.0}k | "
                    + $"TQ: {element.Container?.Components.PacketBufferLength / 1024d,7:0.0}k";
                element.Logger.Log(MediaLogMessageType.Trace, message);
            }
            catch
            {
                // Logging must never break rendering; swallow by design.
            }
        }
// Example no. 10 (scraped snippet separator)
 /// <summary>
 /// Initializes a new instance of the <see cref="T:Unosquare.FFME.MacOS.Rendering.AudioRenderer"/> class.
 /// </summary>
 /// <param name="mediaElementCore">Media element core.</param>
 public AudioRenderer(MediaElementCore mediaElementCore) => MediaElementCore = mediaElementCore;
// Example no. 11 (scraped snippet separator)
 /// <summary>
 /// Initializes a new instance of the <see cref="VideoRenderer"/> class.
 /// </summary>
 /// <param name="mediaElementCore">The core media element.</param>
 public VideoRenderer(MediaElementCore mediaElementCore)
 {
     // Store the core reference first; the bitmap setup may depend on it.
     MediaElementCore = mediaElementCore;

     // null argument: no media block is available at construction time —
     // presumably creates a default-sized target; TODO confirm.
     InitializeTargetBitmap(null);
 }
// Example no. 12 (scraped snippet separator)
 /// <summary>
 /// Initializes a new instance of the <see cref="SubtitleRenderer"/> class.
 /// </summary>
 /// <param name="mediaElementCore">The core media element.</param>
 public SubtitleRenderer(MediaElementCore mediaElementCore) => MediaElementCore = mediaElementCore;
// Example no. 13 (scraped snippet separator)
 /// <summary>
 /// Initializes a new instance of the <see cref="T:Unosquare.FFME.MacOS.Rendering.VideoRenderer"/> class.
 /// </summary>
 /// <param name="mediaElementCore">Media element core.</param>
 public VideoRenderer(MediaElementCore mediaElementCore) => MediaElementCore = mediaElementCore;
// Example no. 14 (scraped snippet separator)
 /// <summary>
 /// Initializes a new instance of the <see cref="MediaCommandManager"/> class.
 /// </summary>
 /// <param name="mediaElementCore">The media element.</param>
 public MediaCommandManager(MediaElementCore mediaElementCore) => m_MediaElement = mediaElementCore;