/// <summary>
/// Tears down the media player state: clears the media-source flag and, when a
/// stream source is still active, shuts it down and stops video playback.
/// </summary>
void UninitializeMediaPlayer()
{
    m_hasSetMediaSource = false;
    if (media_stream_source == null)
    {
        return;
    }
    // NotifyError is the supported way to force a MediaStreamSource to shut down.
    media_stream_source.NotifyError(MediaStreamSourceErrorStatus.Other);
    Video.Stop();
}
/// <summary>
/// Callback on video frame received from the local video capture device,
/// for local rendering before (or in parallel of) being sent to the remote peer.
/// </summary>
/// <param name="frame">The newly captured video frame.</param>
private void VideoTrack_I420AFrameReady(I420AVideoFrame frame)
{
    // Lazily start the video media player when receiving the first video frame from
    // the video track. Currently there is no exposed API to tell once connected that
    // the remote peer will be sending some video track, so handle local and remote
    // video tracks the same for simplicity.
    uint frameWidth = frame.width;
    uint frameHeight = frame.height;
    bool recreateSource;
    lock (_mediaPlaybackLock)
    {
        // A new stream source is needed on the very first frame, and again whenever
        // the frame dimensions change mid-stream.
        recreateSource = !_isVideoPlaying || (frameWidth != _videoWidth) || (frameHeight != _videoHeight);
        if (recreateSource)
        {
            _isVideoPlaying = true;
            _videoWidth = frameWidth;
            _videoHeight = frameHeight;
        }
    }
    if (recreateSource)
    {
        // We don't know the remote video framerate yet, so use a default.
        const uint framerate = 30;
        Logger.Log($"Creating new video source: {frameWidth}x{frameHeight}@{framerate}");
        // Shut down the previous source (if any) before swapping in the new one.
        _videoPlayer.Pause();
        _videoStreamSource?.NotifyError(MediaStreamSourceErrorStatus.Other);
        _videoSource?.Dispose();
        _videoStreamSource = CreateVideoStreamSource(frameWidth, frameHeight, framerate);
        _videoSource = MediaSource.CreateFromMediaStreamSource(_videoStreamSource);
        _videoPlayer.Source = _videoSource;
        // Property-change notifications must be raised on the UI thread.
        ThreadHelper.RunOnMainThread(() =>
        {
            RaisePropertyChanged("FrameWidth");
            RaisePropertyChanged("FrameHeight");
        });
    }
    _videoBridge.HandleIncomingVideoFrame(frame);
}
/// <summary>
/// Callback on Media Foundation pipeline media ended playback.
/// </summary>
/// <param name="sender">The <see xref="MediaPlayer"/> source object owning the media.</param>
/// <param name="args">(unused)</param>
/// <remarks>This appears to never be called for live sources.</remarks>
private void OnMediaEnded(MediaPlayer sender, object args)
{
    Logger.Log("Local MediaElement video playback ended.");
    sender.Pause();
    sender.Source = null;
    if (sender == _videoPlayer)
    {
        //< TODO - This should never happen. But what to do with
        // local channels if it happens?
        lock (_mediaPlaybackLock)
        {
            // Null-conditional access guards against a second MediaEnded (or a
            // race with teardown) arriving after the source was already cleared,
            // which previously threw NullReferenceException here.
            _videoStreamSource?.NotifyError(MediaStreamSourceErrorStatus.Other);
            _videoStreamSource = null;
            _videoSource?.Dispose();
            _videoSource = null;
            _isVideoPlaying = false;
        }
    }
}
/// <summary>
/// Reads one MP3 frame from the socket and wraps it in a MediaStreamSample.
/// Frame layout reference: http://www.mpgedit.org/mpgedit/mpeg_format/MP3Format.html
/// </summary>
/// <param name="partial">When true, <paramref name="partialBytes"/> holds the start
/// of a frame that was read before an ICY metadata block; no socket read occurs.</param>
/// <param name="partialBytes">The already-read leading bytes of the frame (partial mode only).</param>
/// <returns>The sample (null on disconnect) and the byte count the caller should
/// add to its metadata position counter.</returns>
private async Task <Tuple <MediaStreamSample, uint> > ParseMP3SampleAsync(bool partial = false, byte[] partialBytes = null)
{
    //http://www.mpgedit.org/mpgedit/mpeg_format/MP3Format.html
    IBuffer buffer = null;
    MediaStreamSample sample = null;
    uint sampleLength = 0;
    if (partial)
    {
        // The caller already consumed partialBytes from the socket; only wrap them.
        buffer = partialBytes.AsBuffer();
        // NOTE(review): here sampleLength is the REMAINDER of the frame
        // (mp3_sampleSize - partialBytes.Length), not the bytes in this sample —
        // the caller appears to use it for metadata-position bookkeeping; confirm.
        sampleLength = mp3_sampleSize - (uint)partialBytes.Length;
        byteOffset += sampleLength;
    }
    else
    {
        var read = await socketReader.LoadAsync(mp3_sampleSize);
        if (read == 0)
        {
            // Server closed the connection: tear down and report the loss.
            Disconnect();
            MediaStreamSource.NotifyError(MediaStreamSourceErrorStatus.ConnectionToServerLost);
            return(new Tuple <MediaStreamSample, uint>(null, 0));
        }
        else if (read < mp3_sampleSize)
        {
            // Short read near end of stream: wrap whatever arrived.
            // NOTE(review): byteOffset still advances by the full mp3_sampleSize
            // even though only `read` bytes were consumed — verify intended.
            buffer = socketReader.ReadBuffer(read);
            byteOffset += mp3_sampleSize;
        }
        else
        {
            buffer = socketReader.ReadBuffer(mp3_sampleSize);
            byteOffset += mp3_sampleSize;
        }
        sampleLength = mp3_sampleSize;
    }
    // Stamp the sample with the running presentation time and fixed frame duration.
    sample = MediaStreamSample.CreateFromBuffer(buffer, timeOffSet);
    sample.Duration = mp3_sampleDuration;
    sample.KeyFrame = true;
    timeOffSet = timeOffSet.Add(mp3_sampleDuration);
    return(new Tuple <MediaStreamSample, uint>(sample, sampleLength));
}
/// <summary>
/// Sample-requested handler for the MKV stream source: pulls the next frame for
/// the requested stream descriptor from the MKV file source and reports it.
/// Reporting a null sample lets playback finish naturally (MediaEnded fires).
/// </summary>
void mkvStreamSource_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    MediaStreamSourceSampleRequest request = args.Request;
    MediaStreamSourceSampleRequestDeferral deferral = request.GetDeferral();
    MediaStreamSample sample = null;
    try
    {
        var mkvFs = mediaStreamFileSource as CCPlayer.HWCodecs.Matroska.MKV.MKVFileSource;
        FrameBufferData fd = mkvFs.GetFrameData(request.StreamDescriptor);
        if (fd.Data == null)
        {
            // End of data: reporting a null sample triggers a clean shutdown,
            // i.e. the MediaElement's MediaEnded event will fire.
            if (System.Diagnostics.Debugger.IsAttached)
            {
                System.Diagnostics.Debug.WriteLine("***************************** null이 보고 되었음. 종료 코드가 들어옴 => MediaElement의 MediaEndedEvent 발생될 것임.");
            }
        }
        else
        {
            sample = MediaStreamSample.CreateFromBuffer(fd.Data, fd.TimeCode);
            sample.Duration = fd.Duration;
            sample.KeyFrame = fd.KeyFrame;
            // Look up subtitles for this frame and forward them to the transport control.
            MessengerInstance.Send <Message>(new Message("SubtitleFrameInMKV", mkvFs.SubtitleFrames), TransportControlViewModel.NAME);
        }
        request.Sample = sample;
    }
    catch (Exception e)
    {
        System.Diagnostics.Debug.WriteLine("********************************** 샘플오류 또는 강제 종료 => MediaStreamSource의 Closed 이벤트가 발생될 것임 : " + e.Message);
        // Notifying an error makes the MediaStreamSource raise its Closed event.
        sender.NotifyError(MediaStreamSourceErrorStatus.DecodeError);
    }
    finally
    {
        if (deferral != null)
        {
            deferral.Complete();
        }
    }
}
/// <summary>
/// Sample-requested handler for the FLV stream source: extracts the next video
/// (H.264/AVC converted to Annex-B, or raw) or audio (AAC/MP3/ADPCM) tag from
/// the FLV file and reports it as a MediaStreamSample.
/// </summary>
private void flvStreamSource_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    MediaStreamSourceSampleRequest request = args.Request;
    MediaStreamSourceSampleRequestDeferral deferal = request.GetDeferral();
    FlvFile flvFile = mediaStreamFileSource as FlvFile;
    FlvTag flvTag = null;
    MediaStreamSample sample = null;
    try
    {
        if (flvFile != null)
        {
            if (request.StreamDescriptor is VideoStreamDescriptor)
            {
                flvTag = flvFile.FlvFileBody.CurrentVideoTag;
                if (flvTag.VideoData.CodecID == CodecID.AVC)
                {
                    byte[] by = flvTag.VideoData.AVCVideoPacket.NALUs;
                    if (by != null && by.Length > 0)
                    {
                        // AVC payloads arrive as length-prefixed NAL units; rewrite
                        // them into start-code-delimited (Annex-B) form for the decoder.
                        // (Streams were previously never disposed; `using` fixes that.)
                        using (MemoryStream srcStream = new MemoryStream(by))
                        using (MemoryStream stream = new MemoryStream())
                        {
                            // Prepend the SPS/PPS header on keyframes so decoding can start here.
                            if (flvTag.VideoData.FrameType == FrameType.Keyframe && NALUnitHeader != null)
                            {
                                stream.Write(NALUnitHeader, 0, NALUnitHeader.Length);
                            }
                            using (BinaryReader reader = new BinaryReader(srcStream))
                            {
                                var sampleSize = srcStream.Length;
                                while (sampleSize > 4L)
                                {
                                    // Each NAL unit is prefixed by a big-endian 32-bit length.
                                    var ui32 = reader.ReadUInt32();
                                    var count = OldSkool.swaplong(ui32);
                                    stream.Write(h264StartCode, 0, h264StartCode.Length);
                                    stream.Write(reader.ReadBytes((int)count), 0, (int)count);
                                    sampleSize -= 4 + (uint)count;
                                }
                            }
                            if (stream.Length > 0)
                            {
                                IBuffer buffer = stream.ToArray().AsBuffer();
                                sample = MediaStreamSample.CreateFromBuffer(buffer, TimeSpan.FromTicks(flvTag.Timestamp));
                                sample.KeyFrame = flvTag.VideoData.FrameType == FrameType.Keyframe;
                            }
                        }
                    }
                }
                else
                {
                    // Non-AVC codecs: hand the raw tag payload straight to the pipeline.
                    IBuffer buffer = flvTag.VideoData.RawData.AsBuffer();
                    sample = MediaStreamSample.CreateFromBuffer(buffer, TimeSpan.FromTicks(flvTag.Timestamp));
                    sample.KeyFrame = flvTag.VideoData.FrameType == FrameType.Keyframe;
                }
            }
            else
            {
                byte[] by = null;
                flvTag = flvFile.FlvFileBody.CurrentAudioTag;
                switch (flvTag.AudioData.SoundFormat)
                {
                    case SoundFormat.AAC:
                        by = (flvTag.AudioData.SoundData as AACAudioData).RawAACFrameData;
                        break;
                    case SoundFormat.MP3:
                        by = flvTag.AudioData.SoundData.RawData;
                        break;
                    case SoundFormat.ADPCM:
                        by = flvTag.AudioData.SoundData.RawData;
                        break;
                }
                if (by != null && by.Length > 0)
                {
                    // (An unused MemoryStream was previously allocated here and a
                    // duplicate request.Sample assignment made; both removed.)
                    IBuffer buffer = by.AsBuffer();
                    sample = MediaStreamSample.CreateFromBuffer(buffer, TimeSpan.FromTicks(flvTag.Timestamp));
                    // Audio frames are independently decodable.
                    sample.KeyFrame = true;
                }
            }
        }
        // Report the sample; null signals end-of-stream to the pipeline.
        request.Sample = sample;
    }
    catch (Exception e)
    {
        System.Diagnostics.Debug.WriteLine("샘플오류 " + e.Message);
        // A decode error makes the MediaStreamSource raise its Closed event.
        sender.NotifyError(MediaStreamSourceErrorStatus.DecodeError);
    }
    finally
    {
        if (deferal != null)
        {
            deferal.Complete();
        }
    }
}
/// <summary>
/// Sample-requested handler for the Shoutcast audio stream: reads the next MP3 or
/// AAC frame from the socket (handling ICY metadata blocks interleaved in the
/// stream) and reports it to the MediaStreamSource.
/// </summary>
/// <remarks>
/// async void is acceptable here because this is a WinRT event handler.
/// BUGFIX: the "read == 0" early return previously skipped deferral.Complete(),
/// stalling the pipeline forever; completion is now guaranteed by a finally block.
/// </remarks>
private async void MediaStreamSource_SampleRequested(Windows.Media.Core.MediaStreamSource sender, Windows.Media.Core.MediaStreamSourceSampleRequestedEventArgs args)
{
    var request = args.Request;
    if (!IsInternetConnected() || !connected)
    {
        connected = false;
        Disconnect();
        sender.NotifyError(MediaStreamSourceErrorStatus.ConnectionToServerLost);
        return;
    }
    var deferral = request.GetDeferral();
    try
    {
        MediaStreamSample sample = null;
        uint sampleLength = 0;
        uint frameSize = (AudioInfo.AudioFormat == StreamAudioFormat.MP3 ? mp3_sampleSize : aac_adts_sampleSize);
        // If the next ICY metadata block starts inside the next audio frame,
        // only the bytes up to the metadata boundary can be read now.
        if (metadataInt - metadataPos <= frameSize && metadataInt - metadataPos > 0)
        {
            byte[] partialFrame = new byte[metadataInt - metadataPos];
            var read = await socketReader.LoadAsync(metadataInt - metadataPos);
            if (read == 0)
            {
                // Connection dropped; the finally block completes the deferral.
                Disconnect();
                MediaStreamSource.NotifyError(MediaStreamSourceErrorStatus.ConnectionToServerLost);
                return;
            }
            socketReader.ReadBytes(partialFrame);
            metadataPos += metadataInt - metadataPos;
            switch (AudioInfo.AudioFormat)
            {
                case StreamAudioFormat.MP3:
                {
                    Tuple <MediaStreamSample, uint> result = await ParseMP3SampleAsync(partial : true, partialBytes : partialFrame);
                    sample = result.Item1;
                    sampleLength = result.Item2;
                }
                break;
                case StreamAudioFormat.AAC_ADTS:
                case StreamAudioFormat.AAC:
                {
                    Tuple <MediaStreamSample, uint> result = await ParseAACSampleAsync(partial : true, partialBytes : partialFrame);
                    sample = result.Item1;
                    sampleLength = result.Item2;
                }
                break;
            }
        }
        else
        {
            // We are at (or past) the metadata boundary: consume the metadata
            // block first, then parse a full audio frame.
            await HandleMetadata();
            switch (AudioInfo.AudioFormat)
            {
                case StreamAudioFormat.MP3:
                {
                    Tuple <MediaStreamSample, uint> result = await ParseMP3SampleAsync();
                    sample = result.Item1;
                    sampleLength = result.Item2;
                }
                break;
                case StreamAudioFormat.AAC_ADTS:
                case StreamAudioFormat.AAC:
                {
                    Tuple <MediaStreamSample, uint> result = await ParseAACSampleAsync();
                    sample = result.Item1;
                    sampleLength = result.Item2;
                }
                break;
            }
            if (sample == null || sampleLength == 0)
            {
                // OLD bug: on RELEASE builds, touching sample.Buffer could crash
                // the app (suspected .NET Native bug), so fail fast instead.
                // (The empty catch that previously swallowed exceptions around
                // this check has been removed; the outer catch handles failures.)
                MediaStreamSource.NotifyError(MediaStreamSourceErrorStatus.DecodeError);
                return;
            }
            metadataPos += sampleLength;
        }
        if (sample != null)
        {
            request.Sample = sample;
        }
    }
    catch (Exception)
    {
        MediaStreamSource.NotifyError(MediaStreamSourceErrorStatus.Other);
    }
    finally
    {
        // Always complete the deferral, including every early-return error path.
        deferral.Complete();
    }
}
/// <summary>
/// Connects to the Shoutcast/Icecast server, sends the HTTP/ICY GET request and
/// reads the response headers, following 302 redirects recursively.
/// Protocol reference: http://www.smackfu.com/stuff/programming/shoutcast.html
/// </summary>
/// <returns>Tuple of (connected successfully, parsed response headers); the
/// headers element is null when the connection failed.</returns>
/// <exception cref="Exception">On connection failure (when no MediaStreamSource
/// exists to notify), 404, ICY 401 or 503 responses.</exception>
private async Task <Tuple <bool, KeyValuePair <string, string>[]> > EstablishConnectionAsync()
{
    //http://www.smackfu.com/stuff/programming/shoutcast.html
    try
    {
        await socket.ConnectAsync(new Windows.Networking.HostName(streamUrl.Host), streamUrl.Port.ToString());
        socketWriter = new DataWriter(socket.OutputStream);
        socketReader = new DataReader(socket.InputStream);
    }
    catch (Exception ex)
    {
        // Prefer reporting through the MediaStreamSource when one exists; only
        // throw when there is no pipeline to notify.
        if (MediaStreamSource != null)
        {
            MediaStreamSource.NotifyError(MediaStreamSourceErrorStatus.FailedToConnectToServer);
        }
        else
        {
            throw new Exception("Connection Error", ex);
        }
        return(new Tuple <bool, KeyValuePair <string, string>[]>(false, null));
    }
    //todo figure out how to resolve http requests better to get rid of this hack.
    String httpPath = "";
    if (streamUrl.Host.Contains("radionomy.com") || serverType == ShoutcastServerType.Radionomy)
    {
        httpPath = streamUrl.LocalPath;
        serverType = ShoutcastServerType.Radionomy;
    }
    else
    {
        httpPath = "/" + relativePath;
    }
    socketWriter.WriteString("GET " + httpPath + " HTTP/1.1" + Environment.NewLine);
    if (ShouldGetMetadata)
    {
        // Ask the server to interleave ICY metadata blocks into the stream.
        socketWriter.WriteString("Icy-MetaData: 1" + Environment.NewLine);
    }
    socketWriter.WriteString("Host: " + streamUrl.Host + (streamUrl.Port != 80 ? ":" + streamUrl.Port : "") + Environment.NewLine);
    socketWriter.WriteString("Connection: Keep-Alive" + Environment.NewLine);
    socketWriter.WriteString("User-Agent: " + (UserAgent ?? "Shoutcast Player (http://github.com/Amrykid/UWPShoutcastMSS)") + Environment.NewLine);
    socketWriter.WriteString(Environment.NewLine);
    await socketWriter.StoreAsync();
    await socketWriter.FlushAsync();
    // Read the response one byte at a time so we never consume audio data past
    // the blank line that terminates the headers.
    string response = string.Empty;
    while (!response.EndsWith(Environment.NewLine + Environment.NewLine))
    {
        await socketReader.LoadAsync(1);
        response += socketReader.ReadString(1);
    }
    //todo support http 2.0. maybe usage of the http client would solve this.
    if (response.StartsWith("HTTP/1.0 200 OK") || response.StartsWith("HTTP/1.1 200 OK") || response.StartsWith("ICY 200"))
    {
        var headers = ParseResponse(response);
        return(new Tuple <bool, KeyValuePair <string, string>[]>(true, headers));
    }
    else
    {
        //wasn't successful. handle each case accordingly.
        // BUGFIX: this previously tested "HTTP /1.0 302" (stray space after HTTP),
        // so HTTP/1.0 redirects were never followed.
        if (response.StartsWith("HTTP/1.0 302") || response.StartsWith("HTTP/1.1 302"))
        {
            socketReader.Dispose();
            socketWriter.Dispose();
            socket.Dispose();
            var parsedResponse = ParseHttpResponseToKeyPairArray(response.Split(new string[] { "\r\n" }, StringSplitOptions.None).Skip(1).ToArray());
            socket = new StreamSocket();
            streamUrl = new Uri(parsedResponse.First(x => x.Key.ToLower() == "location").Value);
            // NOTE(review): redirects recurse with no depth limit; a redirect
            // loop would overflow the stack — consider capping the count.
            return(await EstablishConnectionAsync());
        }
        else if (response.StartsWith("HTTP/1.0 404"))
        {
            throw new Exception("Station is unavailable.");
        }
        else if (response.StartsWith("ICY 401")) //ICY 401 Service Unavailable
        {
            if (MediaStreamSource != null)
            {
                MediaStreamSource.NotifyError(MediaStreamSourceErrorStatus.FailedToConnectToServer);
            }
            else
            {
                throw new Exception("Station is unavailable at this time. Maybe they're down for maintainence?");
            }
            return(new Tuple <bool, KeyValuePair <string, string>[]>(false, null));
        }
        else if (response.StartsWith("HTTP/1.1 503")) //HTTP/1.1 503 Server limit reached
        {
            throw new Exception("Station is unavailable at this time. The maximum amount of listeners has been reached.");
        }
    }
    return(new Tuple <bool, KeyValuePair <string, string>[]>(false, null)); //not connected and no headers.
}