/// <summary>
/// Builds the H.264 video stream description using the same dimensions as the
/// video capture and stores it in _videoDesc.
/// </summary>
private void PrepareVideo()
{
    // Stream description: same encoding and dimensions as the video capture.
    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>
    {
        { MediaStreamAttributeKeys.VideoFourCC, "H264" },
        { MediaStreamAttributeKeys.Height, _frameHeight.ToString() },
        { MediaStreamAttributeKeys.Width, _frameWidth.ToString() }
    };

    _videoDesc = new MediaStreamDescription(MediaStreamType.Video, streamAttributes);
}
/// <summary>
/// Initialises the data structures to pass data to the media pipeline via the MediaStreamSource.
/// </summary>
protected override void OpenMediaAsync()
{
    // General properties: one RGBA frame buffer sized to the configured frame dimensions.
    _frameBufferSize = (int)_frameSize.Width * (int)_frameSize.Height * 4; // RGBA
    _frameBuffer = new byte[_frameBufferSize];
    _frameStream = new MemoryStream(_frameBuffer);

    // Media stream attributes: uncompressed RGBA video at the frame size.
    var mediaStreamAttributes = new Dictionary <MediaStreamAttributeKeys, string>();
    mediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "RGBA";
    mediaStreamAttributes[MediaStreamAttributeKeys.Width] = ((int)_frameSize.Width).ToString();
    mediaStreamAttributes[MediaStreamAttributeKeys.Height] = ((int)_frameSize.Height).ToString();
    _videoStreamDescription = new MediaStreamDescription(MediaStreamType.Video, mediaStreamAttributes);

    // Media stream descriptions
    var mediaStreamDescriptions = new List <MediaStreamDescription> { _videoStreamDescription };

    // Media source attributes: zero duration plus CanSeek=false marks an endless stream.
    var mediaSourceAttributes = new Dictionary <MediaSourceAttributesKeys, string>();
    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromSeconds(0).Ticks.ToString(CultureInfo.InvariantCulture);
    mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = false.ToString();

    // NOTE(review): this expression always evaluates to 0 ticks — it looks like a
    // per-frame duration that was never filled in; confirm the intended frame interval.
    _frameTime = (int)TimeSpan.FromSeconds((double)0).Ticks;

    // Start frame rate timer (ticks once per second to report the frame rate).
    _frameRateTimer = new DispatcherTimer() { Interval = TimeSpan.FromSeconds(1) };
    _frameRateTimer.Tick += FrameRateTimer_Tick;
    _frameRateTimer.Start();

    // Report that we finished initializing its internal state and can now pass in frame samples
    ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
}
/// <summary>
/// Captures the audio stream's configuration (optional codec private data) and
/// publishes the resulting stream description under the configuration lock.
/// </summary>
private void ConfigureAudioStream(IMediaParserMediaStream audio)
{
    var configurationSource = (IAudioConfigurationSource)audio.ConfigurationSource;

    string codecPrivateData = configurationSource.CodecPrivateData;

    Debug.WriteLine("TsMediaStreamSource.ConfigureAudioStream(): CodecPrivateData: " + codecPrivateData);

    var attributes = new Dictionary<MediaStreamAttributeKeys, string>();

    // Only advertise codec private data when the source actually provides some.
    if (!string.IsNullOrWhiteSpace(codecPrivateData))
    {
        attributes[MediaStreamAttributeKeys.CodecPrivateData] = codecPrivateData;
    }

    var description = new MediaStreamDescription(MediaStreamType.Audio, attributes);

    // Publish source and description together so readers never see a torn pair.
    lock (this._streamConfigurationLock)
    {
        this._audioStreamSource = audio.StreamSource;
        this._audioStreamDescription = description;
    }
}
/// <summary>
/// Reports an end-of-stream sample (a sample with a null Stream) for the given
/// stream and clears that stream's source; once both audio and video sources are
/// gone, marks the whole media source closed.
/// </summary>
/// <param name="mediaStreamDescription">The stream that has reached its end.</param>
/// <returns>Always true.</returns>
private bool SendLastStreamSample(MediaStreamDescription mediaStreamDescription)
{
    this._taskScheduler.ThrowIfNotOnThread();

    this.ReportGetSampleProgress(1.0);

    // A MediaStreamSample with a null Stream tells the pipeline this stream has ended.
    MediaStreamSample mediaStreamSample = new MediaStreamSample(mediaStreamDescription, (Stream)null, 0L, 0L, 0L, (IDictionary <MediaSampleAttributeKeys, string>)TsMediaStreamSource.NoMediaSampleAttributes);

    Debug.WriteLine("Sample {0} is null", (object)mediaStreamDescription.Type);

    // Drop the finished stream's source so we can detect when all streams are done.
    switch (mediaStreamDescription.Type)
    {
        case MediaStreamType.Audio:
            this.AudioStreamSource = (IStreamSource)null;
            break;
        case MediaStreamType.Video:
            this.VideoStreamSource = (IStreamSource)null;
            break;
        default:
            Debug.Assert(false, "Unknown stream type: " + (object)mediaStreamDescription.Type);
            break;
    }

    bool flag = this.VideoStreamSource == null && null == this.AudioStreamSource;

    if (flag)
    {
        Debug.WriteLine("TsMediaStreamSource.SendLastStreamSample() All streams closed");

        // Both streams ended: flag closure and move towards Closed unless already there.
        lock (this._stateLock)
        {
            this._isClosed = true;
            if (TsMediaStreamSource.SourceState.Closed != this._state)
            {
                this._state = TsMediaStreamSource.SourceState.WaitForClose;
            }
        }
    }

    this.ValidateEvent(MediaStreamFsm.MediaEvent.CallingReportSampleCompleted);

    // Completion is reported after the state bookkeeping above.
    this.ReportGetSampleCompleted(mediaStreamSample);

    if (flag)
    {
        this.ValidateEvent(MediaStreamFsm.MediaEvent.StreamsClosed);
    }

    return(true);
}
/// <summary>
/// Prepares the endless PCM audio stream: resets position/timestamp bookkeeping,
/// allocates the sample buffer, describes the single audio stream, starts the
/// timer, and reports the source as open.
/// </summary>
protected override void OpenMediaAsync()
{
    currentPosition = 0;
    currentTimeStamp = 0;

    memoryStream = new MemoryStream();
    binaryWriter = new BinaryWriter(memoryStream);

    // Single audio stream described by its PCM wave format.
    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>(1)
    {
        { MediaStreamAttributeKeys.CodecPrivateData, Formatter.ToPcmBase16String(DefaultWaveFormat) }
    };
    streamDescription = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);

    // Live-style source: no seeking, zero (unknown) duration.
    var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>(3)
    {
        { MediaSourceAttributesKeys.CanSeek, "false" },
        { MediaSourceAttributesKeys.Duration, "0" }
    };

    timer.Start();

    ReportOpenMediaCompleted(sourceAttributes, new[] { streamDescription });
}
/// <summary>
/// Parses the FLV container, splits its tags into audio and video sample lists,
/// builds the audio (custom wave format) and video (H.264) stream descriptions,
/// and reports the media as open.
/// </summary>
protected override void OpenMediaAsync()
{
    var flvFile = new FlvFile(this.mediaStream);
    this.audioSamples = flvFile.FlvFileBody.Tags.Where(tag => tag.TagType == TagType.Audio).ToList();
    this.videoSamples = flvFile.FlvFileBody.Tags.Where(tag => tag.TagType == TagType.Video).ToList();

    // Audio: hand-built wave format passed as CodecPrivateData.
    WaveFormatExtensible wfx = new WaveFormatExtensible();
    wfx.FormatTag = 0x00FF;
    wfx.Channels = 2;
    wfx.BlockAlign = 8;
    wfx.BitsPerSample = 16;
    wfx.SamplesPerSec = 44100;
    wfx.AverageBytesPerSecond = wfx.SamplesPerSec * wfx.Channels * wfx.BitsPerSample / wfx.BlockAlign;
    wfx.Size = 0;

    string codecPrivateData = wfx.ToHexString();
    var audioStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    audioStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = codecPrivateData;
    this.audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, audioStreamAttributes);

    // Video: H.264 elementary stream.
    var videoStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    videoStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "H264";
    this.videoStreamDescription = new MediaStreamDescription(MediaStreamType.Video, videoStreamAttributes);

    // Duration comes from the last tag's timestamp.
    // BUG FIX: the original called audioSamples.Last(), which throws
    // InvalidOperationException for an FLV with no audio tags; fall back to the
    // video tags (or 0) when a list is empty.
    string duration;
    if (this.audioSamples.Count > 0)
    {
        duration = this.audioSamples.Last().Timestamp.ToString(CultureInfo.InvariantCulture);
    }
    else if (this.videoSamples.Count > 0)
    {
        duration = this.videoSamples.Last().Timestamp.ToString(CultureInfo.InvariantCulture);
    }
    else
    {
        duration = "0";
    }

    var mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = duration;
    mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = true.ToString();

    var mediaStreamDescriptions = new List<MediaStreamDescription>();
    mediaStreamDescriptions.Add(this.audioStreamDescription);
    mediaStreamDescriptions.Add(this.videoStreamDescription);

    this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
}
/// <summary>
/// Describes the endless, non-seekable audio stream (wave format as
/// CodecPrivateData) and tells Silverlight the media is open.
/// </summary>
protected override void OpenMediaAsync()
{
    System.Diagnostics.Debug.WriteLine("Started OpenMediaAsync");

    _startPosition = 0;
    _currentPosition = 0;

    // Single audio stream carrying the wave format blob.
    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>
    {
        { MediaStreamAttributeKeys.CodecPrivateData, _waveFormat.ToHexString() } // wfx
    };
    _audioDesc = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);

    // Register the description so Silverlight will actually request samples for it.
    var availableStreams = new List<MediaStreamDescription> { _audioDesc };

    // Zero duration marks the stream as endless; seeking is not supported.
    var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>
    {
        { MediaSourceAttributesKeys.Duration, TimeSpan.FromMinutes(0).Ticks.ToString(CultureInfo.InvariantCulture) },
        { MediaSourceAttributesKeys.CanSeek, false.ToString() }
    };

    // Tell Silverlight we're done opening our media.
    ReportOpenMediaCompleted(sourceAttributes, availableStreams);

    //System.Diagnostics.Debug.WriteLine("Completed OpenMediaAsync");
}
/// <summary>
/// Opens the ASAP-decoded audio as a PCM stream: builds a hex-encoded WAVEFORMATEX
/// blob as CodecPrivateData and reports the single audio stream to the pipeline.
/// </summary>
protected override void OpenMediaAsync()
{
    int channels = this.Asap.GetInfo().GetChannels();
    // Bytes per sample frame: channels * BitsPerSample / 8.
    int blockSize = channels * BitsPerSample >> 3;
    // Hex-encoded WAVEFORMATEX (18 bytes, little-endian):
    //   FormatTag=0x0001 (PCM), Channels, SamplesPerSec, AvgBytesPerSec,
    //   BlockAlign, BitsPerSample, cbSize=0.
    // SwapBytes presumably reorders the 32-bit values into little-endian hex — TODO confirm.
    string waveFormatHex = string.Format("0100{0:X2}00{1:X8}{2:X8}{3:X2}00{4:X2}000000", channels, SwapBytes(ASAP.SampleRate), SwapBytes(ASAP.SampleRate * blockSize), blockSize, BitsPerSample);
    Dictionary <MediaStreamAttributeKeys, string> streamAttributes = new Dictionary <MediaStreamAttributeKeys, string>();
    streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = waveFormatHex;
    this.MediaStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);
    Dictionary <MediaSourceAttributesKeys, string> sourceAttributes = new Dictionary <MediaSourceAttributesKeys, string>();
    sourceAttributes[MediaSourceAttributesKeys.CanSeek] = "True";
    // Duration is expressed in 100 ns ticks; the x10000 factor suggests this.Duration
    // is in milliseconds — TODO confirm. Negative (unknown) durations are reported as 0.
    sourceAttributes[MediaSourceAttributesKeys.Duration] = (this.Duration < 0 ? 0 : this.Duration * 10000L).ToString();
    ReportOpenMediaCompleted(sourceAttributes, new MediaStreamDescription[1] { this.MediaStreamDescription });
}
/// <summary>
/// Describes an endless RGBA video stream at the configured dimensions, records
/// the start time, and reports the source as open.
/// </summary>
protected override void OpenMediaAsync()
{
    // Uncompressed RGBA frames at the configured size.
    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>
    {
        { MediaStreamAttributeKeys.VideoFourCC, "RGBA" },
        { MediaStreamAttributeKeys.Height, frameHeight.ToString() },
        { MediaStreamAttributeKeys.Width, frameWidth.ToString() }
    };

    videoDesc = new MediaStreamDescription(MediaStreamType.Video, streamAttributes);

    var availableStreams = new List<MediaStreamDescription> { videoDesc };

    // A zero timespan is an infinite video; seeking is disabled.
    var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>
    {
        { MediaSourceAttributesKeys.Duration, TimeSpan.Zero.Ticks.ToString(CultureInfo.InvariantCulture) },
        { MediaSourceAttributesKeys.CanSeek, false.ToString() }
    };

    startTime = DateTime.Now;

    // Tell Silverlight that we've prepared and opened our video.
    ReportOpenMediaCompleted(sourceAttributes, availableStreams);
}
/// <summary>
/// Opens an endless, non-seekable RGBA video stream sized to frameSize.
/// </summary>
protected override void OpenMediaAsync()
{
    int width = (int)frameSize.Width;
    int height = (int)frameSize.Height;

    // Uncompressed RGBA frames at the configured size.
    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>
    {
        { MediaStreamAttributeKeys.VideoFourCC, "RGBA" },
        { MediaStreamAttributeKeys.Width, width.ToString() },
        { MediaStreamAttributeKeys.Height, height.ToString() }
    };
    videoStreamDescription = new MediaStreamDescription(MediaStreamType.Video, streamAttributes);

    var streamDescriptions = new List<MediaStreamDescription> { videoStreamDescription };

    // Zero duration = endless stream; no seeking.
    var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>
    {
        { MediaSourceAttributesKeys.Duration, TimeSpan.FromSeconds(0).Ticks.ToString(CultureInfo.InvariantCulture) },
        { MediaSourceAttributesKeys.CanSeek, false.ToString() }
    };

    ReportOpenMediaCompleted(sourceAttributes, streamDescriptions);
}
/// <summary>
/// Opens a non-seekable mono 16-bit 44.1 kHz PCM audio stream with a short
/// audio buffer.
/// </summary>
protected override void OpenMediaAsync()
{
    // Endless, non-seekable source.
    var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>
    {
        { MediaSourceAttributesKeys.Duration, 0.ToString() },
        { MediaSourceAttributesKeys.CanSeek, false.ToString() }
    };

    // TODO: need to allow for configurable output format.
    this.format = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, 44100, 1, 2 * 44100, 2, 2 * 8);
    this.samples = new MemoryStream();

    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>
    {
        { MediaStreamAttributeKeys.CodecPrivateData, this.format.ToHexString() }
    };
    this.description = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);

    // Keep the audio buffer small for low latency (value in milliseconds per
    // MediaStreamSource.AudioBufferLength).
    this.AudioBufferLength = 30;

    this.ReportOpenMediaCompleted(sourceAttributes, new MediaStreamDescription[] { this.description });
}
/// <summary>
/// Opens a non-seekable, endless audio stream described by _waveFormat and
/// resets the running timestamp.
/// </summary>
protected override void OpenMediaAsync()
{
    _timestamp = 0;

    // Endless, non-seekable source.
    var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    sourceAttributes[MediaSourceAttributesKeys.Duration] = "0";
    sourceAttributes[MediaSourceAttributesKeys.CanSeek] = "false";

    // The wave format travels as the stream's codec private data.
    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = _waveFormat.ToHexString();

    _audioDescription = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);

    var availableStreams = new List<MediaStreamDescription>();
    availableStreams.Add(_audioDescription);

    ReportOpenMediaCompleted(sourceAttributes, availableStreams);
}
/// <summary>
/// Called once the ID3v2 tags have been skipped and the first MPEG audio frame is
/// available: builds the MPEG Layer 3 wave format from that frame, registers the
/// audio stream, estimates the track duration, and reports the media as open.
/// </summary>
/// <param name="mpegLayer3Frame">First MPEG frame; supplies channels, sample rate, bitrate and frame size.</param>
/// <param name="mediaStreamAttributes">Receives the CodecPrivateData entry.</param>
/// <param name="mediaStreamDescriptions">Receives the audio stream description.</param>
/// <param name="mediaSourceAttributes">Receives the Duration and CanSeek entries.</param>
/// <exception cref="InvalidOperationException">The frame size is not positive.</exception>
private void ReadPastId3v2TagsCallback(MpegFrame mpegLayer3Frame, Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes, List<MediaStreamDescription> mediaStreamDescriptions, Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes)
{
    if (mpegLayer3Frame.FrameSize <= 0)
    {
        // FIX: message matched neither branch before — the check rejects zero too.
        throw new InvalidOperationException("MpegFrame's FrameSize must be positive");
    }

    // MPEGLAYER3WAVEFORMAT: a WAVEFORMATEX (FormatTag 85 = WAVE_FORMAT_MPEGLAYER3)
    // followed by 12 bytes of MP3-specific extra data.
    WaveFormatExtensible formatExtensible = new WaveFormatExtensible();
    this.MpegLayer3WaveFormat = new MpegLayer3WaveFormat();
    this.MpegLayer3WaveFormat.WaveFormatExtensible = formatExtensible;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.FormatTag = (short)85;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.Channels = mpegLayer3Frame.Channels == Channel.SingleChannel ? (short)1 : (short)2;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.SamplesPerSec = mpegLayer3Frame.SamplingRate;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond = mpegLayer3Frame.Bitrate / 8;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.BlockAlign = (short)1;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.BitsPerSample = (short)0; // 0 = compressed format
    this.MpegLayer3WaveFormat.WaveFormatExtensible.ExtraDataSize = (short)12;
    this.MpegLayer3WaveFormat.Id = (short)1;
    this.MpegLayer3WaveFormat.BitratePaddingMode = 0;
    this.MpegLayer3WaveFormat.FramesPerBlock = (short)1;
    this.MpegLayer3WaveFormat.BlockSize = (short)mpegLayer3Frame.FrameSize;
    this.MpegLayer3WaveFormat.CodecDelay = (short)0;

    // FIX: use named enum members instead of the decompiler's numeric casts
    // ((MediaStreamAttributeKeys)0, (MediaStreamType)0, (MediaSourceAttributesKeys)0/1).
    mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = this.MpegLayer3WaveFormat.ToHexString();
    this.audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);
    mediaStreamDescriptions.Add(this.audioStreamDescription);

    // Approximate duration from stream length over the average byte rate.
    this.trackDuration = new TimeSpan(0, 0, (int)(this.audioStreamLength / (long)this.MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond));
    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = this.trackDuration.Ticks.ToString(CultureInfo.InvariantCulture);
    mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = this.audioStream.CanSeek ? "True" : "False";

    this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);

    this.currentFrame = mpegLayer3Frame;
    // The first frame's payload starts right after its 4-byte header.
    this.currentFrameStartPosition = 4L;
}
/// <summary>
/// Reports a single pre-encoded audio stream; the source is endless and cannot seek.
/// </summary>
protected override void OpenMediaAsync()
{
    // The pre-encoded wave format travels as codec private data.
    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>
    {
        { MediaStreamAttributeKeys.CodecPrivateData, _encodedWaveFormat }
    };
    _audioDesc = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);

    var availableStreams = new List<MediaStreamDescription> { _audioDesc };

    // Endless, non-seekable source.
    var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>
    {
        { MediaSourceAttributesKeys.Duration, "0" },
        { MediaSourceAttributesKeys.CanSeek, "False" }
    };

    ReportOpenMediaCompleted(sourceAttributes, availableStreams);
}
/// <summary>
/// Captures the video stream's configuration (FourCC, dimensions, optional codec
/// private data) and publishes the resulting description under the configuration lock.
/// </summary>
private void ConfigureVideoStream(IMediaParserMediaStream video)
{
    var configurationSource = (IVideoConfigurationSource)video.ConfigurationSource;

    string codecPrivateData = configurationSource.CodecPrivateData;

    Debug.WriteLine("MediaStreamConfigurator.ConfigureVideoStream(): CodecPrivateData: " + codecPrivateData);

    var attributes = new Dictionary<MediaStreamAttributeKeys, string>();
    attributes[MediaStreamAttributeKeys.VideoFourCC] = configurationSource.VideoFourCc;

    // Only advertise codec private data when the source actually provides some.
    if (!string.IsNullOrWhiteSpace(codecPrivateData))
    {
        attributes[MediaStreamAttributeKeys.CodecPrivateData] = codecPrivateData;
    }

    attributes[MediaStreamAttributeKeys.Height] = configurationSource.Height.ToString();
    attributes[MediaStreamAttributeKeys.Width] = configurationSource.Width.ToString();

    var description = new MediaStreamDescription(MediaStreamType.Video, attributes);

    // Publish source and description together so readers never see a torn pair.
    lock (this._streamConfigurationLock)
    {
        this._videoStreamSource = video.StreamSource;
        this._videoStreamDescription = description;
    }
}
/// <summary>
/// Opens an indefinite, non-seekable audio stream and starts the playback clock.
/// </summary>
protected override void OpenMediaAsync()
{
    // Define the available streams: one audio stream described by the wave format.
    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>
    {
        { MediaStreamAttributeKeys.CodecPrivateData, _waveFormat.ToHexString() }
    };
    _mediaStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);
    var availableStreams = new List<MediaStreamDescription> { _mediaStreamDescription };

    // Define pieces that are common to all streams.
    var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>
    {
        { MediaSourceAttributesKeys.Duration, TimeSpan.Zero.Ticks.ToString(CultureInfo.InvariantCulture) }, // 0 = Indefinite
        { MediaSourceAttributesKeys.CanSeek, "0" } // 0 = False
    };

    // Start the timer.
    _startTime = DateTime.Now;

    // Tell Silverlight we're ready to play.
    ReportOpenMediaCompleted(sourceAttributes, availableStreams);
}
/// <summary>
/// Releases the frame buffer, stops the frame-rate timer, and resets all
/// playback bookkeeping when the pipeline closes the source.
/// </summary>
protected override void CloseMedia()
{
    var stream = _frameStream;
    if (stream != null)
    {
        _frameStream = null;
        stream.Close();
    }

    var timer = _frameRateTimer;
    if (timer != null)
    {
        _frameRateTimer = null;
        timer.Stop();
        timer.Tick -= FrameRateTimer_Tick;
    }

    // Reset all counters and drop the buffer/description so a reopen starts clean.
    _frameStreamOffset = 0;
    _frameTime = 0;
    _frameCount = 0;
    _frameBufferSize = 0;
    _frameBuffer = null;
    _videoStreamDescription = null;
    _currentTime = 0;
}
/// <summary>
/// Applies a parsed media configuration: records the stream sources and
/// descriptions, then reports open-completed to the pipeline on the source's
/// own task scheduler.
/// </summary>
private void Configure(IMediaStreamConfiguration configuration)
{
    ICollection <MediaStreamDescription> descriptions = configuration.Descriptions;

    Debug.WriteLine("TsMediaStreamSource: ReportOpenMediaCompleted ({0} streams)", (object)descriptions.Count);

    this.VideoStreamSource = configuration.VideoStreamSource;
    this.AudioStreamSource = configuration.AudioStreamSource;

    IDictionary <MediaSourceAttributesKeys, string> attributes = configuration.Attributes;

    // Log every source attribute for diagnostics.
    foreach (KeyValuePair <MediaSourceAttributesKeys, string> keyValuePair in (IEnumerable <KeyValuePair <MediaSourceAttributesKeys, string> >)attributes)
    {
        Debug.WriteLine("TsMediaStreamSource: ReportOpenMediaCompleted {0} = {1}", (object)keyValuePair.Key, (object)keyValuePair.Value);
    }

    // Remember each stream's description by type (other types are ignored).
    foreach (MediaStreamDescription streamDescription in (IEnumerable <MediaStreamDescription>)descriptions)
    {
        switch (streamDescription.Type)
        {
            case MediaStreamType.Audio:
                this._audioStreamDescription = streamDescription;
                break;
            case MediaStreamType.Video:
                this._videoStreamDescription = streamDescription;
                break;
        }
    }

    // A known duration means the source is seekable; no duration means live.
    bool canSeek = configuration.Duration.HasValue;

    // ReportOpenMediaCompleted and the state transition run on the source's
    // task scheduler (enforced by ThrowIfNotOnThread inside the lambda).
    Task task = Task.Factory.StartNew((Action)(() =>
    {
        this._taskScheduler.ThrowIfNotOnThread();
        this.ValidateEvent(canSeek ? MediaStreamFsm.MediaEvent.CallingReportOpenMediaCompleted : MediaStreamFsm.MediaEvent.CallingReportOpenMediaCompletedLive);
        this.ReportOpenMediaCompleted(attributes, (IEnumerable <MediaStreamDescription>)descriptions);
        this.State = canSeek ? TsMediaStreamSource.SourceState.Seek : TsMediaStreamSource.SourceState.Play;
    }), CancellationToken.None, TaskCreationOptions.None, (TaskScheduler)this._taskScheduler);

    // Hand the task to the collector — presumably so faults are observed; TODO confirm.
    TaskCollector.Default.Add(task, "TsMediaStreamSource CompleteConfigure");
}
/// <summary>
/// Builds a PCM WAVEFORMATEX as a little-endian hex string, describes a single
/// endless audio stream with it, and reports the source as open.
/// </summary>
protected override void OpenMediaAsync()
{
    int byteRate = sampleRate * ChannelCount * BitsPerSample / 8;
    short blockAlign = (short)(ChannelCount * (BitsPerSample / 8));

    // String-based WAVEFORMATEX, each field converted to little-endian hex.
    string waveFormat = string.Concat(
        ToLittleEndianString(string.Format("{0:X4}", 1)), // wFormatTag: PCM
        ToLittleEndianString(string.Format("{0:X4}", ChannelCount)),
        ToLittleEndianString(string.Format("{0:X8}", sampleRate)),
        ToLittleEndianString(string.Format("{0:X8}", byteRate)),
        ToLittleEndianString(string.Format("{0:X4}", blockAlign)),
        ToLittleEndianString(string.Format("{0:X4}", BitsPerSample)),
        ToLittleEndianString(string.Format("{0:X4}", 0))); // cbSize: no extra data

    // The wave format travels as the stream's codec private data.
    var mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>
    {
        { MediaStreamAttributeKeys.CodecPrivateData, waveFormat }
    };

    mediaStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);
    var availableMediaStreams = new List<MediaStreamDescription> { mediaStreamDescription };

    // Endless, non-seekable source.
    var mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>
    {
        { MediaSourceAttributesKeys.Duration, "0" },
        { MediaSourceAttributesKeys.CanSeek, "false" }
    };

    // Signal that the open operation is completed.
    ReportOpenMediaCompleted(mediaSourceAttributes, availableMediaStreams);
}
/// <summary>
/// Extracts the SPS and PPS NAL units from an AVC decoder configuration record
/// (avcC box payload) and appends each one as a zero-timestamp sample.
/// </summary>
/// <param name="stream">Video stream description the samples belong to.</param>
/// <param name="samples">List that receives one sample per parameter set.</param>
/// <param name="data">Raw avcC payload bytes.</param>
private static void ParseAvcConfig(
    MediaStreamDescription stream,
    List<MediaStreamSample> samples,
    byte[] data)
{
    // avcC layout: 5 header bytes, then (3 reserved bits + 5-bit SPS count)
    // followed by 16-bit-length-prefixed SPS NALs, then an 8-bit PPS count
    // followed by length-prefixed PPS NALs.
    // FIX: dispose the stream and stop ignoring Stream.Read return values.
    using (var ios = new System.IO.MemoryStream(data))
    {
        ios.Seek(5, System.IO.SeekOrigin.Begin);

        int numSps = ios.ReadByte() & 0x1f;
        for (int i = 0; i < numSps; ++i)
        {
            samples.Add(ReadParameterSet(stream, ios));
        }

        int numPps = ios.ReadByte();
        for (int i = 0; i < numPps; ++i)
        {
            samples.Add(ReadParameterSet(stream, ios));
        }
    }
}

/// <summary>Reads one 16-bit-length-prefixed parameter set and wraps it in a sample.</summary>
private static MediaStreamSample ReadParameterSet(MediaStreamDescription stream, System.IO.Stream ios)
{
    int length = (ios.ReadByte() << 8) | ios.ReadByte();
    byte[] nal = new byte[length];

    // Stream.Read may return fewer bytes than requested; loop until complete
    // instead of silently zero-padding a short read.
    int read = 0;
    while (read < length)
    {
        int n = ios.Read(nal, read, length - read);
        if (n <= 0)
        {
            throw new System.IO.EndOfStreamException("Truncated AVC configuration record");
        }
        read += n;
    }

    return new MediaStreamSample(
        stream,
        new System.IO.MemoryStream(nal),
        0,
        length,
        0,
        new Dictionary<MediaSampleAttributeKeys, string>());
}
/// <summary>
/// Acknowledges a stream-switch request; no work is needed beyond logging and
/// completing the request.
/// </summary>
protected override void SwitchMediaStreamAsync(MediaStreamDescription mediaStreamDescription)
{
    Debug.WriteLine("SwitchMediaStreamAsync: " + mediaStreamDescription.StreamId);

    this.ReportSwitchMediaStreamCompleted(mediaStreamDescription);
}
/// <summary>
/// Resets position bookkeeping and drops the parser and audio description when
/// the pipeline closes the source.
/// </summary>
protected override void CloseMedia()
{
    _startPosition = 0;
    _currentPosition = 0;
    _RiffParser = null;
    _audioDescription = null;
}
/// <summary>
/// Opens the source: describes an HE-AAC (ADTS) audio stream and an H.264 video
/// stream whose SPS/PPS come from the first dequeued video block, then reports
/// the media as open. NOTE(review): if the first dequeued item is not a
/// VideoBlock this returns without reporting anything, leaving the pipeline
/// waiting — confirm that is intended.
/// </summary>
protected override void OpenMediaAsync()
{
    //WaveFormatEx
    HeAacWaveFormat aacf = new HeAacWaveFormat();
    WaveFormatExtensible wfx = new WaveFormatExtensible();
    aacf.WaveFormatExtensible = wfx;
    aacf.WaveFormatExtensible.FormatTag = 0x1610; //0xFF;//0x1610;
    aacf.WaveFormatExtensible.Channels = 2; // aacf.WaveFormatExtensible.BlockAlign = 1;
    aacf.WaveFormatExtensible.BitsPerSample = 0; //16; //unkonw set to 0
    aacf.WaveFormatExtensible.SamplesPerSec = 24000; // from 8000 to 96000 Hz
    aacf.WaveFormatExtensible.AverageBytesPerSecond = 0; //wfx.SamplesPerSec * wfx.Channels * wfx.BitsPerSample / wfx.BlockAlign;
    aacf.WaveFormatExtensible.Size = 12; // Extra 3 words in WAVEFORMATEX
    // refer to http://msdn.microsoft.com/en-us/library/windows/desktop/dd757806(v=vs.85).aspx
    aacf.wPayloadType = 0x0; //Audio Data Transport Stream (ADTS). The stream contains an adts_sequence, as defined by MPEG-2.
    aacf.wAudioProfileLevelIndication = 0xFE;
    aacf.wStructType = 0;
    string codecPrivateData = aacf.ToHexString();
    Dictionary <MediaStreamAttributeKeys, string> audioStreamAttributes = new Dictionary <MediaStreamAttributeKeys, string>();
    audioStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = codecPrivateData;
    audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, audioStreamAttributes);

    // Block until the producer queues the first video block; its first I-frame
    // supplies the SPS/PPS needed for the H.264 codec private data.
    m_vbuffer.WaitForWorkItem();
    m_curVideoBlk = m_vbuffer.Dequeue().CommandParameter as MSF.VideoBlock;
    if (m_curVideoBlk == null)
    {
        return;
    }
    vIdx = 0;
    fNum = (int)m_curVideoBlk.VideoFrameNum;
    H264NalFormat h264f = new H264NalFormat();
    h264f.sps = m_curVideoBlk.FirstIFrameInfo.sps;
    h264f.pps = m_curVideoBlk.FirstIFrameInfo.pps;
    string s = h264f.ToHexString();

    //Video
    // Frame size is hard-coded to 320x240 — TODO confirm it matches the encoder output.
    Dictionary <MediaStreamAttributeKeys, string> videoStreamAttributes = new Dictionary <MediaStreamAttributeKeys, string>();
    videoStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "H264";
    videoStreamAttributes[MediaStreamAttributeKeys.Height] = "240";
    videoStreamAttributes[MediaStreamAttributeKeys.Width] = "320";
    videoStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = s;//"0000016742E00D96520283F40500000168CE388000";
    videoStreamDescription = new MediaStreamDescription(MediaStreamType.Video, videoStreamAttributes);

    //Media
    Dictionary <MediaSourceAttributesKeys, string> mediaSourceAttributes = new Dictionary <MediaSourceAttributesKeys, string>();
    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromSeconds(6).Ticks.ToString(CultureInfo.InvariantCulture);
    mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "0"; // "0" = not seekable

    List <MediaStreamDescription> mediaStreamDescriptions = new List <MediaStreamDescription>();
#if !DEBUG
    // Emulator does not support HE-AAC
    mediaStreamDescriptions.Add(audioStreamDescription);
#endif
    mediaStreamDescriptions.Add(videoStreamDescription);

    this.AudioBufferLength = 500;
    this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
}
/// <summary>
/// Demuxer open callback: translates the demuxed media info into Silverlight
/// stream descriptions (one per video/audio stream; script streams are only
/// type-tracked) and reports the media as open. On failure, surfaces the error
/// and stops.
/// </summary>
private void OpenMediaCallback(
    Error ec)
{
    if (ec != Error.success)
    {
        ErrorOccurred(ec.ToString());
        return;
    }
    Media media;
    demuxer_.get_media(out media);
    Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes =
        new Dictionary<MediaSourceAttributesKeys, string>();
    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = media.duration.ToString();
    // ulong.MaxValue duration marks a live stream, which cannot seek.
    mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = (media.duration != ulong.MaxValue).ToString();
    List<MediaStreamDescription> mediaStreamDescriptions =
        new List<MediaStreamDescription>();
    for (int i = 0; i < media.streams.Length; ++i)
    {
        Stream stream = media.streams[i];
        Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes =
            new Dictionary<MediaStreamAttributeKeys, string>();
        // NOTE(review): codec_data appears to be a byte array, so ToString() yields
        // a type name rather than the codec bytes — the hex-encoding alternative
        // below is commented out. Confirm which form the pipeline expects.
        mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = stream.codec_data.ToString();
        if (stream.type == StreamType.video)
        {
            mediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = FourCC[(int)stream.sub_type];
            mediaStreamAttributes[MediaStreamAttributeKeys.Width] = stream.video.width.ToString();
            mediaStreamAttributes[MediaStreamAttributeKeys.Height] = stream.video.height.ToString();
            char[] CodecPrivateDataHex = new char[stream.codec_data.Length * 2];
            int index = 0;
            ToHexHelper(CodecPrivateDataHex, ref index, stream.codec_data); // ExtraData
            //mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] =
            //    new String(CodecPrivateDataHex);
            MediaStreamDescription videoStreamDescription =
                new MediaStreamDescription(MediaStreamType.Video, mediaStreamAttributes);
            mediaStreamDescriptions.Add(videoStreamDescription);
            mediaStreamTypes_.Add(MediaStreamType.Video);
            mediaStreamDescriptions_[MediaStreamType.Video] = videoStreamDescription;
            mediaStreamSamples_[MediaStreamType.Video] = new List<MediaStreamSample>();
            //ParseAvcConfig(videoStreamDescription, mediaStreamSamples_[MediaStreamType.Video], stream.codec_data);
        }
        else if (stream.type == StreamType.audio)
        {
            // Hand-build a WAVEFORMATEX hex string: 9 fixed fields plus the codec
            // extra data appended at the end.
            char[] WaveFormatExHex = new char[9 * 4 +
                stream.codec_data.Length * 2];
            int index = 0;
            ToHexHelper(WaveFormatExHex, ref index, 2, 255); // FormatTag
            ToHexHelper(WaveFormatExHex, ref index, 2, stream.audio.channel_count); // Channels
            ToHexHelper(WaveFormatExHex, ref index, 4, stream.audio.sample_rate); // SamplesPerSec
            ToHexHelper(WaveFormatExHex, ref index, 4, 0); // AverageBytesPerSecond
            ToHexHelper(WaveFormatExHex, ref index, 2, 1); // BlockAlign
            ToHexHelper(WaveFormatExHex, ref index, 2, stream.audio.sample_size); // BitsPerSample
            ToHexHelper(WaveFormatExHex, ref index, 2, stream.codec_data.Length); // ExtraDataSize
            ToHexHelper(WaveFormatExHex, ref index, stream.codec_data); // ExtraData
            mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] =
                new String(WaveFormatExHex);
            MediaStreamDescription audioStreamDescription =
                new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);
            mediaStreamDescriptions.Add(audioStreamDescription);
            mediaStreamTypes_.Add(MediaStreamType.Audio);
            mediaStreamDescriptions_[MediaStreamType.Audio] = audioStreamDescription;
            mediaStreamSamples_[MediaStreamType.Audio] = new List<MediaStreamSample>();
        }
        else
        {
            // Script/data streams are tracked by type only; no description is reported.
            mediaStreamTypes_.Add(MediaStreamType.Script);
        }
    } // for
    ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
}
/// <summary>
/// Clears position bookkeeping and the stream description when the pipeline
/// closes the source.
/// </summary>
protected override void CloseMedia()
{
    startPosition = 0;
    currentPosition = 0;
    mediaStreamDescription = null;
}
/// <summary>
/// Completes a stream-switch request immediately; this source has nothing to switch.
/// </summary>
protected override void SwitchMediaStreamAsync(MediaStreamDescription mediaStreamDescription)
{
    ReportSwitchMediaStreamCompleted(mediaStreamDescription);
}
/// <summary>
/// Parses the passed in MediaStream to find the first frame and signals
/// to its parent MediaElement that it is ready to begin playback by calling
/// ReportOpenMediaCompleted.
/// </summary>
protected override void OpenMediaAsync()
{
    System.Diagnostics.Debug.WriteLine(System.Threading.Thread.CurrentThread.ManagedThreadId.ToString() + ": OpenMediaAsync()");

    // So, here is why this is a little weird.
    // The Shoutcast server software has the ability to provide web pages. These pages just happen to be served from the SAME address as the media stream.
    // Putting a "/;" at the end of the Uri will tell the Shoutcast server that we aren't a web browser, so stream the data. The problem is that not ALL
    // Shoutcast servers are configured that way. So, we have to do a request to get the content type. If it is text/html, we append the "/;" and move on.
    // If it is an empty string, 99.9% of the time, this will be the media stream (If it's an ICY stream, the ICY "headers" don't parse properly). The ShoutcastStream
    // will handle this case, so we let it go through.
    HttpWebRequest contentTypeRequest = ShoutcastMediaStreamSource.CreateHttpWebRequest(this.StreamUri, this.IncludeMetadata);
    contentTypeRequest.BeginGetResponse(
        ia1 =>
        {
            HttpWebRequest req1 = ia1.AsyncState as HttpWebRequest;
            try
            {
                HttpWebResponse res1 = (HttpWebResponse)req1.EndGetResponse(ia1);
                string contentType = res1.ContentType;
                // Empty or audio content types: this response IS the media stream.
                if ((contentType == string.Empty) || (contentType == "audio/mpeg") || contentType == "audio/x-mpegurl")
                {
                    try
                    {
                        this.audioStream = new ShoutcastStream(this, res1);
                        this.audioStreamDescription = this.audioStream.AudioStreamDescription;
                        this.ReportOpenMediaCompleted(this.audioStream.AudioSourceAttributes, new MediaStreamDescription[] { this.audioStream.AudioStreamDescription });
                    }
                    catch (Exception ex)
                    {
                        this.CleanupAudioStream();
                        this.ErrorOccurred(ex.Message);
                    }
                }
                else
                {
                    // Close the original response. We need another one.
                    res1.Close();
                    res1 = null;
                    // Re-request with "/;" (or just ";") appended so the server
                    // streams the data instead of serving its web page.
                    if (!this.StreamUri.OriginalString.EndsWith("/", StringComparison.Ordinal))
                    {
                        this.StreamUri = new Uri(this.StreamUri.OriginalString + "/;", UriKind.Absolute);
                    }
                    else
                    {
                        this.StreamUri = new Uri(this.StreamUri.OriginalString + ";", UriKind.Absolute);
                    }

                    HttpWebRequest streamRequest = ShoutcastMediaStreamSource.CreateHttpWebRequest(this.StreamUri, this.IncludeMetadata);
                    streamRequest.BeginGetResponse(
                        ia =>
                        {
                            HttpWebRequest req = ia.AsyncState as HttpWebRequest;
                            try
                            {
                                HttpWebResponse res = (HttpWebResponse)req.EndGetResponse(ia);
                                this.audioStream = new ShoutcastStream(this, res);
                                this.audioStreamDescription = this.audioStream.AudioStreamDescription;
                                this.ReportOpenMediaCompleted(this.audioStream.AudioSourceAttributes, new MediaStreamDescription[] { this.audioStream.AudioStreamDescription });
                            }
                            catch (Exception ex)
                            {
                                // res1 was closed and nulled above, so this close is
                                // defensive only.
                                if (res1 != null)
                                {
                                    res1.Close();
                                }

                                this.CleanupAudioStream();
                                this.ErrorOccurred(ex.Message);
                            }
                        },
                        streamRequest);
                }
            }
            catch (Exception ex)
            {
                this.CleanupAudioStream();
                this.ErrorOccurred(ex.Message);
            }
        },
        contentTypeRequest);
}
/// <summary>
/// Releases the stream description when the pipeline closes the source.
/// </summary>
protected override void CloseMedia()
{
    mediaStreamDescription = null;
}
/// <summary>
/// Opens the rendered audio stream: computes the per-block timestamp increment,
/// describes a single audio stream via the canned wave header, reports the media
/// as open, and kicks off sample generation.
/// </summary>
protected override void OpenMediaAsync()
{
    // Ticks covered by one buffer block: one second divided by blocks-per-second.
    m_timestampBlock = (long)Math.Round((decimal)(TimeSpan.FromSeconds(1).Ticks / (BUFFER_BLOCK_SIZE * m_multiple)));

    var mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = WAVE_HEADER;
    description = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);

    var mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = false.ToString();
    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = m_tracks[Track.TEMPO_TRACK].Duration.TimeSpan.Ticks.ToString(CultureInfo.InvariantCulture);

    // BUG FIX: report the same MediaStreamDescription instance stored in
    // 'description'. The original added a second, distinct instance to the list,
    // so samples later reported against 'description' would not match the stream
    // the pipeline was told about.
    var availableMediaStreams = new List<MediaStreamDescription>();
    availableMediaStreams.Add(description);

    ReportOpenMediaCompleted(mediaSourceAttributes, availableMediaStreams);
    ReportGetSampleProgress(0);
    GetSamples();
}
/// <summary>
/// Tears down the audio state when the pipeline closes the source; safe even if
/// OpenMediaAsync never ran or CloseMedia is invoked more than once.
/// </summary>
protected override void CloseMedia()
{
    // FIX: guard against close-before-open / double-close — the original threw
    // NullReferenceException if binaryWriter (or timer) was never created.
    if (binaryWriter != null)
    {
        binaryWriter.Dispose();
        binaryWriter = null;
    }

    if (timer != null)
    {
        timer.Stop();
    }

    streamDescription = null;
}
/// <summary>
/// Shouldn't get a call here because only one MediaStreamDescription is
/// supported, so stream switching is deliberately not implemented.
/// </summary>
protected override void SwitchMediaStreamAsync(MediaStreamDescription mediaStreamDescription)
{
    throw new NotImplementedException();
}
/// <summary>
/// Completes a stream-switch request; there is nothing to switch, but the
/// MediaStreamSource contract requires acknowledging the request.
/// </summary>
protected override void SwitchMediaStreamAsync(MediaStreamDescription mediaStreamDescription)
{
    // FIX: the original empty body never called ReportSwitchMediaStreamCompleted,
    // so any switch request from the pipeline would hang indefinitely.
    ReportSwitchMediaStreamCompleted(mediaStreamDescription);
}