Code Example #1
File: Mp3Demuxer.cs Project: dfr0/moon
		protected override void OpenMediaAsync ()
		{
			Dictionary<MediaSourceAttributesKeys, string> media_attributes = new Dictionary<MediaSourceAttributesKeys, string> ();
			List<MediaStreamDescription> media_streams = new List<MediaStreamDescription> ();
			Dictionary<MediaStreamAttributeKeys, string> stream_attributes = new Dictionary<MediaStreamAttributeKeys,string> ();
			MediaStreamDescription media_stream = new MediaStreamDescription (MediaStreamType.Audio, stream_attributes);
			long duration = 60 * 10000;
			WaveFormatEx wave = new WaveFormatEx ();
			Mp3Frame frame = Mp3Frame.Read (stream);

			wave.FormatTag = 85;
			wave.AvgBytesPerSec = (uint) frame.Bitrate / 8;
			wave.BitsPerSample = 0;
			wave.BlockAlign = 1;
			wave.Channels = (ushort) frame.Channels;
			wave.SamplesPerSec = (uint) frame.SampleRate; // a ushort here would truncate sample rates above 65535 Hz
			wave.Size = 12;

			media_attributes.Add (MediaSourceAttributesKeys.CanSeek, "0");
			media_attributes.Add (MediaSourceAttributesKeys.Duration, duration.ToString ());
			stream_attributes [MediaStreamAttributeKeys.CodecPrivateData] = wave.Encoded;
			
			media_streams.Add (media_stream);

			try {
				this.frame = frame;
				this.description = media_stream;
				ReportOpenMediaCompleted (media_attributes, media_streams);
				opened = DateTime.Now;
			} catch (Exception ex) {
				Console.WriteLine (ex);
			}
		}
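Two magic numbers above are worth decoding: FormatTag 85 (0x0055) is WAVE_FORMAT_MPEGLAYER3, and Size = 12 is the number of extra bytes MPEGLAYER3WAVEFORMAT appends to WAVEFORMATEX (wID, fdwFlags, nBlockSize, nFramesPerBlock, nCodecDelay); Code Example #23 below fills in those same fields explicitly.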
Code Example #2
File: DiracStreamSource.cs Project: mono/mooncodecs
        protected override void OpenMediaAsync()
        {
            Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
            Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
            List<MediaStreamDescription> mediaStreamDescriptions = new List<MediaStreamDescription>();

            byte[] videoData = new byte[this.videoStream.Length];
            if (videoData.Length != this.videoStream.Read(videoData, 0, videoData.Length))
            {
                throw new IOException("Could not read in the VideoStream");
            }

            dec = new org.diracvideo.Jirac.Decoder();
            dec.Push(videoData, 0, videoData.Length);
            dec.Decode();

            mediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "RGBA";
            mediaStreamAttributes[MediaStreamAttributeKeys.Height] = dec.format.height.ToString ();
            mediaStreamAttributes[MediaStreamAttributeKeys.Width] = dec.format.width.ToString ();

            this.streamDescription = new MediaStreamDescription(MediaStreamType.Video, mediaStreamAttributes);

            mediaStreamDescriptions.Add(streamDescription);

            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromMinutes(5).Ticks.ToString(CultureInfo.InvariantCulture);
            mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = false.ToString ();

            this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
        }
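For context, a minimal sketch of how a custom MediaStreamSource such as this one is handed to the pipeline; the DiracStreamSource constructor argument is a stand-in, but MediaElement.SetSource(MediaStreamSource) is the standard Silverlight entry point that triggers OpenMediaAsync:

            // Sketch: wire a custom MediaStreamSource to a MediaElement.
            // The constructor argument here is hypothetical; adapt it to the class above.
            MediaElement player = new MediaElement();
            MediaStreamSource source = new DiracStreamSource(videoStream);
            player.SetSource(source); // the pipeline responds by calling OpenMediaAsync()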
Code Example #3
File: MyMediaStreamSource.cs Project: prog76/Pacman
 protected override void CloseMedia()
 {
     System.Diagnostics.Debug.WriteLine("CloseMedia");
     // Close the stream
     _startPosition = _currentPosition = 0;
     _audioDesc = null;
 }
Code Example #4
 /// <summary>
 /// Close the media. Release the resources.
 /// </summary>
 protected override void CloseMedia()
 {
     // Close the stream
     this.startPosition = this.currentPosition = 0;
     this.wavParser = null;
     this.audioDesc = null;
 }
Code Example #5
        protected override void OpenMediaAsync()
        {
            _frameTime = (int)TimeSpan.FromSeconds((double)1 / 30).Ticks;

            // Init
            Dictionary<MediaSourceAttributesKeys, string> sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
            List<MediaStreamDescription> availableStreams = new List<MediaStreamDescription>();

            // Stream Description
            Dictionary<MediaStreamAttributeKeys, string> streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();

            streamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "RGBA";
            streamAttributes[MediaStreamAttributeKeys.Height] = format.PixelHeight.ToString();
            streamAttributes[MediaStreamAttributeKeys.Width] = format.PixelWidth.ToString();

            MediaStreamDescription msd = new MediaStreamDescription(MediaStreamType.Video, streamAttributes);

            _videoDescription = msd;
            availableStreams.Add(_videoDescription);

            // a zero timespan is an infinite video
            sourceAttributes[MediaSourceAttributesKeys.Duration] =
                TimeSpan.FromSeconds(0).Ticks.ToString(CultureInfo.InvariantCulture);
            sourceAttributes[MediaSourceAttributesKeys.CanSeek] = false.ToString();

            ReportOpenMediaCompleted(sourceAttributes, availableStreams);
        }
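One caveat in the frame-time computation above: TimeSpan.FromSeconds is documented as accurate only to the nearest millisecond, so FromSeconds(1.0 / 30).Ticks yields 330,000 ticks (33 ms) rather than the exact 333,333, shaving about 1% off each 30 fps frame interval.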
Code Example #6
        protected override void OpenMediaAsync()
        {
            try
            {
                this.wavParser = new WavParser(this.stream);

                this.wavParser.ParseWaveHeader();

                this.wavParser.WaveFormatEx.ValidateWaveFormat();

                this.startPosition = this.currentPosition = this.wavParser.DataPosition;

                Dictionary<MediaStreamAttributeKeys, string> streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
                Dictionary<MediaSourceAttributesKeys, string> sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
                List<MediaStreamDescription> availableStreams = new List<MediaStreamDescription>();

                streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = this.wavParser.WaveFormatEx.ToHexString();
                MediaStreamDescription msd = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);

                this.audioDesc = msd;
                availableStreams.Add(this.audioDesc);

                sourceAttributes[MediaSourceAttributesKeys.Duration] = this.wavParser.Duration.ToString();
                ReportOpenMediaCompleted(sourceAttributes, availableStreams);
            }
            catch (Exception e)
            {
                // Swallowing the exception would leave the MediaElement stuck in the
                // Opening state; report the failure to the pipeline instead.
                ErrorOccurred(e.Message);
            }
        }
Code Example #7
        /// <summary>
        /// Constructs the parser and builds its video MediaStreamDescription
        /// from the resolution advertised in the HLS playlist metadata.
        /// </summary>
        /// <param name="outputBuffer">Buffer that receives the parsed samples</param>
        /// <param name="metadata">Container metadata, queried for the resolution</param>
        /// <param name="hlsStream">The HLS stream being parsed</param>
        public H264Parser(SampleBuffer outputBuffer, IContainerMetadata metadata, HLSStream hlsStream)
            : base(outputBuffer, hlsStream)
        {
            string[] resolution = null;

            string s;
            if (metadata.Attributes != null &&
                metadata.Attributes.TryGetValue(HLSPlaylistMetaKeys.Resolution, out s))
            {
                string[] components = s.Split(new char[] { 'x' });
                if (components != null && components.Length == 2)
                    resolution = components;
            }

            if (resolution == null)
            {
                HLSTrace.WriteLine("Missing 'Resolution' tag in HLS MetaKeys, defaulting to the maximum supported resolution of 1280x720.");
                resolution = new string[] { "1280", "720" };
            }

            Dictionary<MediaStreamAttributeKeys, string> streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
            streamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "H264";
            streamAttributes[MediaStreamAttributeKeys.Width] = resolution[0];
            streamAttributes[MediaStreamAttributeKeys.Height] = resolution[1];
            Description = new MediaStreamDescription(MediaStreamType.Video, streamAttributes);
        }
Code Example #8
File: MediaStreamSample.cs Project: dfr0/moon
		public MediaStreamSample (MediaStreamDescription mediaStreamDescription, Stream stream, long offset, long count, long timestamp, long duration, IDictionary<MediaSampleAttributeKeys, string> attributes)
		{
			this.media_stream_description = mediaStreamDescription;
			this.stream = stream;
			this.offset = offset;
			this.count = count;
			this.timestamp = timestamp;
			this.attributes = attributes;
			this.duration = duration;
		}
Code Example #9
	protected override void OpenMediaAsync()
	{
		int channels = this.Asap.GetInfo().GetChannels();
		int blockSize = channels * BitsPerSample >> 3;
		string waveFormatHex = string.Format("0100{0:X2}00{1:X8}{2:X8}{3:X2}00{4:X2}000000",
			channels, SwapBytes(ASAP.SampleRate), SwapBytes(ASAP.SampleRate * blockSize), blockSize, BitsPerSample);
		Dictionary<MediaStreamAttributeKeys, string> streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
		streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = waveFormatHex;
		this.MediaStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);

		Dictionary<MediaSourceAttributesKeys, string> sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
		sourceAttributes[MediaSourceAttributesKeys.CanSeek] = "True";
		sourceAttributes[MediaSourceAttributesKeys.Duration] = (this.Duration < 0 ? 0 : this.Duration * 10000).ToString(); // milliseconds to 100 ns ticks; negative (unknown) durations become 0

		ReportOpenMediaCompleted(sourceAttributes, new MediaStreamDescription[1] { this.MediaStreamDescription });
	}
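SwapBytes is referenced above but not shown. A minimal sketch consistent with its use here, where each multi-byte field must come out of the {0:X8} format in little-endian byte order (an assumption based on the WAVEFORMATEX layout, not the library's actual code):

	static int SwapBytes(int value)
	{
		// Reverse the byte order of a 32-bit value: 44100 (0x0000AC44)
		// becomes 0x44AC0000, which formats as the little-endian hex "44AC0000".
		return (int) (((uint) value >> 24)
			| (((uint) value >> 8) & 0xff00u)
			| (((uint) value << 8) & 0xff0000u)
			| ((uint) value << 24));
	}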
Code Example #10
        protected override void OpenMediaAsync()
        {
            var mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
            var mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
            var mediaStreamDescriptions = new List<MediaStreamDescription>();

            var wfx = new MediaParsers.WaveFormatExtensible () {
                FormatTag = 1, // PCM
                Channels = parameters.Channels,
                SamplesPerSec = parameters.SamplesPerSecond,
                AverageBytesPerSecond = parameters.SamplesPerSecond * 2 * 2, // assumes 16-bit stereo
                BlockAlign = 0, // for PCM this would normally be Channels * BitsPerSample / 8
                BitsPerSample = parameters.BitsPerSample,
                Size = 0 };

            mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = wfx.ToHexString();
            this.media_desc = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);

            mediaStreamDescriptions.Add(this.media_desc);

            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = this.track_duration.Ticks.ToString (CultureInfo.InvariantCulture);
            mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = true.ToString ();

            // Without this call the MediaElement never leaves its Opening state.
            ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
        }
Code Example #11
File: DiracStreamSource.cs Project: mono/csdirac
        protected override void OpenMediaAsync()
        {
            // Initialize data structures to pass to the Media pipeline via the MediaStreamSource
            Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
            Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
            List<MediaStreamDescription> mediaStreamDescriptions = new List<MediaStreamDescription>();

            // Pull in the entire video stream.
            byte[] videoData = new byte[this.videoStream.Length];
            if (videoData.Length != this.videoStream.Read(videoData, 0, videoData.Length))
            {
                throw new IOException("Could not read in the VideoStream");
            }
            // TODO: parse until the first frame; find the offset of the first
            // frame and pass it as the length parameter to Push.
            org.diracvideo.Jirac.Decoder dec = new org.diracvideo.Jirac.Decoder();
            dec.Push(videoData, 0, videoData.Length);
            dec.Decode();

            mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = dec.format.ToString();
            this.streamDescription = new MediaStreamDescription(MediaStreamType.Video, mediaStreamAttributes);

            mediaStreamDescriptions.Add(streamDescription);

            // Report a fixed five-minute duration rather than computing the real
            // stream length; this keeps this initial version of the code simple.
            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromMinutes(5).Ticks.ToString(CultureInfo.InvariantCulture);
            mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "0";

            // Report that the DiracMediaStreamSource has finished initializing its internal state and can now
            // pass in Dirac Samples.
            this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);

            //this.currentFrameStartPosition = result;
            //this.currentFrameSize = mpegLayer3Frame.FrameSize;
        }
Code Example #12
File: WaveMediaStreamSource2.cs Project: PavelPZ/REW
    /// <summary>
    /// Open the media.
    /// Create the structures.
    /// </summary>
    protected override void OpenMediaAsync() {
      header = WaveFormatExtensible.ReadHeader(stream);
      header.ValidateWaveFormat();

      sampleSize = (long)header.Channels * header.BitsPerSample / 8 * numSamples;
      startPosition = currentPosition = stream.Position;
      pcmDataLen = stream.Length - startPosition;
      duration = header.AudioDurationFromDataLen(pcmDataLen);

      // Init
      Dictionary<MediaStreamAttributeKeys, string> streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
      Dictionary<MediaSourceAttributesKeys, string> sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
      List<MediaStreamDescription> availableStreams = new List<MediaStreamDescription>();

      // Stream Description
      streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = header.ToHexString();
      MediaStreamDescription msd = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);

      this.audioDesc = msd;
      availableStreams.Add(this.audioDesc);

      sourceAttributes[MediaSourceAttributesKeys.Duration] = duration.ToString();
      ReportOpenMediaCompleted(sourceAttributes, availableStreams);
    }
Code Example #13
        protected override void OpenMediaAsync()
        {
            startPosition = currentPosition = 0;

            Dictionary<MediaStreamAttributeKeys, string> streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
            Dictionary<MediaSourceAttributesKeys, string> sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
            List<MediaStreamDescription> availableStreams = new List<MediaStreamDescription>();

            string format = "";
            format += ToLittleEndianString(string.Format("{0:X4}", 1));  //PCM
            format += ToLittleEndianString(string.Format("{0:X4}", Constants.ChannelCount));
            format += ToLittleEndianString(string.Format("{0:X8}", Constants.SampleRate));
            format += ToLittleEndianString(string.Format("{0:X8}", byteRate));
            format += ToLittleEndianString(string.Format("{0:X4}", blockAlign));
            format += ToLittleEndianString(string.Format("{0:X4}", Constants.BitsPerSample));
            format += ToLittleEndianString(string.Format("{0:X4}", 0));

            streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = format;
            mediaStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);
            availableStreams.Add(mediaStreamDescription);
            sourceAttributes[MediaSourceAttributesKeys.Duration] = "0";
            sourceAttributes[MediaSourceAttributesKeys.CanSeek] = "false";
            ReportOpenMediaCompleted(sourceAttributes, availableStreams);
        }
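ToLittleEndianString is likewise referenced but not shown. Given how the CodecPrivateData string is assembled above, it must reverse a big-endian hex string one byte (two characters) at a time; a sketch under that assumption:

        private static string ToLittleEndianString(string bigEndianString)
        {
            // "0000AC44" (44100 formatted with {0:X8}) becomes "44AC0000".
            System.Text.StringBuilder builder = new System.Text.StringBuilder(bigEndianString.Length);
            for (int i = bigEndianString.Length - 2; i >= 0; i -= 2)
            {
                builder.Append(bigEndianString, i, 2);
            }
            return builder.ToString();
        }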
Code Example #14
        /// <summary>
        /// Initialises the data structures to pass data to the media pipeline
        /// via the MediaStreamSource.
        /// </summary>
        protected override void OpenMediaAsync()
        {
            Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes =
                new Dictionary<MediaSourceAttributesKeys, string>();
            Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes =
                new Dictionary<MediaStreamAttributeKeys, string>();
            List<MediaStreamDescription> mediaStreamDescriptions =
                new List<MediaStreamDescription>();

            CameraStreamSourceDataSingleton dataSource = CameraStreamSourceDataSingleton.Instance;

            mediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "RGBA";
            mediaStreamAttributes[MediaStreamAttributeKeys.Width] = dataSource.FrameWidth.ToString();
            mediaStreamAttributes[MediaStreamAttributeKeys.Height] = dataSource.FrameHeight.ToString();

            videoStreamDescription =
                new MediaStreamDescription(MediaStreamType.Video, mediaStreamAttributes);
            mediaStreamDescriptions.Add(videoStreamDescription);

            // A zero timespan is an infinite video
            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] =
                TimeSpan.FromSeconds(0).Ticks.ToString(CultureInfo.InvariantCulture);

            mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = false.ToString();

            frameTime = (int)TimeSpan.FromSeconds((double)0).Ticks;

            // Report that we finished initializing its internal state and can now
            // pass in frame samples.
            ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);

            DispatcherTimer fpsTimer = new DispatcherTimer();
            fpsTimer.Interval = TimeSpan.FromSeconds(1);
            fpsTimer.Tick += Fps_Tick; 
            fpsTimer.Start();
        }
Code Example #15
File: PpboxSource.cs Project: uvbs/MyProjects
        private void OpenMediaCallback(
            Error ec)
        {
            if (ec != Error.success)
            {
                ErrorOccurred(ec.ToString());
                return;
            }

            Media media;
            demuxer_.get_media(out media);

            Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes = 
                new Dictionary<MediaSourceAttributesKeys, string>();
            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] =
                media.duration.ToString();
            mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] =
                (media.duration != ulong.MaxValue).ToString();

            List<MediaStreamDescription> mediaStreamDescriptions = 
                new List<MediaStreamDescription>();
            for (int i = 0; i < media.streams.Length; ++i)
            {
                Stream stream = media.streams[i];
                Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes =
                    new Dictionary<MediaStreamAttributeKeys, string>();
                mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] =
                    stream.codec_data.ToString(); // bug: ToString() on a byte array yields "System.Byte[]"; only the audio branch below overwrites this with real hex
                if (stream.type == StreamType.video)
                {
                    mediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] =
                        FourCC[(int)stream.sub_type];
                    mediaStreamAttributes[MediaStreamAttributeKeys.Width] =
                        stream.video.width.ToString();
                    mediaStreamAttributes[MediaStreamAttributeKeys.Height] =
                        stream.video.height.ToString();
                    char[] CodecPrivateDataHex = new char[stream.codec_data.Length * 2];
                    int index = 0;
                    ToHexHelper(CodecPrivateDataHex, ref index, stream.codec_data); // ExtraData
                    //mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] =
                    //    new String(CodecPrivateDataHex);
                    MediaStreamDescription videoStreamDescription =
                        new MediaStreamDescription(MediaStreamType.Video, mediaStreamAttributes);
                    mediaStreamDescriptions.Add(videoStreamDescription);
                    mediaStreamTypes_.Add(MediaStreamType.Video);
                    mediaStreamDescriptions_[MediaStreamType.Video] = videoStreamDescription;
                    mediaStreamSamples_[MediaStreamType.Video] = new List<MediaStreamSample>();
                    //ParseAvcConfig(videoStreamDescription, mediaStreamSamples_[MediaStreamType.Video], stream.codec_data);
                }
                else if (stream.type == StreamType.audio)
                {
                    char[] WaveFormatExHex = new char[9 * 4 + stream.codec_data.Length * 2];
                    int index = 0;
                    ToHexHelper(WaveFormatExHex, ref index, 2, 255); // FormatTag
                    ToHexHelper(WaveFormatExHex, ref index, 2, stream.audio.channel_count); // Channels
                    ToHexHelper(WaveFormatExHex, ref index, 4, stream.audio.sample_rate); // SamplesPerSec
                    ToHexHelper(WaveFormatExHex, ref index, 4, 0); // AverageBytesPerSecond
                    ToHexHelper(WaveFormatExHex, ref index, 2, 1); // BlockAlign
                    ToHexHelper(WaveFormatExHex, ref index, 2, stream.audio.sample_size); // BitsPerSample
                    ToHexHelper(WaveFormatExHex, ref index, 2, stream.codec_data.Length); // ExtraDataSize
                    ToHexHelper(WaveFormatExHex, ref index, stream.codec_data); // ExtraData
                    mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] =
                        new String(WaveFormatExHex);
                    MediaStreamDescription audioStreamDescription = 
                        new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);
                    mediaStreamDescriptions.Add(audioStreamDescription);
                    mediaStreamTypes_.Add(MediaStreamType.Audio);
                    mediaStreamDescriptions_[MediaStreamType.Audio] = audioStreamDescription;
                    mediaStreamSamples_[MediaStreamType.Audio] = new List<MediaStreamSample>();
                }
                else
                {
                    mediaStreamTypes_.Add(MediaStreamType.Script);
                }
            } // for

            ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
        }
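The ToHexHelper overloads are not shown, but the buffer sizing (9 × 4 chars for the 18-byte WAVEFORMATEX header plus two chars per extra-data byte) implies each value is written as little-endian hex. A hypothetical sketch:

            // Hypothetical: write byteCount bytes of value into the buffer as
            // little-endian hex, least-significant byte first.
            static void ToHexHelper(char[] buffer, ref int index, int byteCount, long value)
            {
                const string digits = "0123456789ABCDEF";
                for (int i = 0; i < byteCount; ++i)
                {
                    byte b = (byte)(value >> (8 * i));
                    buffer[index++] = digits[b >> 4];
                    buffer[index++] = digits[b & 0xf];
                }
            }

            // Companion overload for raw byte arrays (ExtraData).
            static void ToHexHelper(char[] buffer, ref int index, byte[] data)
            {
                const string digits = "0123456789ABCDEF";
                foreach (byte b in data)
                {
                    buffer[index++] = digits[b >> 4];
                    buffer[index++] = digits[b & 0xf];
                }
            }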
Code Example #16
File: MediaStreamSource.cs Project: ya1gaurav/moon
 protected abstract void SwitchMediaStreamAsync(MediaStreamDescription mediaStreamDescription);
Code Example #17
File: MediaStreamSource.cs Project: ya1gaurav/moon
 internal void SwitchMediaStreamAsyncInternal(MediaStreamDescription mediaStreamDescription)
 {
     media_element.Dispatcher.BeginInvoke(delegate() {
         SwitchMediaStreamAsync(mediaStreamDescription);
     });
 }
Code Example #18
 protected override void CloseMedia()
 {
     binaryWriter.Dispose();
     timer.Stop();
     streamDescription = null;
 }
Code Example #19
        protected override void OpenMediaAsync()
        {
            currentPosition = currentTimeStamp = 0;
            memoryStream = new MemoryStream();
            binaryWriter = new BinaryWriter(memoryStream);

            var mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>(1);
            var mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>(3);

            mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = Formatter.ToPcmBase16String(DefaultWaveFormat);
            mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "false";
            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = "0";

            streamDescription = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);

            timer.Start();
            
            ReportOpenMediaCompleted(mediaSourceAttributes, new[] { streamDescription });
        }
Code Example #20
        private void PrepareVideo()
        {
            Debug.WriteLine("VideoMediaStreamSource::PrepareVideo()");
            // Stream Description
            Dictionary<MediaStreamAttributeKeys, string> streamAttributes =
                new Dictionary<MediaStreamAttributeKeys, string>();

            // Select the same encoding and dimensions as the video capture
            streamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "H264";
            streamAttributes[MediaStreamAttributeKeys.Height] = _frameHeight.ToString();
            streamAttributes[MediaStreamAttributeKeys.Width] = _frameWidth.ToString();

            MediaStreamDescription msd =
                new MediaStreamDescription(MediaStreamType.Video, streamAttributes);

            _videoDesc = msd;
        }
Code Example #21
File: MediaStreamSource.cs Project: kangaroo/moon
		protected void ReportSwitchMediaStreamCompleted (MediaStreamDescription mediaStreamDescription)
		{
			// FIXME: wrong/overzealous validations wrt SL2 (see unit tests)
			if (closed || media_element == null || demuxer == IntPtr.Zero)
				throw new InvalidOperationException ();

			// FIXME: where is the mediaStreamDescription parameter being used ?
			NativeMethods.imedia_demuxer_report_get_frame_completed (demuxer, IntPtr.Zero);
		}
Code Example #22
        /// <summary>
        /// Kick-start the media player.
        /// Adds H264 playing info.
        /// </summary>
        protected override void OpenMediaAsync()
        {
            var mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
            Dictionary<MediaStreamAttributeKeys, string> videoStreamAttributes = GetVideoSettings();
            Dictionary<MediaStreamAttributeKeys, string> audioStreamAttributes = GetAudioSettings();

            var mediaStreamDescriptions = new List<MediaStreamDescription>();

            this.videoStreamDescription = new MediaStreamDescription(MediaStreamType.Video, videoStreamAttributes);
            mediaStreamDescriptions.Add(this.videoStreamDescription);

            this.audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, audioStreamAttributes);
            mediaStreamDescriptions.Add(this.audioStreamDescription);

            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.MaxValue.Ticks.ToString(CultureInfo.InvariantCulture);
            mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = false.ToString();

            this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
        }
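GetVideoSettings and GetAudioSettings are not shown. Their required shape follows from the other examples here; a hypothetical sketch of the video half, with placeholder dimensions:

        private static Dictionary<MediaStreamAttributeKeys, string> GetVideoSettings()
        {
            Dictionary<MediaStreamAttributeKeys, string> attributes = new Dictionary<MediaStreamAttributeKeys, string>();
            attributes[MediaStreamAttributeKeys.VideoFourCC] = "H264";
            attributes[MediaStreamAttributeKeys.Width] = "1280";  // placeholder
            attributes[MediaStreamAttributeKeys.Height] = "720";  // placeholder
            return attributes;
        }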
Code Example #23
        /// <summary>
        /// Callback which handles setting up an MSS once the first MpegFrame after Id3v2 data has been read.
        /// </summary>
        /// <param name="mpegLayer3Frame"> First MpegFrame</param>
        /// <param name="mediaStreamAttributes">Empty dictionary for MediaStreamAttributes</param>
        /// <param name="mediaStreamDescriptions">Empty dictionary for MediaStreamDescriptions</param>
        /// <param name="mediaSourceAttributes">Empty dictionary for MediaSourceAttributes</param>
        private void ReadPastId3v2TagsCallback(
            MpegFrame mpegLayer3Frame,
            Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes,
            List<MediaStreamDescription> mediaStreamDescriptions,
            Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes)
        {
            if (mpegLayer3Frame.FrameSize <= 0)
            {
                throw new InvalidOperationException("MpegFrame's FrameSize must be positive");
            }

            // Initialize the Mp3 data structures used by the Media pipeline with state from the first frame.
            WaveFormatExtensible wfx = new WaveFormatExtensible();
            this.MpegLayer3WaveFormat = new MpegLayer3WaveFormat();
            this.MpegLayer3WaveFormat.WaveFormatExtensible = wfx;

            this.MpegLayer3WaveFormat.WaveFormatExtensible.FormatTag = 85;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.Channels = (short)((mpegLayer3Frame.Channels == Channel.SingleChannel) ? 1 : 2);
            this.MpegLayer3WaveFormat.WaveFormatExtensible.SamplesPerSec = mpegLayer3Frame.SamplingRate;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond = mpegLayer3Frame.Bitrate / 8;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.BlockAlign = 1;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.BitsPerSample = 0;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.ExtraDataSize = 12;

            this.MpegLayer3WaveFormat.Id = 1;
            this.MpegLayer3WaveFormat.BitratePaddingMode = 0;
            this.MpegLayer3WaveFormat.FramesPerBlock = 1;
            this.MpegLayer3WaveFormat.BlockSize = (short)mpegLayer3Frame.FrameSize;
            this.MpegLayer3WaveFormat.CodecDelay = 0;

            mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = this.MpegLayer3WaveFormat.ToHexString();
            this.audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);

            mediaStreamDescriptions.Add(this.audioStreamDescription);

            this.trackDuration = new TimeSpan(0, 0, (int)(this.audioStreamLength / MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond));
            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = this.trackDuration.Ticks.ToString(CultureInfo.InvariantCulture);
            if (this.audioStream.CanSeek)
            {
                mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "1";
            }
            else
            {
                mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "0";
            }

            // Report that the Mp3MediaStreamSource has finished initializing its internal state and can now
            // pass in Mp3 Samples.
            this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);

            this.currentFrame = mpegLayer3Frame;
            this.currentFrameStartPosition = MpegFrame.FrameHeaderSize;
        }
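The duration estimate above is plain byte arithmetic: at 128 kbps, AverageBytesPerSecond is 16,000, so a 4,800,000-byte stream (illustrative figure) comes out as 4,800,000 / 16,000 = 300 seconds.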
Code Example #24
        /// <summary>
        /// See description of abstract method for general information about this method.
        /// </summary>
        /// <param name="data"></param>
        /// <param name="offset"></param>
        /// <param name="count"></param>
        /// <returns></returns>
        protected override int ParseData(byte[] data, int offset, int count)
        {
            const int ADTSSyncWords = 0xfff0; // mask for the 12-bit ADTS sync word in the top bits
            int syncOffset = offset;
            if (count < MaxADTSHeaderLength)
                return 0;

            uint syncBits = (uint)((data[syncOffset] << 8) | data[syncOffset + 1]);

            // Search for valid sync bits (0xFFF); ignore 0xFFFF, which is an invalid ADTS header and could be stuffing bits.
            while ((syncBits == 0xffff || (syncBits & ADTSSyncWords) != ADTSSyncWords) && (offset + count - syncOffset) >= 3)
            {
                syncOffset++;
                syncBits = (uint)((data[syncOffset] << 8) | data[syncOffset + 1]);
            }

            if ((syncBits & ADTSSyncWords) != ADTSSyncWords)
            {
                return count - 1;
            }

            if ((offset + count - syncOffset) < MaxADTSHeaderLength)
            {
                return 0;
            }

            _bitstream.Init(data, syncOffset);
            _bitstream.SkipBits(12);

            uint mpeg_version = _bitstream.ReadUBits(1);
            uint mpeg_layer = _bitstream.ReadUBits(2);
            uint protection_absent = _bitstream.ReadUBits(1);
            uint profile_code = _bitstream.ReadUBits(2);
            uint sampling_rate_code = _bitstream.ReadUBits(4);
            _bitstream.SkipBits(1);
            uint channel_config = _bitstream.ReadUBits(3);

            _bitstream.SkipBits(4);

            int header_length = protection_absent != 0 ? 7 : 9;
            int frame_length = _bitstream.ReadBits(13);
            _bitstream.SkipBits(11);

            int numberOfAACFrames = _bitstream.ReadBits(2) + 1;

            if (sampling_rate_code >= _aacSamplingRatesFromRateCode.Length)
            {
                HLSTrace.WriteLine(" no good!!!! bad ADTS sync word, skip it ");
                return syncOffset - offset + 2;
            }

            if (syncOffset > offset)
            {
                // the audio frame is not started from the PES buffer boundary, read next frame to be sure.
                if (count < syncOffset + frame_length + MaxADTSHeaderLength)
                {
                    // return 0 to get more data, need to read to next frame
                    return syncOffset - offset - 1;
                }
                else
                {
                    uint syncBitsNext = (uint)((data[syncOffset + frame_length] << 8) | data[syncOffset + frame_length + 1]);
                    if (frame_length == 0 || syncBitsNext == 0xffff || (syncBitsNext & ADTSSyncWords) != ADTSSyncWords)
                    {
                        // bad, did not find next sync bits after frame length, this is bad sync bits. skip the fake sync bits
                        return syncOffset - offset + 2;
                    }
                }
            }

            Debug.Assert(numberOfAACFrames == 1);

            int samplingRate = _aacSamplingRatesFromRateCode[sampling_rate_code];

            // Each ADTS frame contains 1024 raw PCM samples in encoded format.
            // Therefore, the duration of each frame in seconds is given by
            // 1024/(sampling frequency). The time stamps passed to MediaElement
            // are in Hns (100 nanosecond) increments. Therefore, frame duration
            // is given by  10,000,000 * 1024 / SamplingFrequency
            long frameDuration = (long)(10000000.00 * 1024.00 / (double)samplingRate);

            if (_PTSTimestampList.Count == 0)
            {
                // This ADTS frame does not have a PTS from PES header, and therefore
                // we should calculate its PTS based on the time passed since last frame.
                _currentFrameTimeStamp += frameDuration;
            }
            else
            {
                _baseTimeStamp = _PTSTimestampList[0];
                _currentFrameTimeStamp = _PTSTimestampList[0];
                _PTSTimestampList.RemoveAt(0);
            }

            BeginSample(frame_length - header_length, frameDuration, _currentFrameTimeStamp);

            if (Description == null)
            {
                if (channel_config != 1 && channel_config != 2)
                    throw new ADTSParserException("unsupported channel config");

                ushort numberOfChannels = (ushort)channel_config;
                ushort aacProfile = (ushort)profile_code;
                const ushort sampleSize = 16;

                _aacInfo = new AACAudioFrameInfo();
                _aacInfo.NoOfSamples = 1024;
                _aacInfo.OutFrameSize = 1024 * numberOfChannels * 2;
                _aacInfo.SamplingFrequency = samplingRate;
                _aacInfo.NoOfChannels = numberOfChannels;
                _aacInfo.Profile = aacProfile;
                _aacInfo.OutSamplingFrequency = samplingRate;
                _aacInfo.ExtObjectType = 0;
                _aacInfo.DownSampledMode = 0;

                _waveFormat = new AudioDataTypesHelper.WAVEFORMATEX();
                _waveFormat.formatTag = 0x1601;      // AAC format flag.
                _waveFormat.channels = numberOfChannels;
                _waveFormat.bitsPerSample = sampleSize;
                _waveFormat.samplesPerSec = samplingRate;
                _waveFormat.avgBytesPerSec = numberOfChannels * _waveFormat.samplesPerSec * sampleSize / 8;
                _waveFormat.blockAlign = (ushort)(numberOfChannels * sampleSize / 8);
                _waveFormat.size = 0x20;  // size of AACAudioFrameInfo. 4 * 8

                Dictionary<MediaStreamAttributeKeys, string> streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
                streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = GetCodecPrivateData();
                Description = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);
            }

            return header_length + ( syncOffset - offset );
        }
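As a sanity check on the frame-duration formula used above: each ADTS frame carries 1024 PCM samples, so at 44100 Hz one frame lasts 10,000,000 × 1024 / 44100 ≈ 232,199 Hns, roughly 23.2 ms.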
Code Example #25
File: PpboxSource.cs Project: uvbs/MyProjects
 private static void ParseAvcConfig(
     MediaStreamDescription stream, 
     List<MediaStreamSample> samples, 
     byte[] data)
 {
     System.IO.Stream ios = new System.IO.MemoryStream(data);
     ios.Seek(5, System.IO.SeekOrigin.Begin);
     int num_sps = ios.ReadByte() & 0x1f;
     for (int i = 0; i < num_sps; ++i)
     {
         int len_sps = (ios.ReadByte() << 8) | ios.ReadByte();
         byte[] sps = new byte[len_sps];
         ios.Read(sps, 0, len_sps);
         samples.Add(new MediaStreamSample(
             stream, 
             new System.IO.MemoryStream(sps), 
             0, 
             len_sps, 
             0, 
             new Dictionary<MediaSampleAttributeKeys, string>()));
     }
     int num_pps = ios.ReadByte();
     for (int i = 0; i < num_pps; ++i)
     {
         int len_pps = (ios.ReadByte() << 8) | ios.ReadByte();
         byte[] pps = new byte[len_pps];
         ios.Read(pps, 0, len_pps);
         samples.Add(new MediaStreamSample(
             stream, 
             new System.IO.MemoryStream(pps), 
             0, 
             len_pps, 
             0, 
             new Dictionary<MediaSampleAttributeKeys, string>()));
     }
 }
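For reference, the layout this method walks is the standard AVCDecoderConfigurationRecord (avcC): the five skipped bytes are configurationVersion, AVCProfileIndication, profile_compatibility, AVCLevelIndication, and lengthSizeMinusOne; the low five bits of the next byte give the SPS count; each parameter set is prefixed with a 16-bit big-endian length; and a one-byte PPS count separates the SPS and PPS groups.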
Code Example #26
File: CmmbStreamSource2.cs Project: yspxman/CMTVDemo
        protected override void OpenMediaAsync()
        {
            //WaveFormatEx
            HeAacWaveFormat aacf = new HeAacWaveFormat();
            WaveFormatExtensible wfx = new WaveFormatExtensible();
            aacf.WaveFormatExtensible = wfx;

            aacf.WaveFormatExtensible.FormatTag = 0x1610; // WAVE_FORMAT_MPEG_HEAAC
            aacf.WaveFormatExtensible.Channels = 2;
            aacf.WaveFormatExtensible.BlockAlign = 1;
            aacf.WaveFormatExtensible.BitsPerSample = 0; // unknown; set to 0
            aacf.WaveFormatExtensible.SamplesPerSec = 24000; // valid range is 8000 to 96000 Hz
            aacf.WaveFormatExtensible.AverageBytesPerSecond = 0; // unknown; set to 0
            aacf.WaveFormatExtensible.Size = 12; // 12 extra bytes: the HEAACWAVEINFO fields below

            // Extra 3 words in WAVEFORMATEX
            // refer to http://msdn.microsoft.com/en-us/library/windows/desktop/dd757806(v=vs.85).aspx
            aacf.wPayloadType = 0x0; //Audio Data Transport Stream (ADTS). The stream contains an adts_sequence, as defined by MPEG-2.
            aacf.wAudioProfileLevelIndication = 0xFE;
            aacf.wStructType = 0;

            string codecPrivateData = aacf.ToHexString();

            Dictionary<MediaStreamAttributeKeys, string> audioStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
            audioStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = codecPrivateData;
            audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, audioStreamAttributes);

            m_vbuffer.WaitForWorkItem();

            m_curVideoBlk = m_vbuffer.Dequeue().CommandParameter as MSF.VideoBlock;

            if (m_curVideoBlk == null)
                return;
            vIdx = 0;
            fNum = (int)m_curVideoBlk.VideoFrameNum;

            H264NalFormat h264f = new H264NalFormat();
            h264f.sps = m_curVideoBlk.FirstIFrameInfo.sps;
            h264f.pps = m_curVideoBlk.FirstIFrameInfo.pps;
            string s = h264f.ToHexString();

            //Video
            Dictionary<MediaStreamAttributeKeys, string> videoStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
            videoStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "H264";
            videoStreamAttributes[MediaStreamAttributeKeys.Height] = "240";
            videoStreamAttributes[MediaStreamAttributeKeys.Width] = "320";
            videoStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = s; // SPS/PPS from the first I-frame, e.g. "0000016742E00D96520283F40500000168CE388000"
            videoStreamDescription = new MediaStreamDescription(MediaStreamType.Video, videoStreamAttributes);

            //Media
            Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromSeconds(6).Ticks.ToString(CultureInfo.InvariantCulture);
            mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "0";

            List<MediaStreamDescription> mediaStreamDescriptions = new List<MediaStreamDescription>();

            #if !DEBUG
            // The emulator does not support HE-AAC, so the audio stream is only added in release builds.
            mediaStreamDescriptions.Add(audioStreamDescription);
            #endif

            mediaStreamDescriptions.Add(videoStreamDescription);

            this.AudioBufferLength = 500;
            this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
        }
Code Example #27
File: MediaStreamSource.cs Project: kangaroo/moon
		protected abstract void SwitchMediaStreamAsync (MediaStreamDescription mediaStreamDescription);		
Code Example #28
File: MediaStreamSample.cs Project: ya1gaurav/moon
 public MediaStreamSample(MediaStreamDescription mediaStreamDescription, Stream stream, long offset, long count, long timestamp, IDictionary <MediaSampleAttributeKeys, string> attributes)
     : this(mediaStreamDescription, stream, offset, count, timestamp, 0, attributes)
 {
 }
Code Example #29
 protected override void CloseMedia()
 {
     startPosition = currentPosition = 0;
     mediaStreamDescription = null;
 }
Code Example #30
 /// <summary>
 /// Stream media stream.
 /// Not implemented
 /// </summary>
 /// <param name="mediaStreamDescription">The mediaStreamDescription that we want to switch to</param>
 protected override void SwitchMediaStreamAsync(MediaStreamDescription mediaStreamDescription)
 {
     throw new NotImplementedException();
 }
Code Example #31
File: MediaStreamSource.cs Project: kangaroo/moon
		static void SwitchMediaStreamAsyncInternal (IntPtr instance, MediaStreamDescription mediaStreamDescription)
		{
			try {
				FromIntPtr (instance).SwitchMediaStreamAsyncInternal (mediaStreamDescription);
			} catch (Exception ex) {
				try {
					Console.WriteLine ("Unhandled exception in MediaStreamSource.SwitchMediaStreamAsyncInternal: {0}", ex);
				} catch {
				}
			}
		}
Code Example #32
 protected override void SwitchMediaStreamAsync(MediaStreamDescription mediaStreamDescription)
 {
     // nothing.
 }
Code Example #33
File: MediaStreamSource.cs Project: kangaroo/moon
		internal void SwitchMediaStreamAsyncInternal (MediaStreamDescription mediaStreamDescription)
		{
			media_element.Dispatcher.BeginInvoke (delegate () {
				SwitchMediaStreamAsync (mediaStreamDescription);
			});
		}