Example 1
        public static AudioStreamDescriptor CreateAudioDesc(this AudioTag tag, ScriptDataTag MediaData)
        {
            //{"duration":6,"width":640,"height":360,"videodatarate":700,"framerate":30,"videocodecid":4,"audiodatarate":128,"audiodelay":0.038,"audiocodecid":2,"canSeekToEnd":true}
            //{  "duration":30.093,"width":512,"height":288,
            //   "videodatarate":1019.6845703125,"framerate":99999.999999999985,
            //   "videocodecid":7,"audiodatarate":130.0380859375,"audiosamplerate":44100,"audiosamplesize":16,
            //   "stereo":true,"audiocodecid":10,"filesize":4460895  }
            var m            = MediaData;
            var sampleRate   = (uint)m["audiosamplerate"].GetNumber();
            var channelCount = (uint)2; // channel count is fixed to stereo here; the "stereo" metadata flag is not consulted
            var bitRate      = (uint)m["audiodatarate"].GetNumber();

            if (sampleRate < 10)
            {
                // Metadata carried no usable sample rate; log it and fall back to a guess.
                Debug.WriteLine("AudioDesc ERROR: invalid sampleRate: " + sampleRate);
                sampleRate = bitRate > 120U ? 22050U : 44100U;
            }
            AudioEncodingProperties encode = null;

            if (tag.Codec == SoundFormat.AAC)
            {
                encode = AudioEncodingProperties.CreateAac(sampleRate, channelCount, bitRate);
            }
            else if (tag.Codec == SoundFormat.MP3)
            {
                encode = AudioEncodingProperties.CreateMp3(sampleRate, channelCount, bitRate);
            }
            // encode stays null for any codec other than AAC or MP3
            Debug.WriteLine("AudioDesc ## " + tag.Codec + "  " + sampleRate + " " + channelCount + " " + bitRate);

            return(new AudioStreamDescriptor(encode));
        }
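
A minimal usage sketch, not part of the original example: feeding the descriptor returned by CreateAudioDesc into a MediaStreamSource. The "flv" variable stands for a parsed FlvParse instance and the handler name is hypothetical.

        // Sketch only: "flv" is assumed to be a parsed FlvParse instance.
        var audioTag   = flv.Audios[0];
        var descriptor = audioTag.CreateAudioDesc(flv.MediaData);
        var source     = new MediaStreamSource(descriptor);
        source.SampleRequested += OnSampleRequested; // hypothetical handler; a sketch of it follows Example 2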
Example 2
        public static async Task <MediaStreamSample> CreateAudioSample(this AudioTag tag)
        {
            var stream = tag.GetDataStream();
            var sample = await MediaStreamSample.CreateFromStreamAsync(
                stream.AsInputStream(),
                (uint)stream.Length,
                tag.TimeSpan); // data size of each segment

            //sample.Duration = tag.TimeSpan;//BUG
            return(sample);

            #region Earlier attempts (kept for reference)


            //Debug.WriteLine(tag.GetDataStream().toString(30));
            //var ss = tag.GetDataInputStream();
            //var sample = await MediaStreamSample.CreateFromStreamAsync(ss, tag.Count, tag.TimeSpan); // data size of each segment
            //return sample;

            /*
             * var stream = tag.GetDataStream();//.GetDataStream().AsRandomAccessStream().GetInputStreamAt();
             * var si = stream.AsInputStream();
             * var sample = await MediaStreamSample.CreateFromStreamAsync(si, (uint)stream.Length, tag.TimeSpan);
             */
            //stream.Position = (long)a.Offset; // a concurrent or interleaved operation changed the state of the object, invalidating this operation
            //var ss = stream.AsInputStream(); //A concurrent or interleaved operation changed the state of the object, invalidating this operation. (Exception from HRESULT: 0x8000000C)

            //var ss = stream.AsRandomAccessStream().GetInputStreamAt(a.Offset);//success
            //var ss = stream.GetInputStreamAt(a.Offset);//success

            //sample.Duration = a.TimeSpan - at; // how far the progress bar advances for each segment
            //sample.KeyFrame = true;
            #endregion
        }
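
A sketch, again assumed rather than taken from the source, of how CreateAudioSample could answer a MediaStreamSource.SampleRequested callback; the deferral keeps the request open across the await. The "flv" and "audioIndex" fields are illustrative only.

        // Sketch only: "flv" and "audioIndex" are hypothetical fields, not from the original code.
        private async void OnSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
        {
            var deferral = args.Request.GetDeferral();    // hold the request while awaiting
            var tag      = flv.Audios[audioIndex++];      // next audio tag to deliver
            args.Request.Sample = await tag.CreateAudioSample();
            deferral.Complete();
        }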
Example 3
 public FlvParse(Stream stream)
 {
     Head = new FlvHead(stream);
     if (Head.Signature != "FLV")
     {
         throw new Exception("Invalid format: " + Head.Signature);
     }
     while (true)
     {
         var    PreviousTagSize = stream.ReadUInt32(); // read and skip the 4-byte PreviousTagSize field that precedes each tag
         FlvTag flvTag          = FlvTag.createTag(stream);
         if (flvTag == null)
         {
             break;
         }
         var      data = flvTag.LoadTagData(stream);
         VideoTag v    = flvTag as VideoTag;
         if (v != null)
         {
             Videos.Add(v);
         }
         AudioTag a = flvTag as AudioTag;
         if (a != null)
         {
             Audios.Add(a);
         }
         ScriptDataTag d = flvTag as ScriptDataTag;
         if (d != null)
         {
             MediaData = d;
         }
         tags.Add(flvTag);
     }
 }
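
An assumed end-to-end sketch: opening a local FLV file, parsing it with FlvParse, and building an audio descriptor from the collected tags. The file path and the visibility of the Videos, Audios, and MediaData members are assumptions for illustration.

 // Sketch only: path and member visibility are assumptions.
 using (var fileStream = File.OpenRead("sample.flv"))
 {
     var flv = new FlvParse(fileStream); // reads the header and every tag up front
     Debug.WriteLine("video tags: " + flv.Videos.Count + ", audio tags: " + flv.Audios.Count);
     var audioDesc = flv.Audios[0].CreateAudioDesc(flv.MediaData);
 }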