Example #1
0
 /// <summary>
 /// Default constructor
 /// </summary>
 public ISMVTrackFormat()
 {
     this.CurrentFragment = null;
     fragSequenceNum = 1;
 }
Example #2
0
        public void Write(TextWriter ISM, string inOrigISMCDir, string inName, ISMCFile ismc)
        {
            string tmpRate = _bitrate.ToString();
            tmpRate = tmpRate.Substring(0, tmpRate.Length - 3); // drop the last three digits (bps -> kbps) for the file name suffix

            if (_fragmentType == MP4.FragmentType.Video)
                ISM.WriteLine("      <video");
            else if (_fragmentType == MP4.FragmentType.Audio)
                ISM.WriteLine("      <audio");
            else return;

            ISM.WriteLine("        src=\"" + inName + "_" + tmpRate + ".ismv\"");
            ISM.WriteLine("        systemBitrate=\"" + _bitrate + "\">");
            if (ismc == null)
            {
                ISM.WriteLine("        <param");
                ISM.WriteLine("          name=\"trackID\"");
                ISM.WriteLine("          value=\"2\"");
                ISM.WriteLine("          valuetype=\"data\" />");
            }
            else
            {
                ISM.WriteLine("        <param");
                ISM.WriteLine("          name=\"trackID\"");
                ISM.WriteLine("          value=\"2\"");
                ISM.WriteLine("          valuetype=\"data\"");
                ISM.Write("          chunks=\"");

                //Console.WriteLine("Video Bitrate: " + vid.systemBitrate);
                ISMVFile vfile = new ISMVFile(inOrigISMCDir, _source);

                string chunkData = "";
                int chunkId = 0;
                ulong currTime = 0;
                Fragment frag = new Fragment();
                foreach (c cidx in ismc.indexs[0].cs)
                {
                    ulong ChunkStart = 0;
                    ulong ChunkLen = 0;
                    // TODO: need to fix this line: vfile.GetFragmentPosition(currTime, 2, out ChunkStart, out ChunkLen);

                    currTime += ulong.Parse(cidx.d);
                    chunkId++;

                    //Console.WriteLine(" ChunkStart: " + ChunkStart + " ChunkLen: " + ChunkLen);
                    chunkData += ChunkStart + "-" + ChunkLen + ",";
                }
                ISM.Write(chunkData); // write the accumulated chunk list once; writing it inside the loop would repeat earlier chunks
                ISM.WriteLine("\">");
                ChunkInfo[_bitrate.ToString()] = chunkData;
            }

            if (_fragmentType == MP4.FragmentType.Video)
                ISM.WriteLine("      </video>");
            else if (_fragmentType == MP4.FragmentType.Audio)
                ISM.WriteLine("      </audio>");
        }
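A minimal usage sketch, not part of the original source: the class hosting Write is not identified in the snippet, so the ISMVTrackFormat parameter type, the output path, and the source directory below are assumptions, and the expected output in the comment assumes a 1,500,000 bps video track with ismc passed as null.

        // hypothetical caller; with a null ISMCFile the simple <param> block is emitted
        static void WriteManifestEntry(ISMVTrackFormat trackFormat)
        {
            // for a 1,500,000 bps video track this emits roughly:
            //   <video
            //     src="movie_1500.ismv"
            //     systemBitrate="1500000">
            //     <param
            //       name="trackID"
            //       value="2"
            //       valuetype="data" />
            //   </video>
            using (TextWriter ism = new StreamWriter("movie.ism"))
            {
                trackFormat.Write(ism, @"C:\media\source", "movie", null);
            }
        }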
Example #3
0
        // assumption: this can ONLY be called when we know the next box is a fragment...
        private void ScanForAudioOrVideo()
        {
            long pos = boxReader.BaseStream.Position;
            do
            {
                long fragPos = boxReader.BaseStream.Position;
                Fragment frag = new Fragment();

                int trakID = (int)frag.GetMP4TrackID(boxReader);
                if (GetFragmentHandlerType(trakID) == _handlerType)
                {
                    if (_handlerType.Equals("soun"))
                    {
                        nextAudioFragPosition = fragPos;
                        audioTrackID = trakID;
                    }
                    else if (_handlerType.Equals("vide"))
                    {
                        nextVideoFragPosition = fragPos;
                        videoTrackID = trakID;
                    }
                    break;
                }
            } while (this.boxReader.PeekNextBoxType() == BoxTypes.MovieFragment);
            boxReader.BaseStream.Position = pos;
            audiovideoScanCompleted = true;
        }
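The scan above leans on GetFragmentHandlerType, whose implementation is not included in these examples. A plausible sketch, assuming the moov box (mmb) exposes its track boxes the same way Example #4 uses them and that TrackBox carries its track ID in a header box, might look like this:

        // assumed helper (not in the original source): map a fragment's track ID to the
        // handler type ("soun" or "vide") declared for that track in the moov box
        private string GetFragmentHandlerType(int trackID)
        {
            foreach (TrackBox trakBx in mmb.TrackBoxes)
            {
                if ((int)trakBx.TrackHeaderBox.TrackID == trackID)
                    return trakBx.MediaBox.HandlerReferenceBox.HandlerType;
            }
            return null;
        }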
Example #4
0
        /// <summary>
        /// Read
        /// Every time we get here and base.Read() is called, a new fragment is created and becomes CurrentFragment of MP4TrackFormat.
        /// For a fragmented file, the first 'read' will really just scan for whether there is sourceAudio and/or sourceVideo
        /// and set positions for the start of each type of fragment, prepping for another read to actually find the
        /// sourceAudio and sourceVideo fragment.
        /// </summary>
        public void Read()
        {
            if (!audiovideoScanCompleted)
            {
                ReadMP4Headers();

                if (mmb == null) throw new Exception("Moov box must come before moof");
                ScanForAudioOrVideo();
            }

            // read only one fragment at a time into each trakBx
            {
                // this means we have already done the prep work, etc., and now all we care about are fragments in
                // non-linear order...
                bool moreFragments = false;

                CurrentFragment = null; // we're done with the current fragment, get rid of it

                foreach (TrackBox trakBx in mmb.TrackBoxes)
                {
                    string handler = trakBx.MediaBox.HandlerReferenceBox.HandlerType;
                    if ((handler != null) && (handler.Equals(_handlerType)))
                    {
                        if (handler == "soun")
                        {
                            CurrentFragment = GetNextAudioFrag();
                        }
                        else if (handler == "vide")
                        {
                            CurrentFragment = GetNextVideoFrag();
                        }

                        moreFragments |= (CurrentFragment != null);

                        ulong oneSecondTicks = (ulong)TimeSpan.FromSeconds(1.0).Ticks;
                        if (CurrentFragment != null)
                            FragmentDuration = (ulong)((CurrentFragment.Duration * oneSecondTicks) / trakBx.MediaBox.MediaHeaderBox.TimeScale);
                    }
                }

                //if (!moreFragments)
                //  base.EOF = true; // end of file
            }
        }
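A sketch of how Read() might be driven, following the scan-then-read behavior described in the summary above. The helper below is hypothetical: how the track instance gets bound to its ISMV source is not shown in these examples, and terminating on a null CurrentFragment is an assumption based on GetNextAudioFrag/GetNextVideoFrag returning null when no fragments remain.

        // hypothetical driver; assumes the caller has already bound the track to its ISMV source
        static void ReadAllFragments(ISMVTrackFormat track)
        {
            while (true)
            {
                track.Read();                      // first call scans, each call yields one fragment
                if (track.CurrentFragment == null) // no more fragments of this handler type
                    break;
                Console.WriteLine("fragment duration in ticks: " + track.FragmentDuration);
            }
        }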
Example #5
0
        /// <summary>
        /// PrepareSampleWriting
        /// Do not call the base method from here because fragment processing is separate from moov box processing.
        /// </summary>
        /// <param name="streamLocations"></param>
        /// <param name="currMdatOffset"></param>
        public override void PrepareSampleWriting(List<StreamDataBlockInfo> streamLocations, ref ulong currMdatOffset)
        {
            if (this.CurrentFragment != null)
                throw new Exception("CurrentFragment must be null when ISMVTrackFormat.PrepareSampleWriting is entered");

            // prepare parameters for creating a fragment
            // our output will always use track ID 1 for audio
            //uint trackID = (uint)((streamLocations[0].SampleType == SampleType.MP4A || streamLocations[0].SampleType == SampleType.WMA) ? 1 : 2);
            //uint trackID = (uint)(IsAudio(streamLocations[0].SampleType) ? 1 : 2);

            // this is what the fragment run flags should be for ISMV file output
            // FIXME: this is ad hoc, need to figure out exactly which bits are correct for these flags
            uint fragmentRunFlags;
            uint defaultSampleFlags;
            if (IsAudio(streamLocations[0].SliceType))
            {
                fragmentRunFlags = (uint)0x301; // magic number for audio
                defaultSampleFlags = (uint)0x8002;
            }
            else
            {
                fragmentRunFlags = (uint)0xb05; // magic number for video
                defaultSampleFlags = (uint)0x4001;
            }

            // for audio, all samples are of the same size
            uint sampleSize = (IsAudio(streamLocations[0].SliceType)) ? (uint)streamLocations[0].SliceSize : 0;

            // first, create the fragment boxes
            CurrentFragment = new Fragment(fragSequenceNum, base.TrackID, streamLocations.Count, fragmentRunFlags, defaultSampleFlags, sampleSize);
            if (CurrentFragment.Duration != _ismElement.FragmentDurations[(int)fragSequenceNum])
                throw new Exception("ISMVTrackFormat: mismatch in fragment duration between ISMC file and MP4");
            currMdatOffset += this.CurrentFragment.DataOffset;

            fragSequenceNum++;

            // now prepare the samples in this fragment, without copying the sample bits
            this.CurrentFragment.AddSampleStream(streamLocations, base.TimeScale, ref currMdatOffset);
        }
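One way to read the "magic" fragment run flag values above, assuming they map directly onto the track fragment run ('trun') box flag bits defined in ISO/IEC 14496-12; the Fragment constructor is not shown, so this mapping is an interpretation rather than something the source confirms.

        // interpretation only: trun box flag bits per ISO/IEC 14496-12
        [Flags]
        enum TrunFlags : uint
        {
            DataOffsetPresent                  = 0x000001,
            FirstSampleFlagsPresent            = 0x000004,
            SampleDurationPresent              = 0x000100,
            SampleSizePresent                  = 0x000200,
            SampleFlagsPresent                 = 0x000400,
            SampleCompositionTimeOffsetPresent = 0x000800,
        }
        // 0x301 (audio) = DataOffsetPresent | SampleDurationPresent | SampleSizePresent
        // 0xb05 (video) = DataOffsetPresent | FirstSampleFlagsPresent | SampleDurationPresent
        //               | SampleSizePresent | SampleCompositionTimeOffsetPresent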
Example #6
0
        private Fragment GetNextVideoFrag()
        {
            if (nextVideoFragPosition < 0) return (null); // there are no more!!
            boxReader.BaseStream.Position = nextVideoFragPosition;

            // we know where at least the next frag is, as we prepared this prior to the call of this function...
            Fragment answer = new Fragment(GetTimeScale(videoTrackID), GetPayloadType(videoTrackID));
            answer.Read(boxReader);

            nextVideoFragPosition = -1;
            while (this.boxReader.PeekNextBoxType() == BoxTypes.MovieFragment)
            {
                long fragPos = boxReader.BaseStream.Position;
                Fragment tmp = new Fragment();

                int trakID = (int)tmp.GetTrackID(boxReader);
                if (GetFragmentHandlerType(trakID) == "vide")
                {
                    nextVideoFragPosition = fragPos;
                    break;
                }
            }

            return (answer);
        }
Example #7
-1
        private Fragment GetNextVideoFrag()
        {
            if (nextVideoFragPosition < 0) return (null); // there are no more!!
            boxReader.BaseStream.Position = nextVideoFragPosition;

            // we know where at least the next frag is, as we prepared this prior to the call of this function...
            Fragment answer = new Fragment(GetTimeScale(videoTrackID), GetPayloadType(videoTrackID), runningTimeIn100NanoSecs, runningSliceIndex);
            answer.Read(boxReader);
            runningTimeIn100NanoSecs += (ulong)TimeArithmetic.ConvertToStandardUnit(answer.TimeScale, (decimal)answer.Duration);
            runningSliceIndex += answer.Length;

            nextVideoFragPosition = -1;
            while (this.boxReader.PeekNextBoxType() == BoxTypes.MovieFragment)
            {
                long fragPos = boxReader.BaseStream.Position;
                Fragment tmp = new Fragment();

                int trakID = (int)tmp.GetMP4TrackID(boxReader);
                if (GetFragmentHandlerType(trakID) == "vide")
                {
                    nextVideoFragPosition = fragPos;
                    break;
                }
            }

            return (answer);
        }
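The running-time bookkeeping in this version relies on TimeArithmetic.ConvertToStandardUnit, which is not included in these examples. A plausible sketch, assuming the "standard unit" is the .NET tick (100 nanoseconds, as the field name runningTimeIn100NanoSecs suggests), would be:

        // assumed helper: converts a duration expressed in media timescale units
        // to 100-nanosecond ticks (10,000,000 ticks per second)
        public static class TimeArithmetic
        {
            public const decimal TicksPerSecond = 10000000M;

            public static decimal ConvertToStandardUnit(uint timeScale, decimal durationInTimeScaleUnits)
            {
                return durationInTimeScaleUnits * TicksPerSecond / timeScale;
            }
        }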