Exemple #1
0
        /// <summary>
        /// Reads an OpenDML ('odml'/'dmlh') extended header starting at offset p.
        /// Only the total-frame count is read from it.
        /// </summary>
        private static ODMLHeader ParseOdmlHeader(AtomicBinaryReader br, long p)
        {
            var header = new ODMLHeader();
            header.dwTotalFrames = br.ReadUInt32(ref p);
            return header;
        }
Exemple #2
0
        /// <summary>
        /// Parses a legacy AVI 1.0 'idx1' index chunk into an AviStreamIndex,
        /// keeping only the entries that belong to the given stream.
        /// Each idx1 entry is 16 bytes: ckid, flags, offset, length.
        /// </summary>
        private static AviStreamIndex ParseOldIndex(long idx1Offset, AtomicBinaryReader abr, int size, uint streamId, long idx1EntryOffset)
        {
            int entryCount = (int)(size / 16);

            var index = new AviStreamIndex();
            index.streamId = streamId;
            // Preallocate: avoids repeated list growth while filtering entries.
            index.entries.Capacity = entryCount;

            // One bulk read of the whole index payload is much faster than
            // reading the four uints of every entry individually.
            long readPos = idx1Offset;
            var raw = new uint[entryCount * 4];
            abr.Read(ref readPos, raw, 0, entryCount * 4);

            for (int e = 0; e < entryCount; e++)
            {
                int baseIdx = e * 4;
                uint ckid = raw [baseIdx];

                // Accept chunks tagged for this stream; '00db' (uncompressed video)
                // chunks also belong to the '00dc' video stream.
                bool belongsToStream = ckid == streamId ||
                                       (ckid == AviDemux.ID_00db && streamId == AviDemux.ID_00dc);
                if (!belongsToStream)
                {
                    continue;
                }

                var entry = new AviStreamIndex.Entry();
                entry.isKeyframe  = (raw [baseIdx + 1] & 0x00000010) != 0; // AVIIF_KEYFRAME flag
                entry.chunkOffset = idx1EntryOffset + raw [baseIdx + 2];
                entry.chunkLength = (int)raw [baseIdx + 3];
                index.entries.Add(entry);
            }
            return index;
        }
Exemple #3
0
        /// <summary>
        /// Parses an OpenDML standard ('ix..') chunk index located at offset p and
        /// appends its entries to the given stream index. Only the
        /// AVI_INDEX_OF_CHUNKS layout with 2 longs per entry is supported.
        /// </summary>
        /// <exception cref="MpException">Thrown when the chunk id or the index
        /// header doesn't match what this parser supports.</exception>
        private static void ParseChunkIndex(AtomicBinaryReader reader, long p, ref AviStreamIndex index)
        {
            // read ix.. chunk id and size. do sanity check
            uint ixChunkFCC  = reader.ReadUInt32(ref p);
            // Normalize "ix??" to "ix  ": keep the low 16 bits ('i','x') and force
            // the two high bytes to ASCII space (0x20) so any stream number matches.
            uint ixChunkFCCb = (ixChunkFCC & 0x0000FFFF) | 0x20200000;

            if (ixChunkFCCb != RiffParser.ToFourCC("ix  ") && ixChunkFCC != RiffParser.ToFourCC("indx"))
            {
                throw new MpException("Unexpected chunk id for index " + RiffParser.FromFourCC(ixChunkFCC) +
                                      " for stream " + RiffParser.FromFourCC(index.streamId));
            }
            uint ixChunkSize = reader.ReadUInt32(ref p);

            // read index data header and do sanity check
            ushort wLongsPerEntry = reader.ReadUInt16(ref p);
            byte   bSubIndexType  = reader.ReadByte(ref p);
            byte   bIndexType     = reader.ReadByte(ref p);
            uint   nEntriesInUse  = reader.ReadUInt32(ref p);
            uint   streamId       = reader.ReadUInt32(ref p);

                        #if MP_DEBUG
            //Debug.Log("Parsing index for " + RiffParser.FromFourCC(index.streamId));
                        #endif

            // Reject anything but a plain chunk index for the expected stream.
            // The size check verifies the chunk can actually hold the declared
            // entry count (24 = index data header bytes after the chunk size).
            if (bIndexType != (int)AviStreamIndex.Type.CHUNKS || bSubIndexType != 0 || streamId != index.streamId ||
                wLongsPerEntry != 2 || ixChunkSize < 4 * wLongsPerEntry * nEntriesInUse + 24)
            {
                throw new MpException("Broken or unsupported index for stream " + RiffParser.FromFourCC(streamId) +
                                      ". " + streamId + "!=" + index.streamId + ", wLongsPerEntry=" + wLongsPerEntry +
                                      ", bIndexType=" + bIndexType + ", bSubIndexType=" + bSubIndexType);
            }

            // All entry offsets below are relative to this base offset.
            long qwBaseOffset = reader.ReadInt64(ref p);
            p += 4;             // not caring about reserved bytes

            // reading it all at once is about 10x faster than reading individual uints.
            // the index chunk is not that big, so it's ok for GC too.
            var uintBuf = new uint[nEntriesInUse * 2];
            reader.Read(ref p, uintBuf, 0, (int)nEntriesInUse * 2);

            for (int i = 0; i < nEntriesInUse; i++)
            {
                var entry = new AviStreamIndex.Entry();
                entry.chunkOffset = qwBaseOffset + uintBuf [2 * i];
                uint len = uintBuf [2 * i + 1];
                // Bit 31 of the length marks a non-keyframe; the low 31 bits are
                // the actual chunk length in bytes.
                entry.chunkLength = (int)(len & 0x7FFFFFFF);
                if ((len & 0x80000000) == 0)
                {
                    entry.isKeyframe = true;
                }
                index.entries.Add(entry);
            }
        }
Exemple #4
0
        /// <summary>
        /// Reads a WAVEFORMATEX structure ('strf' chunk of an audio stream)
        /// field by field starting at offset p.
        /// </summary>
        private static WaveFormatEx ParseAudioFormatHeader(AtomicBinaryReader br, long p)
        {
            var fmt = new WaveFormatEx();
            fmt.wFormatTag = br.ReadUInt16(ref p);
            fmt.nChannels = br.ReadUInt16(ref p);
            fmt.nSamplesPerSec = br.ReadUInt32(ref p);
            fmt.nAvgBytesPerSec = br.ReadUInt32(ref p);
            fmt.nBlockAlign = br.ReadUInt16(ref p);
            fmt.wBitsPerSample = br.ReadUInt16(ref p);
            fmt.cbSize = br.ReadUInt16(ref p);
            return fmt;
        }
Exemple #5
0
        /// <summary>
        /// Initializes this demux for a raw audio stream. The caller must supply
        /// the audio stream info in loadOptions since a raw stream has no header.
        /// </summary>
        /// <exception cref="System.ArgumentException">Thrown when sourceStream or
        /// loadOptions.audioStreamInfo is missing.</exception>
        public override void Init(Stream sourceStream, LoadOptions loadOptions = null)
        {
            // Fail fast: both the stream and the caller-provided audio info are mandatory.
            if (sourceStream == null || loadOptions == null || loadOptions.audioStreamInfo == null)
            {
                throw new System.ArgumentException("sourceStream and loadOptions.audioStreamInfo are required");
            }

            reader = new AtomicBinaryReader(sourceStream);

            // Adopt the provided stream info and fill in the one thing we can measure.
            audioStreamInfo = loadOptions.audioStreamInfo;
            audioStreamInfo.lengthBytes = reader.StreamLength;

            // Playback starts from the first sample.
            nextAudioSample = 0;
        }
Exemple #6
0
        /// <summary>
        /// Initializes this demux for a raw audio stream; the stream info must be
        /// provided by the caller via loadOptions.
        /// </summary>
        /// <exception cref="System.ArgumentException">Thrown when sourceStream or
        /// loadOptions.audioStreamInfo is missing.</exception>
        public override void Init(Stream sourceStream, LoadOptions loadOptions = null)
        {
            bool missingArgs = sourceStream == null ||
                               loadOptions == null ||
                               loadOptions.audioStreamInfo == null;
            if (missingArgs)
            {
                throw new System.ArgumentException("sourceStream and loadOptions.audioStreamInfo are required");
            }

            reader = new AtomicBinaryReader(sourceStream);

            // Use the caller-supplied info; only the byte length is derived here.
            audioStreamInfo = loadOptions.audioStreamInfo;
            audioStreamInfo.lengthBytes = reader.StreamLength;

            // Rewind to the first sample.
            nextAudioSample = 0;
        }
Exemple #7
0
        /// <summary>
        /// Reads a BITMAPINFOHEADER structure ('strf' chunk of a video stream)
        /// field by field starting at offset p.
        /// </summary>
        private static BitmapInfoHeader ParseVideoFormatHeader(AtomicBinaryReader br, long p)
        {
            var bih = new BitmapInfoHeader();
            bih.biSize = br.ReadUInt32(ref p);
            bih.biWidth = br.ReadInt32(ref p);
            bih.biHeight = br.ReadInt32(ref p);
            bih.biPlanes = br.ReadUInt16(ref p);
            bih.biBitCount = br.ReadUInt16(ref p);
            bih.biCompression = br.ReadUInt32(ref p);
            bih.biSizeImage = br.ReadUInt32(ref p);
            bih.biXPelsPerMeter = br.ReadInt32(ref p);
            bih.biYPelsPerMeter = br.ReadInt32(ref p);
            bih.biClrUsed = br.ReadUInt32(ref p);
            bih.biClrImportant = br.ReadUInt32(ref p);
            return bih;
        }
Exemple #8
0
        /// <summary>
        /// Reads an AVIMAINHEADER structure ('avih' chunk) field by field
        /// starting at offset p.
        /// </summary>
        private static AVIMainHeader ParseMainHeader(AtomicBinaryReader br, long p)
        {
            var header = new AVIMainHeader();
            header.dwMicroSecPerFrame = br.ReadUInt32(ref p);
            header.dwMaxBytesPerSec = br.ReadUInt32(ref p);
            header.dwPaddingGranularity = br.ReadUInt32(ref p);
            header.dwFlags = br.ReadUInt32(ref p);
            header.dwTotalFrames = br.ReadUInt32(ref p);
            header.dwInitialFrames = br.ReadUInt32(ref p);
            header.dwStreams = br.ReadUInt32(ref p);
            header.dwSuggestedBufferSize = br.ReadUInt32(ref p);
            header.dwWidth = br.ReadUInt32(ref p);
            header.dwHeight = br.ReadUInt32(ref p);
            header.dwReserved0 = br.ReadUInt32(ref p);
            header.dwReserved1 = br.ReadUInt32(ref p);
            header.dwReserved2 = br.ReadUInt32(ref p);
            header.dwReserved3 = br.ReadUInt32(ref p);
            return header;
        }
Exemple #9
0
        /// <summary>
        /// Reads an AVISTREAMHEADER structure ('strh' chunk) field by field
        /// starting at offset p. The trailing rcFrame rectangle is four Int16s.
        /// </summary>
        private static AVIStreamHeader ParseStreamHeader(AtomicBinaryReader br, long p)
        {
            var header = new AVIStreamHeader();
            header.fccType = br.ReadUInt32(ref p);
            header.fccHandler = br.ReadUInt32(ref p);
            header.dwFlags = br.ReadUInt32(ref p);
            header.wPriority = br.ReadUInt16(ref p);
            header.wLanguage = br.ReadUInt16(ref p);
            header.dwInitialFrames = br.ReadUInt32(ref p);
            header.dwScale = br.ReadUInt32(ref p);
            header.dwRate = br.ReadUInt32(ref p);
            header.dwStart = br.ReadUInt32(ref p);
            header.dwLength = br.ReadUInt32(ref p);
            header.dwSuggestedBufferSize = br.ReadUInt32(ref p);
            header.dwQuality = br.ReadUInt32(ref p);
            header.dwSampleSize = br.ReadUInt32(ref p);
            header.rcFrameLeft = br.ReadInt16(ref p);
            header.rcFrameTop = br.ReadInt16(ref p);
            header.rcFrameRight = br.ReadInt16(ref p);
            header.rcFrameBottom = br.ReadInt16(ref p);
            return header;
        }
        /// <summary>
        /// Initializes the demux for a raw MJPEG stream: scans the whole stream
        /// for JPEG SOI (FFD8) / EOI (FFD9) markers to build a frame index,
        /// since a raw stream carries no index of its own.
        /// </summary>
        /// <param name="sourceStream">Source stream. Required.</param>
        /// <param name="loadOptions">Optional; may carry skipVideo and a prefilled videoStreamInfo.</param>
        /// <exception cref="System.ArgumentException">Thrown when sourceStream is null.</exception>
        public override void Init(Stream sourceStream, LoadOptions loadOptions = null)
        {
            // skip the video if asked not to load it
            if (loadOptions != null && loadOptions.skipVideo)
            {
                return;
            }

            // check the arguments
            if (sourceStream == null)
            {
                throw new System.ArgumentException("sourceStream is required");
            }

            // measure load time
            var watch = new System.Diagnostics.Stopwatch();

            watch.Start();

            reader = new AtomicBinaryReader(sourceStream);

            // for detecting the buffer size
            int maxRawJpgSize = 0;

            // the stream can't be seeked unless there is an index. create it
            frameStartIndex.Clear();
            frameSize.Clear();

            long markerCount = 0;
            long startIndex  = -1;          // stream offset of the last seen SOI's 0xFF byte
            bool markerStart = false;       // true when the previous byte was 0xFF
            int  bytesRead   = -1;
            long i           = 0;           // stream offset of the start of the current buffer
            var  buffer      = new byte[FILE_READ_BUFFER_SIZE];

            // read the file in chunks (more than 30x faster than reading by byte)
            long p = 0;

            do
            {
                bytesRead = reader.Read(ref p, buffer, 0, FILE_READ_BUFFER_SIZE);

                for (int j = 0; j < bytesRead; j++)
                {
                    byte b = buffer [j];

                    // wait for marker start
                    // NOTE: markerStart persists across buffer boundaries, so a
                    // marker split between two reads is still detected.
                    if (b == 0xFF)
                    {
                        markerStart = true;
                    }
                    else if (markerStart)
                    {
                        // read the other marker byte and decide what to do
                        switch (b)
                        {
                        case 0xD8:                         // Start of image
                            // -1 points back at the 0xFF byte of the marker.
                            startIndex = i + j - 1;
                            break;

                        case 0xD9:                         // End of image
                            frameStartIndex.Add(startIndex);
                            int size = (int)(i + j - startIndex + 1);
                            if (size > maxRawJpgSize)
                            {
                                maxRawJpgSize = size;
                            }
                            frameSize.Add(size);
                            //Debug.Log("Found frame OFFS: " + startIndex + " SIZE: " + size);
                            break;
                        }
                        markerStart = false;
                        markerCount++;
                    }
                }
                i += bytesRead;
                // NOTE(review): loop stops on any short read; assumes
                // AtomicBinaryReader.Read only returns fewer bytes at EOF — confirm.
            } while(bytesRead >= FILE_READ_BUFFER_SIZE);

            // create a buffer for holding raw jpg data when decoding a frame
            rawJpgBuffer = new byte[maxRawJpgSize];

            watch.Stop();
                        #if MP_DEBUG
            Debug.Log("Recreated index for raw MJPEG stream in " + (watch.Elapsed.TotalMilliseconds * 0.001f) + " seconds." +
                      "Frames: " + frameStartIndex.Count + ". Max jpg size: " + maxRawJpgSize + ". Markers: " + markerCount);
                        #endif

            // set all the info about the video stream we know
            if (loadOptions != null && loadOptions.videoStreamInfo != null)
            {
                videoStreamInfo = loadOptions.videoStreamInfo;
            }
            else
            {
                videoStreamInfo             = new VideoStreamInfo();
                videoStreamInfo.codecFourCC = VideoDecoderMJPEG.FOURCC_MJPG;
            }
            videoStreamInfo.frameCount  = frameSize.Count;
            videoStreamInfo.lengthBytes = reader.StreamLength;
        }
Exemple #11
0
        /// <summary>
        /// Initializes the demux for a given stream. After calling this you can
        /// query A/V stream info and create decoders to play back those streams.
        /// Parses all RIFF chunks, fills in video/audio stream info from the AVI
        /// headers, and ensures both streams have a usable index (OpenDML indx
        /// or legacy idx1).
        /// </summary>
        /// <param name="sourceStream">Source stream.</param>
        /// <param name="loadOptions">Load options.</param>
        public override void Init(Stream sourceStream, LoadOptions loadOptions = null)
        {
            // Measure total load time (reported below under MP_DEBUG).
            var watch = new System.Diagnostics.Stopwatch();

            watch.Start();

            reader = new AtomicBinaryReader(sourceStream);

            var riffParser = new RiffParser(reader);

            avi = new AVIFile();

            // Reset parse state; the Process* callbacks fill these in as the
            // RIFF tree is walked.
            idx1EntryOffset = -1;
            idx1Offset      = -1;
            currentStrh4CC  = 0;
            while (riffParser.ReadNext(ProcessAviChunk, ProcessAviList, ProcessAviRiff))
            {
                ;
            }

            // Build video stream info from the parsed headers, if a video stream was found.
            if (avi.strhVideo != null)
            {
                videoStreamInfo              = new VideoStreamInfo();
                videoStreamInfo.codecFourCC  = avi.strhVideo.fccHandler;
                videoStreamInfo.bitsPerPixel = avi.strfVideo.biBitCount;
                // Prefer the OpenDML total frame count over the legacy avih one
                // (avih can't count frames beyond the first RIFF chunk).
                videoStreamInfo.frameCount   = avi.odml != null ? (int)avi.odml.dwTotalFrames : (int)avi.avih.dwTotalFrames;
                videoStreamInfo.width        = (int)avi.avih.dwWidth;
                videoStreamInfo.height       = (int)avi.avih.dwHeight;
                videoStreamInfo.framerate    = (float)avi.strhVideo.dwRate / (float)avi.strhVideo.dwScale;
            }
            else
            {
                videoStreamInfo = null;
            }
            // Same for audio.
            if (avi.strhAudio != null)
            {
                audioStreamInfo             = new AudioStreamInfo();
                audioStreamInfo.codecFourCC = avi.strhAudio.fccHandler;
                audioStreamInfo.audioFormat = avi.strfAudio.wFormatTag;
                audioStreamInfo.sampleCount = (int)avi.strhAudio.dwLength;
                audioStreamInfo.sampleSize  = (int)avi.strhAudio.dwSampleSize;
                audioStreamInfo.channels    = (int)avi.strfAudio.nChannels;
                audioStreamInfo.sampleRate  = (int)avi.strfAudio.nSamplesPerSec;
            }
            else
            {
                audioStreamInfo = null;
            }

            // we may already have indexes here. it happens when the AVI contained OpenDML indx elements.
            // if we don't have indexes yet, then try to parse then out from an old idx1 chunk.
            if (hasVideo)
            {
                if (avi.videoIndex == null)
                {
                    avi.videoIndex = ParseOldIndex(idx1Offset, riffParser.reader, idx1Size, AviDemux.ID_00dc, idx1EntryOffset);
                }
                if (avi.videoIndex == null)
                {
                    // currently we're just throwing an exception here, but we could also rebuild the index. It's slow, but doable.
                    throw new MpException("No video index found (required for playback and seeking)");
                }
                PrepareVideoStream();
            }
            if (hasAudio)
            {
                if (avi.audioIndex == null)
                {
                    avi.audioIndex = ParseOldIndex(idx1Offset, riffParser.reader, idx1Size, AviDemux.ID_01wb, idx1EntryOffset);
                }
                if (avi.audioIndex == null)
                {
                    // currently we're just throwing an exception here, but we could also rebuild the index. It's slow, but doable.
                    throw new MpException("No audio index found (required for playback and seeking)");
                }
                PrepareAudioStream();
            }

            // if not all the frames are indexed, fix it
            if (videoStreamInfo != null && avi.videoIndex != null && videoStreamInfo.frameCount > avi.videoIndex.entries.Count)
            {
                                #if MP_DEBUG
                Debug.LogWarning("Not all video frames are indexed. Adjusting video length to match indexed frame count " +
                                 avi.videoIndex.entries.Count + ". AVI header told that there should be " +
                                 videoStreamInfo.frameCount + " frames." + " It's likely that your encoder has a bug.");
                                #endif
                videoStreamInfo.frameCount = avi.videoIndex.entries.Count;
            }

            watch.Stop();
                        #if MP_DEBUG
            Debug.Log("AVI loaded in " + (watch.Elapsed.TotalMilliseconds * 0.001f) + " seconds");
                        #endif

            // Rewind playback positions to the start of both streams.
            nextVideoFrame  = 0;
            nextAudioSample = 0;
        }
Exemple #12
0
        /// <summary>
        /// Parses an OpenDML 'indx' element whose header fields start at offset p
        /// (the RIFF chunk id and size have already been consumed by RiffParser).
        /// Handles both a superindex (a list of offsets to 'ix..' chunk indexes,
        /// each parsed via ParseChunkIndex) and a directly embedded chunk index.
        /// </summary>
        /// <param name="streamId">Receives the stream id this index belongs to.</param>
        /// <exception cref="MpException">Thrown for unsupported index types.</exception>
        private static AviStreamIndex ParseOdmlIndex(AtomicBinaryReader reader, long p, out uint streamId)
        {
            ushort wLongsPerEntry = reader.ReadUInt16(ref p);
            byte   bSubIndexType  = reader.ReadByte(ref p);
            byte   bIndexType     = reader.ReadByte(ref p);
            uint   nEntriesInUse  = reader.ReadUInt32(ref p);

            streamId = reader.ReadUInt32(ref p);

            var index = new AviStreamIndex();

            index.streamId = streamId;

            // if there is AVI_INDEX_OF_CHUNKS (superindex) in this element
            if (bIndexType == (byte)AviStreamIndex.Type.SUPERINDEX)
            {
                p += 3 * 4;                 // not caring about reserved bytes

                                #if MP_DEBUG
                //Debug.Log("Parsing superindex for " + RiffParser.FromFourCC(streamId));
                                #endif

                // sanity check
                // A superindex should have 4 longs per entry and no sub index type;
                // tolerate violations since the offsets may still be usable.
                if (bSubIndexType != 0 || wLongsPerEntry != 4)
                {
                                        #if MP_DEBUG
                    Debug.LogWarning("Broken superindex for stream " + RiffParser.FromFourCC(streamId) +
                                     ", but trying to continue. " + bSubIndexType + " " + wLongsPerEntry);
                                        #endif
                }

                // Each superindex entry points at one 'ix..' chunk index elsewhere
                // in the file: absolute offset, size in bytes, duration (ignored).
                for (uint i = 0; i < nEntriesInUse; i++)
                {
                    long qwOffset = reader.ReadInt64(ref p);
                    int  dwSize   = reader.ReadInt32(ref p);
                    reader.ReadInt32(ref p);                      // dwDuration. don't care

                    // A zero offset marks an unused superindex slot.
                    if (qwOffset != 0)
                    {
                        // Save our position in the superindex, jump to the sub
                        // index, parse it, then resume where we left off.
                        long currentStreamPos = p;
                        p = qwOffset;

                        // reduce memory allocations by (over)estimating entry count from index size in bytes
                        index.entries.Capacity += dwSize / 8;
                        ParseChunkIndex(reader, p, ref index);

                        p = currentStreamPos;
                    }
                }
            }
            // if there is AVI_INDEX_OF_CHUNKS (chunk index) in here
            else if (bIndexType == (byte)AviStreamIndex.Type.CHUNKS)
            {
                // seek back to the beginning of this chunk (12bytes read here, 8bytes read by RiffParser)
                ParseChunkIndex(reader, p - 20, ref index);
            }
            else
            {
                throw new MpException("Unsupported index type " + bIndexType +
                                      " encountered for stream " + RiffParser.FromFourCC(streamId));
            }
            // Release the over-allocated capacity from the estimate above.
            index.entries.TrimExcess();
            return(index);
        }