Code example #1
0
        /// <summary>
        /// Given a byte[] containing a video frame, return the video MediaType.
        /// </summary>
        /// <param name="frame">Serialized frame: a leading Int16 header size followed by the header bytes.</param>
        /// <param name="compressionData">Receives the codec private data when the format is VideoInfo; otherwise null.</param>
        /// <returns>The reconstructed video media type, or null when frame is null.</returns>
        private static MediaTypeVideoInfo VideoMediaTypeFromFrame(byte[] frame, out byte[] compressionData)
        {
            compressionData = null;
            if (frame == null)
            {
                return null;
            }

            //The first short in the frame gives the size of the header that follows.
            BufferChunk chunk  = new BufferChunk(frame);
            short       size   = chunk.NextInt16();
            BufferChunk header = chunk.NextBufferChunk(size);

            //The header contains a custom serialization of AM_SAMPLE2_PROPERTIES followed by
            // AM_MEDIA_TYPE and an optional format type.  Skip past AM_SAMPLE2_PROPERTIES.
            header.NextBufferChunk(48);

            //AM_MEDIA_TYPE
            MediaTypeVideoInfo mediaType = new MediaTypeVideoInfo();
            ReconstituteBaseMediaType((MediaType)mediaType, header);

            if (mediaType.FormatType == FormatType.VideoInfo)
            {
                ReconstituteVideoFormat(mediaType, header, out compressionData);
            }
            return mediaType;
        }
Code example #2
0
 /// <summary>
 /// Write every field of the VIDEOINFOHEADER carried by this media type to the debug listener.
 /// </summary>
 /// <param name="mt">Media type whose VideoInfo members are dumped.</param>
 public static void DebugPrintVideoFormat(MediaTypeVideoInfo mt)
 {
     VIDEOINFOHEADER vi = mt.VideoInfo;
     Debug.WriteLine("  AvgTimePerFrame=" + vi.AvgTimePerFrame);
     Debug.WriteLine("  BitErrorRate=" + vi.BitErrorRate);
     Debug.WriteLine("  BitRate=" + vi.BitRate);
     RECT source = vi.Source;
     Debug.WriteLine("  Source.top=" + source.top);
     Debug.WriteLine("  Source.left=" + source.left);
     Debug.WriteLine("  Source.bottom=" + source.bottom);
     Debug.WriteLine("  Source.right=" + source.right);
     RECT target = vi.Target;
     Debug.WriteLine("  Target.top=" + target.top);
     Debug.WriteLine("  Target.left=" + target.left);
     Debug.WriteLine("  Target.bottom=" + target.bottom);
     Debug.WriteLine("  Target.right=" + target.right);
     BITMAPINFOHEADER bih = vi.BitmapInfo;
     Debug.WriteLine("  BitmapInfo.Height=" + bih.Height);
     Debug.WriteLine("  BitmapInfo.Planes=" + bih.Planes);
     Debug.WriteLine("  BitmapInfo.BitCount=" + bih.BitCount);
     Debug.WriteLine("  BitmapInfo.ClrImportant=" + bih.ClrImportant);
     Debug.WriteLine("  BitmapInfo.ClrUsed=" + bih.ClrUsed);
     Debug.WriteLine("  BitmapInfo.Compression=" + bih.Compression);
     Debug.WriteLine("  BitmapInfo.Size=" + bih.Size);
     Debug.WriteLine("  BitmapInfo.SizeImage=" + bih.SizeImage);
     Debug.WriteLine("  BitmapInfo.Width=" + bih.Width);
     Debug.WriteLine("  BitmapInfo.XPelsPerMeter=" + bih.XPelsPerMeter);
     Debug.WriteLine("  BitmapInfo.YPelsPerMeter=" + bih.YPelsPerMeter);
 }
Code example #3
0
 /// <summary>
 /// Construct profile data holding both the audio and video media types and their codec private data.
 /// </summary>
 public ProfileData(MediaTypeVideoInfo videoMediaType, byte[] videoCodecData, MediaTypeWaveFormatEx audioMediaType, byte[] audioCodecData)
 {
     //Reset all members first, then record the supplied values.
     init();
     this.audioMediaType = audioMediaType;
     this.audioCodecData = audioCodecData;
     this.videoMediaType = videoMediaType;
     this.videoCodecData = videoCodecData;
 }
Code example #4
0
 /// <summary>
 /// Restore every member to its default (empty) state.
 /// </summary>
 private void init()
 {
     audioMediaType = null;
     audioCodecData = null;
     videoMediaType = null;
     videoCodecData = null;
     videoCodecGuid = Guid.Empty;
     bitrate        = 0;
     bufferwindow   = 0;
     height         = 0;
     width          = 0;
 }
Code example #5
0
        /// <summary>
        /// Return a new ProfileData instance containing MediaTypes and codec private data as determined
        /// by the audio and video frames given.  One, but not both, frames may be null.
        /// </summary>
        /// <param name="aframe">Serialized audio frame, or null.</param>
        /// <param name="vframe">Serialized video frame, or null.</param>
        /// <returns>The assembled ProfileData, or null when both frames are null.</returns>
        private static ProfileData FramesToProfileData(byte[] aframe, byte[] vframe)
        {
            if (aframe == null && vframe == null)
            {
                return null;
            }

            //Reconstruct each media type; a null frame yields a null type and null codec data.
            byte[] audioCompressionData;
            MediaTypeWaveFormatEx audioType = AudioMediaTypeFromFrame(aframe, out audioCompressionData);

            byte[] videoCompressionData;
            MediaTypeVideoInfo videoType = VideoMediaTypeFromFrame(vframe, out videoCompressionData);

            return new ProfileData(videoType, videoCompressionData, audioType, audioCompressionData);
        }
Code example #6
0
        /// <summary>
        /// Fill in the video-specific parts of the MediaType from the data in the BufferChunk.
        /// Also return the compression data which is the remaining bytes at the end of the byte[].
        /// </summary>
        /// <param name="mt">Media type whose VideoInfo member is populated.</param>
        /// <param name="bc">Buffer positioned at the serialized VIDEOINFOHEADER.</param>
        /// <param name="compressionData">Receives all bytes remaining after the header.</param>
        public static void ReconstituteVideoFormat(MediaTypeVideoInfo mt, BufferChunk bc, out byte[] compressionData)
        {
            VIDEOINFOHEADER vi;

            //Source rectangle, serialized as left, top, right, bottom.
            RECT source;
            source.left   = NextInt32(bc);
            source.top    = NextInt32(bc);
            source.right  = NextInt32(bc);
            source.bottom = NextInt32(bc);
            vi.Source = source;

            //Target rectangle, same field order.
            RECT target;
            target.left   = NextInt32(bc);
            target.top    = NextInt32(bc);
            target.right  = NextInt32(bc);
            target.bottom = NextInt32(bc);
            vi.Target = target;

            vi.BitRate         = (uint)NextInt32(bc);
            vi.BitErrorRate    = (uint)NextInt32(bc);
            vi.AvgTimePerFrame = bc.NextUInt64();

            //BITMAPINFOHEADER fields in their serialized order.
            BITMAPINFOHEADER bih;
            bih.Size          = (uint)NextInt32(bc);
            bih.Width         = NextInt32(bc);
            bih.Height        = NextInt32(bc);
            bih.Planes        = (ushort)NextInt16(bc);
            bih.BitCount      = (ushort)NextInt16(bc);
            bih.Compression   = (uint)NextInt32(bc);
            bih.SizeImage     = (uint)NextInt32(bc);
            bih.XPelsPerMeter = NextInt32(bc);
            bih.YPelsPerMeter = NextInt32(bc);
            bih.ClrUsed       = (uint)NextInt32(bc);
            bih.ClrImportant  = (uint)NextInt32(bc);
            vi.BitmapInfo = bih;
            mt.VideoInfo  = vi;

            //Whatever remains in the buffer is the codec private data.
            compressionData = new byte[bc.Length];
            for (int index = 0; index < compressionData.Length; index++)
            {
                compressionData[index] = bc.NextByte();
            }
        }
Code example #7
0
        /// <summary>
        /// Return the media type representing the uncompressed frames we will deliver to the caller.
        /// Assume RGB24.
        /// </summary>
        /// <param name="w">Frame width in pixels.</param>
        /// <param name="h">Frame height in pixels.</param>
        /// <param name="fps">Frame rate; may be fractional (e.g. 29.97).</param>
        /// <returns>A MediaTypeVideoInfo describing RGB24 frames of the given size and rate.</returns>
        private MediaTypeVideoInfo getUncompressedMT(int w, int h, double fps)
        {
            MediaTypeVideoInfo mt = new MediaTypeVideoInfo();

            mt.FixedSizeSamples                   = true;
            mt.TemporalCompression                = false;
            mt.SampleSize                         = h * w * 3;  //RGB24: 3 bytes per pixel.
            mt.MajorType                          = MajorType.Video;
            mt.SubType                            = SubType.RGB24;
            mt.FormatType                         = FormatType.VideoInfo;
            mt.VideoInfo.Source                   = new RECT();
            mt.VideoInfo.Source.left              = 0;
            mt.VideoInfo.Source.top               = 0;
            mt.VideoInfo.Source.bottom            = h;
            mt.VideoInfo.Source.right             = w;
            mt.VideoInfo.Target                   = new RECT();
            mt.VideoInfo.Target.left              = 0;
            mt.VideoInfo.Target.top               = 0;
            mt.VideoInfo.Target.bottom            = h;
            mt.VideoInfo.Target.right             = w;
            //Bug fix: the old code cast fps to ulong BEFORE dividing, truncating fractional
            // rates (e.g. 29.97 -> 29) and overstating the frame duration.  Divide in double
            // first, matching the ticksBetweenFrames computation in SlideStreamMgr.
            mt.VideoInfo.AvgTimePerFrame          = (ulong)((double)Constants.TicksPerSec / fps);
            mt.VideoInfo.BitErrorRate             = 0;
            mt.VideoInfo.BitRate                  = (uint)((double)h * (double)w * 3d * 8d * fps);
            mt.VideoInfo.BitmapInfo.Height        = h;
            mt.VideoInfo.BitmapInfo.Width         = w;
            mt.VideoInfo.BitmapInfo.SizeImage     = (uint)(h * w * 3);
            mt.VideoInfo.BitmapInfo.Planes        = 1;  //Always 1 per the BITMAPINFOHEADER contract.
            mt.VideoInfo.BitmapInfo.BitCount      = 24;
            mt.VideoInfo.BitmapInfo.ClrImportant  = 0;
            mt.VideoInfo.BitmapInfo.ClrUsed       = 0;
            mt.VideoInfo.BitmapInfo.Compression   = 0;  //BI_RGB (uncompressed).
            mt.VideoInfo.BitmapInfo.XPelsPerMeter = 0;
            mt.VideoInfo.BitmapInfo.YPelsPerMeter = 0;
            mt.VideoInfo.BitmapInfo.Size          = 40; //sizeof(BITMAPINFOHEADER).
            mt.Update();
            return(mt);
        }
Code example #8
0
        /// <summary>
        /// Compare important fields to make sure the MediaTypes are "compatible".
        /// This is used with compressed samples to make sure we won't cause the Windows Media Writer
        /// object to except by feeding it stream samples from multiple RTP streams.
        /// By definition a null is compatible with any media type.
        /// Note: in the case of uncompressed samples, we can reconfigure the WM Writer to accept the
        /// new media type, so there is no need to do this checking.
        /// </summary>
        /// <param name="mt1">First media type, or null.</param>
        /// <param name="mt2">Second media type, or null.</param>
        /// <returns>true when the two media types are compatible.</returns>
        public static bool CompareVideoMediaTypes(MediaTypeVideoInfo mt1, MediaTypeVideoInfo mt2)
        {
            //A null is compatible with anything.
            if (mt1 == null || mt2 == null)
            {
                return true;
            }

            //The basic type identifiers must all agree.
            bool sameBase = (mt1.MajorType == mt2.MajorType) &&
                            (mt1.SubType == mt2.SubType) &&
                            (mt1.FormatType == mt2.FormatType);
            if (!sameBase)
            {
                return false;
            }

            if (mt1.VideoInfo.BitRate != mt2.VideoInfo.BitRate)
            {
                return false;
            }

            bool sameDimensions = (mt1.VideoInfo.BitmapInfo.Height == mt2.VideoInfo.BitmapInfo.Height) &&
                                  (mt1.VideoInfo.BitmapInfo.Width == mt2.VideoInfo.BitmapInfo.Width) &&
                                  (mt1.VideoInfo.BitmapInfo.SizeImage == mt2.VideoInfo.BitmapInfo.SizeImage);
            if (!sameDimensions)
            {
                return false;
            }

            //Special case for the Screen Streaming codec: we had to repair these two members,
            // so only test them if the subtype is not MSS2.
            if (mt1.SubType != SubType.MSS2)
            {
                if ((mt1.VideoInfo.BitmapInfo.Compression != mt2.VideoInfo.BitmapInfo.Compression) ||
                    (mt1.VideoInfo.BitmapInfo.BitCount != mt2.VideoInfo.BitmapInfo.BitCount))
                {
                    return false;
                }
            }

            return true;
        }
Code example #9
0
        /// <summary>
        /// Prepare to transcode the presentation ("slide") data for one job segment:
        /// parse the segment's start/end times, build the uncompressed output media type,
        /// look up the matching presentation streams in the database and create a
        /// DBStreamPlayer for each.  On recoverable problems (no conference or no streams
        /// found) a warning is logged and logMgr.ErrorLevel is raised to 7.
        /// </summary>
        /// <param name="job">The overall transcoder job this segment belongs to.</param>
        /// <param name="segment">Segment supplying times and the presentation descriptor.</param>
        /// <param name="logMgr">Sink for warnings about missing presentation data.</param>
        /// <param name="fps">Output frame rate; used to derive the tick interval between frames.</param>
        /// <param name="width">Output width; defaults are kept unless both width and height are positive.</param>
        /// <param name="height">Output height; defaults are kept unless both width and height are positive.</param>
        /// <exception cref="System.Exception">Thrown when the segment start or end time cannot be parsed.</exception>
        public SlideStreamMgr(ArchiveTranscoderJob job, ArchiveTranscoderJobSegment segment,
                              LogMgr logMgr, double fps, int width, int height)
        {
            this.job     = job;
            this.segment = segment;
            this.logMgr  = logMgr;

            //Only override the default output dimensions when both values are valid.
            if (width > 0 && height > 0)
            {
                this.outputWidth  = width;
                this.outputHeight = height;
            }

            this.ticksBetweenFrames = (long)((double)Constants.TicksPerSec / fps);

            uncompressedMT = getUncompressedMT(this.outputWidth, this.outputHeight, fps);
            cancel         = false;
            initialized    = false;
            pptInstalled   = Utility.CheckPptIsInstalled();

            //Both segment times must parse; otherwise the segment is unusable.
            if ((!DateTime.TryParse(segment.StartTime, out start)) ||
                (!DateTime.TryParse(segment.EndTime, out end)))
            {
                throw(new System.Exception("Failed to parse start/end time"));
            }

            this.nextFrameTime = start.Ticks;

            format  = Utility.StringToPresenterWireFormatType(segment.PresentationDescriptor.PresentationFormat);
            payload = Utility.formatToPayload(format);
            cname   = segment.PresentationDescriptor.PresentationCname;

            slideImageMgr = new SlideImageMgr(format, this.outputWidth, this.outputHeight);

            //Get the start time for the entire conference and use that to get streams.
            long confStart = DatabaseUtility.GetConferenceStartTime(payload, cname, start.Ticks, end.Ticks);

            //No matching conference: warn and fall back to the segment start time.
            if (confStart <= 0)
            {
                logMgr.WriteLine("Warning: No conference exists in the database that matches this presentation: " + cname +
                                 " with PresentationFormat " + format.ToString());
                logMgr.ErrorLevel = 7;
                confStart         = start.Ticks;
            }

            //Get the relevant stream_id's and create DBStreamPlayers for each.
            streamIDs = DatabaseUtility.GetStreams(payload, segment.PresentationDescriptor.PresentationCname, null, confStart, end.Ticks);
            DateTime sdt = new DateTime(confStart);

            Debug.WriteLine("***Conference start: " + sdt.ToString() + " end: " + end.ToString());
            if ((streamIDs == null) || (streamIDs.Length == 0))
            {
                Debug.WriteLine("No presentation data found.");
                logMgr.WriteLine("Warning: No presentation data was found for the given time range for " +
                                 cname + " with PresentationFormat " + format.ToString());
                logMgr.ErrorLevel = 7;
                streamPlayers     = null;
                return;
            }

            //One player per stream, all spanning from conference start to segment end.
            streamPlayers = new DBStreamPlayer[streamIDs.Length];
            for (int i = 0; i < streamIDs.Length; i++)
            {
                streamPlayers[i] = new DBStreamPlayer(streamIDs[i], confStart, end.Ticks, payload);
            }

            //How many seconds of data exist before the segment start.
            //NOTE(review): presumably this pre-segment data is replayed to rebuild slide
            // state at the segment start — confirm with the callers of lookBehindDuration.
            lookBehindDuration = 0;
            if (streamPlayers[0].Start < start)
            {
                lookBehindDuration = ((TimeSpan)(start - streamPlayers[0].Start)).TotalSeconds;
            }
        }
Code example #10
0
        /// <summary>
        /// Open a Windows Media file with an IWMSyncReader, verify it contains exactly one
        /// stream, and reconstruct that stream's media type (video or audio) from the
        /// reader's output properties.  Optionally configure the reader to deliver
        /// compressed stream samples instead of decompressed output samples.
        /// </summary>
        /// <param name="filename">Path of the Windows Media file to open.</param>
        /// <param name="start">Stream start time in ticks.</param>
        /// <param name="end">Stream end time in ticks; duration is computed as end - start.</param>
        /// <param name="compressed">True to read raw (compressed) stream samples.</param>
        /// <param name="streamID">Identifier associated with this stream.</param>
        public FileStreamPlayer(String filename, long start, long end, bool compressed, int streamID)
        {
            this.streamID = streamID;
            this.filename = filename;
            this.start    = start;
            this.end      = end;
            this.duration = (ulong)(end - start);
            outOfData     = false;
            this.guid     = Guid.NewGuid();

            //create IWMSyncReader and open the file.
            //NOTE(review): hr is never checked; a failed WMCreateSyncReader would only
            // surface as a null-reference on reader.Open below — consider testing the HRESULT.
            uint   hr = WMFSDKFunctions.WMCreateSyncReader(null, 0, out reader);
            IntPtr fn = Marshal.StringToCoTaskMemUni(filename);

            reader.Open(fn);
            Marshal.FreeCoTaskMem(fn);

            //Verify that the file contains one stream.
            uint outputcnt;

            reader.GetOutputCount(out outputcnt);
            Debug.Assert(outputcnt == 1);

            //Extract the MediaType for the stream.
            uint   cmt = 0;
            IntPtr ipmt;
            IWMOutputMediaProps outputProps;

            //Standard two-call pattern: first GetMediaType returns the required size,
            // the second call fills the allocated buffer.
            //NOTE(review): ipmt is allocated with AllocCoTaskMem but never freed — leak; verify.
            reader.GetOutputProps(0, out outputProps);
            outputProps.GetMediaType(IntPtr.Zero, ref cmt);
            ipmt = Marshal.AllocCoTaskMem((int)cmt);
            outputProps.GetMediaType(ipmt, ref cmt);
            byte[] bmt = new byte[cmt];
            Marshal.Copy(ipmt, bmt, 0, (int)cmt);
            BufferChunk bc = new BufferChunk(bmt);

            byte[] cd;  //codec private data; reconstructed but not used here.

            GUID majorTypeGUID;

            //Branch on the major type to reconstruct the matching media type flavor.
            outputProps.GetType(out majorTypeGUID);
            if (WMGuids.ToGuid(majorTypeGUID) == WMGuids.WMMEDIATYPE_Video)
            {
                vmt = new MediaTypeVideoInfo();
                ProfileUtility.ReconstituteBaseMediaType((MediaType)vmt, bc);
                ProfileUtility.ReconstituteVideoFormat(vmt, bc, out cd);
                //Note: This is a special case which we would like to generalize:  The default output format for the
                //12bpp video was found not to return any uncompressed samples.  Setting this particular case to RGB 24 fixed it.
                if ((!compressed) && (vmt.VideoInfo.BitmapInfo.BitCount == 12))
                {
                    SetVideoOutputProps();
                }
            }
            else if (WMGuids.ToGuid(majorTypeGUID) == WMGuids.WMMEDIATYPE_Audio)
            {
                amt = new MediaTypeWaveFormatEx();
                ProfileUtility.ReconstituteBaseMediaType((MediaType)amt, bc);
                ProfileUtility.ReconstituteAudioFormat(amt, bc, out cd);
            }

            //if compressed is set, retrieve stream samples
            if (compressed)
            {
                reader.SetReadStreamSamples(1, 1);
            }
        }
Code example #11
0
 /// <summary>
 /// Construct profile data holding only a video media type and its codec private data;
 /// the audio members are left in their default (null) state.
 /// </summary>
 public ProfileData(MediaTypeVideoInfo videoMediaType, byte[] videoCodecData)
 {
     //Reset all members first, then record the video values.
     init();
     this.videoCodecData = videoCodecData;
     this.videoMediaType = videoMediaType;
 }
Code example #12
0
        /// <summary>
        /// Capture stills from the video stream.  Create a temporary directory and save the images there.
        /// Compile a list of DataItem objects to indicate when the slide transitions should take place.
        /// </summary>
        /// <returns>Always null; problems are appended to this.errorLog instead of being returned.</returns>
        public string Process()
        {
            //The image filter (backed by ImageMagick) lets us skip frames that are
            // near-duplicates of the previous capture.  It is optional.
            ImageFilter.ImageFilter imgFilter = null;
            try {
                imgFilter = new ImageFilter.ImageFilter();
                imgFilter.DifferenceThreshold = this.differenceThreshold;
            }
            catch {
                this.errorLog.Append("Video capture images in the presentation will not be filtered probably " +
                                     "because ImageMagick is not available in the configuration.\r\n");
            }

            this.differenceMetrics = new List <double>();
            metadata = new List <PresentationMgr.DataItem>();
            RTUpdate rtu = new RTUpdate();

            this.deckGuid = Guid.NewGuid();
            rtu.DeckGuid  = this.deckGuid;
            rtu.SlideSize = 1.0;
            rtu.DeckType  = (Int32)DeckTypeEnum.Presentation;

            //Transcode the video to raw frames so we can grab stills from it.
            this.videoStream = new StreamMgr(videoDescriptor.VideoCname, videoDescriptor.VideoName, new  DateTime(this.start), new DateTime(this.end), false, PayloadType.dynamicVideo);
            this.videoStream.ToRawWMFile(this.progressTracker);
            MediaTypeVideoInfo mtvi = videoStream.GetUncompressedVideoMediaType();

            this.tempdir = Utility.GetTempDir();
            Directory.CreateDirectory(this.tempdir);
            string filebase  = "slide";
            string extent    = ".jpg";
            int    fileindex = 1;

            BufferChunk bc;
            long        time;
            bool        newStream;
            string      previousFile = null;

            this.stopNow = false;
            while ((!stopNow) && (videoStream.GetNextSample(out bc, out time, out newStream)))
            {
                //Only grab a frame once the configured interval has elapsed.
                if ((time - lastFramegrab) >= (long)(this.frameGrabIntervalMs * Constants.TicksPerMs))
                {
                    DateTime dt = new DateTime(time);
                    Debug.WriteLine("time=" + dt.ToString() + ";length=" + bc.Length.ToString());
                    lastFramegrab = time;
                    string      filepath    = Path.Combine(tempdir, filebase + fileindex.ToString() + extent);
                    PixelFormat pixelFormat = subtypeToPixelFormat(mtvi.SubType);
                    //Bug fix: the Bitmaps were never disposed, leaking GDI+ handles and
                    // unmanaged pixel memory on every captured frame.  Wrap them in using blocks.
                    using (Bitmap bm = new Bitmap(mtvi.VideoInfo.BitmapInfo.Width, mtvi.VideoInfo.BitmapInfo.Height, pixelFormat))
                    {
                        //Copy the raw sample directly into the bitmap's pixel buffer.
                        BitmapData bd = bm.LockBits(new Rectangle(0, 0, mtvi.VideoInfo.BitmapInfo.Width, mtvi.VideoInfo.BitmapInfo.Height), ImageLockMode.ReadWrite, pixelFormat);
                        Marshal.Copy(bc.Buffer, 0, bd.Scan0, bc.Length);
                        bm.UnlockBits(bd);
                        //DIB rows are stored bottom-up; flip to top-down before saving.
                        bm.RotateFlip(RotateFlipType.RotateNoneFlipY);
                        if ((SCALE) && (mtvi.VideoInfo.BitmapInfo.Width >= 1280))
                        {
                            //Halve very large frames before saving.
                            int w = mtvi.VideoInfo.BitmapInfo.Width / 2;
                            int h = mtvi.VideoInfo.BitmapInfo.Height / 2;
                            using (Bitmap scaled = new Bitmap(bm, new Size(w, h)))
                            {
                                scaled.Save(filepath, ImageFormat.Jpeg);
                            }
                        }
                        else
                        {
                            bm.Save(filepath, ImageFormat.Jpeg);
                        }
                    }
                    if (imgFilter != null)
                    {
                        string filterMsg;
                        bool   filterError;
                        double metric;
                        bool   differ = imgFilter.ImagesDiffer(filepath, previousFile, out filterMsg, out filterError, out metric);
                        if (filterError)
                        {
                            //this.errorLog.Append(filterMsg);
                            Console.WriteLine(filterMsg);
                        }
                        //Near-duplicate of the previous capture: skip it.  fileindex and
                        // previousFile are deliberately left unchanged so the file is
                        // overwritten by the next grab.
                        if (!differ)
                        {
                            continue;
                        }
                        this.differenceMetrics.Add(metric);
                        Console.WriteLine("Framegrab slide index: " + fileindex.ToString() +
                                          "; difference: " + metric.ToString() + "; time: " + dt.ToString());
                    }
                    rtu.SlideIndex = fileindex - 1; // This is a zero-based index
                    metadata.Add(new PresentationMgr.DataItem(time - this.offset, PresentationMgr.CopyRTUpdate(rtu)));
                    fileindex++;
                    previousFile = filepath;
                }
            }
            return(null);
        }