Beispiel #1
0
        /// <summary>
        /// Copies one decoded RGB video sample into a managed <see cref="Bitmap"/> sized to the input format.
        /// </summary>
        /// <param name="sample">Buffer holding the raw RGB pixel data of one frame.</param>
        /// <param name="videoSubType">RGB media subtype of the sample (RGB555, RGB24 or RGB32).</param>
        /// <param name="outputVideoInfoHeader">Format of the sample as produced by the reader.</param>
        /// <param name="inputVideoInfoHeader">Target format; the returned bitmap takes its dimensions.</param>
        /// <returns>A new, vertically corrected bitmap. The caller owns it and must dispose it.</returns>
        /// <exception cref="ArgumentException">Thrown when <paramref name="videoSubType"/> is not a supported RGB subtype.</exception>
        public Bitmap GetBitmapFromSample(INSSBuffer sample, Guid videoSubType, VideoInfoHeader outputVideoInfoHeader, VideoInfoHeader inputVideoInfoHeader)
        {
            IntPtr      sampleBuffer;
            PixelFormat pixelFormat;
            uint        length = 0;

            sample.GetBufferAndLength(out sampleBuffer, out length);

            if (videoSubType == MediaSubTypes.WMMEDIASUBTYPE_RGB32)
            {
                pixelFormat = PixelFormat.Format32bppRgb;
            }
            else if (videoSubType == MediaSubTypes.WMMEDIASUBTYPE_RGB555)
            {
                pixelFormat = PixelFormat.Format16bppRgb555;
            }
            else if (videoSubType == MediaSubTypes.WMMEDIASUBTYPE_RGB24)
            {
                pixelFormat = PixelFormat.Format24bppRgb;
            }
            else
            {
                // FIX: ArgumentException takes (message, paramName); the original call had the
                // two arguments swapped, producing a garbled exception text.
                throw new ArgumentException("Unsupported video subtype [" + videoSubType + "].", "videoSubType");
            }

#if DEBUG && DEBUG_SAMPLES
            Logger.WriteLogMessage("Grabbed sample buffer, length [" + length + "], pixelFormat [" + pixelFormat + "].");
#endif

            // Stride in bytes of one scan line of the decoded sample.
            uint stride = outputVideoInfoHeader.bmiHeader.biWidth * outputVideoInfoHeader.bmiHeader.biPlanes * outputVideoInfoHeader.bmiHeader.biBitCount / 8;

#if DEBUG && DEBUG_SAMPLES
            Logger.WriteLogMessage("Creating bitmap [" + outputVideoInfoHeader.bmiHeader.biWidth + "x" + outputVideoInfoHeader.bmiHeader.biHeight + "], stride [" + stride + "], pixelFormat [" + pixelFormat + "].");
#endif

            Bitmap inputBitmap = new Bitmap((int)inputVideoInfoHeader.bmiHeader.biWidth, (int)inputVideoInfoHeader.bmiHeader.biHeight, pixelFormat);

            // FIX: the temporary bitmap wrapping the unmanaged sample buffer was never disposed,
            // leaking a GDI+ handle per frame. Dispose it as soon as the pixels have been copied.
            using (Bitmap outputBitmap = new Bitmap((int)outputVideoInfoHeader.bmiHeader.biWidth, (int)outputVideoInfoHeader.bmiHeader.biHeight, (int)stride, pixelFormat, sampleBuffer))
            using (Graphics g = Graphics.FromImage(inputBitmap))
            {
                g.InterpolationMode  = InterpolationMode.HighQualityBicubic;
                g.CompositingQuality = CompositingQuality.HighQuality;

                g.SmoothingMode   = SmoothingMode.HighQuality;
                g.PixelOffsetMode = PixelOffsetMode.HighQuality;

                g.DrawImage(outputBitmap, 0, 0, inputBitmap.Width, inputBitmap.Height);

#if DEBUG && DEBUG_SAMPLES
                Logger.WriteLogMessage("Copied output bitmap [" + outputBitmap.Width + "x" + outputBitmap.Height + "] to input bitmap [" + inputBitmap.Width + "x" + inputBitmap.Height + "].");
#endif
            }

            //
            // The sample is a bottom-up DIB; flip so the bitmap is screen-oriented.
            //
            inputBitmap.RotateFlip(RotateFlipType.RotateNoneFlipY);

            return(inputBitmap);
        }
Beispiel #2
0
        /// <summary>
        /// Scans the reader's profile streams for the first video stream and returns its format.
        /// </summary>
        /// <param name="mediaType">Receives the media type of the last stream examined (the video stream on success).</param>
        /// <param name="videoInfoHeader">Receives the video stream's format header on success.</param>
        /// <returns>true when a video stream was found; otherwise false.</returns>
        public bool FindVideoInfo(ref WM_MEDIA_TYPE mediaType, ref VideoInfoHeader videoInfoHeader)
        {
            bool success = false;

            IWMProfile profile = (IWMProfile)_reader;

            for (uint i = 0; i < _readerStreamCount; i++)
            {
                IWMStreamConfig stream;

                profile.GetStream(i, out stream);

                try
                {
                    WMMediaProps mediaProps = new WMMediaProps((IWMMediaProps)stream);

                    mediaType = mediaProps.MediaType;

                    if (mediaType.majortype == MediaTypes.WMMEDIATYPE_Video)
                    {
                        Logger.WriteLogMessage("Found video stream [" + i + "], format type [" + mediaType.formattype + "].");

                        videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.pbFormat, typeof(VideoInfoHeader));
                        success         = true;
                        break;
                    }
                }
                finally
                {
                    // FIX: the per-stream COM object was never released, leaking one RCW per
                    // iteration. Release it after all data has been copied out.
                    // NOTE(review): assumes WMMediaProps does not use the props interface after
                    // MediaType has been read — confirm against its implementation.
                    if (stream != null)
                    {
                        Marshal.ReleaseComObject(stream);
                    }
                }
            }

            return(success);
        }
Beispiel #3
0
        /// <summary>
        /// Opens the given WMV file, negotiates an RGB output format and primes the first frame.
        /// </summary>
        /// <param name="videoFilePath">Path of the WMV file to read.</param>
        public VeWmvLibVideoReader(string videoFilePath)
        {
            // Open the source file and pick an RGB output format for stream 0.
            wmvReader.Open(videoFilePath);
            wmvReader.FindVideoOutputFormat(0, ref mediaType, ref subtype, ref inputVideoInfoHeader);

            // Use the negotiated header for both input and output.
            outputVideoInfoHeader = inputVideoInfoHeader;

            // Prime the first frame so callers can query it immediately.
            if (currentImg == null)
            {
                getNextFrame(out currentImg);
            }

            outputFrameInterval = inputFrameInterval;
        }
Beispiel #4
0
		/// <summary>
		/// Encodes every image row of <paramref name="dr"/> into a WMV file.
		/// </summary>
		/// <param name="fileName">Path of the WMV file to create.</param>
		/// <param name="profileFileName">Path of the encoding profile to load.</param>
		/// <param name="framesPerSecond">Frame rate used to timestamp the written samples.</param>
		/// <param name="dr">Open data reader; column 1 of each row holds one encoded image.</param>
		/// <exception cref="InvalidOperationException">Thrown when no RGB24 input format can be negotiated.</exception>
		public static void SaveVideo(string fileName, string profileFileName, ulong framesPerSecond, SqlDataReader dr)
		{
			WMEncoder encoder = new WMEncoder();
			IWMEncProfile2 profile = WMEncProfile.LoadEncodingProfile(encoder, profileFileName);
			WMEncProfile.ReleaseEncoder(ref encoder);

			using(WmvWriter writer = new WmvWriter())
			{
				writer.Initialize((IWMProfile)profile.SaveToIWMProfile(), fileName);

				VideoInfoHeader viHeader = new VideoInfoHeader();
				// FIX: the result of the format negotiation was ignored; fail fast instead of
				// writing samples against an unconfigured input.
				if (!writer.FindVideoInputFormat(0, MediaSubTypes.WMMEDIASUBTYPE_RGB24, ref viHeader, false))
				{
					throw new InvalidOperationException("FindVideoInputFormat error");
				}
				writer.Start();

				int bmcount = 0;
				// FIX: fps was derived as viHeader.AvgTimePerFrame / 100000, which is not a
				// frames-per-second value (AvgTimePerFrame is a frame DURATION in 100-ns units)
				// and left the framesPerSecond parameter unused. Use the caller-supplied rate,
				// exactly as the file-list overload of SaveVideo does.
				ulong fps = framesPerSecond;

				// Reusable frame buffer matching the negotiated input dimensions.
				using (Bitmap frame = new Bitmap((int)viHeader.bmiHeader.biWidth, (int)viHeader.bmiHeader.biHeight, PixelFormat.Format24bppRgb))
				{
					while(dr.Read())
					{
						// FIX: the source bitmap was only null-ed and GC.Collect()-ed; dispose it
						// (and the Graphics) deterministically instead.
						using (Bitmap b = Image.FromStream(new MemoryStream(dr.GetValue(1) as byte[])) as Bitmap)
						using (Graphics g = Graphics.FromImage(frame))
						{
							g.InterpolationMode = InterpolationMode.HighQualityBicubic;
							g.CompositingQuality = CompositingQuality.HighQuality;
							g.SmoothingMode = SmoothingMode.HighQuality;
							g.PixelOffsetMode = PixelOffsetMode.HighQuality;
							g.DrawImage(b, 0, 0, frame.Width, frame.Height);
						}

						INSSBuffer sample = null;
						try
						{
							// Sample time in 100-ns units: frame index divided by the frame rate.
							ulong time = ((ulong)bmcount++ * TimeSpan.TicksPerSecond)/fps;
							sample = writer.GetSampleFromBitmap(frame);
							writer.Writer.WriteSample(0, time, (uint)WMT_STREAM_SAMPLE_TYPE.WM_SF_CLEANPOINT, sample);
						}
						finally
						{
							// FIX: null-guard — if GetSampleFromBitmap throws, sample is null and
							// ReleaseComObject(null) would mask the original exception.
							if (sample != null)
							{
								Marshal.ReleaseComObject(sample);
							}
						}
					}
				}
			}
		}
Beispiel #5
0
		/// <summary>
		/// Creates a WMV encoder: loads the encoding profile, binds the writer to it and
		/// negotiates an RGB24 input format sized from the profile's video stream.
		/// </summary>
		/// <param name="profileFileName">Path of the encoding profile to load.</param>
		/// <param name="framesPerSecond">Frame rate used when timestamping written samples.</param>
		/// <exception cref="InvalidOperationException">Thrown when no RGB24 input format can be negotiated.</exception>
        public WmvEncoder(string profileFileName, ulong framesPerSecond)
        {
            // FIX: removed the useless catch (Exception) { throw; } wrapper — it added nothing
            // and obscured the real control flow.
            WMEncoder encoder = new WMEncoder();
            IWMEncProfile2 profile = WMEncProfile.LoadEncodingProfile(encoder, profileFileName);
            WMEncProfile.ReleaseEncoder(ref encoder);

            writer = new WmvWriter();
            writer.SetProfile((IWMProfile)profile.SaveToIWMProfile());

            viHeader = new VideoInfoHeader();
            // FIX: throw a specific exception type instead of bare Exception (still caught by
            // any existing catch (Exception) in callers); also drops the stray ';'.
            if (!writer.FindVideoInputFormat(0, MediaSubTypes.WMMEDIASUBTYPE_RGB24, ref viHeader, false))
            {
                throw new InvalidOperationException("FindVideoInputFormat error");
            }

            // Reusable frame buffer matching the negotiated input dimensions.
            frame = new Bitmap((int)viHeader.bmiHeader.biWidth, (int)viHeader.bmiHeader.biHeight, PixelFormat.Format24bppRgb);

            fps = framesPerSecond;
        }
Beispiel #6
0
        /// <summary>
        /// Copies one decoded RGB video sample into a managed <see cref="Bitmap"/> sized to the input format.
        /// </summary>
        /// <param name="sample">Buffer holding the raw RGB pixel data of one frame.</param>
        /// <param name="videoSubType">RGB media subtype of the sample (RGB555, RGB24 or RGB32).</param>
        /// <param name="outputVideoInfoHeader">Format of the sample as produced by the reader.</param>
        /// <param name="inputVideoInfoHeader">Target format; the returned bitmap takes its dimensions.</param>
        /// <returns>A new, vertically corrected bitmap. The caller owns it and must dispose it.</returns>
        /// <exception cref="ArgumentException">Thrown when <paramref name="videoSubType"/> is not a supported RGB subtype.</exception>
        public Bitmap GetBitmapFromSample(INSSBuffer sample, Guid videoSubType, VideoInfoHeader outputVideoInfoHeader, VideoInfoHeader inputVideoInfoHeader)
        {
            IntPtr sampleBuffer;
            PixelFormat pixelFormat;
            uint length = 0;

            sample.GetBufferAndLength(out sampleBuffer, out length);

            if (videoSubType == MediaSubTypes.WMMEDIASUBTYPE_RGB32)
            {
                pixelFormat = PixelFormat.Format32bppRgb;
            }
            else if (videoSubType == MediaSubTypes.WMMEDIASUBTYPE_RGB555)
            {
                pixelFormat = PixelFormat.Format16bppRgb555;
            }
            else if (videoSubType == MediaSubTypes.WMMEDIASUBTYPE_RGB24)
            {
                pixelFormat = PixelFormat.Format24bppRgb;
            }
            else
            {
                // FIX: ArgumentException takes (message, paramName); the original call had the
                // two arguments swapped, producing a garbled exception text.
                throw new ArgumentException("Unsupported video subtype [" + videoSubType + "].", "videoSubType");
            }

#if DEBUG && DEBUG_SAMPLES
            Logger.WriteLogMessage("Grabbed sample buffer, length [" + length + "], pixelFormat [" + pixelFormat + "].");
#endif

            // Stride in bytes of one scan line of the decoded sample.
            uint stride = outputVideoInfoHeader.bmiHeader.biWidth * outputVideoInfoHeader.bmiHeader.biPlanes * outputVideoInfoHeader.bmiHeader.biBitCount / 8;

#if DEBUG && DEBUG_SAMPLES
            Logger.WriteLogMessage("Creating bitmap [" + outputVideoInfoHeader.bmiHeader.biWidth + "x" + outputVideoInfoHeader.bmiHeader.biHeight + "], stride [" + stride + "], pixelFormat [" + pixelFormat + "].");
#endif

            Bitmap inputBitmap = new Bitmap((int)inputVideoInfoHeader.bmiHeader.biWidth, (int)inputVideoInfoHeader.bmiHeader.biHeight, pixelFormat);

            // FIX: the temporary bitmap wrapping the unmanaged sample buffer was never disposed,
            // leaking a GDI+ handle per frame. Dispose it as soon as the pixels have been copied.
            using (Bitmap outputBitmap = new Bitmap((int)outputVideoInfoHeader.bmiHeader.biWidth, (int)outputVideoInfoHeader.bmiHeader.biHeight, (int)stride, pixelFormat, sampleBuffer))
            using (Graphics g = Graphics.FromImage(inputBitmap))
            {
                g.InterpolationMode = InterpolationMode.HighQualityBicubic;
                g.CompositingQuality = CompositingQuality.HighQuality;

                g.SmoothingMode = SmoothingMode.HighQuality;
                g.PixelOffsetMode = PixelOffsetMode.HighQuality;

                g.DrawImage(outputBitmap, 0, 0, inputBitmap.Width, inputBitmap.Height);

#if DEBUG && DEBUG_SAMPLES
                Logger.WriteLogMessage("Copied output bitmap [" + outputBitmap.Width + "x" + outputBitmap.Height + "] to input bitmap [" + inputBitmap.Width + "x" + inputBitmap.Height + "].");
#endif
            }

            //
            // The sample is a bottom-up DIB; flip so the bitmap is screen-oriented.
            // 
            inputBitmap.RotateFlip(RotateFlipType.RotateNoneFlipY);

            return inputBitmap;
        }
Beispiel #7
0
        /// <summary>
        /// Walks the reader's possible output formats for <paramref name="outputNum"/> and selects
        /// the first uncompressed RGB format (RGB555, RGB24 or RGB32), configuring the reader to use it.
        /// </summary>
        /// <param name="outputNum">Reader output number to configure.</param>
        /// <param name="mediaType">Receives the last media type examined (the selected one on success).</param>
        /// <param name="subtype">Receives the selected RGB subtype.</param>
        /// <param name="outputVideoInfoHeader">Receives the selected format's video info header.</param>
        public void FindVideoOutputFormat(uint outputNum, ref WM_MEDIA_TYPE mediaType, ref Guid subtype, ref VideoInfoHeader outputVideoInfoHeader)
        {
            uint bufferSize = (uint)(Marshal.SizeOf(typeof(WM_MEDIA_TYPE)) + Marshal.SizeOf(typeof(VideoInfoHeader)));
            uint formatCount;

            Logger.WriteLogMessage("Finding video output formats for reader, output [" + outputNum + "].");

            _reader.GetOutputFormatCount(outputNum, out formatCount);

            Logger.WriteLogMessage("Reader can produce " + formatCount + " possible video output formats.");

            IntPtr buffer = Marshal.AllocCoTaskMem((int)bufferSize);

            try
            {
                for (uint j = 0; j < formatCount; j++)
                {
                    uint size = 0;
                    IWMOutputMediaProps readerOutputProps;

                    _reader.GetOutputFormat(outputNum, j, out readerOutputProps);

                    try
                    {
                        // First call with a null buffer only queries the required size.
                        readerOutputProps.GetMediaType(IntPtr.Zero, ref size);

                        if (size > bufferSize)
                        {
                            bufferSize = size;
                            Marshal.FreeCoTaskMem(buffer);
                            buffer = Marshal.AllocCoTaskMem((int)bufferSize);
                        }

                        readerOutputProps.GetMediaType(buffer, ref size);

                        mediaType = (WM_MEDIA_TYPE)Marshal.PtrToStructure(buffer, typeof(WM_MEDIA_TYPE));

                        if (mediaType.formattype == FormatTypes.WMFORMAT_VideoInfo)
                        {
                            Logger.WriteLogMessage("Walking output format [" + j + "], format type [" + GetFormatTypeName(mediaType.formattype) + "], subtype [" + GetSubTypeName(mediaType.subtype) + "], sample size [" + mediaType.lSampleSize + "].");

                            //
                            // NOTE: only look for RGB subtypes
                            //
                            if ((mediaType.subtype == MediaSubTypes.WMMEDIASUBTYPE_RGB555) ||
                                 (mediaType.subtype == MediaSubTypes.WMMEDIASUBTYPE_RGB24) ||
                                 (mediaType.subtype == MediaSubTypes.WMMEDIASUBTYPE_RGB32))
                            {
                                Logger.WriteLogMessage("- Found RGB555, RGB24 or RGB32 sub type, grabbing VideoInfoHeader.");

                                subtype = mediaType.subtype;

                                outputVideoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.pbFormat, typeof(VideoInfoHeader));

                                Logger.WriteLogMessage("- width [" + outputVideoInfoHeader.bmiHeader.biWidth + "], height [" + outputVideoInfoHeader.bmiHeader.biHeight + "], dwBitrate [" + outputVideoInfoHeader.dwBitRate + "], dwBitErrorRate [" + outputVideoInfoHeader.dwBitErrorRate + "].");

                                // The reader keeps its own reference once the props are applied,
                                // so releasing ours below is safe.
                                _reader.SetOutputProps(outputNum, readerOutputProps);
                                break;
                            }
                        }
                    }
                    finally
                    {
                        // FIX: the original released only the last iteration's props object —
                        // leaking one RCW per examined format — and did so after the try/finally,
                        // where it threw ArgumentNullException when formatCount was zero.
                        if (readerOutputProps != null)
                        {
                            Marshal.ReleaseComObject(readerOutputProps);
                        }
                    }
                }
            }
            finally
            {
                Marshal.FreeCoTaskMem(buffer);
            }
        }
Beispiel #8
0
        /// <summary>
        /// Scans the reader's profile streams for the first video stream and returns its format.
        /// </summary>
        /// <param name="mediaType">Receives the media type of the last stream examined (the video stream on success).</param>
        /// <param name="videoInfoHeader">Receives the video stream's format header on success.</param>
        /// <returns>true when a video stream was found; otherwise false.</returns>
        public bool FindVideoInfo(ref WM_MEDIA_TYPE mediaType, ref VideoInfoHeader videoInfoHeader)
        {
            bool success = false;

            IWMProfile profile = (IWMProfile)_reader;

            for (uint i = 0; i < _readerStreamCount; i++)
            {
                IWMStreamConfig stream;

                profile.GetStream(i, out stream);

                try
                {
                    WMMediaProps mediaProps = new WMMediaProps((IWMMediaProps)stream);

                    mediaType = mediaProps.MediaType;

                    if (mediaType.majortype == MediaTypes.WMMEDIATYPE_Video)
                    {
                        Logger.WriteLogMessage("Found video stream [" + i + "], format type [" + mediaType.formattype + "].");

                        videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.pbFormat, typeof(VideoInfoHeader));
                        success = true;
                        break;
                    }
                }
                finally
                {
                    // FIX: the per-stream COM object was never released, leaking one RCW per
                    // iteration. Release it after all data has been copied out.
                    // NOTE(review): assumes WMMediaProps does not use the props interface after
                    // MediaType has been read — confirm against its implementation.
                    if (stream != null)
                    {
                        Marshal.ReleaseComObject(stream);
                    }
                }
            }

            return success;
        }
Beispiel #9
0
        /// <summary>
        /// Walks the writer's possible input formats for <paramref name="inputNum"/> and applies the
        /// first video format matching <paramref name="subtype"/>.
        /// </summary>
        /// <param name="inputNum">Writer input number to configure.</param>
        /// <param name="subtype">Desired media subtype (e.g. an RGB subtype).</param>
        /// <param name="inputVideoInfoHeader">Receives the format header of the last video format examined (the selected one on success).</param>
        /// <param name="enableCompressedSamples">When true, clears the input props so pre-compressed samples can be written.</param>
        /// <returns>true when a matching format was found and applied; otherwise false.</returns>
        public bool FindVideoInputFormat(uint inputNum, Guid subtype, ref VideoInfoHeader inputVideoInfoHeader, bool enableCompressedSamples)
        {
            bool success = false;
            WM_MEDIA_TYPE      mediaType;
            uint bufferSize = (uint)(Marshal.SizeOf(typeof(WM_MEDIA_TYPE)) + Marshal.SizeOf(typeof(VideoInfoHeader)));
            uint formatCount;

            Logger.WriteLogMessage("Finding video input formats for writer, input [" + inputNum + "].");

            _writer.GetInputFormatCount(inputNum, out formatCount);

            Logger.WriteLogMessage("Video writer can consume " + formatCount + " possible video input formats.");

            IntPtr buffer = Marshal.AllocCoTaskMem((int)bufferSize);

            // FIX: removed the two useless catch (Exception) { throw; } wrappers — they changed
            // nothing and obscured the real cleanup logic in the finally blocks.
            try
            {
                for (uint j = 0; j < formatCount; j++)
                {
                    uint size = 0;
                    IWMInputMediaProps writerInputProps = null;

                    try
                    {
                        _writer.GetInputFormat(inputNum, j, out writerInputProps);

                        // First call with a null buffer only queries the required size.
                        writerInputProps.GetMediaType(IntPtr.Zero, ref size);

                        if (size > bufferSize)
                        {
                            bufferSize = size;
                            Marshal.FreeCoTaskMem(buffer);
                            buffer = Marshal.AllocCoTaskMem((int)bufferSize);
                        }

                        writerInputProps.GetMediaType(buffer, ref size);

                        mediaType = (WM_MEDIA_TYPE)Marshal.PtrToStructure(buffer, typeof(WM_MEDIA_TYPE));

                        if (mediaType.formattype == FormatTypes.WMFORMAT_VideoInfo)
                        {
                            Logger.WriteLogMessage("Found video writer input format [" + j + "], format type [" + GetFormatTypeName(mediaType.formattype) + "], subtype [" + GetSubTypeName(mediaType.subtype) + "], sample size [" + mediaType.lSampleSize + "].");

                            inputVideoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.pbFormat, typeof(VideoInfoHeader));

                            Logger.WriteLogMessage("Found input video stream, width [" + inputVideoInfoHeader.bmiHeader.biWidth + "], height [" + inputVideoInfoHeader.bmiHeader.biHeight + "], bit count [" + inputVideoInfoHeader.bmiHeader.biBitCount + "], image size [" + inputVideoInfoHeader.bmiHeader.biSizeImage + "].");

                            if (mediaType.subtype == subtype)
                            {
                                writerInputProps.SetMediaType(ref mediaType);

                                if (!enableCompressedSamples)
                                {
                                    _writer.SetInputProps(inputNum, writerInputProps);
                                }
                                else
                                {
                                    // Null props switch the input to pre-compressed samples.
                                    _writer.SetInputProps(inputNum, null);
                                }

                                success = true;
                                break;
                            }
                        }
                    }
                    finally
                    {
                        // FIX: null-guard — if GetInputFormat throws, writerInputProps is null and
                        // ReleaseComObject(null) would mask the original exception with
                        // ArgumentNullException.
                        if (writerInputProps != null)
                        {
                            Marshal.ReleaseComObject(writerInputProps);
                        }
                    }
                }
            }
            finally
            {
                Marshal.FreeCoTaskMem(buffer);
            }

            return(success);
        }
Beispiel #10
0
        /// <summary>
        /// Walks the writer's possible input formats for <paramref name="inputNum"/> and applies the
        /// first video format matching <paramref name="subtype"/>.
        /// </summary>
        /// <param name="inputNum">Writer input number to configure.</param>
        /// <param name="subtype">Desired media subtype (e.g. an RGB subtype).</param>
        /// <param name="inputVideoInfoHeader">Receives the format header of the last video format examined (the selected one on success).</param>
        /// <param name="enableCompressedSamples">When true, clears the input props so pre-compressed samples can be written.</param>
        /// <returns>true when a matching format was found and applied; otherwise false.</returns>
        public bool FindVideoInputFormat(uint inputNum, Guid subtype, ref VideoInfoHeader inputVideoInfoHeader, bool enableCompressedSamples)
        {
            bool success = false;
            WM_MEDIA_TYPE mediaType;
            uint bufferSize = (uint)(Marshal.SizeOf(typeof(WM_MEDIA_TYPE)) + Marshal.SizeOf(typeof(VideoInfoHeader)));
            uint formatCount;

            Logger.WriteLogMessage("Finding video input formats for writer, input [" + inputNum + "].");

            _writer.GetInputFormatCount(inputNum, out formatCount);

            Logger.WriteLogMessage("Video writer can consume " + formatCount + " possible video input formats.");

            IntPtr buffer = Marshal.AllocCoTaskMem((int)bufferSize);

            // FIX: removed the two useless catch (Exception) { throw; } wrappers — they changed
            // nothing and obscured the real cleanup logic in the finally blocks.
            try
            {
                for (uint j = 0; j < formatCount; j++)
                {
                    uint size = 0;
                    IWMInputMediaProps writerInputProps = null;

                    try
                    {
                        _writer.GetInputFormat(inputNum, j, out writerInputProps);

                        // First call with a null buffer only queries the required size.
                        writerInputProps.GetMediaType(IntPtr.Zero, ref size);

                        if (size > bufferSize)
                        {
                            bufferSize = size;
                            Marshal.FreeCoTaskMem(buffer);
                            buffer = Marshal.AllocCoTaskMem((int)bufferSize);
                        }

                        writerInputProps.GetMediaType(buffer, ref size);

                        mediaType = (WM_MEDIA_TYPE)Marshal.PtrToStructure(buffer, typeof(WM_MEDIA_TYPE));

                        if (mediaType.formattype == FormatTypes.WMFORMAT_VideoInfo)
                        {
                            Logger.WriteLogMessage("Found video writer input format [" + j + "], format type [" + GetFormatTypeName(mediaType.formattype) + "], subtype [" + GetSubTypeName(mediaType.subtype) + "], sample size [" + mediaType.lSampleSize + "].");

                            inputVideoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.pbFormat, typeof(VideoInfoHeader));

                            Logger.WriteLogMessage("Found input video stream, width [" + inputVideoInfoHeader.bmiHeader.biWidth + "], height [" + inputVideoInfoHeader.bmiHeader.biHeight + "], bit count [" + inputVideoInfoHeader.bmiHeader.biBitCount + "], image size [" + inputVideoInfoHeader.bmiHeader.biSizeImage + "].");

                            if (mediaType.subtype == subtype)
                            {
                                writerInputProps.SetMediaType(ref mediaType);

                                if (!enableCompressedSamples)
                                {
                                    _writer.SetInputProps(inputNum, writerInputProps);
                                }
                                else
                                {
                                    // Null props switch the input to pre-compressed samples.
                                    _writer.SetInputProps(inputNum, null);
                                }

                                success = true;
                                break;
                            }
                        }
                    }
                    finally
                    {
                        // FIX: null-guard — if GetInputFormat throws, writerInputProps is null and
                        // ReleaseComObject(null) would mask the original exception with
                        // ArgumentNullException.
                        if (writerInputProps != null)
                        {
                            Marshal.ReleaseComObject(writerInputProps);
                        }
                    }
                }
            }
            finally
            {
                Marshal.FreeCoTaskMem(buffer);
            }

            return success;
        }
Beispiel #11
0
        /// <summary>
        /// Walks the reader's possible output formats for <paramref name="outputNum"/> and selects
        /// the first uncompressed RGB format (RGB555, RGB24 or RGB32), configuring the reader to use it.
        /// </summary>
        /// <param name="outputNum">Reader output number to configure.</param>
        /// <param name="mediaType">Receives the last media type examined (the selected one on success).</param>
        /// <param name="subtype">Receives the selected RGB subtype.</param>
        /// <param name="outputVideoInfoHeader">Receives the selected format's video info header.</param>
        public void FindVideoOutputFormat(uint outputNum, ref WM_MEDIA_TYPE mediaType, ref Guid subtype, ref VideoInfoHeader outputVideoInfoHeader)
        {
            uint bufferSize = (uint)(Marshal.SizeOf(typeof(WM_MEDIA_TYPE)) + Marshal.SizeOf(typeof(VideoInfoHeader)));
            uint formatCount;

            Logger.WriteLogMessage("Finding video output formats for reader, output [" + outputNum + "].");

            _reader.GetOutputFormatCount(outputNum, out formatCount);

            Logger.WriteLogMessage("Reader can produce " + formatCount + " possible video output formats.");

            IntPtr buffer = Marshal.AllocCoTaskMem((int)bufferSize);

            try
            {
                for (uint j = 0; j < formatCount; j++)
                {
                    uint size = 0;
                    IWMOutputMediaProps readerOutputProps;

                    _reader.GetOutputFormat(outputNum, j, out readerOutputProps);

                    try
                    {
                        // First call with a null buffer only queries the required size.
                        readerOutputProps.GetMediaType(IntPtr.Zero, ref size);

                        if (size > bufferSize)
                        {
                            bufferSize = size;
                            Marshal.FreeCoTaskMem(buffer);
                            buffer = Marshal.AllocCoTaskMem((int)bufferSize);
                        }

                        readerOutputProps.GetMediaType(buffer, ref size);

                        mediaType = (WM_MEDIA_TYPE)Marshal.PtrToStructure(buffer, typeof(WM_MEDIA_TYPE));

                        if (mediaType.formattype == FormatTypes.WMFORMAT_VideoInfo)
                        {
                            Logger.WriteLogMessage("Walking output format [" + j + "], format type [" + GetFormatTypeName(mediaType.formattype) + "], subtype [" + GetSubTypeName(mediaType.subtype) + "], sample size [" + mediaType.lSampleSize + "].");

                            //
                            // NOTE: only look for RGB subtypes
                            //
                            if ((mediaType.subtype == MediaSubTypes.WMMEDIASUBTYPE_RGB555) ||
                                (mediaType.subtype == MediaSubTypes.WMMEDIASUBTYPE_RGB24) ||
                                (mediaType.subtype == MediaSubTypes.WMMEDIASUBTYPE_RGB32))
                            {
                                Logger.WriteLogMessage("- Found RGB555, RGB24 or RGB32 sub type, grabbing VideoInfoHeader.");

                                subtype = mediaType.subtype;

                                outputVideoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.pbFormat, typeof(VideoInfoHeader));

                                Logger.WriteLogMessage("- width [" + outputVideoInfoHeader.bmiHeader.biWidth + "], height [" + outputVideoInfoHeader.bmiHeader.biHeight + "], dwBitrate [" + outputVideoInfoHeader.dwBitRate + "], dwBitErrorRate [" + outputVideoInfoHeader.dwBitErrorRate + "].");

                                // The reader keeps its own reference once the props are applied,
                                // so releasing ours below is safe.
                                _reader.SetOutputProps(outputNum, readerOutputProps);
                                break;
                            }
                        }
                    }
                    finally
                    {
                        // FIX: the original released only the last iteration's props object —
                        // leaking one RCW per examined format — and did so after the try/finally,
                        // where it threw ArgumentNullException when formatCount was zero.
                        if (readerOutputProps != null)
                        {
                            Marshal.ReleaseComObject(readerOutputProps);
                        }
                    }
                }
            }
            finally
            {
                Marshal.FreeCoTaskMem(buffer);
            }
        }
Beispiel #12
0
		/// <summary>
		/// Encodes the given image files into a WMV file.
		/// </summary>
		/// <param name="fileName">Path of the WMV file to create.</param>
		/// <param name="profileFileName">Path of the encoding profile to load.</param>
		/// <param name="framesPerSecond">Frame rate used to timestamp the written samples.</param>
		/// <param name="files">Image file paths; null entries are skipped.</param>
		/// <exception cref="InvalidOperationException">Thrown when no RGB24 input format can be negotiated.</exception>
		public static void SaveVideo(string fileName, string profileFileName, ulong framesPerSecond, string [] files)
		{
			WMEncoder encoder = new WMEncoder();
			IWMEncProfile2 profile = WMEncProfile.LoadEncodingProfile(encoder, profileFileName);
			WMEncProfile.ReleaseEncoder(ref encoder);

			using(WmvWriter writer = new WmvWriter())
			{
				writer.Initialize((IWMProfile)profile.SaveToIWMProfile(), fileName);

				VideoInfoHeader viHeader = new VideoInfoHeader();
				// FIX: the result of the format negotiation was ignored; fail fast instead of
				// writing samples against an unconfigured input.
				if (!writer.FindVideoInputFormat(0, MediaSubTypes.WMMEDIASUBTYPE_RGB24, ref viHeader, false))
				{
					throw new InvalidOperationException("FindVideoInputFormat error");
				}
				writer.Start();

				int bmcount = 0;
				ulong fps = framesPerSecond;

				// Reusable frame buffer matching the negotiated input dimensions.
				using (Bitmap frame = new Bitmap((int)viHeader.bmiHeader.biWidth, (int)viHeader.bmiHeader.biHeight, PixelFormat.Format24bppRgb))
				{
					for(int i = 0; i < files.Length; i++)
					{
						if(files[i] == null)
						{
							continue;
						}

						// FIX: the source bitmap was only null-ed and GC.Collect()-ed; dispose it
						// (and the Graphics) deterministically instead.
						using (Bitmap bmp = new Bitmap(files[i]))
						using (Graphics g = Graphics.FromImage(frame))
						{
							g.InterpolationMode = InterpolationMode.HighQualityBicubic;
							g.CompositingQuality = CompositingQuality.HighQuality;
							g.SmoothingMode = SmoothingMode.HighQuality;
							g.PixelOffsetMode = PixelOffsetMode.HighQuality;
							g.DrawImage(bmp, 0, 0, frame.Width, frame.Height);
							Console.Out.Write(".");
						}

						// Now render to the movie
						INSSBuffer sample = null;
						try
						{
							// Sample time in 100-ns units: frame index divided by the frame rate.
							ulong time = ((ulong)bmcount++ * TimeSpan.TicksPerSecond)/fps;
							sample = writer.GetSampleFromBitmap(frame);
							writer.Writer.WriteSample(0, time, (uint)WMT_STREAM_SAMPLE_TYPE.WM_SF_CLEANPOINT, sample);
						}
						finally
						{
							// FIX: null-guard — if GetSampleFromBitmap throws, sample is null and
							// ReleaseComObject(null) would mask the original exception.
							if (sample != null)
							{
								Marshal.ReleaseComObject(sample);
							}
						}
					}
				}
			}
		}