Example #1
        public override void EncodeToFile(Stream input, string outFileName, CodecData data)
        {
            var fiBitMap = _encode(input, data);

            FI.FreeImage.Save((FI.FREE_IMAGE_FORMAT) this._freeImageType, fiBitMap, outFileName, FI.FREE_IMAGE_SAVE_FLAGS.DEFAULT);
            FI.FreeImage.Unload(fiBitMap);
        }
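A hypothetical usage sketch for the FreeImage-backed codec above (not part of the original example): the lowercase ImageData fields follow the _encode helper shown later in this listing, while the codec instance and the raw pixel buffer are assumptions.

        // Usage sketch only: describe the raw pixels with an ImageData, then hand the
        // pixel stream to EncodeToFile, which saves it via FreeImage.
        var imgData = new ImageData();
        imgData.width  = 256;
        imgData.height = 256;
        imgData.depth  = 1;
        imgData.format = PixelFormat.A8R8G8B8;

        var pixelBytes = new byte[256 * 256 * 4];             // placeholder 32-bit pixel data
        using (var raw = new MemoryStream(pixelBytes))
        {
            codec.EncodeToFile(raw, "output.png", imgData);   // codec: an instance of the class above
        }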
        void LogCodecState(string Name)
        {
            if (KnownCodecs.ContainsKey(Name))
            {
                CodecData Data = KnownCodecs[Name] as CodecData;
                string    Out  = "";
                switch (Data.State)
                {
                case CodecState.Recommended:
                    Out += "Recommended";
                    break;

                case CodecState.Known:
                    Out += "Known";
                    break;

                case CodecState.Unrecommended:
                    Out += "Unrecommended";
                    break;
                }
                Out += " ";
                switch (Data.Type)
                {
                case CodecType.Source:
                    Out += "Source";
                    break;

                case CodecType.VideoDecoder:
                    Out += "Video Decoder";
                    break;

                case CodecType.AudioDecoder:
                    Out += "Audio Decoder";
                    break;

                case CodecType.VideoRenderer:
                    Out += "Video Renderer";
                    break;

                case CodecType.AudioRenderer:
                    Out += "Audio Renderer";
                    break;

                case CodecType.VideoProcessor:
                    Out += "Video Processor";
                    break;

                case CodecType.AudioProcessor:
                    Out += "Audio Processor";
                    break;
                }
                Out += " used: " + Name;
                EventConsumer.FireLogStepEvent(Out);
            }
            else
            {
                EventConsumer.FireLogStepEvent("Unknown filter used [" + Name + "], please report to tech support");
            }
        }
        public void AdjustAudioProfileTest()
        {
            //Arrange
            const string myCodec1       = "myCodec1";
            const string myCodec2       = "myCodec2";
            const string anotherProfile = "anotherProfile";
            const string firstProfile   = "firstProfile";

            var audioCodec = new CodecData(myCodec1, null, firstProfile);

            var supportedAudioCodecs = new List <CodecData>()
            {
                audioCodec,
                new CodecData(myCodec2, null)
            };

            var serviceConfigurator = new MetadataServiceConfigurator(null, null, null, audioCodec, supportedAudioCodecs);

            var metadata1 = new VideoMetadata()
            {
                AudioCodec   = myCodec1,
                AudioProfile = firstProfile
            };
            var metadata2 = new VideoMetadata()
            {
                AudioCodec   = myCodec1,
                AudioProfile = anotherProfile
            };
            var metadata3 = new VideoMetadata()
            {
                AudioCodec   = myCodec2,
                AudioProfile = anotherProfile
            };
            var metadataWithoutProfile = new VideoMetadata()
            {
                AudioCodec   = myCodec1,
                AudioProfile = null
            };

            var metadataService1 = new MetadataService(serviceConfigurator, metadata1);
            var metadataService2 = new MetadataService(serviceConfigurator, metadata2);
            var metadataService3 = new MetadataService(serviceConfigurator, metadata3);
            var metadataService4 = new MetadataService(serviceConfigurator, metadataWithoutProfile);

            //Act
            var profile1 = metadataService1.AdjustAudioProfile();
            var profile2 = metadataService2.AdjustAudioProfile();
            var profile3 = metadataService3.AdjustAudioProfile();
            var profile4 = metadataService4.AdjustAudioProfile();

            //Assert
            Assert.AreEqual(firstProfile, profile1);
            Assert.AreEqual(firstProfile, profile2);
            Assert.AreEqual(anotherProfile, profile3);
            Assert.AreEqual(firstProfile, profile4);
        }
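A minimal sketch of the rule these four assertions imply (inferred from the test only, not taken from MetadataService; assumes System.Linq): a source profile survives unless it is missing, or unless the source codec is the configured audio codec and the profile is not one that codec lists; in both of those cases the configured codec's default profile wins.

        // Sketch only; the real AdjustAudioProfile takes no arguments and reads its own state.
        static string AdjustAudioProfileSketch(
            string sourceCodec, string sourceProfile,
            string configuredCodec, string defaultProfile, IEnumerable<string> configuredProfiles)
        {
            if (sourceProfile == null)
                return defaultProfile;                                    // metadataWithoutProfile

            if (sourceCodec == configuredCodec && !configuredProfiles.Contains(sourceProfile))
                return defaultProfile;                                    // metadata2

            return sourceProfile;                                         // metadata1, metadata3
        }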
Example #4
        public static MetadataServiceConfigurator CreateMp4MetadataServiceConfigurator()
        {
            const string container       = "MPEG-4";
            const string ffmpegContainer = "mp4";

            var videoCodec = new CodecData("AVC", "libx264", "Main", "Baseline");

            var audioCodec = new CodecData("MPEG Audio", "libmp3lame", "Layer 3");

            return(new MetadataServiceConfigurator(container, ffmpegContainer, videoCodec, audioCodec, new CodecData("AAC", null)));
        }
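A hedged usage sketch for the factory above (the metadata values are illustrative; only members exercised elsewhere in this listing are called, and the commented results are assumptions):

        var configurator = CreateMp4MetadataServiceConfigurator();
        var metadata = new VideoMetadata()
        {
            VideoCodec   = "AVC",
            VideoProfile = "Baseline",
            AudioCodec   = "AAC"
        };
        var service = new MetadataService(configurator, metadata);

        var container    = service.AdjustContainer();       // presumably the configured MPEG-4/mp4 pair
        var videoCodec   = service.AdjustVideoCodec();       // presumably "AVC"
        var videoProfile = service.AdjustVideoProfile();     // "Baseline" is listed, so it should survive
        var audioCodec   = service.AdjustAudioCodec();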
Example #5
        public static MetadataServiceConfigurator CreateWebMMetadataServiceConfigurator()
        {
            const string container       = "WebM";
            const string ffmpegContainer = "webm";

            var videoCodec = new CodecData("VP8", "libvpx");

            var audioCodec = new CodecData("Vorbis", "libvorbis");

            return(new MetadataServiceConfigurator(container, ffmpegContainer, videoCodec, audioCodec));
        }
Example #6
        public static MetadataServiceConfigurator CreateMp4MetadataServiceConfigurator()
        {
            const string container = "MPEG-4";
            const string ffmpegContainer = "mp4";

            var videoCodec = new CodecData("AVC", "libx264", "Main", "Baseline");

            var audioCodec = new CodecData("MPEG Audio", "libmp3lame", "Layer 3");
            
            return new MetadataServiceConfigurator(container, ffmpegContainer, videoCodec, audioCodec, new CodecData("AAC", null));
        }
Example #7
        public void Mp4EncodeTest()
        {
            Directory.CreateDirectory(_destinationPath);
            string[] filePathes = Directory.GetFiles(_sourcePath);
            var      videoCodec = new CodecData(FfmpegConstant.AvcCodec, FfmpegConstant.AvcCodecLib, FfmpegConstant.AvcMainProfile, FfmpegConstant.AvcMainProfile, FfmpegConstant.AvcBaselineProfile);
            var      audioCodec = new CodecData(FfmpegConstant.AacCodec, FfmpegConstant.AacCodecLib);

            var serviceConfigurator = new MetadataServiceConfigurator(FfmpegConstant.Mp4Container, FfmpegConstant.Mp4FfmpegContainer, videoCodec, audioCodec);

            StartEncode(serviceConfigurator, filePathes);
        }
Example #8
        public void TheoraEncodeTest()
        {
            Directory.CreateDirectory(_destinationPath);
            string[] filePathes = Directory.GetFiles(_sourcePath);
            var videoCodec = new CodecData(FfmpegConstant.TheoraCodec, FfmpegConstant.TheoraCodecLib);
            var audioCodec = new CodecData(FfmpegConstant.VorbisCodec, FfmpegConstant.VorbisCodecLib);

            var serviceConfigurator = new MetadataServiceConfigurator(FfmpegConstant.OggContainer, FfmpegConstant.OggFfmpegContainer, videoCodec, audioCodec);

            StartEncode(serviceConfigurator, filePathes);
        }
Example #9
        public void Mp4EncodeTest()
        {
            Directory.CreateDirectory(_destinationPath);
            string[] filePathes = Directory.GetFiles(_sourcePath);
            var videoCodec = new CodecData(FfmpegConstant.AvcCodec, FfmpegConstant.AvcCodecLib, FfmpegConstant.AvcMainProfile, FfmpegConstant.AvcMainProfile, FfmpegConstant.AvcBaselineProfile);
            var audioCodec = new CodecData(FfmpegConstant.AacCodec, FfmpegConstant.AacCodecLib);

            var serviceConfigurator = new MetadataServiceConfigurator(FfmpegConstant.Mp4Container, FfmpegConstant.Mp4FfmpegContainer, videoCodec, audioCodec);

            StartEncode(serviceConfigurator, filePathes);
        }
Example #10
        public void TheoraEncodeTest()
        {
            Directory.CreateDirectory(_destinationPath);
            string[] filePathes = Directory.GetFiles(_sourcePath);
            var      videoCodec = new CodecData(FfmpegConstant.TheoraCodec, FfmpegConstant.TheoraCodecLib);
            var      audioCodec = new CodecData(FfmpegConstant.VorbisCodec, FfmpegConstant.VorbisCodecLib);

            var serviceConfigurator = new MetadataServiceConfigurator(FfmpegConstant.OggContainer, FfmpegConstant.OggFfmpegContainer, videoCodec, audioCodec);

            StartEncode(serviceConfigurator, filePathes);
        }
Example #11
        public static MetadataServiceConfigurator CreateWebMMetadataServiceConfigurator()
        {
            const string container = "WebM";
            const string ffmpegContainer = "webm";

            var videoCodec = new CodecData("VP8", "libvpx");

            var audioCodec = new CodecData("Vorbis", "libvorbis");
            
            return new MetadataServiceConfigurator(container, ffmpegContainer, videoCodec, audioCodec);
        }
        public void AdjustAudioCodecTest()
        {
            //Arrange
            const string myCodec1     = "myCodec1";
            const string myCodec2     = "myCodec2";
            const string anotherCodec = "anotherCodec";

            var audioCodec = new CodecData(myCodec1, null);

            var supportedAudioCodecs = new List <CodecData>()
            {
                audioCodec,
                new CodecData(myCodec2, null)
            };

            var serviceConfigurator = new MetadataServiceConfigurator(null, null, null, audioCodec, supportedAudioCodecs);

            var metadata1 = new VideoMetadata()
            {
                AudioCodec = myCodec1
            };
            var metadata2 = new VideoMetadata()
            {
                AudioCodec = myCodec2
            };
            var metadata3 = new VideoMetadata()
            {
                AudioCodec = anotherCodec
            };
            var metadataWithoutCodec = new VideoMetadata()
            {
                AudioCodec = null
            };

            var metadataService1 = new MetadataService(serviceConfigurator, metadata1);
            var metadataService2 = new MetadataService(serviceConfigurator, metadata2);
            var metadataService3 = new MetadataService(serviceConfigurator, metadata3);
            var metadataService4 = new MetadataService(serviceConfigurator, metadataWithoutCodec);

            //Act
            var codec1 = metadataService1.AdjustAudioCodec();
            var codec2 = metadataService2.AdjustAudioCodec();
            var codec3 = metadataService3.AdjustAudioCodec();
            var codec4 = metadataService4.AdjustAudioCodec();

            //Assert
            Assert.AreEqual(myCodec1, codec1);
            Assert.AreEqual(myCodec2, codec2);
            Assert.AreEqual(myCodec1, codec3);
            Assert.AreEqual(null, codec4);
        }
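What the four assertions above pin down, expressed as a sketch (an inference from the test, not the project's implementation; assumes System.Linq): a source codec found in the supported list is kept, an unsupported one falls back to the configured default audio codec, and missing audio stays missing.

        // Sketch only; the real AdjustAudioCodec takes no arguments and reads its own state.
        static string AdjustAudioCodecSketch(
            string sourceCodec, string defaultCodec, IEnumerable<CodecData> supportedCodecs)
        {
            if (sourceCodec == null)
                return null;                                     // metadataWithoutCodec

            return supportedCodecs.Any(c => c.Codec == sourceCodec)
                ? sourceCodec                                    // metadata1, metadata2
                : defaultCodec;                                  // metadata3
        }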
Example #13
        public void CreateScreenshotTest()
        {
            Directory.CreateDirectory(DestinationPath);
            var filePathes = Directory.GetFiles(SourcePath);
            var videoCodec = new CodecData("VP8", "libvpx");
            var audioCodec = new CodecData("Vorbis", "libvorbis");
            var supportedAudioCodecs = new List<CodecData>()
                                           {
                                               audioCodec
                                           };
            var serviceConfigurator = new MetadataServiceConfigurator("WebM", "webm", videoCodec, audioCodec, supportedAudioCodecs);

            StartScreenshot(serviceConfigurator, filePathes);
        }
Example #14
        public void CreateCodecDataWithoutProfileTest()
        {
            //Arrange
            const string videoCodec = "videoCodec";
            const string libName    = "libName";

            //Act
            var codecData = new CodecData(videoCodec, libName);

            //Assert
            Assert.AreEqual(videoCodec, codecData.Codec);
            Assert.AreEqual(libName, codecData.LibName);
            Assert.IsTrue(codecData.Profiles.Any(p => p == null));
        }
Example #15
        public void CreateScreenshotTest()
        {
            Directory.CreateDirectory(DestinationPath);
            var filePathes           = Directory.GetFiles(SourcePath);
            var videoCodec           = new CodecData("VP8", "libvpx");
            var audioCodec           = new CodecData("Vorbis", "libvorbis");
            var supportedAudioCodecs = new List <CodecData>()
            {
                audioCodec
            };
            var serviceConfigurator = new MetadataServiceConfigurator("WebM", "webm", videoCodec, audioCodec, supportedAudioCodecs);

            StartScreenshot(serviceConfigurator, filePathes);
        }
        public void AdjustVideoProfileTest()
        {
            //Arrange
            const string myCodec        = "myCodec";
            const string anotherProfile = "anotherProfile";
            const string firstProfile   = "firstProfile";
            const string secondProfile  = "secondProfile";

            var videoCodec = new CodecData(myCodec, null, firstProfile, secondProfile);

            var serviceConfigurator = new MetadataServiceConfigurator(null, null, videoCodec, null, null);

            var metadata1 = new VideoMetadata()
            {
                VideoCodec   = myCodec,
                VideoProfile = firstProfile
            };
            var metadata2 = new VideoMetadata()
            {
                VideoCodec   = myCodec,
                VideoProfile = secondProfile
            };
            var metadata3 = new VideoMetadata()
            {
                VideoCodec   = myCodec,
                VideoProfile = anotherProfile
            };
            var metadataWithoutCodec = new VideoMetadata()
            {
                VideoCodec   = myCodec,
                VideoProfile = null
            };

            var metadataService1 = new MetadataService(serviceConfigurator, metadata1);
            var metadataService2 = new MetadataService(serviceConfigurator, metadata2);
            var metadataService3 = new MetadataService(serviceConfigurator, metadata3);
            var metadataService4 = new MetadataService(serviceConfigurator, metadataWithoutCodec);

            //Act
            var profile1 = metadataService1.AdjustVideoProfile();
            var profile2 = metadataService2.AdjustVideoProfile();
            var profile3 = metadataService3.AdjustVideoProfile();
            var profile4 = metadataService4.AdjustVideoProfile();

            //Assert
            Assert.AreEqual(firstProfile, profile1);
            Assert.AreEqual(secondProfile, profile2);
            Assert.AreEqual(firstProfile, profile3);
            Assert.AreEqual(firstProfile, profile4);
        }
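As with the audio profile, a sketch of the rule these assertions imply (inferred from the test only; assumes System.Linq): a source profile listed for the configured video codec is kept, and anything else, including a missing profile, maps to that codec's DefaultProfile.

        // Sketch only; Profiles and DefaultProfile are the CodecData members shown in the
        // constructor tests elsewhere in this listing.
        static string AdjustVideoProfileSketch(string sourceProfile, CodecData configuredVideoCodec)
        {
            return sourceProfile != null && configuredVideoCodec.Profiles.Contains(sourceProfile)
                ? sourceProfile                                  // a listed profile survives
                : configuredVideoCodec.DefaultProfile;           // otherwise the codec default
        }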
Example #17
        public void CreateCodecDataWithoutProfileTest()
        {
            //Arrange
            const string videoCodec = "videoCodec";
            const string libName = "libName";
            
            //Act
            var codecData = new CodecData(videoCodec, libName);

            //Assert
            Assert.AreEqual(videoCodec, codecData.Codec);
            Assert.AreEqual(libName, codecData.LibName);
            Assert.IsTrue(codecData.Profiles.Any(p => p == null));
        }
Example #18
        public void Mp4EncodeTest()
        {
            Directory.CreateDirectory(DestinationPath);
            var filePathes = Directory.GetFiles(SourcePath);
            var videoCodec = new CodecData("AVC", "libx264", "Main", "Main", "Baseline");
            var audioCodec = new CodecData("MPEG Audio", "libmp3lame", "Layer 3", "Layer 3");
            var supportedAudioCodecs = new List<CodecData>()
                                           {
                                               audioCodec,
                                               new CodecData("AAC", null)
                                           };
            var serviceConfigurator = new MetadataServiceConfigurator("MPEG-4", "mp4", videoCodec, audioCodec, supportedAudioCodecs);

            StartEncode(serviceConfigurator, filePathes);
        }
Example #19
        public void Mp4EncodeTest()
        {
            Directory.CreateDirectory(DestinationPath);
            var filePathes           = Directory.GetFiles(SourcePath);
            var videoCodec           = new CodecData("AVC", "libx264", "Main", "Main", "Baseline");
            var audioCodec           = new CodecData("MPEG Audio", "libmp3lame", "Layer 3", "Layer 3");
            var supportedAudioCodecs = new List <CodecData>()
            {
                audioCodec,
                new CodecData("AAC", null)
            };
            var serviceConfigurator = new MetadataServiceConfigurator("MPEG-4", "mp4", videoCodec, audioCodec, supportedAudioCodecs);

            StartEncode(serviceConfigurator, filePathes);
        }
Example #20
        public void Mp4EncodeWithDifferentResolutionTest()
        {
            Directory.CreateDirectory(_destinationPath);
            string[] filePathes = Directory.GetFiles(_sourcePath);
            var      videoCodec = new CodecData(FfmpegConstant.AvcCodec, FfmpegConstant.AvcCodecLib, FfmpegConstant.AvcMainProfile, FfmpegConstant.AvcMainProfile, FfmpegConstant.AvcBaselineProfile);
            var      audioCodec = new CodecData(FfmpegConstant.AacCodec, FfmpegConstant.AacCodecLib);

            var resolutions         = new[] { Int16.MaxValue, 500, 400, 100, 10 };
            var serviceConfigurator = new MetadataServiceConfigurator(FfmpegConstant.Mp4Container, FfmpegConstant.Mp4FfmpegContainer, videoCodec, audioCodec);

            foreach (int resolution in resolutions)
            {
                serviceConfigurator.MaxWidth  = resolution;
                serviceConfigurator.MaxHeight = resolution;
                StartEncode(serviceConfigurator, filePathes, String.Format("{0}_", resolution));
            }
        }
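MaxWidth and MaxHeight are only assigned here; how they are applied is not shown. A common interpretation, offered purely as an assumption rather than as this project's behaviour, is to downscale the source to fit inside the bounds while preserving aspect ratio and never upscaling:

        // Sketch under the stated assumption; not taken from MetadataServiceConfigurator.
        static void FitWithin(int srcWidth, int srcHeight, int maxWidth, int maxHeight,
                              out int dstWidth, out int dstHeight)
        {
            double scale = Math.Min(1.0, Math.Min((double)maxWidth / srcWidth,
                                                  (double)maxHeight / srcHeight));
            dstWidth  = (int)Math.Round(srcWidth * scale);       // scale is capped at 1.0, so no upscaling
            dstHeight = (int)Math.Round(srcHeight * scale);
        }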
Example #21
        public void CreateCodecDataWithProfileTest()
        {
            //Arrange
            const string videoCodec = "videoCodec";
            const string videoProfile1 = "videoProfile1";
            const string videoProfile2 = "videoProfile2";
            const string libName = "libName";
            
            //Act
            var codecData = new CodecData(videoCodec, libName, videoProfile1, videoProfile1, videoProfile2);

            //Assert
            Assert.AreEqual(videoCodec,codecData.Codec);
            Assert.AreEqual(videoProfile1,codecData.DefaultProfile);
            Assert.AreEqual(libName, codecData.LibName);
            Assert.IsTrue(codecData.Profiles.Any(p => p == videoProfile1));
            Assert.IsTrue(codecData.Profiles.Any(p => p == videoProfile2));
        }
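Taken together with CreateCodecDataWithoutProfileTest above, this test constrains the CodecData constructor: the third argument becomes DefaultProfile, and Profiles always contains that argument (null when it is omitted) plus any trailing profiles. A minimal shape that satisfies both tests, offered as an assumption rather than the project's actual class (assumes System.Linq):

        public class CodecDataSketch
        {
            public string Codec          { get; private set; }
            public string LibName        { get; private set; }
            public string DefaultProfile { get; private set; }
            public IEnumerable<string> Profiles { get; private set; }

            public CodecDataSketch(string codec, string libName,
                                   string defaultProfile = null, params string[] profiles)
            {
                Codec          = codec;
                LibName        = libName;
                DefaultProfile = defaultProfile;
                // The profile-less test expects Profiles to contain a null entry, so the
                // default profile is always included, even when it is null.
                Profiles       = new[] { defaultProfile }.Concat(profiles).ToList();
            }
        }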
Example #22
        public void CreateCodecDataWithProfileTest()
        {
            //Arrange
            const string videoCodec    = "videoCodec";
            const string videoProfile1 = "videoProfile1";
            const string videoProfile2 = "videoProfile2";
            const string libName       = "libName";

            //Act
            var codecData = new CodecData(videoCodec, libName, videoProfile1, videoProfile1, videoProfile2);

            //Assert
            Assert.AreEqual(videoCodec, codecData.Codec);
            Assert.AreEqual(videoProfile1, codecData.DefaultProfile);
            Assert.AreEqual(libName, codecData.LibName);
            Assert.IsTrue(codecData.Profiles.Any(p => p == videoProfile1));
            Assert.IsTrue(codecData.Profiles.Any(p => p == videoProfile2));
        }
        public void AdjustVideoCodecTest()
        {
            //Arrange
            const string anotherCodecName = "anotherCodec";
            const string myCodecName      = "myCodec";
            var          myCodec          = new CodecData(myCodecName, null);

            var serviceConfigurator = new MetadataServiceConfigurator(null, null, myCodec, null, null);

            var metadata1 = new VideoMetadata()
            {
                VideoCodec = myCodecName
            };
            var metadata2 = new VideoMetadata()
            {
                VideoCodec = anotherCodecName
            };
            var metadataWithoutCodec = new VideoMetadata()
            {
                VideoCodec = null
            };

            var metadataService1 = new MetadataService(serviceConfigurator, metadata1);
            var metadataService2 = new MetadataService(serviceConfigurator, metadata2);
            var metadataService3 = new MetadataService(serviceConfigurator, metadataWithoutCodec);

            //Act
            var codec1 = metadataService1.AdjustVideoCodec();
            var codec2 = metadataService2.AdjustVideoCodec();

            //Act & Assert
            CustomAssert.IsThrown <MediaFormatException>(() => metadataService3.AdjustContainer());

            //Assert
            Assert.AreEqual(myCodecName, codec1);
            Assert.AreEqual(myCodecName, codec2);
        }
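This test pins down two behaviours: AdjustVideoCodec answers with the configured video codec whether or not the source matches it, and AdjustContainer rejects metadata that carries no video codec. A sketch inferred from the assertions only (the MediaFormatException constructor used below is an assumption):

        static string AdjustVideoCodecSketch(string sourceCodec, CodecData configuredVideoCodec)
        {
            // codec1 and codec2 are both asserted to equal myCodecName, i.e. the configured codec.
            return configuredVideoCodec.Codec;
        }

        static string AdjustContainerSketch(string sourceVideoCodec, string configuredContainer)
        {
            if (sourceVideoCodec == null)
                throw new MediaFormatException();    // metadataWithoutCodec case
            return configuredContainer;              // assumed: the configured container otherwise
        }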
Example #24
		public override void EncodeToFile( Stream input, string outFileName, CodecData data )
		{
			var fiBitMap = _encode( input, data );
			FI.FreeImage.Save( (FI.FREE_IMAGE_FORMAT)this._freeImageType, fiBitMap, outFileName, FI.FREE_IMAGE_SAVE_FLAGS.DEFAULT );
			FI.FreeImage.Unload( fiBitMap );
		}
        public void AdjustVideoCodecTest()
        {
            //Arrange
            const string anotherCodecName = "anotherCodec";
            const string myCodecName = "myCodec";
            var myCodec = new CodecData(myCodecName, null);

            var serviceConfigurator = new MetadataServiceConfigurator(null, null, myCodec, null, null);

            var metadata1 = new VideoMetadata()
            {
                VideoCodec = myCodecName
            };
            var metadata2 = new VideoMetadata()
            {
                VideoCodec = anotherCodecName
            };
            var metadataWithoutCodec = new VideoMetadata()
            {
                VideoCodec = null
            };

            var metadataService1 = new MetadataService(serviceConfigurator, metadata1);
            var metadataService2 = new MetadataService(serviceConfigurator, metadata2);
            var metadataService3 = new MetadataService(serviceConfigurator, metadataWithoutCodec);

            //Act
            var codec1 = metadataService1.AdjustVideoCodec();
            var codec2 = metadataService2.AdjustVideoCodec();

            //Act & Assert
            CustomAssert.IsThrown<MediaFormatException>(() => metadataService3.AdjustContainer());

            //Assert
            Assert.AreEqual(myCodecName, codec1);
            Assert.AreEqual(myCodecName, codec2);
        }
Example #26
 public DecodeResult(Stream s, CodecData data)
 {
     this._tuple = new Tuple <Stream, CodecData>(s, data);
 }
Example #27
		private FI.FIBITMAP _encode( Stream input, CodecData codecData )
		{
			var ret = new FI.FIBITMAP();
			ret.SetNull();
			var imgData = codecData as ImageData;

			if ( imgData != null )
			{
				var data = new byte[(int)input.Length];
				input.Read( data, 0, data.Length );
				var dataPtr = BufferBase.Wrap( data );
				var src = new PixelBox( imgData.width, imgData.height, imgData.depth, imgData.format, dataPtr );

				// The required format, which will adjust to the format
				// actually supported by FreeImage.
				var requiredFormat = imgData.format;

				// determine the settings
				var imageType = FI.FREE_IMAGE_TYPE.FIT_UNKNOWN;
				var determiningFormat = imgData.format;

				switch ( determiningFormat )
				{
					case PixelFormat.R5G6B5:
					case PixelFormat.B5G6R5:
					case PixelFormat.R8G8B8:
					case PixelFormat.B8G8R8:
					case PixelFormat.A8R8G8B8:
					case PixelFormat.X8R8G8B8:
					case PixelFormat.A8B8G8R8:
					case PixelFormat.X8B8G8R8:
					case PixelFormat.B8G8R8A8:
					case PixelFormat.R8G8B8A8:
					case PixelFormat.A4L4:
					case PixelFormat.BYTE_LA:
					case PixelFormat.R3G3B2:
					case PixelFormat.A4R4G4B4:
					case PixelFormat.A1R5G5B5:
					case PixelFormat.A2R10G10B10:
					case PixelFormat.A2B10G10R10:
						// I'd like to be able to use r/g/b masks to get FreeImage to load the data
						// in its existing format, but that doesn't work; FreeImage always needs the
						// data in RGB[A] (big endian) or BGR[A] (little endian).
						if ( PixelUtil.HasAlpha( determiningFormat ) )
						{
							if ( FI.FreeImageEngine.IsLittleEndian )
							{
								requiredFormat = PixelFormat.BYTE_BGRA;
							}
							else
							{
								requiredFormat = PixelFormat.BYTE_RGBA;
							}
						}
						else
						{
							if ( FI.FreeImageEngine.IsLittleEndian )
							{
								requiredFormat = PixelFormat.BYTE_BGR;
							}
							else
							{
								requiredFormat = PixelFormat.BYTE_RGB;
							}
						}
						imageType = FI.FREE_IMAGE_TYPE.FIT_BITMAP;
						break;

					case PixelFormat.L8:
					case PixelFormat.A8:
						imageType = FI.FREE_IMAGE_TYPE.FIT_BITMAP;
						break;

					case PixelFormat.L16:
						imageType = FI.FREE_IMAGE_TYPE.FIT_UINT16;
						break;

					case PixelFormat.SHORT_GR:
						requiredFormat = PixelFormat.SHORT_RGB;
						break;

					case PixelFormat.SHORT_RGB:
						imageType = FI.FREE_IMAGE_TYPE.FIT_RGB16;
						break;

					case PixelFormat.SHORT_RGBA:
						imageType = FI.FREE_IMAGE_TYPE.FIT_RGBA16;
						break;

					case PixelFormat.FLOAT16_R:
						requiredFormat = PixelFormat.FLOAT32_R;
						break;

					case PixelFormat.FLOAT32_R:
						imageType = FI.FREE_IMAGE_TYPE.FIT_FLOAT;
						break;

					case PixelFormat.FLOAT16_GR:
					case PixelFormat.FLOAT16_RGB:
					case PixelFormat.FLOAT32_GR:
						requiredFormat = PixelFormat.FLOAT32_RGB;
						break;

					case PixelFormat.FLOAT32_RGB:
						imageType = FI.FREE_IMAGE_TYPE.FIT_RGBF;
						break;

					case PixelFormat.FLOAT16_RGBA:
						requiredFormat = PixelFormat.FLOAT32_RGBA;
						break;

					case PixelFormat.FLOAT32_RGBA:
						imageType = FI.FREE_IMAGE_TYPE.FIT_RGBAF;
						break;

					default:
						throw new AxiomException( "Not Supported image format :{0}", determiningFormat.ToString() );
				} //end switch

				// Check support for this image type & bit depth
				if ( !FI.FreeImage.FIFSupportsExportType( (FI.FREE_IMAGE_FORMAT)this._freeImageType, imageType ) ||
				     !FI.FreeImage.FIFSupportsExportBPP( (FI.FREE_IMAGE_FORMAT)this._freeImageType,
				                                         PixelUtil.GetNumElemBits( requiredFormat ) ) )
				{
					// Ok, need to allocate a fallback
					// Only deal with RGBA -> RGB for now
					switch ( requiredFormat )
					{
						case PixelFormat.BYTE_RGBA:
							requiredFormat = PixelFormat.BYTE_RGB;
							break;

						case PixelFormat.BYTE_BGRA:
							requiredFormat = PixelFormat.BYTE_BGR;
							break;

						default:
							break;
					}
				}

				var conversionRequired = false;
				input.Position = 0;
                var srcData = new byte[ (int)input.Length ];
				input.Read( srcData, 0, srcData.Length );
				var srcDataPtr = BufferBase.Wrap( srcData );

				// Check BPP
				var bpp = PixelUtil.GetNumElemBits( requiredFormat );
				if ( !FI.FreeImage.FIFSupportsExportBPP( (FI.FREE_IMAGE_FORMAT)this._freeImageType, bpp ) )
				{
					if ( bpp == 32 && PixelUtil.HasAlpha( imgData.format ) &&
					     FI.FreeImage.FIFSupportsExportBPP( (FI.FREE_IMAGE_FORMAT)this._freeImageType, 24 ) )
					{
						// drop to 24 bit (lose alpha)
						if ( FI.FreeImage.IsLittleEndian() )
						{
							requiredFormat = PixelFormat.BYTE_BGR;
						}
						else
						{
							requiredFormat = PixelFormat.BYTE_RGB;
						}

						bpp = 24;
					}
					else if ( bpp == 128 && PixelUtil.HasAlpha( imgData.format ) &&
					          FI.FreeImage.FIFSupportsExportBPP( (FI.FREE_IMAGE_FORMAT)this._freeImageType, 96 ) )
					{
						// drop to 96-bit floating point
						requiredFormat = PixelFormat.FLOAT32_RGB;
					}
				}

				var convBox = new PixelBox( imgData.width, imgData.height, 1, requiredFormat );
				if ( requiredFormat != imgData.format )
				{
					conversionRequired = true;
					// Allocate memory
					var convData = new byte[convBox.ConsecutiveSize];
					convBox.Data = BufferBase.Wrap( convData );
					// perform conversion and reassign source
					var newSrc = new PixelBox( imgData.width, imgData.height, 1, imgData.format, dataPtr );
					PixelConverter.BulkPixelConversion( newSrc, convBox );
					srcDataPtr = convBox.Data;
				}

				ret = FI.FreeImage.AllocateT( imageType, imgData.width, imgData.height, bpp );
				if ( ret.IsNull )
				{
					if ( conversionRequired )
					{
                        srcDataPtr.SafeDispose();
						convBox = null;
					}

					throw new AxiomException( "FreeImage.AllocateT failed - possibly out of memory. " );
				}

				if ( requiredFormat == PixelFormat.L8 || requiredFormat == PixelFormat.A8 )
				{
					// Must explicitly tell FreeImage that this is greyscale by setting
					// a "grey" palette (otherwise it will save as a normal RGB
					// palettized image).
					var tmp = FI.FreeImage.ConvertToGreyscale( ret );
					FI.FreeImage.Unload( ret );
					ret = tmp;
				}

				var dstPitch = (int)FI.FreeImage.GetPitch( ret );
				var srcPitch = imgData.width*PixelUtil.GetNumElemBytes( requiredFormat );

				// Copy data, invert scanlines and respect FreeImage pitch
				var pSrc = srcDataPtr;
                using ( var pDest = BufferBase.Wrap( FI.FreeImage.GetBits( ret ), imgData.height * srcPitch ) )
				{
					var byteSrcData = pSrc;
					var byteDstData = pDest;
					for ( var y = 0; y < imgData.height; ++y )
					{
                        byteSrcData += ( imgData.height - y - 1 ) * srcPitch;
						Memory.Copy( pSrc, pDest, srcPitch );
						byteDstData += dstPitch;
					}
				}

				if ( conversionRequired )
				{
					// delete temporary conversion area
                    srcDataPtr.SafeDispose();
					convBox = null;
				}
			}
			return ret;
		}
Example #28
 public bool Equals(VideoCodecState other)
 {
     return(true && ref_count.Equals(other.ref_count) && Info.Equals(other.Info) && Caps.Equals(other.Caps) && CodecData.Equals(other.CodecData) && AllocationCaps.Equals(other.AllocationCaps));
 }
Example #29
        public void Mp4EncodeWithDifferentResolutionTest()
        {
            Directory.CreateDirectory(_destinationPath);
            string[] filePathes = Directory.GetFiles(_sourcePath);
            var videoCodec = new CodecData(FfmpegConstant.AvcCodec, FfmpegConstant.AvcCodecLib, FfmpegConstant.AvcMainProfile, FfmpegConstant.AvcMainProfile, FfmpegConstant.AvcBaselineProfile);
            var audioCodec = new CodecData(FfmpegConstant.AacCodec, FfmpegConstant.AacCodecLib);

            var resolutions = new[] {Int16.MaxValue, 500, 400, 100, 10};
            var serviceConfigurator = new MetadataServiceConfigurator(FfmpegConstant.Mp4Container, FfmpegConstant.Mp4FfmpegContainer, videoCodec, audioCodec);

            foreach (int resolution in resolutions)
            {
                serviceConfigurator.MaxWidth = resolution;
                serviceConfigurator.MaxHeight = resolution;
                StartEncode(serviceConfigurator, filePathes, String.Format("{0}_", resolution));
            }
        }
Example #30
 public override int GetHashCode()
 {
     return(this.GetType().FullName.GetHashCode() ^ ref_count.GetHashCode() ^ Info.GetHashCode() ^ Caps.GetHashCode() ^ CodecData.GetHashCode() ^ AllocationCaps.GetHashCode());
 }
Example #31
        private FI.FIBITMAP _encode(Stream input, CodecData codecData)
        {
            var ret = new FI.FIBITMAP();

            ret.SetNull();
            var imgData = codecData as ImageData;

            if (imgData != null)
            {
                var data = new byte[(int)input.Length];
                input.Read(data, 0, data.Length);
                var dataPtr = BufferBase.Wrap(data);
                var src     = new PixelBox(imgData.width, imgData.height, imgData.depth, imgData.format, dataPtr);

                // The required format, which will adjust to the format
                // actually supported by FreeImage.
                var requiredFormat = imgData.format;

                // determine the settings
                var imageType         = FI.FREE_IMAGE_TYPE.FIT_UNKNOWN;
                var determiningFormat = imgData.format;

                switch (determiningFormat)
                {
                case PixelFormat.R5G6B5:
                case PixelFormat.B5G6R5:
                case PixelFormat.R8G8B8:
                case PixelFormat.B8G8R8:
                case PixelFormat.A8R8G8B8:
                case PixelFormat.X8R8G8B8:
                case PixelFormat.A8B8G8R8:
                case PixelFormat.X8B8G8R8:
                case PixelFormat.B8G8R8A8:
                case PixelFormat.R8G8B8A8:
                case PixelFormat.A4L4:
                case PixelFormat.BYTE_LA:
                case PixelFormat.R3G3B2:
                case PixelFormat.A4R4G4B4:
                case PixelFormat.A1R5G5B5:
                case PixelFormat.A2R10G10B10:
                case PixelFormat.A2B10G10R10:
                    // I'd like to be able to use r/g/b masks to get FreeImage to load the data
                    // in its existing format, but that doesn't work; FreeImage always needs the
                    // data in RGB[A] (big endian) or BGR[A] (little endian).
                    if (PixelUtil.HasAlpha(determiningFormat))
                    {
                        if (FI.FreeImageEngine.IsLittleEndian)
                        {
                            requiredFormat = PixelFormat.BYTE_BGRA;
                        }
                        else
                        {
                            requiredFormat = PixelFormat.BYTE_RGBA;
                        }
                    }
                    else
                    {
                        if (FI.FreeImageEngine.IsLittleEndian)
                        {
                            requiredFormat = PixelFormat.BYTE_BGR;
                        }
                        else
                        {
                            requiredFormat = PixelFormat.BYTE_RGB;
                        }
                    }
                    imageType = FI.FREE_IMAGE_TYPE.FIT_BITMAP;
                    break;

                case PixelFormat.L8:
                case PixelFormat.A8:
                    imageType = FI.FREE_IMAGE_TYPE.FIT_BITMAP;
                    break;

                case PixelFormat.L16:
                    imageType = FI.FREE_IMAGE_TYPE.FIT_UINT16;
                    break;

                case PixelFormat.SHORT_GR:
                    requiredFormat = PixelFormat.SHORT_RGB;
                    break;

                case PixelFormat.SHORT_RGB:
                    imageType = FI.FREE_IMAGE_TYPE.FIT_RGB16;
                    break;

                case PixelFormat.SHORT_RGBA:
                    imageType = FI.FREE_IMAGE_TYPE.FIT_RGBA16;
                    break;

                case PixelFormat.FLOAT16_R:
                    requiredFormat = PixelFormat.FLOAT32_R;
                    break;

                case PixelFormat.FLOAT32_R:
                    imageType = FI.FREE_IMAGE_TYPE.FIT_FLOAT;
                    break;

                case PixelFormat.FLOAT16_GR:
                case PixelFormat.FLOAT16_RGB:
                case PixelFormat.FLOAT32_GR:
                    requiredFormat = PixelFormat.FLOAT32_RGB;
                    break;

                case PixelFormat.FLOAT32_RGB:
                    imageType = FI.FREE_IMAGE_TYPE.FIT_RGBF;
                    break;

                case PixelFormat.FLOAT16_RGBA:
                    requiredFormat = PixelFormat.FLOAT32_RGBA;
                    break;

                case PixelFormat.FLOAT32_RGBA:
                    imageType = FI.FREE_IMAGE_TYPE.FIT_RGBAF;
                    break;

                default:
                    throw new AxiomException("Not Supported image format :{0}", determiningFormat.ToString());
                } //end switch

                // Check support for this image type & bit depth
                if (!FI.FreeImage.FIFSupportsExportType((FI.FREE_IMAGE_FORMAT) this._freeImageType, imageType) ||
                    !FI.FreeImage.FIFSupportsExportBPP((FI.FREE_IMAGE_FORMAT) this._freeImageType,
                                                       PixelUtil.GetNumElemBits(requiredFormat)))
                {
                    // Ok, need to allocate a fallback
                    // Only deal with RGBA -> RGB for now
                    switch (requiredFormat)
                    {
                    case PixelFormat.BYTE_RGBA:
                        requiredFormat = PixelFormat.BYTE_RGB;
                        break;

                    case PixelFormat.BYTE_BGRA:
                        requiredFormat = PixelFormat.BYTE_BGR;
                        break;

                    default:
                        break;
                    }
                }

                var conversionRequired = false;
                input.Position = 0;
                var srcData = new byte[(int)input.Length];
                input.Read(srcData, 0, srcData.Length);
                var srcDataPtr = BufferBase.Wrap(srcData);

                // Check BPP
                var bpp = PixelUtil.GetNumElemBits(requiredFormat);
                if (!FI.FreeImage.FIFSupportsExportBPP((FI.FREE_IMAGE_FORMAT) this._freeImageType, bpp))
                {
                    if (bpp == 32 && PixelUtil.HasAlpha(imgData.format) &&
                        FI.FreeImage.FIFSupportsExportBPP((FI.FREE_IMAGE_FORMAT) this._freeImageType, 24))
                    {
                        // drop to 24 bit (lose alpha)
                        if (FI.FreeImage.IsLittleEndian())
                        {
                            requiredFormat = PixelFormat.BYTE_BGR;
                        }
                        else
                        {
                            requiredFormat = PixelFormat.BYTE_RGB;
                        }

                        bpp = 24;
                    }
                    else if (bpp == 128 && PixelUtil.HasAlpha(imgData.format) &&
                             FI.FreeImage.FIFSupportsExportBPP((FI.FREE_IMAGE_FORMAT) this._freeImageType, 96))
                    {
                        // drop to 96-bit floating point
                        requiredFormat = PixelFormat.FLOAT32_RGB;
                    }
                }

                var convBox = new PixelBox(imgData.width, imgData.height, 1, requiredFormat);
                if (requiredFormat != imgData.format)
                {
                    conversionRequired = true;
                    // Allocate memory
                    var convData = new byte[convBox.ConsecutiveSize];
                    convBox.Data = BufferBase.Wrap(convData);
                    // perform conversion and reassign source
                    var newSrc = new PixelBox(imgData.width, imgData.height, 1, imgData.format, dataPtr);
                    PixelConverter.BulkPixelConversion(newSrc, convBox);
                    srcDataPtr = convBox.Data;
                }

                ret = FI.FreeImage.AllocateT(imageType, imgData.width, imgData.height, bpp);
                if (ret.IsNull)
                {
                    if (conversionRequired)
                    {
                        srcDataPtr.SafeDispose();
                        convBox = null;
                    }

                    throw new AxiomException("FreeImage.AllocateT failed - possibly out of memory. ");
                }

                if (requiredFormat == PixelFormat.L8 || requiredFormat == PixelFormat.A8)
                {
                    // Must explicitly tell FreeImage that this is greyscale by setting
                    // a "grey" palette (otherwise it will save as a normal RGB
                    // palettized image).
                    var tmp = FI.FreeImage.ConvertToGreyscale(ret);
                    FI.FreeImage.Unload(ret);
                    ret = tmp;
                }

                var dstPitch = (int)FI.FreeImage.GetPitch(ret);
                var srcPitch = imgData.width * PixelUtil.GetNumElemBytes(requiredFormat);

                // Copy data, invert scanlines and respect FreeImage pitch
                var pSrc = srcDataPtr;
                using (var pDest = BufferBase.Wrap(FI.FreeImage.GetBits(ret), imgData.height * srcPitch))
                {
                    var byteSrcData = pSrc;
                    var byteDstData = pDest;
                    for (var y = 0; y < imgData.height; ++y)
                    {
                        byteSrcData += (imgData.height - y - 1) * srcPitch;
                        Memory.Copy(pSrc, pDest, srcPitch);
                        byteDstData += dstPitch;
                    }
                }

                if (conversionRequired)
                {
                    // delete temporary conversion area
                    srcDataPtr.SafeDispose();
                    convBox = null;
                }
            }
            return(ret);
        }
        public void AdjustAudioCodecTest()
        {
            //Arrange
            const string myCodec1 = "myCodec1";
            const string myCodec2 = "myCodec2";
            const string anotherCodec = "anotherCodec";

            var audioCodec = new CodecData(myCodec1, null);

            var supportedAudioCodecs = new List<CodecData>()
                                           {
                                               audioCodec,
                                               new CodecData(myCodec2, null)
                                           };

            var serviceConfigurator = new MetadataServiceConfigurator(null, null, null, audioCodec, supportedAudioCodecs);

            var metadata1 = new VideoMetadata()
            {
                AudioCodec = myCodec1
            };
            var metadata2 = new VideoMetadata()
            {
                AudioCodec = myCodec2
            };
            var metadata3 = new VideoMetadata()
            {
                AudioCodec = anotherCodec
            };
            var metadataWithoutCodec = new VideoMetadata()
            {
                AudioCodec = null
            };

            var metadataService1 = new MetadataService(serviceConfigurator, metadata1);
            var metadataService2 = new MetadataService(serviceConfigurator, metadata2);
            var metadataService3 = new MetadataService(serviceConfigurator, metadata3);
            var metadataService4 = new MetadataService(serviceConfigurator, metadataWithoutCodec);

            //Act
            var codec1 = metadataService1.AdjustAudioCodec();
            var codec2 = metadataService2.AdjustAudioCodec();
            var codec3 = metadataService3.AdjustAudioCodec();
            var codec4 = metadataService4.AdjustAudioCodec();

            //Assert
            Assert.AreEqual(myCodec1, codec1);
            Assert.AreEqual(myCodec2, codec2);
            Assert.AreEqual(myCodec1, codec3);
            Assert.AreEqual(null, codec4);
        }
        public void AdjustAudioProfileTest()
        {
            //Arrange
            const string myCodec1 = "myCodec1";
            const string myCodec2 = "myCodec2";
            const string anotherProfile = "anotherProfile";
            const string firstProfile = "firstProfile";

            var audioCodec = new CodecData(myCodec1, null, firstProfile);

            var supportedAudioCodecs = new List<CodecData>()
                                           {
                                               audioCodec,
                                               new CodecData(myCodec2,null)
                                           };

            var serviceConfigurator = new MetadataServiceConfigurator(null, null, null, audioCodec, supportedAudioCodecs);

            var metadata1 = new VideoMetadata()
            {
                AudioCodec = myCodec1,
                AudioProfile = firstProfile
            };
            var metadata2 = new VideoMetadata()
            {
                AudioCodec = myCodec1,
                AudioProfile = anotherProfile
            };
            var metadata3 = new VideoMetadata()
            {
                AudioCodec = myCodec2,
                AudioProfile = anotherProfile
            };
            var metadataWithoutProfile = new VideoMetadata()
            {
                AudioCodec = myCodec1,
                AudioProfile = null
            };

            var metadataService1 = new MetadataService(serviceConfigurator, metadata1);
            var metadataService2 = new MetadataService(serviceConfigurator, metadata2);
            var metadataService3 = new MetadataService(serviceConfigurator, metadata3);
            var metadataService4 = new MetadataService(serviceConfigurator, metadataWithoutProfile);

            //Act
            var profile1 = metadataService1.AdjustAudioProfile();
            var profile2 = metadataService2.AdjustAudioProfile();
            var profile3 = metadataService3.AdjustAudioProfile();
            var profile4 = metadataService4.AdjustAudioProfile();

            //Assert
            Assert.AreEqual(firstProfile, profile1);
            Assert.AreEqual(firstProfile, profile2);
            Assert.AreEqual(anotherProfile, profile3);
            Assert.AreEqual(firstProfile, profile4);
        }
Example #34
 public abstract void EncodeToFile(Stream input, string outFileName, CodecData data);
Example #35
 internal static global::System.Runtime.InteropServices.HandleRef getCPtr(CodecData obj)
 {
     return((obj == null) ? new global::System.Runtime.InteropServices.HandleRef(null, global::System.IntPtr.Zero) : obj.swigCPtr);
 }
Example #36
			public DecodeResult( Stream s, CodecData data )
			{
				this._tuple = new Tuple<Stream, CodecData>( s, data );
			}
Example #37
		public abstract void EncodeToFile( Stream input, string outFileName, CodecData data );
Example #38
		public abstract Stream Encode( Stream input, CodecData data );
Example #39
 public abstract Stream Encode(Stream input, CodecData data);
        public void AdjustVideoProfileTest()
        {
            //Arrange
            const string myCodec = "myCodec";
            const string anotherProfile = "anotherProfile";
            const string firstProfile = "firstProfile";
            const string secondProfile = "secondProfile";

            var videoCodec = new CodecData(myCodec, null, firstProfile, secondProfile);

            var serviceConfigurator = new MetadataServiceConfigurator(null, null, videoCodec, null, null);

            var metadata1 = new VideoMetadata()
            {
                VideoCodec = myCodec,
                VideoProfile = firstProfile
            };
            var metadata2 = new VideoMetadata()
            {
                VideoCodec = myCodec,
                VideoProfile = secondProfile
            };
            var metadata3 = new VideoMetadata()
            {
                VideoCodec = myCodec,
                VideoProfile = anotherProfile
            };
            var metadataWithoutCodec = new VideoMetadata()
            {
                VideoCodec = myCodec,
                VideoProfile = null
            };

            var metadataService1 = new MetadataService(serviceConfigurator, metadata1);
            var metadataService2 = new MetadataService(serviceConfigurator, metadata2);
            var metadataService3 = new MetadataService(serviceConfigurator, metadata3);
            var metadataService4 = new MetadataService(serviceConfigurator, metadataWithoutCodec);

            //Act
            var profile1 = metadataService1.AdjustVideoProfile();
            var profile2 = metadataService2.AdjustVideoProfile();
            var profile3 = metadataService3.AdjustVideoProfile();
            var profile4 = metadataService4.AdjustVideoProfile();

            //Assert
            Assert.AreEqual(firstProfile, profile1);
            Assert.AreEqual(secondProfile, profile2);
            Assert.AreEqual(firstProfile, profile3);
            Assert.AreEqual(firstProfile, profile4);
        }