The Bitmap class should be used to replace the standard System.Drawing.Bitmap. Its big advantage is that it accurately reads back the color profile and gamma-correction data stored in the image's metadata so that, internally, the image is stored:
• In the device-independent CIE XYZ format (http://en.wikipedia.org/wiki/CIE_1931_color_space), our Profile Connection Space
• In linear space (i.e. no gamma curve is applied)
• With NON-pre-multiplied alpha (you can re-pre-multiply later if needed)

This ensures that, whatever the source image format stored on disk, you always deal with a uniform image internally. Later, you can cast from the device-independent CIE XYZ format into any number of pre-defined texture profiles:
• sRGB or linear-space textures (for 8-bits-per-component images only)
• Compressed (BC1-BC5) or uncompressed (for 8-bits-per-component images only)
• 8-, 16-, 16F-, 32- or 32F-bits per component
• Pre-multiplied alpha or not

The following image formats are currently supported:
• JPG
• PNG
• TIFF
• TGA
• BMP
• GIF
• HDR
• Any RAW camera format supported by the LibRaw library
The Bitmap class has been tested with various formats, bit depths and color profiles, all created in Adobe Photoshop CS4 using either the "Save As" dialog or the "Save for Web & Devices" dialog box. As a general rule, you should NOT use the latter option: instead, select your working color profile from the "Edit > Color Settings" menu, then save your files with the DEFAULT "Save As" dialog box, making sure the "ICC Profile" checkbox is ticked so the profile gets embedded in the image. A minimal usage sketch follows the class information below.
Inheritance: IDisposable
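Here is a minimal usage sketch assembled only from calls that appear in the examples below (loading an ImageFile, converting its pixel format and saving it as EXR); it is illustrative rather than a definitive reference for the library's API, and the file paths are placeholders:

        // Load an image file from disk
        ImageFile	source = new ImageFile( new System.IO.FileInfo( @"Images\Input.jpg" ) );

        // Convert it to a 32-bits-per-component floating-point image
        ImageFile	hdr = new ImageFile();
        hdr.ConvertFrom( source, ImageFile.PIXEL_FORMAT.RGB32F );

        // Save the result as an EXR file using the default save flags
        hdr.Save( new System.IO.FileInfo( @"Images\Output.exr" ), ImageFile.FILE_FORMAT.EXR, ImageFile.SAVE_FLAGS.SF_EXR_DEFAULT );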
Example #1
        protected void TestConvertLDR2HDR( System.IO.FileInfo[] _LDRImageFileNames, bool _responseCurveOnly, bool _RAW )
        {
            try {

                // Load the LDR images
                List< ImageFile >	LDRImages = new List< ImageFile >();
                foreach ( System.IO.FileInfo LDRImageFileName in _LDRImageFileNames )
                    LDRImages.Add( new ImageFile( LDRImageFileName ) );

                // Retrieve the shutter speeds
                List< float >	shutterSpeeds = new List< float >();
                foreach ( ImageFile LDRImage in LDRImages ) {
                    shutterSpeeds.Add( LDRImage.Metadata.ExposureTime );
                }

                // Retrieve filter type
                Bitmap.FILTER_TYPE	filterType = Bitmap.FILTER_TYPE.NONE;
                if ( radioButtonFilterNone.Checked ) {
                     filterType = Bitmap.FILTER_TYPE.NONE;
                } else if ( radioButtonFilterGaussian.Checked ) {
                     filterType = Bitmap.FILTER_TYPE.SMOOTHING_GAUSSIAN;
                } else if ( radioButtonFilterGaussian2Pass.Checked ) {
                     filterType = Bitmap.FILTER_TYPE.SMOOTHING_GAUSSIAN_2_PASSES;
                } else if ( radioButtonFilterTent.Checked ) {
                     filterType = Bitmap.FILTER_TYPE.SMOOTHING_TENT;
                } else if ( radioButtonFilterCurveFitting.Checked ) {
                     filterType = Bitmap.FILTER_TYPE.GAUSSIAN_PLUS_CURVE_FITTING;
                }

                // Check EXR save is working!
                // ImageFile	pipo = new ImageFile();
                // pipo.ConvertFrom( LDRImages[0], ImageFile.PIXEL_FORMAT.RGB32F );
                // pipo.Save( new System.IO.FileInfo( @"..\..\Images\Out\LDR2HDR\FromJPG\Result.exr" ), ImageFile.FILE_FORMAT.EXR, ImageFile.SAVE_FLAGS.SF_EXR_DEFAULT );

                // Check bitmap->tone mapped image file is working
                // {
                // 	Bitmap	tempBitmap = new Bitmap();
                // 	List< float >	responseCurve = new List< float >( 256 );
                // 	for ( int i=0; i < 256; i++ )
                // 		responseCurve.Add( (float) (Math.Log( (1+i) / 256.0 ) / Math.Log(2)) );
                // 	tempBitmap.LDR2HDR( new ImageFile[] { LDRImages[4] }, new float[] { 1.0f }, responseCurve, 1.0f );
                //
                // 	ImageFile	tempHDR = new ImageFile();
                // 	tempBitmap.ToImageFile( tempHDR, new ColorProfile( ColorProfile.STANDARD_PROFILE.LINEAR ) );
                //
                // 	ImageFile	tempToneMappedHDR = new ImageFile();
                // 	tempToneMappedHDR.ToneMapFrom( tempHDR, ( float3 _HDRColor, ref float3 _LDRColor ) => {
                // 		// Just do gamma un-correction, don't care about actual HDR range...
                // 		_LDRColor.x = (float) Math.Pow( Math.Max( 0.0f, _HDRColor.x ), 1.0f / 2.2f );	// Here we need to clamp negative values that we sometimes get in EXR format
                // 		_LDRColor.y = (float) Math.Pow( Math.Max( 0.0f, _HDRColor.y ), 1.0f / 2.2f );	//  (must be coming from the log encoding I suppose)
                // 		_LDRColor.z = (float) Math.Pow( Math.Max( 0.0f, _HDRColor.z ), 1.0f / 2.2f );
                // 	} );
                //
                // 	panel1.Bitmap = tempToneMappedHDR.AsBitmap;
                // 	return;
                // }

                //////////////////////////////////////////////////////////////////////////////////////////////
                // Build the HDR device-independent bitmap
                //				uint bitsPerPixel = _RAW ? 12U : 8U;
                uint	bitsPerPixel = _RAW ? 8U : 8U;
                float	quality = _RAW ? 3.0f : 3.0f;

                Bitmap.HDRParms	parms = new Bitmap.HDRParms() {
                    _inputBitsPerComponent = bitsPerPixel,
                    _luminanceFactor = 1.0f,
                    _curveSmoothnessConstraint = 1.0f,
                    _quality = quality,
                    _responseCurveFilterType = filterType
                };

                ImageUtility.Bitmap	HDRImage = new ImageUtility.Bitmap();

                //				HDRImage.LDR2HDR( LDRImages.ToArray(), shutterSpeeds.ToArray(), parms );

                // Compute response curve
                List< float >	responseCurve = new List< float >();
                Bitmap.ComputeCameraResponseCurve( LDRImages.ToArray(), shutterSpeeds.ToArray(), parms._inputBitsPerComponent, parms._curveSmoothnessConstraint, parms._quality, responseCurve );

                // Filter
                List< float >	responseCurve_filtered = new List< float >();
                //				Bitmap.FilterCameraResponseCurve( responseCurve, responseCurve_filtered, Bitmap.FILTER_TYPE.CURVE_FITTING );
                Bitmap.FilterCameraResponseCurve( responseCurve, responseCurve_filtered, filterType );

                // 				using ( System.IO.FileStream S = new System.IO.FileInfo( "../../responseCurve3.float" ).Create() )
                // 					using ( System.IO.BinaryWriter W = new System.IO.BinaryWriter( S ) ) {
                // 						for ( int i=0; i < 256; i++ )
                // 							W.Write( responseCurve[i] );
                // 					}

                // Write info
                string		info = "Exposures:\r\n";
                foreach ( float shutterSpeed in shutterSpeeds )
                    info += " " + shutterSpeed + "s + ";
                info += "\r\nLog2 exposures (EV):\r\n";
                foreach ( float shutterSpeed in shutterSpeeds )
                    info += " " + (float) (Math.Log( shutterSpeed ) / Math.Log(2)) + "EV + ";
                info += "\r\n\r\n";

                if ( _responseCurveOnly ) {
                    //////////////////////////////////////////////////////////////////////////////////////////////
                    // Render the response curve as a graph
                    ImageFile	tempCurveBitmap = new ImageFile( 1024, 768, ImageFile.PIXEL_FORMAT.RGB8, new ColorProfile( ColorProfile.STANDARD_PROFILE.sRGB ) );

                    int			responseCurveSizeMax = responseCurve.Count-1;

                    float2		rangeX = new float2( 0, responseCurveSizeMax+1 );
                    float2		rangeY = new float2( 0, 500 );
                    tempCurveBitmap.Clear( new float4( 1, 1, 1, 1 ) );
                    //					tempCurveBitmap.PlotGraphAutoRangeY( red, rangeX, ref rangeY, ( float x ) => {
                    tempCurveBitmap.PlotGraph( red, rangeX, rangeY, ( float x ) => {
                        int		i0 = (int) Math.Min( responseCurveSizeMax, Math.Floor( x ) );
                        int		i1 = (int) Math.Min( responseCurveSizeMax, i0+1 );
                        float	g0 = responseCurve[i0];
                        float	g1 = responseCurve[i1];
                        float	t = x - i0;
                        //						return g0 + (g1-g0) * t;
                        return (float) Math.Pow( 2.0f, g0 + (g1-g0) * t );
                    } );
                    tempCurveBitmap.PlotGraph( blue, rangeX, rangeY, ( float x ) => {
                        int		i0 = (int) Math.Min( responseCurveSizeMax, Math.Floor( x ) );
                        int		i1 = (int) Math.Min( responseCurveSizeMax, i0+1 );
                        float	g0 = responseCurve_filtered[i0];
                        float	g1 = responseCurve_filtered[i1];
                        float	t = x - i0;
                        //						return g0 + (g1-g0) * t;
                        return (float) Math.Pow( 2.0f, g0 + (g1-g0) * t );
                    } );
                    //					tempCurveBitmap.PlotAxes( black, rangeX, rangeY, 8, 2 );

                    info += "• Linear range Y = [" + rangeY.x + ", " + rangeY.y + "]\r\n";

                    rangeY = new float2( -4, 4 );
                    tempCurveBitmap.PlotLogGraphAutoRangeY( black, rangeX, ref rangeY, ( float x ) => {
                    //					tempCurveBitmap.PlotLogGraph( black, rangeX, rangeY, ( float x ) => {
                        int		i0 = (int) Math.Min( responseCurveSizeMax, Math.Floor( x ) );
                        int		i1 = (int) Math.Min( responseCurveSizeMax, i0+1 );
                        float	g0 = responseCurve[i0];
                        float	g1 = responseCurve[i1];
                        float	t = x - i0;
                        //						return g0 + (g1-g0) * t;
                        return (float) Math.Pow( 2.0f, g0 + (g1-g0) * t );
                    }, -1.0f, 2.0f );
                    tempCurveBitmap.PlotLogGraph( blue, rangeX, rangeY, ( float x ) => {
                    //					tempCurveBitmap.PlotLogGraph( black, rangeX, rangeY, ( float x ) => {
                        int		i0 = (int) Math.Min( responseCurveSizeMax, Math.Floor( x ) );
                        int		i1 = (int) Math.Min( responseCurveSizeMax, i0+1 );
                        float	g0 = responseCurve_filtered[i0];
                        float	g1 = responseCurve_filtered[i1];
                        float	t = x - i0;
                        //						return g0 + (g1-g0) * t;
                        return (float) Math.Pow( 2.0f, g0 + (g1-g0) * t );
                    }, -1.0f, 2.0f );
                    tempCurveBitmap.PlotLogAxes( black, rangeX, rangeY, -16, 2 );

                    info += "• Log2 range Y = [" + rangeY.x + ", " + rangeY.y + "]\r\n";

                    panelOutputHDR.Bitmap = tempCurveBitmap.AsBitmap;

                } else {
                    //////////////////////////////////////////////////////////////////////////////////////////////
                    // Recompose the HDR image
                    HDRImage.LDR2HDR( LDRImages.ToArray(), shutterSpeeds.ToArray(), responseCurve_filtered, 1.0f );

                    // Display as a tone-mapped bitmap
                    ImageFile	tempHDR = new ImageFile();
                    HDRImage.ToImageFile( tempHDR, new ColorProfile( ColorProfile.STANDARD_PROFILE.LINEAR ) );

                    if ( _RAW ) {
                        tempHDR.Save( new System.IO.FileInfo( @"..\..\Images\Out\LDR2HDR\FromRAW\Result.exr" ), ImageFile.FILE_FORMAT.EXR, ImageFile.SAVE_FLAGS.SF_EXR_DEFAULT );
                        tempHDR.Save( new System.IO.FileInfo( @"..\..\Images\Out\LDR2HDR\FromRAW\Result_B44LC.exr" ), ImageFile.FILE_FORMAT.EXR, ImageFile.SAVE_FLAGS.SF_EXR_B44 | ImageFile.SAVE_FLAGS.SF_EXR_LC );
                        tempHDR.Save( new System.IO.FileInfo( @"..\..\Images\Out\LDR2HDR\FromRAW\Result_noLZW.exr" ), ImageFile.FILE_FORMAT.EXR, ImageFile.SAVE_FLAGS.SF_EXR_NONE );
                    } else {
                        tempHDR.Save( new System.IO.FileInfo( @"..\..\Images\Out\LDR2HDR\FromJPG\Result.exr" ), ImageFile.FILE_FORMAT.EXR, ImageFile.SAVE_FLAGS.SF_EXR_DEFAULT );
                        tempHDR.Save( new System.IO.FileInfo( @"..\..\Images\Out\LDR2HDR\FromJPG\Result_B44LC.exr" ), ImageFile.FILE_FORMAT.EXR, ImageFile.SAVE_FLAGS.SF_EXR_B44 | ImageFile.SAVE_FLAGS.SF_EXR_LC );
                        tempHDR.Save( new System.IO.FileInfo( @"..\..\Images\Out\LDR2HDR\FromJPG\Result_noLZW.exr" ), ImageFile.FILE_FORMAT.EXR, ImageFile.SAVE_FLAGS.SF_EXR_NONE );
                    }

                    ImageFile	tempToneMappedHDR = new ImageFile();
                    tempToneMappedHDR.ToneMapFrom( tempHDR,( float3 _HDRColor, ref float3 _LDRColor ) => {
                        // Just do gamma un-correction, don't care about actual HDR range...
                        _LDRColor.x = (float) Math.Pow( Math.Max( 0.0f, _HDRColor.x ), 1.0f / 2.2f );	// Here we need to clamp negative values that we sometimes get in EXR format
                        _LDRColor.y = (float) Math.Pow( Math.Max( 0.0f, _HDRColor.y ), 1.0f / 2.2f );	//  (must be coming from the log encoding I suppose)
                        _LDRColor.z = (float) Math.Pow( Math.Max( 0.0f, _HDRColor.z ), 1.0f / 2.2f );
                    } );

                    panelOutputHDR.Bitmap = tempToneMappedHDR.AsBitmap;
                }

                textBoxHDR.Text = info;

            } catch ( Exception _e ) {
                MessageBox.Show( "Error: " + _e.Message );

                // Show debug image
                // panelLoad.Bitmap = Bitmap.DEBUG.AsBitmap;
            }
        }
Example #2
        /// <summary>
        /// Creates the color profile from metadata embedded in the image file
        /// </summary>
        /// <param name="_MetaData">Metadata embedded in the image file (may be null if the file carries none)</param>
        /// <param name="_FileType">Source file type, used to pick default gamma and chromaticities when no profile is found in the metadata</param>
        public ColorProfile( BitmapMetadata _MetaData, Bitmap.FILE_TYPE _FileType )
        {
            string	MetaDump = _MetaData != null ? DumpMetaData( _MetaData ) : null;

            bool	bGammaFoundInFile = false;
            switch ( _FileType ) {
                case Bitmap.FILE_TYPE.JPEG:
                    m_GammaCurve = GAMMA_CURVE.STANDARD;
                    m_Gamma = 2.2f;							// JPG uses a 2.2 gamma by default
                    m_Chromaticities = Chromaticities.sRGB;	// Default for JPEGs is sRGB
                    EnumerateMetaDataJPG( _MetaData, out m_bProfileFoundInFile, out bGammaFoundInFile );

                    if ( !m_bProfileFoundInFile && !bGammaFoundInFile )
                        bGammaFoundInFile = true;			// Unless specified otherwise, we override the gamma no matter what since JPEGs use a 2.2 gamma by default anyway
                    break;

                case Bitmap.FILE_TYPE.PNG:
                    m_GammaCurve = GAMMA_CURVE.sRGB;
                    m_Gamma = GAMMA_EXPONENT_sRGB;
                    m_Chromaticities = Chromaticities.sRGB;	// Default for PNGs is standard sRGB
                    EnumerateMetaDataPNG( _MetaData, out m_bProfileFoundInFile, out bGammaFoundInFile );
                    break;

                case Bitmap.FILE_TYPE.TIFF:
                    m_GammaCurve = GAMMA_CURVE.STANDARD;
                    m_Gamma = 1.0f;							// Linear gamma by default
                    m_Chromaticities = Chromaticities.sRGB;	// Default for TIFFs is sRGB
                    EnumerateMetaDataTIFF( _MetaData, out m_bProfileFoundInFile, out bGammaFoundInFile );
                    break;

                case Bitmap.FILE_TYPE.GIF:
                    m_GammaCurve = GAMMA_CURVE.STANDARD;
                    m_Gamma = 1.0f;
                    m_Chromaticities = Chromaticities.sRGB;	// Default for GIFs is standard sRGB with no gamma
                    break;

                case Bitmap.FILE_TYPE.BMP:	// BMPs don't have metadata!
                    m_GammaCurve = GAMMA_CURVE.STANDARD;
                    m_Gamma = 1.0f;
                    m_Chromaticities = Chromaticities.sRGB;	// Default for BMPs is standard sRGB with no gamma
                    break;

                case Bitmap.FILE_TYPE.CRW:	// Raw files have no correction
                case Bitmap.FILE_TYPE.CR2:
                case Bitmap.FILE_TYPE.DNG:
                    m_GammaCurve = GAMMA_CURVE.STANDARD;
                    m_Gamma = 1.0f;
                    m_Chromaticities = Chromaticities.sRGB;	// Default for RAW files is standard sRGB with no gamma
                    break;
            }

            BuildTransformFromChroma( bGammaFoundInFile );
        }
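As a usage note, here is a rough sketch of how this constructor might be invoked from standard WPF/WIC decoding code (it assumes using System.Windows.Media.Imaging); the decoder calls are standard .NET API, while the choice of Bitmap.FILE_TYPE.PNG is only an assumption based on the file's extension:

        // Decode the file with WIC and hand its metadata to the ColorProfile constructor
        using ( System.IO.Stream stream = new System.IO.FileInfo( "test.png" ).OpenRead() ) {
            BitmapFrame		frame = BitmapDecoder.Create( stream, BitmapCreateOptions.PreservePixelFormat, BitmapCacheOption.OnLoad ).Frames[0];
            BitmapMetadata	metadata = frame.Metadata as BitmapMetadata;

            // Falls back to the PNG defaults (sRGB gamma curve + sRGB chromaticities)
            // if no profile or gamma information is found in the metadata
            ColorProfile	profile = new ColorProfile( metadata, Bitmap.FILE_TYPE.PNG );
        }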