public static float2 Parse( string v ) {
	string[] Components = v.Split( ';' );
	if ( Components.Length < 2 )
		throw new Exception( "Not enough vector components!" );

	float2 Result = new float2();
	if ( !float.TryParse( Components[0].Trim(), out Result.x ) )
		throw new Exception( "Can't parse X field!" );
	if ( !float.TryParse( Components[1].Trim(), out Result.y ) )
		throw new Exception( "Can't parse Y field!" );

	return Result;
}
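// Example usage (illustrative only, assuming the semicolon-separated "x;y" format parsed above;
// components are trimmed, so spaces around the separator are fine):
//	float2 D65 = float2.Parse( "0.3127; 0.3290" );	// yields x = 0.3127, y = 0.3290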
void TestBlackBodyRadiation( TEST_COLOR_PROFILES _type ) {
	ColorProfile sRGB = new ColorProfile( ColorProfile.STANDARD_PROFILE.sRGB );

	switch ( _type ) {
		// Load the color gamut and try to plot the loci of various white points
		//
		case TEST_COLOR_PROFILES.DRAW_WHITE_POINT_LOCI: {
			m_imageFile.Load( new System.IO.FileInfo( @"..\..\Images\In\xyGamut.png" ) );

			float2 cornerZero = new float2( 114, 1336 );			// xy=(0.0, 0.0)
			float2 cornerPoint8Point9 = new float2( 1257, 49 );		// xy=(0.8, 0.9)

//			// Check XYZ<->RGB and XYZ<->xyY converter code
//			float3 xyY = new float3();
//			float3 XYZ = new float3();
//
//			float4 testRGB = new float4();
//			float4 testXYZ = new float4();
//			for ( int i=1; i <= 10; i++ ) {
//				float f = i / 10.0f;
//				testRGB.Set( 1*f, 1*f, 1*f, 1.0f );
//				sRGB.RGB2XYZ( testRGB, ref testXYZ );
//
//				XYZ.Set( testXYZ.x, testXYZ.y, testXYZ.z );
//				ColorProfile.XYZ2xyY( XYZ, ref xyY );
//				ColorProfile.xyY2XYZ( xyY, ref XYZ );
//				testXYZ.Set( XYZ, 1.0f );
//
//				sRGB.XYZ2RGB( testXYZ, ref testRGB );
//			}

			float2 xy = new float2();
			float4 color = new float4( 1, 0, 0, 1 );
			float4 color2 = new float4( 0, 0.5f, 1, 1 );
			for ( int locusIndex=0; locusIndex < 20; locusIndex++ ) {
//				float T = 1500.0f + (8000.0f - 1500.0f) * locusIndex / 20.0f;
				float T = 1500.0f + 500.0f * locusIndex;

				ColorProfile.ComputeWhitePointChromaticities( T, ref xy );

				// Plot with the color of the white point
//				ColorProfile.xyY2XYZ( new float3( xy, 1.0f ), ref XYZ );
//				sRGB.XYZ2RGB( new float4( XYZ, 1.0f ), ref color );

				float2 fPos = cornerZero + (cornerPoint8Point9 - cornerZero) * new float2( xy.x / 0.8f, xy.y / 0.9f );
				DrawPoint( (int) fPos.x, (int) fPos.y, 6, ref color );

				ColorProfile.ComputeWhitePointChromaticitiesAnalytical( T, ref xy );
				fPos = cornerZero + (cornerPoint8Point9 - cornerZero) * new float2( xy.x / 0.8f, xy.y / 0.9f );
				DrawPoint( (int) fPos.x, (int) fPos.y, 3, ref color2 );
			}
		} break;

		case TEST_COLOR_PROFILES.BUILD_WHITE_POINTS_GRADIENT_NO_BALANCE:
		case TEST_COLOR_PROFILES.BUILD_WHITE_POINTS_GRADIENT_BALANCE_D50_TO_D65:
		case TEST_COLOR_PROFILES.BUILD_WHITE_POINTS_GRADIENT_BALANCE_D65_TO_D50: {
			float3x3 whiteBalancingXYZ = float3x3.Identity;
			float whitePointCCT = 6500.0f;
			if ( _type == TEST_COLOR_PROFILES.BUILD_WHITE_POINTS_GRADIENT_BALANCE_D50_TO_D65 ) {
				// Compute white balancing from a D50 to a D65 illuminant
				whiteBalancingXYZ = ColorProfile.ComputeWhiteBalanceXYZMatrix( ColorProfile.Chromaticities.AdobeRGB_D50, ColorProfile.ILLUMINANT_D65 );
				whitePointCCT = 5000.0f;
			} else if ( _type == TEST_COLOR_PROFILES.BUILD_WHITE_POINTS_GRADIENT_BALANCE_D65_TO_D50 ) {
				// Compute white balancing from a D65 to a D50 illuminant
				whiteBalancingXYZ = ColorProfile.ComputeWhiteBalanceXYZMatrix( ColorProfile.Chromaticities.sRGB, ColorProfile.ILLUMINANT_D50 );
				whitePointCCT = 10000.0f;	// ?? We're already in D65, so assuming we start from a D50 illuminant instead pushes the white point far away...
			}

			// Build a gradient of white points from 1500K to 8000K
			m_imageFile.Init( 650, 32, ImageFile.PIXEL_FORMAT.RGBA8, sRGB );

			float4 RGB = new float4( 0, 0, 0, 0 );
			float3 XYZ = new float3( 0, 0, 0 );
			float2 xy = new float2();
			for ( uint X=0; X < 650; X++ ) {
				float T = 1500 + 10 * X;	// From 1500K to 8000K

				ColorProfile.ComputeWhitePointChromaticities( T, ref xy );
				ColorProfile.xyY2XYZ( new float3( xy, 1.0f ), ref XYZ );

				// Apply white balancing
				XYZ *= whiteBalancingXYZ;

				sRGB.XYZ2RGB( new float4( XYZ, 1 ), ref RGB );

				// "Normalize"
//				RGB /= Math.Max( Math.Max( RGB.x, RGB.y ), RGB.z );

				// Isolate D65
				if ( Math.Abs( T - whitePointCCT ) < 10.0f )
					RGB.Set( 1, 0, 1, 1 );

				for ( uint Y=0; Y < 32; Y++ ) {
					m_imageFile[X,Y] = RGB;
				}
			}

//			// Check white balancing yields correct results
//			float3 XYZ_R_in = new float3();
//			float3 XYZ_G_in = new float3();
//			float3 XYZ_B_in = new float3();
//			float3 XYZ_W_in = new float3();
//			sRGB.RGB2XYZ( new float3( 1, 0, 0 ), ref XYZ_R_in );
//			sRGB.RGB2XYZ( new float3( 0, 1, 0 ), ref XYZ_G_in );
//			sRGB.RGB2XYZ( new float3( 0, 0, 1 ), ref XYZ_B_in );
//			sRGB.RGB2XYZ( new float3( 1, 1, 1 ), ref XYZ_W_in );
//
//			float3 XYZ_R_out = XYZ_R_in * XYZ_D65_D50;
//			float3 XYZ_G_out = XYZ_G_in * XYZ_D65_D50;
//			float3 XYZ_B_out = XYZ_B_in * XYZ_D65_D50;
//			float3 XYZ_W_out = XYZ_W_in * XYZ_D65_D50;
//
//			float3 xyY_R_out = new float3();
//			float3 xyY_G_out = new float3();
//			float3 xyY_B_out = new float3();
//			float3 xyY_W_out = new float3();
//			ColorProfile.XYZ2xyY( XYZ_R_out, ref xyY_R_out );
//			ColorProfile.XYZ2xyY( XYZ_G_out, ref xyY_G_out );
//			ColorProfile.XYZ2xyY( XYZ_B_out, ref xyY_B_out );
//			ColorProfile.XYZ2xyY( XYZ_W_out, ref xyY_W_out );
		} break;
	}

	panelColorProfile.Bitmap = m_imageFile.AsBitmap;
}
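// Reminder of the standard CIE relations behind ColorProfile.xyY2XYZ() / XYZ2xyY() used above (definitional,
// shown here only as a reading aid):
//	xyY -> XYZ:   X = x * Y / y,   Y = Y,   Z = (1 - x - y) * Y / y
//	XYZ -> xyY:   x = X / (X + Y + Z),   y = Y / (X + Y + Z),   Y = Y
// (with the usual guards against y == 0 and X + Y + Z == 0)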
void FastFit() {
	// Load response curve
//	string text = "";
	List<float> responseCurve = new List<float>();
	using ( System.IO.FileStream S = new System.IO.FileInfo( "../../responseCurve9.float" ).OpenRead() )
		using ( System.IO.BinaryReader R = new System.IO.BinaryReader( S ) ) {
			for ( int i=0; i < 256; i++ ) {
				responseCurve.Add( R.ReadSingle() );
//				text += ", " + responseCurve[responseCurve.Count-1];
			}
		}

	// Perform fitting
	float a = 0.0f, b = 1.0f, c = 0.0f, d = 0.0f;
//	sumSqDiff = 21.664576085822642
//	float a = -6.55077f, b = 0.1263f, c = -0.000435788f, d = 7.52068e-7f;
//	FindFit( responseCurve.ToArray(), ref a, ref b, ref c, ref d );
	FindFitBFGS( responseCurve.ToArray(), ref a, ref b, ref c, ref d );

	// Render
	m_imageFile.Init( 1024, 768, ImageFile.PIXEL_FORMAT.RGBA8, new ColorProfile( ColorProfile.STANDARD_PROFILE.sRGB ) );
	m_imageFile.Clear( new float4( 1, 1, 1, 1 ) );

	float2 rangeX = new float2( 0, 255 );
	float2 rangeY = new float2( -2, 2 );

/*	m_imageFile.PlotGraphAutoRangeY( black, rangeX, ref rangeY, ( float x ) => {
		int i0 = (int) Math.Min( 255, Math.Floor( x ) );
		int i1 = (int) Math.Min( 255, i0+1 );
		float g0 = responseCurve[i0];
		float g1 = responseCurve[i1];
		float t = x - i0;
		return TentFilter( x ) * (g0 + (g1-g0) * t);
//		return (float) Math.Pow( 2.0f, g0 + (g1-g0) * t );
	} );
	m_imageFile.PlotGraph( red, rangeX, rangeY, ( float x ) => {
		return TentFilter( x ) * (a + b * x + c * x*x + d * x*x*x);
	} );
	m_imageFile.PlotLogAxes( black, rangeX, rangeY, -16.0f, 2.0f );
*/

//	m_imageFile.PlotGraphAutoRangeY( black, rangeX, ref rangeY, ( float x ) => {
	rangeY = new float2( 0, 400 );
	m_imageFile.PlotGraph( black, rangeX, rangeY, ( float x ) => {
		int i0 = (int) Math.Min( 255, Math.Floor( x ) );
		int i1 = (int) Math.Min( 255, i0+1 );
		float g0 = responseCurve[i0];
		float g1 = responseCurve[i1];
		float t = x - i0;
//		return (float) (Math.Log( (g0 + (g1-g0) * t) ) / Math.Log( 2 ));
		return (float) Math.Pow( 2.0, (g0 + (g1-g0) * t) );
	} );
	m_imageFile.PlotGraph( red, rangeX, rangeY, ( float x ) => {
//		return (float) (Math.Log( (a + b * x + c * x*x + d * x*x*x) ) / Math.Log( 2 ));
		return (float) Math.Pow( 2.0, (a + b * x + c * x*x + d * x*x*x) );
	} );

	panelLoad.Bitmap = m_imageFile.AsBitmap;
}
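// FindFitBFGS() fits the cubic model g(x) = a + b·x + c·x² + d·x³ to the log2 response curve loaded above
// (the curve stores log2 values, hence the Math.Pow( 2, ... ) when plotting). Below is a minimal sketch of the
// sum-of-squared-differences cost such a fit would minimize; this helper is illustrative only and is NOT the
// project's actual FindFitBFGS() implementation:
static double SumSqDiff( float[] _curve, float a, float b, float c, float d ) {
	double sum = 0.0;
	for ( int x=0; x < _curve.Length; x++ ) {
		double model = a + b * x + c * x*x + d * x*x*x;	// Cubic model evaluated in log2 space
		double delta = model - _curve[x];
		sum += delta * delta;
	}
	return sum;
}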
protected void TestConvertLDR2HDR( System.IO.FileInfo[] _LDRImageFileNames, bool _responseCurveOnly, bool _RAW ) {
	try {
		// Load the LDR images
		List< ImageFile > LDRImages = new List< ImageFile >();
		foreach ( System.IO.FileInfo LDRImageFileName in _LDRImageFileNames )
			LDRImages.Add( new ImageFile( LDRImageFileName ) );

		// Retrieve the shutter speeds
		List< float > shutterSpeeds = new List< float >();
		foreach ( ImageFile LDRImage in LDRImages ) {
			shutterSpeeds.Add( LDRImage.Metadata.ExposureTime );
		}

		// Retrieve filter type
		Bitmap.FILTER_TYPE filterType = Bitmap.FILTER_TYPE.NONE;
		if ( radioButtonFilterNone.Checked ) {
			filterType = Bitmap.FILTER_TYPE.NONE;
		} else if ( radioButtonFilterGaussian.Checked ) {
			filterType = Bitmap.FILTER_TYPE.SMOOTHING_GAUSSIAN;
		} else if ( radioButtonFilterGaussian2Pass.Checked ) {
			filterType = Bitmap.FILTER_TYPE.SMOOTHING_GAUSSIAN_2_PASSES;
		} else if ( radioButtonFilterTent.Checked ) {
			filterType = Bitmap.FILTER_TYPE.SMOOTHING_TENT;
		} else if ( radioButtonFilterCurveFitting.Checked ) {
			filterType = Bitmap.FILTER_TYPE.GAUSSIAN_PLUS_CURVE_FITTING;
		}

//		// Check EXR save is working!
//		ImageFile pipo = new ImageFile();
//		pipo.ConvertFrom( LDRImages[0], ImageFile.PIXEL_FORMAT.RGB32F );
//		pipo.Save( new System.IO.FileInfo( @"..\..\Images\Out\LDR2HDR\FromJPG\Result.exr" ), ImageFile.FILE_FORMAT.EXR, ImageFile.SAVE_FLAGS.SF_EXR_DEFAULT );

//		// Check bitmap->tone mapped image file is working
//		{
//			Bitmap tempBitmap = new Bitmap();
//			List< float > responseCurve = new List< float >( 256 );
//			for ( int i=0; i < 256; i++ )
//				responseCurve.Add( (float) (Math.Log( (1+i) / 256.0 ) / Math.Log(2)) );
//			tempBitmap.LDR2HDR( new ImageFile[] { LDRImages[4] }, new float[] { 1.0f }, responseCurve, 1.0f );
//
//			ImageFile tempHDR = new ImageFile();
//			tempBitmap.ToImageFile( tempHDR, new ColorProfile( ColorProfile.STANDARD_PROFILE.LINEAR ) );
//
//			ImageFile tempToneMappedHDR = new ImageFile();
//			tempToneMappedHDR.ToneMapFrom( tempHDR, ( float3 _HDRColor, ref float3 _LDRColor ) => {
//				// Just do gamma un-correction, don't care about actual HDR range...
//				_LDRColor.x = (float) Math.Pow( Math.Max( 0.0f, _HDRColor.x ), 1.0f / 2.2f );	// Here we need to clamp negative values that we sometimes get in EXR format
//				_LDRColor.y = (float) Math.Pow( Math.Max( 0.0f, _HDRColor.y ), 1.0f / 2.2f );	// (must be coming from the log encoding I suppose)
//				_LDRColor.z = (float) Math.Pow( Math.Max( 0.0f, _HDRColor.z ), 1.0f / 2.2f );
//			} );
//
//			panel1.Bitmap = tempToneMappedHDR.AsBitmap;
//			return;
//		}

		//////////////////////////////////////////////////////////////////////////////////////////////
		// Build the HDR device-independent bitmap
//		uint bitsPerPixel = _RAW ? 12U : 8U;
		uint bitsPerPixel = _RAW ? 8U : 8U;
		float quality = _RAW ? 3.0f : 3.0f;

		Bitmap.HDRParms parms = new Bitmap.HDRParms() {
			_inputBitsPerComponent = bitsPerPixel,
			_luminanceFactor = 1.0f,
			_curveSmoothnessConstraint = 1.0f,
			_quality = quality,
			_responseCurveFilterType = filterType
		};

		ImageUtility.Bitmap HDRImage = new ImageUtility.Bitmap();

//		HDRImage.LDR2HDR( LDRImages.ToArray(), shutterSpeeds.ToArray(), parms );

		// Compute response curve
		List< float > responseCurve = new List< float >();
		Bitmap.ComputeCameraResponseCurve( LDRImages.ToArray(), shutterSpeeds.ToArray(), parms._inputBitsPerComponent, parms._curveSmoothnessConstraint, parms._quality, responseCurve );

		// Filter
		List< float > responseCurve_filtered = new List< float >();
//		Bitmap.FilterCameraResponseCurve( responseCurve, responseCurve_filtered, Bitmap.FILTER_TYPE.CURVE_FITTING );
		Bitmap.FilterCameraResponseCurve( responseCurve, responseCurve_filtered, filterType );

//		using ( System.IO.FileStream S = new System.IO.FileInfo( "../../responseCurve3.float" ).Create() )
//			using ( System.IO.BinaryWriter W = new System.IO.BinaryWriter( S ) ) {
//				for ( int i=0; i < 256; i++ )
//					W.Write( responseCurve[i] );
//			}

		// Write info
		string info = "Exposures:\r\n";
		foreach ( float shutterSpeed in shutterSpeeds )
			info += " " + shutterSpeed + "s + ";
		info += "\r\nLog2 exposures (EV):\r\n";
		foreach ( float shutterSpeed in shutterSpeeds )
			info += " " + (float) (Math.Log( shutterSpeed ) / Math.Log(2)) + "EV + ";
		info += "\r\n\r\n";

		if ( _responseCurveOnly ) {
			//////////////////////////////////////////////////////////////////////////////////////////////
			// Render the response curve as a graph
			ImageFile tempCurveBitmap = new ImageFile( 1024, 768, ImageFile.PIXEL_FORMAT.RGB8, new ColorProfile( ColorProfile.STANDARD_PROFILE.sRGB ) );

			int responseCurveSizeMax = responseCurve.Count-1;

			float2 rangeX = new float2( 0, responseCurveSizeMax+1 );
			float2 rangeY = new float2( 0, 500 );
			tempCurveBitmap.Clear( new float4( 1, 1, 1, 1 ) );
//			tempCurveBitmap.PlotGraphAutoRangeY( red, rangeX, ref rangeY, ( float x ) => {
			tempCurveBitmap.PlotGraph( red, rangeX, rangeY, ( float x ) => {
				int i0 = (int) Math.Min( responseCurveSizeMax, Math.Floor( x ) );
				int i1 = (int) Math.Min( responseCurveSizeMax, i0+1 );
				float g0 = responseCurve[i0];
				float g1 = responseCurve[i1];
				float t = x - i0;
//				return g0 + (g1-g0) * t;
				return (float) Math.Pow( 2.0f, g0 + (g1-g0) * t );
			} );
			tempCurveBitmap.PlotGraph( blue, rangeX, rangeY, ( float x ) => {
				int i0 = (int) Math.Min( responseCurveSizeMax, Math.Floor( x ) );
				int i1 = (int) Math.Min( responseCurveSizeMax, i0+1 );
				float g0 = responseCurve_filtered[i0];
				float g1 = responseCurve_filtered[i1];
				float t = x - i0;
//				return g0 + (g1-g0) * t;
				return (float) Math.Pow( 2.0f, g0 + (g1-g0) * t );
			} );
//			tempCurveBitmap.PlotAxes( black, rangeX, rangeY, 8, 2 );
			info += "• Linear range Y = [" + rangeY.x + ", " + rangeY.y + "]\r\n";

			rangeY = new float2( -4, 4 );
			tempCurveBitmap.PlotLogGraphAutoRangeY( black, rangeX, ref rangeY, ( float x ) => {
//			tempCurveBitmap.PlotLogGraph( black, rangeX, rangeY, ( float x ) => {
				int i0 = (int) Math.Min( responseCurveSizeMax, Math.Floor( x ) );
				int i1 = (int) Math.Min( responseCurveSizeMax, i0+1 );
				float g0 = responseCurve[i0];
				float g1 = responseCurve[i1];
				float t = x - i0;
//				return g0 + (g1-g0) * t;
				return (float) Math.Pow( 2.0f, g0 + (g1-g0) * t );
			}, -1.0f, 2.0f );
			tempCurveBitmap.PlotLogGraph( blue, rangeX, rangeY, ( float x ) => {
//			tempCurveBitmap.PlotLogGraph( black, rangeX, rangeY, ( float x ) => {
				int i0 = (int) Math.Min( responseCurveSizeMax, Math.Floor( x ) );
				int i1 = (int) Math.Min( responseCurveSizeMax, i0+1 );
				float g0 = responseCurve_filtered[i0];
				float g1 = responseCurve_filtered[i1];
				float t = x - i0;
//				return g0 + (g1-g0) * t;
				return (float) Math.Pow( 2.0f, g0 + (g1-g0) * t );
			}, -1.0f, 2.0f );
			tempCurveBitmap.PlotLogAxes( black, rangeX, rangeY, -16, 2 );
			info += "• Log2 range Y = [" + rangeY.x + ", " + rangeY.y + "]\r\n";

			panelOutputHDR.Bitmap = tempCurveBitmap.AsBitmap;

		} else {
			//////////////////////////////////////////////////////////////////////////////////////////////
			// Recompose the HDR image
			HDRImage.LDR2HDR( LDRImages.ToArray(), shutterSpeeds.ToArray(), responseCurve_filtered, 1.0f );

			// Display as a tone-mapped bitmap
			ImageFile tempHDR = new ImageFile();
			HDRImage.ToImageFile( tempHDR, new ColorProfile( ColorProfile.STANDARD_PROFILE.LINEAR ) );

			if ( _RAW ) {
				tempHDR.Save( new System.IO.FileInfo( @"..\..\Images\Out\LDR2HDR\FromRAW\Result.exr" ), ImageFile.FILE_FORMAT.EXR, ImageFile.SAVE_FLAGS.SF_EXR_DEFAULT );
				tempHDR.Save( new System.IO.FileInfo( @"..\..\Images\Out\LDR2HDR\FromRAW\Result_B44LC.exr" ), ImageFile.FILE_FORMAT.EXR, ImageFile.SAVE_FLAGS.SF_EXR_B44 | ImageFile.SAVE_FLAGS.SF_EXR_LC );
				tempHDR.Save( new System.IO.FileInfo( @"..\..\Images\Out\LDR2HDR\FromRAW\Result_noLZW.exr" ), ImageFile.FILE_FORMAT.EXR, ImageFile.SAVE_FLAGS.SF_EXR_NONE );
			} else {
				tempHDR.Save( new System.IO.FileInfo( @"..\..\Images\Out\LDR2HDR\FromJPG\Result.exr" ), ImageFile.FILE_FORMAT.EXR, ImageFile.SAVE_FLAGS.SF_EXR_DEFAULT );
				tempHDR.Save( new System.IO.FileInfo( @"..\..\Images\Out\LDR2HDR\FromJPG\Result_B44LC.exr" ), ImageFile.FILE_FORMAT.EXR, ImageFile.SAVE_FLAGS.SF_EXR_B44 | ImageFile.SAVE_FLAGS.SF_EXR_LC );
				tempHDR.Save( new System.IO.FileInfo( @"..\..\Images\Out\LDR2HDR\FromJPG\Result_noLZW.exr" ), ImageFile.FILE_FORMAT.EXR, ImageFile.SAVE_FLAGS.SF_EXR_NONE );
			}

			ImageFile tempToneMappedHDR = new ImageFile();
			tempToneMappedHDR.ToneMapFrom( tempHDR, ( float3 _HDRColor, ref float3 _LDRColor ) => {
				// Just do gamma un-correction, don't care about actual HDR range...
				_LDRColor.x = (float) Math.Pow( Math.Max( 0.0f, _HDRColor.x ), 1.0f / 2.2f );	// Here we need to clamp negative values that we sometimes get in EXR format
				_LDRColor.y = (float) Math.Pow( Math.Max( 0.0f, _HDRColor.y ), 1.0f / 2.2f );	// (must be coming from the log encoding I suppose)
				_LDRColor.z = (float) Math.Pow( Math.Max( 0.0f, _HDRColor.z ), 1.0f / 2.2f );
			} );

			panelOutputHDR.Bitmap = tempToneMappedHDR.AsBitmap;
		}

		textBoxHDR.Text = info;

	} catch ( Exception _e ) {
		MessageBox.Show( "Error: " + _e.Message );

		// Show debug image
//		panelLoad.Bitmap = Bitmap.DEBUG.AsBitmap;
	}
}
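// The LDR2HDR() recombination above presumably follows the classic Debevec & Malik (1997) scheme: per pixel,
// a weighted average over all bracketed shots j of (response(Z_j) - log(shutterSpeed_j)), with a hat weighting
// that discounts under- and over-exposed samples. Below is a minimal per-pixel sketch of that idea, using the
// log2 convention of the response curves above; the helper name, the weighting and the exact formula used by
// ImageUtility.Bitmap are assumptions here, not the library's actual code:
static float RecombinedRadiance( byte[] Z, float[] shutterSpeeds, float[] responseCurve ) {
	float sum = 0.0f, sumWeights = 0.0f;
	for ( int j=0; j < Z.Length; j++ ) {
		float w = Z[j] <= 127 ? Z[j] : 255 - Z[j];	// Hat weighting: mid-range pixel values are the most reliable
		sum += w * (responseCurve[Z[j]] - (float) (Math.Log( shutterSpeeds[j] ) / Math.Log( 2 )));
		sumWeights += w;
	}
	return (float) Math.Pow( 2.0, sum / Math.Max( 1e-3f, sumWeights ) );	// Back from log2 radiance to linear radiance
}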
void TestGraph( TEST_GRAPH_TYPE _type ) {
	ColorProfile sRGB = new ColorProfile( ColorProfile.STANDARD_PROFILE.sRGB );
	m_imageFile.Init( 1024, 768, ImageFile.PIXEL_FORMAT.RGBA8, sRGB );

	m_imageFile.Clear( new float4( 1, 1, 1, 1 ) );
//	m_imageFile.Clear( new float4( 0, 0, 0, 1 ) );

	if ( _type == TEST_GRAPH_TYPE.SIMPLE_FUNCTION ) {
		// Unit test simple graph
		float2 rangeY = new float2( -1.0f, 1.0f );
//		m_imageFile.PlotGraph( black, new float2( -30.0f, 30.0f ), rangeY, ( float x ) => { return (float) Math.Sin( x ) / x; } );
		m_imageFile.PlotGraphAutoRangeY( black, new float2( -30.0f, 30.0f ), ref rangeY, ( float x ) => { return (float) Math.Sin( x ) / x; } );
		m_imageFile.PlotAxes( black, new float2( -30.0f, 30.0f ), rangeY, (float) (0.5 * Math.PI), 0.1f );

	} else if ( _type == TEST_GRAPH_TYPE.SIMPLE_LOG_FUNCTIONS ) {
		m_imageFile.PlotLogGraph( red, new float2( 0.0f, 2.0f ), new float2( 0.0f, 100.0f ), ( float x ) => { return (float) Math.Pow( 10.0, x ); }, 1.0f, 1.0f );
//		m_imageFile.PlotLogGraph( green, new float2( -2.0f, 2.0f ), new float2( 0.0f, 100.0f ), ( float x ) => { return (float) Math.Pow( 10.0, x ); }, 10.0f, 1.0f );
		m_imageFile.PlotLogGraph( green, new float2( 0.0f, 2.0f ), new float2( 0.0f, 2.0f ), ( float x ) => { return (float) Math.Pow( 10.0, x ); }, 1.0f, 10.0f );
		m_imageFile.PlotLogGraph( blue, new float2( -2.0f, 2.0f ), new float2( -2.0f, 2.0f ), ( float x ) => { return (float) Math.Pow( 10.0, x ); }, 10.0f, 10.0f );
//		m_imageFile.PlotLogAxes( black, new float2( -1000.0f, 1000.0f ), new float2( -100.0f, 100.0f ), -100.0f, -10.0f );
//		m_imageFile.PlotLogAxes( black, new float2( -100.0f, 1000.0f ), new float2( -2.0f, 2.0f ), -10.0f, 10.0f );
		m_imageFile.PlotLogAxes( black, new float2( -2.0f, 2.0f ), new float2( -2.0f, 2.0f ), 10.0f, 2.0f );

	} else if ( _type == TEST_GRAPH_TYPE.MANY_LINES ) {
		// Unit test a LOT of clipped lines!
		int W = (int) m_imageFile.Width;
		int H = (int) m_imageFile.Height;
		Random R = new Random( 1 );
		float2 P0 = new float2();
		float2 P1 = new float2();
		for ( int i=0; i < 10000; i++ ) {
			P0.x = (float) (R.NextDouble() * 3*W) - W;
			P0.y = (float) (R.NextDouble() * 3*H) - H;
			P1.x = (float) (R.NextDouble() * 3*W) - W;
			P1.y = (float) (R.NextDouble() * 3*H) - H;
			m_imageFile.DrawLine( black, P0, P1 );
//			m_imageFile.DrawLine( R.NextDouble() > 0.5 ? white : black, P0, P1 );
		}
	}

	panelDrawing.Bitmap = m_imageFile.AsBitmap;
}
void UpdateGraph() {
	double time = (DateTime.Now - m_startTime).TotalSeconds;

	TestTransform( time );

	m_image.Clear( float4.One );

	float2 rangeX = new float2( 0.0f, 1024.0f );
	float2 rangeY = new float2( -1, 1 );

	// Plot input signal
//	m_image.PlotGraphAutoRangeY( m_black, rangeX, ref rangeY, ( float x ) => {
	m_image.PlotGraph( m_black, rangeX, rangeY, ( float x ) => {
		int X = Math.Max( 0, Math.Min( 1023, (int) x ) );
		return (float) m_signalSource[X].r;
	} );

	// Plot reconstructed signals (Real and Imaginary parts)
	m_image.PlotGraph( m_red, rangeX, rangeY, ( float x ) => {
		int X = Math.Max( 0, Math.Min( 1023, (int) x ) );
		return (float) m_signalReconstructed[X].r;
	} );
	m_image.PlotGraph( m_blue, rangeX, rangeY, ( float x ) => {
		int X = Math.Max( 0, Math.Min( 1023, (int) x ) );
		return (float) m_signalReconstructed[X].i;
	} );

	m_image.PlotAxes( m_black, rangeX, rangeY, 16.0f, 0.1f );

	//////////////////////////////////////////////////////////////////////////
	// Render spectrum as (Real=Red, Imaginary=Blue) vertical lines for each frequency
	float2 cornerMin = m_image.RangedCoordinates2ImageCoordinates( rangeX, rangeY, new float2( rangeX.x, -1.0f ) );
	float2 cornerMax = m_image.RangedCoordinates2ImageCoordinates( rangeX, rangeY, new float2( rangeX.y, +1.0f ) );
	float2 delta = cornerMax - cornerMin;
	float zeroY = cornerMin.y + 0.5f * delta.y;

	float2 Xr0 = new float2( 0, zeroY );
	float2 Xr1 = new float2( 0, 0 );
	float2 Xi0 = new float2( 0, zeroY );
	float2 Xi1 = new float2( 0, 0 );

	float scale = 10.0f;
	float4 spectrumColorRe = new float4( 1, 0.25f, 0, 1 );
	float4 spectrumColorIm = new float4( 0, 0.5f, 1, 1 );

	int size = m_spectrum.Length;
	int halfSize = size >> 1;
	for ( int i=0; i < m_spectrum.Length; i++ ) {
		float X = cornerMin.x + i * delta.x / m_spectrum.Length;
//		int frequencyIndex = i;						// Show spectrum as output by FFT
		int frequencyIndex = (i + halfSize) % size;	// Show offset spectrum with DC term in the middle

		Xr0.x = X;
		Xr1.x = X;
		Xr1.y = cornerMin.y + 0.5f * (scale * (float) m_spectrum[frequencyIndex].r + 1.0f) * delta.y;
		Xi0.x = X+1;
		Xi1.x = X+1;
		Xi1.y = cornerMin.y + 0.5f * (scale * (float) m_spectrum[frequencyIndex].i + 1.0f) * delta.y;

		m_image.DrawLine( spectrumColorRe, Xr0, Xr1 );
		m_image.DrawLine( spectrumColorIm, Xi0, Xi1 );
	}

	imagePanel.Bitmap = m_image.AsBitmap;
}
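// Note on the frequency remapping above: "(i + halfSize) % size" is the usual fftshift-style reordering that
// moves the DC term from bin 0 to the middle of the display. For example, with size = 8 the display columns
// 0..7 show spectrum bins 4,5,6,7,0,1,2,3, so bin 0 (DC) lands at column 4, with negative frequencies drawn
// to its left and positive frequencies to its right.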
public float Dot( float2 a ) { return x*a.x + y*a.y; }
private bool Equals( float2 a, float2 b ) { return Math.Abs( a.x - b.x ) < EPSILON && Math.Abs( a.y - b.y ) < EPSILON; }
public Chromaticities( float xr, float yr, float xg, float yg, float xb, float yb, float xw, float yw ) {
	R = new float2( xr, yr );
	G = new float2( xg, yg );
	B = new float2( xb, yb );
	W = new float2( xw, yw );
}
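// Example: the standard sRGB / Rec.709 chromaticities with a D65 white point, shown here only to illustrate
// the (xr, yr, xg, yg, xb, yb, xw, yw) argument order:
//	Chromaticities sRGB = new Chromaticities( 0.6400f, 0.3300f, 0.3000f, 0.6000f, 0.1500f, 0.0600f, 0.3127f, 0.3290f );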