/// <summary>
/// Rebuilds the display bitmap from the source image, converting its XYZ content to RGB
/// using either the linear or the sRGB profile depending on the current view mode.
/// </summary>
private unsafe void UpdateBitmap() {
	if ( m_Image == null )
		return;

	int	W = m_Image.Width;
	int	H = m_Image.Height;

	// Re-create the backing bitmap if the source size changed
	if ( m_Bitmap != null && (m_Bitmap.Width != W || m_Bitmap.Height != H) ) {
		m_Bitmap.Dispose();
		m_Bitmap = null;
	}
	if ( m_Bitmap == null )
		m_Bitmap = new Bitmap( W, H, PixelFormat.Format32bppArgb );

	// Convert XYZ content to RGB with the appropriate profile
	ImageUtility.float4[,]	OriginalContentRGB = new ImageUtility.float4[W,H];
	if ( m_ViewLinear )
		m_ProfileLinear.XYZ2RGB( m_Image.ContentXYZ, OriginalContentRGB );
	else
		m_ProfilesRGB.XYZ2RGB( m_Image.ContentXYZ, OriginalContentRGB );

	// Brightness/contrast/gamma post-process is intentionally disabled for now:
//	ImageUtility.float4[,]	ContentRGB = ApplyBrightnessContrastGamma( OriginalContentRGB, m_Brightness, m_Contrast, m_Gamma );
	ImageUtility.float4[,]	ContentRGB = OriginalContentRGB;

	// Fill the bitmap pixel per pixel (32bpp ARGB is laid out B,G,R,A in memory)
	BitmapData	LockedBitmap = m_Bitmap.LockBits( new Rectangle( 0, 0, W, H ), ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb );
	for ( int Y=0; Y < H; Y++ ) {
		byte*	pScanline = (byte*) LockedBitmap.Scan0.ToPointer() + LockedBitmap.Stride * Y;
		for ( int X=0; X < W; X++ ) {
			byte	R = (byte) Math.Max( 0, Math.Min( 255, 255 * ContentRGB[X,Y].x ) );
			byte	G = (byte) Math.Max( 0, Math.Min( 255, 255 * ContentRGB[X,Y].y ) );
			byte	B = (byte) Math.Max( 0, Math.Min( 255, 255 * ContentRGB[X,Y].z ) );
			// FIX: the previous version computed an alpha byte from ContentRGB[X,Y].w
			// (including a per-pixel Linear2sRGB call) but never used it — the pixel was
			// always written fully opaque. The dead computation was removed; the displayed
			// result is unchanged.
			*pScanline++ = B;
			*pScanline++ = G;
			*pScanline++ = R;
			*pScanline++ = 0xFF;	// Force opaque
		}
	}
	m_Bitmap.UnlockBits( LockedBitmap );

	Refresh();
}
/// <summary>
/// Uses the white reference image to retrieve the luminance factor to apply based on the position in the image.
/// </summary>
/// <param name="_U">The U coordinate in the image (U = X/Width)</param>
/// <param name="_V">The V coordinate in the image (V = Y/Height)</param>
/// <returns>The luminance factor correcting spatial luminance discrepancies (1 when no white reference is loaded)</returns>
public float GetSpatialLuminanceCorrectionFactor( float _U, float _V ) {
	if ( m_WhiteReferenceImage == null )
		return 1.0f;	// No white reference: identity correction

	// Sample the white reference's luminance (Y component) at the requested position,
	// then normalize the stored maximum by it (guarding against division by ~0)
	ImageUtility.float4 Sample = m_WhiteReferenceImage.BilinearSample( _U * m_WhiteReferenceImage.Width, _V * m_WhiteReferenceImage.Height );
	return m_WhiteReflectanceImageMax / Math.Max( 1e-6f, Sample.y );
}
/// <summary>
/// Repaints the backing bitmap: maps the source image into the client rectangle
/// (nearest sampling), pads the outside with black, or clears to black when no image is set.
/// </summary>
public unsafe void UpdateBitmap() {
	if ( m_Bitmap == null )
		return;

	int Width = m_Bitmap.Width;
	int Height = m_Bitmap.Height;

	if ( m_Image == null ) {
		// No source image: simply clear to black
		using ( Graphics Gfx = Graphics.FromImage( m_Bitmap ) )
			Gfx.FillRectangle( Brushes.Black, 0, 0, Width, Height );
		Invalidate();
		return;
	}

	int ImageWidth = m_Image.GetLength( 0 );
	int ImageHeight = m_Image.GetLength( 1 );
	RectangleF TargetRect = ImageClientRect();

	// Fill pixel per pixel (32bpp ARGB is laid out B,G,R,A in memory)
	BitmapData Locked = m_Bitmap.LockBits( new Rectangle( 0, 0, Width, Height ), ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb );
	for ( int Y=0; Y < Height; Y++ ) {
		byte* pPixel = (byte*) Locked.Scan0.ToPointer() + Locked.Stride * Y;
		for ( int X=0; X < Width; X++ ) {
			byte R = 0, G = 0, B = 0;	// Black outside the image rectangle
			bool Inside = X >= TargetRect.X && X < TargetRect.Right && Y >= TargetRect.Y && Y < TargetRect.Bottom;
			if ( Inside ) {
				// Nearest sampling of the source image, rescaled to the target rectangle
				int SourceX = (int) (ImageWidth * (X - TargetRect.X) / TargetRect.Width);
				int SourceY = (int) (ImageHeight * (Y - TargetRect.Y) / TargetRect.Height);
				ImageUtility.float4 RGB = m_Image[SourceX, SourceY];
				R = (byte) Math.Max( 0, Math.Min( 255, 255.0f * RGB.x ) );
				G = (byte) Math.Max( 0, Math.Min( 255, 255.0f * RGB.y ) );
				B = (byte) Math.Max( 0, Math.Min( 255, 255.0f * RGB.z ) );
			}
			*pPixel++ = B;
			*pPixel++ = G;
			*pPixel++ = R;
			*pPixel++ = 0xFF;	// Opaque alpha
		}
	}
	m_Bitmap.UnlockBits( Locked );

	Invalidate();
}
/// <summary>
/// Builds a constant-color swatch bitmap.
/// </summary>
/// <param name="_Width">Swatch width in pixels</param>
/// <param name="_Height">Swatch height in pixels</param>
/// <param name="_xyY">The xyY color to fill the swatch with</param>
/// <returns>A new sRGB-profiled bitmap uniformly filled with the given color</returns>
private ImageUtility.Bitmap BuildSwatch( int _Width, int _Height, ImageUtility.float3 _xyY ) {
	// Convert the xyY color once, with full alpha, then flood-fill the XYZ content
	ImageUtility.float4 SwatchXYZ = new ImageUtility.float4( ImageUtility.ColorProfile.xyY2XYZ( _xyY ), 1.0f );
	ImageUtility.Bitmap Swatch = new ImageUtility.Bitmap( _Width, _Height, new ImageUtility.ColorProfile( ImageUtility.ColorProfile.STANDARD_PROFILE.sRGB ) );
	for ( int Y=0; Y < _Height; Y++ )
		for ( int X=0; X < _Width; X++ )
			Swatch.ContentXYZ[X,Y] = SwatchXYZ;
	return Swatch;
}
/// <summary>
/// Runs the bilateral filtering test: filters the source texture, reads the result back
/// from the staging (CPU) texture and rebuilds the result bitmap for display.
/// </summary>
private void buttonTestBilateral_Click(object sender, EventArgs e) {
	try {
		panelParameters.Enabled = false;	// Disable the UI while processing

		//////////////////////////////////////////////////////////////////////////
		// 1] Apply bilateral filtering to the input texture as a pre-process
		ApplyBilateralFiltering(m_TextureSource, m_TextureTarget0, floatTrackbarControlBilateralRadius.Value, floatTrackbarControlBilateralTolerance.Value, checkBoxWrap.Checked, 100);
		progressBar.Value = progressBar.Maximum;

		//////////////////////////////////////////////////////////////////////////
		// 2] Copy target to staging for CPU readback and update the resulting bitmap
		m_TextureTarget_CPU.CopyFrom(m_TextureTarget0);

		// Re-create the result bitmap
		// NOTE(review): W and H are not declared in this handler — presumably fields holding
		// the texture dimensions; confirm against the enclosing class.
		if (m_BitmapResult != null) {
			m_BitmapResult.Dispose();
		}
		m_BitmapResult = null;
		m_BitmapResult = new ImageUtility.Bitmap(W, H, m_ProfileLinear);
		m_BitmapResult.HasAlpha = true;

		// Read back the single-channel AO floats and replicate them into all 4 components,
		// converting from linear RGB to the bitmap's XYZ storage
		RendererManaged.PixelsBuffer Pixels = m_TextureTarget_CPU.Map(0, 0);
		using (System.IO.BinaryReader R = Pixels.OpenStreamRead())
			for (int Y = 0; Y < H; Y++) {
				R.BaseStream.Position = Y * Pixels.RowPitch;	// Rows may be padded: seek to each row's start
				for (int X = 0; X < W; X++) {
					float AO = R.ReadSingle();
					ImageUtility.float4 Color = new ImageUtility.float4(AO, AO, AO, AO);
					Color = m_ProfileLinear.RGB2XYZ(Color);
					m_BitmapResult.ContentXYZ[X, Y] = Color;
				}
			}
		Pixels.Dispose();	// NOTE(review): buffer disposed before UnMap — confirm this order against RendererManaged's contract
		m_TextureTarget_CPU.UnMap(0, 0);

		// Assign result
		viewportPanelResult.Image = m_BitmapResult;
	} catch (Exception _e) {
		MessageBox("An error occurred during generation!\r\n\r\nDetails: ", _e);
	} finally {
		panelParameters.Enabled = true;	// Always re-enable the UI, even on failure
	}
}
/// <summary>
/// Creates an embeddable luminance thumbnail of the reference image.
/// The image's largest dimension is mapped to _ThumbnailSize, preserving aspect ratio;
/// only the luminance (Y of XYZ) is kept, quantized to a byte per pixel.
/// </summary>
/// <param name="_Image">Source image whose XYZ content is sampled (nearest sampling)</param>
/// <param name="_ThumbnailSize">Target size of the thumbnail's largest dimension
/// (generalized from the former hard-coded 256; default preserves the old behavior)</param>
public void CreateThumbnail( ImageUtility.Bitmap _Image, int _ThumbnailSize = 256 ) {
	int MaxDim = Math.Max( _Image.Width, _Image.Height );
	int W = _ThumbnailSize * _Image.Width / MaxDim;
	int H = _ThumbnailSize * _Image.Height / MaxDim;

	// Build the thumbnail, storing only the clamped luminance as a byte
	m_Thumbnail = new byte[W,H];
	for ( int Y=0; Y < H; Y++ )
		for ( int X=0; X < W; X++ ) {
			ImageUtility.float4 XYZ = _Image.ContentXYZ[X * _Image.Width / W, Y * _Image.Height / H];
			m_Thumbnail[X,Y] = (byte) Math.Min( 255, Math.Max( 0, 255.0f * XYZ.y ) );
		}
}
/// <summary>
/// Applies brightness/contrast/gamma correction to every pixel of a source image.
/// Only the color channels (x, y, z) are transformed; the w (alpha) component is carried through unchanged.
/// </summary>
/// <param name="_Source">Source image as a 2D array of float4 pixels</param>
/// <param name="_Brightness">Brightness parameter forwarded to the scalar overload</param>
/// <param name="_Contrast">Contrast parameter forwarded to the scalar overload</param>
/// <param name="_Gamma">Gamma parameter forwarded to the scalar overload</param>
/// <returns>A new array of the same dimensions with the correction applied</returns>
public ImageUtility.float4[,] ApplyBrightnessContrastGamma( ImageUtility.float4[,] _Source, float _Brightness, float _Contrast, float _Gamma ) {
	int Width = _Source.GetLength( 0 );
	int Height = _Source.GetLength( 1 );

	ImageUtility.float4[,] Target = new ImageUtility.float4[Width,Height];
	for ( int Y=0; Y < Height; Y++ )
		for ( int X=0; X < Width; X++ ) {
			ImageUtility.float4 Pixel = _Source[X,Y];
			// Transform each color channel independently via the scalar overload
			Pixel.x = ApplyBrightnessContrastGamma( Pixel.x, _Brightness, _Contrast, _Gamma );
			Pixel.y = ApplyBrightnessContrastGamma( Pixel.y, _Brightness, _Contrast, _Gamma );
			Pixel.z = ApplyBrightnessContrastGamma( Pixel.z, _Brightness, _Contrast, _Gamma );
			Target[X,Y] = Pixel;
		}
	return Target;
}
/// <summary>
/// Computes the average color within a rectangle in UV space.
/// </summary>
/// <param name="_TopLeft">The top left corner (in UV space) of the rectangle to sample</param>
/// <param name="_BottomRight">The bottom right corner (in UV space) of the rectangle to sample</param>
/// <returns>The average xyY color</returns>
public ImageUtility.float3 ComputeAverageSwatchColor( ImageUtility.float2 _TopLeft, ImageUtility.float2 _BottomRight ) {
	// Convert UVs to clamped pixel bounds; the resulting rectangle is always at least 1x1
	int Left = Math.Max( 0, Math.Min( m_Texture.Width - 1, (int) Math.Floor( _TopLeft.x * m_Texture.Width ) ) );
	int Top = Math.Max( 0, Math.Min( m_Texture.Height - 1, (int) Math.Floor( _TopLeft.y * m_Texture.Height ) ) );
	int Right = Math.Min( m_Texture.Width, Math.Max( Left + 1, (int) Math.Floor( _BottomRight.x * m_Texture.Width ) ) );
	int Bottom = Math.Min( m_Texture.Height, Math.Max( Top + 1, (int) Math.Floor( _BottomRight.y * m_Texture.Height ) ) );

	// Accumulate the XYZ content over the rectangle
	ImageUtility.float4 SumXYZ = new ImageUtility.float4( 0, 0, 0, 0 );
	for ( int Y=Top; Y < Bottom; Y++ )
		for ( int X=Left; X < Right; X++ )
			SumXYZ += m_Texture.ContentXYZ[X,Y];

	// Average, then convert to xyY
	float InvCount = 1.0f / ((Right - Left) * (Bottom - Top));
	return ImageUtility.ColorProfile.XYZ2xyY( (ImageUtility.float3) (InvCount * SumXYZ) );
}
//////////////////////////////////////////////////////////////////////////
// White Reference Image
//
/// <summary>
/// Builds a downsampled, desaturated (black &amp; white) copy of the currently loaded image
/// and assigns it to the calibration database as the white reference image.
/// Each destination texel averages the bilinear samples of the source area it covers,
/// then its chromaticity is replaced by the profile's white point, keeping only luminance.
/// </summary>
private void buttonPickWhiteRefImage_Click( object sender, EventArgs e ) {
	if ( m_BitmapXYZ == null ) {
		// Can't proceed without a loaded image
		MessageBox( "Can't pick white reference as no image is currently loaded!", MessageBoxButtons.OK, MessageBoxIcon.Exclamation );
		return;
	}

	try {
		// Choose the reference size, preserving the source aspect ratio
		int W, H;
		if ( m_BitmapXYZ.Width > m_BitmapXYZ.Height ) {
			W = DEFAULT_WHITE_REFERENCE_IMAGE_SIZE;
			H = W * m_BitmapXYZ.Height / m_BitmapXYZ.Width;
		} else {
			H = DEFAULT_WHITE_REFERENCE_IMAGE_SIZE;
			W = H * m_BitmapXYZ.Width / m_BitmapXYZ.Height;
		}

		// Find the maximum luminance in the image that we'll use as a normalizer
		ImageUtility.Bitmap WhiteRef = new ImageUtility.Bitmap( W, H, m_sRGBProfile );
		for ( int Y=0; Y < H; Y++ )
			for ( int X=0; X < W; X++ ) {
				// Source-space footprint of destination texel (X,Y)
				float x0 = m_BitmapXYZ.Width * (float) X / W;
				float x1 = m_BitmapXYZ.Width * (float) (X+1) / W;
				float y0 = m_BitmapXYZ.Height * (float) Y / H;
				float y1 = m_BitmapXYZ.Height * (float) (Y+1) / H;

				// Average bilinear samples over the footprint, stepping by 1 source pixel
				ImageUtility.float4 SumXYZ = new ImageUtility.float4( 0, 0, 0, 0 );
				int Count = 0;
				float y = y0;
				while ( y < y1 ) {
					float x = x0;
					while ( x < x1 ) {
						SumXYZ += m_BitmapXYZ.BilinearSample( x, y );
						Count++;
						x++;
					}
					y++;
				}
				float Test = (float) (Math.Ceiling(x1-x0) * Math.Ceiling(y1-y0));	// Debug-only sanity value: should equal Count
				SumXYZ = (1.0f / Math.Max( 1, Count)) * SumXYZ;

				// Keep only the luminance: force the chromaticity to the profile's white point
				ImageUtility.float3 xyY = ImageUtility.ColorProfile.XYZ2xyY( (ImageUtility.float3) SumXYZ );
				xyY.x = m_sRGBProfile.Chromas.W.x;	// B&W
				xyY.y = m_sRGBProfile.Chromas.W.y;
				ImageUtility.float4 XYZ = new ImageUtility.float4( ImageUtility.ColorProfile.xyY2XYZ( xyY ), SumXYZ.w );
				WhiteRef.ContentXYZ[X,Y] = XYZ;
			}

		// Assign to the database
		m_CalibrationDatabase.WhiteReferenceImage = WhiteRef;

		UpdateWhiteReferenceImageUI();
	} catch ( Exception _e ) {
		MessageBox( "An error occurred while creating the white reference image:\r\n\r\n", _e );
	}
}
/// <summary>
/// Prepares the interpolated calibration table to process the pixels in an image shot with the specified shot infos
/// </summary>
/// <param name="_ISOSpeed">ISO speed of the shot</param>
/// <param name="_ShutterSpeed">Shutter speed of the shot</param>
/// <param name="_Aperture">Aperture of the shot</param>
public void PrepareCalibrationFor(float _ISOSpeed, float _ShutterSpeed, float _Aperture) {
	if (m_RootNode == null) {
		throw new Exception("Calibration grid hasn't been built: did you provide a valid database path? Does the path contain camera calibration data?");
	}
	if (IsPreparedFor(_ISOSpeed, _ShutterSpeed, _Aperture)) {
		return;	// Already prepared!
	}

	//////////////////////////////////////////////////////////////////////////
	// Find the 8 nodes encompassing our values
	//
	// I'm making the delicate assumption that, although the starting node is chosen on the
	// condition its EV values are strictly inferior to the target we're looking for, all
	// neighbor nodes should satisfy the condition they're properly placed.
	//
	// This is true for the direct neighbors +X, +Y, +Z that are immediately above target values
	// but for example, neighbor (+X +Y) may have a very bad aperture value (Z) that may be
	// above the target aperture...
	//
	// Let's hope the user won't provide too fancy calibrations...
	// (anyway, interpolants are clamped in [0,1] so there's no risk of overshooting)
	//
	// EV space axes: x = ISO speed, y = shutter speed, z = aperture
	ImageUtility.float3 EV;
	GridNode.Convert2EV(_ISOSpeed, _ShutterSpeed, _Aperture, out EV.x, out EV.y, out EV.z);

	// Find the start node
	GridNode StartNode = FindStartNode(EV.x, EV.y, EV.z);
	m_InterpolationStartNode = StartNode;

	// Build the 8 grid nodes from it, each missing neighbor falling back to the node it
	// would have extended (m_Neighbors[axis][1] is the neighbor in the +direction on that axis)
	GridNode[,,] Grid = new GridNode[2, 2, 2];
	Grid[0, 0, 0] = StartNode;
	Grid[1, 0, 0] = StartNode.m_Neighbors[0][1] != null ? StartNode.m_Neighbors[0][1] : StartNode;	// +X
	Grid[0, 1, 0] = StartNode.m_Neighbors[1][1] != null ? StartNode.m_Neighbors[1][1] : StartNode;	// +Y
	Grid[0, 0, 1] = StartNode.m_Neighbors[2][1] != null ? StartNode.m_Neighbors[2][1] : StartNode;	// +Z
	Grid[1, 1, 0] = Grid[1, 0, 0].m_Neighbors[1][1] != null ? Grid[1, 0, 0].m_Neighbors[1][1] : Grid[1, 0, 0];	// +X +Y
	Grid[0, 1, 1] = Grid[0, 1, 0].m_Neighbors[2][1] != null ? Grid[0, 1, 0].m_Neighbors[2][1] : Grid[0, 1, 0];	// +Y +Z
	Grid[1, 0, 1] = Grid[0, 0, 1].m_Neighbors[0][1] != null ? Grid[0, 0, 1].m_Neighbors[0][1] : Grid[0, 0, 1];	// +X +Z
	Grid[1, 1, 1] = Grid[1, 1, 0].m_Neighbors[2][1] != null ? Grid[1, 1, 0].m_Neighbors[2][1] : Grid[1, 1, 0];	// +X +Y +Z

	//////////////////////////////////////////////////////////////////////////
	// Create the successive interpolants for trilinear interpolation
	//
	// Assume we interpolate on X first (ISO speed), so we need 4 distinct values.
	// Each interpolant is clamped to [0,1]; the 1e-6 denominator floor guards against
	// degenerate (collapsed) grid cells produced by the fallback above.
	ImageUtility.float4 tX = new ImageUtility.float4(
		Math.Max(0.0f, Math.Min(1.0f, (EV.x - Grid[0, 0, 0].m_EV_ISOSpeed) / Math.Max(1e-6f, Grid[1, 0, 0].m_EV_ISOSpeed - Grid[0, 0, 0].m_EV_ISOSpeed))),	// Y=0 Z=0
		Math.Max(0.0f, Math.Min(1.0f, (EV.x - Grid[0, 1, 0].m_EV_ISOSpeed) / Math.Max(1e-6f, Grid[1, 1, 0].m_EV_ISOSpeed - Grid[0, 1, 0].m_EV_ISOSpeed))),	// Y=1 Z=0
		Math.Max(0.0f, Math.Min(1.0f, (EV.x - Grid[0, 0, 1].m_EV_ISOSpeed) / Math.Max(1e-6f, Grid[1, 0, 1].m_EV_ISOSpeed - Grid[0, 0, 1].m_EV_ISOSpeed))),	// Y=0 Z=1
		Math.Max(0.0f, Math.Min(1.0f, (EV.x - Grid[0, 1, 1].m_EV_ISOSpeed) / Math.Max(1e-6f, Grid[1, 1, 1].m_EV_ISOSpeed - Grid[0, 1, 1].m_EV_ISOSpeed)))	// Y=1 Z=1
	);
	ImageUtility.float4 rX = new ImageUtility.float4(1.0f - tX.x, 1.0f - tX.y, 1.0f - tX.z, 1.0f - tX.w);

	// Compute the 4 interpolated shutter speeds & apertures
	ImageUtility.float4 ShutterSpeedsX = new ImageUtility.float4(
		rX.x * Grid[0, 0, 0].m_EV_ShutterSpeed + tX.x * Grid[1, 0, 0].m_EV_ShutterSpeed,	// Y=0 Z=0
		rX.y * Grid[0, 1, 0].m_EV_ShutterSpeed + tX.y * Grid[1, 1, 0].m_EV_ShutterSpeed,	// Y=1 Z=0
		rX.z * Grid[0, 0, 1].m_EV_ShutterSpeed + tX.z * Grid[1, 0, 1].m_EV_ShutterSpeed,	// Y=0 Z=1
		rX.w * Grid[0, 1, 1].m_EV_ShutterSpeed + tX.w * Grid[1, 1, 1].m_EV_ShutterSpeed	// Y=1 Z=1
	);
	ImageUtility.float4 AperturesX = new ImageUtility.float4(
		rX.x * Grid[0, 0, 0].m_EV_Aperture + tX.x * Grid[1, 0, 0].m_EV_Aperture,	// Y=0 Z=0
		rX.y * Grid[0, 1, 0].m_EV_Aperture + tX.y * Grid[1, 1, 0].m_EV_Aperture,	// Y=1 Z=0
		rX.z * Grid[0, 0, 1].m_EV_Aperture + tX.z * Grid[1, 0, 1].m_EV_Aperture,	// Y=0 Z=1
		rX.w * Grid[0, 1, 1].m_EV_Aperture + tX.w * Grid[1, 1, 1].m_EV_Aperture	// Y=1 Z=1
	);

	// Next we interpolate on Y (Shutter speed), so we need 2 distinct values
	ImageUtility.float2 tY = new ImageUtility.float2(
		Math.Max(0.0f, Math.Min(1.0f, (EV.y - ShutterSpeedsX.x) / Math.Max(1e-6f, ShutterSpeedsX.y - ShutterSpeedsX.x))),	// Z=0
		Math.Max(0.0f, Math.Min(1.0f, (EV.y - ShutterSpeedsX.z) / Math.Max(1e-6f, ShutterSpeedsX.w - ShutterSpeedsX.z)))	// Z=1
	);
	ImageUtility.float2 rY = new ImageUtility.float2(1.0f - tY.x, 1.0f - tY.y);

	// Compute the 2 apertures
	ImageUtility.float2 AperturesY = new ImageUtility.float2(
		rY.x * AperturesX.x + tY.x * AperturesX.y,
		rY.y * AperturesX.z + tY.y * AperturesX.w
	);

	// Finally, we interpolate on Z (Aperture), we need only 1 single value
	float tZ = Math.Max(0.0f, Math.Min(1.0f, (EV.z - AperturesY.x) / Math.Max(1e-6f, AperturesY.y - AperturesY.x)));
	float rZ = 1.0f - tZ;

	//////////////////////////////////////////////////////////////////////////
	// Create the special camera calibration that is the result of the interpolation of the 8 nearest ones in the grid
	m_InterpolatedCalibration = new CameraCalibration();
	m_InterpolatedCalibration.m_CameraShotInfos.m_ISOSpeed = _ISOSpeed;
	m_InterpolatedCalibration.m_CameraShotInfos.m_ShutterSpeed = _ShutterSpeed;
	m_InterpolatedCalibration.m_CameraShotInfos.m_Aperture = _Aperture;

	// Trilinearly interpolate the measured luminance of every probe
	for (int ProbeIndex = 0; ProbeIndex < REQUIRED_PROBES_COUNT; ProbeIndex++) {
		CameraCalibration.Probe TargetProbe = m_InterpolatedCalibration.m_Reflectances[ProbeIndex];

		// The 8 corner luminances (L<x><y><z>)
		float L000 = Grid[0, 0, 0].m_CameraCalibration.m_Reflectances[ProbeIndex].m_LuminanceMeasured;
		float L100 = Grid[1, 0, 0].m_CameraCalibration.m_Reflectances[ProbeIndex].m_LuminanceMeasured;
		float L010 = Grid[0, 1, 0].m_CameraCalibration.m_Reflectances[ProbeIndex].m_LuminanceMeasured;
		float L110 = Grid[1, 1, 0].m_CameraCalibration.m_Reflectances[ProbeIndex].m_LuminanceMeasured;
		float L001 = Grid[0, 0, 1].m_CameraCalibration.m_Reflectances[ProbeIndex].m_LuminanceMeasured;
		float L101 = Grid[1, 0, 1].m_CameraCalibration.m_Reflectances[ProbeIndex].m_LuminanceMeasured;
		float L011 = Grid[0, 1, 1].m_CameraCalibration.m_Reflectances[ProbeIndex].m_LuminanceMeasured;
		float L111 = Grid[1, 1, 1].m_CameraCalibration.m_Reflectances[ProbeIndex].m_LuminanceMeasured;

		// Interpolate on X (ISO speed)
		float L00 = rX.x * L000 + tX.x * L100;
		float L10 = rX.y * L010 + tX.y * L110;
		float L01 = rX.z * L001 + tX.z * L101;
		float L11 = rX.w * L011 + tX.w * L111;

		// Interpolate on Y (shutter speed)
		float L0 = rY.x * L00 + tY.x * L10;
		float L1 = rY.y * L01 + tY.y * L11;

		// Interpolate on Z (aperture)
		float L = rZ * L0 + tZ * L1;

		TargetProbe.m_IsAvailable = true;
		TargetProbe.m_LuminanceMeasured = L;
	}

	// Fill missing values
	m_InterpolatedCalibration.UpdateAllLuminances();

	// Reset white reflectance reference because it was set for another setup
	WhiteReflectanceReference = new ImageUtility.float3(0, 0, -1);
}
public ImageUtility.float3 xyY; // The color used to build the swatch

#endregion Fields

#region Methods

/// <summary>
/// Saves the swatch color to the provided XML element, as both an xyY attribute
/// and an RGB attribute converted through the texture's color profile.
/// </summary>
/// <param name="_Owner">The owning texture, used for its SetAttribute helper</param>
/// <param name="_SwatchElement">The XML element receiving the "xyY" and "RGB" attributes</param>
public virtual void Save( CalibratedTexture _Owner, XmlElement _SwatchElement ) {
	ImageUtility.float4 XYZ = new ImageUtility.float4( ImageUtility.ColorProfile.xyY2XYZ( xyY ), 1.0f );
	// NOTE(review): "Texture" is presumably a bitmap property declared elsewhere on this class — confirm
	ImageUtility.float3 RGB = (ImageUtility.float3) Texture.Profile.XYZ2RGB( XYZ );
	_Owner.SetAttribute( _SwatchElement, "xyY", xyY.ToString() ).SetAttribute( "RGB", RGB.ToString() );
}
/// <summary>
/// Builds a swatch bitmap uniformly filled with a single xyY color.
/// </summary>
/// <param name="_Width">Swatch width in pixels</param>
/// <param name="_Height">Swatch height in pixels</param>
/// <param name="_xyY">The color of the swatch</param>
/// <returns>A new sRGB-profiled bitmap filled with the color</returns>
private ImageUtility.Bitmap BuildSwatch( int _Width, int _Height, ImageUtility.float3 _xyY ) {
	ImageUtility.ColorProfile Profile = new ImageUtility.ColorProfile( ImageUtility.ColorProfile.STANDARD_PROFILE.sRGB );
	ImageUtility.Bitmap Result = new ImageUtility.Bitmap( _Width, _Height, Profile );

	// Convert the color once (with full alpha) and write it to every texel
	ImageUtility.float4 FillColor = new ImageUtility.float4( ImageUtility.ColorProfile.xyY2XYZ( _xyY ), 1.0f );
	for ( int X=0; X < _Width; X++ )
		for ( int Y=0; Y < _Height; Y++ )
			Result.ContentXYZ[X,Y] = FillColor;

	return Result;
}
/// <summary>
/// Computes the average color within a rectangle in UV space.
/// </summary>
/// <param name="_TopLeft">The top left corner (in UV space) of the rectangle to sample</param>
/// <param name="_BottomRight">The bottom right corner (in UV space) of the rectangle to sample</param>
/// <returns>The average xyY color</returns>
public ImageUtility.float3 ComputeAverageSwatchColor( ImageUtility.float2 _TopLeft, ImageUtility.float2 _BottomRight ) {
	int TexW = m_Texture.Width;
	int TexH = m_Texture.Height;

	// Clamp the UV rectangle into valid, non-empty pixel bounds
	int X0 = Math.Max( 0, Math.Min( TexW-1, (int) Math.Floor( _TopLeft.x * TexW ) ) );
	int Y0 = Math.Max( 0, Math.Min( TexH-1, (int) Math.Floor( _TopLeft.y * TexH ) ) );
	int X1 = Math.Min( TexW, Math.Max( X0+1, (int) Math.Floor( _BottomRight.x * TexW ) ) );
	int Y1 = Math.Min( TexH, Math.Max( Y0+1, (int) Math.Floor( _BottomRight.y * TexH ) ) );

	// Sum the XYZ content over the rectangle
	ImageUtility.float4 Accumulator = new ImageUtility.float4( 0, 0, 0, 0 );
	for ( int Y=Y0; Y < Y1; Y++ )
		for ( int X=X0; X < X1; X++ ) {
			ImageUtility.float4 SampleXYZ = m_Texture.ContentXYZ[X,Y];
			Accumulator += SampleXYZ;
		}

	// Average and convert to xyY
	int PixelCount = (X1 - X0) * (Y1 - Y0);
	Accumulator = (1.0f / PixelCount) * Accumulator;
	return ImageUtility.ColorProfile.XYZ2xyY( (ImageUtility.float3) Accumulator );
}
/// <summary>
/// Captures the calibrated texture
/// </summary>
/// <param name="_Source">The source image to capture</param>
/// <param name="_Database">Database to perform proper calibration</param>
/// <param name="_Parms">Parameters for the capture</param>
public void Capture( ImageUtility.Bitmap _Source, CameraCalibrationDatabase _Database, CaptureParms _Parms ) {
	if ( _Source == null )
		throw new Exception( "Invalid source bitmap to build texture from!" );
	if ( _Database == null )
		throw new Exception( "Invalid calibration database found in parameters!" );
	if ( _Parms == null )
		throw new Exception( "Invalid calibration parameters!" );
	if ( m_SwatchWidth <= 0 || m_SwatchHeight <= 0 )
		throw new Exception( "Invalid swatch size! Must be > 0!" );

	// Save parameters as they're associated to this texture
	m_CaptureParameters = _Parms;
	m_WhiteReflectanceReference = _Database.WhiteReflectanceReference;
	m_WhiteReflectanceCorrectionFactor = _Database.WhiteReflectanceCorrectionFactor;
	m_SpatialCorrectionEnabled = _Database.WhiteReferenceImage != null;

	//////////////////////////////////////////////////////////////////////////
	// Setup the database to find the most appropriate calibration data for our image infos
	_Database.PrepareCalibrationFor( _Parms.ISOSpeed, _Parms.ShutterSpeed, _Parms.Aperture );

	//////////////////////////////////////////////////////////////////////////
	// Build target texture
	ImageUtility.float4 AvgXYZ = new ImageUtility.float4( 0, 0, 0, 0 );

//DEBUG
//	float MinLuminance_Raw = float.MaxValue;
//	float MaxLuminance_Raw = -float.MaxValue;

	// Keep the N most extreme samples so min/max swatches can be estimated statistically later
	const int EXTREME_VALUES_COUNT = 100;
	ImageUtility.float3[] ArrayMin = new ImageUtility.float3[EXTREME_VALUES_COUNT];
	ImageUtility.float3[] ArrayMax = new ImageUtility.float3[EXTREME_VALUES_COUNT];
	for ( int i=0; i < EXTREME_VALUES_COUNT; i++ ) {
		ArrayMin[i] = new ImageUtility.float3( 0, 1, 0 );	// Seed with maximal luminance so any real sample replaces it
		ArrayMax[i] = new ImageUtility.float3( 0, 0, 0 );	// Seed with minimal luminance
	}

	if ( _Parms.CropSource ) {
		// NOTE: crop rectangle coordinates and half-sizes appear to be expressed relative to the
		// source HEIGHT on both axes (both dimensions multiply by _Source.Height) — TODO confirm
		float fImageWidth = 2.0f * _Parms.CropRectangleHalfSize.x * _Source.Height;
		float fImageHeight = 2.0f * _Parms.CropRectangleHalfSize.y * _Source.Height;
		int W = (int) Math.Floor( fImageWidth );
		int H = (int) Math.Floor( fImageHeight );

		// Rotated basis of the crop rectangle
		ImageUtility.float2 AxisX = new ImageUtility.float2( (float) Math.Cos( _Parms.CropRectangleRotation ), -(float) Math.Sin( _Parms.CropRectangleRotation ) );
		ImageUtility.float2 AxisY = new ImageUtility.float2( (float) Math.Sin( _Parms.CropRectangleRotation ), (float) Math.Cos( _Parms.CropRectangleRotation ) );

		// Source-space position of the crop rectangle's top-left corner
		ImageUtility.float2 TopLeftCorner = new ImageUtility.float2( 0.5f * (_Source.Width - _Source.Height) + _Parms.CropRectangleCenter.x * _Source.Height, _Source.Height * _Parms.CropRectangleCenter.y )
			+ _Source.Height * (-_Parms.CropRectangleHalfSize.x * AxisX - _Parms.CropRectangleHalfSize.y * AxisY);

		m_Texture = new ImageUtility.Bitmap( W, H, new ImageUtility.ColorProfile( ImageUtility.ColorProfile.STANDARD_PROFILE.sRGB ) );

		ImageUtility.float4 XYZ;
		ImageUtility.float3 ShortXYZ;
		ImageUtility.float3 xyY;

		// Center the integer-sized target within the fractional crop rectangle
		ImageUtility.float2 CurrentScanlinePixel = TopLeftCorner + 0.5f * (fImageWidth - W) * AxisX + 0.5f * (fImageHeight - H) * AxisY;
		if ( Math.Abs( _Parms.CropRectangleRotation ) < 1e-6f ) {
			// Use integer pixels to avoid attenuated values due to bilinear filtering
			CurrentScanlinePixel.x = (float) Math.Floor( CurrentScanlinePixel.x );
			CurrentScanlinePixel.y = (float) Math.Floor( CurrentScanlinePixel.y );
		}
		for ( int Y=0; Y < H; Y++ ) {
			ImageUtility.float2 CurrentPixel = CurrentScanlinePixel;
			for ( int X=0; X < W; X++ ) {
				float U = CurrentPixel.x / _Source.Width;
				float V = CurrentPixel.y / _Source.Height;
				XYZ = _Source.BilinearSample( CurrentPixel.x, CurrentPixel.y );

//DEBUG
//				float L = XYZ.y * _Database.GetSpatialLuminanceCorrectionFactor( U, V );
//				if ( L < MinLuminance_Raw ) MinLuminance_Raw = L;
//				if ( L > MaxLuminance_Raw ) MaxLuminance_Raw = L;
//DEBUG

				// Apply luminance calibration
				xyY = ImageUtility.ColorProfile.XYZ2xyY( (ImageUtility.float3) XYZ );
				xyY = _Database.CalibrateWithSpatialCorrection( U, V, xyY );
				ShortXYZ = ImageUtility.ColorProfile.xyY2XYZ( xyY );
				XYZ = new ImageUtility.float4( ShortXYZ, XYZ.w );	// Preserve original alpha
				m_Texture.ContentXYZ[X,Y] = XYZ;

				// Update min/max/avg values
				InsertMinMax( ShortXYZ, ArrayMin, ArrayMax, EXTREME_VALUES_COUNT );
				AvgXYZ += XYZ;

				CurrentPixel += AxisX;	// Step along the rotated scanline
			}
			CurrentScanlinePixel += AxisY;	// Step to the next rotated scanline
		}
	} else {
		// Simple texture copy, with luminance calibration
		m_Texture = new ImageUtility.Bitmap( _Source.Width, _Source.Height, new ImageUtility.ColorProfile( ImageUtility.ColorProfile.STANDARD_PROFILE.sRGB ) );

		ImageUtility.float4 XYZ;
		ImageUtility.float3 ShortXYZ;
		ImageUtility.float3 xyY;

		int W = m_Texture.Width;
		int H = m_Texture.Height;
		int X0 = 0;
		int X1 = W;
		int Y0 = 0;
		int Y1 = H;

//DEBUG	(restrict processing to a sub-rectangle)
//		X0 = 1088; Y0 = 764;
//		X1 = X0 + 1100; Y1 = Y0 + 632;

		for ( int Y=Y0; Y < Y1; Y++ ) {
			float V = (float) Y / H;
			for ( int X=X0; X < X1; X++ ) {
				float U = (float) X / W;
				XYZ = _Source.ContentXYZ[X,Y];

//DEBUG
//				float L = XYZ.y * _Database.GetSpatialLuminanceCorrectionFactor( U, V );
//				if ( L < MinLuminance_Raw ) MinLuminance_Raw = L;
//				if ( L > MaxLuminance_Raw ) MaxLuminance_Raw = L;
//DEBUG

				// Apply luminance calibration
				xyY = ImageUtility.ColorProfile.XYZ2xyY( (ImageUtility.float3) XYZ );
				xyY = _Database.CalibrateWithSpatialCorrection( U, V, xyY );
				ShortXYZ = ImageUtility.ColorProfile.xyY2XYZ( xyY );
				XYZ = new ImageUtility.float4( ShortXYZ, XYZ.w );	// Preserve original alpha
				m_Texture.ContentXYZ[X,Y] = XYZ;

				// Update min/max/avg values
				InsertMinMax( ShortXYZ, ArrayMin, ArrayMax, EXTREME_VALUES_COUNT );
				AvgXYZ += XYZ;
			}
		}
	}

	// Normalize average swatch color
	float Normalizer = 1.0f / (m_Texture.Width*m_Texture.Height);
	ImageUtility.float3 avgxyY = ImageUtility.ColorProfile.XYZ2xyY( Normalizer * ((ImageUtility.float3) AvgXYZ) );
	m_SwatchAvg.xyY = avgxyY;

	// Compute min & max using statistical norm
	ImageUtility.float3 BestXYZ_Min;
	ImageUtility.float3 BestXYZ_Max;
	if ( _Parms.UseModeInsteadOfMean ) {
		// Use mode
		BestXYZ_Min = ComputeMode( ArrayMin );
		BestXYZ_Max = ComputeMode( ArrayMax );
	} else {
		// Use mean
		BestXYZ_Min = ComputeMean( ArrayMin );
		BestXYZ_Max = ComputeMean( ArrayMax );
	}
	m_SwatchMin.xyY = ImageUtility.ColorProfile.XYZ2xyY( BestXYZ_Min );
	m_SwatchMax.xyY = ImageUtility.ColorProfile.XYZ2xyY( BestXYZ_Max );

	// Build the min/max/avg swatch bitmaps
	m_SwatchMin.Texture = BuildSwatch( m_SwatchWidth, m_SwatchHeight, m_SwatchMin.xyY );
	m_SwatchMax.Texture = BuildSwatch( m_SwatchWidth, m_SwatchHeight, m_SwatchMax.xyY );
	m_SwatchAvg.Texture = BuildSwatch( m_SwatchWidth, m_SwatchHeight, m_SwatchAvg.xyY );

	// Rebuild custom swatches
	foreach ( CustomSwatch CS in m_CustomSwatches )
		CS.Texture = BuildSwatch( m_SwatchWidth, m_SwatchHeight, CS.xyY );

	//////////////////////////////////////////////////////////////////////////
	// Feed some purely informational shot infos to the main texture, probably won't be saved anyway...
	m_Texture.HasValidShotInfo = true;
	m_Texture.ISOSpeed = _Parms.ISOSpeed;
	m_Texture.ShutterSpeed = _Parms.ShutterSpeed;
	m_Texture.Aperture = _Parms.Aperture;
}
/// <summary>
/// Rebuilds and assigns the bitmap for the output panel from the loaded image
/// </summary>
private void RebuildImage() {
	if ( m_BitmapXYZ == null )
		return;

	bool sRGB = checkBoxsRGB.Checked;
	bool SpatialCorrection = checkBoxSpatialLuminanceCorrection.Checked;

	ImageUtility.float4[,] Image = new ImageUtility.float4[m_BitmapXYZ.Width,m_BitmapXYZ.Height];
	int W = m_BitmapXYZ.Width;
	int H = m_BitmapXYZ.Height;
	if ( checkBoxLuminance.Checked ) {
		// Convert into luminances only (grey-scale preview)
		// NOTE: only x/y/z are written; the w component keeps its default value
		for ( int Y = 0; Y < H; Y++ )
			for ( int X = 0; X < W; X++ ) {
				float L = m_BitmapXYZ.ContentXYZ[X, Y].y;	// Y of XYZ is the luminance
				if ( SpatialCorrection )
					L*= m_CalibrationDatabase.GetSpatialLuminanceCorrectionFactor( (float) X / W, (float) Y / H );
				if ( sRGB )
					L = ImageUtility.ColorProfile.Linear2sRGB( L );
				Image[X, Y].x = L;
				Image[X, Y].y = L;
				Image[X, Y].z = L;
			}
	} else {
		// RGB
		ImageUtility.float4[,] Content = m_BitmapXYZ.ContentXYZ;
		if ( checkBoxSpatialLuminanceCorrection.Checked ) {
			// Apply the spatial correction on a copy so the source bitmap stays untouched
			Content = new ImageUtility.float4[m_BitmapXYZ.Width,m_BitmapXYZ.Height];
			Array.Copy( m_BitmapXYZ.ContentXYZ, Content, Content.LongLength );
			for ( int Y=0; Y < H; Y++ )
				for ( int X=0; X < W; X++ ) {
					ImageUtility.float4 XYZ = Content[X,Y];
					ImageUtility.float3 xyY = ImageUtility.ColorProfile.XYZ2xyY( (ImageUtility.float3) XYZ );
					xyY.z *= m_CalibrationDatabase.GetSpatialLuminanceCorrectionFactor( (float) X / W, (float) Y / H );	// Scale luminance (z of xyY), keep chromaticity
					XYZ = new ImageUtility.float4( ImageUtility.ColorProfile.xyY2XYZ( xyY ), XYZ.w );
					Content[X,Y] = XYZ;
				}
		}

		// Build conversion profile
		ImageUtility.ColorProfile Profile = new ImageUtility.ColorProfile(
			ImageUtility.ColorProfile.Chromaticities.sRGB,	// Always use standard sRGB illuminant
			sRGB ? ImageUtility.ColorProfile.GAMMA_CURVE.sRGB : ImageUtility.ColorProfile.GAMMA_CURVE.STANDARD,	// Either use sRGB linear toe or a standard gamma
			sRGB ? ImageUtility.ColorProfile.GAMMA_EXPONENT_sRGB : 1.0f );	// Either use sRGB gamma or linear gamma

		// Convert
		Profile.XYZ2RGB( Content, Image );
	}

	outputPanel.Image = Image;
}
/// <summary>
/// Captures the calibrated texture.
/// Builds m_Texture from the source bitmap by applying the database's luminance calibration (with optional
/// spatial correction), either on a rotated/cropped sub-rectangle or on the full image, then computes the
/// min/max/average color swatches from the calibrated result.
/// </summary>
/// <param name="_Source">The source image to capture</param>
/// <param name="_Database">Database to perform proper calibration</param>
/// <param name="_Parms">Parameters for the capture</param>
public void Capture(ImageUtility.Bitmap _Source, CameraCalibrationDatabase _Database, CaptureParms _Parms)
{
	// Validate inputs: a null source/database/parms or a degenerate swatch size makes the capture meaningless
	if (_Source == null) { throw new Exception("Invalid source bitmap to build texture from!"); }
	if (_Database == null) { throw new Exception("Invalid calibration database found in parameters!"); }
	if (_Parms == null) { throw new Exception("Invalid calibration parameters!"); }
	if (m_SwatchWidth <= 0 || m_SwatchHeight <= 0) { throw new Exception("Invalid swatch size! Must be > 0!"); }

	// Save parameters as they're associated to this texture
	m_CaptureParameters = _Parms;
	m_WhiteReflectanceReference = _Database.WhiteReflectanceReference;
	m_WhiteReflectanceCorrectionFactor = _Database.WhiteReflectanceCorrectionFactor;
	m_SpatialCorrectionEnabled = _Database.WhiteReferenceImage != null;

	//////////////////////////////////////////////////////////////////////////
	// Setup the database to find the most appropriate calibration data for our image infos
	_Database.PrepareCalibrationFor(_Parms.ISOSpeed, _Parms.ShutterSpeed, _Parms.Aperture);

	//////////////////////////////////////////////////////////////////////////
	// Build target texture
	ImageUtility.float4 AvgXYZ = new ImageUtility.float4(0, 0, 0, 0);

	//DEBUG
	// float MinLuminance_Raw = float.MaxValue;
	// float MaxLuminance_Raw = -float.MaxValue;

	// Running arrays of the EXTREME_VALUES_COUNT smallest/largest calibrated XYZ values,
	// maintained by InsertMinMax; used later to derive statistically robust min/max swatches
	const int EXTREME_VALUES_COUNT = 100;
	ImageUtility.float3[] ArrayMin = new ImageUtility.float3[EXTREME_VALUES_COUNT];
	ImageUtility.float3[] ArrayMax = new ImageUtility.float3[EXTREME_VALUES_COUNT];
	for (int i = 0; i < EXTREME_VALUES_COUNT; i++)
	{
		// Seed min entries with luminance 1 and max entries with luminance 0 so any real sample replaces them
		ArrayMin[i] = new ImageUtility.float3(0, 1, 0);
		ArrayMax[i] = new ImageUtility.float3(0, 0, 0);
	}

	if (_Parms.CropSource)
	{
		// Crop rectangle dimensions are expressed as half-sizes relative to the source's HEIGHT
		float fImageWidth = 2.0f * _Parms.CropRectangleHalfSize.x * _Source.Height;
		float fImageHeight = 2.0f * _Parms.CropRectangleHalfSize.y * _Source.Height;
		int W = (int)Math.Floor(fImageWidth);
		int H = (int)Math.Floor(fImageHeight);

		// Build the rotated frame (AxisX, AxisY) of the crop rectangle and its top-left corner in source pixels
		ImageUtility.float2 AxisX = new ImageUtility.float2((float)Math.Cos(_Parms.CropRectangleRotation), -(float)Math.Sin(_Parms.CropRectangleRotation));
		ImageUtility.float2 AxisY = new ImageUtility.float2((float)Math.Sin(_Parms.CropRectangleRotation), (float)Math.Cos(_Parms.CropRectangleRotation));
		ImageUtility.float2 TopLeftCorner = new ImageUtility.float2(0.5f * (_Source.Width - _Source.Height) + _Parms.CropRectangleCenter.x * _Source.Height, _Source.Height * _Parms.CropRectangleCenter.y)
			+ _Source.Height * (-_Parms.CropRectangleHalfSize.x * AxisX - _Parms.CropRectangleHalfSize.y * AxisY);

		m_Texture = new ImageUtility.Bitmap(W, H, new ImageUtility.ColorProfile(ImageUtility.ColorProfile.STANDARD_PROFILE.sRGB));

		ImageUtility.float4 XYZ;
		ImageUtility.float3 ShortXYZ;
		ImageUtility.float3 xyY;

		// Center the W x H integer grid inside the fractional crop rectangle
		ImageUtility.float2 CurrentScanlinePixel = TopLeftCorner + 0.5f * (fImageWidth - W) * AxisX + 0.5f * (fImageHeight - H) * AxisY;
		if (Math.Abs(_Parms.CropRectangleRotation) < 1e-6f)
		{
			// Use integer pixels to avoid attenuated values due to bilinear filtering
			CurrentScanlinePixel.x = (float)Math.Floor(CurrentScanlinePixel.x);
			CurrentScanlinePixel.y = (float)Math.Floor(CurrentScanlinePixel.y);
		}

		// Walk the rotated rectangle scanline by scanline, resampling the source bilinearly
		for (int Y = 0; Y < H; Y++)
		{
			ImageUtility.float2 CurrentPixel = CurrentScanlinePixel;
			for (int X = 0; X < W; X++)
			{
				float U = CurrentPixel.x / _Source.Width;
				float V = CurrentPixel.y / _Source.Height;

				XYZ = _Source.BilinearSample(CurrentPixel.x, CurrentPixel.y);

				//DEBUG
				// float L = XYZ.y * _Database.GetSpatialLuminanceCorrectionFactor( U, V );
				// if ( L < MinLuminance_Raw )
				// 	MinLuminance_Raw = L;
				// if ( L > MaxLuminance_Raw )
				// 	MaxLuminance_Raw = L;
				//DEBUG

				// Calibrate the luminance (in xyY space so chromaticity is preserved) then rebuild the XYZ value
				xyY = ImageUtility.ColorProfile.XYZ2xyY((ImageUtility.float3)XYZ);
				xyY = _Database.CalibrateWithSpatialCorrection(U, V, xyY);	// Apply luminance calibration
				ShortXYZ = ImageUtility.ColorProfile.xyY2XYZ(xyY);
				XYZ = new ImageUtility.float4(ShortXYZ, XYZ.w);

				m_Texture.ContentXYZ[X, Y] = XYZ;

				// Update min/max/avg values
				InsertMinMax(ShortXYZ, ArrayMin, ArrayMax, EXTREME_VALUES_COUNT);
				AvgXYZ += XYZ;

				CurrentPixel += AxisX;
			}
			CurrentScanlinePixel += AxisY;
		}
	}
	else
	{	// Simple texture copy, with luminance calibration
		m_Texture = new ImageUtility.Bitmap(_Source.Width, _Source.Height, new ImageUtility.ColorProfile(ImageUtility.ColorProfile.STANDARD_PROFILE.sRGB));

		ImageUtility.float4 XYZ;
		ImageUtility.float3 ShortXYZ;
		ImageUtility.float3 xyY;

		int W = m_Texture.Width;
		int H = m_Texture.Height;
		int X0 = 0;
		int X1 = W;
		int Y0 = 0;
		int Y1 = H;

		//DEBUG
		// X0 = 1088; Y0 = 764;
		// X1 = X0 + 1100; Y1 = Y0 + 632;

		for (int Y = Y0; Y < Y1; Y++)
		{
			float V = (float)Y / H;
			for (int X = X0; X < X1; X++)
			{
				float U = (float)X / W;

				XYZ = _Source.ContentXYZ[X, Y];

				//DEBUG
				// float L = XYZ.y * _Database.GetSpatialLuminanceCorrectionFactor( U, V );
				// if ( L < MinLuminance_Raw )
				// 	MinLuminance_Raw = L;
				// if ( L > MaxLuminance_Raw )
				// 	MaxLuminance_Raw = L;
				//DEBUG

				// Same calibration as the crop path: correct luminance in xyY space, keep chromaticity & alpha
				xyY = ImageUtility.ColorProfile.XYZ2xyY((ImageUtility.float3)XYZ);
				xyY = _Database.CalibrateWithSpatialCorrection(U, V, xyY);	// Apply luminance calibration
				ShortXYZ = ImageUtility.ColorProfile.xyY2XYZ(xyY);
				XYZ = new ImageUtility.float4(ShortXYZ, XYZ.w);

				m_Texture.ContentXYZ[X, Y] = XYZ;

				// Update min/max/avg values
				InsertMinMax(ShortXYZ, ArrayMin, ArrayMax, EXTREME_VALUES_COUNT);
				AvgXYZ += XYZ;
			}
		}
	}

	// Normalize average swatch color
	// NOTE(review): normalizes by the full texture area even when the DEBUG sub-range above is enabled — fine in production
	float Normalizer = 1.0f / (m_Texture.Width * m_Texture.Height);
	ImageUtility.float3 avgxyY = ImageUtility.ColorProfile.XYZ2xyY(Normalizer * ((ImageUtility.float3)AvgXYZ));
	m_SwatchAvg.xyY = avgxyY;

	// Compute min & max using statistical norm
	ImageUtility.float3 BestXYZ_Min;
	ImageUtility.float3 BestXYZ_Max;
	if (_Parms.UseModeInsteadOfMean)
	{	// Use mode
		BestXYZ_Min = ComputeMode(ArrayMin);
		BestXYZ_Max = ComputeMode(ArrayMax);
	}
	else
	{	// Use mean
		BestXYZ_Min = ComputeMean(ArrayMin);
		BestXYZ_Max = ComputeMean(ArrayMax);
	}
	m_SwatchMin.xyY = ImageUtility.ColorProfile.XYZ2xyY(BestXYZ_Min);
	m_SwatchMax.xyY = ImageUtility.ColorProfile.XYZ2xyY(BestXYZ_Max);

	// Generate the small solid-color bitmaps for the min/max/avg swatches
	m_SwatchMin.Texture = BuildSwatch(m_SwatchWidth, m_SwatchHeight, m_SwatchMin.xyY);
	m_SwatchMax.Texture = BuildSwatch(m_SwatchWidth, m_SwatchHeight, m_SwatchMax.xyY);
	m_SwatchAvg.Texture = BuildSwatch(m_SwatchWidth, m_SwatchHeight, m_SwatchAvg.xyY);

	// Rebuild custom swatches
	foreach (CustomSwatch CS in m_CustomSwatches)
	{
		CS.Texture = BuildSwatch(m_SwatchWidth, m_SwatchHeight, CS.xyY);
	}

	//////////////////////////////////////////////////////////////////////////
	// Feed some purely informational shot infos to the main texture, probably won't be saved anyway...
	m_Texture.HasValidShotInfo = true;
	m_Texture.ISOSpeed = _Parms.ISOSpeed;
	m_Texture.ShutterSpeed = _Parms.ShutterSpeed;
	m_Texture.Aperture = _Parms.Aperture;
}
/// <summary>
/// Rebuilds the display bitmap from the current image: converts the XYZ content to RGB through the
/// linear or sRGB profile, then writes BGRA pixels according to the selected view mode
/// (full RGB, single channel, AO from alpha, AO rebuilt from RGB, or RGB modulated by AO).
/// </summary>
private unsafe void UpdateBitmap()
{
	if (m_Image == null)
	{
		return;
	}

	// Fill pixel per pixel
	int W = m_Image.Width;
	int H = m_Image.Height;

	// Recreate the GDI bitmap whenever its size no longer matches the image
	if (m_Bitmap != null && (m_Bitmap.Width != W || m_Bitmap.Height != H))
	{
		m_Bitmap.Dispose();
		m_Bitmap = null;
	}
	if (m_Bitmap == null)
	{
		m_Bitmap = new Bitmap(W, H, PixelFormat.Format32bppArgb);
	}

	// Convert XYZ to RGB with the profile matching the requested display space
	ImageUtility.float4[,] ContentRGB = new ImageUtility.float4[W, H];
	if (m_ViewLinear)
	{
		m_ProfileLinear.XYZ2RGB(m_Image.ContentXYZ, ContentRGB);
	}
	else
	{
		m_ProfilesRGB.XYZ2RGB(m_Image.ContentXYZ, ContentRGB);
	}

	BitmapData LockedBitmap = m_Bitmap.LockBits(new Rectangle(0, 0, W, H), ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
	for (int Y = 0; Y < H; Y++)
	{
		// Scan0 + Stride*Y addresses the start of scanline Y; Format32bppArgb stores bytes as B, G, R, A
		byte *pScanline = (byte *)LockedBitmap.Scan0.ToPointer() + LockedBitmap.Stride * Y;
		for (int X = 0; X < W; X++)
		{
			// Clamp each channel into a displayable [0,255] byte
			byte R = (byte)Math.Max(0, Math.Min(255, 255 * ContentRGB[X, Y].x));
			byte G = (byte)Math.Max(0, Math.Min(255, 255 * ContentRGB[X, Y].y));
			byte B = (byte)Math.Max(0, Math.Min(255, 255 * ContentRGB[X, Y].z));
			// The alpha component carries the AO value; gamma-correct it for display unless viewing linear values
			byte A = (byte)Math.Max(0, Math.Min(255, 255 * (m_ViewLinear ? ContentRGB[X, Y].w : ImageUtility.ColorProfile.Linear2sRGB(ContentRGB[X, Y].w))));

			// Write one BGRA pixel depending on the view mode; the written alpha byte is always opaque (0xFF)
			switch (m_ViewMode)
			{
				case VIEW_MODE.RGB:
					*pScanline++ = B; *pScanline++ = G; *pScanline++ = R; *pScanline++ = 0xFF;
					break;

				case VIEW_MODE.R:	// Red channel as grayscale
					*pScanline++ = R; *pScanline++ = R; *pScanline++ = R; *pScanline++ = 0xFF;
					break;

				case VIEW_MODE.G:	// Green channel as grayscale
					*pScanline++ = G; *pScanline++ = G; *pScanline++ = G; *pScanline++ = 0xFF;
					break;

				case VIEW_MODE.B:	// Blue channel as grayscale
					*pScanline++ = B; *pScanline++ = B; *pScanline++ = B; *pScanline++ = 0xFF;
					break;

				case VIEW_MODE.AO:	// AO (alpha component) as grayscale
					*pScanline++ = A; *pScanline++ = A; *pScanline++ = A; *pScanline++ = 0xFF;
					break;

				case VIEW_MODE.AO_FROM_RGB:
				{
					// Rebuild an AO value from the length of the linearized RGB vector,
					// normalized so a pure white pixel maps to AO = 1
					// NOTE(review): sRGB2Linear is applied even when m_ViewLinear is set (ContentRGB already linear) — confirm intended
					float LinR = ImageUtility.ColorProfile.sRGB2Linear(ContentRGB[X, Y].x);
					float LinG = ImageUtility.ColorProfile.sRGB2Linear(ContentRGB[X, Y].y);
					float LinB = ImageUtility.ColorProfile.sRGB2Linear(ContentRGB[X, Y].z);
					float LinAO = (float)Math.Sqrt(LinR * LinR + LinG * LinG + LinB * LinB) * 0.57735026918962576450914878050196f;	// divided by sqrt(3)

					A = (byte)Math.Max(0, Math.Min(255, 255 * ImageUtility.ColorProfile.Linear2sRGB(LinAO)));
					*pScanline++ = A; *pScanline++ = A; *pScanline++ = A; *pScanline++ = 0xFF;
				}
				break;

				case VIEW_MODE.RGB_AO:
				{
					// Modulate the linearized RGB by the AO factor, then re-apply sRGB gamma for display
					float LinR = ImageUtility.ColorProfile.sRGB2Linear(ContentRGB[X, Y].x);
					float LinG = ImageUtility.ColorProfile.sRGB2Linear(ContentRGB[X, Y].y);
					float LinB = ImageUtility.ColorProfile.sRGB2Linear(ContentRGB[X, Y].z);
					float LinAO = ContentRGB[X, Y].w;
					LinR *= LinAO;
					LinG *= LinAO;
					LinB *= LinAO;
					R = (byte)Math.Max(0, Math.Min(255, 255 * ImageUtility.ColorProfile.Linear2sRGB(LinR)));
					G = (byte)Math.Max(0, Math.Min(255, 255 * ImageUtility.ColorProfile.Linear2sRGB(LinG)));
					B = (byte)Math.Max(0, Math.Min(255, 255 * ImageUtility.ColorProfile.Linear2sRGB(LinB)));
					*pScanline++ = B; *pScanline++ = G; *pScanline++ = R; *pScanline++ = 0xFF;
				}
				break;
			}
		}
	}
	m_Bitmap.UnlockBits(LockedBitmap);

	Refresh();
}
/// <summary>
/// Prepares the interpolated calibration table to process the pixels in an image shot with the specified shot infos.
/// The shot infos are converted to EV space, the 8 surrounding calibration grid nodes are gathered, and a new
/// interpolated CameraCalibration is built by trilinearly interpolating the measured probe luminances
/// (X = ISO speed, Y = shutter speed, Z = aperture). Missing nodes fall back to the nearest existing node.
/// </summary>
/// <param name="_ISOSpeed">ISO speed of the shot to calibrate for</param>
/// <param name="_ShutterSpeed">Shutter speed of the shot to calibrate for</param>
/// <param name="_Aperture">Aperture of the shot to calibrate for</param>
public void PrepareCalibrationFor( float _ISOSpeed, float _ShutterSpeed, float _Aperture )
{
	if ( m_RootNode == null )
		throw new Exception( "Calibration grid hasn't been built: did you provide a valid database path? Does the path contain camera calibration data?" );

	if ( IsPreparedFor( _ISOSpeed, _ShutterSpeed, _Aperture ) )
		return;	// Already prepared!

	//////////////////////////////////////////////////////////////////////////
	// Find the 8 nodes encompassing our values
	// I'm making the delicate assumption that, although the starting node is chosen on the
	//	condition its EV values are strictly inferior to the target we're looking for, all
	//	neighbor nodes should satisfy the condition they're properly placed.
	//
	// This is true for the direct neighbors +X, +Y, +Z that are immediately above target values
	//	but for example, neighbor (+X +Y) may have a very bad aperture value (Z) that may be
	//	above the target aperture...
	//
	// Let's hope the user won't provide too fancy calibrations...
	// (anyway, interpolants are clamped in [0,1] so there's no risk of overshooting)
	//
	ImageUtility.float3	EV;
	GridNode.Convert2EV( _ISOSpeed, _ShutterSpeed, _Aperture, out EV.x, out EV.y, out EV.z );

	// Find the start node
	GridNode	StartNode = FindStartNode( EV.x, EV.y, EV.z );
	m_InterpolationStartNode = StartNode;

	// Build the 8 grid nodes from it
	// Each missing neighbor degrades to the node we came from, effectively clamping the interpolation at the grid border
	GridNode[,,]	Grid = new GridNode[2,2,2];
	Grid[0,0,0] = StartNode;
	Grid[1,0,0] = StartNode.m_Neighbors[0][1] != null ? StartNode.m_Neighbors[0][1] : StartNode;	// +X
	Grid[0,1,0] = StartNode.m_Neighbors[1][1] != null ? StartNode.m_Neighbors[1][1] : StartNode;	// +Y
	Grid[0,0,1] = StartNode.m_Neighbors[2][1] != null ? StartNode.m_Neighbors[2][1] : StartNode;	// +Z
	Grid[1,1,0] = Grid[1,0,0].m_Neighbors[1][1] != null ? Grid[1,0,0].m_Neighbors[1][1] : Grid[1,0,0];	// +X +Y
	Grid[0,1,1] = Grid[0,1,0].m_Neighbors[2][1] != null ? Grid[0,1,0].m_Neighbors[2][1] : Grid[0,1,0];	// +Y +Z
	Grid[1,0,1] = Grid[0,0,1].m_Neighbors[0][1] != null ? Grid[0,0,1].m_Neighbors[0][1] : Grid[0,0,1];	// +X +Z
	Grid[1,1,1] = Grid[1,1,0].m_Neighbors[2][1] != null ? Grid[1,1,0].m_Neighbors[2][1] : Grid[1,1,0];	// +X +Y +Z

	//////////////////////////////////////////////////////////////////////////
	// Create the successive interpolants for trilinear interpolation
	//
	// The grid is NOT regular: each of the 4 X-edges gets its own interpolant, computed from that edge's
	// actual endpoint EV values (the 1e-6 floor on the denominator guards against degenerate/collapsed edges)
	//
	// Assume we interpolate on X first (ISO speed), so we need 4 distinct values
	ImageUtility.float4	tX = new ImageUtility.float4(
		Math.Max( 0.0f, Math.Min( 1.0f, (EV.x - Grid[0,0,0].m_EV_ISOSpeed) / Math.Max( 1e-6f, Grid[1,0,0].m_EV_ISOSpeed - Grid[0,0,0].m_EV_ISOSpeed) ) ),	// Y=0 Z=0
		Math.Max( 0.0f, Math.Min( 1.0f, (EV.x - Grid[0,1,0].m_EV_ISOSpeed) / Math.Max( 1e-6f, Grid[1,1,0].m_EV_ISOSpeed - Grid[0,1,0].m_EV_ISOSpeed) ) ),	// Y=1 Z=0
		Math.Max( 0.0f, Math.Min( 1.0f, (EV.x - Grid[0,0,1].m_EV_ISOSpeed) / Math.Max( 1e-6f, Grid[1,0,1].m_EV_ISOSpeed - Grid[0,0,1].m_EV_ISOSpeed) ) ),	// Y=0 Z=1
		Math.Max( 0.0f, Math.Min( 1.0f, (EV.x - Grid[0,1,1].m_EV_ISOSpeed) / Math.Max( 1e-6f, Grid[1,1,1].m_EV_ISOSpeed - Grid[0,1,1].m_EV_ISOSpeed) ) )	// Y=1 Z=1
		);
	ImageUtility.float4	rX = new ImageUtility.float4( 1.0f - tX.x, 1.0f - tX.y, 1.0f - tX.z, 1.0f - tX.w );

	// Compute the 4 interpolated shutter speeds & apertures
	ImageUtility.float4	ShutterSpeedsX = new ImageUtility.float4(
		rX.x * Grid[0,0,0].m_EV_ShutterSpeed + tX.x * Grid[1,0,0].m_EV_ShutterSpeed,	// Y=0 Z=0
		rX.y * Grid[0,1,0].m_EV_ShutterSpeed + tX.y * Grid[1,1,0].m_EV_ShutterSpeed,	// Y=1 Z=0
		rX.z * Grid[0,0,1].m_EV_ShutterSpeed + tX.z * Grid[1,0,1].m_EV_ShutterSpeed,	// Y=0 Z=1
		rX.w * Grid[0,1,1].m_EV_ShutterSpeed + tX.w * Grid[1,1,1].m_EV_ShutterSpeed		// Y=1 Z=1
		);
	ImageUtility.float4	AperturesX = new ImageUtility.float4(
		rX.x * Grid[0,0,0].m_EV_Aperture + tX.x * Grid[1,0,0].m_EV_Aperture,	// Y=0 Z=0
		rX.y * Grid[0,1,0].m_EV_Aperture + tX.y * Grid[1,1,0].m_EV_Aperture,	// Y=1 Z=0
		rX.z * Grid[0,0,1].m_EV_Aperture + tX.z * Grid[1,0,1].m_EV_Aperture,	// Y=0 Z=1
		rX.w * Grid[0,1,1].m_EV_Aperture + tX.w * Grid[1,1,1].m_EV_Aperture		// Y=1 Z=1
		);

	// Next we interpolate on Y (Shutter speed), so we need 2 distinct values
	ImageUtility.float2	tY = new ImageUtility.float2(
		Math.Max( 0.0f, Math.Min( 1.0f, (EV.y - ShutterSpeedsX.x) / Math.Max( 1e-6f, ShutterSpeedsX.y - ShutterSpeedsX.x) ) ),	// Z=0
		Math.Max( 0.0f, Math.Min( 1.0f, (EV.y - ShutterSpeedsX.z) / Math.Max( 1e-6f, ShutterSpeedsX.w - ShutterSpeedsX.z) ) )	// Z=1
		);
	ImageUtility.float2	rY = new ImageUtility.float2( 1.0f - tY.x, 1.0f - tY.y );

	// Compute the 2 apertures
	ImageUtility.float2	AperturesY = new ImageUtility.float2(
		rY.x * AperturesX.x + tY.x * AperturesX.y,
		rY.y * AperturesX.z + tY.y * AperturesX.w
		);

	// Finally, we interpolate on Z (Aperture), we need only 1 single value
	float	tZ = Math.Max( 0.0f, Math.Min( 1.0f, (EV.z - AperturesY.x) / Math.Max( 1e-6f, AperturesY.y - AperturesY.x) ) );
	float	rZ = 1.0f - tZ;

	//////////////////////////////////////////////////////////////////////////
	// Create the special camera calibration that is the result of the interpolation of the 8 nearest ones in the grid
	m_InterpolatedCalibration = new CameraCalibration();
	m_InterpolatedCalibration.m_CameraShotInfos.m_ISOSpeed = _ISOSpeed;
	m_InterpolatedCalibration.m_CameraShotInfos.m_ShutterSpeed = _ShutterSpeed;
	m_InterpolatedCalibration.m_CameraShotInfos.m_Aperture = _Aperture;

	// Trilinearly interpolate each probe's measured luminance from the 8 corner calibrations
	for ( int ProbeIndex=0; ProbeIndex < REQUIRED_PROBES_COUNT; ProbeIndex++ )
	{
		CameraCalibration.Probe	TargetProbe = m_InterpolatedCalibration.m_Reflectances[ProbeIndex];

		// Lxyz = measured luminance at grid corner (x,y,z)
		float	L000 = Grid[0,0,0].m_CameraCalibration.m_Reflectances[ProbeIndex].m_LuminanceMeasured;
		float	L100 = Grid[1,0,0].m_CameraCalibration.m_Reflectances[ProbeIndex].m_LuminanceMeasured;
		float	L010 = Grid[0,1,0].m_CameraCalibration.m_Reflectances[ProbeIndex].m_LuminanceMeasured;
		float	L110 = Grid[1,1,0].m_CameraCalibration.m_Reflectances[ProbeIndex].m_LuminanceMeasured;
		float	L001 = Grid[0,0,1].m_CameraCalibration.m_Reflectances[ProbeIndex].m_LuminanceMeasured;
		float	L101 = Grid[1,0,1].m_CameraCalibration.m_Reflectances[ProbeIndex].m_LuminanceMeasured;
		float	L011 = Grid[0,1,1].m_CameraCalibration.m_Reflectances[ProbeIndex].m_LuminanceMeasured;
		float	L111 = Grid[1,1,1].m_CameraCalibration.m_Reflectances[ProbeIndex].m_LuminanceMeasured;

		// Interpolate on X (ISO speed)
		float	L00 = rX.x * L000 + tX.x * L100;
		float	L10 = rX.y * L010 + tX.y * L110;
		float	L01 = rX.z * L001 + tX.z * L101;
		float	L11 = rX.w * L011 + tX.w * L111;

		// Interpolate on Y (shutter speed)
		float	L0 = rY.x * L00 + tY.x * L10;
		float	L1 = rY.y * L01 + tY.y * L11;

		// Interpolate on Z (aperture)
		float	L = rZ * L0 + tZ * L1;

		TargetProbe.m_IsAvailable = true;
		TargetProbe.m_LuminanceMeasured = L;
	}

	// Fill missing values
	m_InterpolatedCalibration.UpdateAllLuminances();

	// Reset white reflectance reference because it was set for another setup
	WhiteReflectanceReference = new ImageUtility.float3( 0, 0, -1 );
}
/// <summary>
/// Development/validation routine: brute-forces all 256^3 RGB combinations at 4 luminance scales,
/// asserts the RGB -> YCoCg -> RGB round trip is lossless (throws on any mismatch), and records the
/// observed min/max ranges of the YCoCg components and the xyY conversion for each luminance scale.
/// NOTE: this runs 4 x 16.7M iterations — expensive, intended for offline testing only.
/// </summary>
void TestChromaRanges()
{
	ImageUtility.ColorProfile	profile = new ImageUtility.ColorProfile(ImageUtility.ColorProfile.STANDARD_PROFILE.sRGB );
	ImageUtility.float3		tempFloat3 = new ImageUtility.float3( 0, 0, 0 );
	ImageUtility.float4		tempFloat4 = new ImageUtility.float4( 0, 0, 0, 1 );

	float	Ygo, Cg, Co;

	// One range accumulator per tested luminance scale
	ranges_t[]	ranges = new ranges_t[4];
	for ( int lumaIndex=0; lumaIndex < ranges.Length; lumaIndex++ )
	{
		ranges_t	range = new ranges_t();
		ranges[lumaIndex] = range;

		// Luminance scale: 1/255, 2/255, 3/255, 4/255
		// NOTE(review): with R,G,B up to 255 the scaled components reach ~4.0, i.e. beyond [0,1] — presumably
		// intentional to probe extended-range behavior; confirm
		float	L = (1+lumaIndex) / 255.0f;
		for ( int R=0; R < 256; R++ )
		{
			for ( int G=0; G < 256; G++ )
			{
				for ( int B=0; B < 256; B++ )
				{
					tempFloat4.x = L * R;
					tempFloat4.y = L * G;
					tempFloat4.z = L * B;

					// Convert to YCoCg
					// Ygo = 0.25f * tempFloat4.x + 0.5f * tempFloat4.y + 0.25f * tempFloat4.z;
					// Cg = -0.25f * tempFloat4.x + 0.5f * tempFloat4.y - 0.25f * tempFloat4.z;
					// Co = 0.50f * tempFloat4.x + 0.0f * tempFloat4.y - 0.50f * tempFloat4.z;
					RGB2YCoCg( tempFloat4.x, tempFloat4.y, tempFloat4.z, out Ygo, out Co, out Cg );

					// Round-trip back to RGB and verify it's lossless within 1e-6
					YCoCg2RGB( Ygo, Co, Cg, out tempFloat3.x, out tempFloat3.y, out tempFloat3.z );
					if ( Math.Abs( tempFloat3.x - tempFloat4.x ) > 1e-6 )
						throw new Exception( "RHA!" );
					if ( Math.Abs( tempFloat3.y - tempFloat4.y ) > 1e-6 )
						throw new Exception( "RHA!" );
					if ( Math.Abs( tempFloat3.z - tempFloat4.z ) > 1e-6 )
						throw new Exception( "RHA!" );

					// Convert to xyY
					ImageUtility.float4	XYZ = profile.RGB2XYZ( tempFloat4 );
					tempFloat3.x = XYZ.x;
					tempFloat3.y = XYZ.y;
					tempFloat3.z = XYZ.z;
					ImageUtility.float3	xyY = ImageUtility.ColorProfile.XYZ2xyY( tempFloat3 );

					// Update ranges
					range.Ygo_min = Math.Min( range.Ygo_min, Ygo );
					range.Ygo_max = Math.Max( range.Ygo_max, Ygo );
					range.Cg_min = Math.Min( range.Cg_min, Cg );
					range.Cg_max = Math.Max( range.Cg_max, Cg );
					range.Co_min = Math.Min( range.Co_min, Co );
					range.Co_max = Math.Max( range.Co_max, Co );
					range.Y_min = Math.Min( range.Y_min, xyY.z );
					range.Y_max = Math.Max( range.Y_max, xyY.z );
					range.x_min = Math.Min( range.x_min, xyY.x );
					range.x_max = Math.Max( range.x_max, xyY.x );
					range.y_min = Math.Min( range.y_min, xyY.y );
					range.y_max = Math.Max( range.y_max, xyY.y );
				}
			}
		}
	}
}
public ImageUtility.Bitmap Texture;	// The bitmap generated from the swatch color

/// <summary>
/// Writes this swatch into the given XML element: the source xyY color and its RGB conversion
/// (through the swatch texture's color profile) are both stored as string attributes.
/// </summary>
/// <param name="_Owner">The calibrated texture that owns this swatch and provides the XML attribute helper</param>
/// <param name="_SwatchElement">The XML element receiving the "xyY" and "RGB" attributes</param>
public virtual void Save(CalibratedTexture _Owner, XmlElement _SwatchElement)
{
	// Convert xyY -> XYZ (with full opacity) -> RGB using the texture's own profile
	ImageUtility.float4 SwatchXYZ = new ImageUtility.float4(ImageUtility.ColorProfile.xyY2XYZ(xyY), 1.0f);
	ImageUtility.float3 SwatchRGB = (ImageUtility.float3)Texture.Profile.XYZ2RGB(SwatchXYZ);

	// Store both color representations on the element
	_Owner.SetAttribute(_SwatchElement, "xyY", xyY.ToString()).SetAttribute("RGB", SwatchRGB.ToString());
}
/// <summary>
/// Runs the full self-shadowed bump map generation pipeline:
/// 1] bilateral-filters the source height texture, 2] dispatches the compute shader in horizontal slices
/// (updating the progress bar between slices), 3] reads the GPU result back through a staging texture
/// into an ImageUtility bitmap assigned to the result viewport. The generator tab is disabled while running.
/// </summary>
private void Generate()
{
	try
	{
		tabControlGenerators.Enabled = false;

		//////////////////////////////////////////////////////////////////////////
		// 1] Apply bilateral filtering to the input texture as a pre-process
		ApplyBilateralFiltering(m_TextureSource, m_TextureTarget0, floatTrackbarControlBilateralRadius.Value, floatTrackbarControlBilateralTolerance.Value, checkBoxWrap.Checked);

		//////////////////////////////////////////////////////////////////////////
		// 2] Compute directional occlusion
		m_TextureTarget1.RemoveFromLastAssignedSlots();

		// Prepare computation parameters: filtered height as input, target as UAV, ray set as structured buffer
		m_TextureTarget0.SetCS(0);
		m_TextureTarget1.SetCSUAV(0);
		m_SB_Rays.SetInput(1);

		m_CB_Input.m.RaysCount = (UInt32)Math.Min(MAX_THREADS, integerTrackbarControlRaysCount.Value);
		m_CB_Input.m.MaxStepsCount = (UInt32)integerTrackbarControlMaxStepsCount.Value;
		m_CB_Input.m.Tile = (uint)(checkBoxWrap.Checked ? 1 : 0);
		m_CB_Input.m.TexelSize_mm = TextureSize_mm / Math.Max(W, H);
		m_CB_Input.m.Displacement_mm = TextureHeight_mm;

		// Start
		if (!m_CS_GenerateSSBumpMap.Use())
		{
			throw new Exception("Can't generate self-shadowed bump map as compute shader failed to compile!");
		}

		// Dispatch in horizontal slices of h lines so the UI stays responsive and the driver isn't starved;
		// each slice updates Y0 in the constant buffer and advances the progress bar
		int h = Math.Max(1, MAX_LINES * 1024 / W);
		int CallsCount = (int)Math.Ceiling((float)H / h);
		for (int i = 0; i < CallsCount; i++)
		{
			m_CB_Input.m.Y0 = (UInt32)(i * h);
			m_CB_Input.UpdateData();

			m_CS_GenerateSSBumpMap.Dispatch(W, h, 1);

			m_Device.Present(true);

			progressBar.Value = (int)(0.01f * (BILATERAL_PROGRESS + (100 - BILATERAL_PROGRESS) * (i + 1) / (CallsCount)) * progressBar.Maximum);
			// for ( int a=0; a < 10; a++ )
			Application.DoEvents();
		}

		m_TextureTarget1.RemoveFromLastAssignedSlotUAV();	// So we can use it as input for next stage

		progressBar.Value = progressBar.Maximum;

		// Compute in a single shot (this is madness!)
		// m_CB_Input.m.y = 0;
		// m_CB_Input.UpdateData();
		// m_CS_GenerateSSBumpMap.Dispatch( W, H, 1 );

		//////////////////////////////////////////////////////////////////////////
		// 3] Copy target to staging for CPU readback and update the resulting bitmap
		m_TextureTarget_CPU.CopyFrom(m_TextureTarget1);

		if (m_BitmapResult != null)
		{
			m_BitmapResult.Dispose();
		}
		m_BitmapResult = null;
		m_BitmapResult = new ImageUtility.Bitmap(W, H, m_LinearProfile);
		m_BitmapResult.HasAlpha = true;

		// Read the float4 pixels row by row (RowPitch may exceed W*16 bytes, hence the explicit seek per row)
		RendererManaged.PixelsBuffer Pixels = m_TextureTarget_CPU.Map(0, 0);
		using (System.IO.BinaryReader R = Pixels.OpenStreamRead())
			for (int Y = 0; Y < H; Y++)
			{
				R.BaseStream.Position = Y * Pixels.RowPitch;
				for (int X = 0; X < W; X++)
				{
					ImageUtility.float4 Color = new ImageUtility.float4(R.ReadSingle(), R.ReadSingle(), R.ReadSingle(), R.ReadSingle());
					Color = m_LinearProfile.RGB2XYZ(Color);
					m_BitmapResult.ContentXYZ[X, Y] = Color;
				}
			}
		Pixels.Dispose();
		m_TextureTarget_CPU.UnMap(0, 0);

		// Assign result
		viewportPanelResult.Image = m_BitmapResult;
	}
	catch (Exception _e)
	{
		MessageBox("An error occurred during generation!\r\n\r\nDetails: ", _e);
	}
	finally
	{
		tabControlGenerators.Enabled = true;
	}
}
/// <summary>
/// Test handler: builds a synthetic white reference image at the sRGB white point whose luminance
/// falls off linearly from 1.0 at the top row to 0.5 at the bottom, then assigns it to the
/// calibration database as the spatial luminance correction reference and refreshes the UI.
/// </summary>
private void buttonWhiteRefTest3_Click( object sender, EventArgs e )
{
	int	W = DEFAULT_WHITE_REFERENCE_IMAGE_SIZE;
	int	H = DEFAULT_WHITE_REFERENCE_IMAGE_SIZE;
	ImageUtility.Bitmap	WhiteRef = new ImageUtility.Bitmap( W, H, m_sRGBProfile );

	// Use the sRGB white point chromaticity; only the luminance (z) varies, and only with the row
	ImageUtility.float3	xyY = new ImageUtility.float3( m_sRGBProfile.Chromas.W.x, m_sRGBProfile.Chromas.W.y, 0.0f );
	for ( int Y=0; Y < H; Y++ )
	{
		float	V = (float) Y / H;

		// Luminance is constant along a scanline so the XYZ value is computed once per row
		// (removed an unused per-pixel U coordinate from the original inner loop)
		xyY.z = Math.Min( 1.0f, 1.0f - 0.5f * V );
		ImageUtility.float4	XYZ = new ImageUtility.float4( ImageUtility.ColorProfile.xyY2XYZ( xyY ), 1.0f );
		for ( int X=0; X < W; X++ )
			WhiteRef.ContentXYZ[X,Y] = XYZ;
	}

	// Assign to the database
	m_CalibrationDatabase.WhiteReferenceImage = WhiteRef;

	UpdateWhiteReferenceImageUI();
}