/// <summary>
/// Flattens all metadata contained in a FreeImage bitmap into a flat list,
/// one <see cref="MetaSimple"/> entry per (model, tag) pair.
/// </summary>
/// <param name="bm">The FreeImage bitmap handle to read metadata from.</param>
/// <returns>A list with one entry per metadata tag; empty when the image has none.</returns>
public List<MetaSimple> fiGetMetaData(FIBITMAP bm)
{
    // Wrap every metadata model the image contains.
    var iMetadata = new ImageMetadata(bm);
    var results = new List<MetaSimple>();

    foreach (MetadataModel model in iMetadata)
    {
        foreach (MetadataTag tag in model)
        {
            results.Add(new MetaSimple
            {
                ModelName = model.ToString(),
                ModelTag = tag.Key,
                TagValue = tag
            });
        }
    }

    return results;
}
/// <summary>
/// Adds a background image to the sprite list unless a sprite filter ignores
/// it or it is already sprited. Flushes the container when limits are reached.
/// </summary>
/// <param name="image">The CSS background image to sprite.</param>
public virtual void Add(BackgroundImageClass image)
{
    var imageKey = new ImageMetadata(image);

    // Idiom fix: Any() replaces Where(...).FirstOrDefault(...) != null, and the
    // filter context is loop-invariant, so it is built once instead of per filter.
    var filterContext = new SpriteFilterContext(image);
    if (RRContainer.Current.GetAllInstances<IFilter>()
            .Any(f => f is SpriteFilter && f.IgnoreTarget(filterContext))
        || spriteList.ContainsKey(imageKey))
        return;

    SpritedImage spritedImage;
    try
    {
        spritedImage = SpriteContainer.AddImage(image);
    }
    catch (InvalidOperationException ex)
    {
        // Reducing this image failed: trace, report via the capture hook,
        // and leave the image un-sprited.
        var message = string.Format("There were errors reducing {0}", image.ImageUrl);
        var wrappedException = new ApplicationException(message, ex);
        RRTracer.Trace(message);
        RRTracer.Trace(ex.ToString());
        if (Registry.CaptureErrorAction != null)
            Registry.CaptureErrorAction(wrappedException);
        return;
    }

    spriteList.Add(imageKey, spritedImage);

    // Flush once past the size limit, or past the color limit while
    // quantization and optimization are both enabled.
    if (SpriteContainer.Size >= config.SpriteSizeLimit
        || (SpriteContainer.Colors >= config.SpriteColorLimit
            && !config.ImageQuantizationDisabled
            && !config.ImageOptimizationDisabled))
        Flush();
}
/// <summary>
/// Adds a background image to the sprite list if it has not been sprited yet.
/// Flushes the container when the configured limits are reached.
/// </summary>
/// <param name="image">The CSS background image to sprite.</param>
public virtual void Add(BackgroundImageClass image)
{
    var imageKey = new ImageMetadata(image);

    // Already sprited? Nothing to do.
    if (spriteList.ContainsKey(imageKey))
        return;

    SpritedImage spritedImage = null;
    try
    {
        spritedImage = SpriteContainer.AddImage(image);
    }
    catch (InvalidOperationException ex)
    {
        // Reduction failed: trace the problem, surface it through the
        // capture hook, and leave the image un-sprited.
        var message = string.Format("There were errors reducing {0}", image.ImageUrl);
        RRTracer.Trace(message);
        RRTracer.Trace(ex.ToString());
        var wrappedException = new ApplicationException(message, ex);
        if (RequestReduceModule.CaptureErrorAction != null)
            RequestReduceModule.CaptureErrorAction(wrappedException);
        return;
    }

    spriteList.Add(imageKey, spritedImage);

    // Flush when the container grows past the size limit, or past the color
    // limit while quantization and optimization are both enabled.
    bool overSize = SpriteContainer.Size >= config.SpriteSizeLimit;
    bool overColors = SpriteContainer.Colors >= config.SpriteColorLimit
                      && !config.ImageQuantizationDisabled
                      && !config.ImageOptimizationDisabled;
    if (overSize || overColors)
        Flush();
}
/// <summary>
/// Gets the sprite registered for the given background image,
/// or null when the image has not been sprited.
/// </summary>
/// <param name="image">The CSS background image to look up.</param>
public virtual Sprite this[BackgroundImageClass image]
{
    get
    {
        // Single dictionary lookup; the original did ContainsKey + indexer,
        // hashing the key twice.
        var imageKey = new ImageMetadata(image);
        Sprite sprite;
        return spriteList.TryGetValue(imageKey, out sprite) ? sprite : null;
    }
}
/// <summary>
/// Builds a minimal metadata instance and persists it as a CXML file.
/// </summary>
public void SaveImageMetadata()
{
    IImageMetadata metadata = new ImageMetadata();
    metadata.Clear();

    // Populate the two required identifying fields.
    metadata.Id = "33";
    metadata.Filename = "testImage.jpg";

    // Serialize to disk; the writer is disposed (and flushed) on scope exit.
    using (XmlWriter writer = Helpers.CreateXmlWriter(@"testImageMetadata.cxml"))
    {
        metadata.Save(writer);
    }
}
// Prompts the user for an image file, loads it with FreeImage, and fills the
// treeview with one node per metadata model and one subnode per tag.
private void bLoad_Click(object sender, EventArgs e)
{
    // Create variables
    OpenFileDialog ofd = new OpenFileDialog();
    FIBITMAP dib = new FIBITMAP(); // FreeImage bitmap handle; default = null handle
    try
    {
        // Apply settings
        ofd.CheckFileExists = true;
        ofd.CheckPathExists = true;
        ofd.FileName = "";
        ofd.Filter = "All files (*.*)|*.*";
        ofd.Multiselect = false;
        ofd.RestoreDirectory = true;

        // Get image filename
        if (ofd.ShowDialog() == DialogResult.OK)
        {
            // Load the image
            dib = FreeImage.LoadEx(ofd.FileName);

            // Check if image was loaded successfully
            if (dib.IsNull) throw new Exception("Failed to load image.");

            // Clear the treeview
            tvMetadata.Nodes.Clear();

            // Create a wrapper for all metadata the image contains
            ImageMetadata iMetadata = new ImageMetadata(dib);

            // Get each metadata model
            foreach (MetadataModel metadataModel in iMetadata)
            {
                // Create a new node for each model
                TreeNode modelNode = tvMetadata.Nodes.Add(metadataModel.ToString());

                // Get each metadata tag and create a subnode for it
                foreach (MetadataTag metadataTag in metadataModel)
                {
                    modelNode.Nodes.Add(metadataTag.Key + ": " + metadataTag.ToString());
                }
            }
        }
        else
        {
            MessageBox.Show("Operation aborted.", "Aborted");
        }
    }
    // Display error message; unwrap to the innermost exception for clarity
    catch (Exception ex)
    {
        while (ex.InnerException != null) ex = ex.InnerException;
        MessageBox.Show(ex.ToString(), "Exception caught");
    }
    // Clean up: dispose the dialog and release the FreeImage bitmap,
    // even when loading or metadata reading throws.
    finally
    {
        ofd.Dispose();
        FreeImage.UnloadEx(ref dib);
    }
}
/// <summary>
/// Adds a background image to the sprite container and returns its sprite,
/// reusing the existing sprite when the image was already added.
/// </summary>
/// <param name="image">The CSS background image to sprite.</param>
/// <returns>The sprite (position + index) assigned to the image.</returns>
public virtual Sprite Add(BackgroundImageClass image)
{
    var imageKey = new ImageMetadata(image);

    // Single dictionary lookup; the original did ContainsKey + indexer,
    // hashing the key twice.
    Sprite existing;
    if (spriteList.TryGetValue(imageKey, out existing))
        return existing;

    // The new sprite starts at the container's current right edge.
    var currentPositionToReturn = SpriteContainer.Width;
    SpriteContainer.AddImage(image);
    var sprite = new Sprite(currentPositionToReturn, spriteIndex);

    if (SpriteContainer.Size >= config.SpriteSizeLimit)
        Flush();

    spriteList.Add(imageKey, sprite);
    return sprite;
}
/// <summary>
/// Adds a background image to the sprite list: ignored images are skipped,
/// duplicate metadata reuses the already-sprited pixels, and the container
/// is flushed when the configured limits are reached.
/// </summary>
/// <param name="image">The CSS background image to sprite.</param>
public virtual void Add(BackgroundImageClass image)
{
    var imageKey = new ImageMetadata(image);

    // Skip images that any registered SpriteFilter elects to ignore.
    if (RRContainer.Current.GetAllInstances<IFilter>().Where(x => x is SpriteFilter).FirstOrDefault(y => y.IgnoreTarget(new SpriteFilterContext(image))) != null)
        return;

    // Duplicate metadata: reuse the already-sprited pixels but register a
    // separate entry so this CSS class keeps its own position/url mapping.
    if (SpriteList.Any(x => x.Key.Equals(imageKey)))
    {
        var originalImage = SpriteList.First(x => x.Key.Equals(imageKey)).Value;
        var clonedImage = new SpritedImage(originalImage.AverageColor, image, originalImage.Image)
        {
            Position = originalImage.Position,
            Url = originalImage.Url,
            Metadata = imageKey
        };
        //SpriteContainer.AddImage(clonedImage);
        SpriteList.Add(new KeyValuePair<ImageMetadata, SpritedImage>(imageKey, clonedImage));
        return;
    }

    SpritedImage spritedImage;

    // Same URL already sprited under different metadata: flag both the new
    // class and the existing entry as sprites.
    var sprite = SpriteList.FirstOrDefault(x => x.Value.CssClass.ImageUrl == image.ImageUrl);
    if (sprite.Value != null)
    {
        image.IsSprite = true;
        sprite.Value.CssClass.IsSprite = true;
    }

    try
    {
        spritedImage = SpriteContainer.AddImage(image);
        spritedImage.Metadata = imageKey;
    }
    catch (Exception ex)
    {
        // Reduction failed: trace, report via the capture hook, and skip.
        var message = string.Format("There were errors reducing {0}", image.ImageUrl);
        var wrappedException = new ApplicationException(message, ex);
        RRTracer.Trace(message);
        RRTracer.Trace(ex.ToString());
        if (Registry.CaptureErrorAction != null)
            Registry.CaptureErrorAction(wrappedException);
        return;
    }

    SpriteList.Add(new KeyValuePair<ImageMetadata, SpritedImage>(imageKey, spritedImage));

    // Flush once past the size limit, or past the color limit while
    // quantization and optimization are both enabled.
    if (SpriteContainer.Size >= config.SpriteSizeLimit || (SpriteContainer.Colors >= config.SpriteColorLimit && !config.ImageQuantizationDisabled && !config.ImageOptimizationDisabled))
        Flush();
}
/// <summary>
/// Adds a background image to the sprite list: excluded images are skipped,
/// duplicate metadata reuses the already-sprited pixels, and the container
/// is flushed when the configured limits are reached.
/// </summary>
/// <param name="image">The CSS background image to sprite.</param>
public virtual void Add(BackgroundImageClass image)
{
    // Honor the optional exclusion hook before doing any work.
    if (ImageExclusionFilter != null && ImageExclusionFilter(image)) return;

    var imageKey = new ImageMetadata(image);

    // Duplicate metadata: reuse the already-sprited pixels but register a
    // separate entry so this CSS class keeps its own position/url mapping.
    if (spriteList.Any(x => x.Key.Equals(imageKey)))
    {
        var originalImage = spriteList.First(x => x.Key.Equals(imageKey)).Value;
        var clonedImage = new SpritedImage(originalImage.AverageColor, image, originalImage.Image)
        {
            Position = originalImage.Position,
            Url = originalImage.Url,
            Metadata = imageKey
        };
        spriteList.Add(new KeyValuePair<ImageMetadata, SpritedImage>(imageKey, clonedImage));
        return;
    }

    SpritedImage spritedImage;

    // Same URL already sprited under different metadata: flag both the new
    // class and the existing entry as sprites.
    var sprite = spriteList.FirstOrDefault(x => x.Value.CssClass.ImageUrl == image.ImageUrl);
    if (sprite.Value != null)
    {
        image.IsSprite = true;
        sprite.Value.CssClass.IsSprite = true;
    }

    try
    {
        spritedImage = SpriteContainer.AddImage(image);
        spritedImage.Metadata = imageKey;
    }
    catch (Exception ex)
    {
        // Reduction failed: record the error (callers inspect Errors) and skip.
        var message = string.Format("There were errors reducing {0}", image.ImageUrl);
        Tracer.Trace(message);
        Tracer.Trace(ex.ToString());
        var wrappedException = new SpriteException(image.OriginalClassString, message, ex);
        Errors.Add(wrappedException);
        return;
    }

    spriteList.Add(new KeyValuePair<ImageMetadata, SpritedImage>(imageKey, spritedImage));

    // Flush once past the size limit, or past the color limit while
    // quantization and optimization are both enabled.
    if (SpriteContainer.Size >= config.SpriteSizeLimit || (SpriteContainer.Colors >= config.SpriteColorLimit && !config.ImageQuantizationDisabled && !config.ImageOptimizationDisabled))
        Flush();
}
/// <summary>
/// Reads the EXIF Orientation value of a JPEG file.
/// </summary>
/// <param name="fi">The JPEG file to inspect.</param>
/// <param name="rotateme">Receives the orientation value; 1 when none is found.</param>
/// <returns>True when an Orientation tag was present and read.</returns>
public static bool GetRotInfo(FileInfo fi, out ushort rotateme)
{
    rotateme = 1; // default when the file has no orientation tag
    bool found = false;

    FIBITMAP? handle = GetJPGImageHandle(fi);
    if (handle != null)
    {
        FIBITMAP dib = (FIBITMAP)handle;
        var metadata = new ImageMetadata(dib);

        // Orientation lives in the main EXIF directory.
        MetadataModel exifMain = metadata[FREE_IMAGE_MDMODEL.FIMD_EXIF_MAIN];
        MetadataTag orientationTag = exifMain != null ? exifMain.GetTag("Orientation") : null;
        if (orientationTag != null)
        {
            // The tag value is a one-element ushort array.
            rotateme = ((ushort[])orientationTag.Value)[0];
            found = true;
        }

        // Always release the FreeImage handle acquired above.
        CleanUpResources(dib);
    }

    return found;
}
/// <summary>
/// Decides whether a file passes the current search term and rating filter.
/// A file passes the search when the term occurs (case-insensitively) in its
/// comment, file name, tags, or authors; it passes the rating filter when its
/// rate matches. Empty search / zero filter each pass automatically.
/// </summary>
/// <param name="filePath">Path of the image file to check.</param>
/// <param name="search">Search term; blank means "no search filter".</param>
/// <param name="rateFilter">Rating filter; 0 or less means "no rating filter".</param>
/// <returns>True when the file passes both filters.</returns>
private bool checkFilterAndSearch(string filePath, string search, int rateFilter)
{
    search = search.ToLower();
    bool hasSearch = search.Trim() != "";

    // Nothing to filter on: every file passes. (The original duplicated the
    // hasSearch check in a nested if and nested each field test five deep.)
    if (!hasSearch && rateFilter <= 0)
        return true;

    ImageMetadata imageMetadata = new ImageMetadata(filePath);

    bool viewSearch = !hasSearch;
    if (hasSearch)
    {
        // A file matches when the term occurs in any of these fields.
        string[] fields =
        {
            imageMetadata.getComment(),
            imageMetadata.getFileName(),
            imageMetadata.convertArrToString(imageMetadata.getTags(), ' '),
            imageMetadata.convertArrToString(imageMetadata.getAuthors(), ' ')
        };
        foreach (string field in fields)
        {
            if (field != null && field.Trim() != "" && field.ToLower().Contains(search))
            {
                viewSearch = true;
                break;
            }
        }
    }

    // NOTE(review): the original compares against the wg_rateFilter field, not
    // the rateFilter parameter; preserved here, but it looks like a bug —
    // confirm which value is intended.
    bool viewRate = rateFilter <= 0 || imageMetadata.getRate() == wg_rateFilter;

    return viewSearch && viewRate;
}
// Populates the instance's EXIF-derived fields (camera, timestamps, exposure,
// orientation, dimensions, flash, metering, GPS) from a FreeImage bitmap.
private void FillProperties(FIBITMAP dib)
{
    // Create a wrapper for all metadata the image contains
    ImageMetadata iMetadata = new ImageMetadata(dib);

    // NOTE(review): assumes the indexer always returns a model wrapper for
    // EXIF_MAIN / EXIF_EXIF (never null) — confirm against the FreeImage.NET
    // version in use, otherwise the casts below can throw.
    var main = ((MDM_EXIF_MAIN) iMetadata[FREE_IMAGE_MDMODEL.FIMD_EXIF_MAIN]);
    var exif = ((MDM_EXIF_EXIF) iMetadata[FREE_IMAGE_MDMODEL.FIMD_EXIF_EXIF]);

    // Camera / description fields (trimmed; copyright entries joined).
    _equipMake = SafeTrim(main.Make);
    _equipModel = SafeTrim(main.EquipmentModel);
    _imageDescription = SafeTrim(main.ImageDescription);
    _copyright = SafeJoin(main.Copyright);

    // Capture timestamps and exposure parameters.
    _dtOrig = exif.DateTimeOriginal;
    _dtDigitized = exif.DateTimeDigitized;
    _focalLength = exif.FocalLength;
    _fNumber = exif.FNumber;

    // Exposure time is stored as a rational; render it as "num/den".
    if (exif.ExposureTime.HasValue)
        _exposureTime = string.Format("{0}/{1}", exif.ExposureTime.Value.Numerator, exif.ExposureTime.Value.Denominator);
    _exposureBias = exif.ExposureBiasValue;
    _isoSpeed = exif.ISOSpeedRatings != null ? exif.ISOSpeedRatings[0].ToString() : null;
    _orientation = main.Orientation;
    _pixXDim = exif.PixelXDimension;
    _pixYDim = exif.PixelYDimension;

    // Bit 0 of the EXIF Flash value is the "flash fired" flag.
    _flashFired = exif.Flash.HasValue && (exif.Flash.Value & 1) == 1;
    if (exif.Flash.HasValue)
        FillFlashModeResult(exif.Flash.Value);
    _meteringMode = (MeteringMode) (exif.MeteringMode.HasValue ? exif.MeteringMode.Value : 0);

    TryParseGPS(iMetadata, out _latitude, out _longitude);
}
/// <summary>
/// Processes the frame information for the given frame and metadata by
/// delegating to a <see cref="FrameInfoProcessor"/> bound to this instance.
/// </summary>
/// <param name="frame">The image frame to process.</param>
/// <param name="imageMetadata">The metadata of the image the frame belongs to.</param>
public void ProcessFrameInfo(ImageFrame frame, ImageMetadata imageMetadata)
{
    var processor = new FrameInfoProcessor(this);
    processor.Process(frame, imageMetadata);
}
/// <inheritdoc />
public override void Save(Stream stream, Image image, ImageMetadata metadata)
{
    // Reject images whose dimensions or pixel count exceed the encoder limits.
    if (image.Width > BitmapEncoder.MaxAllowedWidth)
    {
        throw new InvalidOperationException("Cannot save the image. The image width is too large.");
    }

    if (image.Height > BitmapEncoder.MaxAllowedHeight)
    {
        throw new InvalidOperationException("Cannot save the image. The image height is too large.");
    }

    if ((long)image.Width * image.Height > BitmapEncoder.MaxAllowedPixels)
    {
        throw new InvalidOperationException("Cannot save the image. The number of pixels in the image is too large.");
    }

    // BMP rows are padded to 32-bit boundaries; stride8 is the row length in bytes.
    int stride32 = ((image.Width * image.BitsPerPixel) + 31) / 32;
    int stride8 = stride32 * 4;
    int imagesize = stride8 * image.Height;

    // Indexed formats (<= 8 bpp) carry a palette of 2^bpp entries (4 bytes each).
    int ncolors = image.BitsPerPixel <= 8 ? 1 << image.BitsPerPixel : 0;
    int offbytes = 14 /* file header */ + 40 /* bmp header */ + (ncolors * 4) /* colors */;
    int fsize = offbytes + imagesize;

    // leaveOpen: true — the caller owns the stream.
    using (BinaryWriter writer = new BinaryWriter(stream, Encoding.UTF8, true))
    {
        // write file header
        writer.Write(BitmapEncoder.Signature);
        writer.Write(fsize);
        writer.Write(0U);
        writer.Write(offbytes);

        // write header
        writer.Write(40); // size of header
        writer.Write(image.Width);
        writer.Write(image.Height);
        writer.Write((ushort)1);
        writer.Write((ushort)image.BitsPerPixel);
        writer.Write(0); // compression (0 == uncompressed)
        writer.Write(imagesize);
        // Resolution stored as pixels-per-meter: dpi * 39.37 in/m, rounded.
        writer.Write((int)((39.37 * image.HorizontalResolution) + 0.5));
        writer.Write((int)((39.37 * image.VerticalResolution) + 0.5));
        writer.Write(ncolors);
        writer.Write(ncolors);

        // write colors if necessary
        if (ncolors > 0)
        {
            Color[] palette = Image.CreatePalette(image.BitsPerPixel);
            for (int i = 0, ii = palette.Length; i < ii; i++)
            {
                writer.Write(palette[i].Argb);
            }
        }

        // write bits
        byte[] bitsdst = new byte[imagesize];
        unsafe
        {
            // positive height indicates that bitmap is bottom-up, so copy
            // rows from the last one upward using a negative source stride
            fixed(ulong *src = &image.Bits[(image.Height - 1) * image.Stride])
            {
                fixed(byte *dst = bitsdst)
                {
                    Arrays.CopyStrides(image.Height, new IntPtr(src), -image.Stride8, new IntPtr(dst), stride8);

                    if (image.BitsPerPixel < 8)
                    {
                        // make bits big-endian
                        Vectors.SwapBits(image.Height * stride32, image.BitsPerPixel, (uint *)dst);
                    }
                }
            }
        }

        writer.Write(bitsdst);
        writer.Flush();
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="Image{TPixel}"/> class
/// with the height and the width of the image.
/// </summary>
/// <param name="configuration">The configuration providing initialization code which allows extending the library. Falls back to <see cref="Configuration.Default"/> when null.</param>
/// <param name="width">The width of the image in pixels.</param>
/// <param name="height">The height of the image in pixels.</param>
/// <param name="backgroundColor">The color to initialize the pixels with.</param>
/// <param name="metadata">The images metadata. A fresh instance is created when null.</param>
internal Image(Configuration configuration, int width, int height, TPixel backgroundColor, ImageMetadata metadata)
{
    this.configuration = configuration ?? Configuration.Default;

    // Pixel type info is derived from the pixel struct size, in bits.
    this.PixelType = new PixelTypeInfo(Unsafe.SizeOf<TPixel>() * 8);
    this.Metadata = metadata ?? new ImageMetadata();
    this.Frames = new ImageFrameCollection<TPixel>(this, width, height, backgroundColor);
}
/// <summary>
/// Initializes a new instance of the <see cref="ImageStatusEventData"/> class,
/// pairing a load status with the metadata of the image it refers to.
/// </summary>
/// <param name="loadStatus">The current load status of the image.</param>
/// <param name="metadata">The metadata of the image the event refers to.</param>
public ImageStatusEventData(ImageLoadStatus loadStatus, ImageMetadata metadata)
{
    LoadStatus = loadStatus;
    Metadata = metadata;
}
/// <summary>
/// Initializes a new instance of the <see cref="Image{TPixel}" /> class
/// with the height and the width of the image.
/// </summary>
/// <param name="configuration">The configuration providing initialization code which allows extending the library.</param>
/// <param name="metadata">The images metadata.</param>
/// <param name="frames">The frames that will be owned by this image instance.</param>
internal Image(Configuration configuration, ImageMetadata metadata, IEnumerable<ImageFrame<TPixel>> frames)
    // ValidateFramesAndGetSize derives the image size from the frame
    // collection (and presumably validates it — see that helper).
    : base(configuration, PixelTypeInfo.Create<TPixel>(), metadata, ValidateFramesAndGetSize(frames))
{
    this.Frames = new ImageFrameCollection<TPixel>(this, frames);
}
/// <summary>
/// Encode writes the image to the jpeg baseline format with the given options.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="image">The image to write from.</param>
/// <param name="stream">The stream to write to.</param>
/// <param name="cancellationToken">The token to request cancellation.</param>
public void Encode<TPixel>(Image<TPixel> image, Stream stream, CancellationToken cancellationToken)
    where TPixel : unmanaged, IPixel<TPixel>
{
    Guard.NotNull(image, nameof(image));
    Guard.NotNull(stream, nameof(stream));
    cancellationToken.ThrowIfCancellationRequested();

    // JPEG stores dimensions in 16-bit fields, so both must stay below MaxLength.
    const ushort max = JpegConstants.MaxLength;
    if (image.Width >= max || image.Height >= max)
    {
        throw new ImageFormatException($"Image is too large to encode at {image.Width}x{image.Height}.");
    }

    this.outputStream = stream;
    ImageMetadata metadata = image.Metadata;

    // System.Drawing produces identical output for jpegs with a quality parameter of 0 and 1.
    int qlty = Numerics.Clamp(this.quality ?? metadata.GetJpegMetadata().Quality, 1, 100);

    // High quality (>= 91) keeps full chroma; otherwise subsample 4:2:0.
    this.subsample ??= qlty >= 91 ? JpegSubsample.Ratio444 : JpegSubsample.Ratio420;

    // Convert from a quality rating to a scaling factor.
    int scale;
    if (qlty < 50)
    {
        scale = 5000 / qlty;
    }
    else
    {
        scale = 200 - (qlty * 2);
    }

    // Initialize the quantization tables.
    InitQuantizationTable(0, scale, ref this.luminanceQuantTable);
    InitQuantizationTable(1, scale, ref this.chrominanceQuantTable);

    // Compute number of components based on input image type.
    const int componentCount = 3;

    // NOTE: the segment order below is load-bearing — headers, profiles,
    // tables, frame, then scan data, closed by EOI.

    // Write the Start Of Image marker.
    this.WriteApplicationHeader(metadata);

    // Write Exif, ICC and IPTC profiles
    this.WriteProfiles(metadata);

    // Write the quantization tables.
    this.WriteDefineQuantizationTables();

    // Write the image dimensions.
    this.WriteStartOfFrame(image.Width, image.Height, componentCount);

    // Write the Huffman tables.
    this.WriteDefineHuffmanTables(componentCount);

    // Write the image data.
    this.WriteStartOfScan(image, cancellationToken);

    // Write the End Of Image marker.
    this.buffer[0] = JpegConstants.Markers.XFF;
    this.buffer[1] = JpegConstants.Markers.EOI;
    stream.Write(this.buffer, 0, 2);
    stream.Flush();
}
// Smoke test: constructing a metadata instance and clearing it must not throw.
public void CreateImageMetadata()
{
    IImageMetadata metadata = new ImageMetadata();
    metadata.Clear();
}
/// <summary>
/// set the rotation info
/// </summary>
/// <remarks>FREEIMAGE (until 3.11) does not support writing of metadata!!!!!!!!!</remarks>
/// <param name="dib">The FreeImage bitmap whose EXIF orientation is updated.</param>
/// <param name="rotInfo">The orientation value to store.</param>
/// <returns>if the value was changed</returns>
public static bool SetRotateInfo(FIBITMAP dib, ushort rotInfo)
{
    bool changed = false;
    var iMetadata = new ImageMetadata(dib);
    MetadataModel exifMain = iMetadata[FREE_IMAGE_MDMODEL.FIMD_EXIF_MAIN];
    if (exifMain != null)
    {
        MetadataTag orientationTag = exifMain.GetTag("Orientation");
        if (orientationTag != null)
        {
            // BUG FIX: the original compared the stored value to a freshly
            // allocated array with '!=' — a reference comparison that is
            // always true — so the tag was rewritten and 'changed' reported
            // on every call. Compare the actual orientation value instead.
            var current = orientationTag.Value as ushort[];
            if (current == null || current.Length == 0 || current[0] != rotInfo)
            {
                orientationTag.SetValue(new[] { rotInfo });
                changed = true;
            }
        }
    }

    return changed;
}
/// <inheritdoc/>
public Image<TPixel> Decode<TPixel>(BufferedReadStream stream)
    where TPixel : unmanaged, IPixel<TPixel>
{
    var metadata = new ImageMetadata();
    PngMetadata pngMetadata = metadata.GetPngMetadata();
    this.currentStream = stream;

    // Skip the 8-byte PNG file signature.
    this.currentStream.Skip(8);

    Image<TPixel> image = null;
    try
    {
        // Walk the chunk stream until the IEND chunk (or end of data) is reached.
        while (!this.isEndChunkReached && this.TryReadChunk(out PngChunk chunk))
        {
            try
            {
                switch (chunk.Type)
                {
                    case PngChunkType.Header:
                        this.ReadHeaderChunk(pngMetadata, chunk.Data.Array);
                        break;
                    case PngChunkType.Physical:
                        this.ReadPhysicalChunk(metadata, chunk.Data.GetSpan());
                        break;
                    case PngChunkType.Gamma:
                        this.ReadGammaChunk(pngMetadata, chunk.Data.GetSpan());
                        break;
                    case PngChunkType.Data:
                        // The first data chunk triggers allocation of the image.
                        if (image is null)
                        {
                            this.InitializeImage(metadata, out image);
                        }

                        this.ReadScanlines(chunk, image.Frames.RootFrame, pngMetadata);
                        break;
                    case PngChunkType.Palette:
                        // Copy out of the rented buffer before it is returned below.
                        var pal = new byte[chunk.Length];
                        Buffer.BlockCopy(chunk.Data.Array, 0, pal, 0, chunk.Length);
                        this.palette = pal;
                        break;
                    case PngChunkType.Transparency:
                        var alpha = new byte[chunk.Length];
                        Buffer.BlockCopy(chunk.Data.Array, 0, alpha, 0, chunk.Length);
                        this.paletteAlpha = alpha;
                        this.AssignTransparentMarkers(alpha, pngMetadata);
                        break;
                    case PngChunkType.Text:
                        this.ReadTextChunk(pngMetadata, chunk.Data.Array.AsSpan(0, chunk.Length));
                        break;
                    case PngChunkType.CompressedText:
                        this.ReadCompressedTextChunk(pngMetadata, chunk.Data.Array.AsSpan(0, chunk.Length));
                        break;
                    case PngChunkType.InternationalText:
                        this.ReadInternationalTextChunk(pngMetadata, chunk.Data.Array.AsSpan(0, chunk.Length));
                        break;
                    case PngChunkType.Exif:
                        if (!this.ignoreMetadata)
                        {
                            var exifData = new byte[chunk.Length];
                            Buffer.BlockCopy(chunk.Data.Array, 0, exifData, 0, chunk.Length);
                            metadata.ExifProfile = new ExifProfile(exifData);
                        }

                        break;
                    case PngChunkType.End:
                        this.isEndChunkReached = true;
                        break;
                    case PngChunkType.ProprietaryApple:
                        PngThrowHelper.ThrowInvalidChunkType("Proprietary Apple PNG detected! This PNG file is not conform to the specification and cannot be decoded.");
                        break;
                }
            }
            finally
            {
                chunk.Data?.Dispose(); // Data is rented in ReadChunkData()
            }
        }

        // A PNG without any IDAT chunk is invalid.
        if (image is null)
        {
            PngThrowHelper.ThrowNoData();
        }

        return (image);
    }
    finally
    {
        // Return pooled scanline buffers even when decoding throws.
        this.scanline?.Dispose();
        this.previousScanline?.Dispose();
    }
}
// Verifies that decoding the baseline TIFF fixture yields the expected frame
// dimensions, XMP/EXIF profiles, TIFF frame metadata, and image metadata.
public void BaselineTags<TPixel>(TestImageProvider<TPixel> provider)
    where TPixel : unmanaged, IPixel<TPixel>
{
    using (Image<TPixel> image = provider.GetImage(TiffDecoder))
    {
        // Frame dimensions and XMP profile presence/size.
        ImageFrame<TPixel> rootFrame = image.Frames.RootFrame;
        Assert.Equal(32, rootFrame.Width);
        Assert.Equal(32, rootFrame.Height);
        Assert.NotNull(rootFrame.Metadata.XmpProfile);
        Assert.Equal(2599, rootFrame.Metadata.XmpProfile.Length);

        ExifProfile exifProfile = rootFrame.Metadata.ExifProfile;
        TiffFrameMetadata tiffFrameMetadata = rootFrame.Metadata.GetTiffMetadata();
        Assert.NotNull(exifProfile);

        // The original exifProfile has 30 values, but 4 of those values will be stored in the TiffFrameMetaData
        // and removed from the profile on decode.
        Assert.Equal(26, exifProfile.Values.Count);
        Assert.Equal(TiffBitsPerPixel.Bit4, tiffFrameMetadata.BitsPerPixel);
        Assert.Equal(TiffCompression.Lzw, tiffFrameMetadata.Compression);

        // String tags; the fixture deliberately contains Cyrillic text.
        Assert.Equal("This is Название", exifProfile.GetValue(ExifTag.ImageDescription).Value);
        Assert.Equal("This is Изготовитель камеры", exifProfile.GetValue(ExifTag.Make).Value);
        Assert.Equal("This is Модель камеры", exifProfile.GetValue(ExifTag.Model).Value);
        Assert.Equal("IrfanView", exifProfile.GetValue(ExifTag.Software).Value);
        Assert.Null(exifProfile.GetValue(ExifTag.DateTime)?.Value);
        Assert.Equal("This is author1;Author2", exifProfile.GetValue(ExifTag.Artist).Value);
        Assert.Null(exifProfile.GetValue(ExifTag.HostComputer)?.Value);
        Assert.Equal("This is Авторские права", exifProfile.GetValue(ExifTag.Copyright).Value);
        Assert.Equal(4, exifProfile.GetValue(ExifTag.Rating).Value);
        Assert.Equal(75, exifProfile.GetValue(ExifTag.RatingPercent).Value);

        // Resolution is kept unsimplified so the exact stored rational matches.
        var expectedResolution = new Rational(10000, 1000, simplify: false);
        Assert.Equal(expectedResolution, exifProfile.GetValue(ExifTag.XResolution).Value);
        Assert.Equal(expectedResolution, exifProfile.GetValue(ExifTag.YResolution).Value);

        // Strip layout tags.
        Assert.Equal(new Number[] { 8u }, exifProfile.GetValue(ExifTag.StripOffsets)?.Value, new NumberComparer());
        Assert.Equal(new Number[] { 297u }, exifProfile.GetValue(ExifTag.StripByteCounts)?.Value, new NumberComparer());
        Assert.Null(exifProfile.GetValue(ExifTag.ExtraSamples)?.Value);
        Assert.Equal(32u, exifProfile.GetValue(ExifTag.RowsPerStrip).Value);
        Assert.Null(exifProfile.GetValue(ExifTag.SampleFormat));
        Assert.Equal(TiffPredictor.None, tiffFrameMetadata.Predictor);
        Assert.Equal(PixelResolutionUnit.PixelsPerInch, UnitConverter.ExifProfileToResolutionUnit(exifProfile));

        // Palette: spot-check first and last entries of the 48-value color map.
        ushort[] colorMap = exifProfile.GetValue(ExifTag.ColorMap)?.Value;
        Assert.NotNull(colorMap);
        Assert.Equal(48, colorMap.Length);
        Assert.Equal(10537, colorMap[0]);
        Assert.Equal(14392, colorMap[1]);
        Assert.Equal(58596, colorMap[46]);
        Assert.Equal(3855, colorMap[47]);
        Assert.Equal(TiffPhotometricInterpretation.PaletteColor, tiffFrameMetadata.PhotometricInterpretation);
        Assert.Equal(1u, exifProfile.GetValue(ExifTag.SamplesPerPixel).Value);

        // Image-level metadata: resolution and byte order.
        ImageMetadata imageMetaData = image.Metadata;
        Assert.NotNull(imageMetaData);
        Assert.Equal(PixelResolutionUnit.PixelsPerInch, imageMetaData.ResolutionUnits);
        Assert.Equal(10, imageMetaData.HorizontalResolution);
        Assert.Equal(10, imageMetaData.VerticalResolution);
        TiffMetadata tiffMetaData = image.Metadata.GetTiffMetadata();
        Assert.NotNull(tiffMetaData);
        Assert.Equal(ByteOrder.LittleEndian, tiffMetaData.ByteOrder);
    }
}
// Round-trips a TIFF through decode → encode → decode and verifies that
// frame metadata, resolution, XMP and EXIF string values are preserved.
public void Encode_PreservesMetadata<TPixel>(TestImageProvider<TPixel> provider)
    where TPixel : unmanaged, IPixel<TPixel>
{
    // Load Tiff image
    using Image<TPixel> image = provider.GetImage(new TiffDecoder() { IgnoreMetadata = false });
    ImageMetadata inputMetaData = image.Metadata;
    ImageFrame<TPixel> rootFrameInput = image.Frames.RootFrame;
    TiffFrameMetadata frameMetaInput = rootFrameInput.Metadata.GetTiffMetadata();
    byte[] xmpProfileInput = rootFrameInput.Metadata.XmpProfile;
    ExifProfile exifProfileInput = rootFrameInput.Metadata.ExifProfile;

    // Sanity-check the fixture before round-tripping.
    Assert.Equal(TiffCompression.Lzw, frameMetaInput.Compression);
    Assert.Equal(TiffBitsPerPixel.Bit4, frameMetaInput.BitsPerPixel);

    // Save to Tiff
    var tiffEncoder = new TiffEncoder() { PhotometricInterpretation = TiffPhotometricInterpretation.Rgb };
    using var ms = new MemoryStream();
    image.Save(ms, tiffEncoder);

    // Assert
    ms.Position = 0;
    using var encodedImage = Image.Load<Rgba32>(ms);
    ImageMetadata encodedImageMetaData = encodedImage.Metadata;
    ImageFrame<Rgba32> rootFrameEncodedImage = encodedImage.Frames.RootFrame;
    TiffFrameMetadata tiffMetaDataEncodedRootFrame = rootFrameEncodedImage.Metadata.GetTiffMetadata();
    ExifProfile encodedImageExifProfile = rootFrameEncodedImage.Metadata.ExifProfile;
    byte[] encodedImageXmpProfile = rootFrameEncodedImage.Metadata.XmpProfile;

    // Frame-level metadata survives the round trip.
    Assert.Equal(TiffBitsPerPixel.Bit4, tiffMetaDataEncodedRootFrame.BitsPerPixel);
    Assert.Equal(TiffCompression.Lzw, tiffMetaDataEncodedRootFrame.Compression);

    // Image-level resolution metadata survives the round trip.
    Assert.Equal(inputMetaData.HorizontalResolution, encodedImageMetaData.HorizontalResolution);
    Assert.Equal(inputMetaData.VerticalResolution, encodedImageMetaData.VerticalResolution);
    Assert.Equal(inputMetaData.ResolutionUnits, encodedImageMetaData.ResolutionUnits);

    Assert.Equal(rootFrameInput.Width, rootFrameEncodedImage.Width);
    Assert.Equal(rootFrameInput.Height, rootFrameEncodedImage.Height);

    // Resolution as stored in EXIF matches between input and output.
    PixelResolutionUnit resolutionUnitInput = UnitConverter.ExifProfileToResolutionUnit(exifProfileInput);
    PixelResolutionUnit resolutionUnitEncoded = UnitConverter.ExifProfileToResolutionUnit(encodedImageExifProfile);
    Assert.Equal(resolutionUnitInput, resolutionUnitEncoded);
    Assert.Equal(exifProfileInput.GetValue(ExifTag.XResolution).Value.ToDouble(), encodedImageExifProfile.GetValue(ExifTag.XResolution).Value.ToDouble());
    Assert.Equal(exifProfileInput.GetValue(ExifTag.YResolution).Value.ToDouble(), encodedImageExifProfile.GetValue(ExifTag.YResolution).Value.ToDouble());

    // XMP and EXIF string values survive the round trip.
    Assert.Equal(xmpProfileInput, encodedImageXmpProfile);
    Assert.Equal("IrfanView", exifProfileInput.GetValue(ExifTag.Software).Value);
    Assert.Equal("This is Название", exifProfileInput.GetValue(ExifTag.ImageDescription).Value);
    Assert.Equal("This is Изготовитель камеры", exifProfileInput.GetValue(ExifTag.Make).Value);
    Assert.Equal("This is Авторские права", exifProfileInput.GetValue(ExifTag.Copyright).Value);

    Assert.Equal(exifProfileInput.GetValue(ExifTag.ImageDescription).Value, encodedImageExifProfile.GetValue(ExifTag.ImageDescription).Value);
    Assert.Equal(exifProfileInput.GetValue(ExifTag.Make).Value, encodedImageExifProfile.GetValue(ExifTag.Make).Value);
    Assert.Equal(exifProfileInput.GetValue(ExifTag.Copyright).Value, encodedImageExifProfile.GetValue(ExifTag.Copyright).Value);

    // Note that the encoded profile has PlanarConfiguration explicitly set, which is missing in the original image profile.
    Assert.Equal((ushort)TiffPlanarConfiguration.Chunky, encodedImageExifProfile.GetValue(ExifTag.PlanarConfiguration)?.Value);
    Assert.Equal(exifProfileInput.Values.Count + 1, encodedImageExifProfile.Values.Count);
}
/// <summary>
/// Copies exposure-related values from the EXIF SubIFD directory onto the
/// metadata object. Does nothing when the image has no SubIFD directory.
/// </summary>
/// <param name="directories">All metadata directories extracted from the image.</param>
/// <param name="metaData">The metadata object to populate.</param>
private static void SetExifSubIFDProperties(IEnumerable<Directory> directories, ImageMetadata metaData)
{
    ExifSubIfdDirectory directory = directories.OfType<ExifSubIfdDirectory>().FirstOrDefault();
    if (directory == null)
        return;

    // Exposure settings are read as raw strings; timestamps as nullable dates.
    metaData.ExposureTime = directory.GetString(ExifDirectoryBase.TagExposureTime);
    metaData.FNumber = directory.GetString(ExifDirectoryBase.TagFNumber);
    metaData.ExposureProgram = directory.GetString(ExifDirectoryBase.TagExposureProgram);
    metaData.IsoSpeed = directory.GetString(ExifDirectoryBase.TagIsoEquivalent);
    metaData.DateTimeOriginal = directory.GetNullableDateTime(ExifDirectoryBase.TagDateTimeOriginal);
    metaData.DateTimeDigitized = directory.GetNullableDateTime(ExifDirectoryBase.TagDateTimeDigitized);
}
/// <summary>
/// Gets image details for the given file.
/// </summary>
/// <param name="path">The path to an image file.</param>
/// <param name="useWIC">Whether to use the Windows Imaging Component.</param>
public static ImageMetadata FromFile(string path, bool useWIC)
{
    ImageMetadata imageInfo = new ImageMetadata();

    // An empty path yields an empty (error-free) metadata object.
    if (string.IsNullOrEmpty(path))
        return imageInfo;

    try
    {
        // Read file properties
        FileInfo info = new FileInfo(path);
        imageInfo.FileAttributes = info.Attributes;
        imageInfo.CreationTime = info.CreationTime;
        imageInfo.LastAccessTime = info.LastAccessTime;
        imageInfo.LastWriteTime = info.LastWriteTime;
        imageInfo.Size = info.Length;
        imageInfo.DirectoryName = info.DirectoryName;
        imageInfo.DisplayName = info.Name;
        imageInfo.Extension = info.Extension;

        // Get metadata (null string fields are normalized to "")
        MetadataExtractor metadata = MetadataExtractor.FromFile(path, useWIC);
        imageInfo.Dimensions = new Size(metadata.Width, metadata.Height);
        imageInfo.Resolution = new SizeF((float)metadata.DPIX, (float)metadata.DPIY);
        imageInfo.ImageDescription = metadata.ImageDescription ?? "";
        imageInfo.EquipmentModel = metadata.EquipmentModel ?? "";
        imageInfo.DateTaken = metadata.DateTaken;
        imageInfo.Artist = metadata.Artist ?? "";
        imageInfo.Copyright = metadata.Copyright ?? "";
        imageInfo.ExposureTime = (float)metadata.ExposureTime;
        imageInfo.FNumber = (float)metadata.FNumber;
        imageInfo.ISOSpeed = (ushort)metadata.ISOSpeed;
        imageInfo.UserComment = metadata.Comment ?? "";
        imageInfo.Rating = (ushort)(metadata.Rating);
        imageInfo.Software = metadata.Software ?? "";
        imageInfo.FocalLength = (float)metadata.FocalLength;

        // Propagate a non-fatal extractor error to the caller.
        if (metadata.Error != null)
            imageInfo.Error = metadata.Error;
    }
    catch (Exception e)
    {
        // Deliberate catch-all: extraction failures are reported via the
        // Error property rather than thrown to the caller.
        imageInfo.Error = e;
    }

    return imageInfo;
}
// Prompts the user for an image file, loads it with FreeImage, and fills the
// treeview with one node per metadata model and one subnode per tag.
private void bLoad_Click(object sender, EventArgs e)
{
    // Create variables
    OpenFileDialog ofd = new OpenFileDialog();
    FIBITMAP dib = 0; // FreeImage bitmap handle; 0 = no bitmap loaded
    try
    {
        // Apply settings
        ofd.CheckFileExists = true;
        ofd.CheckPathExists = true;
        ofd.FileName = "";
        ofd.Filter = "All files (*.*)|*.*";
        ofd.Multiselect = false;
        ofd.RestoreDirectory = true;

        // Get image filename
        if (ofd.ShowDialog() == DialogResult.OK)
        {
            // Load the image
            dib = FreeImage.LoadEx(ofd.FileName);

            // Check if image was loaded successfully
            if (dib.IsNull)
            {
                throw new Exception("Failed to load image.");
            }

            // Clear the treeview
            tvMetadata.Nodes.Clear();

            // Create a wrapper for all metadata the image contains
            ImageMetadata iMetadata = new ImageMetadata(dib);

            // Get each metadata model
            foreach (MetadataModel metadataModel in iMetadata)
            {
                // Create a new node for each model
                TreeNode modelNode = tvMetadata.Nodes.Add(metadataModel.ToString());

                // Get each metadata tag and create a subnode for it
                foreach (MetadataTag metadataTag in metadataModel)
                {
                    modelNode.Nodes.Add(metadataTag.Key + ": " + metadataTag.ToString());
                }
            }
        }
        else
        {
            MessageBox.Show("Operation aborted.", "Aborted");
        }
    }
    // Display error message; unwrap to the innermost exception for clarity
    catch (Exception ex)
    {
        while (ex.InnerException != null)
        {
            ex = ex.InnerException;
        }

        MessageBox.Show(ex.ToString(), "Exception caught");
    }
    // Clean up: dispose the dialog and release the FreeImage bitmap,
    // even when loading or metadata reading throws.
    finally
    {
        ofd.Dispose();
        FreeImage.UnloadEx(ref dib);
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="Image{TPixel}"/> class
/// wrapping an external <see cref="MemorySource{T}"/>
/// </summary>
/// <param name="configuration">The configuration providing initialization code which allows extending the library.</param>
/// <param name="memorySource">The memory source.</param>
/// <param name="width">The width of the image in pixels.</param>
/// <param name="height">The height of the image in pixels.</param>
/// <param name="metadata">The images metadata.</param>
internal Image(Configuration configuration, MemorySource<TPixel> memorySource, int width, int height, ImageMetadata metadata)
{
    // NOTE(review): unlike the sibling constructor that coalesces to
    // Configuration.Default / new ImageMetadata(), this one assigns the
    // arguments directly — callers are expected to pass non-null values;
    // confirm this is intentional.
    this.configuration = configuration;
    this.PixelType = new PixelTypeInfo(Unsafe.SizeOf<TPixel>() * 8);
    this.Metadata = metadata;
    this.Frames = new ImageFrameCollection<TPixel>(this, width, height, memorySource);
}
/// <summary>
/// Encodes the image to the specified stream from the <see cref="Image{TPixel}"/>.
/// Runs macroblock analysis, segment assignment, optional alpha-channel
/// compression, the stats loop and the residual-coding loop, then writes the
/// bitstream (including EXIF/XMP profiles) to the stream.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="image">The <see cref="Image{TPixel}"/> to encode from.</param>
/// <param name="stream">The <see cref="Stream"/> to encode the image data to.</param>
public void Encode<TPixel>(Image<TPixel> image, Stream stream)
    where TPixel : unmanaged, IPixel<TPixel>
{
    int width = image.Width;
    int height = image.Height;
    int pixelCount = width * height;

    // Convert the pixels to YUV planes; hasAlpha reports whether any pixel
    // carried transparency.
    Span<byte> y = this.Y.GetSpan();
    Span<byte> u = this.U.GetSpan();
    Span<byte> v = this.V.GetSpan();
    bool hasAlpha = YuvConversion.ConvertRgbToYuv(image, this.configuration, this.memoryAllocator, y, u, v);
    int yStride = width;
    int uvStride = (yStride + 1) >> 1;

    var it = new Vp8EncIterator(this.YTop, this.UvTop, this.Nz, this.MbInfo, this.Preds, this.TopDerr, this.Mbw, this.Mbh);
    int[] alphas = new int[WebpConstants.MaxAlpha + 1];
    this.alpha = this.MacroBlockAnalysis(width, height, it, y, u, v, yStride, uvStride, alphas, out this.uvAlpha);

    // Average the analysis scores over the whole macroblock grid.
    // FIX: the grid has Mbw * Mbh blocks (as used for expectedSize below);
    // the original divided by Mbw * Mbw, skewing the averages for any
    // non-square image.
    int totalMb = this.Mbw * this.Mbh;
    this.alpha /= totalMb;
    this.uvAlpha /= totalMb;

    // Analysis is done, proceed to actual encoding.
    this.SegmentHeader = new Vp8EncSegmentHeader(4);
    this.AssignSegments(alphas);
    this.SetLoopParams(this.quality);

    // Initialize the bitwriter with a size estimate based on the base quantizer.
    int averageBytesPerMacroBlock = AverageBytesPerMb[this.BaseQuant >> 4];
    int expectedSize = this.Mbw * this.Mbh * averageBytesPerMacroBlock;
    this.bitWriter = new Vp8BitWriter(expectedSize, this);

    // Extract and encode alpha channel data, if present.
    int alphaDataSize = 0;
    bool alphaCompressionSucceeded = false;
    using var alphaEncoder = new AlphaEncoder();
    Span<byte> alphaData = Span<byte>.Empty;
    if (hasAlpha)
    {
        // TODO: This can potentially run in a separate task.
        IMemoryOwner<byte> encodedAlphaData = alphaEncoder.EncodeAlpha(image, this.configuration, this.memoryAllocator, this.alphaCompression, out alphaDataSize);
        alphaData = encodedAlphaData.GetSpan();
        if (alphaDataSize < pixelCount)
        {
            // Only use compressed data if it is actually smaller than the
            // uncompressed data.
            alphaCompressionSucceeded = true;
        }
    }

    // Stats-collection loop.
    this.StatLoop(width, height, yStride, uvStride);
    it.Init();
    it.InitFilter();
    var info = new Vp8ModeScore();
    var residual = new Vp8Residual();
    do
    {
        bool dontUseSkip = !this.Proba.UseSkipProba;
        info.Clear();
        it.Import(y, u, v, yStride, uvStride, width, height, false);

        // Warning! order is important: first call VP8Decimate() and
        // *then* decide how to code the skip decision if there's one.
        if (!this.Decimate(it, ref info, this.rdOptLevel) || dontUseSkip)
        {
            this.CodeResiduals(it, info, residual);
        }
        else
        {
            it.ResetAfterSkip();
        }

        it.SaveBoundary();
    }
    while (it.Next());

    // Store filter stats.
    this.AdjustFilterStrength();

    // Write bytes from the bitwriter buffer to the stream.
    ImageMetadata metadata = image.Metadata;
    metadata.SyncProfiles();
    this.bitWriter.WriteEncodedImageToStream(
        stream,
        metadata.ExifProfile,
        metadata.XmpProfile,
        (uint)width,
        (uint)height,
        hasAlpha,
        alphaData,
        this.alphaCompression && alphaCompressionSucceeded);
}
/// <summary>
/// Merges the EXIF, IPTC, ICC and XMP profiles from the image metadata into
/// the collector so they are emitted with the encoded image. When a profile
/// is absent, any stale pointer tag for it is removed from the EXIF profile.
/// </summary>
/// <param name="imageMetadata">The metadata providing the IPTC and ICC profiles.</param>
/// <param name="exifProfile">The EXIF profile to copy entries from; may be null.</param>
/// <param name="xmpProfile">The XMP profile to embed; may be null.</param>
private void ProcessProfiles(ImageMetadata imageMetadata, ExifProfile exifProfile, XmpProfile xmpProfile)
{
    if (exifProfile != null && exifProfile.Parts != ExifParts.None)
    {
        foreach (IExifValue entry in exifProfile.Values)
        {
            // Only copy entries that are not already collected, carry a value,
            // and belong to a part the profile is configured to emit.
            if (!this.Collector.Entries.Exists(t => t.Tag == entry.Tag) && entry.GetValue() != null)
            {
                ExifParts entryPart = ExifTags.GetPart(entry.Tag);
                if (entryPart != ExifParts.None && exifProfile.Parts.HasFlag(entryPart))
                {
                    this.Collector.AddOrReplace(entry.DeepClone());
                }
            }
        }
    }
    else
    {
        // FIX: exifProfile may be null here (the guarded branch above
        // short-circuits on null), so every RemoveValue call in these else
        // branches must use the null-conditional operator to avoid a
        // NullReferenceException.
        exifProfile?.RemoveValue(ExifTag.SubIFDOffset);
    }

    if (imageMetadata.IptcProfile != null)
    {
        imageMetadata.IptcProfile.UpdateData();
        var iptc = new ExifByteArray(ExifTagValue.IPTC, ExifDataType.Byte)
        {
            Value = imageMetadata.IptcProfile.Data
        };
        this.Collector.Add(iptc);
    }
    else
    {
        exifProfile?.RemoveValue(ExifTag.IPTC);
    }

    if (imageMetadata.IccProfile != null)
    {
        var icc = new ExifByteArray(ExifTagValue.IccProfile, ExifDataType.Undefined)
        {
            Value = imageMetadata.IccProfile.ToByteArray()
        };
        this.Collector.Add(icc);
    }
    else
    {
        exifProfile?.RemoveValue(ExifTag.IccProfile);
    }

    if (xmpProfile != null)
    {
        var xmp = new ExifByteArray(ExifTagValue.XMP, ExifDataType.Byte)
        {
            Value = xmpProfile.Data
        };
        this.Collector.Add(xmp);
    }
    else
    {
        exifProfile?.RemoveValue(ExifTag.XMP);
    }
}
/// <summary>
/// Serves an image request: returns a cached result when the cache entry is
/// still valid, otherwise resolves the source image, applies any processing
/// commands, stores the result in the cache and sends it to the caller.
/// Reader/writer async locks keyed on the cache key prevent concurrent
/// processing and file locking for the same request.
/// </summary>
/// <param name="context">The current HTTP context.</param>
/// <param name="processRequest">Whether this middleware should handle the request.</param>
/// <param name="sourceImageResolver">Resolves the source image stream and metadata.</param>
/// <param name="imageContext">The per-request image context used when sending the response.</param>
/// <param name="commands">The parsed processing commands from the query string.</param>
private async Task ProcessRequestAsync(HttpContext context, bool processRequest, IImageResolver sourceImageResolver, ImageContext imageContext, IDictionary<string, string> commands)
{
    // Create a cache key based on all the components of the requested url
    string uri = GetUri(context, commands);
    string key = this.cacheHash.Create(uri, this.options.CachedNameLength);
    ImageMetadata sourceImageMetadata = default;
    if (processRequest)
    {
        // Lock any reads when a write is being done for the same key to prevent potential file locks.
        using (await AsyncLock.ReaderLockAsync(key))
        {
            // Check to see if the cache contains this image
            sourceImageMetadata = await sourceImageResolver.GetMetaDataAsync();
            IImageCacheResolver cachedImageResolver = await this.cache.GetAsync(key);
            if (cachedImageResolver != null)
            {
                ImageCacheMetadata cachedImageMetadata = await cachedImageResolver.GetMetaDataAsync();
                if (cachedImageMetadata != default)
                {
                    // Has the cached image expired or has the source image been updated?
                    if (cachedImageMetadata.SourceLastWriteTimeUtc == sourceImageMetadata.LastWriteTimeUtc
                        && cachedImageMetadata.CacheLastWriteTimeUtc > DateTimeOffset.Now.AddDays(-this.options.MaxCacheDays))
                    {
                        // We're pulling the image from the cache.
                        using (Stream cachedBuffer = await cachedImageResolver.OpenReadAsync())
                        {
                            await this.SendResponseAsync(imageContext, key, cachedBuffer, cachedImageMetadata);
                        }

                        return;
                    }
                }
            }
        }
    }

    // Not cached? Let's get it from the image resolver.
    ChunkedMemoryStream outStream = null;
    try
    {
        if (processRequest)
        {
            // Enter a write lock which locks writing and any reads for the same request.
            // This reduces the overheads of unnecessary processing plus avoids file locks.
            using (await AsyncLock.WriterLockAsync(key))
            {
                // No allocations here for inStream since we are passing the raw input stream.
                // outStream allocation depends on the memory allocator used.
                ImageCacheMetadata cachedImageMetadata = default;
                outStream = new ChunkedMemoryStream();
                using (Stream inStream = await sourceImageResolver.OpenReadAsync())
                {
                    IImageFormat format;

                    // No commands? We simply copy the stream across.
                    if (commands.Count == 0)
                    {
                        format = Image.DetectFormat(this.options.Configuration, inStream);
                        await inStream.CopyToAsync(outStream);
                    }
                    else
                    {
                        using (var image = FormattedImage.Load(this.options.Configuration, inStream))
                        {
                            image.Process(this.logger, this.processors, commands);
                            this.options.OnBeforeSave?.Invoke(image);
                            image.Save(outStream);
                            format = image.Format;
                        }
                    }

                    // Check to see if the source metadata has a cachecontrol max-age value and use it to
                    // override the default max age from our options.
                    var maxAge = TimeSpan.FromDays(this.options.MaxBrowserCacheDays);
                    if (!sourceImageMetadata.CacheControlMaxAge.Equals(TimeSpan.MinValue))
                    {
                        maxAge = sourceImageMetadata.CacheControlMaxAge;
                    }

                    cachedImageMetadata = new ImageCacheMetadata(
                        sourceImageMetadata.LastWriteTimeUtc,
                        DateTime.UtcNow,
                        format.DefaultMimeType,
                        maxAge);
                }

                // Allow for any further optimization of the image. Always reset the position just in case.
                outStream.Position = 0;
                string contentType = cachedImageMetadata.ContentType;
                string extension = this.formatUtilities.GetExtensionFromContentType(contentType);
                this.options.OnProcessed?.Invoke(new ImageProcessingContext(context, outStream, commands, contentType, extension));
                outStream.Position = 0;

                // Save the image to the cache and send the response to the caller.
                await this.cache.SetAsync(key, outStream, cachedImageMetadata);
                await this.SendResponseAsync(imageContext, key, outStream, cachedImageMetadata);
            }
        }
    }
    catch (Exception ex)
    {
        // Log the error internally then rethrow.
        // We don't call next here, the pipeline will automatically handle it
        this.logger.LogImageProcessingFailed(imageContext.GetDisplayUrl(), ex);
        throw;
    }
    finally
    {
        outStream?.Dispose();
    }
}
/// <summary>
/// Encodes the image to the specified stream from the <see cref="Image{TPixel}"/>.
/// Quantizes the root frame (globally or locally depending on the configured
/// color table mode) and writes the GIF header, descriptors, comments and
/// frame data.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="image">The <see cref="Image{TPixel}"/> to encode from.</param>
/// <param name="stream">The <see cref="Stream"/> to encode the image data to.</param>
/// <param name="cancellationToken">The token to request cancellation.</param>
public void Encode<TPixel>(Image<TPixel> image, Stream stream, CancellationToken cancellationToken)
    where TPixel : unmanaged, IPixel<TPixel>
{
    Guard.NotNull(image, nameof(image));
    Guard.NotNull(stream, nameof(stream));

    // FIX: the token was accepted but never honored; bail out before doing
    // any expensive quantization work if cancellation was already requested.
    cancellationToken.ThrowIfCancellationRequested();

    ImageMetadata metadata = image.Metadata;
    GifMetadata gifMetadata = metadata.GetGifMetadata();
    this.colorTableMode ??= gifMetadata.ColorTableMode;
    bool useGlobalTable = this.colorTableMode == GifColorTableMode.Global;

    // Quantize the image returning a palette.
    IndexedImageFrame<TPixel> quantized;
    using (IQuantizer<TPixel> frameQuantizer = this.quantizer.CreatePixelSpecificQuantizer<TPixel>(this.configuration))
    {
        if (useGlobalTable)
        {
            frameQuantizer.BuildPalette(this.pixelSamplingStrategy, image);
            quantized = frameQuantizer.QuantizeFrame(image.Frames.RootFrame, image.Bounds());
        }
        else
        {
            quantized = frameQuantizer.BuildPaletteAndQuantizeFrame(image.Frames.RootFrame, image.Bounds());
        }
    }

    // FIX: dispose the quantized frame even when writing throws;
    // previously an exception mid-write leaked the rented frame memory.
    try
    {
        // Get the number of bits.
        this.bitDepth = ColorNumerics.GetBitsNeededForColorDepth(quantized.Palette.Length);

        // Write the header.
        this.WriteHeader(stream);

        // Write the LSD.
        int index = this.GetTransparentIndex(quantized);
        this.WriteLogicalScreenDescriptor(metadata, image.Width, image.Height, index, useGlobalTable, stream);

        if (useGlobalTable)
        {
            this.WriteColorTable(quantized, stream);
        }

        // Write the comments.
        this.WriteComments(gifMetadata, stream);

        // Write application extension to allow additional frames.
        if (image.Frames.Count > 1)
        {
            this.WriteApplicationExtension(stream, gifMetadata.RepeatCount);
        }

        if (useGlobalTable)
        {
            this.EncodeGlobal(image, quantized, index, stream);
        }
        else
        {
            this.EncodeLocal(image, quantized, stream);
        }
    }
    finally
    {
        // Clean up.
        quantized.Dispose();
    }

    // TODO: Write extension etc
    stream.WriteByte(GifConstants.EndIntroducer);
}
/// <summary>
/// Gets the webp format specific metadata for the image.
/// </summary>
/// <param name="metadata">The metadata this method extends.</param>
/// <returns>The <see cref="WebpMetadata"/>.</returns>
public static WebpMetadata GetWebpMetadata(this ImageMetadata metadata)
{
    // Delegate to the generic per-format lookup keyed by the webp format instance.
    return metadata.GetFormatMetadata(WebpFormat.Instance);
}
/// <summary>
/// Attempts to read decimal-degree GPS coordinates from the EXIF GPS metadata
/// model, applying the sign convention (West longitude and South latitude are
/// negative).
/// </summary>
/// <param name="iMetadata">The image metadata wrapper to read from.</param>
/// <param name="latitude">The parsed latitude in decimal degrees, or null.</param>
/// <param name="longitude">The parsed longitude in decimal degrees, or null.</param>
/// <returns>True when both coordinates could be parsed.</returns>
private bool TryParseGPS(ImageMetadata iMetadata, out double? latitude, out double? longitude)
{
    MDM_EXIF_GPS gps = (MDM_EXIF_GPS)iMetadata[FREE_IMAGE_MDMODEL.FIMD_EXIF_GPS];

    double? lonValue = ToDecimalDegree(gps.Longitude);
    if (lonValue.HasValue && gps.LongitudeDirection != null && gps.LongitudeDirection == MetadataModel.LongitudeType.West)
        lonValue *= -1;

    double? latValue = ToDecimalDegree(gps.Latitude);
    // FIX: mirror the longitude branch — the null check must be on
    // LatitudeDirection, not on gps.Latitude, before comparing the direction.
    if (latValue.HasValue && gps.LatitudeDirection != null && gps.LatitudeDirection == MetadataModel.LatitudeType.South)
        latValue *= -1;

    latitude = latValue;
    longitude = lonValue;
    return latitude.HasValue && longitude.HasValue;
}
/// <summary>
/// Encodes the image to the specified stream from the <see cref="ImageFrame{TPixel}"/>.
/// Selects the TGA image type from the configured compression and bit depth,
/// builds the image-descriptor flags and file header, then writes either a
/// run-length-encoded or a raw image body.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="image">The <see cref="ImageFrame{TPixel}"/> to encode from.</param>
/// <param name="stream">The <see cref="Stream"/> to encode the image data to.</param>
public void Encode<TPixel>(Image<TPixel> image, Stream stream)
    where TPixel : unmanaged, IPixel<TPixel>
{
    Guard.NotNull(image, nameof(image));
    Guard.NotNull(stream, nameof(stream));
    this.configuration = image.GetConfiguration();
    // Encoder option wins; fall back to the bit depth stored in the metadata.
    ImageMetadata metadata = image.Metadata;
    TgaMetadata tgaMetadata = metadata.GetTgaMetadata();
    this.bitsPerPixel = this.bitsPerPixel ?? tgaMetadata.BitsPerPixel;

    TgaImageType imageType = this.compression is TgaCompression.RunLength ? TgaImageType.RleTrueColor : TgaImageType.TrueColor;
    if (this.bitsPerPixel == TgaBitsPerPixel.Pixel8)
    {
        // 8 bpp is encoded as grayscale rather than true color.
        imageType = this.compression is TgaCompression.RunLength ? TgaImageType.RleBlackAndWhite : TgaImageType.BlackAndWhite;
    }

    byte imageDescriptor = 0;
    if (this.compression is TgaCompression.RunLength)
    {
        // If compression is used, set bit 5 of the image descriptor to indicate a left top origin.
        imageDescriptor |= 0x20;
    }

    if (this.bitsPerPixel is TgaBitsPerPixel.Pixel32)
    {
        // Indicate, that 8 bit are used for the alpha channel.
        imageDescriptor |= 0x8;
    }

    if (this.bitsPerPixel is TgaBitsPerPixel.Pixel16)
    {
        // Indicate, that 1 bit is used for the alpha channel.
        imageDescriptor |= 0x1;
    }

    var fileHeader = new TgaFileHeader(
        idLength: 0,
        colorMapType: 0,
        imageType: imageType,
        cMapStart: 0,
        cMapLength: 0,
        cMapDepth: 0,
        xOffset: 0,
        yOffset: this.compression is TgaCompression.RunLength ? (short)image.Height : (short)0, // When run length encoding is used, the origin should be top left instead of the default bottom left.
        width: (short)image.Width,
        height: (short)image.Height,
        pixelDepth: (byte)this.bitsPerPixel.Value,
        imageDescriptor: imageDescriptor);

    Span<byte> buffer = stackalloc byte[TgaFileHeader.Size];
    fileHeader.WriteTo(buffer);
    stream.Write(buffer, 0, TgaFileHeader.Size);

    if (this.compression is TgaCompression.RunLength)
    {
        this.WriteRunLengthEncodedImage(stream, image.Frames.RootFrame);
    }
    else
    {
        this.WriteImage(stream, image.Frames.RootFrame);
    }

    stream.Flush();
}
/// <summary>
/// Gets the bmp format specific metadata for the image.
/// </summary>
/// <param name="metadata">The metadata this method extends.</param>
/// <returns>The <see cref="BmpMetadata"/>.</returns>
public static BmpMetadata GetBmpMetadata(this ImageMetadata metadata)
{
    // Delegate to the generic per-format lookup keyed by the bmp format instance.
    return metadata.GetFormatMetadata(BmpFormat.Instance);
}
/// <summary>
/// Reads the EXIF "Orientation" tag of a FreeImage bitmap.
/// </summary>
/// <param name="dib">The FreeImage bitmap handle to inspect.</param>
/// <returns>The first orientation value, or 1 when no orientation tag exists.</returns>
public static ushort GetRotateInfo(FIBITMAP dib)
{
    // 1 is the EXIF default ("no rotation") used whenever the tag is missing.
    const ushort defaultOrientation = 1;

    var metadata = new ImageMetadata(dib);
    MetadataModel exifMain = metadata[FREE_IMAGE_MDMODEL.FIMD_EXIF_MAIN];
    if (exifMain == null)
    {
        return defaultOrientation;
    }

    MetadataTag orientationTag = exifMain.GetTag("Orientation");
    if (orientationTag == null)
    {
        return defaultOrientation;
    }

    var values = (ushort[])orientationTag.Value;
    return (values != null && values.Length > 0) ? values[0] : defaultOrientation;
}
/// <summary>
/// Encodes the image to the specified stream from the <see cref="Image{TPixel}"/>.
/// Quantizes the root frame to a palette, then writes the GIF header,
/// descriptors, comments and frame data using either a global or local
/// color table.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="image">The <see cref="Image{TPixel}"/> to encode from.</param>
/// <param name="stream">The <see cref="Stream"/> to encode the image data to.</param>
public void Encode<TPixel>(Image<TPixel> image, Stream stream)
    where TPixel : struct, IPixel<TPixel>
{
    Guard.NotNull(image, nameof(image));
    Guard.NotNull(stream, nameof(stream));
    this.configuration = image.GetConfiguration();
    // Encoder option wins; fall back to the color table mode from metadata.
    ImageMetadata metadata = image.Metadata;
    this.gifMetadata = metadata.GetFormatMetadata(GifFormat.Instance);
    this.colorTableMode = this.colorTableMode ?? this.gifMetadata.ColorTableMode;
    bool useGlobalTable = this.colorTableMode == GifColorTableMode.Global;

    // Quantize the image returning a palette.
    IQuantizedFrame<TPixel> quantized = null;
    using (IFrameQuantizer<TPixel> frameQuantizer = this.quantizer.CreateFrameQuantizer<TPixel>(image.GetConfiguration()))
    {
        quantized = frameQuantizer.QuantizeFrame(image.Frames.RootFrame);
    }

    // Get the number of bits (GIF supports 1..8 bit palettes).
    this.bitDepth = ImageMaths.GetBitsNeededForColorDepth(quantized.Palette.Length).Clamp(1, 8);

    // Write the header.
    this.WriteHeader(stream);

    // Write the LSD.
    int index = this.GetTransparentIndex(quantized);
    this.WriteLogicalScreenDescriptor(metadata, image.Width, image.Height, index, useGlobalTable, stream);

    if (useGlobalTable)
    {
        this.WriteColorTable(quantized, stream);
    }

    // Write the comments.
    this.WriteComments(metadata, stream);

    // Write application extension to allow additional frames.
    if (image.Frames.Count > 1)
    {
        this.WriteApplicationExtension(stream, this.gifMetadata.RepeatCount);
    }

    if (useGlobalTable)
    {
        this.EncodeGlobal(image, quantized, index, stream);
    }
    else
    {
        this.EncodeLocal(image, quantized, stream);
    }

    // Clean up.
    quantized?.Dispose();

    // TODO: Write extension etc
    stream.WriteByte(GifConstants.EndIntroducer);
}
/// <summary>
/// Encodes the image to the specified stream from the <see cref="Image{TPixel}"/>.
/// Resolves the effective color type and bit depth (encoder options take
/// precedence over metadata), optionally quantizes to a palette, then writes
/// the PNG signature and all chunks.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="image">The <see cref="ImageFrame{TPixel}"/> to encode from.</param>
/// <param name="stream">The <see cref="Stream"/> to encode the image data to.</param>
public void Encode<TPixel>(Image<TPixel> image, Stream stream)
    where TPixel : struct, IPixel<TPixel>
{
    Guard.NotNull(image, nameof(image));
    Guard.NotNull(stream, nameof(stream));
    this.configuration = image.GetConfiguration();
    this.width = image.Width;
    this.height = image.Height;

    // Always take the encoder options over the metadata values.
    ImageMetadata metadata = image.Metadata;
    PngMetadata pngMetadata = metadata.GetFormatMetadata(PngFormat.Instance);
    this.gamma = this.gamma ?? pngMetadata.Gamma;
    this.writeGamma = this.gamma > 0;
    this.pngColorType = this.pngColorType ?? pngMetadata.ColorType;
    this.pngBitDepth = this.pngBitDepth ?? pngMetadata.BitDepth;
    this.use16Bit = this.pngBitDepth == PngBitDepth.Bit16;

    // Ensure we are not allowing impossible combinations.
    if (!ColorTypes.ContainsKey(this.pngColorType.Value))
    {
        throw new NotSupportedException("Color type is not supported or not valid.");
    }

    // The 8-byte PNG signature.
    stream.Write(PngConstants.HeaderBytes, 0, PngConstants.HeaderBytes.Length);

    QuantizedFrame<TPixel> quantized = null;
    if (this.pngColorType == PngColorType.Palette)
    {
        byte bits = (byte)this.pngBitDepth;
        if (!ColorTypes[this.pngColorType.Value].Contains(bits))
        {
            throw new NotSupportedException("Bit depth is not supported or not valid.");
        }

        // Use the metadata to determine what quantization depth to use if no quantizer has been set.
        if (this.quantizer is null)
        {
            this.quantizer = new WuQuantizer(ImageMaths.GetColorCountForBitDepth(bits));
        }

        // Create quantized frame returning the palette and set the bit depth.
        quantized = this.quantizer.CreateFrameQuantizer<TPixel>(image.GetConfiguration())
            .QuantizeFrame(image.Frames.RootFrame);
        byte quantizedBits = (byte)ImageMaths.GetBitsNeededForColorDepth(quantized.Palette.Length).Clamp(1, 8);
        bits = Math.Max(bits, quantizedBits);

        // Png only supports in four pixel depths: 1, 2, 4, and 8 bits when using the PLTE chunk
        // We check again for the bit depth as the bit depth of the color palette from a given quantizer might not
        // be within the acceptable range.
        if (bits == 3)
        {
            bits = 4;
        }
        else if (bits >= 5 && bits <= 7)
        {
            bits = 8;
        }

        this.bitDepth = bits;
    }
    else
    {
        this.bitDepth = (byte)this.pngBitDepth;
        if (!ColorTypes[this.pngColorType.Value].Contains(this.bitDepth))
        {
            throw new NotSupportedException("Bit depth is not supported or not valid.");
        }
    }

    this.bytesPerPixel = this.CalculateBytesPerPixel();

    var header = new PngHeader(
        width: image.Width,
        height: image.Height,
        bitDepth: this.bitDepth,
        colorType: this.pngColorType.Value,
        compressionMethod: 0, // None
        filterMethod: 0,
        interlaceMethod: 0); // TODO: Can't write interlaced yet.

    this.WriteHeaderChunk(stream, header);

    // Collect the indexed pixel data
    if (quantized != null)
    {
        this.WritePaletteChunk(stream, quantized);
    }

    if (pngMetadata.HasTrans)
    {
        this.WriteTransparencyChunk(stream, pngMetadata);
    }

    this.WritePhysicalChunk(stream, metadata);
    this.WriteGammaChunk(stream);
    this.WriteExifChunk(stream, metadata);
    this.WriteDataChunks(image.Frames.RootFrame, quantized, stream);
    this.WriteEndChunk(stream);
    stream.Flush();

    quantized?.Dispose();
}
/// <summary>
/// Encodes the image to the specified stream from the <see cref="ImageFrame{TPixel}"/>.
/// Computes row padding and physical resolution, builds the BMP file and info
/// headers (V3 or V4), writes them and then the pixel data.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="image">The <see cref="ImageFrame{TPixel}"/> to encode from.</param>
/// <param name="stream">The <see cref="Stream"/> to encode the image data to.</param>
public void Encode<TPixel>(Image<TPixel> image, Stream stream)
    where TPixel : struct, IPixel<TPixel>
{
    Guard.NotNull(image, nameof(image));
    Guard.NotNull(stream, nameof(stream));
    this.configuration = image.GetConfiguration();
    // Encoder option wins; fall back to the bit depth from the metadata.
    ImageMetadata metadata = image.Metadata;
    BmpMetadata bmpMetadata = metadata.GetBmpMetadata();
    this.bitsPerPixel = this.bitsPerPixel ?? bmpMetadata.BitsPerPixel;

    short bpp = (short)this.bitsPerPixel;
    // BMP rows are padded to 4-byte boundaries.
    int bytesPerLine = 4 * (((image.Width * bpp) + 31) / 32);
    this.padding = bytesPerLine - (int)(image.Width * (bpp / 8F));

    // Set Resolution. BMP stores pixels-per-meter, so convert from the
    // metadata's unit where needed.
    int hResolution = 0;
    int vResolution = 0;

    if (metadata.ResolutionUnits != PixelResolutionUnit.AspectRatio)
    {
        if (metadata.HorizontalResolution > 0 && metadata.VerticalResolution > 0)
        {
            switch (metadata.ResolutionUnits)
            {
                case PixelResolutionUnit.PixelsPerInch:
                    hResolution = (int)Math.Round(UnitConverter.InchToMeter(metadata.HorizontalResolution));
                    vResolution = (int)Math.Round(UnitConverter.InchToMeter(metadata.VerticalResolution));
                    break;

                case PixelResolutionUnit.PixelsPerCentimeter:
                    hResolution = (int)Math.Round(UnitConverter.CmToMeter(metadata.HorizontalResolution));
                    vResolution = (int)Math.Round(UnitConverter.CmToMeter(metadata.VerticalResolution));
                    break;

                case PixelResolutionUnit.PixelsPerMeter:
                    hResolution = (int)Math.Round(metadata.HorizontalResolution);
                    vResolution = (int)Math.Round(metadata.VerticalResolution);
                    break;
            }
        }
    }

    int infoHeaderSize = this.writeV4Header ? BmpInfoHeader.SizeV4 : BmpInfoHeader.SizeV3;
    var infoHeader = new BmpInfoHeader(
        headerSize: infoHeaderSize,
        height: image.Height,
        width: image.Width,
        bitsPerPixel: bpp,
        planes: 1,
        imageSize: image.Height * bytesPerLine,
        clrUsed: 0,
        clrImportant: 0,
        xPelsPerMeter: hResolution,
        yPelsPerMeter: vResolution);

    if (this.writeV4Header && this.bitsPerPixel == BmpBitsPerPixel.Pixel32)
    {
        // The V4 header carries explicit channel masks for 32bpp BGRA.
        infoHeader.AlphaMask = Rgba32AlphaMask;
        infoHeader.RedMask = Rgba32RedMask;
        infoHeader.GreenMask = Rgba32GreenMask;
        infoHeader.BlueMask = Rgba32BlueMask;
        infoHeader.Compression = BmpCompression.BitFields;
    }

    // Only 8bpp output needs a color palette between the headers and the pixel data.
    int colorPaletteSize = this.bitsPerPixel == BmpBitsPerPixel.Pixel8 ? ColorPaletteSize8Bit : 0;

    var fileHeader = new BmpFileHeader(
        type: BmpConstants.TypeMarkers.Bitmap,
        fileSize: BmpFileHeader.Size + infoHeaderSize + infoHeader.ImageSize,
        reserved: 0,
        offset: BmpFileHeader.Size + infoHeaderSize + colorPaletteSize);

    // The buffer is sized for the (larger) info header and reused for both headers.
    Span<byte> buffer = stackalloc byte[infoHeaderSize];
    fileHeader.WriteTo(buffer);

    stream.Write(buffer, 0, BmpFileHeader.Size);

    if (this.writeV4Header)
    {
        infoHeader.WriteV4Header(buffer);
    }
    else
    {
        infoHeader.WriteV3Header(buffer);
    }

    stream.Write(buffer, 0, infoHeaderSize);

    this.WriteImage(stream, image.Frames.RootFrame);

    stream.Flush();
}
/// <summary>
/// Unit test for the <c>ImageMetadata</c> wrapper: verifies the
/// HideEmptyModels behavior, the model count/list for a freshly allocated
/// bitmap, tag insertion, and enumerator semantics.
/// </summary>
public void ImageMetadata()
{
    ImageMetadata metadata;
    List<MetadataModel> modelList;
    // Build a comment tag to insert later.
    MetadataTag tag = new MetadataTag(FREE_IMAGE_MDMODEL.FIMD_COMMENTS);
    tag.Key = "KEY";
    tag.ID = 11;
    tag.Value = new double[] { 0d, 41d, -523d, -0.41d };

    // A minimal 1x1 bitmap carries no metadata yet.
    dib = FreeImage.Allocate(1, 1, 1, 1, 0, 0);
    Assert.IsFalse(dib.IsNull);

    // With hideEmptyModels == true an empty image exposes no models.
    metadata = new ImageMetadata(dib, true);
    Assert.AreEqual(0, metadata.Count);
    Assert.IsTrue(metadata.HideEmptyModels);
    Assert.IsEmpty(metadata.List);

    // With hideEmptyModels == false every known model is listed, even empty ones.
    metadata = new ImageMetadata(dib, false);
    Assert.AreEqual(FreeImage.FREE_IMAGE_MDMODELS.Length, metadata.Count);
    Assert.IsFalse(metadata.HideEmptyModels);
    Assert.IsNotEmpty(metadata.List);

    // After adding a tag, hiding empty models should leave exactly one model visible.
    metadata.HideEmptyModels = true;
    metadata.AddTag(tag);
    Assert.AreEqual(1, metadata.Count);
    Assert.IsNotEmpty(metadata.List);
    modelList = metadata.List;
    Assert.AreEqual(FREE_IMAGE_MDMODEL.FIMD_COMMENTS, modelList[0].Model);

    // The enumerator yields the single non-empty model and then terminates.
    System.Collections.IEnumerator enumerator = metadata.GetEnumerator();
    Assert.IsTrue(enumerator.MoveNext());
    Assert.IsNotNull((MetadataModel)enumerator.Current);
    Assert.IsFalse(enumerator.MoveNext());
    FreeImage.UnloadEx(ref dib);
}
/// <summary>
/// Decodes the stream to the image.
/// Skips the 8-byte PNG signature and then processes chunks in file order;
/// the image is materialized lazily on the first IDAT chunk.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="stream">The stream containing image data. </param>
/// <exception cref="ImageFormatException">
/// Thrown if the stream does not contain and end chunk.
/// </exception>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown if the image is larger than the maximum allowable size.
/// </exception>
/// <returns>The decoded image</returns>
public Image<TPixel> Decode<TPixel>(Stream stream)
    where TPixel : struct, IPixel<TPixel>
{
    var metadata = new ImageMetadata();
    PngMetadata pngMetadata = metadata.GetFormatMetadata(PngFormat.Instance);
    this.currentStream = stream;
    // Skip the fixed 8-byte PNG signature.
    this.currentStream.Skip(8);
    Image<TPixel> image = null;
    try
    {
        while (!this.isEndChunkReached && this.TryReadChunk(out PngChunk chunk))
        {
            try
            {
                switch (chunk.Type)
                {
                    case PngChunkType.Header:
                        this.ReadHeaderChunk(pngMetadata, chunk.Data.Array);
                        break;
                    case PngChunkType.Physical:
                        this.ReadPhysicalChunk(metadata, chunk.Data.GetSpan());
                        break;
                    case PngChunkType.Gamma:
                        this.ReadGammaChunk(pngMetadata, chunk.Data.GetSpan());
                        break;
                    case PngChunkType.Data:
                        // Allocate the image the first time pixel data is seen;
                        // the header chunk must have been read by then.
                        if (image is null)
                        {
                            this.InitializeImage(metadata, out image);
                        }

                        using (var deframeStream = new ZlibInflateStream(this.currentStream, this.ReadNextDataChunk))
                        {
                            deframeStream.AllocateNewBytes(chunk.Length);
                            this.ReadScanlines(deframeStream.CompressedStream, image.Frames.RootFrame, pngMetadata);
                        }

                        break;
                    case PngChunkType.Palette:
                        // Copy out of the rented chunk buffer; it is returned in the finally below.
                        var pal = new byte[chunk.Length];
                        Buffer.BlockCopy(chunk.Data.Array, 0, pal, 0, chunk.Length);
                        this.palette = pal;
                        break;
                    case PngChunkType.Transparency:
                        var alpha = new byte[chunk.Length];
                        Buffer.BlockCopy(chunk.Data.Array, 0, alpha, 0, chunk.Length);
                        this.paletteAlpha = alpha;
                        this.AssignTransparentMarkers(alpha, pngMetadata);
                        break;
                    case PngChunkType.Text:
                        this.ReadTextChunk(pngMetadata, chunk.Data.Array.AsSpan(0, chunk.Length));
                        break;
                    case PngChunkType.CompressedText:
                        this.ReadCompressedTextChunk(pngMetadata, chunk.Data.Array.AsSpan(0, chunk.Length));
                        break;
                    case PngChunkType.InternationalText:
                        this.ReadInternationalTextChunk(pngMetadata, chunk.Data.Array.AsSpan(0, chunk.Length));
                        break;
                    case PngChunkType.Exif:
                        if (!this.ignoreMetadata)
                        {
                            var exifData = new byte[chunk.Length];
                            Buffer.BlockCopy(chunk.Data.Array, 0, exifData, 0, chunk.Length);
                            metadata.ExifProfile = new ExifProfile(exifData);
                        }

                        break;
                    case PngChunkType.End:
                        this.isEndChunkReached = true;
                        break;
                }
            }
            finally
            {
                chunk.Data?.Dispose(); // Data is rented in ReadChunkData()
            }
        }

        if (image is null)
        {
            throw new ImageFormatException("PNG Image does not contain a data chunk");
        }

        return(image);
    }
    finally
    {
        this.scanline?.Dispose();
        this.previousScanline?.Dispose();
    }
}
/// <summary>
/// Synchronizes the profiles with the specified metadata.
/// </summary>
/// <param name="metadata">The metadata.</param>
internal void Sync(ImageMetadata metadata)
{
    // Push the metadata resolution values into the EXIF resolution tags.
    double horizontal = metadata.HorizontalResolution;
    double vertical = metadata.VerticalResolution;
    this.SyncResolution(ExifTag.XResolution, horizontal);
    this.SyncResolution(ExifTag.YResolution, vertical);
}
/// <inheritdoc/>
// Reads only enough of the PNG stream to report dimensions, pixel type and
// metadata: IDAT payloads are skipped rather than decoded.
public IImageInfo Identify(BufferedReadStream stream)
{
    var metadata = new ImageMetadata();
    PngMetadata pngMetadata = metadata.GetPngMetadata();
    this.currentStream = stream;
    // Skip the fixed 8-byte PNG signature.
    this.currentStream.Skip(8);
    try
    {
        while (!this.isEndChunkReached && this.TryReadChunk(out PngChunk chunk))
        {
            try
            {
                switch (chunk.Type)
                {
                    case PngChunkType.Header:
                        this.ReadHeaderChunk(pngMetadata, chunk.Data.Array);
                        break;
                    case PngChunkType.Physical:
                        this.ReadPhysicalChunk(metadata, chunk.Data.GetSpan());
                        break;
                    case PngChunkType.Gamma:
                        this.ReadGammaChunk(pngMetadata, chunk.Data.GetSpan());
                        break;
                    case PngChunkType.Data:
                        // Pixel data is irrelevant for identification.
                        this.SkipChunkDataAndCrc(chunk);
                        break;
                    case PngChunkType.Text:
                        this.ReadTextChunk(pngMetadata, chunk.Data.Array.AsSpan(0, chunk.Length));
                        break;
                    case PngChunkType.CompressedText:
                        this.ReadCompressedTextChunk(pngMetadata, chunk.Data.Array.AsSpan(0, chunk.Length));
                        break;
                    case PngChunkType.InternationalText:
                        this.ReadInternationalTextChunk(pngMetadata, chunk.Data.Array.AsSpan(0, chunk.Length));
                        break;
                    case PngChunkType.Exif:
                        if (!this.ignoreMetadata)
                        {
                            var exifData = new byte[chunk.Length];
                            Buffer.BlockCopy(chunk.Data.Array, 0, exifData, 0, chunk.Length);
                            metadata.ExifProfile = new ExifProfile(exifData);
                        }

                        break;
                    case PngChunkType.End:
                        this.isEndChunkReached = true;
                        break;
                }
            }
            finally
            {
                chunk.Data?.Dispose(); // Data is rented in ReadChunkData()
            }
        }
    }
    finally
    {
        this.scanline?.Dispose();
        this.previousScanline?.Dispose();
    }

    // Without a parsed IHDR chunk there is nothing to report.
    if (this.header.Width == 0 && this.header.Height == 0)
    {
        PngThrowHelper.ThrowNoHeader();
    }

    return(new ImageInfo(new PixelTypeInfo(this.CalculateBitsPerPixel()), this.header.Width, this.header.Height, metadata));
}
/// <summary>
/// Initializes a new instance of the <see cref="FlyCaptureDataFrame"/> class
/// with no bayer tiling, delegating to the three-argument constructor with
/// <see cref="BayerTileFormat.None"/>.
/// </summary>
/// <param name="image">The captured image data.</param>
/// <param name="metadata">The metadata associated with the captured frame.</param>
public FlyCaptureDataFrame(IplImage image, ImageMetadata metadata)
    : this(image, metadata, BayerTileFormat.None)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="Image{TPixel}"/> class
/// with the height and the width of the image.
/// </summary>
/// <param name="configuration">The configuration providing initialization code which allows extending the library.</param>
/// <param name="width">The width of the image in pixels.</param>
/// <param name="height">The height of the image in pixels.</param>
/// <param name="metadata">The images metadata.</param>
internal Image(Configuration configuration, int width, int height, ImageMetadata metadata)
    : base(configuration, PixelTypeInfo.Create<TPixel>(), metadata, width, height)
{
    // The root frame is allocated and filled with default(TPixel).
    this.Frames = new ImageFrameCollection<TPixel>(this, width, height, default(TPixel));
}
/// <summary>
/// Initializes a new instance of the <see cref="FlyCaptureDataFrame"/> class
/// from the captured image, its metadata and the bayer tile format.
/// </summary>
/// <param name="image">The captured image data.</param>
/// <param name="metadata">The metadata associated with the captured frame.</param>
/// <param name="bayerTileFormat">The bayer tile format of the captured frame.</param>
public FlyCaptureDataFrame(IplImage image, ImageMetadata metadata, BayerTileFormat bayerTileFormat)
{
    Image = image;
    Metadata = metadata;
    BayerTileFormat = bayerTileFormat;
}
/// <summary>
/// Gets the jpeg format specific metadata for the image.
/// </summary>
/// <param name="metadata">The metadata this method extends.</param>
/// <returns>The <see cref="JpegMetadata"/>.</returns>
public static JpegMetadata GetJpegMetadata(this ImageMetadata metadata)
{
    // Delegate to the generic per-format lookup keyed by the jpeg format instance.
    return metadata.GetFormatMetadata(JpegFormat.Instance);
}
/// <summary>
/// Reads metadata for a media file: probes the file to classify it as audio,
/// image or video and dispatches to the matching reader. On any failure the
/// error is recorded on the returned metadata rather than thrown.
/// </summary>
/// <param name="location">The path of the media file to read.</param>
/// <param name="options">Flags controlling how much metadata is read and stream lifetime.</param>
/// <param name="token">Token used to cancel probing and file-access waits.</param>
/// <param name="timeoutSeconds">Per-operation timeout in seconds.</param>
/// <returns>
/// The populated metadata, or an <c>UnknownMetadata</c> (possibly carrying
/// <c>MetadataReadError</c>) when the type could not be determined or reading failed.
/// </returns>
public static BaseMetadata read(String location, MetadataFactory.ReadOptions options, CancellationToken token, int timeoutSeconds)
{
    // Fallback result when the media type cannot be determined.
    BaseMetadata metadata = new UnknownMetadata(FileUtils.getPathWithoutFileName(location));
    metadata.Name = Path.GetFileName(location);
    Logger.Log.Info("Reading metadata for: " + location);
    int timeoutMs = timeoutSeconds * 1000;
    // Blocks until the file can be opened for reading, or the timeout/token fires.
    Stream data = FileUtils.waitForFileAccess(location, FileAccess.Read, timeoutMs, token);
    MediaProbe mediaProbe = new MediaProbe();
    try
    {
        mediaProbe.open(location, token);
        switch (mediaProbe.MediaType)
        {
            case MediaType.AUDIO_MEDIA:
            {
                metadata = new AudioMetadata(location, data);
                AudioFileMetadataReader reader = new AudioFileMetadataReader();
                reader.readMetadata(mediaProbe, data, options, metadata, token, timeoutSeconds);
                break;
            }
            case MediaType.IMAGE_MEDIA:
            {
                metadata = new ImageMetadata(location, data);
                ImageFileMetadataReader reader = new ImageFileMetadataReader();
                reader.readMetadata(mediaProbe, data, options, metadata, token, timeoutSeconds);
                break;
            }
            case MediaType.VIDEO_MEDIA:
            {
                metadata = new VideoMetadata(location, data);
                VideoFileMetadataReader reader = new VideoFileMetadataReader();
                reader.readMetadata(mediaProbe, data, options, metadata, token, timeoutSeconds);
                break;
            }
            default:
                break;
        }

        FileInfo info = new FileInfo(location);
        info.Refresh();
        if (info.Attributes.HasFlag(FileAttributes.ReadOnly))
        {
            metadata.IsReadOnly = true;
        }

        // Close the underlying stream unless the caller asked to keep it open;
        // presumably metadata.close() disposes the stream passed in above — TODO confirm.
        if (!options.HasFlag(MetadataFactory.ReadOptions.LEAVE_STREAM_OPENED_AFTER_READ))
        {
            metadata.close();
        }
    }
    catch (Exception e)
    {
        // Deliberate best-effort: record the failure on the result instead of throwing.
        metadata.MetadataReadError = e;
    }
    finally
    {
        mediaProbe.close();
        mediaProbe.Dispose();
    }

    return(metadata);
}
/// <summary>
/// Initializes a new instance of the <see cref="Image"/> class
/// with empty metadata.
/// </summary>
public Image() => Metadata = new ImageMetadata();
/// <inheritdoc cref="ComputeService.UpdateImageMetadataAsync" />
public static ImageMetadata UpdateImageMetadata(this ComputeService service, Identifier imageId, ImageMetadata metadata, bool overwrite = false) =>
    // Synchronous facade over the async API.
    service.UpdateImageMetadataAsync(imageId, metadata, overwrite).ForceSynchronous();