Example #1
        private NuiSource()
        {
            this.context = new Context("openni.xml");

            // Initialise generators
            this.imageGenerator = this.context.FindExistingNode(NodeType.Image) as ImageGenerator;
            this.depthGenerator = this.context.FindExistingNode(NodeType.Depth) as DepthGenerator;
            this.depthGenerator.GetAlternativeViewPointCap().SetViewPoint(this.imageGenerator);

            this.userGenerator = new UserGenerator(this.context);
            this.imageMetadata = new ImageMetaData();
            var imageMapMode = this.imageGenerator.GetMapOutputMode();

            this.depthMetadata = new DepthMetaData();
            var depthMapMode = this.depthGenerator.GetMapOutputMode();
            this.depthHistogram = new int[this.depthGenerator.GetDeviceMaxDepth()];

            // Initialise bitmaps
            this.cameraImage = new WriteableBitmap(
                (int)imageMapMode.nXRes, (int)imageMapMode.nYRes, 96, 96, PixelFormats.Rgb24, null);
            this.depthImage = new WriteableBitmap(
                (int)depthMapMode.nXRes, (int)depthMapMode.nYRes, 96, 96, PixelFormats.Rgb24, null);

            // Initialise user generator
            this.userGenerator.NewUser += this.UserGenerator_NewUser;
            this.userGenerator.LostUser += this.UserGenerator_LostUser;
            this.userGenerator.StartGenerating();
            this.ShowPlayerLabels = true;

            // Initialise background thread
            var cameraThread = new Thread(this.CameraThread) { IsBackground = true };
            cameraThread.Start();
        }
Example #2
 public override ImageData Read(Stream stream, ImageMetaData info)
 {
     var pixels = new byte[info.Width*info.Height*4];
     stream.Position = 0x18;
     int dst = 0;
     for (uint y = 0; y < info.Height; ++y)
     {
         while (dst < pixels.Length)
         {
             int a = stream.ReadByte(); // alpha value, or zero as RLE escape
             if (-1 == a)
                 throw new EndOfStreamException();
             else if (0 == a)
             {
                 int count = stream.ReadByte();
                 if (-1 == count)
                     throw new EndOfStreamException();
                 else if (0 == count)
                     break; // [0][0] marks the end of the current row
                 dst += count * 4; // [0][n] skips n fully transparent pixels
             }
             else
             {
                 stream.Read (pixels, dst, 3); // BGR triplet for one pixel
                 pixels[dst + 3] = (byte)a;    // lead byte is its alpha
                 dst += 4;
             }
         }
     }
     return ImageData.Create (info, PixelFormats.Bgra32, null, pixels);
 }
Example #3
        public override ImageData Read(Stream stream, ImageMetaData info)
        {
            var meta = info as MfgMetaData;
            if (null == meta)
                throw new ArgumentException ("MfgFormat.Read should be supplied with MfgMetaData", "info");

            stream.Position = 0x14;
            if ('_' != meta.Type)
                using (var file = new ArcView.Reader (stream))
                {
                    for (uint i = 0; i < meta.Height; ++i)
                    {
                        uint n = file.ReadUInt32();
                        file.BaseStream.Seek (n*8, SeekOrigin.Current);
                    }
                }
            byte[] pixels = new byte[meta.Stride*info.Height];
            if (pixels.Length != stream.Read (pixels, 0, pixels.Length))
                throw new InvalidFormatException ("Unexpected end of file");
            PixelFormat format;
            if (24 == meta.BPP)
                format = PixelFormats.Bgr24;
            else
                format = PixelFormats.Bgra32;
            return ImageData.Create (info, format, null, pixels, meta.Stride);
        }
Example #4
 public void GetMetaData(ImageMetaData imageMD)
 {
     using (IMarshaler marsh = imageMD.GetMarshaler(true))
     {
         OpenNIImporter.xnGetImageMetaData(this.InternalObject, marsh.Native);
     }
 }
Example #5
    protected override bool InitTexture(out Texture2D refText, out int xSize, out int ySize)
    {
        if(base.InitTexture(out refText, out xSize, out ySize)==false)
            return false;
        // make sure we have an image to work with
        if(m_context.ImageValid==false)
        {
            m_context.m_Logger.Log("Invalid image", NIEventLogger.Categories.Initialization, NIEventLogger.Sources.Image,NIEventLogger.VerboseLevel.Errors);
            return false;
        }
        if(m_factor<=0)
        {
            m_context.m_Logger.Log("Illegal factor", NIEventLogger.Categories.Initialization, NIEventLogger.Sources.Image, NIEventLogger.VerboseLevel.Errors);
            return false;
        }
        // get the resolution from the image
        MapOutputMode mom = m_context.Image.Image.MapOutputMode;
        // update the resolution by the factor
        ySize = mom.YRes / m_factor;
        xSize = mom.XRes / m_factor;
        // create the texture
        refText = new Texture2D(xSize, ySize, TextureFormat.RGB24, false);
        // validate the OpenNI version prerequisite before allocating metadata
        NIOpenNICheckVersion.Instance.ValidatePrerequisite();
        // create a new metadata object
        m_metaData = new ImageMetaData();
        return true;
	}
Example #6
        private NuiSource()
        {
            context = new Context("openni.xml");

            // Initialise generators
            imageGenerator = this.context.FindExistingNode(NodeType.Image) as ImageGenerator;
            depthGenerator = this.context.FindExistingNode(NodeType.Depth) as DepthGenerator;

            imageMetadata = new ImageMetaData();
            var imageMapMode = imageGenerator.GetMapOutputMode();

            depthMetadata = new DepthMetaData();
            var depthMapMode = depthGenerator.GetMapOutputMode();
            depthHistogram = new int[depthGenerator.GetDeviceMaxDepth()];

            // Initialise bitmaps
            cameraImage = new WriteableBitmap((int)imageMapMode.nXRes, (int)imageMapMode.nYRes, 96, 96, PixelFormats.Rgb24, null);
            depthImage = new WriteableBitmap((int)depthMapMode.nXRes, (int)depthMapMode.nYRes, 96, 96, PixelFormats.Rgb24, null);

            // Initialise background thread
            var cameraThread = new Thread(this.CameraThread) { IsBackground = true };
            cameraThread.Start();

            var userGenerator = new UserGenerator(context);
            userGenerator.NewUser += this.UserGenerator_NewUser;
            userGenerator.LostUser += this.UserGenerator_LostUser;
            userGenerator.StartGenerating(); // start producing user events
        }
Example #7
 public override ImageData Read(Stream stream, ImageMetaData info)
 {
     var header = PngHeader.Clone() as byte[];
     using (var data = new StreamRegion (stream, 8, stream.Length - 8, true))
     using (var png = new PrefixStream (header, data))
         return base.Read (png, info);
 }
Example #8
 public override ImageData Read(Stream stream, ImageMetaData info)
 {
     var gps = (GpsMetaData)info;
     stream.Position = 0x29;
     using (var input = OpenGpsStream (stream, gps.Compression, gps.UnpackedSize))
         return base.Read (input, info);
 }
Example #9
 // Use this for initialization
 void Start()
 {
     print("Graphics device: " + SystemInfo.graphicsDeviceVersion);
     Image = OpenNIContext.OpenNode(NodeType.Image) as ImageGenerator; //new ImageGenerator(OpenNIContext.Instance.context);
     imageMapTexture = new Texture2D(Image.MapOutputMode.XRes, Image.MapOutputMode.YRes, TextureFormat.RGB24, false);
     imageMetaData = new ImageMetaData();
 }
Example #10
        //Starts up necessary files to take data
        //Must run before TakeData()
        static unsafe void TakeImage(string filename)
        {
            //Sets the location of the XML configuration file
            string SAMPLE_XML_FILE = @"..\..\..\SamplesConfig.xml";

            //Declares object of ScriptNode and defines context
            ScriptNode scriptNode;
            Context context = Context.CreateFromXmlFile(SAMPLE_XML_FILE, out scriptNode);

            //Declares variables to take a color image
            ImageGenerator image = context.FindExistingNode(NodeType.Image) as ImageGenerator;
            ImageMetaData imageMD = new ImageMetaData();
            MapOutputMode mapMode = image.MapOutputMode;

            //Waits for the Kinect to update
            context.WaitOneUpdateAll(image);

            // Take current image
            image.GetMetaData(imageMD);
            byte[] RGB = new byte[imageMD.DataSize];

            //Sets width and height (currently unused)
            //int width = (int)mapMode.XRes;
            //int height = (int)mapMode.YRes;

            //Fetches the image map (pixel data is read below via ImageMapPtr)
            image.GetRGB24ImageMap();

            unsafe
            {
                //Declares bitmaps and rectangles to take the image data
                Bitmap g_TexMap = new Bitmap((int)mapMode.XRes, (int)mapMode.YRes, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
                Bitmap imageBitmap = new Bitmap((int)mapMode.XRes, (int)mapMode.YRes, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
                Rectangle rectIma = new Rectangle(0, 0, imageBitmap.Width, imageBitmap.Height);
                System.Drawing.Imaging.BitmapData dataIma = imageBitmap.LockBits(rectIma, ImageLockMode.WriteOnly, System.Drawing.Imaging.PixelFormat.Format24bppRgb);

                //Creates a pointer for the image
                byte* pIma = (byte*)image.ImageMapPtr;

                // set pixels
                for (int y = 0; y < imageMD.YRes; ++y)
                {
                    //Declares the pointer for the destination pointers
                    byte* pDest = (byte*)dataIma.Scan0.ToPointer() + y * dataIma.Stride;

                    //Puts the different colored pixels into the destination
                    for (int x = 0; x < imageMD.XRes; ++x)
                    {
                        *((pDest++)+2) = *(pIma++); // red   -> BGR offset 2
                        *(pDest++) = *(pIma++);     // green -> BGR offset 1
                        *((pDest++)-2) = *(pIma++); // blue  -> BGR offset 0
                    }
                }
                //Flips image: If Kinect is mounted rightside up, take this line out
                //imageBitmap.RotateFlip(RotateFlipType.Rotate180FlipX);
                //Saves the image as a jpg
                imageBitmap.Save(filename, System.Drawing.Imaging.ImageFormat.Jpeg);
            }
        }
Example #11
 public override ImageData Read(Stream file, ImageMetaData info)
 {
     using (var reader = new Reader (file, info.Width * info.Height))
     {
         reader.Unpack();
         return ImageData.Create (info, PixelFormats.Bgra32, null, reader.Data);
     }
 }
Example #12
 public override ImageData Read(Stream stream, ImageMetaData info)
 {
     using (var reader = new PdtReader (stream, (PdtMetaData)info))
     {
         reader.Unpack();
         return ImageData.Create (info, reader.Format, reader.Palette, reader.Data);
     }
 }
Example #13
 public override ImageData Read(Stream file, ImageMetaData info)
 {
     var decoder = new JpegBitmapDecoder (file,
         BitmapCreateOptions.None, BitmapCacheOption.OnLoad);
     var frame = decoder.Frames[0];
     frame.Freeze();
     return new ImageData (frame, info);
 }
Example #14
 public override ImageData Read(Stream stream, ImageMetaData info)
 {
     var meta = (AlbMetaData)info;
     stream.Position = 0x10;
     using (var alb = new AlbStream (stream, meta.UnpackedSize))
     using (var file = new SeekableStream (alb))
         return meta.Format.Read (file, meta.Info);
 }
Example #15
 public override ImageData Read(Stream stream, ImageMetaData info)
 {
     using (var reader = new RmskReader (stream, info))
     {
         reader.Unpack();
         return ImageData.CreateFlipped (info, PixelFormats.Gray8, null, reader.Data, (int)info.Width);
     }
 }
Example #16
 public override ImageData Read(Stream stream, ImageMetaData info)
 {
     using (var reader = new WebPDecoder (stream, (WebPMetaData)info))
     {
         reader.Decode();
         return ImageData.Create (info, reader.Format, null, reader.Output);
     }
 }
Example #17
 public override ImageData Read(Stream stream, ImageMetaData info)
 {
     using (var reader = new AgfReader (stream, (AgfMetaData)info))
     {
         reader.Unpack();
         return ImageData.Create (info, reader.Format, null, reader.Data);
     }
 }
Example #18
 public override ImageData Read(Stream stream, ImageMetaData info)
 {
     var meta = (PsdMetaData)info;
     using (var reader = new PsdReader (stream, meta))
     {
         reader.Unpack();
         return ImageData.Create (info, reader.Format, reader.Palette, reader.Data);
     }
 }
Example #19
 public override ImageData Read(Stream stream, ImageMetaData info)
 {
     var meta = (BsgMetaData)info;
     using (var reader = new BsgReader (stream, meta))
     {
         reader.Unpack();
         return ImageData.CreateFlipped (info, reader.Format, reader.Palette, reader.Data, reader.Stride);
     }
 }
Example #20
 public override ImageData Read(Stream file, ImageMetaData info)
 {
     var meta = (ElgMetaData)info;
     file.Position = meta.HeaderSize;
     using (var reader = new Reader (file, meta))
     {
         reader.Unpack();
         return ImageData.Create (meta, reader.Format, reader.Palette, reader.Data);
     }
 }
Example #21
 public override ImageData Read(Stream file, ImageMetaData info)
 {
     file.Position = 0x14;
     using (var reader = new Reader (file, info))
     {
         reader.Unpack();
         var palette = new BitmapPalette (reader.Palette);
         return ImageData.Create (info, PixelFormats.Indexed8, palette, reader.Data, (int)info.Width);
     }
 }
Example #22
        static async Task CopyImage(ImageMetaData imgData)
        {
            //File.Copy(imgData.OriginalPath, imgData.FullSizePath);
            //CreateImageCopy(imgData.OriginalPath, imgData.PreviewPath, previewImage);
            //CreateImageCopy(imgData.OriginalPath, imgData.ThumbnailPath, thumbnailImage);

            //await Task.WhenAll(
            //    CreateImageCopy(imgData.OriginalPath, imgData.PreviewPath, previewImage),
            //    CreateImageCopy(imgData.OriginalPath, imgData.ThumbnailPath, thumbnailImage));
        }
Example #23
        public override ImageData Read(Stream stream, ImageMetaData info)
        {
            var meta = info as EpaMetaData;
            if (null == meta)
                throw new ArgumentException ("EpaFormat.Read should be supplied with EpaMetaData", "info");

            stream.Position = 2 == meta.Mode ? 0x18 : 0x10;
            var reader = new Reader (stream, meta);
            reader.Unpack();
            return ImageData.Create (meta, reader.Format, reader.Palette, reader.Data);
        }
Example #24
        public override ImageData Read(Stream stream, ImageMetaData info)
        {
            var meta = info as S25MetaData;
            if (null == meta)
                throw new ArgumentException ("S25Format.Read should be supplied with S25MetaData", "info");

            using (var reader = new Reader (stream, meta))
            {
                var pixels = reader.Unpack();
                return ImageData.Create (info, PixelFormats.Bgra32, null, pixels);
            }
        }
Example #25
 public override ImageData Read(Stream stream, ImageMetaData info)
 {
     stream.Position = 8;
     var palette = ReadPalette (stream);
     var pixels = new byte[info.Width * info.Height];
     using (var reader = new LzssStream (stream, LzssMode.Decompress, true))
     {
         if (pixels.Length != reader.Read (pixels, 0, pixels.Length))
             throw new InvalidFormatException();
         return ImageData.CreateFlipped (info, PixelFormats.Indexed8, palette, pixels, (int)info.Width);
     }
 }
Example #26
        public override ImageData Read(Stream stream, ImageMetaData info)
        {
            var meta = info as Ed8MetaData;
            if (null == meta)
                throw new ArgumentException ("Ed8Format.Read should be supplied with Ed8MetaData", "info");

            stream.Position = 0x1a;
            var reader = new Reader (stream, meta);
            reader.Unpack();
            var palette = new BitmapPalette (reader.Palette);
            return ImageData.Create (info, PixelFormats.Indexed8, palette, reader.Data, (int)info.Width);
        }
Example #27
        public override ImageData Read(Stream stream, ImageMetaData info)
        {
            var meta = (GfbMetaData)info;
            BitmapPalette palette = null;
            if (8 == meta.BPP && meta.DataOffset != 0x40)
            {
                stream.Position = 0x40;
                palette = ReadPalette (stream, meta.DataOffset - 0x40);
            }

            stream.Position = meta.DataOffset;
            byte[] pixels = new byte[meta.UnpackedSize];
            if (0 != meta.PackedSize)
            {
                using (var lzss = new LzssStream (stream, LzssMode.Decompress, true))
                    lzss.Read (pixels, 0, pixels.Length);
            }
            else
                stream.Read (pixels, 0, pixels.Length);

            PixelFormat format;
            switch (meta.BPP)
            {
            case 32:
                if (HasAlphaChannel (pixels))
                    format = PixelFormats.Bgra32;
                else
                    format = PixelFormats.Bgr32;
                break;

            case 24:
                format = PixelFormats.Bgr24;
                break;

            case 16:
                format = PixelFormats.Bgr565;
                break;

            case 8:
                if (null != palette)
                    format = PixelFormats.Indexed8;
                else
                    format = PixelFormats.Gray8;
                break;

            default:
                throw new NotSupportedException ("Unsupported GFB color depth");
            }
            int stride = pixels.Length / (int)info.Height;

            return ImageData.CreateFlipped (info, format, palette, pixels, stride);
        }
Example #28
        public override ImageData Read(Stream stream, ImageMetaData info)
        {
            var meta = info as DgcMetaData;
            if (null == meta)
                throw new ArgumentException ("DgcFormat.Read should be supplied with DgcMetaData", "info");

            stream.Position = 12;
            using (var reader = new Reader (stream, meta))
            {
                reader.Unpack();
                return ImageData.Create (info, reader.Format, null, reader.Data);
            }
        }
Example #29
        public override ImageData Read(Stream stream, ImageMetaData info)
        {
            var meta = info as PicMetaData;
            if (null == meta)
                throw new ArgumentException ("PicFormat.Read should be supplied with PicMetaData", "info");

            stream.Position = 0x12;
            using (var reader = new Reader (stream, meta))
            {
                reader.Unpack();
                return ImageData.Create (meta, PixelFormats.Bgr24, null, reader.Data, (int)meta.Width*3);
            }
        }
Example #30
 public override ImageData Read(Stream stream, ImageMetaData info)
 {
     var meta = info as EgnMetaData;
     if (null == meta)
         throw new ArgumentException ("EgnFormat.Read should be supplied with EgnMetaData", "info");
     stream.Position = meta.DataOffset;
     using (var input = new ArcView.Reader (stream))
     {
         var reader = new Reader (input, meta.UnpackedSize, meta.Mode, meta.Flag);
         reader.Unpack();
         using (var bmp = new MemoryStream (reader.Data))
             return base.Read (bmp, info);
     }
 }
Example #31
        public override ImageData Read(IBinaryStream file, ImageMetaData info)
        {
            var reader = new GrcReader(file, (GrcMetaData)info);

            return reader.Unpack();
        }
Example #32
 /// <summary>
 /// Initializes a new instance of the <see cref="Image{TPixel}"/> class
 /// with the height and the width of the image.
 /// </summary>
 /// <param name="configuration">
 /// The configuration providing initialization code which allows extending the library.
 /// </param>
 /// <param name="width">The width of the image in pixels.</param>
 /// <param name="height">The height of the image in pixels.</param>
 /// <param name="backgroundColor">The color to initialize the pixels with.</param>
 /// <param name="metadata">The images metadata.</param>
 internal Image(Configuration configuration, int width, int height, TPixel backgroundColor, ImageMetaData metadata)
 {
     this.configuration = configuration ?? Configuration.Default;
     this.PixelType     = new PixelTypeInfo(Unsafe.SizeOf<TPixel>() * 8);
     this.MetaData      = metadata ?? new ImageMetaData();
     this.frames        = new ImageFrameCollection<TPixel>(this, width, height, backgroundColor);
 }
Example #33
 public ChrFrameDecoder(IBinaryStream input, ImageMetaData info) : base(input, info)
 {
 }
Example #34
        private async Task SendResponse(ImageContext imageContext, string key, Stream stream, ImageMetaData metadata)
        {
            imageContext.ComprehendRequestHeaders(metadata.LastWriteTimeUtc, stream.Length);

            switch (imageContext.GetPreconditionState())
            {
            case ImageContext.PreconditionState.Unspecified:
            case ImageContext.PreconditionState.ShouldProcess:
                if (imageContext.IsHeadRequest())
                {
                    await imageContext.SendStatusAsync(ResponseConstants.Status200Ok, metadata).ConfigureAwait(false);
                }

                this.logger.LogImageServed(imageContext.GetDisplayUrl(), key);
                await imageContext.SendAsync(stream, metadata).ConfigureAwait(false);

                break;

            case ImageContext.PreconditionState.NotModified:
                this.logger.LogImageNotModified(imageContext.GetDisplayUrl());
                await imageContext.SendStatusAsync(ResponseConstants.Status304NotModified, metadata).ConfigureAwait(false);

                break;

            case ImageContext.PreconditionState.PreconditionFailed:
                this.logger.LogImagePreconditionFailed(imageContext.GetDisplayUrl());
                await imageContext.SendStatusAsync(ResponseConstants.Status412PreconditionFailed, metadata).ConfigureAwait(false);

                break;

            default:
                var exception = new NotImplementedException(imageContext.GetPreconditionState().ToString());
                Debug.Fail(exception.ToString());
                throw exception;
            }
        }
Example #35
        /// <summary>
        /// Reads the Data file
        /// </summary>
        /// <param name="analysefilecontent">analyse file content Object</param>
        /// <param name="filepath">Path of file </param>
        /// <param name="analysefileheaderobject">Header Object</param>
        protected override void Initialise(AnalyseFileContent analysefilecontent, string filepath, AnalyseFileHeaderObject analysefileheaderobject)
        {
            this.filePath = filepath;
            this.analyseFileHeaderObject = analysefileheaderobject;

            // 1) Make sure the path file exists
            if (!File.Exists(this.filePath))
            {
                string header = "Missing File";
                string error  = string.Format("{0} is missing", this.filePath);
                MessageBox.Show(error, header);
                return;
            }

            // 2) ReadCalibrationFile (Calc MinMass, MaxMass, StepSize and MassSpecDataPoints)
            this.ReadCalibrationFile();

            // 3) Set Binsize
            this.binsize = 1;

            var  fileinfo    = new FileInfo(this.filePath);
            long imgFileSize = fileinfo.Length;

            string appPath = Application.StartupPath;

            appPath += "\\ApplicationSettings.xml";

            var filesizelimit = this.GetPathFileSizeLimit(appPath);
            var minMassDp     = 0;
            var maxMassDp     = (int)(this.massspecdatapoints - 1);

            // Only offer binning when the file size differs from the configured limit
            // (intended for files larger than ~100 MB)
            if (imgFileSize != filesizelimit)
            {
                // Open up BinNumberWindow
                var binNumberWindow = new BinNumberWindow(this.analyseFileHeaderObject.NumberOfXPoints, this.analyseFileHeaderObject.NumberOfYPoints, this.MassCal);
                if (binNumberWindow.ShowDialog() == true)
                {
                    this.binsize = binNumberWindow.BinSize;
                    minMassDp    = binNumberWindow.MinMassDP;
                    maxMassDp    = binNumberWindow.MaxMassDP;
                }
            }

            this.massstepsize = 1;

            // 4) Fill out the metadata
            var metaData = new ImageMetaData();

            try
            {
                metaData.Add("Sample Name", typeof(string), this.analyseFileHeaderObject.Name, false);
                metaData.Add("Mass Range", typeof(string), this.massRangeName, false);
                metaData.Add("Mass Step Size", typeof(string), 1, false);
                metaData.Add("X1 (mm)", typeof(string), (Math.Abs(this.analyseFileHeaderObject.X1 - (int)this.analyseFileHeaderObject.X1) < Epsilon) ? this.analyseFileHeaderObject.X1.ToString("0.0") : this.analyseFileHeaderObject.X1.ToString(CultureInfo.InvariantCulture), false);
                metaData.Add("Y1 (mm)", typeof(string), (Math.Abs(this.analyseFileHeaderObject.Y1 - (int)this.analyseFileHeaderObject.Y1) < Epsilon) ? this.analyseFileHeaderObject.Y1.ToString("0.0") : this.analyseFileHeaderObject.Y1.ToString(CultureInfo.InvariantCulture), false);
                metaData.Add("X2 (mm)", typeof(string), (Math.Abs(this.analyseFileHeaderObject.X2 - (int)this.analyseFileHeaderObject.X2) < Epsilon) ? this.analyseFileHeaderObject.X2.ToString("0.0") : this.analyseFileHeaderObject.X2.ToString(CultureInfo.InvariantCulture), false);
                metaData.Add("Y2 (mm)", typeof(string), (Math.Abs(this.analyseFileHeaderObject.Y2 - (int)this.analyseFileHeaderObject.Y2) < Epsilon) ? this.analyseFileHeaderObject.Y2.ToString("0.0") : this.analyseFileHeaderObject.Y2.ToString(CultureInfo.InvariantCulture), false);
                metaData.Add("Data Points in X", typeof(string), this.analyseFileHeaderObject.NumberOfXPoints.ToString(CultureInfo.InvariantCulture), false);
                metaData.Add("Data Points in Y", typeof(string), this.analyseFileHeaderObject.NumberOfYPoints.ToString(CultureInfo.InvariantCulture), false);
                metaData.Add("Point Width (mm)", typeof(string), Math.Round(this.analyseFileHeaderObject.Dx, 2).ToString(CultureInfo.InvariantCulture), false);
                metaData.Add("Point Height (mm)", typeof(string), Math.Round(this.analyseFileHeaderObject.Dy, 2).ToString(CultureInfo.InvariantCulture), false);
                metaData.Add("Bin Size", typeof(string), this.binsize.ToString(CultureInfo.InvariantCulture), false);
            }
            catch (Exception e)
            {
                Util.ReportException(e);
            }

            // 5) Declare Median List
            var valuesForMedian = new List<float>();

            this.minIntensityValue = 0;
            this.maxIntensityValue = 0;

            var minIntList = new float[maxMassDp - minMassDp + 1];
            var maxIntList = new float[maxMassDp - minMassDp + 1];

            // 6) Declare and Initialise the dataTic
            var dataTic = new float[analysefileheaderobject.NumberOfXPoints][];

            for (int i = 0; i < dataTic.Length; i++)
            {
                dataTic[i] = new float[analysefileheaderobject.NumberOfYPoints];
            }

            // 7) Declare the mass spectrum data list
            this.massspecdatalist = new List<float[][]>();

            // 8) Declare and Initialise the dataList
            var data = new float[analysefileheaderobject.NumberOfXPoints][];

            for (int pointOnXAxis = 0; pointOnXAxis < analysefileheaderobject.NumberOfXPoints; pointOnXAxis++)
            {
                data[pointOnXAxis] = new float[analysefileheaderobject.NumberOfYPoints];
            }

            this.massspecdatalist.Add(data);

            // 9) Declare and populate the image data list - Fill it with some default data - this will be over written later
            Cursor.Current = Cursors.WaitCursor;
            AppContext.ProgressStart("Generating Empty Imagelist...");
            var imageDataList = new List<ImageData>();

            try
            {
                for (int massSpecDataPt = 0; massSpecDataPt < ((maxMassDp - minMassDp + 1) / this.binsize); massSpecDataPt++)
                {
                    var specData = new float[analysefileheaderobject.NumberOfXPoints][];
                    for (int i = 0; i < specData.Length; i++)
                    {
                        specData[i] = new float[analysefileheaderobject.NumberOfYPoints];
                    }

                    // ok, now create the imageData and add to list...
                    var imageData = new ImageData(
                        analysefilecontent.Document,
                        specData,
                        this.analyseFileHeaderObject.Name,
                        metaData,
                        0,
                        10000,
                        1000,
                        1000,
                        this.analyseFileHeaderObject.Dx,
                        this.analyseFileHeaderObject.Dy,
                        this.masscal,
                        this.analyseFileHeaderObject.ExperimentType);

                    // Add the imageData to the list
                    imageDataList.Add(imageData);
                    AppContext.ProgressSetValue(100.0 * massSpecDataPt / ((maxMassDp - minMassDp + 1) / this.binsize));
                }
            }
            catch (Exception e)
            {
                Util.ReportException(e);
            }
            finally
            {
                AppContext.ProgressClear();
                Cursor.Current = Cursors.Default;
            }

            // 10) Read binary file
            Cursor.Current = Cursors.WaitCursor;
            AppContext.ProgressStart("Populating ImageList...");

            using (var imgReader = new BinaryReader(File.Open(this.filePath, FileMode.Open)))
            {
                int xp = analysefileheaderobject.NumberOfXPoints;
                int yp = analysefileheaderobject.NumberOfYPoints;
                int dp = (maxMassDp - minMassDp + 1) / this.binsize;
                int dd = this.binsize;
                int dl = minMassDp;
                int xf = analysefileheaderobject.NumberOfXPoints;
                int df = this.masscal.Length;
                int ps = this.analyseFileHeaderObject.DataByteSize;
                int y;
                var meanSumList = new double[dp];

                for (int i = 0; i < dp; i++)
                {
                    minIntList[i]  = float.MaxValue;
                    maxIntList[i]  = float.MinValue;
                    meanSumList[i] = 0.0;
                }

                for (y = 0; y < yp; y++)
                {
                    // for the progress bar
                    AppContext.ProgressSetValue(y * 100 / yp);

                    // refresh counter here;
                    for (int x = 0; x < xp; x++)
                    {
                        long fp = ((((y * xf) + x) * df) + dl) * ps;
                        imgReader.BaseStream.Position = fp;

                        for (int d = 0; d < dp; d++)
                        {
                            float average = 0;

                            for (int di = 0; di < dd; di++)
                            {
                                float intensity;
                                switch (this.analyseFileHeaderObject.DataType)
                                {
                                case 4:
                                    intensity = imgReader.ReadInt16();
                                    break;

                                case 8:
                                case 16:
                                    intensity = imgReader.ReadInt32();
                                    break;

                                case 64:
                                    intensity = imgReader.ReadInt64();
                                    break;

                                default:
                                    intensity = imgReader.ReadInt32();
                                    break;
                                }

                                average += intensity;
                            }

                            average = average / dd;
                            imageDataList[d].Data[x][y] = average;
                            dataTic[x][y] += average;
                            if (average < minIntList[d])
                            {
                                minIntList[d] = average;
                            }

                            if (average > maxIntList[d])
                            {
                                maxIntList[d] = average;
                            }

                            // mean and median calculation... sum up the value to calculate the mean
                            meanSumList[d] += average;
                        }
                    }
                }

                // and cleanup
                imgReader.Close();
            }

            AppContext.ProgressClear();
            Cursor.Current = Cursors.Default;

            // 11) create new masscal array
            var newmasscal = new float[(maxMassDp - minMassDp + 1) / this.binsize];

            for (int i = 0; i < ((maxMassDp - minMassDp + 1) / this.binsize); i++)
            {
                newmasscal[i] = this.masscal[minMassDp + (i * this.binsize)];
            }

            this.masscal = newmasscal;

            // 12) Mean/Median for Tic
            double meanSum = 0.0;

            int numDataPoint = dataTic.Length;

            for (int y = 0; y < analysefileheaderobject.NumberOfYPoints; y++)
            {
                for (int x = 0; x < analysefileheaderobject.NumberOfXPoints; x++)
                {
                    float intensity = dataTic[x][y];

                    meanSum += intensity;
                    valuesForMedian.Add(intensity);

                    if (intensity < this.minIntensityValue)
                    {
                        this.minIntensityValue = intensity;
                    }

                    if (intensity > this.maxIntensityValue)
                    {
                        this.maxIntensityValue = intensity;
                    }
                }
            }

            if (numDataPoint != 0)
            {
                this.meanValue = (float)(meanSum / numDataPoint);
            }

            valuesForMedian.Sort();
            this.medianValue = ((valuesForMedian.Count % 2) == 0)
                                   ? (valuesForMedian[(valuesForMedian.Count / 2) - 1]
                                      + valuesForMedian[valuesForMedian.Count / 2]) / 2.0f
                                   : valuesForMedian[valuesForMedian.Count / 2];

            // 13) Fill out imageTic
            var imageTic = new ImageData(
                analysefilecontent.Document,
                dataTic,
                this.analyseFileHeaderObject.Name,
                metaData,
                this.minIntensityValue,
                this.maxIntensityValue,
                this.meanValue,
                this.medianValue,
                this.analyseFileHeaderObject.Dx,
                this.analyseFileHeaderObject.Dy,
                this.masscal,
                this.analyseFileHeaderObject.ExperimentType);

            // 14) Fill out ImageSpectrumData and add to analysefilecontent
            analysefilecontent.Add(
                new ImageSpectrumData(
                    analysefilecontent.Document,
                    this.analyseFileHeaderObject.Name,
                    metaData,
                    this.masscal,
                    imageDataList,
                    this.analyseFileHeaderObject.ExperimentType)
            {
                ImageTic = imageTic
            });
        }
Example #36
        /// <summary>
        /// Decodes the stream to the image.
        /// </summary>
        /// <typeparam name="TPixel">The pixel format.</typeparam>
        /// <param name="stream">The stream containing image data. </param>
        /// <exception cref="ImageFormatException">
        /// Thrown if the stream does not contain an end chunk.
        /// </exception>
        /// <exception cref="ArgumentOutOfRangeException">
        /// Thrown if the image is larger than the maximum allowable size.
        /// </exception>
        /// <returns>The decoded image</returns>
        public Image<TPixel> Decode<TPixel>(Stream stream)
            where TPixel : struct, IPixel<TPixel>
        {
            var metadata = new ImageMetaData();

            this.currentStream = stream;
            this.currentStream.Skip(8);
            Image<TPixel> image = null;

            try
            {
                using (var deframeStream = new ZlibInflateStream(this.currentStream))
                {
                    PngChunk currentChunk;
                    while (!this.isEndChunkReached && (currentChunk = this.ReadChunk()) != null)
                    {
                        try
                        {
                            switch (currentChunk.Type)
                            {
                            case PngChunkTypes.Header:
                                this.ReadHeaderChunk(currentChunk.Data);
                                this.ValidateHeader();
                                break;

                            case PngChunkTypes.Physical:
                                this.ReadPhysicalChunk(metadata, currentChunk.Data);
                                break;

                            case PngChunkTypes.Data:
                                if (image == null)
                                {
                                    this.InitializeImage(metadata, out image);
                                }

                                deframeStream.AllocateNewBytes(currentChunk.Length);
                                this.ReadScanlines(deframeStream.CompressedStream, image.Frames.RootFrame);
                                stream.Read(this.crcBuffer, 0, 4);
                                break;

                            case PngChunkTypes.Palette:
                                byte[] pal = new byte[currentChunk.Length];
                                Buffer.BlockCopy(currentChunk.Data, 0, pal, 0, currentChunk.Length);
                                this.palette = pal;
                                break;

                            case PngChunkTypes.PaletteAlpha:
                                byte[] alpha = new byte[currentChunk.Length];
                                Buffer.BlockCopy(currentChunk.Data, 0, alpha, 0, currentChunk.Length);
                                this.paletteAlpha = alpha;
                                this.AssignTransparentMarkers(alpha);
                                break;

                            case PngChunkTypes.Text:
                                this.ReadTextChunk(metadata, currentChunk.Data, currentChunk.Length);
                                break;

                            case PngChunkTypes.End:
                                this.isEndChunkReached = true;
                                break;
                            }
                        }
                        finally
                        {
                            // Data is rented in ReadChunkData()
                            if (currentChunk.Data != null)
                            {
                                ArrayPool<byte>.Shared.Return(currentChunk.Data);
                            }
                        }
                    }
                }

                return image;
            }
            finally
            {
                this.scanline?.Dispose();
                this.previousScanline?.Dispose();
            }
        }
Example #37
 public override ImageData Read(IBinaryStream stream, ImageMetaData info)
 {
     using (var bmp = OpenAsBitmap(stream))
          return base.Read(bmp, info);
 }
Example #38
 public override ImageData Read(Stream stream, ImageMetaData info)
 {
     stream.Seek(8, SeekOrigin.Current);
     using (var zstream = new ZLibStream(stream, CompressionMode.Decompress, true))
          return base.Read(zstream, info);
 }
Example #39
 /// <summary>
 /// Synchronizes the profiles with the specified meta data.
 /// </summary>
 /// <param name="metaData">The meta data.</param>
 internal void Sync(ImageMetaData metaData)
 {
     this.SyncResolution(ExifTag.XResolution, metaData.HorizontalResolution);
     this.SyncResolution(ExifTag.YResolution, metaData.VerticalResolution);
 }
Example #40
 public override ImageData Read(IBinaryStream file, ImageMetaData info)
 {
     using (var input = UnpackedStream(file))
          return Bmp.Read(input, info);
 }
Example #41
 public ImgReader(IBinaryStream input, ImageMetaData info) : base(input, info)
 {
 }
Example #42
 public GgsReader(IBinaryStream input, ImageMetaData info)
 {
     m_input  = input;
     m_output = new byte[3 * info.Width * info.Height];
 }
Example #43
 public BitmapDecoder(byte[] pixels, ImageMetaData info, PixelFormat format, BitmapPalette palette)
 {
     Info  = info;
     Image = ImageData.Create(info, format, palette, pixels);
 }
Example #44
        /// <summary>
        /// Decodes the stream to the image.
        /// </summary>
        /// <param name="stream">The stream containing image data. </param>
        /// <returns>The decoded image</returns>
        public Image<TPixel> Decode(Stream stream)
        {
            try
            {
                this.metaData = new ImageMetaData();

                this.currentStream = stream;

                // Skip the identifier
                this.currentStream.Skip(6);
                this.ReadLogicalScreenDescriptor();

                if (this.logicalScreenDescriptor.GlobalColorTableFlag)
                {
                    this.globalColorTableLength = this.logicalScreenDescriptor.GlobalColorTableSize * 3;
                    this.globalColorTable       = Buffer<byte>.CreateClean(this.globalColorTableLength);

                    // Read the global color table from the stream
                    stream.Read(this.globalColorTable.Array, 0, this.globalColorTableLength);
                }

                // Loop through the respective GIF parts and read the data.
                int nextFlag = stream.ReadByte();
                while (nextFlag != GifConstants.Terminator)
                {
                    if (nextFlag == GifConstants.ImageLabel)
                    {
                        if (this.previousFrame != null && this.DecodingMode == FrameDecodingMode.First)
                        {
                            break;
                        }

                        this.ReadFrame();
                    }
                    else if (nextFlag == GifConstants.ExtensionIntroducer)
                    {
                        int label = stream.ReadByte();
                        switch (label)
                        {
                        case GifConstants.GraphicControlLabel:
                            this.ReadGraphicalControlExtension();
                            break;

                        case GifConstants.CommentLabel:
                            this.ReadComments();
                            break;

                        case GifConstants.ApplicationExtensionLabel:
                            this.Skip(12);     // No need to read.
                            break;

                        case GifConstants.PlainTextLabel:
                            this.Skip(13);     // Not supported by any known decoder.
                            break;
                        }
                    }
                    else if (nextFlag == GifConstants.EndIntroducer)
                    {
                        break;
                    }

                    nextFlag = stream.ReadByte();
                    if (nextFlag == -1)
                    {
                        break;
                    }
                }
            }
            finally
            {
                this.globalColorTable?.Dispose();
            }

            return this.image;
        }
Example #45
 public void Setup()
 {
     MetaData = new ImageMetaData()
     {
         Image =
         {
             ExposureStart  = new DateTime(2019, 1, 1, 12, 2, 3, 333),
             ExposureNumber =                 5,
             ImageType      = "LIGHT",
             Binning        = "1x1",
             ExposureTime   =               300,
             RecordedRMS    = new NINA.Model.RMS()
             {
                 Total      =                10,
             }
         },
         Camera =
         {
             Name            = "TestCamera",
             BinX            =            2,
             BinY            =            3,
             PixelSize       =          3.9,
             Temperature     =          -10,
             Gain            =          139,
             Offset          =           10,
             ElectronsPerADU =          3.1,
             SetPoint        = -11
         },
         Telescope =
         {
             Name        = "TestTelescope",
             FocalLength =                                500,
             FocalRatio  =                                  4,
             Coordinates = new Coordinates(Angle.ByDegree(10),Angle.ByDegree(1), Epoch.J2000)
         },
         Focuser =
         {
             Name        = "TestFocuser",
             Position    =           100,
             StepSize    =            20,
             Temperature = 10
         },
         Rotator =
         {
             Name     = "TestRotator",
             Position =           100,
             StepSize =            20,
         },
         FilterWheel =
         {
             Name   = "TestFilterWheel",
             Filter = "RED"
         },
         Target =
         {
             Name        = "M81",
             Coordinates = new Coordinates(Angle.ByDegree(11),Angle.ByDegree(2), Epoch.J2000)
         },
         Observer =
         {
             Latitude  = 10,
             Longitude = 20,
             Elevation = 100
         }
     };
     MetaData.Image.RecordedRMS.SetScale(5);
 }
Example #46
        ImageData TryBlendImage(string base_name, GsaReader overlay, ImageMetaData overlay_info)
        {
            int ovl_x      = overlay_info.OffsetX;
            int ovl_y      = overlay_info.OffsetY;
            int ovl_width  = (int)overlay_info.Width;
            int ovl_height = (int)overlay_info.Height;

            if (ovl_x < 0)
            {
                ovl_width += ovl_x;
                ovl_x      = 0;
            }
            if (ovl_y < 0)
            {
                ovl_height += ovl_y;
                ovl_y       = 0;
            }
            using (var input = VFS.OpenBinaryStream(base_name))
            {
                var base_info = ReadMetaData(input) as GsaMetaData;
                if (null == base_info)
                {
                    return null;
                }
                int base_width  = (int)base_info.Width;
                int base_height = (int)base_info.Height;
                if (checked (ovl_x + ovl_width) > base_width)
                {
                    ovl_width = base_width - ovl_x;
                }
                if (checked (ovl_y + ovl_height) > base_height)
                {
                    ovl_height = base_height - ovl_y;
                }
                if (ovl_height <= 0 || ovl_width <= 0)
                {
                    return null;
                }

                input.Position = 0;
                var reader      = new GsaReader(input, base_info);
                var base_pixels = reader.Unpack();

                int src_pixel_size = overlay.PixelSize;
                int dst_pixel_size = reader.PixelSize;
                int dst            = ovl_y * reader.Stride + ovl_x * dst_pixel_size;
                int src            = 0;
                for (int y = 0; y < ovl_height; ++y)
                {
                    int src_pixel = src;
                    int dst_pixel = dst;
                    for (int x = 0; x < ovl_width; ++x)
                    {
                        int src_alpha = overlay.Data[src_pixel + 3];
                        if (src_alpha > 0)
                        {
                            if (0xFF == src_alpha)
                            {
                                Buffer.BlockCopy(overlay.Data, src_pixel, base_pixels, dst_pixel, dst_pixel_size);
                            }
                            else // assume destination has no alpha channel
                            {
                                base_pixels[dst_pixel + 0] = (byte)((overlay.Data[src_pixel + 0] * src_alpha
                                                                     + base_pixels[dst_pixel + 0] * (0xFF - src_alpha)) / 0xFF);
                                base_pixels[dst_pixel + 1] = (byte)((overlay.Data[src_pixel + 1] * src_alpha
                                                                     + base_pixels[dst_pixel + 1] * (0xFF - src_alpha)) / 0xFF);
                                base_pixels[dst_pixel + 2] = (byte)((overlay.Data[src_pixel + 2] * src_alpha
                                                                     + base_pixels[dst_pixel + 2] * (0xFF - src_alpha)) / 0xFF);
                            }
                        }
                        src_pixel += src_pixel_size;
                        dst_pixel += dst_pixel_size;
                    }
                    src += overlay.Stride;
                    dst += reader.Stride;
                }
                return ImageData.CreateFlipped(base_info, reader.Format, null, base_pixels, reader.Stride);
            }
        }
Example #47
 public RawBitmapDecoder(IBinaryStream input, ImageMetaData info) : base(input, info)
 {
 }
Example #48
        private BitmapSource ReadFuckedUpBmpImage(Stream file, ImageMetaData info)
        {
            var header = new byte[0x36];

            if (header.Length != file.Read(header, 0, header.Length))
            {
                throw new InvalidFormatException();
            }
            int w = LittleEndian.ToInt32(header, 0x12);
            int h = LittleEndian.ToInt32(header, 0x16);

            if (w != info.Width || h != info.Height)
            {
                throw new InvalidFormatException();
            }

            int         bpp = LittleEndian.ToUInt16(header, 0x1c);
            PixelFormat format;

            switch (bpp)
            {
            case 32: format = PixelFormats.Bgr32; break;

            case 24: format = PixelFormats.Bgr24; break;

            case 16: format = PixelFormats.Bgr565; break;

            case 8:  format = PixelFormats.Indexed8; break;

            default: throw new NotImplementedException();
            }
            BitmapPalette palette = null;

            if (8 == bpp)
            {
                int colors = Math.Min(LittleEndian.ToInt32(header, 0x2E), 0x100);
                palette = DwqBmpReader.ReadPalette(file, colors);
            }
            int pixel_size = bpp / 8;
            int stride     = ((int)info.Width * pixel_size + 3) & ~3;
            var pixels     = new byte[stride * info.Height];

            if (pixels.Length != file.Read(pixels, 0, pixels.Length))
            {
                throw new EndOfStreamException();
            }
            if (bpp >= 24)
            {
                for (int row = 0; row < pixels.Length; row += stride)
                {
                    for (int i = 2; i < stride; i += pixel_size)
                    {
                        var t = pixels[row + i];
                        pixels[row + i]     = pixels[row + i - 2];
                        pixels[row + i - 2] = t;
                    }
                }
            }
            return BitmapSource.Create((int)info.Width, (int)info.Height,
                                       ImageData.DefaultDpiX, ImageData.DefaultDpiY,
                                       format, palette, pixels, stride);
        }
Example #49
 public abstract IImageDecoder CreateDecoder(IBinaryStream input, ImageMetaData info);
Example #50
 public override IImageDecoder CreateDecoder(IBinaryStream input, ImageMetaData info)
 {
      return new An20Decoder(input, info);
 }
Example #51
 public override ImageData Read(IBinaryStream stream, ImageMetaData info)
 {
     using (var bmp = DecompressStream(stream))
          return Bmp.Read(bmp, info);
 }
Example #52
 public AnmArchive(ArcView arc, ArchiveFormat impl, ICollection <Entry> dir, ImageMetaData base_info)
     : base(arc, impl, dir)
 {
     ImageInfo = base_info;
 }
Example #53
        /// <summary>
        /// Performs operations upon the current request.
        /// </summary>
        /// <param name="context">The current HTTP request context.</param>
        /// <returns>The <see cref="Task"/>.</returns>
        public async Task Invoke(HttpContext context)
        {
            IDictionary<string, string> commands = this.requestParser.ParseRequestCommands(context)
                                                    .Where(kvp => this.knownCommands.Contains(kvp.Key))
                                                    .ToDictionary(p => p.Key, p => p.Value);

            this.options.OnParseCommands?.Invoke(new ImageCommandContext(context, commands, CommandParser.Instance));

            // Get the correct service for the request.
            IImageProvider provider = this.resolvers.FirstOrDefault(r => r.Match(context));

            if (provider?.IsValidRequest(context) != true)
            {
                // Nothing to do. call the next delegate/middleware in the pipeline
                await this.next(context).ConfigureAwait(false);

                return;
            }

            // Create a cache key based on all the components of the requested url
            string uri = GetUri(context, commands);
            string key = this.cacheHash.Create(uri, this.options.CachedNameLength);

            bool           processRequest      = true;
            var            imageContext        = new ImageContext(context, this.options);
            IImageResolver sourceImageResolver = await provider.GetAsync(context).ConfigureAwait(false);

            if (sourceImageResolver == null)
            {
                // Log the error but let the pipeline handle the 404
                this.logger.LogImageResolveFailed(imageContext.GetDisplayUrl());
                processRequest = false;
            }

            ImageMetaData sourceImageMetadata = default;

            if (processRequest)
            {
                // Lock any reads when a write is being done for the same key to prevent potential file locks.
                using (await AsyncLock.ReaderLockAsync(key).ConfigureAwait(false))
                {
                    // Check to see if the cache contains this image
                    sourceImageMetadata = await sourceImageResolver.GetMetaDataAsync().ConfigureAwait(false);

                    IImageResolver cachedImageResolver = await this.cache.GetAsync(key).ConfigureAwait(false);

                    if (cachedImageResolver != null)
                    {
                        ImageMetaData cachedImageMetadata = await cachedImageResolver.GetMetaDataAsync().ConfigureAwait(false);

                        if (cachedImageMetadata != default)
                        {
                            // Has the cached image expired or has the source image been updated?
                            if (cachedImageMetadata.LastWriteTimeUtc > sourceImageMetadata.LastWriteTimeUtc &&
                                cachedImageMetadata.LastWriteTimeUtc > DateTimeOffset.Now.AddDays(-this.options.MaxCacheDays))
                            {
                                // We're pulling the image from the cache.
                                using (Stream cachedBuffer = await cachedImageResolver.OpenReadAsync().ConfigureAwait(false))
                                {
                                    await this.SendResponse(imageContext, key, cachedBuffer, cachedImageMetadata).ConfigureAwait(false);
                                }

                                return;
                            }
                        }
                    }
                }

                // Not cached? Let's get it from the image resolver.
                ChunkedMemoryStream outStream = null;
                try
                {
                    if (processRequest)
                    {
                        // Enter a write lock which locks writing and any reads for the same request.
                        // This reduces the overheads of unnecessary processing plus avoids file locks.
                        using (await AsyncLock.WriterLockAsync(key).ConfigureAwait(false))
                        {
                            // No allocations here for inStream since we are passing the raw input stream.
                            // outStream allocation depends on the memory allocator used.
                            ImageMetaData cachedImageMetadata = default;
                            outStream = new ChunkedMemoryStream(this.memoryAllocator);
                            using (Stream inStream = await sourceImageResolver.OpenReadAsync().ConfigureAwait(false))
                                using (var image = FormattedImage.Load(this.options.Configuration, inStream))
                                {
                                    image.Process(this.logger, this.processors, commands);
                                    this.options.OnBeforeSave?.Invoke(image);
                                    image.Save(outStream);

                                    // Check to see if the source metadata has a cache-control max-age value and use it to
                                    // override the default max age from our options.
                                    var maxAge = TimeSpan.FromDays(this.options.MaxBrowserCacheDays);
                                    if (!sourceImageMetadata.CacheControlMaxAge.Equals(TimeSpan.MinValue))
                                    {
                                        maxAge = sourceImageMetadata.CacheControlMaxAge;
                                    }

                                    cachedImageMetadata = new ImageMetaData(DateTime.UtcNow, image.Format.DefaultMimeType, maxAge);
                                }

                            // Allow for any further optimization of the image. Always reset the position just in case.
                            outStream.Position = 0;
                            string contentType = cachedImageMetadata.ContentType;
                            string extension   = this.formatUtilities.GetExtensionFromContentType(contentType);
                            this.options.OnProcessed?.Invoke(new ImageProcessingContext(context, outStream, commands, contentType, extension));
                            outStream.Position = 0;

                            // Save the image to the cache and send the response to the caller.
                            await this.cache.SetAsync(key, outStream, cachedImageMetadata).ConfigureAwait(false);

                            await this.SendResponse(imageContext, key, outStream, cachedImageMetadata).ConfigureAwait(false);
                        }
                    }
                }
                catch (Exception ex)
                {
                    // Log the error internally then rethrow.
                    // We don't call next here; the pipeline will automatically handle it.
                    this.logger.LogImageProcessingFailed(imageContext.GetDisplayUrl(), ex);
                    throw;
                }
                finally
                {
                    outStream?.Dispose();
                }
            }

            if (!processRequest)
            {
                // Call the next delegate/middleware in the pipeline
                await this.next(context).ConfigureAwait(false);
            }
        }
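The reader/writer locking above is the heart of the caching strategy: reads for a given cache key run in parallel, while regeneration takes an exclusive lock so concurrent requests for the same URL never process the image twice. A minimal sketch of per-key async locking follows; ImageSharp.Web's AsyncLock distinguishes reader from writer locks, whereas this illustrative stand-in uses a single exclusive lock per key.

    // Illustrative only: per-key async locking built on SemaphoreSlim.
    using System;
    using System.Collections.Concurrent;
    using System.Threading;
    using System.Threading.Tasks;

    internal static class KeyedLock
    {
        private static readonly ConcurrentDictionary<string, SemaphoreSlim> Locks =
            new ConcurrentDictionary<string, SemaphoreSlim>();

        public static async Task<IDisposable> LockAsync(string key)
        {
            // One semaphore per cache key; created lazily on first use.
            SemaphoreSlim semaphore = Locks.GetOrAdd(key, _ => new SemaphoreSlim(1, 1));
            await semaphore.WaitAsync().ConfigureAwait(false);
            return new Releaser(semaphore);
        }

        private sealed class Releaser : IDisposable
        {
            private readonly SemaphoreSlim semaphore;
            public Releaser(SemaphoreSlim semaphore) => this.semaphore = semaphore;
            public void Dispose() => this.semaphore.Release();
        }
    }

Callers then mirror the shape used in the middleware: using (await KeyedLock.LockAsync(key)) { /* regenerate and cache */ }.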
Exemplo n.º 54
0
 internal int GetStride(ImageMetaData info)
 {
     return(((int)info.Width * info.BPP / 8 + 3) & ~3);
 }
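The & ~3 mask in GetStride rounds the row byte count up to the next multiple of four, which is the row alignment BMP pixel data uses. A worked sketch of the same arithmetic:

    // Same stride computation, spelled out step by step.
    // e.g. a 3-pixel-wide, 24bpp image: rowBytes = 9, stride = 12.
    static int Stride(int width, int bpp)
    {
        int rowBytes = width * bpp / 8;  // bytes of pixel data per row
        return (rowBytes + 3) & ~3;      // round up to a multiple of 4
    }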
Exemplo n.º 55
0
        /// <summary>
        /// Read metadata from stream and read the blocks in the scans into <see cref="OrigComponent.SpectralBlocks"/>.
        /// </summary>
        /// <param name="stream">The stream</param>
        /// <param name="metadataOnly">Whether to decode metadata only.</param>
        public void ParseStream(Stream stream, bool metadataOnly = false)
        {
            this.MetaData       = new ImageMetaData();
            this.InputStream    = stream;
            this.InputProcessor = new InputProcessor(stream, this.Temp);

            // Check for the Start Of Image marker.
            this.InputProcessor.ReadFull(this.Temp, 0, 2);
            if (this.Temp[0] != OrigJpegConstants.Markers.XFF || this.Temp[1] != OrigJpegConstants.Markers.SOI)
            {
                throw new ImageFormatException("Missing SOI marker.");
            }

            // Process the remaining segments until the End Of Image marker.
            bool processBytes = true;

            // We can't currently short-circuit progressive images, so don't try.
            while (processBytes)
            {
                this.InputProcessor.ReadFull(this.Temp, 0, 2);
                while (this.Temp[0] != 0xff)
                {
                    // Strictly speaking, this is a format error. However, libjpeg is
                    // liberal in what it accepts. As of version 9, next_marker in
                    // jdmarker.c treats this as a warning (JWRN_EXTRANEOUS_DATA) and
                    // continues to decode the stream. Even before next_marker sees
                    // extraneous data, jpeg_fill_bit_buffer in jdhuff.c reads as many
                    // bytes as it can, possibly past the end of a scan's data. It
                    // effectively puts back any markers that it overscanned (e.g. an
                    // "\xff\xd9" EOI marker), but it does not put back non-marker data,
                    // and thus it can silently ignore a small number of extraneous
                    // non-marker bytes before next_marker has a chance to see them (and
                    // print a warning).
                    // We are therefore also liberal in what we accept. Extraneous data
                    // is silently ignored.
                    // This is similar to, but not exactly the same as, the restart
                    // mechanism within a scan (the RST[0-7] markers).
                    // Note that extraneous 0xff bytes in e.g. SOS data are escaped as
                    // "\xff\x00", and so are detected a little further down below.
                    this.Temp[0] = this.Temp[1];
                    this.Temp[1] = this.InputProcessor.ReadByte();
                }

                byte marker = this.Temp[1];
                if (marker == 0)
                {
                    // Treat "\xff\x00" as extraneous data.
                    continue;
                }

                while (marker == 0xff)
                {
                    // Section B.1.1.2 says, "Any marker may optionally be preceded by any
                    // number of fill bytes, which are bytes assigned code X'FF'".
                    marker = this.InputProcessor.ReadByte();
                }

                // End Of Image.
                if (marker == OrigJpegConstants.Markers.EOI)
                {
                    break;
                }

                if (marker >= OrigJpegConstants.Markers.RST0 && marker <= OrigJpegConstants.Markers.RST7)
                {
                    // Figures B.2 and B.16 of the specification suggest that restart markers should
                    // only occur between Entropy Coded Segments and not after the final ECS.
                    // However, some encoders may generate incorrect JPEGs with a final restart
                    // marker. That restart marker will be seen here instead of inside the ProcessSOS
                    // method, and is ignored as a harmless error. Restart markers have no extra data,
                    // so we check for this before we read the 16-bit length of the segment.
                    continue;
                }

                // Read the 16-bit length of the segment. The value includes the 2 bytes for the
                // length itself, so we subtract 2 to get the number of remaining bytes.
                this.InputProcessor.ReadFull(this.Temp, 0, 2);
                int remaining = (this.Temp[0] << 8) + this.Temp[1] - 2;
                if (remaining < 0)
                {
                    throw new ImageFormatException("Short segment length.");
                }

                switch (marker)
                {
                case OrigJpegConstants.Markers.SOF0:
                case OrigJpegConstants.Markers.SOF1:
                case OrigJpegConstants.Markers.SOF2:
                    this.IsProgressive = marker == OrigJpegConstants.Markers.SOF2;
                    this.ProcessStartOfFrameMarker(remaining);
                    if (metadataOnly && this.isJFif)
                    {
                        return;
                    }

                    break;

                case OrigJpegConstants.Markers.DHT:
                    if (metadataOnly)
                    {
                        this.InputProcessor.Skip(remaining);
                    }
                    else
                    {
                        this.ProcessDefineHuffmanTablesMarker(remaining);
                    }

                    break;

                case OrigJpegConstants.Markers.DQT:
                    if (metadataOnly)
                    {
                        this.InputProcessor.Skip(remaining);
                    }
                    else
                    {
                        this.ProcessDefineQuantizationTablesMarker(remaining);
                    }

                    break;

                case OrigJpegConstants.Markers.SOS:
                    if (metadataOnly)
                    {
                        return;
                    }

                    // When this is a progressive image this gets called a number of times;
                    // we need to know how many times it should be called in total.
                    this.ProcessStartOfScanMarker(remaining);
                    if (this.InputProcessor.ReachedEOF || !this.IsProgressive)
                    {
                        // If we reached an unexpected EOF, or this is not a progressive image, we can stop processing bytes as we now have the image data.
                        processBytes = false;
                    }

                    break;

                case OrigJpegConstants.Markers.DRI:
                    if (metadataOnly)
                    {
                        this.InputProcessor.Skip(remaining);
                    }
                    else
                    {
                        this.ProcessDefineRestartIntervalMarker(remaining);
                    }

                    break;

                case OrigJpegConstants.Markers.APP0:
                    this.ProcessApplicationHeaderMarker(remaining);
                    break;

                case OrigJpegConstants.Markers.APP1:
                    this.ProcessApp1Marker(remaining);
                    break;

                case OrigJpegConstants.Markers.APP2:
                    this.ProcessApp2Marker(remaining);
                    break;

                case OrigJpegConstants.Markers.APP14:
                    this.ProcessApp14Marker(remaining);
                    break;

                default:
                    if ((marker >= OrigJpegConstants.Markers.APP0 && marker <= OrigJpegConstants.Markers.APP15) ||
                        marker == OrigJpegConstants.Markers.COM)
                    {
                        this.InputProcessor.Skip(remaining);
                    }
                    else
                    {
                        // See Table B.1 "Marker code assignments".
                        throw new ImageFormatException("Unknown marker");
                    }

                    break;
                }
            }

            this.InitDerivedMetaDataProperties();
        }
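Each JPEG segment header stores its length as a big-endian 16-bit value that counts the two length bytes themselves, which is why the loop above subtracts 2. A minimal sketch of that read against a plain Stream (the helper name is illustrative, not part of the decoder):

    // Illustrative: read a JPEG segment length (big-endian, inclusive of
    // the two length bytes) and return the size of the remaining payload.
    private static int ReadSegmentLength(Stream stream)
    {
        int hi = stream.ReadByte();
        int lo = stream.ReadByte();
        if (hi < 0 || lo < 0)
        {
            throw new EndOfStreamException();
        }

        int remaining = (hi << 8) + lo - 2; // exclude the length field itself
        if (remaining < 0)
        {
            throw new InvalidDataException("Short segment length.");
        }

        return remaining;
    }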
Exemplo n.º 56
0
 /// <summary>
 /// Initializes a new instance of the <see cref="Image{TPixel}"/> class
 /// consuming an external buffer instance.
 /// </summary>
 internal Image(Configuration configuration, IBuffer <TPixel> consumedBuffer, int width, int height, ImageMetaData metadata)
 {
     this.configuration = configuration;
     this.PixelType     = new PixelTypeInfo(Unsafe.SizeOf <TPixel>() * 8);
     this.MetaData      = metadata;
     this.frames        = new ImageFrameCollection <TPixel>(this, width, height, consumedBuffer);
 }
Exemplo n.º 57
0
 public override ImageData Read(IBinaryStream stream, ImageMetaData info)
 {
     using (var input = new StreamRegion(stream.AsStream, 0x10, true))
         using (var png = new BinaryStream(input, stream.Name))
             return(base.Read(png, info));
 }
Exemplo n.º 58
0
        public override ImageData Read(Stream stream, ImageMetaData info)
        {
            var meta = (DwqMetaData)info;

            BitmapSource bitmap = null;

            using (var input = new StreamRegion(stream, 0x40, meta.PackedSize, true))
            {
                switch (meta.PackType)
                {
                case 5: // JPEG
                    return(Jpeg.Read(input, info));

                case 8: // PNG
                    return(Png.Read(input, info));

                case 0: // BMP
                case 2: // BMP+MASK
                    bitmap = ReadFuckedUpBmpImage(input, info);
                    break;

                case 7: // JPEG+MASK
                {
                    var decoder = new JpegBitmapDecoder(input, BitmapCreateOptions.None, BitmapCacheOption.OnLoad);
                    bitmap = decoder.Frames[0];
                    break;
                }

                case 3: // PACKBMP+MASK
                {
                    var reader = new DwqBmpReader(input, meta);
                    reader.Unpack();
                    bitmap = BitmapSource.Create((int)info.Width, (int)info.Height,
                                                 ImageData.DefaultDpiX, ImageData.DefaultDpiY,
                                                 reader.Format, reader.Palette, reader.Data, reader.Stride);
                    break;
                }
                }
            }
            if (null == bitmap)
            {
                throw new NotImplementedException();
            }
            if (meta.AType)
            {
                int mask_offset = 0x40 + meta.PackedSize;
                if (mask_offset != stream.Length)
                {
                    using (var mask = new StreamRegion(stream, mask_offset, true))
                    {
                        var reader = new DwqBmpReader(mask, meta);
                        if (8 == reader.Format.BitsPerPixel) // mask should be represented as 8bpp bitmap
                        {
                            reader.Unpack();
                            var alpha   = reader.Data;
                            var palette = reader.Palette.Colors;
                            for (int i = 0; i < alpha.Length; ++i)
                            {
                                var color = palette[alpha[i]];
                                int A     = (color.R + color.G + color.B) / 3;
                                alpha[i] = (byte)A;
                            }
                            bitmap = ApplyAlphaChannel(bitmap, alpha);
                        }
                    }
                }
            }
            bitmap.Freeze();
            return(new ImageData(bitmap, meta));
        }
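ApplyAlphaChannel is not shown in this snippet. One plausible implementation, sketched here under the assumption that the alpha plane is unpadded and matches the bitmap's dimensions, converts the image to Bgra32 and rewrites every fourth byte:

    // Hypothetical sketch of ApplyAlphaChannel: merge an 8bpp alpha plane
    // into a bitmap by converting to Bgra32 and replacing the alpha bytes.
    static BitmapSource ApplyAlphaChannel(BitmapSource bitmap, byte[] alpha)
    {
        var bgra = new FormatConvertedBitmap(bitmap, PixelFormats.Bgra32, null, 0);
        int width = bgra.PixelWidth;
        int height = bgra.PixelHeight;
        int stride = width * 4;
        var pixels = new byte[stride * height];
        bgra.CopyPixels(pixels, stride, 0);

        for (int i = 0; i < width * height; ++i)
        {
            pixels[i * 4 + 3] = alpha[i]; // overwrite the alpha byte
        }

        return BitmapSource.Create(width, height, bitmap.DpiX, bitmap.DpiY,
                                   PixelFormats.Bgra32, null, pixels, stride);
    }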
Exemplo n.º 59
0
#pragma warning restore 1416

    /// <summary>
    /// Detect objects in the image.
    /// </summary>
    /// <param name="metadata">The metadata entry for the image to process.</param>
    /// <returns>The <see cref="Task"/>.</returns>
    private async Task DetectObjects(ImageMetaData metadata)
    {
        var image    = metadata.Image;
        var fileName = new FileInfo(image.FullPath);

        if (!fileName.Exists)
        {
            return;
        }

        try
        {
            var  thumbSize          = ThumbSize.Large;
            var  medThumb           = new FileInfo(_thumbService.GetThumbPath(fileName, thumbSize));
            bool enableAIProcessing = _configService.GetBool(ConfigSettings.EnableAIProcessing, true);

            MetaDataService.GetImageSize(medThumb.FullName, out var thumbWidth, out var thumbHeight);

            var foundObjects = new List <ImageObject>();
            var foundFaces   = new List <ImageObject>();

            if (enableAIProcessing || _azureFaceService.DetectionType == AzureFaceService.AzureDetection.AllImages)
            {
                Logging.Log($"Processing AI image detection for {fileName.Name}...");
            }

            if (!File.Exists(medThumb.FullName))
            {
                // The thumb isn't ready yet.
                return;
            }

            var bitmap = SafeLoadBitmap(medThumb.FullName);

            if (bitmap != null && _imageClassifier != null && enableAIProcessing)
            {
                var colorWatch = new Stopwatch("DetectObjects");

                var dominant = _imageClassifier.DetectDominantColour(bitmap);
                var average  = _imageClassifier.DetectAverageColor(bitmap);

                colorWatch.Stop();

                image.MetaData.AverageColor  = average.ToHex();
                image.MetaData.DominantColor = dominant.ToHex();

                Logging.LogVerbose($"Image {image.FullPath} has dominant colour {dominant.ToHex()}, average {average.ToHex()}");
            }

            // Next, look for faces. We need to determine if we:
            //  a) Use only local (Accord.Net) detection
            //  b) Use local detection, and then if we find a face, or a person object, submit to Azure
            //  c) Always submit every image to Azure.
            // This is a user config.
            bool useAzureDetection = false;

            // For the object detector, we need a successfully loaded bitmap
            if (bitmap != null && enableAIProcessing)
            {
                var objwatch = new Stopwatch("DetectObjects");

                // First, look for Objects
                var objects = await _objectDetector.DetectObjects(bitmap);

                objwatch.Stop();

                if (objects.Any())
                {
                    Logging.Log($" Yolo found {objects.Count()} objects in {fileName}...");

                    var newTags = await CreateNewTags(objects);

                    var newObjects = objects.Select(x => new ImageObject
                    {
                        RecogntionSource = ImageObject.RecognitionType.MLNetObject,
                        ImageId          = image.ImageId,
                        RectX            = (int)x.Rect.Left,
                        RectY            = (int)x.Rect.Top,
                        RectHeight       = (int)x.Rect.Height,
                        RectWidth        = (int)x.Rect.Width,
                        TagId            = x.IsFace ? 0 : newTags[x.Tag],
                        Type             = ImageObject.ObjectTypes.Object.ToString(),
                        Score            = x.Score
                    }).ToList();

                    if (UseAzureForRecogition(objects))
                    {
                        useAzureDetection = true;
                    }

                    ScaleObjectRects(image, newObjects, thumbWidth, thumbHeight);
                    foundObjects.AddRange(newObjects);
                }
            }

            if (_azureFaceService.DetectionType == AzureFaceService.AzureDetection.AllImages)
            {
                // Skip local face detection and just go straight to Azure
                useAzureDetection = true;
            }
            else if (enableAIProcessing)
            {
                if (_emguFaceService.ServiceAvailable)
                {
                    var emguwatch = new Stopwatch("EmguFaceDetect");

                    var rects = _emguFaceService.DetectFaces(medThumb.FullName);

                    emguwatch.Stop();

                    if (UseAzureForRecogition(rects))
                    {
                        // Filter out the faces if we're using Azure
                        rects             = rects.Where(x => !x.IsFace).ToList();
                        useAzureDetection = true;
                    }

                    if (rects.Any())
                    {
                        // Record what local detection found (faces routed to Azure were filtered out above).
                        Logging.Log($" Emgu found {rects.Count} faces in {fileName}...");

                        var newTags = await CreateNewTags(rects);

                        var newObjects = rects.Select(x => new ImageObject
                        {
                            RecogntionSource = ImageObject.RecognitionType.Emgu,
                            ImageId          = image.ImageId,
                            RectX            = x.Rect.Left,
                            RectY            = x.Rect.Top,
                            RectHeight       = x.Rect.Height,
                            RectWidth        = x.Rect.Width,
                            TagId            = newTags[x.Tag],
                            Type             = x.IsFace ? ImageObject.ObjectTypes.Face.ToString() : ImageObject.ObjectTypes.Object.ToString(),
                            Score            = 0
                        }).ToList();

                        ScaleObjectRects(image, newObjects, thumbWidth, thumbHeight);
                        foundFaces.AddRange(newObjects);
                    }
                }
                else
                {
                    var accordwatch = new Stopwatch("AccordFaceDetect");

                    // Emgu isn't available, so use Accord.Net instead
                    var rects = _accordFaceService.DetectFaces(bitmap);

                    accordwatch.Stop();

                    if (rects.Any())
                    {
                        if (UseAzureForRecogition(rects))
                        {
                            useAzureDetection = true;
                        }
                        else
                        {
                            // Azure is disabled, so just use what we've got.
                            Logging.Log($" Accord.Net found {rects.Count} faces in {fileName}...");

                            var newTags = await CreateNewTags(rects);

                            var newObjects = rects.Select(x => new ImageObject
                            {
                                ImageId          = image.ImageId,
                                RectX            = x.Rect.Left,
                                RectY            = x.Rect.Top,
                                RectHeight       = x.Rect.Height,
                                RectWidth        = x.Rect.Width,
                                Type             = ImageObject.ObjectTypes.Face.ToString(), // Accord only does faces.
                                TagId            = newTags[x.Tag],
                                RecogntionSource = ImageObject.RecognitionType.Accord,
                                Score            = 0
                            }).ToList();

                            ScaleObjectRects(image, newObjects, thumbWidth, thumbHeight);
                            foundFaces.AddRange(newObjects);
                        }
                    }
                }
            }

            if (useAzureDetection)
            {
                var faceTag = await _metdataService.CreateTagsFromStrings(new List <string> {
                    "Face"
                });

                var faceTagId = faceTag.FirstOrDefault()?.TagId ?? 0;

                var azurewatch = new Stopwatch("AzureFaceDetect");

                Logging.LogVerbose($"Processing {medThumb.FullName} with Azure Face Service");

                // We got predictions or we're scanning everything - so now let's try the image with Azure.
                var azureFaces = await _azureFaceService.DetectFaces(medThumb.FullName, _imageProcessor);

                azurewatch.Stop();

                if (azureFaces.Any())
                {
                    Logging.Log($" Azure found {azureFaces.Count} faces in {fileName}...");

                    // Get a list of the Azure Person IDs
                    var peopleIds = azureFaces.Select(x => x.PersonId.ToString());

                    // Create any new ones, or pull existing ones back from the cache
                    await CreateMissingPeople(peopleIds);

                    // Now convert into ImageObjects. Note that if the peopleCache doesn't
                    // contain the key, it means we didn't create a person record successfully
                    // for that entry - so we skip it.
                    var newObjects = azureFaces.Select(x => new ImageObject
                    {
                        ImageId          = image.ImageId,
                        RectX            = x.Left,
                        RectY            = x.Top,
                        RectHeight       = x.Height,
                        RectWidth        = x.Width,
                        Type             = ImageObject.ObjectTypes.Face.ToString(),
                        TagId            = faceTagId,
                        RecogntionSource = ImageObject.RecognitionType.Azure,
                        Score            = x.Score,
                        PersonId         = GetPersonIDFromCache(x.PersonId)
                    }).ToList();

                    ScaleObjectRects(image, newObjects, thumbWidth, thumbHeight);
                    foundFaces.AddRange(newObjects);

                    var peopleToAdd = foundFaces.Select(x => x.Person);

                    // Add them
                }
                else
                {
                    // If we're scanning because local face detection found a face, log the result.
                    if (_azureFaceService.DetectionType == AzureFaceService.AzureDetection.ImagesWithFaces)
                    {
                        Logging.Log($"Azure found no faces in image {fileName}");
                    }
                    else
                    {
                        Logging.LogVerbose($"Azure found no faces in image {fileName}");
                    }
                }
            }

            if (foundFaces.Any())
            {
                // We've found some faces. Add a tagID.
                const string faceTagName = "Face";
                var          tags        = await _metdataService.CreateTagsFromStrings(new List <string> {
                    faceTagName
                });

                var faceTagId = tags.Single().TagId;
                foundFaces.ForEach(x => x.TagId = faceTagId);
            }

            if (foundObjects.Any() || foundFaces.Any())
            {
                var objWriteWatch = new Stopwatch("WriteDetectedObjects");

                var allFound = foundObjects.Union(foundFaces).ToList();

                using var db = new ImageContext();

                // First, clear out the existing faces and objects - we don't want dupes
                // TODO: Might need to be smarter about this once we add face names and
                // Object identification details.
                await db.BatchDelete(db.ImageObjects.Where(x => x.ImageId.Equals(image.ImageId) && x.RecogntionSource != ImageObject.RecognitionType.ExternalApp));

                // Now add the objects and faces.
                await db.BulkInsert(db.ImageObjects, allFound);

                WriteAITagsToImages(image, allFound);

                objWriteWatch.Stop();
            }
        }
        catch (Exception ex)
        {
            Logging.LogError($"Exception during AI detection for {fileName}: {ex}");
        }
    }
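ScaleObjectRects is not shown above; given the call sites, its job is to map rectangles detected on the thumbnail back into the full image's coordinate space. A hypothetical sketch, assuming the image's metadata exposes the full-size Width and Height:

    // Hypothetical sketch: scale detection rects from thumbnail coordinates
    // up to the original image's coordinate space.
    private static void ScaleObjectRects(Image image, List<ImageObject> objects,
                                         int thumbWidth, int thumbHeight)
    {
        // Assumes image.MetaData.Width/Height hold the full-size dimensions.
        double scaleX = (double)image.MetaData.Width / thumbWidth;
        double scaleY = (double)image.MetaData.Height / thumbHeight;

        foreach (var obj in objects)
        {
            obj.RectX      = (int)(obj.RectX * scaleX);
            obj.RectY      = (int)(obj.RectY * scaleY);
            obj.RectWidth  = (int)(obj.RectWidth * scaleX);
            obj.RectHeight = (int)(obj.RectHeight * scaleY);
        }
    }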
Exemplo n.º 60
0
        /// <summary>
        /// Encodes the image to the specified stream from the <see cref="Image{TPixel}"/>.
        /// </summary>
        /// <typeparam name="TPixel">The pixel format.</typeparam>
        /// <param name="image">The <see cref="ImageFrame{TPixel}"/> to encode from.</param>
        /// <param name="stream">The <see cref="Stream"/> to encode the image data to.</param>
        public void Encode <TPixel>(Image <TPixel> image, Stream stream)
            where TPixel : struct, IPixel <TPixel>
        {
            Guard.NotNull(image, nameof(image));
            Guard.NotNull(stream, nameof(stream));

            this.configuration = image.GetConfiguration();
            this.width         = image.Width;
            this.height        = image.Height;

            // Always take the encoder options over the metadata values.
            ImageMetaData metaData    = image.MetaData;
            PngMetaData   pngMetaData = metaData.GetFormatMetaData(PngFormat.Instance);

            this.gamma        = this.gamma ?? pngMetaData.Gamma;
            this.writeGamma   = this.gamma > 0;
            this.pngColorType = this.pngColorType ?? pngMetaData.ColorType;
            this.pngBitDepth  = this.pngBitDepth ?? pngMetaData.BitDepth;
            this.use16Bit     = this.pngBitDepth.Equals(PngBitDepth.Bit16);

            // Ensure we are not allowing impossible combinations.
            if (!ColorTypes.ContainsKey(this.pngColorType.Value))
            {
                throw new NotSupportedException("Color type is not supported or not valid.");
            }

            stream.Write(PngConstants.HeaderBytes, 0, PngConstants.HeaderBytes.Length);

            QuantizedFrame <TPixel> quantized = null;

            if (this.pngColorType == PngColorType.Palette)
            {
                byte bits = (byte)this.pngBitDepth;
                if (!ColorTypes[this.pngColorType.Value].Contains(bits))
                {
                    throw new NotSupportedException("Bit depth is not supported or not valid.");
                }

                // Use the metadata to determine what quantization depth to use if no quantizer has been set.
                if (this.quantizer == null)
                {
                    this.quantizer = new WuQuantizer(ImageMaths.GetColorCountForBitDepth(bits));
                }

                // Create quantized frame returning the palette and set the bit depth.
                quantized = this.quantizer.CreateFrameQuantizer <TPixel>(image.GetConfiguration())
                            .QuantizeFrame(image.Frames.RootFrame);
                byte quantizedBits = (byte)ImageMaths.GetBitsNeededForColorDepth(quantized.Palette.Length).Clamp(1, 8);
                bits = Math.Max(bits, quantizedBits);

                // PNG only supports four pixel depths when using the PLTE chunk: 1, 2, 4, and 8 bits.
                // We check the bit depth again because the bit depth of the palette returned by a
                // given quantizer might not be within the acceptable range.
                if (bits == 3)
                {
                    bits = 4;
                }
                else if (bits >= 5 && bits <= 7)
                {
                    bits = 8;
                }

                this.bitDepth = bits;
            }
            else
            {
                this.bitDepth = (byte)this.pngBitDepth;
                if (!ColorTypes[this.pngColorType.Value].Contains(this.bitDepth))
                {
                    throw new NotSupportedException("Bit depth is not supported or not valid.");
                }
            }

            this.bytesPerPixel = this.CalculateBytesPerPixel();

            var header = new PngHeader(
                width: image.Width,
                height: image.Height,
                bitDepth: this.bitDepth,
                colorType: this.pngColorType.Value,
                compressionMethod: 0, // None
                filterMethod: 0,
                interlaceMethod: 0);  // TODO: Can't write interlaced yet.

            this.WriteHeaderChunk(stream, header);

            // Collect the indexed pixel data
            if (quantized != null)
            {
                this.WritePaletteChunk(stream, quantized);
            }

            this.WritePhysicalChunk(stream, metaData);
            this.WriteGammaChunk(stream);
            this.WriteExifChunk(stream, metaData);
            this.WriteDataChunks(image.Frames.RootFrame, quantized, stream);
            this.WriteEndChunk(stream);
            stream.Flush();

            quantized?.Dispose();
        }
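The palette branch above derives its bit depth from the quantized palette size and then rounds it to one of the depths PNG allows alongside a PLTE chunk (1, 2, 4, or 8). A minimal sketch of that derivation (the helper name is illustrative):

    // Illustrative: bits needed to index a palette of `colors` entries,
    // rounded up to a PNG-legal palette depth (1, 2, 4, or 8).
    static byte GetPaletteBitDepth(int colors)
    {
        // Ceiling of log2(colors), clamped to [1, 8].
        int bits = (int)Math.Ceiling(Math.Log(colors, 2));
        bits = Math.Max(1, Math.Min(8, bits));

        if (bits == 3)
        {
            return 4;
        }

        return bits >= 5 && bits <= 7 ? (byte)8 : (byte)bits;
    }

    // e.g. 200 entries -> 8 bits; 13 entries -> 4 bits; 2 entries -> 1 bit.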