Example 1
        /// <summary>
        /// Initializes a new instance of the <see cref="RawImage"/> class from packed sensor data.
        /// </summary>
        /// <param name="frameData">The raw, packed sensor data.</param>
        /// <param name="imageMetadata">The metadata with packing parameters.</param>
        public RawImage(byte[] frameData, Json.FrameImage imageMetadata)
        {
            if (frameData == null)
                throw new ArgumentNullException("frameData");

            VerifyMetadata(imageMetadata, out _width, out _height);
            _data = Unpack(frameData, _width, _height);
        }
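A minimal usage sketch for this constructor follows; packedBytes and frameImage are illustrative names for data assumed to be read and deserialized elsewhere, not part of the library listing.

        // Sketch only: packedBytes (raw packed sensor data) and frameImage (its deserialized
        // Json.FrameImage metadata) are assumed to have been obtained elsewhere.
        RawImage raw = new RawImage(packedBytes, frameImage);
        // raw.Data now holds Width * Height unpacked samples, one ushort per sensor pixel.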
Example 2
        /// <summary>
        /// Initializes a new instance of the <see cref="RawImage"/> class from a <see cref="LightFieldComponent"/>.
        /// </summary>
        /// <param name="frame">The raw frame component.</param>
        /// <param name="imageMetadata">The metadata with packing parameters.</param>
        public RawImage(LightFieldComponent frame, Json.FrameImage imageMetadata)
        {
            if (frame == null)
                throw new ArgumentNullException("frame");
            if (frame.Length < 1)
                throw new ArgumentException("Component does not contain any data.", "frame");

            VerifyMetadata(imageMetadata, out _width, out _height);
            _data = Unpack(frame.Data, _width, _height);
        }
Example 3
        private void VerifyMetadata(Json.FrameImage imageMetadata, out int width, out int height)
        {
            if (imageMetadata == null)
                throw new ArgumentNullException("imageMetadata");
            if (imageMetadata.RawDetails == null ||
                imageMetadata.RawDetails.PixelPacking == null)
                throw new ArgumentException("Metadata does not contain required information.", "imageMetadata");

            if (imageMetadata.Representation != "rawPacked")
                throw new NotSupportedException("Unsupported representation.");
            if (imageMetadata.RawDetails.PixelPacking.Endianness != "big")
                throw new NotSupportedException("Unsupported endianness.");
            if (imageMetadata.RawDetails.PixelPacking.BitsPerPixel != 12)
                throw new NotSupportedException("Unsupported number of bits per pixel.");

            width = (int)imageMetadata.Width;
            height = (int)imageMetadata.Height;

            if (width < 0 || height < 0)
                throw new ArgumentException("Invalid frame dimensions.");
        }
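The Unpack helper called by the constructors is not shown in these examples. Given the only layout the checks above admit (12 bits per pixel, big-endian), a stand-alone sketch of such an unpacker could look like the following; it is an assumption about the packing, not the library's actual implementation.

        // Sketch only: unpack big-endian 12-bit samples, two samples per three bytes.
        // This mirrors the layout implied by VerifyMetadata, not the library's own Unpack.
        private static ushort[] UnpackSketch(byte[] packed, int width, int height)
        {
            ushort[] samples = new ushort[width * height];

            for (int i = 0, o = 0; o + 1 < samples.Length && i + 2 < packed.Length; i += 3, o += 2)
            {
                samples[o]     = (ushort)((packed[i] << 4) | (packed[i + 1] >> 4));
                samples[o + 1] = (ushort)(((packed[i + 1] & 0x0F) << 8) | packed[i + 2]);
            }

            return samples;
        }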
        private void InitializeInput(ushort[] data, Json.FrameImage frameMetadata)
        {
            ushort[,] minimums = new ushort[2, 2];
            ushort[,] maximums = new ushort[2, 2];

            float[,] groundedMaximums  = new float[2, 2];
            float[,] whiteBalanceGains = new float[2, 2];
            InitializeMosaic(minimums, maximums, groundedMaximums, whiteBalanceGains, frameMetadata);

            _input = new float[data.Length];
            int i = 0;

            for (int y = 0; y < _height; ++y)
            {
                for (int x = 0; x < _width; ++x, ++i)
                {
                    // Normalize: subtract the per-channel black level and divide by (white - black)
                    // for this Bayer position; the white balance multiplication is currently disabled.
                    _input[i] = ((float)data[i] /* * whiteBalanceGains[x % 2, y % 2]*/ - minimums[x % 2, y % 2]) / groundedMaximums[x % 2, y % 2];
                }
            }
            // Alternative (disabled): apply white balance gain and gamma correction during normalization:
            //  _input[i] = (float)Math.Pow((data[i] * whiteBalanceGains[x % 2, y % 2] - minimums[x % 2, y % 2]) / groundedMaximums[x % 2, y % 2], _gamma);
        }
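The loop above maps each raw sample into approximately the [0, 1] range using the black and white levels of its 2x2 Bayer position. A self-contained illustration of the same arithmetic with made-up levels:

        // Illustrative numbers only: normalize one sample the same way InitializeInput does.
        ushort sample = 2048;   // raw sensor value (assumed)
        ushort black  = 168;    // assumed black level for this Bayer position
        ushort white  = 4095;   // assumed white level (12-bit maximum)

        float normalized = ((float)sample - black) / (white - black);
        // normalized is about 0.479; the black level maps to 0 and the white level maps to 1.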
        /// <summary>
        /// Initializes a new instance of the <see cref="DemosaicedImage"/> class from raw image and metadata.
        /// </summary>
        /// <param name="rawFrame">The raw image to be demosaiced.</param>
        /// <param name="frameMetadata">The metadata specifying the demosaic parameters.</param>
        public DemosaicedImage(RawImage rawFrame, Json.FrameImage frameMetadata)
        {
            if (rawFrame == null)
            {
                throw new ArgumentNullException("rawFrame");
            }
            if (frameMetadata == null)
            {
                throw new ArgumentNullException("frameMetadata");
            }

            if (frameMetadata.RawDetails == null ||
                frameMetadata.RawDetails.PixelFormat == null ||
                frameMetadata.RawDetails.Mosaic == null)
            {
                throw new ArgumentException("Metadata does not contain required information.", "frameMetadata");
            }

            if (frameMetadata.RawDetails.Mosaic.Tile != "r,gr:gb,b")
            {
                throw new NotSupportedException("Unsupported mosaic tile.");
            }

            _width  = rawFrame.Width;
            _height = rawFrame.Height;

            if (_width < 1 || _height < 1)
            {
                throw new ArgumentException("Invalid image dimensions.", "rawFrame");
            }

            if (rawFrame.Data == null || rawFrame.Data.Length != _width * _height)
            {
                throw new ArgumentException("Raw frame dimensions do not match metadata dimensions.");
            }

            if (frameMetadata.Color != null)
            {
                if (frameMetadata.Color.Gamma != decimal.Zero)
                {
                    _gamma = (double)frameMetadata.Color.Gamma;
                }

                if (frameMetadata.Color.CcmRgbToSrgbArray != null && frameMetadata.Color.CcmRgbToSrgbArray.Length == 9)
                {
                    _ccmRgbToSrgbArray[0, 0] = (double)frameMetadata.Color.CcmRgbToSrgbArray[0];
                    _ccmRgbToSrgbArray[0, 1] = (double)frameMetadata.Color.CcmRgbToSrgbArray[1];
                    _ccmRgbToSrgbArray[0, 2] = (double)frameMetadata.Color.CcmRgbToSrgbArray[2];

                    _ccmRgbToSrgbArray[1, 0] = (double)frameMetadata.Color.CcmRgbToSrgbArray[3];
                    _ccmRgbToSrgbArray[1, 1] = (double)frameMetadata.Color.CcmRgbToSrgbArray[4];
                    _ccmRgbToSrgbArray[1, 2] = (double)frameMetadata.Color.CcmRgbToSrgbArray[5];

                    _ccmRgbToSrgbArray[2, 0] = (double)frameMetadata.Color.CcmRgbToSrgbArray[6];
                    _ccmRgbToSrgbArray[2, 1] = (double)frameMetadata.Color.CcmRgbToSrgbArray[7];
                    _ccmRgbToSrgbArray[2, 2] = (double)frameMetadata.Color.CcmRgbToSrgbArray[8];
                }
            }

            InitializeInput(rawFrame.Data, frameMetadata);

            _output = new ColorRgb128Float[_input.Length];
            _valid  = new bool[_output.Length];
        }
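As a usage illustration, a raw frame component and the same Json.FrameImage metadata can be chained through both classes; frameComponent and frameImage are assumed to come from an already-loaded package.

        // Sketch only: frameComponent (a LightFieldComponent) and frameImage are assumed
        // to have been loaded and deserialized elsewhere.
        RawImage raw = new RawImage(frameComponent, frameImage);
        DemosaicedImage demosaiced = new DemosaicedImage(raw, frameImage);
        // The same metadata supplies both the unpacking and the demosaicing parameters.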
        private void InitializeMosaic(ushort[,] minimums, ushort[,] maximums, float[,] groundedMaximums, float[,] whiteBalanceGains, Json.FrameImage frameMetadata)
        {
            string upperLeftPixel = frameMetadata.RawDetails.Mosaic.UpperLeftPixel;

            Json.BayerValue black   = frameMetadata.RawDetails.PixelFormat.Black;
            Json.BayerValue white   = frameMetadata.RawDetails.PixelFormat.White;
            Json.BayerValue balance = frameMetadata.Color.WhiteBalanceGain;

            switch (upperLeftPixel)
            {
            case "b":
                _bRemainder = 0;
                FillMatrix(minimums, (ushort)black.B, (ushort)black.Gb, (ushort)black.Gr, (ushort)black.R);
                FillMatrix(maximums, (ushort)white.B, (ushort)white.Gb, (ushort)white.Gr, (ushort)white.R);
                FillMatrix(whiteBalanceGains, (float)balance.B, (float)balance.Gb, (float)balance.Gr, (float)balance.R);
                break;

            case "r":
                _bRemainder = 1;
                FillMatrix(minimums, (ushort)black.R, (ushort)black.Gr, (ushort)black.Gb, (ushort)black.B);
                FillMatrix(maximums, (ushort)white.R, (ushort)white.Gr, (ushort)white.Gb, (ushort)white.B);
                FillMatrix(whiteBalanceGains, (float)balance.R, (float)balance.Gr, (float)balance.Gb, (float)balance.B);
                break;

            default:
                throw new NotSupportedException("Unsupported upper left mosaic pixel.");
            }

            for (int x = 0; x <= 1; x++)
            {
                for (int y = 0; y <= 1; y++)
                {
                    groundedMaximums[x, y] = maximums[x, y] - minimums[x, y];
                }
            }
        }
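The FillMatrix helper is not part of this listing. Given the argument order used above and the [x % 2, y % 2] indexing in InitializeInput, a plausible generic sketch would fill the 2x2 matrix as upper-left, upper-right, lower-left, lower-right; the actual library may use separate ushort and float overloads instead.

        // Sketch only: a plausible FillMatrix consistent with the calls in InitializeMosaic.
        // The first index is the column (x % 2), the second the row (y % 2).
        private static void FillMatrix<T>(T[,] matrix, T upperLeft, T upperRight, T lowerLeft, T lowerRight)
        {
            matrix[0, 0] = upperLeft;
            matrix[1, 0] = upperRight;
            matrix[0, 1] = lowerLeft;
            matrix[1, 1] = lowerRight;
        }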
        /// <summary>
        /// Finds the nearest matching flat field image in the set.
        /// </summary>
        /// <param name="zoomStep">The frame:Devices/Lens/ZoomStep of the reference image.</param>
        /// <param name="focusStep">The frame:Devices/Lens/FocusStep of the reference image.</param>
        /// <param name="packageReference">The package reference passed in to the <see cref="LoadFrom(LightFieldPackage,string)"/> reference that contains the nearest matching flat field image.</param>
        /// <param name="componentReference">The component reference of the nearest flat field image in the package.</param>
        /// <param name="frameImage">Cached image metadata of the flat field image.</param>
        /// <returns>true if a nearest matching flat field could be found; false otherwise.</returns>
        /// <remarks>Currently, this method returns false if and only if there are no images in the set.</remarks>
        public bool FindNearestFlatFieldImage(int zoomStep, int focusStep, out string packageReference, out string componentReference, out Json.FrameImage frameImage)
        {
            packageReference   = null;
            componentReference = null;
            frameImage         = null;

            FlatFieldItem searchItem = new FlatFieldItem();

            searchItem.FocusStep = focusStep;
            searchItem.ZoomStep  = zoomStep;

            int index = _items.BinarySearch(searchItem);

            if (index < 0)
            {
                index = ~index;
            }
            if (index >= _items.Count)
            {
                index = _items.Count - 1;
            }

            if (index < 0)
            {
                return false;
            }

            packageReference   = _items[index].PackageReference;
            componentReference = _items[index].DataReference;
            frameImage         = _items[index].FrameImage;
            return true;
        }
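A usage sketch for the lookup follows; flatFieldSet stands for an assumed, already-populated instance of the containing class, and the zoom and focus step values are arbitrary.

        // Sketch only: flatFieldSet is an assumed instance; 340 and 575 are arbitrary steps.
        string packageRef, componentRef;
        Json.FrameImage flatFieldMetadata;

        if (flatFieldSet.FindNearestFlatFieldImage(340, 575, out packageRef, out componentRef, out flatFieldMetadata))
        {
            // packageRef and componentRef identify where the nearest flat field image can be loaded from.
        }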
        /// <summary>
        /// Finds the nearest matching flat field image in the set.
        /// </summary>
        /// <param name="metadata">The metadata of the reference image.</param>
        /// <param name="packageReference">The package reference passed in to the <see cref="LoadFrom(LightFieldPackage,string)"/> reference that contains the nearest matching flat field image.</param>
        /// <param name="componentReference">The component reference of the nearest flat field image in the package.</param>
        /// <param name="frameImage">Cached image metadata of the flat field image.</param>
        /// <returns>true if a nearest matching flat field could be found; false otherwise.</returns>
        /// <remarks>Currently, this method returns false if and only if there are no images in the set.</remarks>
        public bool FindNearestFlatFieldImage(Json.FrameMetadata metadata, out string packageReference, out string componentReference, out Json.FrameImage frameImage)
        {
            if (metadata == null)
            {
                throw new ArgumentNullException("metadata");
            }

            if (metadata.Devices == null || metadata.Devices.Lens == null)
            {
                throw new ArgumentException("Critical metadata missing.", "metadata");
            }

            return FindNearestFlatFieldImage((int)metadata.Devices.Lens.ZoomStep, (int)metadata.Devices.Lens.FocusStep, out packageReference, out componentReference, out frameImage);
        }