Example #1
        /// <summary>
        /// Fast processing for the common case of 2:1 horizontal and 2:1 vertical.
        /// It's still a box filter.
        /// </summary>
        private void h2v2_upsample(ComponentBuffer input_data)
        {
            ComponentBuffer output_data = m_color_buf[m_currentComponent];

            int inrow  = 0;
            int outrow = 0;

            while (outrow < m_cinfo.m_max_v_samp_factor)
            {
                int row      = m_upsampleRowOffset + inrow;
                int outIndex = 0;

                var inputBuffer  = input_data[row];
                var outputBuffer = output_data[outrow];
                for (int col = 0; outIndex < m_cinfo.m_output_width; col++)
                {
                    byte invalue = inputBuffer[col]; /* don't need GETJSAMPLE() here */
                    outputBuffer[outIndex++] = invalue;
                    outputBuffer[outIndex++] = invalue;
                }

                JpegUtils.jcopy_sample_rows(output_data, outrow, output_data, outrow + 1, 1, m_cinfo.m_output_width);
                inrow++;
                outrow += 2;
            }
        }
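The loop above writes each input sample twice horizontally, and jcopy_sample_rows then duplicates the finished row to supply the vertical factor of two. A minimal stand-alone sketch of the same box replication, assuming plain byte[][] rows instead of the library's ComponentBuffer type (a simplification, not part of the library):

        private static byte[][] BoxUpsample2x2(byte[][] input, int inputWidth, int inputRows)
        {
            // Each input sample becomes a 2x2 block of identical output samples.
            var output = new byte[inputRows * 2][];
            for (int inRow = 0; inRow < inputRows; inRow++)
            {
                var outRow = new byte[inputWidth * 2];
                for (int col = 0; col < inputWidth; col++)
                {
                    outRow[col * 2] = input[inRow][col];
                    outRow[(col * 2) + 1] = input[inRow][col];
                }

                output[inRow * 2] = outRow;
                output[(inRow * 2) + 1] = (byte[])outRow.Clone(); // duplicate the row vertically
            }

            return output;
        }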
Example #2
        /// <summary>
        /// Color conversion for YCCK -> RGB.
        /// It is just a hybrid of the YCCK -> CMYK and CMYK -> RGB conversions.
        /// </summary>
        private void YcckRgbConvert(ComponentBuffer[] input_buf, int input_row, byte[][] output_buf, int output_row, int num_rows)
        {
            var component0RowOffset = m_perComponentOffsets[0];
            var component1RowOffset = m_perComponentOffsets[1];
            var component2RowOffset = m_perComponentOffsets[2];
            var component3RowOffset = m_perComponentOffsets[3];

            var limit       = m_cinfo.m_sample_range_limit;
            var limitOffset = m_cinfo.m_sampleRangeLimitOffset;

            var num_cols = m_cinfo.outputWidth;

            for (var row = 0; row < num_rows; row++)
            {
                var columnOffset = 0;
                for (var col = 0; col < num_cols; col++)
                {
                    int y  = input_buf[0][input_row + component0RowOffset][col];
                    int cb = input_buf[1][input_row + component1RowOffset][col];
                    int cr = input_buf[2][input_row + component2RowOffset][col];

                    int cmyk_c = limit[limitOffset + JpegConstants.MAXJSAMPLE - (y + m_Cr_r_tab[cr])];
                    int cmyk_m = limit[limitOffset + JpegConstants.MAXJSAMPLE - (y + JpegUtils.RIGHT_SHIFT(m_Cb_g_tab[cb] + m_Cr_g_tab[cr], SCALEBITS))];
                    int cmyk_y = limit[limitOffset + JpegConstants.MAXJSAMPLE - (y + m_Cb_b_tab[cb])];
                    int cmyk_k = input_buf[3][input_row + component3RowOffset][col];

                    output_buf[output_row + row][columnOffset + JpegConstants.RGB_RED]   = (byte)((cmyk_c * cmyk_k) / 255);
                    output_buf[output_row + row][columnOffset + JpegConstants.RGB_GREEN] = (byte)((cmyk_m * cmyk_k) / 255);
                    output_buf[output_row + row][columnOffset + JpegConstants.RGB_BLUE]  = (byte)((cmyk_y * cmyk_k) / 255);
                    columnOffset += JpegConstants.RGB_PIXELSIZE;
                }

                input_row++;
            }
        }
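The loop above is the promised hybrid: it first reconstructs the four CMYK samples exactly as the YCCK -> CMYK converter does, then collapses them to RGB with R = C * K / 255 (and likewise for green and blue). That second stage in isolation, written as a hypothetical helper for 8-bit samples (not part of the library):

        // Hypothetical helper: the CMYK -> RGB step used above, assuming 8-bit
        // samples in the form the YCCK -> CMYK stage produces.
        private static void CmykToRgb(int c, int m, int y, int k, out byte r, out byte g, out byte b)
        {
            r = (byte)((c * k) / 255);
            g = (byte)((m * k) / 255);
            b = (byte)((y * k) / 255);
        }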
Example #3
        /**************** YCbCr -> RGB conversion: most common case **************/
        /*************** BG_YCC -> RGB conversion: less common case **************/
        /***************    RGB -> Y   conversion: less common case **************/

        /*
         * YCbCr is defined per Recommendation ITU-R BT.601-7 (03/2011),
         * previously known as Recommendation CCIR 601-1, except that Cb and Cr
         * are normalized to the range 0..MAXJSAMPLE rather than -0.5 .. 0.5.
         * sRGB (standard RGB color space) is defined per IEC 61966-2-1:1999.
         * sYCC (standard luma-chroma-chroma color space with extended gamut)
         * is defined per IEC 61966-2-1:1999 Amendment A1:2003 Annex F.
         * bg-sRGB and bg-sYCC (big gamut standard color spaces)
         * are defined per IEC 61966-2-1:1999 Amendment A1:2003 Annex G.
         * Note that the derived conversion coefficients given in some of these
         * documents are imprecise.  The general conversion equations are
         *
         *	R = Y + K * (1 - Kr) * Cr
         *	G = Y - K * (Kb * (1 - Kb) * Cb + Kr * (1 - Kr) * Cr) / (1 - Kr - Kb)
         *	B = Y + K * (1 - Kb) * Cb
         *
         *	Y = Kr * R + (1 - Kr - Kb) * G + Kb * B
         *
         * With Kr = 0.299 and Kb = 0.114 (derived according to SMPTE RP 177-1993
         * from the 1953 FCC NTSC primaries and CIE Illuminant C), K = 2 for sYCC,
         * the conversion equations to be implemented are therefore
         *
         *	R = Y + 1.402 * Cr
         *	G = Y - 0.344136286 * Cb - 0.714136286 * Cr
         *	B = Y + 1.772 * Cb
         *
         *	Y = 0.299 * R + 0.587 * G + 0.114 * B
         *
         * where Cb and Cr represent the incoming values less CENTERJSAMPLE.
         * For bg-sYCC, with K = 4, the equations are
         *
         *	R = Y + 2.804 * Cr
         *	G = Y - 0.688272572 * Cb - 1.428272572 * Cr
         *	B = Y + 3.544 * Cb
         *
         * To avoid floating-point arithmetic, we represent the fractional constants
         * as integers scaled up by 2^16 (about 4 digits precision); we have to divide
         * the products by 2^16, with appropriate rounding, to get the correct answer.
         * Notice that Y, being an integral input, does not contribute any fraction
         * so it need not participate in the rounding.
         *
         * For even more speed, we avoid doing any multiplications in the inner loop
         * by precalculating the constants times Cb and Cr for all possible values.
         * For 8-bit JSAMPLEs this is very reasonable (only 256 entries per table);
         * for 9-bit to 12-bit samples it is still acceptable.  It's not very
         * reasonable for 16-bit samples, but if you want lossless storage you
         * shouldn't be changing colorspace anyway.
         * The Cr=>R and Cb=>B values can be rounded to integers in advance; the
         * values for the G calculation are left scaled up, since we must add them
         * together before rounding.
         */
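As a check on the derived constants, substituting Kr = 0.299, Kb = 0.114 and K = 2 into the general equations above reproduces the sYCC values used by the tables below:

        K * (1 - Kr)                      = 2 * 0.701                 = 1.402
        K * Kb * (1 - Kb) / (1 - Kr - Kb) = 2 * 0.114 * 0.886 / 0.587 = 0.344136286
        K * Kr * (1 - Kr) / (1 - Kr - Kb) = 2 * 0.299 * 0.701 / 0.587 = 0.714136286
        K * (1 - Kb)                      = 2 * 0.886                 = 1.772

With K = 4 the same substitutions give the bg-sYCC constants (2.804, 0.688272572, 1.428272572 and 3.544).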

        /// <summary>
        /// Initialize tables for YCbCr->RGB colorspace conversion.
        /// </summary>
        private void BuildYccRgbTable()
        {
            /* Normal case, sYCC */
            m_Cr_r_tab = new int[JpegConstants.MAXJSAMPLE + 1];
            m_Cb_b_tab = new int[JpegConstants.MAXJSAMPLE + 1];
            m_Cr_g_tab = new int[JpegConstants.MAXJSAMPLE + 1];
            m_Cb_g_tab = new int[JpegConstants.MAXJSAMPLE + 1];

            for (int i = 0, x = -JpegConstants.CENTERJSAMPLE; i <= JpegConstants.MAXJSAMPLE; i++, x++)
            {
                /* i is the actual input pixel value, in the range 0..MAXJSAMPLE */
                /* The Cb or Cr value we are thinking of is x = i - CENTERJSAMPLE */
                /* Cr=>R value is nearest int to 1.402 * x */
                m_Cr_r_tab[i] = JpegUtils.RIGHT_SHIFT((FIX(1.402) * x) + ONE_HALF, SCALEBITS);

                /* Cb=>B value is nearest int to 1.772 * x */
                m_Cb_b_tab[i] = JpegUtils.RIGHT_SHIFT((FIX(1.772) * x) + ONE_HALF, SCALEBITS);

                /* Cr=>G value is scaled-up -0.714136286 * x */
                m_Cr_g_tab[i] = -FIX(0.714136286) * x;

                /* Cb=>G value is scaled-up -0.344136286 * x */
                /* We also add in ONE_HALF so that we need not do it in the inner loop */
                m_Cb_g_tab[i] = (-FIX(0.344136286) * x) + ONE_HALF;
            }
        }
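BuildYccRgbTable relies on FIX, ONE_HALF and SCALEBITS, which are not shown in this snippet. Based on the comment above (fractional constants scaled by 2^16, products rounded before the right shift), they are presumably defined along these lines; the exact form is an assumption:

        /* Presumed definitions of the fixed-point helpers used above (the real
         * ones are not part of this snippet): constants are scaled by 2^SCALEBITS
         * and ONE_HALF provides the rounding term before the right shift.
         */
        private const int SCALEBITS = 16;
        private const int ONE_HALF  = 1 << (SCALEBITS - 1);

        private static int FIX(double x)
        {
            return (int)((x * (1 << SCALEBITS)) + 0.5);
        }

With these definitions m_Cr_r_tab[i] holds the nearest integer to 1.402 * (i - CENTERJSAMPLE), matching the per-entry comments.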
Example #4
        /// <summary>
        /// Initialize tables for BG_YCC->RGB colorspace conversion.
        /// </summary>
        private void BuildBgYccRgbTable()
        {
            /* Wide gamut case, bg-sYCC */
            m_Cr_r_tab = new int[JpegConstants.MAXJSAMPLE + 1];
            m_Cb_b_tab = new int[JpegConstants.MAXJSAMPLE + 1];
            m_Cr_g_tab = new int[JpegConstants.MAXJSAMPLE + 1];
            m_Cb_g_tab = new int[JpegConstants.MAXJSAMPLE + 1];

            for (int i = 0, x = -JpegConstants.CENTERJSAMPLE; i <= JpegConstants.MAXJSAMPLE; i++, x++)
            {
                /* i is the actual input pixel value, in the range 0..MAXJSAMPLE */
                /* The Cb or Cr value we are thinking of is x = i - CENTERJSAMPLE */
                /* Cr=>R value is nearest int to 2.804 * x */
                m_Cr_r_tab[i] = JpegUtils.RIGHT_SHIFT((FIX(2.804) * x) + ONE_HALF, SCALEBITS);

                /* Cb=>B value is nearest int to 3.544 * x */
                m_Cb_b_tab[i] = JpegUtils.RIGHT_SHIFT((FIX(3.544) * x) + ONE_HALF, SCALEBITS);

                /* Cr=>G value is scaled-up -1.428272572 * x */
                m_Cr_g_tab[i] = -FIX(1.428272572) * x;

                /* Cb=>G value is scaled-up -0.688272572 * x */
                /* We also add in ONE_HALF so that we need not do it in the inner loop */
                m_Cb_g_tab[i] = (-FIX(0.688272572) * x) + ONE_HALF;
            }
        }
Example #5
        /// <summary>
        /// This version handles any integral sampling ratios.
        /// This is not used for typical JPEG files, so it need not be fast.
        /// Nor, for that matter, is it particularly accurate: the algorithm is
        /// simple replication of the input pixel onto the corresponding output
        /// pixels.  The hi-falutin sampling literature refers to this as a
        /// "box filter".  A box filter tends to introduce visible artifacts,
        /// so if you are actually going to use 3:1 or 4:1 sampling ratios
        /// you would be well advised to improve this code.
        /// </summary>
        private void int_upsample(ref ComponentBuffer input_data)
        {
            ComponentBuffer output_data = m_color_buf[m_currentComponent];
            int             h_expand    = m_h_expand[m_currentComponent];
            int             v_expand    = m_v_expand[m_currentComponent];

            int inrow  = 0;
            int outrow = 0;

            while (outrow < m_cinfo.m_max_v_samp_factor)
            {
                /* Generate one output row with proper horizontal expansion */
                int row = m_upsampleRowOffset + inrow;
                int outIndex = 0;
                for (int col = 0; outIndex < m_cinfo.m_output_width; col++)
                {
                    byte invalue = input_data[row][col]; /* don't need GETJSAMPLE() here */
                    for (int h = h_expand; h > 0; h--)
                    {
                        output_data[outrow][outIndex] = invalue;
                        outIndex++;
                    }
                }

                /* Generate any additional output rows by duplicating the first one */
                if (v_expand > 1)
                {
                    JpegUtils.jcopy_sample_rows(output_data, outrow, output_data,
                                                outrow + 1, v_expand - 1, m_cinfo.m_output_width);
                }

                inrow++;
                outrow += v_expand;
            }
        }
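The same replication for a single row, written as a stand-alone sketch over plain arrays (hypothetical, not part of the library), shows the box-filter behavior directly: with hExpand = 3 an input row { 10, 20 } becomes { 10, 10, 10, 20, 20, 20 }.

        private static byte[] ReplicateRow(byte[] input, int hExpand, int outputWidth)
        {
            var output = new byte[outputWidth];
            int outIndex = 0;
            for (int col = 0; outIndex < outputWidth; col++)
            {
                // Copy one input sample onto the next hExpand output positions,
                // stopping at the true output width (it need not be a multiple).
                for (int h = hExpand; h > 0 && outIndex < outputWidth; h--)
                {
                    output[outIndex++] = input[col];
                }
            }

            return output;
        }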
Example #6
        /// <summary>
        /// Fast processing for the common case of 2:1 horizontal and 2:1 vertical.
        /// It's still a box filter.
        /// </summary>
        private void H2V2UpSample(ref ComponentBuffer input_data)
        {
            var output_data = m_color_buf[m_currentComponent];

            var inrow  = 0;
            var outrow = 0;

            while (outrow < m_cinfo.m_maxVSampleFactor)
            {
                var row      = m_upsampleRowOffset + inrow;
                var outIndex = 0;

                for (var col = 0; outIndex < m_cinfo.outputWidth; col++)
                {
                    var invalue = input_data[row][col]; /* don't need GETJSAMPLE() here */
                    output_data[outrow][outIndex] = invalue;
                    outIndex++;
                    output_data[outrow][outIndex] = invalue;
                    outIndex++;
                }

                JpegUtils.jcopy_sample_rows(output_data, outrow, output_data, outrow + 1, 1, m_cinfo.outputWidth);
                inrow++;
                outrow += 2;
            }
        }
Example #7
        private int m_next_row;                   /* index of next row to fill/empty in strip */

        /// <summary>
        /// Initialize postprocessing controller.
        /// </summary>
        public jpeg_d_post_controller(jpeg_decompress_struct cinfo, bool need_full_buffer)
        {
            m_cinfo = cinfo;

            /* Create the quantization buffer, if needed */
            if (cinfo.m_quantize_colors)
            {
                /* The buffer strip height is max_v_samp_factor, which is typically
                 * an efficient number of rows for upsampling to return.
                 * (In the presence of output rescaling, we might want to be smarter?)
                 */
                m_strip_height = cinfo.m_max_v_samp_factor;

                if (need_full_buffer)
                {
                    /* Two-pass color quantization: need full-image storage. */
                    /* We round up the number of rows to a multiple of the strip height. */
                    m_whole_image = jpeg_common_struct.CreateSamplesArray(
                        cinfo.m_output_width * cinfo.m_out_color_components,
                        JpegUtils.jround_up(cinfo.m_output_height, m_strip_height));
                    m_whole_image.ErrorProcessor = cinfo;
                }
                else
                {
                    /* One-pass color quantization: just make a strip buffer. */
                    m_buffer = jpeg_common_struct.AllocJpegSamples(
                        cinfo.m_output_width * cinfo.m_out_color_components, m_strip_height);
                }
            }
        }
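The full-image branch rounds the row count up to a multiple of the strip height with JpegUtils.jround_up. Its presumed behavior (the actual implementation is not shown here) is simply:

        // Presumed behavior of JpegUtils.jround_up: round a up to the next
        // multiple of b, e.g. jround_up(601, 8) == 608.
        private static int jround_up(int a, int b)
        {
            a += b - 1;
            return a - (a % b);
        }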
Example #8
        /// <summary>
        /// Control routine to do upsampling (and color conversion).
        /// The control routine just handles the row buffering considerations.
        /// 2:1 vertical sampling case: may need a spare row.
        /// </summary>
        private void merged_2v_upsample(ComponentBuffer[] input_buf, ref int in_row_group_ctr, byte[][] output_buf, ref int out_row_ctr, int out_rows_avail)
        {
            int num_rows;        /* number of rows returned to caller */

            if (m_spare_full)
            {
                /* If we have a spare row saved from a previous cycle, just return it. */
                byte[][] temp = new byte[1][];
                temp[0] = m_spare_row;
                JpegUtils.jcopy_sample_rows(temp, 0, output_buf, out_row_ctr, 1, m_out_row_width);
                num_rows     = 1;
                m_spare_full = false;
            }
            else
            {
                /* Figure number of rows to return to caller. */
                num_rows = 2;

                /* Not more than the distance to the end of the image. */
                if (num_rows > m_rows_to_go)
                {
                    num_rows = m_rows_to_go;
                }

                /* And not more than what the client can accept: */
                out_rows_avail -= out_row_ctr;
                if (num_rows > out_rows_avail)
                {
                    num_rows = out_rows_avail;
                }

                /* Create output pointer array for upsampler. */
                byte[][] work_ptrs = new byte[2][];
                work_ptrs[0] = output_buf[out_row_ctr];
                if (num_rows > 1)
                {
                    work_ptrs[1] = output_buf[out_row_ctr + 1];
                }
                else
                {
                    work_ptrs[1] = m_spare_row;
                    m_spare_full = true;
                }

                /* Now do the upsampling. */
                h2v2_merged_upsample(input_buf, in_row_group_ctr, work_ptrs);
            }

            /* Adjust counts */
            out_row_ctr  += num_rows;
            m_rows_to_go -= num_rows;

            /* When the buffer is emptied, declare this input row group consumed */
            if (!m_spare_full)
            {
                in_row_group_ctr++;
            }
        }
Example #9
        /*
         * These are the routines invoked by the control routines to do
         * the actual upsampling/conversion.  One row group is processed per call.
         *
         * Note: since we may be writing directly into application-supplied buffers,
         * we have to be honest about the output width; we can't assume the buffer
         * has been rounded up to an even width.
         */

        /// <summary>
        /// Upsample and color convert for the case of 2:1 horizontal and 1:1 vertical.
        /// </summary>
        private void h2v1_merged_upsample(ComponentBuffer[] input_buf, int in_row_group_ctr, byte[][] output_buf, int outRow)
        {
            int inputIndex0 = 0;
            int inputIndex1 = 0;
            int inputIndex2 = 0;
            int outputIndex = 0;

            byte[] limit       = m_cinfo.m_sample_range_limit;
            int    limitOffset = m_cinfo.m_sampleRangeLimitOffset;

            /* Loop for each pair of output pixels */
            for (int col = m_cinfo.m_output_width >> 1; col > 0; col--)
            {
                /* Do the chroma part of the calculation */
                int cb = input_buf[1][in_row_group_ctr][inputIndex1];
                inputIndex1++;

                int cr = input_buf[2][in_row_group_ctr][inputIndex2];
                inputIndex2++;

                int cred   = m_Cr_r_tab[cr];
                int cgreen = JpegUtils.RIGHT_SHIFT(m_Cb_g_tab[cb] + m_Cr_g_tab[cr], SCALEBITS);
                int cblue  = m_Cb_b_tab[cb];

                /* Fetch 2 Y values and emit 2 pixels */
                int y = input_buf[0][in_row_group_ctr][inputIndex0];
                inputIndex0++;

                output_buf[outRow][outputIndex + JpegConstants.RGB_RED]   = limit[limitOffset + y + cred];
                output_buf[outRow][outputIndex + JpegConstants.RGB_GREEN] = limit[limitOffset + y + cgreen];
                output_buf[outRow][outputIndex + JpegConstants.RGB_BLUE]  = limit[limitOffset + y + cblue];
                outputIndex += JpegConstants.RGB_PIXELSIZE;

                y = input_buf[0][in_row_group_ctr][inputIndex0];
                inputIndex0++;

                output_buf[outRow][outputIndex + JpegConstants.RGB_RED]   = limit[limitOffset + y + cred];
                output_buf[outRow][outputIndex + JpegConstants.RGB_GREEN] = limit[limitOffset + y + cgreen];
                output_buf[outRow][outputIndex + JpegConstants.RGB_BLUE]  = limit[limitOffset + y + cblue];
                outputIndex += JpegConstants.RGB_PIXELSIZE;
            }

            /* If image width is odd, do the last output column separately */
            if ((m_cinfo.m_output_width & 1) != 0)
            {
                int cb     = input_buf[1][in_row_group_ctr][inputIndex1];
                int cr     = input_buf[2][in_row_group_ctr][inputIndex2];
                int cred   = m_Cr_r_tab[cr];
                int cgreen = JpegUtils.RIGHT_SHIFT(m_Cb_g_tab[cb] + m_Cr_g_tab[cr], SCALEBITS);
                int cblue  = m_Cb_b_tab[cb];

                int y = input_buf[0][in_row_group_ctr][inputIndex0];
                output_buf[outRow][outputIndex + JpegConstants.RGB_RED]   = limit[limitOffset + y + cred];
                output_buf[outRow][outputIndex + JpegConstants.RGB_GREEN] = limit[limitOffset + y + cgreen];
                output_buf[outRow][outputIndex + JpegConstants.RGB_BLUE]  = limit[limitOffset + y + cblue];
            }
        }
Example #10
        private static void RotateCoefficients(string original_path, RotateDirection direction)
        {
            string temporary_path = original_path + ".tmp";             // FIXME make it unique

            JpegUtils.Transform(original_path, temporary_path,
                                direction == RotateDirection.Clockwise ? JpegUtils.TransformType.Rotate90
                                             : JpegUtils.TransformType.Rotate270);

            Unix.Rename(temporary_path, original_path);
        }
Example #11
        /// <summary>
        /// Downsample pixel values of a single component.
        /// This version handles the special case of a full-size component,
        /// without smoothing.
        /// </summary>
        private void FullsizeDownsample(int componentIndex, byte[][] input_data, int startInputRow, byte[][] output_data, int startOutRow)
        {
            /* Copy the data */
            JpegUtils.jcopy_sample_rows(input_data, startInputRow, output_data, startOutRow, m_cinfo.m_max_v_samp_factor, m_cinfo.m_image_width);

            /* Edge-expand */
            var compptr = m_cinfo.Component_info[componentIndex];

            ExpandRightEdge(output_data, startOutRow, m_cinfo.m_max_v_samp_factor,
                            m_cinfo.m_image_width, compptr.Width_in_blocks * compptr.DCT_h_scaled_size);
        }
Example #12
    static public Pixbuf LoadAtMaxSize(string path, int max_width, int max_height)
    {
#if true
        PixbufUtils.AspectLoader loader = new AspectLoader(max_width, max_height);
        return(loader.LoadFromFile(path));
#else
        int width, height;
        JpegUtils.GetSize(path, out width, out height);
        PixbufUtils.Fit(width, height, max_width, max_height, false, out width, out height);
        Gdk.Pixbuf image = JpegUtils.LoadScaled(path, width, height);

        return(image);
#endif
    }
Example #13
            public ImageInfo(ImageFile img)
            {
                // FIXME We use the memory store to hold the anonymous statements
                // as they are added so that we can query for them later to
                // resolve anonymous nodes.
                store = new MemoryStore();

                if (img == null)
                {
                    return;
                }

                if (img is StatementSource)
                {
                    SemWeb.StatementSource source = (SemWeb.StatementSource)img;
                    source.Select(this);

                    // If we couldn't find the ISO speed because of the ordering,
                    // search the memory store for the values.
                    if (iso_speed == null && iso_anon != null)
                    {
                        add = false;
                        store.Select(this);
                    }
                }

                if (img is JpegFile)
                {
                    int real_width;
                    int real_height;

                    JpegUtils.GetSize(img.Uri.LocalPath, out real_width, out real_height);
                    width  = real_width.ToString();
                    height = real_height.ToString();
                }
        #if USE_EXIF_DATE
                date = img.Date.ToLocalTime();
        #endif
            }
Example #14
        /// <summary>
        /// Creates an example image filled with a random color.
        /// </summary>
        /// <returns>Randomly colored bitmap.</returns>
        private static Bitmap GetImage()
        {
            Bitmap bitmap = new Bitmap(720, 400, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            //Create random generator
            Random random = new Random();
            //Create new graphics object
            Graphics graphics = Graphics.FromImage(bitmap);
            //Create color
            int r = random.Next(0, 255);
            int g = random.Next(0, 255);
            int b = random.Next(0, 255);

            //Fill the bitmap with the random color
            graphics.FillRectangle(new SolidBrush(Color.FromArgb(r, g, b)), 0, 0, bitmap.Width, bitmap.Height);
            //Create image stream
            MemoryStream imageStream = new MemoryStream();
            //Save image
            EncoderParameters encoderParameters = new EncoderParameters(1);

            encoderParameters.Param[0] = new EncoderParameter(System.Drawing.Imaging.Encoder.Quality, 75L);
            bitmap.Save(imageStream, JpegUtils.GetEncoder(ImageFormat.Jpeg), encoderParameters);

            return(bitmap);
        }
Example #15
        /// <summary>
        /// Color conversion for grayscale: just copy the data.
        /// This also works for YCC -> grayscale conversion, in which
        /// we just copy the Y (luminance) component and ignore chrominance.
        /// </summary>
        private void GrayscaleConvert(ComponentBuffer[] input_buf, int input_row, byte[][] output_buf, int output_row, int num_rows)
        {
            JpegUtils.jcopy_sample_rows(input_buf[0], input_row + m_perComponentOffsets[0], output_buf, output_row, num_rows, m_cinfo.outputWidth);
        }
Example #16
        public bool Convert(FilterRequest req)
        {
            string source = req.Current.LocalPath;

            System.Uri dest_uri = req.TempUri(System.IO.Path.GetExtension(source));
            string     dest     = dest_uri.LocalPath;

            using (ImageFile img = ImageFile.Create(source)) {
                bool changed = false;

                if (img.Orientation != PixbufOrientation.TopLeft && img is JpegFile)
                {
                    JpegFile jimg = img as JpegFile;

                    if (img.Orientation == PixbufOrientation.RightTop)
                    {
                        JpegUtils.Transform(source,
                                            dest,
                                            JpegUtils.TransformType.Rotate90);
                        changed = true;
                    }
                    else if (img.Orientation == PixbufOrientation.LeftBottom)
                    {
                        JpegUtils.Transform(source,
                                            dest,
                                            JpegUtils.TransformType.Rotate270);
                        changed = true;
                    }
                    else if (img.Orientation == PixbufOrientation.BottomRight)
                    {
                        JpegUtils.Transform(source,
                                            dest,
                                            JpegUtils.TransformType.Rotate180);
                        changed = true;
                    }

                    int width, height;

                    jimg = ImageFile.Create(dest) as JpegFile;

                    PixbufUtils.GetSize(dest, out width, out height);

                    jimg.SetOrientation(PixbufOrientation.TopLeft);
                    jimg.SetDimensions(width, height);

                    Gdk.Pixbuf pixbuf = new Gdk.Pixbuf(dest, 160, 120, true);
                    jimg.SetThumbnail(pixbuf);
                    pixbuf.Dispose();

                    jimg.SaveMetaData(dest);
                    jimg.Dispose();
                }

                if (changed)
                {
                    req.Current = dest_uri;
                }

                return(changed);
            }
        }
Example #17
        private void YccRgbConvert(ComponentBuffer[] input_buf, int input_row, byte[][] output_buf, int output_row, int num_rows)
        {
            var component0RowOffset = m_perComponentOffsets[0];
            var component1RowOffset = m_perComponentOffsets[1];
            var component2RowOffset = m_perComponentOffsets[2];

            var limit       = m_cinfo.m_sample_range_limit;
            var limitOffset = m_cinfo.m_sampleRangeLimitOffset;

            for (var row = 0; row < num_rows; row++)
            {
                var columnOffset = 0;
                for (var col = 0; col < m_cinfo.outputWidth; col++)
                {
                    int y  = input_buf[0][input_row + component0RowOffset][col];
                    int cb = input_buf[1][input_row + component1RowOffset][col];
                    int cr = input_buf[2][input_row + component2RowOffset][col];

                    /* Range-limiting is essential due to noise introduced by DCT losses,
                     * and for extended gamut (sYCC) and wide gamut (bg-sYCC) encodings.
                     */
                    output_buf[output_row + row][columnOffset + JpegConstants.RGB_RED]   = limit[limitOffset + y + m_Cr_r_tab[cr]];
                    output_buf[output_row + row][columnOffset + JpegConstants.RGB_GREEN] = limit[limitOffset + y + JpegUtils.RIGHT_SHIFT(m_Cb_g_tab[cb] + m_Cr_g_tab[cr], SCALEBITS)];
                    output_buf[output_row + row][columnOffset + JpegConstants.RGB_BLUE]  = limit[limitOffset + y + m_Cb_b_tab[cb]];
                    columnOffset += JpegConstants.RGB_PIXELSIZE;
                }

                input_row++;
            }
        }
Example #18
        /// <summary>
        /// Adobe-style YCCK->CMYK conversion.
        /// We convert YCbCr to R=1-C, G=1-M, and B=1-Y using the same
        /// conversion as above, while passing K (black) unchanged.
        /// We assume build_ycc_rgb_table has been called.
        /// </summary>
        private void YcckCmykConvert(ComponentBuffer[] input_buf, int input_row, byte[][] output_buf, int output_row, int num_rows)
        {
            var component0RowOffset = m_perComponentOffsets[0];
            var component1RowOffset = m_perComponentOffsets[1];
            var component2RowOffset = m_perComponentOffsets[2];
            var component3RowOffset = m_perComponentOffsets[3];

            var limit       = m_cinfo.m_sample_range_limit;
            var limitOffset = m_cinfo.m_sampleRangeLimitOffset;

            var num_cols = m_cinfo.outputWidth;

            for (var row = 0; row < num_rows; row++)
            {
                var columnOffset = 0;
                for (var col = 0; col < num_cols; col++)
                {
                    int y  = input_buf[0][input_row + component0RowOffset][col];
                    int cb = input_buf[1][input_row + component1RowOffset][col];
                    int cr = input_buf[2][input_row + component2RowOffset][col];

                    /* Range-limiting is essential due to noise introduced by DCT losses,
                     * and for extended gamut encodings (sYCC).
                     */
                    output_buf[output_row + row][columnOffset]     = limit[limitOffset + JpegConstants.MAXJSAMPLE - (y + m_Cr_r_tab[cr])];                                                    /* red */
                    output_buf[output_row + row][columnOffset + 1] = limit[limitOffset + JpegConstants.MAXJSAMPLE - (y + JpegUtils.RIGHT_SHIFT(m_Cb_g_tab[cb] + m_Cr_g_tab[cr], SCALEBITS))]; /* green */
                    output_buf[output_row + row][columnOffset + 2] = limit[limitOffset + JpegConstants.MAXJSAMPLE - (y + m_Cb_b_tab[cb])];                                                    /* blue */

                    /* K passes through unchanged */
                    /* don't need GETJSAMPLE here */
                    output_buf[output_row + row][columnOffset + 3] = input_buf[3][input_row + component3RowOffset][col];
                    columnOffset += 4;
                }

                input_row++;
            }
        }
Example #19
        public my_upsampler(jpeg_decompress_struct cinfo)
        {
            m_cinfo             = cinfo;
            m_need_context_rows = false;  /* until we find out differently */

            if (cinfo.m_CCIR601_sampling) /* this isn't supported */
            {
                cinfo.ERREXIT(J_MESSAGE_CODE.JERR_CCIR601_NOTIMPL);
            }

            /* Verify we can handle the sampling factors, select per-component methods,
             * and create storage as needed.
             */
            for (int ci = 0; ci < cinfo.m_num_components; ci++)
            {
                jpeg_component_info componentInfo = cinfo.Comp_info[ci];

                /* Compute size of an "input group" after IDCT scaling.  This many samples
                 * are to be converted to max_h_samp_factor * max_v_samp_factor pixels.
                 */
                int h_in_group  = (componentInfo.H_samp_factor * componentInfo.DCT_h_scaled_size) / cinfo.min_DCT_h_scaled_size;
                int v_in_group  = (componentInfo.V_samp_factor * componentInfo.DCT_v_scaled_size) / cinfo.min_DCT_v_scaled_size;
                int h_out_group = cinfo.m_max_h_samp_factor;
                int v_out_group = cinfo.m_max_v_samp_factor;

                /* save for use later */
                m_rowgroup_height[ci] = v_in_group;

                if (!componentInfo.component_needed)
                {
                    /* Don't bother to upsample an uninteresting component. */
                    m_upsampleMethods[ci] = ComponentUpsampler.noop_upsampler;
                    continue;           /* don't need to allocate buffer */
                }

                if (h_in_group == h_out_group && v_in_group == v_out_group)
                {
                    /* Fullsize components can be processed without any work. */
                    m_upsampleMethods[ci] = ComponentUpsampler.fullsize_upsampler;
                    continue;           /* don't need to allocate buffer */
                }

                if (h_in_group * 2 == h_out_group && v_in_group == v_out_group)
                {
                    /* Special case for 2h1v upsampling */
                    m_upsampleMethods[ci] = ComponentUpsampler.h2v1_upsampler;
                }
                else if (h_in_group * 2 == h_out_group && v_in_group * 2 == v_out_group)
                {
                    /* Special case for 2h2v upsampling */
                    m_upsampleMethods[ci] = ComponentUpsampler.h2v2_upsampler;
                }
                else if ((h_out_group % h_in_group) == 0 && (v_out_group % v_in_group) == 0)
                {
                    /* Generic integral-factors upsampling method */
                    m_upsampleMethods[ci] = ComponentUpsampler.int_upsampler;
                    m_h_expand[ci]        = (byte)(h_out_group / h_in_group);
                    m_v_expand[ci]        = (byte)(v_out_group / v_in_group);
                }
                else
                {
                    cinfo.ERREXIT(J_MESSAGE_CODE.JERR_FRACT_SAMPLE_NOTIMPL);
                }

                ComponentBuffer cb = new ComponentBuffer();
                cb.SetBuffer(jpeg_common_struct.AllocJpegSamples(
                                 JpegUtils.jround_up(cinfo.m_output_width,
                                                     cinfo.m_max_h_samp_factor), cinfo.m_max_v_samp_factor));

                m_color_buf[ci] = cb;
            }
        }
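As a concrete illustration of the group-size arithmetic, consider a typical 4:2:0 file decoded at full scale, so every DCT_h_scaled_size and DCT_v_scaled_size equals the minimum value of 8:

        luma   (H_samp_factor = V_samp_factor = 2): h_in_group = 2 * 8 / 8 = 2 and v_in_group = 2, equal to the output groups, so fullsize_upsampler is selected;
        chroma (H_samp_factor = V_samp_factor = 1): h_in_group = v_in_group = 1, and 1 * 2 matches both output groups, so h2v2_upsampler is selected.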
Example #20
        /// <summary>
        /// Upsample and color convert for the case of 2:1 horizontal and 2:1 vertical.
        /// </summary>
        private void H2V2MergedUpSample(ComponentBuffer[] input_buf, int in_row_group_ctr, byte[][] output_buf)
        {
            var inputRow00   = in_row_group_ctr * 2;
            var inputIndex00 = 0;

            var inputRow01   = (in_row_group_ctr * 2) + 1;
            var inputIndex01 = 0;

            var inputIndex1 = 0;
            var inputIndex2 = 0;

            var outIndex0 = 0;
            var outIndex1 = 0;

            var limit       = m_cinfo.m_sample_range_limit;
            var limitOffset = m_cinfo.m_sampleRangeLimitOffset;

            /* Loop for each group of output pixels */
            for (var col = m_cinfo.outputWidth >> 1; col > 0; col--)
            {
                /* Do the chroma part of the calculation */
                int cb = input_buf[1][in_row_group_ctr][inputIndex1];
                inputIndex1++;

                int cr = input_buf[2][in_row_group_ctr][inputIndex2];
                inputIndex2++;

                var cred   = m_Cr_r_tab[cr];
                var cgreen = JpegUtils.RIGHT_SHIFT(m_Cb_g_tab[cb] + m_Cr_g_tab[cr], SCALEBITS);
                var cblue  = m_Cb_b_tab[cb];

                /* Fetch 4 Y values and emit 4 pixels */
                int y = input_buf[0][inputRow00][inputIndex00];
                inputIndex00++;

                output_buf[0][outIndex0 + JpegConstants.RGB_RED]   = limit[limitOffset + y + cred];
                output_buf[0][outIndex0 + JpegConstants.RGB_GREEN] = limit[limitOffset + y + cgreen];
                output_buf[0][outIndex0 + JpegConstants.RGB_BLUE]  = limit[limitOffset + y + cblue];
                outIndex0 += JpegConstants.RGB_PIXELSIZE;

                y = input_buf[0][inputRow00][inputIndex00];
                inputIndex00++;

                output_buf[0][outIndex0 + JpegConstants.RGB_RED]   = limit[limitOffset + y + cred];
                output_buf[0][outIndex0 + JpegConstants.RGB_GREEN] = limit[limitOffset + y + cgreen];
                output_buf[0][outIndex0 + JpegConstants.RGB_BLUE]  = limit[limitOffset + y + cblue];
                outIndex0 += JpegConstants.RGB_PIXELSIZE;

                y = input_buf[0][inputRow01][inputIndex01];
                inputIndex01++;

                output_buf[1][outIndex1 + JpegConstants.RGB_RED]   = limit[limitOffset + y + cred];
                output_buf[1][outIndex1 + JpegConstants.RGB_GREEN] = limit[limitOffset + y + cgreen];
                output_buf[1][outIndex1 + JpegConstants.RGB_BLUE]  = limit[limitOffset + y + cblue];
                outIndex1 += JpegConstants.RGB_PIXELSIZE;

                y = input_buf[0][inputRow01][inputIndex01];
                inputIndex01++;

                output_buf[1][outIndex1 + JpegConstants.RGB_RED]   = limit[limitOffset + y + cred];
                output_buf[1][outIndex1 + JpegConstants.RGB_GREEN] = limit[limitOffset + y + cgreen];
                output_buf[1][outIndex1 + JpegConstants.RGB_BLUE]  = limit[limitOffset + y + cblue];
                outIndex1 += JpegConstants.RGB_PIXELSIZE;
            }

            /* If image width is odd, do the last output column separately */
            if ((m_cinfo.outputWidth & 1) != 0)
            {
                int cb     = input_buf[1][in_row_group_ctr][inputIndex1];
                int cr     = input_buf[2][in_row_group_ctr][inputIndex2];
                var cred   = m_Cr_r_tab[cr];
                var cgreen = JpegUtils.RIGHT_SHIFT(m_Cb_g_tab[cb] + m_Cr_g_tab[cr], SCALEBITS);
                var cblue  = m_Cb_b_tab[cb];

                int y = input_buf[0][inputRow00][inputIndex00];
                output_buf[0][outIndex0 + JpegConstants.RGB_RED]   = limit[limitOffset + y + cred];
                output_buf[0][outIndex0 + JpegConstants.RGB_GREEN] = limit[limitOffset + y + cgreen];
                output_buf[0][outIndex0 + JpegConstants.RGB_BLUE]  = limit[limitOffset + y + cblue];

                y = input_buf[0][inputRow01][inputIndex01];
                output_buf[1][outIndex1 + JpegConstants.RGB_RED]   = limit[limitOffset + y + cred];
                output_buf[1][outIndex1 + JpegConstants.RGB_GREEN] = limit[limitOffset + y + cgreen];
                output_buf[1][outIndex1 + JpegConstants.RGB_BLUE]  = limit[limitOffset + y + cblue];
            }
        }