Exemplo n.º 1
0
        /// <summary>
        /// Reads the next frame from the underlying stream into the supplied YUV instance.
        /// </summary>
        /// <param name="img">Destination image whose Y/U/V buffers receive the frame data.</param>
        /// <returns>The same instance passed in, with the frame data filled in.</returns>
        /// <exception cref="YUVIOException">Thrown when all frames have already been read.</exception>
        public YUV ReadImage(YUV img)
        {
            if (MaxFrameCount < ReadFrameCount)
            {
                throw new YUVIOException("これ以上画像を読み込めません");
            }

            ReadFrameCount++;
            try
            {
                // NOTE(review): Stream.Read may return fewer bytes than requested;
                // this code assumes the stream always delivers a full plane per call
                // — TODO confirm, or loop until each plane is completely filled.
                fs.Read(img.Y, 0, Pixel);
                fs.Read(img.U, 0, Pixelc);
                fs.Read(img.V, 0, Pixelc);
            }
            catch (IndexOutOfRangeException)
            {
                Console.WriteLine("画像領域の領域外を参照しました");
                throw;
            }
            // The original's `catch (Exception) { throw; }` was a no-op and has been removed;
            // other exceptions now propagate unchanged.

            return img;
        }
Exemplo n.º 2
0
        /// <summary>
        /// Takes a YUV instance with ColorFormat 444 and creates a new instance with
        /// ColorFormat 420, downsampling chroma by rounded 2x2 averaging.
        /// </summary>
        /// <param name="img">Source YUV instance with ColorFormat 444.</param>
        /// <param name="param">0: Y component only (chroma filled with neutral 128); non-zero: Y, U and V are all converted.</param>
        /// <returns>A new 4:2:0 image, or the input unchanged if it is already 4:2:0.</returns>
        public static YUV TransImage444To420(YUV img, int param)
        {
            if (img.ColorFormat == 420)
            {
                Console.WriteLine("すでにYUV420のフォーマットが指定されています");
                return img;
            }

            int width   = img.Width;
            int height  = img.Height;
            int widthc  = width / 2;
            int heightc = height / 2;

            // BUG FIX: the result is a 4:2:0 image, so it must be created with
            // ColorFormat 420. The original passed 444, which mis-tagged the
            // output and broke later format checks (e.g. TransImage420To444
            // rejects inputs whose ColorFormat is already 444).
            YUV outImg = new YUV(width, height, 420);

            // Luma is copied through unchanged.
            for (int k = 0; k < height; k++)
            {
                for (int j = 0; j < width; j++)
                {
                    outImg.Y[j + k * width] = img.Y[j + k * width];
                }
            }

            if (param == 0)
            {
                // Neutral chroma (128 = no colour) when only luma is requested.
                for (int k = 0; k < heightc; k++)
                {
                    for (int j = 0; j < widthc; j++)
                    {
                        outImg.U[j + k * widthc] = 128;
                        outImg.V[j + k * widthc] = 128;
                    }
                }
            }
            else
            {
                // Each 4:2:0 chroma sample is the rounded average of the
                // corresponding 2x2 block in the 4:4:4 source (+2 rounds to nearest).
                for (int k = 0; k < heightc; k++)
                {
                    for (int j = 0; j < widthc; j++)
                    {
                        outImg.U[j + k * widthc] = (byte)((img.U[2 * j + 2 * k * width] + img.U[2 * j + 1 + 2 * k * width] + img.U[2 * j + (2 * k + 1) * width] + img.U[2 * j + 1 + (2 * k + 1) * width] + 2) / 4);
                        outImg.V[j + k * widthc] = (byte)((img.V[2 * j + 2 * k * width] + img.V[2 * j + 1 + 2 * k * width] + img.V[2 * j + (2 * k + 1) * width] + img.V[2 * j + 1 + (2 * k + 1) * width] + 2) / 4);
                    }
                }
            }

            return outImg;
        }
Exemplo n.º 3
0
        /// <summary>
        /// Fast 4:2:0 to 4:4:4 conversion that replicates each chroma sample
        /// into its 2x2 destination block (nearest-neighbour upsampling).
        /// </summary>
        /// <param name="srcImg">Source YUV instance with ColorFormat 420.</param>
        /// <param name="param">0: Y component only (chroma filled with neutral 128); non-zero: Y, U and V are all converted.</param>
        /// <returns>A new 4:4:4 image, or the input unchanged if it is already 4:4:4.</returns>
        public static YUV TransImage420To444Fast(YUV srcImg, int param)
        {
            if (srcImg.ColorFormat == 444)
            {
                Console.WriteLine("すでにYUV444のフォーマットが指定されています");
                return srcImg;
            }

            int width   = srcImg.Width;
            int height  = srcImg.Height;
            int widthc  = width / 2;
            int heightc = height / 2;

            YUV dstImg = new YUV(width, height, 444);

            // Luma is copied through unchanged.
            for (int row = 0; row < height; row++)
            {
                int rowBase = row * width;
                for (int col = 0; col < width; col++)
                {
                    dstImg.Y[rowBase + col] = srcImg.Y[rowBase + col];
                }
            }

            if (param == 0)
            {
                // Neutral chroma (128 = no colour) when only luma is requested.
                for (int row = 0; row < height; row++)
                {
                    int rowBase = row * width;
                    for (int col = 0; col < width; col++)
                    {
                        dstImg.U[rowBase + col] = 128;
                        dstImg.V[rowBase + col] = 128;
                    }
                }
            }
            else
            {
                // Every source chroma sample fills the matching 2x2 block.
                for (int row = 0; row < heightc; row++)
                {
                    for (int col = 0; col < widthc; col++)
                    {
                        byte u = srcImg.U[col + row * widthc];
                        byte v = srcImg.V[col + row * widthc];

                        int top    = 2 * col + (2 * row) * width;
                        int bottom = 2 * col + (2 * row + 1) * width;

                        dstImg.U[top]        = u;
                        dstImg.U[top + 1]    = u;
                        dstImg.U[bottom]     = u;
                        dstImg.U[bottom + 1] = u;

                        dstImg.V[top]        = v;
                        dstImg.V[top + 1]    = v;
                        dstImg.V[bottom]     = v;
                        dstImg.V[bottom + 1] = v;
                    }
                }
            }

            return dstImg;
        }
Exemplo n.º 4
0
 /// <summary>
 /// Writes the instance's image data (Y, U, V planes in that order) to the supplied file stream.
 /// </summary>
 /// <param name="img">Image whose planes are written.</param>
 /// <param name="outFs">Destination stream; left open so further frames can follow.</param>
 public static void WriteImage(YUV img, FileStream outFs)
 {
     // The original wrapped these calls in a no-op `try { } catch { throw; }`;
     // exceptions now propagate directly.
     outFs.Write(img.Y, 0, img.Pixel);
     outFs.Write(img.U, 0, img.Pixelc);
     outFs.Write(img.V, 0, img.Pixelc);
 }
Exemplo n.º 5
0
 /// <summary>
 /// <para>Creates a new file at the given path and writes the instance's image data
 /// (Y, U, V planes in that order) to it.</para>
 /// <para>The file is closed when the method returns, so this overload cannot be
 /// used to append multiple video frames.</para>
 /// </summary>
 /// <param name="img">Image whose planes are written.</param>
 /// <param name="outPath">Path of the file to create; an existing file is overwritten.</param>
 public static void WriteImage(YUV img, string outPath)
 {
     // The original wrapped this in a no-op `try { } catch { throw; }`;
     // exceptions now propagate directly.
     using (FileStream outFs = new FileStream(outPath, System.IO.FileMode.Create, System.IO.FileAccess.Write))
     {
         outFs.Write(img.Y, 0, img.Pixel);
         outFs.Write(img.U, 0, img.Pixelc);
         outFs.Write(img.V, 0, img.Pixelc);
     }
 }
Exemplo n.º 6
0
        /// <summary>
        /// Fills the entire pixel data of a YUV instance with the specified values
        /// (each value is clamped to the byte range via RoundingByte).
        /// </summary>
        /// <param name="img">Image to fill in place.</param>
        /// <param name="yValue">Value for the Y plane.</param>
        /// <param name="uValue">Value for the U plane.</param>
        /// <param name="vValue">Value for the V plane.</param>
        public static void SetvalueImage(YUV img, int yValue, int uValue, int vValue)
        {
            byte yRoundingValue = RoundingByte(yValue);
            byte uRoundingValue = RoundingByte(uValue);
            byte vRoundingValue = RoundingByte(vValue);

            for (int i = 0; i < img.Y.Length; i++)
            {
                img.Y[i] = yRoundingValue;
            }

            // BUG FIX: the original filled U inside a loop bounded by V's length,
            // which would overrun (or underfill) U if the plane sizes ever differed.
            // Each plane is now bounded by its own length.
            for (int i = 0; i < img.U.Length; i++)
            {
                img.U[i] = uRoundingValue;
            }

            for (int i = 0; i < img.V.Length; i++)
            {
                img.V[i] = vRoundingValue;
            }
        }
Exemplo n.º 7
0
        /// <summary>
        /// <para>Converts an OpenCV BGR Mat instance to a YUV instance.</para>
        /// </summary>
        /// <param name="MatImg">BGR source Mat to convert (CV_8UC3).</param>
        /// <param name="colorSelect">0 converts the result to YUV420; any other value keeps YUV444.</param>
        /// <returns>The converted YUV instance.</returns>
        public static YUV MatBGR2YUV(Mat MatImg, int colorSelect)
        {
            int width  = MatImg.Width;
            int height = MatImg.Height;
            var step   = MatImg.Step();

            YUV imgTmp = new YUV(width, height, 444);

            // BUG FIX: Cv2.CvtColor requires an allocated destination Mat — the
            // original passed null, which throws at runtime. It also used
            // ColorConversionCodes.YUV2BGR (the inverse conversion); this method
            // converts BGR -> YUV, so BGR2YUV is required.
            Mat distImg = new Mat();
            Cv2.CvtColor(MatImg, distImg, ColorConversionCodes.BGR2YUV);

            unsafe
            {
                byte *distImgData = distImg.DataPointer;
                for (int k = 0; k < height; k++)
                {
                    for (int j = 0; j < width; j++)
                    {
                        // OpenCV's BGR2YUV output channel order is Y, U, V — the
                        // original assigned V from channel 1 and U from channel 2,
                        // which swapped the chroma planes.
                        imgTmp.Y[j + k * width] = distImgData[step * k + j * 3];
                        imgTmp.U[j + k * width] = distImgData[step * k + j * 3 + 1];
                        imgTmp.V[j + k * width] = distImgData[step * k + j * 3 + 2];
                    }
                }
            }

            return colorSelect == 0
                ? YUVProc.TransImage444To420(imgTmp, 1)
                : YUVProc.DeepCopy(imgTmp);
        }
Exemplo n.º 8
0
        /// <summary>
        /// Creates a deep copy of the given YUV instance via binary serialization.
        /// </summary>
        /// <param name="target">Image to copy; its type must be marked [Serializable].</param>
        /// <returns>An independent copy of the image.</returns>
        public static YUV DeepCopy(YUV target)
        {
            // SECURITY/OBSOLESCENCE NOTE(review): BinaryFormatter is insecure,
            // obsolete, and removed in .NET 9. Consider replacing this with a
            // direct clone of the Y/U/V arrays (or System.Text.Json) once the
            // YUV type's internals can be reviewed.
            BinaryFormatter formatter = new BinaryFormatter();

            // `using` replaces the original try/finally + Close, with identical disposal semantics.
            using (MemoryStream mem = new MemoryStream())
            {
                formatter.Serialize(mem, target);
                mem.Position = 0;
                return (YUV)formatter.Deserialize(mem);
            }
        }
Exemplo n.º 9
0
        /// <summary>
        /// Reads the next frame from the underlying stream into a newly created YUV instance.
        /// </summary>
        /// <returns>A new image holding the frame data.</returns>
        /// <exception cref="YUVIOException">Thrown when all frames have already been read.</exception>
        public YUV ReadImage()
        {
            if (MaxFrameCount < ReadFrameCount)
            {
                throw new YUVIOException("これ以上画像を読み込めません");
            }

            // Allocate only after the frame-count check so a failed read
            // does not waste an image allocation.
            YUV img = new YUV(Width, Height, ColorFormat);

            ReadFrameCount++;

            // The original wrapped these calls in a no-op `try { } catch { throw; }`,
            // which has been removed; exceptions propagate directly.
            // NOTE(review): Stream.Read may return fewer bytes than requested —
            // TODO confirm the underlying stream always delivers a full plane.
            fs.Read(img.Y, 0, Pixel);
            fs.Read(img.U, 0, Pixelc);
            fs.Read(img.V, 0, Pixelc);

            return img;
        }
Exemplo n.º 10
0
        /// <summary>
        /// Takes a YUV instance with ColorFormat 420 and creates a new instance with
        /// ColorFormat 444, upsampling the chroma planes by distance-weighted
        /// bilinear interpolation with explicit edge and corner handling.
        /// </summary>
        /// <param name="srcImg">Source YUV instance with ColorFormat 420.</param>
        /// <param name="param">0: Y component only (chroma filled with neutral 128); non-zero: Y, U and V are all converted.</param>
        /// <returns>A new 4:4:4 image, or the input unchanged if it is already 4:4:4.</returns>
        public static YUV TransImage420To444(YUV srcImg, int param)
        {
            if (srcImg.ColorFormat == 444)
            {
                Console.WriteLine("すでにYUV444のフォーマットが指定されています");
                return(srcImg);
            }

            int j, k;
            int width, height;
            int widthc, heightc;

            width   = srcImg.Width;
            height  = srcImg.Height;
            widthc  = width / 2;
            heightc = height / 2;

            YUV dstImg = new YUV(width, height, 444);

            // Luma is copied through unchanged.
            // (The `//#pragma omp parallel for` comments below are leftovers from a C/OpenMP original.)
            //#pragma omp parallel for
            for (k = 0; k < height; k++)
            {
                for (j = 0; j < width; j++)
                {
                    dstImg.Y[j + k * width] = srcImg.Y[j + k * width];
                }
            }

            if (param == 0)
            {
                // Neutral chroma (128 = no colour) when only luma is requested.
                //#pragma omp parallel for
                for (k = 0; k < height; k++)
                {
                    for (j = 0; j < width; j++)
                    {
                        dstImg.U[j + k * width] = 128;
                        dstImg.V[j + k * width] = 128;
                    }
                }
            }
            else
            {
                // Interior: each destination chroma pixel is a blend of its four
                // nearest source samples with weights 9/4/4/3 (sum 20, +10 rounds
                // to nearest). The four destination pixels of each 2x2 block get
                // the weights permuted toward their nearest source sample.
                //#pragma omp parallel for
                for (k = 0; k < heightc - 1; k++)
                {
                    for (j = 0; j < widthc - 1; j++)
                    {
                        dstImg.U[2 * j + 1 + (2 * k + 1) * width] = (byte)((9 * srcImg.U[j + k * widthc] + 4 * srcImg.U[j + 1 + k * widthc] + 4 * srcImg.U[j + (k + 1) * widthc] + 3 * srcImg.U[j + 1 + (k + 1) * widthc] + 10) / 20);
                        dstImg.U[2 * j + 2 + (2 * k + 1) * width] = (byte)((4 * srcImg.U[j + k * widthc] + 9 * srcImg.U[j + 1 + k * widthc] + 3 * srcImg.U[j + (k + 1) * widthc] + 4 * srcImg.U[j + 1 + (k + 1) * widthc] + 10) / 20);
                        dstImg.U[2 * j + 1 + (2 * k + 2) * width] = (byte)((4 * srcImg.U[j + k * widthc] + 3 * srcImg.U[j + 1 + k * widthc] + 9 * srcImg.U[j + (k + 1) * widthc] + 4 * srcImg.U[j + 1 + (k + 1) * widthc] + 10) / 20);
                        dstImg.U[2 * j + 2 + (2 * k + 2) * width] = (byte)((3 * srcImg.U[j + k * widthc] + 4 * srcImg.U[j + 1 + k * widthc] + 4 * srcImg.U[j + (k + 1) * widthc] + 9 * srcImg.U[j + 1 + (k + 1) * widthc] + 10) / 20);;
                        dstImg.V[2 * j + 1 + (2 * k + 1) * width] = (byte)((9 * srcImg.V[j + k * widthc] + 4 * srcImg.V[j + 1 + k * widthc] + 4 * srcImg.V[j + (k + 1) * widthc] + 3 * srcImg.V[j + 1 + (k + 1) * widthc] + 10) / 20);
                        dstImg.V[2 * j + 2 + (2 * k + 1) * width] = (byte)((4 * srcImg.V[j + k * widthc] + 9 * srcImg.V[j + 1 + k * widthc] + 3 * srcImg.V[j + (k + 1) * widthc] + 4 * srcImg.V[j + 1 + (k + 1) * widthc] + 10) / 20);
                        dstImg.V[2 * j + 1 + (2 * k + 2) * width] = (byte)((4 * srcImg.V[j + k * widthc] + 3 * srcImg.V[j + 1 + k * widthc] + 9 * srcImg.V[j + (k + 1) * widthc] + 4 * srcImg.V[j + 1 + (k + 1) * widthc] + 10) / 20);
                        dstImg.V[2 * j + 2 + (2 * k + 2) * width] = (byte)((3 * srcImg.V[j + k * widthc] + 4 * srcImg.V[j + 1 + k * widthc] + 4 * srcImg.V[j + (k + 1) * widthc] + 9 * srcImg.V[j + 1 + (k + 1) * widthc] + 10) / 20);;
                    }
                }

                // Top edge (k = 0): only horizontal neighbours exist, so a
                // two-tap blend with weights 18/8 (sum 26, +13 rounds) is used.
                k = 0;
                //#pragma omp parallel for
                for (j = 0; j < widthc - 1; j++)
                {
                    dstImg.U[2 * j + 1 + k * width] = (byte)((18 * srcImg.U[j + k * widthc] + 8 * srcImg.U[j + 1 + k * widthc] + 13) / 26);
                    dstImg.U[2 * j + 2 + k * width] = (byte)((8 * srcImg.U[j + k * widthc] + 18 * srcImg.U[j + 1 + k * widthc] + 13) / 26);
                    dstImg.V[2 * j + 1 + k * width] = (byte)((18 * srcImg.V[j + k * widthc] + 8 * srcImg.V[j + 1 + k * widthc] + 13) / 26);
                    dstImg.V[2 * j + 2 + k * width] = (byte)((8 * srcImg.V[j + k * widthc] + 18 * srcImg.V[j + 1 + k * widthc] + 13) / 26);
                }
                // Bottom edge (k = heightc - 1): same horizontal two-tap blend.
                k = heightc - 1;
                //#pragma omp parallel for
                for (j = 0; j < widthc - 1; j++)
                {
                    dstImg.U[2 * j + 1 + (2 * k + 1) * width] = (byte)((18 * srcImg.U[j + k * widthc] + 8 * srcImg.U[j + 1 + k * widthc] + 13) / 26);
                    dstImg.U[2 * j + 2 + (2 * k + 1) * width] = (byte)((8 * srcImg.U[j + k * widthc] + 18 * srcImg.U[j + 1 + k * widthc] + 13) / 26);
                    dstImg.V[2 * j + 1 + (2 * k + 1) * width] = (byte)((18 * srcImg.V[j + k * widthc] + 8 * srcImg.V[j + 1 + k * widthc] + 13) / 26);
                    dstImg.V[2 * j + 2 + (2 * k + 1) * width] = (byte)((8 * srcImg.V[j + k * widthc] + 18 * srcImg.V[j + 1 + k * widthc] + 13) / 26);
                }

                // Left (j = 0) and right (j = widthc - 1) edges: vertical two-tap blend.
                //#pragma omp parallel for
                for (k = 0; k < heightc - 1; k++)
                {
                    j = 0;
                    dstImg.U[j + (2 * k + 1) * width] = (byte)((18 * srcImg.U[j + k * widthc] + 8 * srcImg.U[j + (k + 1) * widthc] + 13) / 26);
                    dstImg.U[j + (2 * k + 2) * width] = (byte)((8 * srcImg.U[j + k * widthc] + 18 * srcImg.U[j + (k + 1) * widthc] + 13) / 26);
                    dstImg.V[j + (2 * k + 1) * width] = (byte)((18 * srcImg.V[j + k * widthc] + 8 * srcImg.V[j + (k + 1) * widthc] + 13) / 26);
                    dstImg.V[j + (2 * k + 2) * width] = (byte)((8 * srcImg.V[j + k * widthc] + 18 * srcImg.V[j + (k + 1) * widthc] + 13) / 26);
                    j = widthc - 1;
                    dstImg.U[2 * j + 1 + (2 * k + 1) * width] = (byte)((18 * srcImg.U[j + k * widthc] + 8 * srcImg.U[j + (k + 1) * widthc] + 13) / 26);
                    dstImg.U[2 * j + 1 + (2 * k + 2) * width] = (byte)((8 * srcImg.U[j + k * widthc] + 18 * srcImg.U[j + (k + 1) * widthc] + 13) / 26);
                    dstImg.V[2 * j + 1 + (2 * k + 1) * width] = (byte)((18 * srcImg.V[j + k * widthc] + 8 * srcImg.V[j + (k + 1) * widthc] + 13) / 26);
                    dstImg.V[2 * j + 1 + (2 * k + 2) * width] = (byte)((8 * srcImg.V[j + k * widthc] + 18 * srcImg.V[j + (k + 1) * widthc] + 13) / 26);
                }
                // Four corners: copied directly from the nearest source sample.
                k = 0; j = 0; dstImg.U[j + k * width] = srcImg.U[j + k * widthc];
                dstImg.V[j + k * width] = srcImg.V[j + k * widthc];
                k = 0; j = widthc - 1; dstImg.U[2 * j + 1 + k * width] = srcImg.U[j + k * widthc];
                dstImg.V[2 * j + 1 + k * width] = srcImg.V[j + k * widthc];
                k = heightc - 1; j = 0; dstImg.U[j + (2 * k + 1) * width] = srcImg.U[j + k * widthc];
                dstImg.V[j + (2 * k + 1) * width] = srcImg.V[j + k * widthc];
                k = heightc - 1; j = widthc - 1; dstImg.U[2 * j + 1 + (2 * k + 1) * width] = srcImg.U[j + k * widthc];
                dstImg.V[2 * j + 1 + (2 * k + 1) * width] = srcImg.V[j + k * widthc];
            }
            return(dstImg);
        }
Exemplo n.º 11
0
        /// <summary>
        /// <para>Converts a YUV instance to an OpenCV Mat instance.</para>
        /// </summary>
        /// <param name="YUVImg">Source YUV instance to convert.</param>
        /// <param name="fullYUV">Conversion matrix selection: 0 = ITU-R BT.601 studio-range YCbCr, 1 = 8-bit full-scale YUV.</param>
        /// <param name="mode">When the source is 4:2:0: 0 = slow (bilinear) upsample, 1 = fast (nearest-neighbour) upsample.</param>
        /// <returns>The converted Mat (CV_8UC3, channel order BGR).</returns>
        public static Mat YUV2MatBGR(YUV YUVImg, int fullYUV, int mode)
        {
            int j, k;
            int r, g, b;

            // NOTE: the original also copied Widthc/Heightc into locals that
            // were never used; they have been removed.
            int width  = YUVImg.Width;
            int height = YUVImg.Height;

            Mat MatImg = new Mat(new Size(width, height), MatType.CV_8UC3);
            var step   = MatImg.Step();

            // Work on a 4:4:4 copy so every pixel has its own chroma sample.
            YUV imgTmp;

            if (YUVImg.ColorFormat == 420)
            {
                if (mode == 0)
                {
                    imgTmp = YUVProc.TransImage420To444(YUVImg, 1);
                }
                else
                {
                    imgTmp = YUVProc.TransImage420To444Fast(YUVImg, 1);
                }
            }
            else
            {
                imgTmp = YUVProc.DeepCopy(YUVImg);
            }

            if (fullYUV != 0)
            {
                // 8-bit full-scale YUV -> BGR.
                unsafe
                {
                    byte *MatImgData = MatImg.DataPointer;
                    for (k = 0; k < height; k++)
                    {
                        for (j = 0; j < width; j++)
                        {
                            r = YUVProc.RoundingByte(imgTmp.Y[j + (k * width)] + 1.402 * (imgTmp.V[j + (k * width)] - 128));
                            g = YUVProc.RoundingByte(imgTmp.Y[j + (k * width)] - 0.344136 * (imgTmp.U[j + (k * width)] - 128) - 0.714136 * (imgTmp.V[j + (k * width)] - 128));
                            b = YUVProc.RoundingByte(imgTmp.Y[j + (k * width)] + 1.772 * (imgTmp.U[j + (k * width)] - 128));

                            MatImgData[step * k + j * 3 + 0] = (byte)b;
                            MatImgData[step * k + j * 3 + 1] = (byte)g;
                            MatImgData[step * k + j * 3 + 2] = (byte)r;
                        }
                    }
                }
            }
            else
            {
                // ITU-R BT.601 studio-range YCbCr -> BGR.
                unsafe
                {
                    byte *MatImgData = MatImg.DataPointer;
                    for (k = 0; k < height; k++)
                    {
                        for (j = 0; j < width; j++)
                        {
                            r = YUVProc.RoundingByte(1.164 * (imgTmp.Y[j + (k * width)] - 16) + 1.596 * (imgTmp.V[j + (k * width)] - 128));
                            g = YUVProc.RoundingByte(1.164 * (imgTmp.Y[j + (k * width)] - 16) - 0.391 * (imgTmp.U[j + (k * width)] - 128) - 0.813 * (imgTmp.V[j + (k * width)] - 128));
                            b = YUVProc.RoundingByte(1.164 * (imgTmp.Y[j + (k * width)] - 16) + 2.018 * (imgTmp.U[j + (k * width)] - 128));

                            MatImgData[step * k + j * 3 + 0] = (byte)b;
                            MatImgData[step * k + j * 3 + 1] = (byte)g;
                            MatImgData[step * k + j * 3 + 2] = (byte)r;
                        }
                    }
                }
            }

            return MatImg;
        }