Example #1
		/**
		 * @warning This is a hack - the packet memory allocation stuff is broken. The
		 * packet is allocated if it was not really allocated.
		 */
		static public int av_dup_packet(AVPacket pkt)
		{
			throw(new NotImplementedException());
		}
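		/*
		 * A rough sketch (kept commented out, like the other unported snippets here) of how
		 * the libavcodec logic could look in this wrapper: duplicate the payload so the
		 * packet really owns its buffer. av_malloc, FF_INPUT_BUFFER_PADDING_SIZE and
		 * error.ENOMEM are assumptions about this port, not confirmed APIs.
		 *
		 * static public int av_dup_packet(AVPacket pkt)
		 * {
		 *     if (pkt.data.IsNull) return 0;
		 *     // allocate payload + zeroed input padding, then copy the old bytes over
		 *     Pointer<byte> data = av_malloc(pkt.size + FF_INPUT_BUFFER_PADDING_SIZE); // hypothetical helper
		 *     if (data.IsNull) return error.AVERROR(error.ENOMEM);                     // ENOMEM assumed
		 *     CLib.memcpy(data, pkt.data, pkt.size);
		 *     CLib.memset(data + pkt.size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
		 *     pkt.data = data;
		 *     return 0;
		 * }
		 */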
Example #2
		/**
		 * Reduce packet size, correctly zeroing padding
		 *
		 * @param pkt packet
		 * @param size new size
		 */
		static public void av_shrink_packet(AVPacket pkt, int size)
		{
			throw(new NotImplementedException());
		}
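		/*
		 * Possible port of the C implementation, sketched here as a comment: clamp the size
		 * and re-zero the padding that follows the (now smaller) payload.
		 * FF_INPUT_BUFFER_PADDING_SIZE is assumed to exist somewhere in this port.
		 *
		 * static public void av_shrink_packet(AVPacket pkt, int size)
		 * {
		 *     if (pkt.size <= size) return;
		 *     pkt.size = size;
		 *     CLib.memset(pkt.data + size, 0, FF_INPUT_BUFFER_PADDING_SIZE); // keep padding zeroed
		 * }
		 */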
Example #3
		/**
		 * Increase packet size, correctly zeroing padding
		 *
		 * @param pkt packet
		 * @param grow_by number of bytes by which to increase the size of the packet
		 */
		static public int av_grow_packet(AVPacket pkt, int grow_by)
		{
			throw(new NotImplementedException());
		}
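		/*
		 * Sketch of what a port could do: reallocate the payload, bump pkt.size and zero the
		 * trailing padding, falling back to av_new_packet for empty packets. av_realloc,
		 * FF_INPUT_BUFFER_PADDING_SIZE and error.ENOMEM are assumed names, not confirmed APIs.
		 *
		 * static public int av_grow_packet(AVPacket pkt, int grow_by)
		 * {
		 *     if (pkt.size == 0) return av_new_packet(pkt, grow_by);
		 *     Pointer<byte> new_ptr = av_realloc(pkt.data, pkt.size + grow_by + FF_INPUT_BUFFER_PADDING_SIZE); // hypothetical helper
		 *     if (new_ptr.IsNull) return error.AVERROR(error.ENOMEM); // ENOMEM assumed
		 *     pkt.data = new_ptr;
		 *     pkt.size += grow_by;
		 *     CLib.memset(pkt.data + pkt.size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
		 *     return 0;
		 * }
		 */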
Example #4
		/**
		 * Split side data that has been packed into the packet payload back out into
		 * separate side data entries (mirrors the corresponding libavcodec function).
		 */
		static public int av_packet_split_side_data(AVPacket pkt)
		{
			throw(new NotImplementedException());
		}
Example #5
		/**
		 * Allocate the payload of a packet and initialize its fields with
		 * default values.
		 *
		 * @param pkt packet
		 * @param size wanted payload size
		 * @return 0 if OK, AVERROR_xxx otherwise
		 */
		static public int av_new_packet(AVPacket pkt, int size)
		{
			throw(new NotImplementedException());
		}
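		/*
		 * Sketch of a possible implementation, following the documented contract above:
		 * allocate the payload plus padding, zero the padding, reset the remaining fields
		 * and return 0. av_malloc, av_init_packet, FF_INPUT_BUFFER_PADDING_SIZE and
		 * error.ENOMEM are assumptions about this port.
		 *
		 * static public int av_new_packet(AVPacket pkt, int size)
		 * {
		 *     Pointer<byte> data = av_malloc(size + FF_INPUT_BUFFER_PADDING_SIZE); // hypothetical helper
		 *     if (data.IsNull) return error.AVERROR(error.ENOMEM);                 // ENOMEM assumed
		 *     CLib.memset(data + size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
		 *     av_init_packet(pkt); // hypothetical: reset pts/dts/flags to defaults
		 *     pkt.data = data;
		 *     pkt.size = size;
		 *     return 0;
		 * }
		 */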
Example #6
        /*
         * uint8_t* av_packet_new_side_data(AVPacket pkt, AVPacketSideDataType type, int size)
         * {
         *      throw(new NotImplementedException());
         * }
         */

        /**
         * Shrink the already allocated side data buffer
         *
         * @param pkt packet
         * @param type side information type
         * @param size new side information size
         * @return 0 on success, < 0 on failure
         */
        static public int av_packet_shrink_side_data(AVPacket pkt, AVPacketSideDataType type, int size)
        {
            throw(new NotImplementedException());
        }
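        /*
         * Sketch of how the documented behaviour could be implemented: find the matching
         * side data entry and record the smaller size. The side_data / side_data_elems
         * members and error.ENOMEM are assumptions about how this port models AVPacket.
         *
         * static public int av_packet_shrink_side_data(AVPacket pkt, AVPacketSideDataType type, int size)
         * {
         *     for (int i = 0; i < pkt.side_data_elems; i++)
         *     {
         *         if (pkt.side_data[i].type != type) continue;
         *         if (size > pkt.side_data[i].size) return error.AVERROR(error.ENOMEM); // cannot grow here
         *         pkt.side_data[i].size = size;
         *         return 0;
         *     }
         *     return error.AVERROR(error.ENOMEM);
         * }
         */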
Example #7
		/*
		uint8_t* av_packet_new_side_data(AVPacket pkt, AVPacketSideDataType type, int size)
		{
			throw(new NotImplementedException());
		}
		*/

		/**
		 * Shrink the already allocated side data buffer
		 *
		 * @param pkt packet
		 * @param type side information type
		 * @param size new side information size
		 * @return 0 on success, < 0 on failure
		 */
		static public int av_packet_shrink_side_data(AVPacket pkt, AVPacketSideDataType type, int size)
		{
			throw(new NotImplementedException());
		}
Example #8
 /**
  * Free a packet.
  *
  * @param pkt packet to free
  */
 static public void av_free_packet(AVPacket pkt)
 {
     throw(new NotImplementedException());
 }
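 /*
  * Minimal sketch of what freeing could look like in this port: run whatever destructor
  * hook the packet carries and clear data/size. The destruct delegate and the null
  * Pointer helper are assumptions, not confirmed members of this port.
  *
  * static public void av_free_packet(AVPacket pkt)
  * {
  *     if (pkt == null) return;
  *     if (pkt.destruct != null) pkt.destruct(pkt); // destruct delegate assumed
  *     pkt.data = Pointer<byte>.Null;               // null helper assumed
  *     pkt.size = 0;
  * }
  */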
Example #9
 //int (*decode)(AVCodecContext *, void *outdata, int *outdata_size, AVPacket *avpkt);
 //public Unimplemented decode;
 //virtual public int decode(AVCodecContext avctx, Pointer<byte> outdata, ref int outdata_size, AVPacket avpkt)
 virtual public int decode(AVCodecContext avctx, ref object outdata, AVPacket avpkt)
 {
     throw (new NotImplementedException());
 }
Example #10
 /**
  * Increase packet size, correctly zeroing padding
  *
  * @param pkt packet
  * @param grow_by number of bytes by which to increase the size of the packet
  */
 static public int av_grow_packet(AVPacket pkt, int grow_by)
 {
     throw(new NotImplementedException());
 }
Example #11
 /**
  * @warning This is a hack - the packet memory allocation stuff is broken. The
  * packet is allocated if it was not really allocated.
  */
 static public int av_dup_packet(AVPacket pkt)
 {
     throw(new NotImplementedException());
 }
Example #12
 /**
  * Reduce packet size, correctly zeroing padding
  *
  * @param pkt packet
  * @param size new size
  */
 static public void av_shrink_packet(AVPacket pkt, int size)
 {
     throw(new NotImplementedException());
 }
Example #13
 /**
  * Allocate the payload of a packet and initialize its fields with
  * default values.
  *
  * @param pkt packet
  * @param size wanted payload size
  * @return 0 if OK, AVERROR_xxx otherwise
  */
 static public int av_new_packet(AVPacket pkt, int size)
 {
     throw(new NotImplementedException());
 }
Example #14
 static public int av_packet_split_side_data(AVPacket pkt)
 {
     throw(new NotImplementedException());
 }
Example #15
		/**
		 * Free a packet.
		 *
		 * @param pkt packet to free
		 */
		static public void av_free_packet(AVPacket pkt)
		{
			throw(new NotImplementedException());
		}
Example #16
	    //int (*decode)(AVCodecContext *, void *outdata, int *outdata_size, AVPacket *avpkt);
		//public Unimplemented decode;
		//virtual public int decode(AVCodecContext avctx, Pointer<byte> outdata, ref int outdata_size, AVPacket avpkt)
		virtual public int decode(AVCodecContext avctx, ref object outdata, AVPacket avpkt)
		{
			throw (new NotImplementedException());
		}
Example #17
		/// <summary>
		/// Decode a single BMP image.
		/// </summary>
		/// <param name="avctx">codec context</param>
		/// <param name="outputData">receives the decoded frame (s.picture)</param>
		/// <param name="avpkt">packet holding the BMP file data</param>
		/// <returns>number of bytes consumed on success, negative on error</returns>
		public override int decode(AVCodecContext avctx, ref object outputData, AVPacket avpkt)
		{
			Pointer<byte> buf = avpkt.data;
			int buf_size = avpkt.size;
			BMPContext s = (BMPContext)avctx.priv_data;
			AVFrame p = s.picture;
			uint fsize, hsize;
			int width, height;
			int depth;
			BiCompression comp;
			uint ihsize;
			int i, n, linesize;
			var rgb = new uint[3];
			uint alpha = 0;
			Pointer<byte> ptr;
			int dsize;
			Pointer<byte> buf0 = buf;

			if (buf_size < 14)
			{
				log.av_log(avctx, log.AV_LOG_ERROR, "buf size too small (%d)\n", buf_size);
				return -1;
			}

			if (bytestream.get_byte(ref buf) != 'B' || bytestream.get_byte(ref buf) != 'M')
			{
				log.av_log(avctx, log.AV_LOG_ERROR, "bad magic number\n");
				return -1;
			}

			fsize = bytestream.get_le32(ref buf);

			if (buf_size < fsize)
			{
				log.av_log(avctx, log.AV_LOG_ERROR, "not enough data (%d < %d), trying to decode anyway\n", buf_size, fsize);
				fsize = (uint)buf_size;
			}

			buf += 2; /* reserved1 */
			buf += 2; /* reserved2 */

			hsize  = bytestream.get_le32(ref buf);  /* header size */
			ihsize = bytestream.get_le32(ref buf); /* more header size */

			if (ihsize + 14 > hsize)
			{
				log.av_log(avctx, log.AV_LOG_ERROR, "invalid header size %d\n", hsize);
				return -1;
			}

			/* sometimes file size is set to some headers size, set a real size in that case */
			if (fsize == 14 || fsize == ihsize + 14)
			{
				fsize = (uint)(buf_size - 2);
			}

			if (fsize <= hsize)
			{
				log.av_log(avctx, log.AV_LOG_ERROR, "declared file size is less than header size (%d < %d)\n", fsize, hsize);
				return -1;
			}

			switch (ihsize)
			{
				case  40: // windib
				case  56: // windib v3
				case  64: // OS/2 v2
				case 108: // windib v4
				case 124: // windib v5
					width = (int)bytestream.get_le32(ref buf);
					height = (int)bytestream.get_le32(ref buf);
					break;
				case  12: // OS/2 v1
					width = (int)bytestream.get_le16(ref buf);
					height = (int)bytestream.get_le16(ref buf);
				break;
				default:
					log.av_log(avctx, log.AV_LOG_ERROR, "unsupported BMP file, patch welcome\n");
					return -1;
			}

			if (bytestream.get_le16(ref buf) != 1) /* planes */
			{
				log.av_log(avctx, log.AV_LOG_ERROR, "invalid BMP header\n");
				return -1;
			}

			depth = bytestream.get_le16(ref buf);

			if (ihsize == 40 || ihsize == 64 || ihsize == 56)
			{
				comp = (BiCompression)bytestream.get_le32(ref buf);
			}
			else
			{
				comp = BiCompression.BMP_RGB;
			}

			if (comp != BiCompression.BMP_RGB && comp != BiCompression.BMP_BITFIELDS && comp != BiCompression.BMP_RLE4 && comp != BiCompression.BMP_RLE8)
			{
				log.av_log(avctx, log.AV_LOG_ERROR, "BMP coding %d not supported\n", comp);
				return -1;
			}

			if (comp == BiCompression.BMP_BITFIELDS)
			{
				buf += 20;
				rgb[0] = bytestream.get_le32(ref buf);
				rgb[1] = bytestream.get_le32(ref buf);
				rgb[2] = bytestream.get_le32(ref buf);
				if (ihsize >= 108)
				{
					alpha = bytestream.get_le32(ref buf);
				}
			}


			avctx.width = width;
			avctx.height = height > 0 ? height : -height;

			avctx.pix_fmt = PixelFormat.PIX_FMT_NONE;

			switch (depth)
			{
			case 32:
				if (comp == BiCompression.BMP_BITFIELDS)
				{
					if (rgb[0] == 0xFF000000 && rgb[1] == 0x00FF0000 && rgb[2] == 0x0000FF00)
					{
						avctx.pix_fmt = (alpha != 0) ? PixelFormat.PIX_FMT_ABGR : PixelFormat.PIX_FMT_0BGR;
					}
					else if (rgb[0] == 0x00FF0000 && rgb[1] == 0x0000FF00 && rgb[2] == 0x000000FF)
					{
						avctx.pix_fmt = (alpha != 0) ? PixelFormat.PIX_FMT_BGRA : PixelFormat.PIX_FMT_BGR0;
					}
					else if (rgb[0] == 0x0000FF00 && rgb[1] == 0x00FF0000 && rgb[2] == 0xFF000000)
					{
						avctx.pix_fmt = (alpha != 0) ? PixelFormat.PIX_FMT_ARGB : PixelFormat.PIX_FMT_0RGB;
					}
					else if (rgb[0] == 0x000000FF && rgb[1] == 0x0000FF00 && rgb[2] == 0x00FF0000)
					{
						avctx.pix_fmt = (alpha != 0) ? PixelFormat.PIX_FMT_RGBA : PixelFormat.PIX_FMT_RGB0;
					}
					else
					{
						log.av_log(avctx, log.AV_LOG_ERROR, "Unknown bitfields %0X %0X %0X\n", rgb[0], rgb[1], rgb[2]);
						return error.AVERROR(error.EINVAL);
					}
			    }
				else
				{
					avctx.pix_fmt = PixelFormat.PIX_FMT_BGRA;
			    }
			    break;
			case 24:
				avctx.pix_fmt = PixelFormat.PIX_FMT_BGR24;
			    break;
			case 16:
				if (comp == BiCompression.BMP_RGB)
				{
					avctx.pix_fmt = PixelFormat.PIX_FMT_RGB555;
				}
				else if (comp == BiCompression.BMP_BITFIELDS)
				{
					if (rgb[0] == 0xF800 && rgb[1] == 0x07E0 && rgb[2] == 0x001F) avctx.pix_fmt = PixelFormat.PIX_FMT_RGB565;
					else if (rgb[0] == 0x7C00 && rgb[1] == 0x03E0 && rgb[2] == 0x001F) avctx.pix_fmt = PixelFormat.PIX_FMT_RGB555;
					else if (rgb[0] == 0x0F00 && rgb[1] == 0x00F0 && rgb[2] == 0x000F) avctx.pix_fmt = PixelFormat.PIX_FMT_RGB444;
					else
					{
						log.av_log(avctx, log.AV_LOG_ERROR, "Unknown bitfields %0X %0X %0X\n", rgb[0], rgb[1], rgb[2]);
						return error.AVERROR(error.EINVAL);
					}
				}
			    break;
			case 8:
				if (hsize - ihsize - 14 > 0)
				{
					avctx.pix_fmt = PixelFormat.PIX_FMT_PAL8;
				}
				else
				{
					avctx.pix_fmt = PixelFormat.PIX_FMT_GRAY8;
				}
			    break;
			case 1:
			case 4:
			    if (hsize - ihsize - 14 > 0)
				{
					avctx.pix_fmt = PixelFormat.PIX_FMT_PAL8;
			    }
				else
				{
			        log.av_log(avctx, log.AV_LOG_ERROR, "Unknown palette for %d-colour BMP\n", 1 << depth);
					return -1;
			    }
			    break;
			default:
			    log.av_log(avctx, log.AV_LOG_ERROR, "depth %d not supported\n", depth);
			    return -1;
			}

			if (avctx.pix_fmt == PixelFormat.PIX_FMT_NONE)
			{
			    log.av_log(avctx, log.AV_LOG_ERROR, "unsupported pixel format\n");
			    return -1;
			}

			if (!p.data[0].IsNull) avctx.release_buffer(avctx, p);

			p.reference = 0;
			if (avctx.get_buffer(avctx, p) < 0)
			{
			    log.av_log(avctx, log.AV_LOG_ERROR, "get_buffer() failed\n");
			    return -1;
			}

			p.pict_type = AVPictureType.AV_PICTURE_TYPE_I;

			p.key_frame = 1;

			buf = buf0 + hsize;
			dsize = (int)(buf_size - hsize);

			/* Line size in file multiple of 4 */
			n = (int)(((avctx.width * depth + 31) / 8) & ~3);

			if (n * avctx.height > dsize && comp != BiCompression.BMP_RLE4 && comp != BiCompression.BMP_RLE8)
			{
			    log.av_log(avctx, log.AV_LOG_ERROR, "not enough data (%d < %d)\n", dsize, n * avctx.height);
			    return -1;
			}

			// RLE may skip decoding some picture areas, so blank picture before decoding
			if (comp == BiCompression.BMP_RLE4 || comp == BiCompression.BMP_RLE8)
			{
				CLib.memset(p.data[0], 0, avctx.height * p.linesize[0]);
			}

			if (depth == 4 || depth == 8)
			{
				CLib.memset(p.data[1], 0, 1024);
			}

			if (height > 0)
			{
			    ptr = p.data[0] + (avctx.height - 1) * p.linesize[0];
			    linesize = -p.linesize[0];
			}
			else
			{
			    ptr = p.data[0];
			    linesize = p.linesize[0];
			}

			if (avctx.pix_fmt == PixelFormat.PIX_FMT_PAL8)
			{
			    int colors = (1 << depth);
			    if (ihsize >= 36)
				{
			        int t;
			        buf = buf0 + 46;
			        t = (int)bytestream.get_le32(ref buf);
			        if (t < 0 || t > (int)(1 << depth))
					{
			            log.av_log(avctx, log.AV_LOG_ERROR, "Incorrect number of colors - %X for bitdepth %d\n", t, depth);
			        }
					else if (t != 0)
					{
			            colors = t;
			        }
			    }
			    buf = buf0 + 14 + ihsize; //palette location
				if ((hsize - ihsize - 14) < (colors << 2)) // OS/2 bitmap, 3 bytes per palette entry
				{
					for (i = 0; i < colors; i++)
					{
						var a = p.data[1].CastPointer<uint>();
						a[i] = (uint)((0xFFU << 24) | bytestream.get_le24(ref buf));
					}
			    }
				else
				{
					for (i = 0; i < colors; i++)
					{
						var a = p.data[1].CastPointer<uint>();
						a[i] = (uint)((0xFFU << 24) | bytestream.get_le32(ref buf));
					}
			    }
			    buf = buf0 + hsize;
			}

			if (comp == BiCompression.BMP_RLE4 || comp == BiCompression.BMP_RLE8)
			{
			    if (height < 0)
				{
			        p.data[0] += p.linesize[0] * (avctx.height - 1);
			        p.linesize[0] = -p.linesize[0];
			    }

			    //ff_msrle_decode(avctx, (AVPicture)p, depth, buf, dsize);
				Unimplemented.Mark();

			    if (height < 0)
				{
			        p.data[0] += p.linesize[0] * (avctx.height - 1);
			        p.linesize[0] = -p.linesize[0];
			    }
			}
			else
			{
			    switch (depth)
				{
			    case 1:
			        for (i = 0; i < avctx.height; i++)
					{
			            for (int j = 0; j < n; j++)
						{
			                ptr[j * 8 + 0] = (byte)((buf[j] >> 7)    );
			                ptr[j * 8 + 1] = (byte)((buf[j] >> 6) & 1);
			                ptr[j * 8 + 2] = (byte)((buf[j] >> 5) & 1);
			                ptr[j * 8 + 3] = (byte)((buf[j] >> 4) & 1);
			                ptr[j * 8 + 4] = (byte)((buf[j] >> 3) & 1);
			                ptr[j * 8 + 5] = (byte)((buf[j] >> 2) & 1);
			                ptr[j * 8 + 6] = (byte)((buf[j] >> 1) & 1);
			                ptr[j * 8 + 7] = (byte)((buf[j] >> 0) & 1);
			            }
			            buf += n;
			            ptr += linesize;
			        }
			        break;
			    case 8:
			    case 24:
			    case 32:
			        for (i = 0; i < avctx.height; i++)
					{
						//Console.WriteLine("i={0}, BytesPerRow={1}, linesize={2}", i, n, linesize);
						CLib.memcpy(ptr, buf, n);
			            buf += n;
			            ptr += linesize;
			        }
			        break;
			    case 4:
			        for (i = 0; i < avctx.height; i++)
					{
			            for (int j = 0; j < n; j++)
						{
			                ptr[j * 2 + 0] = (byte)((buf[j] >> 4) & 0xF);
			                ptr[j * 2 + 1] = (byte)(buf[j] & 0xF);
			            }
			            buf += n;
			            ptr += linesize;
			        }
			        break;
			    case 16:
			        for (i = 0; i < avctx.height; i++)
					{
			            Pointer<ushort> src = buf.CastPointer<ushort>();
						Pointer<ushort> dst = ptr.CastPointer<ushort>();

						for (int j = 0; j < avctx.width; j++)
						{
							dst[0] = av_bswap.av_le2ne16(src[0]);
							src++;
							dst++;
						}

			            buf += n;
			            ptr += linesize;
			        }
			        break;
			    default:
			        log.av_log(avctx, log.AV_LOG_ERROR, "BMP decoder is broken\n");
			        return -1;
			    }
			}

			outputData = s.picture;

			return buf_size;
		} // decode
Example #18
		public override int decode(AVCodecContext avctx, ref object outdata, AVPacket avpkt)
		{
			outdata = null;

			TargaContext s = (TargaContext)avctx.priv_data;
			//AVFrame *picture = data;
			AVFrame p = s.picture;
			Pointer<byte> dst;
			int stride;
			TargaCompr compr;
			int idlen, pal, y, w, h, bpp, flags;
			int first_clr, colors, csize;

			bytestream2.init(ref s.gb, avpkt.data, avpkt.size);

			/* parse image header */
			idlen = bytestream2.get_byte(ref s.gb);
			pal = bytestream2.get_byte(ref s.gb);
			compr = (TargaCompr)bytestream2.get_byte(ref s.gb);
			first_clr = bytestream2.get_le16(ref s.gb);
			colors = bytestream2.get_le16(ref s.gb);
			csize = bytestream2.get_byte(ref s.gb);
			bytestream2.skip(ref s.gb, 4); /* 2: x, 2: y */
			w = bytestream2.get_le16(ref s.gb);
			h = bytestream2.get_le16(ref s.gb);
			bpp = bytestream2.get_byte(ref s.gb);

			if (bytestream2.get_bytes_left(ref s.gb) <= idlen) {
				log.av_log(avctx, log.AV_LOG_ERROR, "Not enough data to read header\n");
			    return error.AVERROR_INVALIDDATA;
			}

			flags = bytestream2.get_byte(ref s.gb);

			if ((pal == 0) && ((first_clr != 0) || (colors != 0) || (csize != 0)))
			{
				log.av_log(avctx, log.AV_LOG_WARNING, "File without colormap has colormap information set.\n");
			    // specification says we should ignore those values in this case
			    first_clr = colors = csize = 0;
			}


			// skip identifier if any
			bytestream2.skip(ref s.gb, idlen);

			switch (bpp) {
			case 8:
					avctx.pix_fmt = ((TargaCompr)((int)compr & (~(int)TargaCompr.TGA_RLE)) == TargaCompr.TGA_BW) ? PixelFormat.PIX_FMT_GRAY8 : PixelFormat.PIX_FMT_PAL8;
			    break;
			case 15:
			case 16:
				avctx.pix_fmt = PixelFormat.PIX_FMT_RGB555LE;
			    break;
			case 24:
				avctx.pix_fmt = PixelFormat.PIX_FMT_BGR24;
			    break;
			case 32:
				avctx.pix_fmt = PixelFormat.PIX_FMT_BGRA;
			    break;
			default:
				log.av_log(avctx, log.AV_LOG_ERROR, "Bit depth %i is not supported\n", bpp);
			    return -1;
			}


			if (!s.picture.data[0].IsNull) avctx.release_buffer(avctx, s.picture);

			if (imgutils.av_image_check_size((uint)w, (uint)h, 0, avctx) != 0) return -1;

			if (w != avctx.width || h != avctx.height) Functions.avcodec_set_dimensions(avctx, w, h);

			if (avctx.get_buffer(avctx, p) < 0)
			{
				log.av_log(avctx, log.AV_LOG_ERROR, "get_buffer() failed\n");
			    return -1;
			}

			if ((flags & 0x20) != 0)
			{
			    dst = p.data[0];
			    stride = p.linesize[0];
			}
			else
			{
				//image is upside-down
			    dst = p.data[0] + p.linesize[0] * (h - 1);
			    stride = -p.linesize[0];
			}


			if (colors != 0)
			{
			    int pal_size, pal_sample_size;
			    if ((colors + first_clr) > 256) {
					log.av_log(avctx, log.AV_LOG_ERROR, "Incorrect palette: %i colors with offset %i\n", colors, first_clr);
			        return -1;
			    }
			    switch (csize) {
			    case 24: pal_sample_size = 3; break;
			    case 16:
			    case 15: pal_sample_size = 2; break;
			    default:
					log.av_log(avctx, log.AV_LOG_ERROR, "Palette entry size %i bits is not supported\n", csize);
			        return -1;
			    }
			    pal_size = colors * pal_sample_size;
				if (avctx.pix_fmt != PixelFormat.PIX_FMT_PAL8)
				{
					//should not occur but skip palette anyway
					bytestream2.skip(ref s.gb, pal_size);
				}
				else
				{
					int t;
					var ppal = p.data[1].CastPointer<uint>() + first_clr;

					if (bytestream2.get_bytes_left(ref s.gb) < pal_size)
					{
						log.av_log(avctx, log.AV_LOG_ERROR, "Not enough data to read palette\n");
						return error.AVERROR_INVALIDDATA;
					}
					switch (pal_sample_size)
					{
						case 3:
							/* RGB24 */
							for (t = 0; t < colors; t++)
							{
								ppal[0] = (0xffU << 24) | bytestream2.get_le24u(ref s.gb);
								ppal++;
							}
							break;
						case 2:
							/* RGB555 */
							for (t = 0; t < colors; t++)
							{
								var v = (uint)bytestream2.get_le16u(ref s.gb);
								v = ((v & 0x7C00) << 9) |
									((v & 0x03E0) << 6) |
									((v & 0x001F) << 3);
								/* left bit replication */
								v |= (v & 0xE0E0E0U) >> 5;
								ppal[0] = (0xffU << 24) | v;
								ppal++;
							}
							break;
					}
					p.palette_has_changed = 1;
				}
			}

			if ((compr & (~TargaCompr.TGA_RLE)) == TargaCompr.TGA_NODATA)
			{
			    CLib.memset(p.data[0], 0, p.linesize[0] * h);
			}
			else {
				if ((compr & TargaCompr.TGA_RLE) != 0)
				{
			        //int res = targa_decode_rle(avctx, s, dst, w, h, stride, bpp);
			        //if (res < 0) return res;
					throw (new NotImplementedException());
				}
				else
				{
			        var img_size = w * ((bpp + 1) >> 3);
			        if (bytestream2.get_bytes_left(ref s.gb) < img_size * h) {
			            log.av_log(avctx, log.AV_LOG_ERROR, "Not enough data available for image\n");
			            return error.AVERROR_INVALIDDATA;
			        }
			        for (y = 0; y < h; y++)
					{
			            bytestream2.get_bufferu(ref s.gb, dst, img_size);
			            dst += stride;
			        }
			    }
			}


			if ((flags & 0x10) != 0) // right-to-left, needs horizontal flip
			{
			    int x;
			    for (y = 0; y < h; y++) {
			        var line = p.data[0].GetOffsetPointer(y * p.linesize[0]);
			        for (x = 0; x < w >> 1; x++)
					{
			            switch (bpp)
						{
							case 32:
								line.CastPointer<uint>().SwapValuesAtOffsets((x), (w - x - 1));
								break;
							case 24:
								line.CastPointer<byte>().SwapValuesAtOffsets((3 * x + 0), (3 * w - 3 * x - 3));
								line.CastPointer<byte>().SwapValuesAtOffsets((3 * x + 1), (3 * w - 3 * x - 2));
								line.CastPointer<byte>().SwapValuesAtOffsets((3 * x + 2), (3 * w - 3 * x - 1));
								break;
							case 16:
								line.CastPointer<ushort>().SwapValuesAtOffsets((x), (w - x - 1));
								break;
							case 8:
								line.CastPointer<byte>().SwapValuesAtOffsets((x), (w - x - 1));
								break;
			            }
			        }
			    }
			}

			outdata = s.picture;

			return avpkt.size;
		}