/// <summary>
/// Converts a full decoded AVC frame from the PSP YCbCr layout to RGB.
/// Delegates to hleMpegBaseCscAvcRange with a range covering the whole image.
/// </summary>
/// <param name="bufferRGB">destination RGB pixel buffer in PSP memory</param>
/// <param name="unknown">unknown parameter, passed through unchanged</param>
/// <param name="bufferWidth">destination line width in pixels</param>
/// <param name="mp4AvcCscStructAddr">address of the SceMp4AvcCscStruct describing the source planes</param>
/// <returns>0 on success</returns>
public virtual int sceMpegBaseCscAvc(TPointer bufferRGB, int unknown, int bufferWidth, TPointer mp4AvcCscStructAddr)
{
	SceMp4AvcCscStruct cscStruct = new SceMp4AvcCscStruct();
	cscStruct.read(mp4AvcCscStructAddr);

	// Struct width/height are in 16-pixel macroblock units; the full-frame
	// range starts at (0, 0) and spans the complete image.
	int fullWidth = cscStruct.width << 4;
	int fullHeight = cscStruct.height << 4;

	//if (log.DebugEnabled)
	{
		Console.WriteLine(string.Format("sceMpegBaseCscAvc {0}", cscStruct));
	}

	return hleMpegBaseCscAvcRange(bufferRGB, unknown, bufferWidth, cscStruct, 0, 0, fullWidth, fullHeight);
}
/// <summary>
/// Converts a rectangular sub-range of a decoded AVC frame from the PSP
/// YCbCr layout to RGB.
/// </summary>
/// <param name="bufferRGB">destination RGB pixel buffer in PSP memory</param>
/// <param name="unknown">unknown parameter, passed through unchanged</param>
/// <param name="rangeAddr">address of 4 ints (x, y, width, height), each in 16-pixel macroblock units</param>
/// <param name="bufferWidth">destination line width in pixels</param>
/// <param name="mp4AvcCscStructAddr">address of the SceMp4AvcCscStruct describing the source planes</param>
/// <returns>0 on success</returns>
public virtual int sceMpegBaseCscAvcRange(TPointer bufferRGB, int unknown, TPointer32 rangeAddr, int bufferWidth, TPointer mp4AvcCscStructAddr)
{
	SceMp4AvcCscStruct cscStruct = new SceMp4AvcCscStruct();
	cscStruct.read(mp4AvcCscStructAddr);

	// Each range value is stored in macroblock units; scale to pixels.
	int rangeX = rangeAddr.getValue(0) << 4;
	int rangeY = rangeAddr.getValue(4) << 4;
	int rangeWidth = rangeAddr.getValue(8) << 4;
	int rangeHeight = rangeAddr.getValue(12) << 4;

	//if (log.DebugEnabled)
	{
		Console.WriteLine(string.Format("sceMpegBaseCscAvcRange range x={0:D}, y={1:D}, width={2:D}, height={3:D}, {4}", rangeX, rangeY, rangeWidth, rangeHeight, cscStruct));
	}

	return hleMpegBaseCscAvcRange(bufferRGB, unknown, bufferWidth, cscStruct, rangeX, rangeY, rangeWidth, rangeHeight);
}
/// <summary>
/// Copies the YCbCr plane buffers described by one SceMp4AvcCscStruct to
/// another. Bit 0 of flags selects buffers 0/1/4/5, bit 1 selects buffers
/// 2/3/6/7 (these two groups hold the even and odd image rows respectively,
/// matching the read pattern in hleMpegBaseCscAvcRange).
/// </summary>
/// <param name="dst">address of the destination SceMp4AvcCscStruct</param>
/// <param name="src">address of the source SceMp4AvcCscStruct</param>
/// <param name="flags">bitmask selecting which buffer group(s) to copy</param>
/// <returns>0 on success</returns>
public virtual int sceMpegBaseYCrCbCopy(TPointer dst, TPointer src, int flags)
{
	SceMp4AvcCscStruct destination = new SceMp4AvcCscStruct();
	destination.read(dst);
	SceMp4AvcCscStruct source = new SceMp4AvcCscStruct();
	source.read(src);

	// Block counts for the two interleaved 32-pixel column groups
	// (size1: columns starting at x=0, size2: columns starting at x=16).
	int size1 = ((source.width + 16) >> 5) * (source.height >> 1);
	int size2 = (source.width >> 5) * (source.height >> 1);

	//if (log.DebugEnabled)
	{
		Console.WriteLine(string.Format("sceMpegBaseYCrCbCopy dstStruct: {0}", destination));
		Console.WriteLine(string.Format("sceMpegBaseYCrCbCopy srcStruct: {0}", source));
		Console.WriteLine(string.Format("sceMpegBaseYCrCbCopy size1=0x{0:X}, size2=0x{1:X}", size1, size2));
	}

	Memory mem = Memory.Instance;
	if ((flags & 1) != 0)
	{
		// Luma buffers 0/1 and chroma buffers 4/5 (chroma is half-sized).
		copyBlocks(mem, destination.buffer0, source.buffer0, size1);
		copyBlocks(mem, destination.buffer1, source.buffer1, size2);
		copyBlocks(mem, destination.buffer4, source.buffer4, size1 >> 1);
		copyBlocks(mem, destination.buffer5, source.buffer5, size2 >> 1);
	}
	if ((flags & 2) != 0)
	{
		// Luma buffers 2/3 and chroma buffers 6/7 (chroma is half-sized).
		copyBlocks(mem, destination.buffer2, source.buffer2, size1);
		copyBlocks(mem, destination.buffer3, source.buffer3, size2);
		copyBlocks(mem, destination.buffer6, source.buffer6, size1 >> 1);
		copyBlocks(mem, destination.buffer7, source.buffer7, size2 >> 1);
	}

	return 0;
}
/// <summary>
/// Core YCbCr-to-ABGR conversion for a rectangular range of a decoded frame.
///
/// The PSP stores the decoded image in 8 buffers: luma in buffer0/1 (even
/// rows, 32-pixel columns interleaved between the two buffers) and
/// buffer2/3 (odd rows), chroma (interleaved Cb/Cr pairs) in buffer4/5
/// (even chroma rows) and buffer6/7 (odd chroma rows). This method gathers
/// the planes into linear luma/cb/cr arrays, converts to ABGR8888 and
/// writes the requested range to bufferRGB.
/// </summary>
/// <param name="bufferRGB">destination pixel buffer in PSP memory</param>
/// <param name="unknown">unknown parameter, currently unused here</param>
/// <param name="bufferWidth">destination line width in pixels</param>
/// <param name="mp4AvcCscStruct">plane descriptors; width/height in macroblock units</param>
/// <param name="rangeX">range origin X in pixels</param>
/// <param name="rangeY">range origin Y in pixels</param>
/// <param name="rangeWidth">range width in pixels</param>
/// <param name="rangeHeight">range height in pixels</param>
/// <returns>0 on success</returns>
private int hleMpegBaseCscAvcRange(TPointer bufferRGB, int unknown, int bufferWidth, SceMp4AvcCscStruct mp4AvcCscStruct, int rangeX, int rangeY, int rangeWidth, int rangeHeight)
{
	int width = mp4AvcCscStruct.width << 4;
	int height = mp4AvcCscStruct.height << 4;
	// It seems that the pixel output format is always ABGR8888.
	int videoPixelMode = TPSM_PIXEL_STORAGE_MODE_32BIT_ABGR8888;
	int bytesPerPixel = sceDisplay.getPixelFormatBytes(videoPixelMode);
	int destAddr = bufferRGB.Address;
	// Chroma planes are subsampled 2x in both directions (4:2:0).
	int width2 = width >> 1;
	int height2 = height >> 1;
	int lumaLength = width * height;
	int length2 = width2 * height2;

	// Read the YCbCr image.
	// See the description of the format used by the PSP in sceVideocodecDecode().
	int[] luma = getIntBuffer(lumaLength);
	int[] cb = getIntBuffer(length2);
	int[] cr = getIntBuffer(length2);
	// sizeY1 covers the 32-pixel columns starting at x=0 (rounded up),
	// sizeY2 the columns starting at x=16; each holds half the rows.
	int sizeY1 = ((width + 16) >> 5) * (height >> 1) * 16;
	int sizeY2 = (width >> 5) * (height >> 1) * 16;
	int sizeCrCb1 = sizeY1 >> 1;
	// Fixed: was "sizeY1 >> 1" (copy-paste typo) which over-sized the
	// buffer5/buffer7 reads whenever width % 32 == 16. The chroma buffer
	// sizes must mirror their luma counterparts: sizeCrCb1<->sizeY1,
	// sizeCrCb2<->sizeY2.
	int sizeCrCb2 = sizeY2 >> 1;
	int[] bufferY1 = getIntBuffer(sizeY1);
	int[] bufferY2 = getIntBuffer(sizeY2);
	int[] bufferCrCb1 = getIntBuffer(sizeCrCb1);
	int[] bufferCrCb2 = getIntBuffer(sizeCrCb2);

	// First buffer group: even rows.
	read(mp4AvcCscStruct.buffer0, sizeY1, bufferY1, 0);
	read(mp4AvcCscStruct.buffer1, sizeY2, bufferY2, 0);
	read(mp4AvcCscStruct.buffer4, sizeCrCb1, bufferCrCb1, 0);
	read(mp4AvcCscStruct.buffer5, sizeCrCb2, bufferCrCb2, 0);

	// Scatter luma: buffer0 holds 16-pixel runs for columns 0,32,64,...
	// on even rows; buffer1 the runs for columns 16,48,80,...
	for (int x = 0, j = 0; x < width; x += 32)
	{
		for (int y = 0, i = x; y < height; y += 2, i += 2 * width, j += 16)
		{
			Array.Copy(bufferY1, j, luma, i, 16);
		}
	}
	for (int x = 16, j = 0; x < width; x += 32)
	{
		for (int y = 0, i = x; y < height; y += 2, i += 2 * width, j += 16)
		{
			Array.Copy(bufferY2, j, luma, i, 16);
		}
	}
	// Scatter chroma: Cb/Cr are interleaved pairwise in the source buffers.
	for (int x = 0, j = 0; x < width2; x += 16)
	{
		for (int y = 0; y < height2; y += 2)
		{
			for (int xx = 0, i = y * width2 + x; xx < 8; xx++, i++)
			{
				cb[i] = bufferCrCb1[j++];
				cr[i] = bufferCrCb1[j++];
			}
		}
	}
	for (int x = 0, j = 0; x < width2; x += 16)
	{
		for (int y = 1; y < height2; y += 2)
		{
			for (int xx = 0, i = y * width2 + x; xx < 8; xx++, i++)
			{
				cb[i] = bufferCrCb2[j++];
				cr[i] = bufferCrCb2[j++];
			}
		}
	}

	// Second buffer group: odd rows, same layout with the work buffers reused.
	read(mp4AvcCscStruct.buffer2, sizeY1, bufferY1, 0);
	read(mp4AvcCscStruct.buffer3, sizeY2, bufferY2, 0);
	read(mp4AvcCscStruct.buffer6, sizeCrCb1, bufferCrCb1, 0);
	read(mp4AvcCscStruct.buffer7, sizeCrCb2, bufferCrCb2, 0);
	for (int x = 0, j = 0; x < width; x += 32)
	{
		for (int y = 1, i = x + width; y < height; y += 2, i += 2 * width, j += 16)
		{
			Array.Copy(bufferY1, j, luma, i, 16);
		}
	}
	for (int x = 16, j = 0; x < width; x += 32)
	{
		for (int y = 1, i = x + width; y < height; y += 2, i += 2 * width, j += 16)
		{
			Array.Copy(bufferY2, j, luma, i, 16);
		}
	}
	// Odd-group chroma lands at the x+8 column offsets.
	for (int x = 8, j = 0; x < width2; x += 16)
	{
		for (int y = 0; y < height2; y += 2)
		{
			for (int xx = 0, i = y * width2 + x; xx < 8; xx++, i++)
			{
				cb[i] = bufferCrCb1[j++];
				cr[i] = bufferCrCb1[j++];
			}
		}
	}
	for (int x = 8, j = 0; x < width2; x += 16)
	{
		for (int y = 1; y < height2; y += 2)
		{
			for (int xx = 0, i = y * width2 + x; xx < 8; xx++, i++)
			{
				cb[i] = bufferCrCb2[j++];
				cr[i] = bufferCrCb2[j++];
			}
		}
	}
	releaseIntBuffer(bufferY1);
	releaseIntBuffer(bufferY2);
	releaseIntBuffer(bufferCrCb1);
	releaseIntBuffer(bufferCrCb2);

	// Convert YCbCr to ABGR
	int[] abgr = getIntBuffer(lumaLength);
	H264Utils.YUV2ABGR(width, height, luma, cb, cr, abgr);
	releaseIntBuffer(luma);
	releaseIntBuffer(cb);
	releaseIntBuffer(cr);

	// Do not cache the video image as a texture in the VideoEngine to allow fluid rendering
	VideoEngine.Instance.addVideoTexture(destAddr, destAddr + (rangeY + rangeHeight) * bufferWidth * bytesPerPixel);

	// Write the ABGR image
	if (videoPixelMode == TPSM_PIXEL_STORAGE_MODE_32BIT_ABGR8888 && RuntimeContext.hasMemoryInt())
	{
		// Optimize the most common case: copy whole lines directly into
		// the emulator's backing memory array.
		int pixelIndex = rangeY * width + rangeX;
		int addr = destAddr;
		for (int i = 0; i < rangeHeight; i++)
		{
			Array.Copy(abgr, pixelIndex, RuntimeContext.MemoryInt, addr >> 2, rangeWidth);
			pixelIndex += width;
			addr += bufferWidth * bytesPerPixel;
		}
	}
	else
	{
		// Generic path: convert each pixel to the destination format.
		int addr = destAddr;
		for (int i = 0; i < rangeHeight; i++)
		{
			IMemoryWriter memoryWriter = MemoryWriter.getMemoryWriter(addr, rangeWidth * bytesPerPixel, bytesPerPixel);
			int pixelIndex = (i + rangeY) * width + rangeX;
			for (int j = 0; j < rangeWidth; j++, pixelIndex++)
			{
				int abgr8888 = abgr[pixelIndex];
				int pixelColor = Debug.getPixelColor(abgr8888, videoPixelMode);
				memoryWriter.writeNext(pixelColor);
			}
			memoryWriter.flush();
			addr += bufferWidth * bytesPerPixel;
		}
	}
	releaseIntBuffer(abgr);

	return 0;
}