/// <summary>
/// Verifies that a solid-color 64x64 image survives a JPEG encode/decode round
/// trip with at most <c>kDeviation</c> per-channel error (JPEG is lossy, so an
/// exact match cannot be expected).
/// </summary>
public void ImageFormat_EncodeJPG_DecodeJPG_ProducesSimilarData()
{
    const int kDimension = 64;
    const int kLength = kDimension * kDimension;
    const int kDeviation = 3;

    // Random.Range(int, int) has an EXCLUSIVE upper bound, so 256 is required
    // to make the full byte range 0..255 reachable (255 was previously
    // unreachable with Range(0, 255)).
    var color = new Color32(
        (byte)UnityEngine.Random.Range(0, 256),
        (byte)UnityEngine.Random.Range(0, 256),
        (byte)UnityEngine.Random.Range(0, 256),
        255);

    // Fill the whole image with the one random color.
    var data = ArrayUtilities.Allocate<Color32>(kLength);
    for (var i = 0; i < kLength; ++i)
    {
        data[i] = color;
    }

    var encoded = JpegEncoder.Encode(
        ArrayUtilities.Cast<byte>(data),
        kDimension,
        kDimension,
        (int)GraphicsFormatUtility.GetBlockSize(GraphicsFormat.R8G8B8A8_UNorm),
        GraphicsFormat.R8G8B8A8_UNorm);

    int width = 0, height = 0;
    var decoded = ArrayUtilities.Cast<Color32>(JpegEncoder.Decode(encoded, ref width, ref height));
    Debug.Assert(width == kDimension && height == kDimension);
    Debug.Assert(ArrayUtilities.Count<Color32>(data) == ArrayUtilities.Count<Color32>(decoded));

    // Count pixels where any channel deviates by MORE than kDeviation.
    int count = 0;
    for (var i = 0; i < kLength; ++i)
    {
        int rd = Math.Abs((int)data[i].r - (int)decoded[i].r);
        int gd = Math.Abs((int)data[i].g - (int)decoded[i].g);
        int bd = Math.Abs((int)data[i].b - (int)decoded[i].b);
        int ad = Math.Abs((int)data[i].a - (int)decoded[i].a);
        if (rd > kDeviation || gd > kDeviation || bd > kDeviation || ad > kDeviation)
        {
            ++count;
        }
    }

    // Message now matches the strict ">" comparison above (previously claimed
    // "{1} or more" while the check only flagged deviations greater than kDeviation).
    Debug.AssertFormat(count == 0, "{0} pixels had deviation of more than {1} from original data.", count, kDeviation);
}
/// <summary>
/// Drains all queued readback requests, converting each request's RenderTexture
/// contents into a raw byte buffer and delivering it via the request callback.
/// Requests are recycled into <c>_requestsPool</c> once processed.
/// </summary>
private void ProcessBatch()
{
    while (_requestsBatch.Count > 0)
    {
        var request = _requestsBatch.Dequeue();
        var graphicsFormat = GraphicsFormatUtility.GetGraphicsFormat(request.renderTexture.format, false);
        var pixelSize = GraphicsFormatUtility.GetBlockSize(graphicsFormat);
        var channels = GraphicsFormatUtility.GetComponentCount(graphicsFormat);
        var channelSize = pixelSize / channels;
        var rect = new Rect(0, 0, request.renderTexture.width, request.renderTexture.height);

        if (channels >= 3 && channels <= 4)
        {
            // RGB(A) formats can be read back directly into a matching texture,
            // which is cached on the request and reused across batches.
            if (request.texture == null)
            {
                request.texture = new Texture2D(
                    request.renderTexture.width,
                    request.renderTexture.height,
                    request.renderTexture.graphicsFormat,
                    TextureCreationFlags.None);
            }

            RenderTexture.active = request.renderTexture;
            request.texture.ReadPixels(rect, 0, 0);
            request.InvokeCallback(request.texture.GetRawTextureData());
            RenderTexture.active = null;
        }
        else
        {
            Debug.Assert(channels == 1, "Can only handle a single channel RT.");

            // ReadPixels must target one of RGBA32, ARGB32, RGB24, RGBAFloat or
            // RGBAHalf, so single-channel formats (e.g. R16, RFloat) are read via
            // RGBAFloat and only the red channel is extracted below.
            var texture = new Texture2D(request.renderTexture.width, request.renderTexture.height, TextureFormat.RGBAFloat, false);
            RenderTexture.active = request.renderTexture;
            texture.ReadPixels(rect, 0, 0);
            RenderTexture.active = null;

            var length = request.renderTexture.width * request.renderTexture.height;
            var input = ArrayUtilities.Cast<float>(texture.GetRawTextureData());
            UnityEngine.Object.Destroy(texture);

            int index = 0;
            switch (channelSize)
            {
                case 2:
                {
                    // Re-quantize normalized floats back to 16-bit values.
                    // NOTE: the (short) cast wraps for inputs above 0.5, but the
                    // raw 16-bit pattern is identical to the unsigned value, and
                    // only the reinterpreted bytes are delivered to the callback.
                    short[] shorts = ArrayUtilities.Allocate<short>(length);
                    var si = 0;
                    const int numerator = (1 << 16) - 1;
                    while (index < length)
                    {
                        shorts[index++] = (short)(numerator * input[si]);
                        si += 4; // stride over the unused G, B, A channels
                    }
                    // The previous implementation also wrapped this buffer in a
                    // NativeArray<byte> with Allocator.Persistent that was never
                    // used nor disposed — a native-memory leak per request.
                    request.InvokeCallback(ArrayUtilities.Cast<byte>(shorts));
                    break;
                }
                case 4:
                {
                    // RFloat: copy the red channel through unchanged.
                    float[] floats = ArrayUtilities.Allocate<float>(length);
                    var fi = 0;
                    while (index < length)
                    {
                        floats[index++] = input[fi];
                        fi += 4; // stride over the unused G, B, A channels
                    }
                    request.InvokeCallback(ArrayUtilities.Cast<byte>(floats));
                    break;
                }
                default:
                    throw new NotSupportedException();
            }
        }

        _requestsPool.Enqueue(request);
    }
}