/// <summary>
/// Encode the input data as per provided image format.
/// </summary>
/// <param name="data">An array of data to be encoded.</param>
/// <param name="width">Image width.</param>
/// <param name="height">Image height.</param>
/// <param name="format">Graphics format used by the render texture.</param>
/// <param name="imageFormat">Format for encoding the data.</param>
/// <param name="additionalParam">Additional flags to be passed for the encoding.
/// For EXR this carries <see cref="Texture2D.EXRFlags"/>; for JPG it carries the
/// quality (a value &lt;= 0 selects the default quality).</param>
/// <returns>The encoded data, or <paramref name="data"/> unchanged for <see cref="ImageFormat.Raw"/>.</returns>
/// <exception cref="NotSupportedException">Thrown when <paramref name="imageFormat"/> is not supported.</exception>
public static Array EncodeArray(Array data, int width, int height, GraphicsFormat format, ImageFormat imageFormat, int additionalParam = 0)
{
    // JPEG quality used when the caller does not supply a positive value.
    const int kDefaultJpegQuality = 75;

    switch (imageFormat)
    {
        case ImageFormat.Raw:
            // Raw passes the data through untouched.
            return data;
#if UNITY_2019_3_OR_NEWER
        case ImageFormat.Png:
            return ImageConversion.EncodeArrayToPNG(data, format, (uint)width, (uint)height, 0);
        case ImageFormat.Exr:
            // additionalParam is reinterpreted as Texture2D.EXRFlags for EXR encoding.
            return ImageConversion.EncodeArrayToEXR(data, format, (uint)width, (uint)height, 0, (Texture2D.EXRFlags)additionalParam);
        case ImageFormat.Tga:
            return ImageConversion.EncodeArrayToTGA(data, format, (uint)width, (uint)height, 0);
#endif
        case ImageFormat.Jpg:
        {
            // additionalParam is reinterpreted as the JPEG quality; fall back to the default when unset.
            var quality = additionalParam > 0 ? additionalParam : kDefaultJpegQuality;
#if USIM_USE_BUILTIN_JPG_ENCODER
            return ImageConversion.EncodeArrayToJPG(data, format, (uint)width, (uint)height, 0, quality);
#else
            return JpegEncoder.Encode(ArrayUtilities.Cast<byte>(data), width, height, GraphicsUtilities.GetBlockSize(format), format, quality);
#endif
        }
        default:
            throw new NotSupportedException("ImageFormat is not supported");
    }
}
/// <summary>
/// Write the data to the file system and inform the consumers for uploading it to the cloud.
/// </summary>
/// <param name="path">Full path to the file.</param>
/// <param name="data">An array of data</param>
/// <param name="uploadSynchronously">boolean indicating if the upload needs to happen synchronously.</param>
/// <param name="isArtifact">A flag indicating if the data being written is an artifact or not.</param>
/// <returns>boolean indicating if the write was successful</returns>
public static bool Write(string path, Array data, bool uploadSynchronously = false, bool isArtifact = true)
{
    Debug.Assert(!string.IsNullOrEmpty(path), "Write path cannot be empty or null.");
    Debug.Assert(data != null, "Array data cannot be null.");

    // Debug mode: skip the disk write entirely but still notify consumers.
    if (Options.debugDontWriteFiles)
    {
        Manager.Instance.ConsumerFileProduced(path, uploadSynchronously, isArtifact);
        return true;
    }

    try
    {
        var bytes = ArrayUtilities.Cast<byte>(data);

        // The using statement guarantees the stream is closed on every path,
        // replacing the previous manual Close/null/finally bookkeeping.
        using (var file = File.Create(path, kFileDataBufferSize))
        {
            file.Write(bytes, 0, bytes.Length);
        }

        // Bug fix: propagate the caller's uploadSynchronously flag. The success
        // path previously hard-coded false here while the debug path above
        // honored the parameter, silently forcing asynchronous uploads.
        Manager.Instance.ConsumerFileProduced(path, uploadSynchronously, isArtifact);
        return true;
    }
    catch (Exception e)
    {
        Log.E("FileProducer.Write exception : " + e.ToString());
        return false;
    }
}
/// <summary>
/// Perform the readback from the provided Render texture using ReadPixels API.
/// </summary>
/// <param name="renderTexture">Input source Render texture for the readback.</param>
/// <returns>Returns a byte array of the RGB data retrieved from the readback.</returns>
/// <exception cref="NotSupportedException">Thrown for single-channel formats whose component size is neither 2 nor 4 bytes.</exception>
public static byte[] GetPixelsSlow(RenderTexture renderTexture)
{
    var gfxFormat = GraphicsFormatUtility.GetGraphicsFormat(renderTexture.format, false);
    var bytesPerPixel = GraphicsUtilities.GetBlockSize(gfxFormat);
    var componentCount = GraphicsFormatUtility.GetComponentCount(gfxFormat);
    var bytesPerComponent = bytesPerPixel / componentCount;
    var readRect = new Rect(0, 0, renderTexture.width, renderTexture.height);

    if (componentCount >= 3 && componentCount <= 4)
    {
        // For RGB(A) the raw texture data can be returned as-is.
        var rgbaTexture = new Texture2D(renderTexture.width, renderTexture.height);
        RenderTexture.active = renderTexture;
        rgbaTexture.ReadPixels(readRect, 0, 0);
        RenderTexture.active = null;
        var raw = rgbaTexture.GetRawTextureData();
        UnityEngine.Object.Destroy(rgbaTexture);
        return raw;
    }

    Debug.Assert(componentCount == 1, "Can only handle a single channel RT.");

    // ReadPixels must target one of RGBA32, ARGB32, RGB24, RGBAFloat or RGBAHalf,
    // so single-channel sources (R16, RFloat) are read back through RGBAFloat
    // and the R component is extracted below.
    var floatTexture = new Texture2D(renderTexture.width, renderTexture.height, TextureFormat.RGBAFloat, false);
    RenderTexture.active = renderTexture;
    floatTexture.ReadPixels(readRect, 0, 0);
    RenderTexture.active = null;

    var pixelCount = renderTexture.width * renderTexture.height;
    var source = ArrayUtilities.Cast<float>(floatTexture.GetRawTextureData());
    UnityEngine.Object.Destroy(floatTexture);

    Array converted;
    switch (bytesPerComponent)
    {
        case 2:
        {
            // Rescale normalized floats back to the 16-bit integer range.
            var scale = (1 << 16) - 1;
            var target = ArrayUtilities.Allocate<short>(pixelCount);
            for (var i = 0; i < pixelCount; ++i)
            {
                // Stride of 4 picks the R component out of each RGBA pixel.
                target[i] = (short)(scale * source[i * 4]);
            }
            converted = target;
            break;
        }
        case 4:
        {
            var target = ArrayUtilities.Allocate<float>(pixelCount);
            for (var i = 0; i < pixelCount; ++i)
            {
                target[i] = source[i * 4];
            }
            converted = target;
            break;
        }
        default:
            throw new NotSupportedException();
    }

    return ArrayUtilities.Cast<byte>(converted);
}