public static Encode(Array raw, int width, int height, int pixelSize, GraphicsFormat textureFormat, int quality = 75, Flags flags = Flags.TJ_BOTTOMUP) : byte[]
raw | Array |
width | int |
height | int |
pixelSize | int |
textureFormat | GraphicsFormat |
quality | int |
flags | Flags |
Result | byte[]
/// <summary>
/// Encode the input data as per provided image format.
/// </summary>
/// <param name="data">An array of data to be encoded.</param>
/// <param name="width">Image width.</param>
/// <param name="height">Image height.</param>
/// <param name="format">Graphics format used by the render texture.</param>
/// <param name="imageFormat">Format for encoding the data.</param>
/// <param name="additionalParam">Additional flags to be passed for the encoding
/// (EXR flags for EXR; JPEG quality for JPG, defaulting to 75 when &lt;= 0).</param>
/// <returns>The encoded array, or the input unchanged for <see cref="ImageFormat.Raw"/>.</returns>
/// <exception cref="NotSupportedException">Thrown for unsupported image formats
/// (including PNG/EXR/TGA when not compiled against Unity 2019.3+).</exception>
public static Array EncodeArray(Array data, int width, int height, GraphicsFormat format, ImageFormat imageFormat, int additionalParam = 0)
{
    switch (imageFormat)
    {
    case ImageFormat.Raw:
        // No encoding requested: pass the buffer through untouched.
        return(data);
#if UNITY_2019_3_OR_NEWER
    case ImageFormat.Png:
        return(ImageConversion.EncodeArrayToPNG(data, format, (uint)width, (uint)height, 0));
    case ImageFormat.Exr:
        return(ImageConversion.EncodeArrayToEXR(data, format, (uint)width, (uint)height, 0, /*EXRFlags*/ (Texture2D.EXRFlags)additionalParam));
    case ImageFormat.Tga:
        return(ImageConversion.EncodeArrayToTGA(data, format, (uint)width, (uint)height, 0));
#endif
    case ImageFormat.Jpg:
#if USIM_USE_BUILTIN_JPG_ENCODER
        return(ImageConversion.EncodeArrayToJPG(data, format, (uint)width, (uint)height, 0, /*quality*/ additionalParam > 0 ? (int)additionalParam : 75));
#else
        // Fall back to the bundled JPEG encoder when Unity's built-in one is unavailable.
        return(JpegEncoder.Encode(ArrayUtilities.Cast <byte>(data), width, height, GraphicsUtilities.GetBlockSize(format), format, /*quality*/ additionalParam > 0 ? (int)additionalParam : 75));
#endif
    default:
        throw new NotSupportedException("ImageFormat is not supported");
    }
}
// Encodes the fixed RGBA test image as a 4:4:4 (all components sampled 1x1) baseline
// JPEG using explicitly supplied standard tables, discarding the output bytes.
public void TestJpegLibraryEncode444()
{
    var encoder = new JpegEncoder();
    // Quality-75 scaled standard tables: slot 0 = luminance, slot 1 = chrominance.
    encoder.SetQuantizationTable(JpegStandardQuantizationTable.ScaleByQuality(JpegStandardQuantizationTable.GetLuminanceTable(JpegElementPrecision.Precision8Bit, 0), 75));
    encoder.SetQuantizationTable(JpegStandardQuantizationTable.ScaleByQuality(JpegStandardQuantizationTable.GetChrominanceTable(JpegElementPrecision.Precision8Bit, 1), 75));
    encoder.SetHuffmanTable(true, 0, JpegStandardHuffmanEncodingTable.GetLuminanceDCTable());
    encoder.SetHuffmanTable(false, 0, JpegStandardHuffmanEncodingTable.GetLuminanceACTable());
    encoder.SetHuffmanTable(true, 1, JpegStandardHuffmanEncodingTable.GetChrominanceDCTable());
    encoder.SetHuffmanTable(false, 1, JpegStandardHuffmanEncodingTable.GetChrominanceACTable());
    encoder.AddComponent(1, 0, 0, 0, 1, 1); // Y component
    encoder.AddComponent(2, 1, 1, 1, 1, 1); // Cb component
    encoder.AddComponent(3, 1, 1, 1, 1, 1); // Cr component
    // Pooled scratch buffer holds the converted 3-bytes-per-pixel YCbCr image.
    byte[] ycbcr = ArrayPool <byte> .Shared.Rent(3 * _width * _height);
    try
    {
        JpegRgbToYCbCrConverter.Shared.ConvertRgba32ToYCbCr8(MemoryMarshal.AsBytes(_rgba.AsSpan()), ycbcr, _width * _height);
        encoder.SetInputReader(new JpegBufferInputReader(_width, _height, 3, ycbcr));
        // Output is discarded: this test only exercises the encode path.
        using var bufferWriter = new NullBufferWriter();
        encoder.SetOutput(bufferWriter);
        encoder.Encode();
    }
    finally
    {
        // Always return the rented buffer, even if encoding throws.
        ArrayPool <byte> .Shared.Return(ycbcr);
    }
}
/// <summary>
/// Prompts the user with a save dialog and writes a rendering of the UI element
/// to the chosen file as PNG or JPEG, selected by the file extension.
/// </summary>
/// <param name="uiElement">The element to render and save.</param>
public static void SaveImage(this UIElement uiElement)
{
    var dialog = new SaveFileDialog
    {
        DefaultExt = ".png",
        Filter = "PNG | *.png | JPG | *.jpg",
    };
    var save = dialog.ShowDialog();
    if (save.HasValue && save.Value)
    {
        // FIX: dispose the stream deterministically (was leaked if encoding threw;
        // Close was only reached on the success path).
        using (var saveStream = dialog.OpenFile())
        {
            var bitmap = new WriteableBitmap(uiElement, new TranslateTransform());
            var image = bitmap.ToImage();
            // FIX: case-insensitive match so ".PNG"/".JPG" file names are honored too.
            if (dialog.SafeFileName.EndsWith(".png", StringComparison.OrdinalIgnoreCase))
            {
                var encoder = new PngEncoder();
                encoder.Encode(image, saveStream);
            }
            else if (dialog.SafeFileName.EndsWith(".jpg", StringComparison.OrdinalIgnoreCase))
            {
                var encoder = new JpegEncoder();
                encoder.Encode(image, saveStream);
            }
        }
    }
}
// Round-trip test: encode a known YCbCr raster at quality 50, decode it back, and
// verify every sample is within a small tolerance of the original (lossy bound).
public void JpegEncoderDecoderTest()
{
    // Encode and decode a basic raster structure.
    var colorModel = new ColorModel();
    colorModel.ColorSpace = ColorSpace.YCbCr;
    colorModel.Opaque = true;
    byte[][][] originalRaster = GetRaster();
    var image = new Image(colorModel, originalRaster);
    var stream = new MemoryStream();
    var encoder = new JpegEncoder(image, 50, stream);
    encoder.Encode();
    // Rewind before decoding the freshly written bytes.
    stream.Seek(0, SeekOrigin.Begin);
    var decoder = new JpegDecoder(stream);
    DecodedJpeg decodedImage = decoder.Decode();
    // Check that the returned raster structure looks something like what we passed in.
    // Raster is indexed [band][x][y]; `width`/`height` are test-fixture fields.
    for (int i = 0; i < 3; i++)
    {
        for (int j = 0; j < width; j++)
        {
            for (int k = 0; k < height; k++)
            {
                // Tune this.
                int diff = Math.Abs(decodedImage.Image.Raster[i][j][k] - originalRaster[i][j][k]);
                Assert.IsTrue(diff < 5);
            }
        }
    }
    ClientLogger.Debug("Finished JpegEncoderDecoder test.");
}
/// <summary>
/// Saves the image to the given stream with the jpeg format.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="source">The image this method extends.</param>
/// <param name="stream">The stream to save the image to.</param>
/// <param name="encoder">The options for the encoder; a default encoder is used when null.</param>
/// <exception cref="System.ArgumentNullException">Thrown if the stream is null.</exception>
/// <returns>
/// The <see cref="Image{TPixel}"/>, enabling call chaining.
/// </returns>
public static Image <TPixel> SaveAsJpeg <TPixel>(this Image <TPixel> source, Stream stream, JpegEncoder encoder)
    where TPixel : struct, IPixel <TPixel>
{
    if (encoder == null)
    {
        encoder = new JpegEncoder();
    }
    encoder.Encode(source, stream);
    return(source);
}
// Drains completed AsyncGPUReadback requests: for each finished capture, copies the
// GPU pixels into a pooled buffer and queues a background task that JPEG-encodes and
// publishes the frame. Failed or unpublishable captures just recycle their buffers.
void ProcessReadbackRequests()
{
    foreach (var capture in CaptureList)
    {
        if (capture.Request.hasError)
        {
            // Recycle the GPU staging buffer even on failure.
            AvailableGpuDataArrays.Add(capture.GpuData);
            Debug.Log("Failed to read GPU texture");
        }
        else if (capture.Request.done)
        {
            if (Bridge != null && Bridge.Status == Status.Connected)
            {
                // TODO: Remove the following two lines of extra memory copy, when we can use
                // AsyncGPUReadback.RequestIntoNativeArray.
                var data = capture.Request.GetData <byte>();
                NativeArray <byte> .Copy(data, capture.GpuData, data.Length);
                var imageData = new ImageData()
                {
                    Name = Name,
                    Frame = Frame,
                    Width = Width,
                    Height = Height,
                    Sequence = Sequence,
                };
                // Reuse a pooled output buffer when available to avoid per-frame allocation.
                if (!JpegOutput.TryTake(out imageData.Bytes))
                {
                    imageData.Bytes = new byte[MaxJpegSize];
                }
                Tasks.Enqueue(Task.Run(() =>
                {
                    imageData.Length = JpegEncoder.Encode(capture.GpuData, Width, Height, 4, JpegQuality, imageData.Bytes);
                    if (imageData.Length > 0)
                    {
                        imageData.Time = capture.CaptureTime;
                        ImageWriter.Write(imageData);
                    }
                    else
                    {
                        Debug.Log("Compressed image is empty, length = 0");
                    }
                    // Return both buffers to their pools regardless of encode outcome.
                    JpegOutput.Add(imageData.Bytes);
                    AvailableGpuDataArrays.Add(capture.GpuData);
                }));
                Sequence++;
            }
            else
            {
                // Bridge unavailable: drop the frame but recycle the buffer.
                AvailableGpuDataArrays.Add(capture.GpuData);
            }
        }
    }
    // Remove every request handled above (done == true covers the error path too? —
    // NOTE(review): errored-but-not-done requests stay in the list; confirm intended).
    CaptureList.RemoveAll(capture => capture.Request.done == true);
}
/// <summary>
/// Loads an RGB image from <paramref name="source"/>, converts it to YCbCr, and writes
/// a 4:2:0 baseline JPEG to <paramref name="output"/> at the given quality. The work is
/// synchronous; the method returns an already-completed task with result 0.
/// </summary>
/// <param name="source">Input image file (any format ImageSharp can load).</param>
/// <param name="output">Destination JPEG file.</param>
/// <param name="quality">JPEG quality in (0, 100].</param>
/// <param name="optimizeCoding">When true, let the encoder derive optimal Huffman
/// tables from the image instead of using the standard tables.</param>
/// <exception cref="ArgumentOutOfRangeException">Quality outside (0, 100].</exception>
public static Task <int> Encode(FileInfo source, FileInfo output, int quality, bool optimizeCoding)
{
    if (quality <= 0 || quality > 100)
    {
        throw new ArgumentOutOfRangeException(nameof(quality));
    }
    Image <Rgb24> image;
    using (FileStream stream = source.OpenRead())
    {
        image = Image.Load <Rgb24>(stream);
    }
    // Convert RGB to YCbCr, one interleaved 3-byte-per-pixel row at a time.
    byte[] ycbcr = new byte[image.Width * image.Height * 3];
    for (int i = 0; i < image.Height; i++)
    {
        JpegRgbToYCbCrConverter.Shared.ConvertRgb24ToYCbCr8(MemoryMarshal.AsBytes(image.GetPixelRowSpan(i)), ycbcr.AsSpan(3 * image.Width * i, 3 * image.Width), image.Width);
    }
    var encoder = new JpegEncoder();
    // Slot 0 = luminance tables, slot 1 = chrominance tables, scaled to `quality`.
    encoder.SetQuantizationTable(JpegStandardQuantizationTable.ScaleByQuality(JpegStandardQuantizationTable.GetLuminanceTable(JpegElementPrecision.Precision8Bit, 0), quality));
    encoder.SetQuantizationTable(JpegStandardQuantizationTable.ScaleByQuality(JpegStandardQuantizationTable.GetChrominanceTable(JpegElementPrecision.Precision8Bit, 1), quality));
    if (optimizeCoding)
    {
        // No table supplied: the encoder computes optimal Huffman tables itself.
        encoder.SetHuffmanTable(true, 0);
        encoder.SetHuffmanTable(false, 0);
        encoder.SetHuffmanTable(true, 1);
        encoder.SetHuffmanTable(false, 1);
    }
    else
    {
        encoder.SetHuffmanTable(true, 0, JpegStandardHuffmanEncodingTable.GetLuminanceDCTable());
        encoder.SetHuffmanTable(false, 0, JpegStandardHuffmanEncodingTable.GetLuminanceACTable());
        encoder.SetHuffmanTable(true, 1, JpegStandardHuffmanEncodingTable.GetChrominanceDCTable());
        encoder.SetHuffmanTable(false, 1, JpegStandardHuffmanEncodingTable.GetChrominanceACTable());
    }
    // Chroma uses 2x2 factors here, which this library treats as 4:2:0 subsampling.
    encoder.AddComponent(0, 0, 0, 0, 1, 1); // Y component
    encoder.AddComponent(1, 1, 1, 1, 2, 2); // Cb component
    encoder.AddComponent(2, 1, 1, 1, 2, 2); // Cr component
    encoder.SetInputReader(new JpegBufferInputReader(image.Width, image.Height, 3, ycbcr));
    var writer = new ArrayBufferWriter <byte>();
    encoder.SetOutput(writer);
    encoder.Encode();
    using (FileStream stream = output.OpenWrite())
    {
        stream.Write(writer.WrittenSpan);
    }
    return(Task.FromResult(0));
}
/// <summary>
/// Saves the image to the given stream with the jpeg format.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="source">The image this method extends.</param>
/// <param name="stream">The stream to save the image to.</param>
/// <param name="options">The options for the encoder.</param>
/// <exception cref="System.ArgumentNullException">Thrown if the stream is null.</exception>
/// <returns>
/// The <see cref="Image{TPixel}"/>, enabling call chaining.
/// </returns>
public static Image <TPixel> SaveAsJpeg <TPixel>(this Image <TPixel> source, Stream stream, IJpegEncoderOptions options)
    where TPixel : struct, IPixel <TPixel>
{
    var jpegEncoder = new JpegEncoder();
    jpegEncoder.Encode(source, stream, options);
    return(source);
}
// Encodes the image to an in-memory JPEG (quality 85) and rewinds the stream
// so callers can read the encoded bytes from the start.
static MemoryStream Encode(Image image)
{
    var jpegStream = new MemoryStream();
    new JpegEncoder(image, 85, jpegStream).Encode();
    jpegStream.Seek(0, SeekOrigin.Begin);
    return(jpegStream);
}
// Coroutine: renders the camera, reads the frame back from the GPU, JPEG-encodes it on
// a worker thread and publishes it, then waits out the remainder of the 1/Frequency
// period before clearing the Capturing flag.
IEnumerator Capture()
{
    Capturing = true;
    var captureStart = Time.time;
    Camera.Render();
    var readback = AsyncGPUReadback.Request(Camera.targetTexture, 0, TextureFormat.RGBA32);
    yield return(new WaitUntil(() => readback.done));
    if (readback.hasError)
    {
        // Abort: release the render texture and stop this capture cycle.
        Debug.Log("Failed to read GPU texture");
        Camera.targetTexture.Release();
        Camera.targetTexture = null;
        Capturing = false;
        yield break;
    }
    Debug.Assert(readback.done);
    var data = readback.GetData <byte>();
    ReadBuffer.CopyFrom(data);
    bool sending = true;
    // Encode + publish off the main thread; `sending` flips false when the writer
    // completes (or immediately when the encode produced nothing).
    Task.Run(() =>
    {
        Data.Length = JpegEncoder.Encode(ReadBuffer, Width, Height, 4, JpegQuality, Data.Bytes);
        if (Data.Length > 0)
        {
            Data.Time = SimulatorManager.Instance.CurrentTime;
            ImageWriter.Write(Data, () => sending = false);
        }
        else
        {
            Debug.Log("Compressed image is empty, length = 0");
            sending = false;
        }
    });
    yield return(new WaitWhile(() => sending));
    Data.Sequence++;
    // Pace captures to the configured Frequency, accounting for time already spent.
    var captureEnd = Time.time;
    var captureDelta = captureEnd - captureStart;
    var delay = 1.0f / Frequency - captureDelta;
    if (delay > 0)
    {
        yield return(new WaitForSeconds(delay));
    }
    Capturing = false;
}
// Renders the sensor camera (optionally lens-distorted), synchronously reads the
// pixels back, encodes to PNG or JPEG based on the file extension, and writes the
// file. Returns false on readback failure, unsupported extension, zero-length
// encode, or a write error.
public bool Save(string path, int quality, int compression)
{
    CheckTexture();
    SensorCamera.Render();
    if (Distorted)
    {
        LensDistortion.Distort(SensorCamera.targetTexture, DistortedTexture);
    }
    var readback = AsyncGPUReadback.Request(Distorted ? DistortedTexture : SensorCamera.targetTexture, 0, TextureFormat.RGBA32);
    // Blocking readback: acceptable here since Save is an explicit user action.
    readback.WaitForCompletion();
    if (readback.hasError)
    {
        Debug.Log("Failed to read GPU texture");
        return(false);
    }
    Debug.Assert(readback.done);
    var data = readback.GetData <byte>();
    // 16 MiB scratch buffer for the encoded image — assumed large enough for any
    // frame at this sensor's resolution (TODO confirm).
    var bytes = new byte[16 * 1024 * 1024];
    int length;
    var ext = System.IO.Path.GetExtension(path).ToLower();
    if (ext == ".png")
    {
        length = PngEncoder.Encode(data, Width, Height, 4, compression, bytes);
    }
    else if (ext == ".jpeg" || ext == ".jpg")
    {
        length = JpegEncoder.Encode(data, Width, Height, 4, quality, bytes);
    }
    else
    {
        return(false);
    }
    if (length > 0)
    {
        try
        {
            using (var file = System.IO.File.Create(path))
            {
                file.Write(bytes, 0, length);
            }
            return(true);
        }
        catch
        {
            // Best-effort save: IO failures are reported via the false return value.
        }
    }
    return(false);
}
// Encodes raw 3-bytes-per-pixel image data into a JPEG byte array.
// NOTE(review): the 5th argument is the literal 1 — if that parameter is the JPEG
// quality, this produces extremely low-quality output; confirm against the
// JpegEncoder.Encode overload's signature.
static byte[] ToJPG(byte[] tex, int width, int height)
{
    JpegEncoder e = new JpegEncoder();
    using (MemoryStream s = new MemoryStream())
    {
        e.Encode(tex, width, height, 3, 1, s);
        return(s.ToArray());
    }
}
// Drains completed GPU readback requests in FIFO order; each finished frame is
// JPEG-encoded on a worker task and broadcast via delegateCameraDataChanged as a
// right-sized copy. Processing stops at the first not-yet-finished request.
// NOTE(review): `buffer` looks like a field shared across queued tasks — if more
// than one encode task is in flight the same array is reused concurrently; verify.
void ProcessReadbackRequests()
{
    while (CaptureQueue.Count > 0)
    {
        var capture = CaptureQueue.Peek();
        if (capture.Request.hasError)
        {
            CaptureQueue.Dequeue();
            // Recycle the GPU staging buffer on failure.
            AvailableGpuDataArrays.Add(capture.GpuData);
            Debug.Log("Failed to read GPU texture");
        }
        else if (capture.Request.done)
        {
            CaptureQueue.Dequeue();
            // TODO: Remove the following two lines of extra memory copy, when we can use
            // AsyncGPUReadback.RequestIntoNativeArray.
            var data = capture.Request.GetData <byte>();
            NativeArray <byte> .Copy(data, capture.GpuData, data.Length);
            // Reuse a pooled output buffer when available.
            if (!JpegOutput.TryTake(out buffer))
            {
                buffer = new byte[MaxJpegSize];
            }
            Tasks.Enqueue(Task.Run(() =>
            {
                int size = JpegEncoder.Encode(capture.GpuData, Width, Height, 4, JpegQuality, buffer);
                if (size > 0)
                {
                    //print(size);
                    // Copy to an exact-size array so subscribers never see pool slack.
                    byte[] new_buffer = new byte[size];
                    Buffer.BlockCopy(buffer, 0, new_buffer, 0, size);
                    delegateCameraDataChanged?.Invoke(new_buffer);
                }
                else
                {
                    Debug.Log("Compressed image is empty, length = 0");
                }
                // Return both buffers to their pools regardless of encode outcome.
                JpegOutput.Add(buffer);
                AvailableGpuDataArrays.Add(capture.GpuData);
            }));
            Sequence++;
        }
        else
        {
            // Head of the queue not finished yet; later entries cannot be done either.
            break;
        }
    }
}
// Drains completed GPU readback requests in FIFO order; each finished frame is
// JPEG-encoded on a worker task and published via ImageWriter when the bridge is
// connected. Processing stops at the first not-yet-finished request.
void ProcessReadbackRequests()
{
    while (CaptureQueue.Count > 0)
    {
        var capture = CaptureQueue.Peek();
        if (capture.Request.hasError)
        {
            CaptureQueue.Dequeue();
            Debug.Log("Failed to read GPU texture");
        }
        else if (capture.Request.done)
        {
            CaptureQueue.Dequeue();
            var data = capture.Request.GetData <byte>();
            var imageData = new ImageData()
            {
                Name = Name,
                Frame = Frame,
                Width = Width,
                Height = Height,
                Sequence = Sequence,
            };
            // Reuse a pooled output buffer when available.
            if (!JpegOutput.TryTake(out imageData.Bytes))
            {
                imageData.Bytes = new byte[MaxJpegSize];
            }
            if (Bridge != null && Bridge.Status == Status.Connected)
            {
                Task.Run(() =>
                {
                    imageData.Length = JpegEncoder.Encode(data, Width, Height, 4, JpegQuality, imageData.Bytes);
                    if (imageData.Length > 0)
                    {
                        imageData.Time = capture.CaptureTime;
                        ImageWriter.Write(imageData);
                    }
                    else
                    {
                        Debug.Log("Compressed image is empty, length = 0");
                    }
                    // FIX: return the pooled buffer on the failure path too; previously
                    // a failed encode leaked it from the pool, forcing fresh MaxJpegSize
                    // allocations for subsequent frames.
                    JpegOutput.Add(imageData.Bytes);
                });
            }
            else
            {
                // FIX: bridge not connected — recycle the buffer we just took instead
                // of dropping it (the original never returned it in this branch).
                JpegOutput.Add(imageData.Bytes);
            }
            Sequence++;
        }
        else
        {
            // Head of the queue not finished yet; later entries cannot be done either.
            break;
        }
    }
}
// Converts the captured interleaved RGB texture data into FluxJpeg's per-channel
// planar raster (flipping vertically), encodes it to a quality-75 JPEG on disk, and
// optionally duplicates the file per the configured file-name pattern. The numbered
// Log.Message calls are stage markers for diagnosing where an encode run stalls.
private void EncodeFluxJpeg()
{
    Log.Message("0 - start");
    // Get all needed data which could collide as fast as possible
    bool localManuallyTriggered = manuallyTriggered;
    string filePath = CreateCurrentFilePath();
    // Convert temp data to local raw data
    byte[][,] rawImage = new byte[3][, ];
    Log.Message("1");
    rawImage[0] = new byte[imageTextureWidth, imageTextureHeight];
    rawImage[1] = new byte[imageTextureWidth, imageTextureHeight];
    rawImage[2] = new byte[imageTextureWidth, imageTextureHeight];
    // FIX: stage marker was a duplicate "1", making the log trace ambiguous.
    Log.Message("2");
    // Flip vertically while splitting interleaved RGB into per-channel planes.
    for (int row = 0; row < imageTextureHeight; row++)
    {
        for (int col = 0; col < imageTextureWidth; col++)
        {
            int index = ((imageTextureHeight - 1 - row) * imageTextureWidth + col) * 3;
            rawImage[0][col, row] = imageTextureRawData[index];
            rawImage[1][col, row] = imageTextureRawData[index + 1];
            rawImage[2][col, row] = imageTextureRawData[index + 2];
        }
    }
    Log.Message("3 - post raw");
    // Tmp cleanup
    ctrlEncodingPost = true;
    // Encode raw data and save the final image
    ColorModel model = new ColorModel { colorspace = FluxJpeg.Core.ColorSpace.RGB };
    Log.Message("4");
    FluxJpeg.Core.Image image = new FluxJpeg.Core.Image(model, rawImage);
    Log.Message("5 - post image");
    // FIX: using-statement guarantees the file handle is released even if the
    // encoder throws (previously Dispose was only reached on success).
    using (FileStream fileStream = new FileStream(filePath, FileMode.Create))
    {
        Log.Message("6 . post fs");
        JpegEncoder encoder = new JpegEncoder(image, 75, fileStream);
        Log.Message("7 - post encode");
        encoder.Encode();
        Log.Message("8 - post save");
    }
    // Local cleanup
    image = null;
    rawImage = null;
    Log.Message("end - 9");
    // Create tmp copy to file if needed
    if (!localManuallyTriggered && PRModSettings.fileNamePattern == FileNamePattern.BothTmpCopy)
    {
        File.Copy(filePath, CreateFilePath(FileNamePattern.Numbered, true));
    }
}
// Wraps the raw BGRA32 pixel data in an ImageSharp image (dimensions come from the
// `width`/`height` fields), JPEG-encodes it into `destination`, and returns the
// number of bytes written, measured as the stream-position delta.
private int LoadAndEncodeImage(ReadOnlySpan <byte> source, Stream destination)
{
    var startPosition = (int)destination.Position;
    using (var image = Image.LoadPixelData <SixLabors.ImageSharp.PixelFormats.Bgra32>(source, width, height))
    {
        jpegEncoder.Encode(image, destination);
    }
    // Flush so the position reflects everything the encoder buffered.
    destination.Flush();
    return((int)(destination.Position - startPosition));
}
// Compresses one raw frame into an in-memory JPEG stream using the FluxJpeg
// encoder at the configured jpegQuality; the stream is returned un-rewound.
private static MemoryStream EncodeJpeg(byte[] rgbaFrame)
{
    var output = new MemoryStream();
    var frameImage = new Client.Media.Jpeg.Image(VideoConstants.Width, VideoConstants.Height, rgbaFrame);
    new JpegEncoder(frameImage, jpegQuality, output).Encode();
    return(output);
}
// Encodes the bitmap to an in-memory JPEG stream (quality 90), rewound so the
// caller can read from the start. Trial builds watermark the image first.
static Stream GetJpgStream(WriteableBitmap bmp)
{
#if TRIAL
    bmp = AddTrialTextToImage(bmp);
#endif
    var output = new MemoryStream();
    var jpeg = new JpegEncoder { Quality = 90 };
    jpeg.Encode(bmp.ToImage(), output);
    output.Position = 0;
    return(output);
}
// Snapshots the application's root visual, encodes it as a quality-100 JPEG via
// FJCore, and returns the result as a Base64 string.
public static string SaveScreenToString()
{
    var bitmap = new WriteableBitmap(Application.Current.RootVisual, null);
    //Convert the Image to pass into FJCore
    int width = bitmap.PixelWidth;
    int height = bitmap.PixelHeight;
    int bands = 3;
    var raster = new byte[bands][, ];
    for (int i = 0; i < bands; i++)
    {
        raster[i] = new byte[width, height];
    }
    // Split each 32-bit pixel into R/G/B planes (alpha discarded).
    for (int row = 0; row < height; row++)
    {
        for (int column = 0; column < width; column++)
        {
            int pixel = bitmap.Pixels[width * row + column];
            raster[0][column, row] = (byte)(pixel >> 16);
            raster[1][column, row] = (byte)(pixel >> 8);
            raster[2][column, row] = (byte)pixel;
        }
    }
    var model = new ColorModel { colorspace = ColorSpace.RGB };
    var img = new Image(model, raster);
    //Encode the Image as a JPEG
    using (var stream = new MemoryStream())
    {
        var encoder = new JpegEncoder(img, 100, stream);
        encoder.Encode();
        // FIX: MemoryStream.ToArray always returns the full contents regardless of
        // position; the previous Seek + single Read call was not guaranteed to fill
        // the array (Stream.Read may return fewer bytes than requested), and its
        // return value was ignored. Also disposes the stream deterministically.
        return Convert.ToBase64String(stream.ToArray());
    }
}
// Per-frame poll: when a texture readback finishes, compresses it to JPEG (quality 75)
// on a worker thread — serialized by locking the shared jpegArray — and sends it.
// A new capture is started manually (S key) or at the configured sendingFPS.
void Update()
{
    if (Bridge.Status != Ros.Status.Connected)
    {
        return;
    }
    Reader.Update();
    if (Reader.Status == AsyncTextureReader.ReadStatus.Finished)
    {
        var data = Reader.GetData();
#if USE_COMPRESSED
        Task.Run(() =>
        {
            // Lock serializes access to the single shared output array.
            lock (jpegArray)
            {
                int length = JpegEncoder.Encode(data, videoWidth, videoHeight, Reader.BytesPerPixel, 75, jpegArray);
                // Native data is owned by this task once compression runs.
                data.Dispose();
                if (length > 0)
                {
                    SendImage(jpegArray, length);
                }
            }
        });
#else
        SendImage(data.ToArray(), data.Length);
        data.Dispose();
#endif
    }
    // Kick off the next capture only when the reader is idle and nothing is in flight.
    if (Reader.Status != AsyncTextureReader.ReadStatus.Reading && !ImageIsBeingSent)
    {
        if (manual)
        {
            if (Input.GetKeyDown(KeyCode.S))
            {
                Reader.Start();
            }
        }
        else
        {
            if (Time.time - lastTimePoint > 1.0f / sendingFPS)
            {
                lastTimePoint = Time.time;
                Reader.Start();
            }
        }
    }
}
// Per-frame poll: when a texture readback finishes, copies the native data so the
// worker task owns its lifetime, JPEG-encodes under a lock on the shared jpegArray,
// and sends it. A new capture is started manually (M key) or at sendingFPS.
void Update()
{
    if (Bridge == null || Bridge.Status != Ros.Status.Connected)
    {
        return;
    }
    Reader.Update();
    if (Reader.Status == AsyncTextureReaderStatus.Finished)
    {
        var data = Reader.GetData();
#if USE_COMPRESSED
        // Persistent copy keeps the pixels valid after this frame's data is recycled.
        var copy = new NativeArray <byte>(data, Allocator.Persistent);
        Task.Run(() =>
        {
            // Lock serializes access to the single shared output array.
            lock (jpegArray)
            {
                int length = JpegEncoder.Encode(copy, videoWidth, videoHeight, Reader.BytesPerPixel, JpegQuality, jpegArray);
                if (length > 0)
                {
                    SendImage(jpegArray, length);
                }
            }
            copy.Dispose();
        });
#else
        SendImage(data.ToArray(), data.Length);
#endif
    }
    // Kick off the next capture only when the reader is idle and nothing is in flight.
    if (Reader.Status != AsyncTextureReaderStatus.Reading && !ImageIsBeingSent)
    {
        if (manual)
        {
            if (Input.GetKeyDown(KeyCode.M))
            {
                Reader.Start();
            }
        }
        else
        {
            if (Time.time - lastTimePoint > 1.0f / sendingFPS)
            {
                lastTimePoint = Time.time;
                Reader.Start();
            }
        }
    }
}
// Serializes the bitmap's pixels into a quality-100 FluxJpeg JPEG and returns the
// encoded bytes as a Base64 string.
private string ConvertToString(WriteableBitmap bitmap)
{
    int width = bitmap.PixelWidth;
    int height = bitmap.PixelHeight;
    int bands = 3;
    byte[][,] raster = new byte[bands][, ];
    for (int i = 0; i < bands; i++)
    {
        raster[i] = new byte[width, height];
    }
    // Unpack each 32-bit pixel into R/G/B planes (alpha discarded).
    for (int row = 0; row < height; row++)
    {
        for (int column = 0; column < width; column++)
        {
            int pixel = bitmap.Pixels[width * row + column];
            raster[0][column, row] = (byte)(pixel >> 16);
            raster[1][column, row] = (byte)(pixel >> 8);
            raster[2][column, row] = (byte)pixel;
        }
    }
    ColorModel model = new ColorModel { colorspace = ColorSpace.RGB };
    FluxJpeg.Core.Image img = new FluxJpeg.Core.Image(model, raster);
    using (MemoryStream stream = new MemoryStream())
    {
        JpegEncoder encoder = new JpegEncoder(img, 100, stream);
        encoder.Encode();
        // FIX: ToArray returns the complete buffer; the previous Seek + single Read
        // was not guaranteed to fill the array (Stream.Read may return fewer bytes
        // than requested) and its return value was unused. Stream is now disposed.
        return System.Convert.ToBase64String(stream.ToArray());
    }
}
// Encodes the backing bitmap into a new MemoryStream as JPEG or PNG. `type` defaults
// to this instance's original MIME type (falling back to JPEG when unset). Returns
// null — after raising the Error event — for unsupported MIME types.
public MemoryStream getAsEncodedStream(string type = null, int quality = 90)
{
    MemoryStream imageStream = new MemoryStream();
    if (type == null)
    {
        type = this.type != "" ? this.type : JPEG.MIME;
    }
    if (type == JPEG.MIME)     // Encode as JPEG
    {
        byte[][,] raster = _bm.ToRaster();
        FluxJpeg.Core.Image jpegImage = new FluxJpeg.Core.Image(new ColorModel { colorspace = ColorSpace.RGB }, raster);
        JpegEncoder jpegEncoder = new JpegEncoder(jpegImage, quality, imageStream);
        jpegEncoder.Encode();
        if (_img != null)
        {
            // strip off any headers that might be left by encoder, etc
            imageStream = new MemoryStream(((JPEG)_img).stripHeaders(imageStream));
            if (_preserveHeaders)
            {
                // Re-insert the original source image's headers when asked to.
                imageStream = new MemoryStream(((JPEG)_img).insertHeaders(imageStream));
            }
        }
    }
    else if (type == PNG.MIME)     // Encode as PNG
    {
        PngEncoder pngEncoder = new PngEncoder(_bm.Pixels, _bm.PixelWidth, _bm.PixelHeight, false, PngEncoder.FILTER_NONE, Deflater.BEST_COMPRESSION);
        byte[] pngBuffer = pngEncoder.pngEncode();
        imageStream.Write(pngBuffer, 0, pngBuffer.Length);
    }
    else
    {
        // Unsupported MIME type: signal via the Error event and return null.
        Error(this, null);
        return(null);
    }
    return(imageStream);
}
// Renders the camera, synchronously waits for the texture readback, and saves the
// frame to `path` as PNG or JPEG depending on the extension. Returns false for
// unsupported extensions, an empty encode, or a write failure.
public bool Save(string path, int quality, int compression)
{
    renderCam.Render();
    Reader.Start();
    Reader.WaitForCompletion();
    var data = Reader.GetData();
    // 16 MiB scratch buffer for the encoded image — assumed large enough for any
    // frame at this resolution (TODO confirm).
    var bytes = new byte[16 * 1024 * 1024];
    int length;
    var ext = System.IO.Path.GetExtension(path).ToLower();
    if (ext == ".png")
    {
        length = PngEncoder.Encode(data, videoWidth, videoHeight, Reader.BytesPerPixel, compression, bytes);
    }
    else if (ext == ".jpeg" || ext == ".jpg")
    {
        length = JpegEncoder.Encode(data, videoWidth, videoHeight, Reader.BytesPerPixel, quality, bytes);
    }
    else
    {
        return(false);
    }
    if (length > 0)
    {
        try
        {
            using (var file = System.IO.File.Create(path))
            {
                file.Write(bytes, 0, length);
            }
            return(true);
        }
        catch
        {
            // Best-effort save: IO failures are reported via the false return value.
        }
    }
    return(false);
}
// Converts a WriteableBitmap into a JPEG stream at the requested quality,
// rewound so the caller can read the encoded bytes from the start.
public static Stream Encode(WriteableBitmap bitmap, int quality)
{
    var pixelWidth = bitmap.PixelWidth;
    var pixelHeight = bitmap.PixelHeight;
    const int bands = 3;
    var raster = new byte[bands][, ];
    for (var band = 0; band < bands; band++)
    {
        raster[band] = new byte[pixelWidth, pixelHeight];
    }
    // Split each packed ARGB pixel into separate R/G/B planes (FJCore raster layout).
    for (var y = 0; y < pixelHeight; y++)
    {
        for (var x = 0; x < pixelWidth; x++)
        {
            var argb = bitmap.Pixels[pixelWidth * y + x];
            raster[0][x, y] = (byte)(argb >> 16); // R
            raster[1][x, y] = (byte)(argb >> 8);  // G
            raster[2][x, y] = (byte)argb;         // B
        }
    }
    var img = new Image(new ColorModel { colorspace = ColorSpace.RGB }, raster);
    var stream = new MemoryStream();
    new JpegEncoder(img, quality, stream).Encode();
    stream.Flush();
    stream.Seek(0, SeekOrigin.Begin);
    return(stream);
}
// Encodes the bitmap to JPEG (quality 95) and copies the result into dstStream.
// The pixel copy writes [y, x] while reading pixels sequentially, which (per the
// original author's note) rotates the image 90° clockwise as a side effect.
public static void EncodeJpeg(WriteableBitmap bmp, Stream dstStream)
{
    // Init buffer in FluxJpeg format
    int w = bmp.PixelWidth;
    int h = bmp.PixelHeight;
    int[] p = bmp.Pixels;
    byte[][,] pixelsForJpeg = new byte[3][, ];     // RGB colors
    pixelsForJpeg[0] = new byte[h, w];
    pixelsForJpeg[1] = new byte[h, w];
    pixelsForJpeg[2] = new byte[h, w];
    // Copy WriteableBitmap data into buffer for FluxJpeg
    int i = 0;
    for (int x = 0; x < w; x++)
    {
        for (int y = 0; y < h; y++)
        {
            int color = p[i++];
            // Swap x and y coordinates to cheaply rotate the image 90° clockwise
            pixelsForJpeg[0][y, x] = (byte)(color >> 16);     // R
            pixelsForJpeg[1][y, x] = (byte)(color >> 8);      // G
            pixelsForJpeg[2][y, x] = (byte)(color);           // B
        }
    }
    using (MemoryStream memStream = new MemoryStream())
    {
        //Encode Image as JPEG
        var jpegImage = new FluxJpeg.Core.Image(new ColorModel { colorspace = ColorSpace.RGB }, pixelsForJpeg);
        var encoder = new JpegEncoder(jpegImage, 95, memStream);
        encoder.Encode();
        // Seek to begin of stream and write the encoded bytes to the destination stream
        memStream.Seek(0, SeekOrigin.Begin);
        // Use the new .Net 4 CopyTo method :)
        memStream.CopyTo(dstStream);
    }
}
// Encodes a GDI+ Bitmap to JPEG (4:4:4 sampling) at the given quality, tagging the
// Exif Software field. Pixel data is copied out via LockBits and channel-corrected
// before being handed to the encoder; encoder failures are surfaced as exceptions
// via ThrowExceptionIfJpegResultFail.
private static byte[] Encode(Bitmap icon, int quality)
{
    using (JpegEncoder jpegEncoder = new JpegEncoder())
    {
        // Copy the raw bitmap bytes (stride-aligned rows) out of the locked region.
        BitmapData bitmapdata = icon.LockBits(new Rectangle(0, 0, icon.Width, icon.Height), ImageLockMode.ReadOnly, icon.PixelFormat);
        byte[] numArray = new byte[icon.Height * bitmapdata.Stride];
        Marshal.Copy(bitmapdata.Scan0, numArray, 0, numArray.Length);
        icon.UnlockBits(bitmapdata);
        IconConverter.CorrectRgbChannel(numArray, icon.PixelFormat);
        IconConverter.ThrowExceptionIfJpegResultFail(jpegEncoder.SetPixelData(numArray, icon.PixelFormat, IconConverter.ToSize(icon.PhysicalDimension), 1));
        jpegEncoder.SetQuality(quality);
        jpegEncoder.SetSamplingRatio(SamplingRatio.Ratio_444);
        byte[] outBuffer;
        IconConverter.ThrowExceptionIfJpegResultFail(jpegEncoder.Encode(out outBuffer, new Exif()
        {
            Software = "Nintendo AuthoringTool"
        }));
        return(outBuffer);
    }
}
// Renders the camera, records the render timestamp, reads the frame back, encodes it
// to PNG or JPEG based on the image file's extension, then hands the bytes to the
// asynchronous save path together with the timestamp file. Returns false for
// unsupported extensions, an empty encode, or if scheduling the save throws.
public bool SaveAsync(string imgFileName, string timeFileName, int quality, int compression)
{
    renderCam.Render();
    // NOTE(review): wall-clock local time; confirm UTC is not expected downstream.
    DateTime renderTime = DateTime.Now;
    Reader.Start();
    Reader.Update(true);
    var data = Reader.GetData();
    // 16 MiB scratch buffer for the encoded image.
    var bytes = new byte[16 * 1024 * 1024];
    int length;
    var ext = System.IO.Path.GetExtension(imgFileName).ToLower();
    if (ext == ".png")
    {
        length = PngEncoder.Encode(data, videoWidth, videoHeight, Reader.BytesPerPixel, compression, bytes);
    }
    else if (ext == ".jpeg" || ext == ".jpg")
    {
        length = JpegEncoder.Encode(data, videoWidth, videoHeight, Reader.BytesPerPixel, quality, bytes);
    }
    else
    {
        return(false);
    }
    if (length > 0)
    {
        try
        {
            SaveAsync(bytes, length, imgFileName, timeFileName, renderTime);
            return(true);
        }
        catch
        {
            // Fall through to the failure return value.
        }
    }
    return(false);
}
// Encodes the fixed RGBA test image as a 4:2:0 baseline JPEG, reading pixels directly
// through JpegRgbaInputReader (no intermediate YCbCr buffer) and discarding output.
public void TestJpegLibraryEncode420_NoBuffer()
{
    var encoder = new JpegEncoder();
    // Quality-75 scaled standard tables: slot 0 = luminance, slot 1 = chrominance.
    encoder.SetQuantizationTable(JpegStandardQuantizationTable.ScaleByQuality(JpegStandardQuantizationTable.GetLuminanceTable(JpegElementPrecision.Precision8Bit, 0), 75));
    encoder.SetQuantizationTable(JpegStandardQuantizationTable.ScaleByQuality(JpegStandardQuantizationTable.GetChrominanceTable(JpegElementPrecision.Precision8Bit, 1), 75));
    encoder.SetHuffmanTable(true, 0, JpegStandardHuffmanEncodingTable.GetLuminanceDCTable());
    encoder.SetHuffmanTable(false, 0, JpegStandardHuffmanEncodingTable.GetLuminanceACTable());
    encoder.SetHuffmanTable(true, 1, JpegStandardHuffmanEncodingTable.GetChrominanceDCTable());
    encoder.SetHuffmanTable(false, 1, JpegStandardHuffmanEncodingTable.GetChrominanceACTable());
    // Chroma components use 2x2 factors, i.e. this library's 4:2:0 configuration.
    encoder.AddComponent(1, 0, 0, 0, 1, 1); // Y component
    encoder.AddComponent(2, 1, 1, 1, 2, 2); // Cb component
    encoder.AddComponent(3, 1, 1, 1, 2, 2); // Cr component
    encoder.SetInputReader(new JpegRgbaInputReader(_width, _height, _rgba));
    // Output is discarded: this test only exercises the encode path.
    using var bufferWriter = new NullBufferWriter();
    encoder.SetOutput(bufferWriter);
    encoder.Encode();
}
// Round-trip test: JPEG-encode a 64x64 solid-color RGBA image, decode it back, and
// assert every channel of every pixel stays within kDeviation of the original.
public void ImageFormat_EncodeJPG_DecodeJPG_ProducesSimilarData()
{
    const int kDimension = 64;
    const int kLength = kDimension * kDimension;
    const int kDeviation = 3;
    // Random opaque color; a flat image keeps JPEG loss small and predictable.
    var color = new Color32((byte)UnityEngine.Random.Range(0, 255), (byte)UnityEngine.Random.Range(0, 255), (byte)UnityEngine.Random.Range(0, 255), 255);
    var data = ArrayUtilities.Allocate <Color32>(kLength);
    for (var i = 0; i < kLength; ++i)
    {
        data[i] = color;
    }
    var encoded = JpegEncoder.Encode(ArrayUtilities.Cast <byte>(data), kDimension, kDimension, (int)GraphicsFormatUtility.GetBlockSize(GraphicsFormat.R8G8B8A8_UNorm), GraphicsFormat.R8G8B8A8_UNorm);
    int width = 0, height = 0;
    var decoded = ArrayUtilities.Cast <Color32>(JpegEncoder.Decode(encoded, ref width, ref height));
    // Decoder must report the original dimensions and element count.
    Debug.Assert(width == kDimension && height == kDimension);
    Debug.Assert(ArrayUtilities.Count <Color32>(data) == ArrayUtilities.Count <Color32>(decoded));
    // Count pixels where any channel deviates beyond the tolerance.
    int count = 0;
    for (var i = 0; i < kLength; ++i)
    {
        int rd = Math.Abs((int)data[i].r - (int)decoded[i].r);
        int gd = Math.Abs((int)data[i].g - (int)decoded[i].g);
        int bd = Math.Abs((int)data[i].b - (int)decoded[i].b);
        int ad = Math.Abs((int)data[i].a - (int)decoded[i].a);
        if (rd > kDeviation || gd > kDeviation || bd > kDeviation || ad > kDeviation)
        {
            ++count;
        }
    }
    Debug.AssertFormat(count == 0, "{0} pixels had deviation of {1} or more from original data.", count, kDeviation);
}