/// <summary>
/// Converts a raw camera preview frame (in the camera's current preview format) into a
/// JPEG byte array using the camera's preview dimensions.
/// </summary>
/// <param name="data">Raw YUV preview bytes as delivered by OnPreviewFrame.</param>
/// <param name="camera">Camera whose parameters describe the frame's format and size.</param>
/// <returns>JPEG-encoded bytes covering the full preview frame.</returns>
private byte[] ConvertYugByetArrayJpegByteArray(byte[] data, Android.Hardware.Camera camera)
{
    var parameters = camera.GetParameters();
    int width = parameters.PreviewSize.Width;
    int height = parameters.PreviewSize.Height;

    // BUG FIX: the YuvImage and MemoryStream were never disposed, leaking a Java-side
    // object and a managed buffer on every converted frame. 'null' strides = packed default.
    using (YuvImage yuv = new YuvImage(data, parameters.PreviewFormat, width, height, null))
    using (var byteArrayOutputStream = new MemoryStream())
    {
        // Quality 50 trades file size for encode speed on a live preview path.
        yuv.CompressToJpeg(new Rect(0, 0, width, height), 50, byteArrayOutputStream);
        return byteArrayOutputStream.ToArray();
    }
}
/// <summary>
/// Camera preview callback: wraps the incoming frame in a YuvImage (assumed NV21) and
/// caches it, together with the current width/height fields, for later processing.
/// Scanning/shutdown are currently disabled (see commented-out calls at the bottom).
/// </summary>
public void OnPreviewFrame(byte[] bytes, Android.Hardware.Camera camera)
{
    try
    {
        /* OLD Android Code
         * //Fix for image not rotating on devices
         * byte[] rotatedData = new byte[bytes.Length];
         * for (int y = 0; y < height; y++) {
         *     for (int x = 0; x < width; x++)
         *         rotatedData[x * height + height - y - 1] = bytes[x + y * width];
         * }
         *
         * var cameraParameters = camera.GetParameters();
         *
         * //Changed to using a YUV Image to get the byte data instead of manually working with it!
         * var img = new YuvImage(rotatedData, ImageFormatType.Nv21, cameraParameters.PreviewSize.Width, cameraParameters.PreviewSize.Height, null);
         * var dataRect = GetFramingRectInPreview();
         *
         * var luminance = new PlanarYUVLuminanceSource (img.GetYuvData(), width, height, dataRect.Left, dataRect.Top, dataRect.Width(), dataRect.Height(), false);
         * //var luminance = new PlanarYUVLuminanceSource(img.GetYuvData(), cameraParameters.PreviewSize.Width, cameraParameters.PreviewSize.Height, 0, 0, cameraParameters.PreviewSize.Width, cameraParameters.PreviewSize.Height, false);
         * var binarized = new BinaryBitmap (new ZXing.Common.HybridBinarizer(luminance));
         * var result = reader.decodeWithState(binarized);
         */

        var cameraParameters = camera.GetParameters();

        // Frame format is assumed NV21 (the Android preview default) — TODO confirm the
        // preview format was actually set to NV21 elsewhere; PreviewFormat is not checked here.
        var img = new YuvImage(bytes, ImageFormatType.Nv21, cameraParameters.PreviewSize.Width, cameraParameters.PreviewSize.Height, null);

        // Cache the most recent frame for consumers elsewhere in the class.
        lastYuvImage = img;
        // NOTE(review): 'width'/'height' are class fields, not the preview size read above —
        // confirm they are kept in sync with cameraParameters.PreviewSize by the caller.
        lastWidth = width;
        lastHeight = height;

        //ShutdownCamera();
        //activity.OnScan(result);
    }
    catch (ReaderException)
    {
        Android.Util.Log.Debug("ZXing.Mobile", "No barcode Found");
        // ignore this exception; it happens every time there is a failed scan
    }
    catch (Exception)
    {
        // TODO: this one is unexpected.. log or otherwise handle it
        throw;
    }
}
/// <summary>
/// Converts a raw YUV preview frame into a Bitmap by JPEG-compressing it (at the quality
/// configured in MainValues.quality) and decoding the result.
/// </summary>
/// <param name="data">Raw preview bytes in the camera's preview format.</param>
/// <param name="camera">Camera whose parameters supply format and preview size.</param>
/// <returns>The decoded Bitmap, or null if conversion fails (best-effort contract).</returns>
private Bitmap convertYuvByteArrayToBitmap(byte[] data, Android.Hardware.Camera camera)
{
    try
    {
        Android.Hardware.Camera.Parameters parameters = camera.GetParameters();
        Android.Hardware.Camera.Size size = parameters.PreviewSize;
        using (YuvImage image = new YuvImage(data, parameters.PreviewFormat, size.Width, size.Height, null))
        using (MemoryStream memoryStream = new MemoryStream())
        {
            image.CompressToJpeg(new Rect(0, 0, size.Width, size.Height), int.Parse(MainValues.quality), memoryStream);

            // BUG FIX: ToArray() was called twice (once for the data, once for the length),
            // allocating and copying the entire JPEG a second time on every frame.
            byte[] jpegBytes = memoryStream.ToArray();
            return BitmapFactory.DecodeByteArray(jpegBytes, 0, jpegBytes.Length);
        }
    }
    catch (Exception)
    {
        // Deliberate best-effort: any failure (bad frame, parse error) yields null
        // rather than crashing the preview callback. Preserved from the original.
        return null;
    }
}
/// <summary>
/// Copies an NV21 frame out of a FastJavaByteArray and encodes it as a JPEG.
/// </summary>
/// <param name="self">Java-side byte array holding NV21 pixel data.</param>
/// <param name="width">Frame width in pixels.</param>
/// <param name="height">Frame height in pixels.</param>
/// <param name="quality">JPEG quality 0-100; defaults to 100 (lossless-ish), matching the
/// original hard-coded value so existing callers are unaffected.</param>
/// <returns>JPEG-encoded bytes for the full frame.</returns>
public static byte[] ConvertToJpg(this FastJavaByteArray self, int width, int height, int quality = 100)
{
    // Copy the Java-side bytes into a managed array once; YuvImage needs a byte[].
    byte[] javaByteArray = new byte[self.Count];
    self.CopyTo(javaByteArray, 0);

    using (YuvImage yuvImage = new YuvImage(javaByteArray, ImageFormatType.Nv21, width, height, null))
    using (Rect rect = new Rect(0, 0, width, height))
    using (var os = new MemoryStream())
    {
        // Removed the redundant os.Close(): the using block already disposes the stream,
        // and ToArray() works regardless of the stream's open state.
        yuvImage.CompressToJpeg(rect, quality, os);
        return os.ToArray();
    }
}
/// <summary>
/// Preview callback for UDP streaming: JPEG-compresses the frame at low quality (20),
/// splits it into UDP packages via UdpFrame, and raises FrameRefreshed.
/// On any failure the preview pipeline is restarted via start().
/// </summary>
public void OnPreviewFrame(byte[] data, Android.Hardware.Camera camera)
{
    try
    {
        noFrames = 0;

        // BUG FIX: GetParameters() was called three times per frame (each is a JNI
        // round-trip that allocates a new Parameters object); fetch it once.
        var parameters = camera.GetParameters();
        using YuvImage yi = new YuvImage(data, parameters.PreviewFormat, parameters.PreviewSize.Width, parameters.PreviewSize.Height, null);
        using MemoryStream ms = new MemoryStream();

        // Removed: an unused byte[fragmentation][] jagged array was allocated every frame.
        // Quality 20 keeps the per-frame payload small for UDP transport.
        yi.CompressToJpeg(new Rect(0, 0, yi.Width, yi.Height), 20, ms);
        byte[] jpegBytes = ms.ToArray();
        UdpFrame frame = new UdpFrame(jpegBytes);
        FrameRefreshed?.Invoke(frame.packages);
    }
    catch (Exception)
    {
        // Preserved behavior: any failure restarts the capture pipeline rather than
        // propagating out of the camera callback.
        start();
    }
}
/// <summary>
/// Preview callback: every 4th frame, rotates the NV21 data, converts it to a JPEG-backed
/// Bitmap, zooms it to the test size, POSTs it (base64) to a matching service, and draws
/// the returned match rectangle on the image view.
/// NOTE(review): the HTTP POST runs synchronously inside the camera callback — confirm
/// this is acceptable for frame latency.
/// </summary>
public void OnPreviewFrame(byte[] data, Android.Hardware.Camera camera)
{
    // throw new NotImplementedException();
    // process data from camera(preiew-image on surfaceView) per frame
    frame_count++;
    // Only process every 4th frame to limit CPU and network load.
    if (frame_count == 4)
    {
        frame_count = 0;
        try
        {
            int previewWidth = camera.GetParameters().PreviewSize.Width;
            int previewHeight = camera.GetParameters().PreviewSize.Height;
            byte[] rotate_data = YUVRotate(data, previewWidth, previewHeight);
            //after rotate height became width, and width became height
            YuvImage image = new YuvImage(rotate_data, ImageFormat.Nv21, previewHeight, previewWidth, null);
            if (image != null)
            {
                System.IO.MemoryStream stream = new System.IO.MemoryStream();
                // Swapped width/height here because of the rotation above.
                image.CompressToJpeg(new Rect(0, 0, previewHeight, previewWidth), 100, stream);
                byte[] image_data = stream.ToArray();
                Bitmap bitmap_temp = BitmapFactory.DecodeByteArray(image_data, 0, image_data.Length);
                Bitmap bitmap_result = ZoomImage(bitmap_temp, image_test_width, image_test_height);
                System.IO.MemoryStream stream_ = new System.IO.MemoryStream();
                bitmap_result.Compress(Bitmap.CompressFormat.Jpeg, 100, stream_);
                byte[] image_data_ = stream_.ToArray();
                // Null-out intermediates and force a GC to bound memory on repeated frames.
                // NOTE(review): GC.Collect() in a hot callback is costly — consider Dispose/
                // Recycle instead; left unchanged here.
                image = null;
                image_data = null;
                bitmap_temp = null;
                bitmap_result = null;
                stream = null;
                stream_ = null;
                GC.Collect();
                // Synchronous POST of the base64 JPEG; response format (observed below):
                // "status,percentage,x1,y1,...,x4,y4" — presumably; verify against the service.
                string result = PostFunc.MyPost.PostFunc(Convert.ToBase64String(image_data_));
                string[] result_split = result.Split(new char[] { ',' });
                _textview.Text = result_split[0] + " " + result_split[1] + "%";
                if (result_split[0] == "Match!")
                {
                    draw_flag = true;
                    // Scale the 8 returned coordinates from test-image space to view space:
                    // even indices are X (scale by width), odd are Y (scale by height).
                    for (int i = 0; i < 8; i++)
                    {
                        lines[i] = int.Parse(result_split[i + 2]);
                        if (i % 2 == 0)
                        {
                            lines[i] = lines[i] * imageview.Width / image_test_width;
                        }
                        else
                        {
                            lines[i] = lines[i] * imageview.Height / image_test_height;
                        }
                    }
                    imageview.OnDraw(lines, draw_flag);
                }
                else
                {
                    // No match: clear the overlay.
                    draw_flag = false;
                    imageview.OnDraw(lines, draw_flag);
                }
            }
        }
        catch (IOException)
        {
            // Swallowed: a failed frame is dropped; the next sampled frame retries.
        }
    }
}
/// <summary>
/// ZXing preview callback: throttled by options.DelayBetweenAnalyzingFrames, decodes a
/// barcode from the full NV21 frame and, on success, shuts down the camera and reports
/// the result to the activity.
/// </summary>
public void OnPreviewFrame(byte [] bytes, Android.Hardware.Camera camera)
{
    // Throttle: skip frames that arrive before the configured analysis delay has elapsed.
    if ((DateTime.Now - lastPreviewAnalysis).TotalMilliseconds < options.DelayBetweenAnalyzingFrames)
    {
        return;
    }

    try
    {
        /* OLD Android Code
         * //Fix for image not rotating on devices
         * byte[] rotatedData = new byte[bytes.Length];
         * for (int y = 0; y < height; y++) {
         *     for (int x = 0; x < width; x++)
         *         rotatedData[x * height + height - y - 1] = bytes[x + y * width];
         * }
         *
         * var cameraParameters = camera.GetParameters();
         *
         * //Changed to using a YUV Image to get the byte data instead of manually working with it!
         * var img = new YuvImage(rotatedData, ImageFormatType.Nv21, cameraParameters.PreviewSize.Width, cameraParameters.PreviewSize.Height, null);
         * var dataRect = GetFramingRectInPreview();
         *
         * var luminance = new PlanarYUVLuminanceSource (img.GetYuvData(), width, height, dataRect.Left, dataRect.Top, dataRect.Width(), dataRect.Height(), false);
         * //var luminance = new PlanarYUVLuminanceSource(img.GetYuvData(), cameraParameters.PreviewSize.Width, cameraParameters.PreviewSize.Height, 0, 0, cameraParameters.PreviewSize.Width, cameraParameters.PreviewSize.Height, false);
         * var binarized = new BinaryBitmap (new ZXing.Common.HybridBinarizer(luminance));
         * var result = reader.decodeWithState(binarized);
         */

        var cameraParameters = camera.GetParameters();
        // NV21 is assumed (the Android preview default) — PreviewFormat is not checked here.
        var img = new YuvImage(bytes, ImageFormatType.Nv21, cameraParameters.PreviewSize.Width, cameraParameters.PreviewSize.Height, null);
        // dataRect is computed but not used by the active luminance-source lambda below —
        // the full frame (0,0,w,h) is scanned instead of just the framing rectangle.
        var dataRect = GetFramingRectInPreview();

        //var barcodeReader = new BarcodeReader(null, p => new PlanarYUVLuminanceSource(img.GetYuvData(), img.Width, img.Height, dataRect.Left, dataRect.Top,
        //    dataRect.Width(), dataRect.Height(), false), null, null)
        //{
        //    AutoRotate = true,
        //    TryHarder = true,
        //};
        var barcodeReader = new BarcodeReader(null, null, null, (p, w, h, f) =>
            new PlanarYUVLuminanceSource(p, w, h, 0, 0, w, h, false))
            //new PlanarYUVLuminanceSource(p, w, h, dataRect.Left, dataRect.Top, dataRect.Width(), dataRect.Height(), false))
        {
            AutoRotate = true,
            TryHarder = false
        };

        // Forward optional scan configuration from the options object.
        if (this.options.PureBarcode.HasValue && this.options.PureBarcode.Value)
        {
            barcodeReader.PureBarcode = this.options.PureBarcode.Value;
        }

        if (this.options.PossibleFormats != null && this.options.PossibleFormats.Count > 0)
        {
            barcodeReader.PossibleFormats = this.options.PossibleFormats;
        }

        var result = barcodeReader.Decode(img.GetYuvData(), img.Width, img.Height, RGBLuminanceSource.BitmapFormat.Unknown);

        // Record the analysis time after decoding so the throttle measures full-cycle cost.
        lastPreviewAnalysis = DateTime.Now;

        if (result == null || string.IsNullOrEmpty(result.Text))
        {
            return;
        }

        Android.Util.Log.Debug("ZXing.Mobile", "Barcode Found: " + result.Text);

        // A barcode was found: stop the camera and hand the result to the activity.
        ShutdownCamera();
        activity.OnScan(result);
    }
    catch (ReaderException)
    {
        Android.Util.Log.Debug("ZXing.Mobile", "No barcode Found");
        // ignore this exception; it happens every time there is a failed scan
    }
    catch (Exception)
    {
        // TODO: this one is unexpected.. log or otherwise handle it
        throw;
    }
}
/// <summary>
/// Converts a Vision Frame's grayscale image data (treated as NV21) into a Bitmap,
/// rotates it upright per the frame's rotation metadata, scales it back to the frame
/// dimensions, and crops it to the detected face.
/// </summary>
/// <param name="frame">Source frame providing metadata and grayscale image data.</param>
/// <param name="face">Face used by Crop() to select the region of interest.</param>
/// <returns>The cropped, upright face bitmap.</returns>
private Bitmap GetProcessedImage(Frame frame, Face face)
{
    int rotationAngle = 0;
    int width = frame.GetMetadata().Width;
    int height = frame.GetMetadata().Height;

    // Map the frame's quadrant rotation enum to degrees for the bitmap matrix.
    switch (frame.GetMetadata().Rotation)
    {
        case FrameRotation.Rotate0: break;
        case FrameRotation.Rotate90: rotationAngle = 90; break;
        case FrameRotation.Rotate180: rotationAngle = 180; break;
        case FrameRotation.Rotate270: rotationAngle = 270; break;
    }

    var buffer = frame.GrayscaleImageData;
    buffer.Rewind();

    // Invoke java.nio.ByteBuffer.array() via JNI to reach the backing byte[] without an
    // element-by-element copy. Assumes the buffer is array-backed — TODO confirm; a
    // direct buffer would throw UnsupportedOperationException here.
    IntPtr classHandle = JNIEnv.FindClass("java/nio/ByteBuffer");
    IntPtr methodId = JNIEnv.GetMethodID(classHandle, "array", "()[B");
    IntPtr resultHandle = JNIEnv.CallObjectMethod(buffer.Handle, methodId);
    byte[] bytes = JNIEnv.GetArray<byte>(resultHandle);
    JNIEnv.DeleteLocalRef(resultHandle);

    using (YuvImage yuvImage = new YuvImage(bytes, ImageFormatType.Nv21, width, height, null))
    using (var byteArrayOutputStream = new MemoryStream())
    {
        yuvImage.CompressToJpeg(new Rect(0, 0, width, height), 100, byteArrayOutputStream);

        // BUG FIX: GetBuffer() returns the stream's internal buffer whose length is the
        // stream CAPACITY, usually larger than the written JPEG, so jpegArray.Length
        // included trailing garbage bytes passed to the decoder. ToArray() returns
        // exactly the bytes written.
        byte[] jpegArray = byteArrayOutputStream.ToArray();
        Bitmap bitmap = BitmapFactory.DecodeByteArray(jpegArray, 0, jpegArray.Length);

        using (Matrix matrix = new Matrix())
        {
            matrix.PostRotate(rotationAngle);
            Bitmap scaledBitmap = Bitmap.CreateScaledBitmap(bitmap, width, height, true);
            Bitmap rotatedBitmap = Bitmap.CreateBitmap(
                scaledBitmap, 0, 0, scaledBitmap.Width, scaledBitmap.Height, matrix, true);

            return this.Crop(rotatedBitmap, face);
        }
    }
}
/// <summary>
/// Drains a MediaCodec decoder: feeds encoded samples from the extractor into the
/// decoder's input buffers and pulls decoded frames from its output buffers until
/// end-of-stream, converting each decoded buffer to a YuvImage. Muxer wiring and
/// bitmap dumping are present but commented out.
/// </summary>
public void Decode(MediaCodec _Decoder, MediaExtractor extractor)
{
    Stopwatch s = new Stopwatch();
    s.Start();

    int TIMEOUT_USEC = 10000;
    // NOTE(review): GetInputBuffers/GetOutputBuffers are the pre-API-21 buffer model.
    ByteBuffer[] encoderInputBuffers = _Decoder.GetInputBuffers();
    ByteBuffer[] outputBuffers = _Decoder.GetOutputBuffers();
    var mBufferInfo = new MediaCodec.BufferInfo();
    bool inputDone = false;
    var index = 0;  // count of decoded frames, reported at the end

    try
    {
        while (true)
        {
            // ---- Input side: feed one encoded sample per loop iteration. ----
            if (!inputDone)
            {
                int inputBufIndex = _Decoder.DequeueInputBuffer(TIMEOUT_USEC);
                if (inputBufIndex >= 0)
                {
                    ByteBuffer buffer = encoderInputBuffers[inputBufIndex];
                    //long ptsUsec = computePresentationTime(frameIndex);
                    int sampleSize = extractor.ReadSampleData(buffer, 0);
                    if (sampleSize < 0)
                    {
                        // Send an empty frame with the end-of-stream flag set. If we set EOS on a frame with data, that frame data will be ignored, and the output will be short one frame.
                        _Decoder.QueueInputBuffer(inputBufIndex, 0, 0, 0, MediaCodec.BufferFlagEndOfStream);
                        inputDone = true;
                        Log.Info(TAG, "sent input EOS (with zero-length frame)");
                    }
                    else
                    {
                        Log.Info(TAG, "adding encoded video to decoder input ");
                        _Decoder.QueueInputBuffer(inputBufIndex, 0, sampleSize, extractor.SampleTime, 0);
                        extractor.Advance();
                    }
                }
                else
                {
                    // either all in use, or we timed out during initial setup
                    Log.Warn(TAG, "input buffer not available");
                }
            }

            // ---- Output side: drain one decoded buffer (or a status code). ----
            //ByteBuffer[] encoderOutputBuffers = _Decoder.GetOutputBuffers();
            int encoderStatus = _Decoder.DequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (encoderStatus == (int)MediaCodecInfoState.TryAgainLater)
            {
                Log.Info(TAG, "no output available, spinning to await EOS");
            }
            else if (encoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
            {
                // not expected for an encoder
                Log.Warn(TAG, "not expected OutputBuffersChanged happened");
                // Refresh our cached array so subsequent indices stay valid.
                outputBuffers = _Decoder.GetOutputBuffers();
            }
            else if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
            {
                // should happen before receiving buffers, and should only happen once
                //if (_MuxerStarted)
                //{
                //    Log.Error(TAG, "format changed twice and should never happen");
                //    throw new RuntimeException("format changed twice");
                //}
                //MediaFormat newFormat = _Decoder.OutputFormat;
                //Log.Info(TAG, "format changed and starting MUX");
                //_TrackIndex = _Muxer.AddTrack(newFormat);
                //_Muxer.Start();
                //_MuxerStarted = true;
            }
            else if (encoderStatus < 0)
            {
                Log.Warn(TAG, "unexpected but lets ignore");
                // let's ignore it
            }
            else
            {
                ByteBuffer encodedData = outputBuffers[encoderStatus];
                if (encodedData == null)
                {
                    Log.Error(TAG, string.Format("encoderOutputBuffer {0} was null!!", encoderStatus));
                    throw new RuntimeException(string.Format("encoderOutputBuffer {0} was null!!", encoderStatus));
                }

                if ((mBufferInfo.Flags & MediaCodecBufferFlags.CodecConfig) != 0)
                {
                    // The codec config data was pulled out and fed to the muxer when we got
                    // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                    mBufferInfo.Size = 0;
                }

                if (mBufferInfo.Size != 0)
                {
                    //if (!_MuxerStarted)
                    //{
                    //    Log.Error(TAG, "muxer hasnt started!!");
                    //    throw new RuntimeException("muxer hasnt started");
                    //}

                    // adjust the ByteBuffer values to match BufferInfo (not needed?) old
                    //encodedData.Position(mBufferInfo.Offset);
                    //encodedData.Limit(mBufferInfo.Offset + this.mBufferInfo.Size);
                    try
                    {
                        //byte[] dst = new byte[outputBuffers[encoderStatus].Capacity()];
                        //outputBuffers[encoderStatus].Get(dst);
                        //ByteBuffer buffer = outputBuffers[encoderStatus];
                        //byte[] ba = new byte[encodedData.Remaining()];
                        //encodedData.Get(ba);
                        //ByteBuffer buffer = outputBuffers[encoderStatus];
                        //buffer.Position(mBufferInfo.Offset);
                        //buffer.Limit(mBufferInfo.Offset + mBufferInfo.Size);
                        //byte[] ba = new byte[buffer.Remaining()];
                        //buffer.Get(ba);
                        //if (index < 10)
                        //{
                        // Convert the decoded raw buffer into a YuvImage. The result is
                        // currently unused beyond incrementing the frame counter —
                        // NOTE(review): confirm whether the commented bitmap/file dump
                        // below is meant to be re-enabled.
                        YuvImage yuv = Utils.GetYUVImage(encodedData, _CameraColorFormat, _Width, _Height);
                        //var imagedata = yuv.GetYuvData();
                        //Utils.swapNV21_NV12(ref imagedata, _Width, _Height); //Image might need to be corrected later
                        //Bitmap b = Utils.GetBitmap(yuv, _Width, _Height);
                        //Bitmap bmp = BitmapFactory.DecodeByteArray(ba, 0, ba.Length);// this return null
                        //var createfilepath = new File(_downloadsfilesdir, DateTime.Now.Ticks + ".png").AbsolutePath;
                        //using (FileStream bos = new FileStream(createfilepath, FileMode.CreateNew))
                        //{
                        //    b.Compress(Bitmap.CompressFormat.Png, 100, bos);
                        //}
                        //b.Recycle();
                        //}
                        index++;
                        //writeFrameToSDCard(dst, i, dst.length);
                        //i++;
                    }
                    catch (Exception e)
                    {
                        // Swallowed: a single bad frame should not abort the whole drain loop.
                        //Log("iDecodeActivity", "Error while creating bitmap with: ");
                    }

                    // Return the buffer to the codec; 'false' = do not render to a surface.
                    _Decoder.ReleaseOutputBuffer(encoderStatus, false);
                }

                if ((mBufferInfo.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                {
                    Log.Info(TAG, "End of Stream Reached!!");
                    break;
                }
            }
        }

        s.Stop();
        Log.Info("inner STOPWATCH!!!!:", string.Format("numberofframes = {0}, totaltime = {1}", index, s.ElapsedMilliseconds));
    }
    catch (Exception e)
    {
        Log.Error(TAG, "Decode or Muxer failed", e, e.Message);
        throw;
    }
}