/// <summary>
/// Copies the current NatCam preview frame into <paramref name="colors"/> as RGBA32 pixels.
/// Reallocates both the intermediate byte buffer and <paramref name="colors"/> when the
/// preview resolution changes.
/// </summary>
/// <param name="colors">Destination pixel array; replaced when null or wrongly sized.</param>
/// <param name="width">Receives the current preview width in pixels.</param>
/// <param name="height">Receives the current preview height in pixels.</param>
/// <returns>True when a frame was captured and copied; false when NatCam had no frame.</returns>
bool PreviewBuffer(ref Color32[] colors, out int width, out int height)
{
    width = NatCam.Preview.width;
    height = NatCam.Preview.height;
    var pixelCount = width * height;
    var rawFrameBytesCount = pixelCount * 4; // RGBA32: 4 bytes per pixel

    lock (threadLock)
    {
        // (Re)allocate the raw byte buffer when the preview size changes.
        if ((rawFrameBytes == null) || (rawFrameBytes.Length != rawFrameBytesCount))
        {
            rawFrameBytes = new byte[rawFrameBytesCount];
        }

        if (!NatCam.CaptureFrame(rawFrameBytes))
        {
            return false;
        }

        // (Re)allocate the caller's Color32 array when the pixel count changes.
        if ((colors == null) || (colors.Length != pixelCount))
        {
            colors = new Color32[pixelCount];
        }

        // Pin the managed Color32[] and blit the raw bytes into it.
        // FIX: free the pinned handle in a finally block — previously, if
        // Marshal.Copy threw, the handle was never freed and the array stayed
        // pinned (a permanent GC fragmentation leak).
        var handle = GCHandle.Alloc(colors, GCHandleType.Pinned);
        try
        {
            Marshal.Copy(rawFrameBytes, 0, handle.AddrOfPinnedObject(), rawFrameBytesCount);
        }
        finally
        {
            handle.Free();
        }
    }
    return true;
}
/// <summary>
/// Called on every frame that the camera preview updates. When no image
/// processing is selected, the NatCam preview texture is displayed directly;
/// otherwise the frame is captured into a managed buffer and a matching
/// Texture2D is kept ready for the processed output.
/// </summary>
public virtual void OnFrame()
{
    onFrameCount++;

    if (imageProcessingType == ImageProcessingType.None)
    {
        didUpdateThisFrame = true;
        preview.texture = NatCam.Preview;
        return;
    }

    // Drop the managed buffer if the preview resolution changed.
    if (buffer != null && buffer.Length != NatCam.Preview.width * NatCam.Preview.height * 4)
    {
        buffer = null;
    }
    if (buffer == null)
    {
        buffer = new byte[NatCam.Preview.width * NatCam.Preview.height * 4];
    }

    // Read the current frame into the managed buffer.
    NatCam.CaptureFrame(buffer);

    // Recreate the display texture when its size no longer matches the preview.
    if (texture && (texture.width != NatCam.Preview.width || texture.height != NatCam.Preview.height))
    {
        Texture2D.Destroy(texture);
        texture = null;
    }
    texture = texture ?? new Texture2D(NatCam.Preview.width, NatCam.Preview.height, textureFormat, false, false);

    didUpdateThisFrame = true;
}
/// <summary>
/// Called when the camera preview starts. Allocates the pixel buffer, grabs
/// the first frame into an OpenCV Mat, creates the display texture, and wires
/// the result into the preview RawImage.
/// </summary>
public virtual void OnStart()
{
    // Allocate one RGBA32 frame's worth of bytes and capture the first frame.
    // `flip: true` because OpenCV's Y axis points down, unlike NatCam's.
    pixelBuffer = new byte[NatCam.Preview.width * NatCam.Preview.height * 4];
    NatCam.CaptureFrame(pixelBuffer, true);

    // Recreate the preview matrix when its dimensions no longer match.
    if (matrix != null && (matrix.cols() != NatCam.Preview.width || matrix.rows() != NatCam.Preview.height))
    {
        matrix.Dispose();
        matrix = null;
    }
    matrix = matrix ?? new Mat(NatCam.Preview.height, NatCam.Preview.width, CvType.CV_8UC4);
    Utils.copyToMat(pixelBuffer, matrix);

    // Recreate the display texture when its size no longer matches the matrix.
    if (texture && (texture.width != matrix.cols() || texture.height != matrix.rows()))
    {
        Texture2D.Destroy(texture);
        texture = null;
    }
    texture = texture ?? new Texture2D(matrix.cols(), matrix.rows(), textureFormat, false, false);

    // Match the panel's aspect ratio to the preview's.
    aspectFitter.aspectRatio = NatCam.Preview.width / (float)NatCam.Preview.height;

    // Show the result.
    preview.texture = texture;

    Debug.Log($"OnStart (): {matrix.cols()} {matrix.rows()} {NatCam.Preview.width} {NatCam.Preview.height} {texture.width} {texture.height}");
}
// Per-frame callback: captures the preview, greyscales it in place, and
// uploads the processed bytes to the display texture. Statement order matters:
// capture must precede conversion, and LoadRawTextureData requires Apply()
// to push the data to the GPU.
void OnFrame()
{
    // Capture the preview frame into the reusable managed buffer.
    NatCam.CaptureFrame(buffer);
    // Convert the captured RGBA data to greyscale (in-place on `buffer`).
    ConvertToGrey(buffer);
    // Fill the texture with the greys and commit the upload to the GPU.
    texture.LoadRawTextureData(buffer);
    texture.Apply();
}
/// <summary>
/// Gets the current camera preview frame, converted to the correct orientation,
/// in OpenCV Matrix format. Returns null when the cached matrix no longer
/// matches the preview dimensions.
/// </summary>
private Mat GetMat()
{
    bool sizeMatches = matrix.cols() == NatCam.Preview.width
                       && matrix.rows() == NatCam.Preview.height;
    if (!sizeMatches)
    {
        return null;
    }

    // `flip: true` because OpenCV's origin is top-left while NatCam's
    // (OpenGL-style) origin is bottom-left.
    NatCam.CaptureFrame(pixelBuffer, true);
    Utils.copyToMat(pixelBuffer, matrix);
    return matrix;
}
/// <summary>
/// Gets the mat of the current frame.
/// The Mat object's type is 'CV_8UC4' (RGBA).
/// </summary>
/// <returns>The mat of the current frame.</returns>
public override Mat GetMat()
{
    // Not initialized or not capturing yet: hand back the last-known mat.
    if (!hasInitDone || !NatCam.IsPlaying || pixelBuffer == null)
    {
        return rotatedFrameMat != null ? rotatedFrameMat : frameMat;
    }

    // `flip: true` because OpenCV uses an inverted Y-coordinate system.
    NatCam.CaptureFrame(pixelBuffer, true);
    Utils.copyToMat(pixelBuffer, frameMat);
    FlipMat(frameMat, flipVertical, flipHorizontal);

    if (rotatedFrameMat == null)
    {
        return frameMat;
    }

    // A rotated mat exists, so the caller expects a 90°-clockwise view.
    Core.rotate(frameMat, rotatedFrameMat, Core.ROTATE_90_CLOCKWISE);
    return rotatedFrameMat;
}
/// <summary>
/// Gets the current camera preview frame, converted to the correct orientation,
/// in OpenCV Matrix format. Returns null when the cached matrix does not match
/// the preview size, or when the selected capture method is unavailable on
/// this device.
/// </summary>
private Mat GetMat(MatCaptureMethod matCaptureMethod = MatCaptureMethod.NatCam_CaptureFrame)
{
    if (matrix.cols() != NatCam.Preview.width || matrix.rows() != NatCam.Preview.height)
    {
        return null;
    }

    if (matCaptureMethod == MatCaptureMethod.NatCam_CaptureFrame_OpenCVFlip)
    {
        // Capture unflipped, then let OpenCV flip: Y-0 is the top of the image
        // in OpenCV, whereas in OpenGL (and so NatCam) Y-0 is the bottom.
        NatCam.CaptureFrame(pixelBuffer, false);
        Utils.copyToMat(pixelBuffer, matrix);
        Core.flip(matrix, matrix, 0);
    }
    else if (matCaptureMethod == MatCaptureMethod.BlitWithReadPixels)
    {
        // Workaround for devices (e.g. Zenfone 2) where NatCam.PreviewMatrix
        // does not work properly: Graphics.Blit to a temporary render texture,
        // read back with Texture2D.ReadPixels, then access the raw pixel data.
        // The Texture2D's format must be RGBA32 (Unity 5.5+), ARGB32, RGB24,
        // RGBAFloat or RGBAHalf.
        Utils.textureToTexture2D(NatCam.Preview, texture);
        Utils.copyToMat(texture.GetRawTextureData(), matrix);
        // Flip to OpenCV's inverted Y-coordinate system.
        Core.flip(matrix, matrix, 0);
    }
    else if (matCaptureMethod == MatCaptureMethod.Graphics_CopyTexture)
    {
        // Workaround for devices (e.g. Zenfone 2) via a GPU-side texture copy;
        // bail out when the platform does not support CopyTexture at all.
        if (SystemInfo.copyTextureSupport == UnityEngine.Rendering.CopyTextureSupport.None)
        {
            if (fpsMonitor != null)
            {
                fpsMonitor.consoleText = "SystemInfo.copyTextureSupport: None";
            }
            return null;
        }
        Graphics.CopyTexture(NatCam.Preview, texture);
        Utils.copyToMat(texture.GetRawTextureData(), matrix);
        // Flip to OpenCV's inverted Y-coordinate system.
        Core.flip(matrix, matrix, 0);
    }
    else
    {
        // Default path (NatCam_CaptureFrame and any unknown value): NatCam
        // captures with flip=true, so the data already matches OpenCV's
        // inverted Y-coordinate system.
        NatCam.CaptureFrame(pixelBuffer, true);
        Utils.copyToMat(pixelBuffer, matrix);
    }

    return matrix;
}