/// <summary> Requests the given image format from the native RGB camera. </summary>
/// <param name="format"> The camera image format to apply.</param>
/// <returns> True when the native call reports success, false otherwise.</returns>
public bool SetImageFormat(CameraImageFormat format)
{
    NativeResult result = NativeApi.NRRGBCameraSetImageFormat(m_NativeCameraHandle, format);
    NRDebugger.Log("[NativeCamera] SetImageFormat: " + result.ToString());
    return result == NativeResult.Success;
}
/// <summary> Requests the given image format from the native RGB camera,
/// routing any native error through the error listener. </summary>
/// <param name="format"> The camera image format to apply.</param>
/// <returns> True when the native call reports success, false otherwise.</returns>
public bool SetImageFormat(CameraImageFormat format)
{
    NativeResult result = NativeApi.NRRGBCameraSetImageFormat(m_NativeCameraHandle, format);
    // Surface a non-success result to the centralized native error handler.
    NativeErrorListener.Check(result, this, "SetImageFormat");
    return result == NativeResult.Success;
}
/// <summary> Forwards the image format to the native camera (device builds only)
/// and logs the request. </summary>
/// <param name="format"> The camera image format to apply.</param>
private static void SetImageFormat(CameraImageFormat format)
{
#if !UNITY_EDITOR
    // The native camera only exists on device; skip the call in the editor.
    m_NativeCamera.SetImageFormat(format);
#endif
    NRDebugger.Log("[NRRgbCamera] SetImageFormat : " + format.ToString());
}
/// <summary> Sets image format. </summary>
/// <param name="format"> Describes the format to use.</param>
public void SetImageFormat(CameraImageFormat format)
{
#if !UNITY_EDITOR
    // The data provider only talks to real hardware; skip it in the editor.
    CameraDataProvider.SetImageFormat(format);
#endif
    NRDebugger.Info("[CameraController] SetImageFormat : " + format.ToString());
}
/// <summary>
/// Constructs the camera image cinfo, capturing the native handle and the
/// image's metadata at acquisition time.
/// </summary>
/// <param name="nativeHandle">The handle representing the camera image on the native level.</param>
/// <param name="dimensions">The dimensions of the camera image.</param>
/// <param name="planeCount">The number of video planes in the camera image.</param>
/// <param name="timestamp">The timestamp for when the camera image was captured.</param>
/// <param name="format">The format of the camera image.</param>
public CameraImageCinfo(int nativeHandle, Vector2Int dimensions, int planeCount, double timestamp, CameraImageFormat format)
{
    m_NativeHandle = nativeHandle;
    m_Dimensions = dimensions;
    m_PlaneCount = planeCount;
    m_Timestamp = timestamp;
    m_Format = format;
}
/// <summary> Lazily creates the RGB camera proxy, registers this view with it,
/// and applies the requested image format. Uses RGB_888 by default. </summary>
/// <param name="format"> (Optional) Camera image format.</param>
protected void CreateRGBCameraProxy(CameraImageFormat format = CameraImageFormat.RGB_888)
{
    // Proxy already exists — nothing to do.
    if (m_NativeCameraProxy != null)
    {
        return;
    }

    m_NativeCameraProxy = CameraProxyFactory.CreateRGBCameraProxy();
    m_NativeCameraProxy.Regist(this);
    m_NativeCameraProxy.SetImageFormat(format);
}
/// <summary> Applies the image format, initializing the camera first if
/// it has not been set up yet. The chosen format is cached in ImageFormat. </summary>
/// <param name="format"> The camera image format to apply.</param>
private static void SetImageFormat(CameraImageFormat format)
{
    // Lazily bring the camera up before touching the native layer.
    if (!isInitiate)
    {
        Initialize();
    }
#if !UNITY_EDITOR
    m_NativeCamera.SetImageFormat(format);
#endif
    ImageFormat = format;
    NRDebugger.Log("[NRRgbCamera] SetImageFormat : " + format.ToString());
}
/// <summary> Applies the image format, initializing the capture pipeline
/// first when it is still uninitialized. The chosen format is cached in ImageFormat. </summary>
/// <param name="format"> The camera image format to apply.</param>
public static void SetImageFormat(CameraImageFormat format)
{
    // Bring the capture pipeline up on first use.
    if (CurrentState == CaptureState.UnInitialized)
    {
        Initialize();
    }
#if !UNITY_EDITOR
    m_NativeCamera.SetImageFormat(format);
#endif
    ImageFormat = format;
    NRDebug.Log("[NRRgbCamera] SetImageFormat : " + format.ToString());
}
/// <summary> Wraps a native camera image handle together with the metadata
/// captured at acquisition time. </summary>
/// <param name="cameraImageApi"> API used to operate on the native image.</param>
/// <param name="nativeHandle"> Native-level handle for the image.</param>
/// <param name="dimensions"> Pixel dimensions of the image.</param>
/// <param name="planeCount"> Number of video planes in the image.</param>
/// <param name="timestamp"> Capture timestamp of the image.</param>
/// <param name="format"> Format of the image.</param>
internal CameraImage(
    ICameraImageApi cameraImageApi,
    int nativeHandle,
    Vector2Int dimensions,
    int planeCount,
    double timestamp,
    CameraImageFormat format)
{
    this.m_CameraImageApi = cameraImageApi;
    this.m_NativeHandle = nativeHandle;
    this.dimensions = dimensions;
    this.planeCount = planeCount;
    this.timestamp = timestamp;
    this.format = format;
#if ENABLE_UNITY_COLLECTIONS_CHECKS
    // Safety handle lets the collections checker catch use-after-dispose.
    m_SafetyHandle = AtomicSafetyHandle.Create();
#endif
}
/// <summary> Maps a camera image format to the Unity texture format used to
/// display it: YUV_420_888 becomes YUY2, everything else falls back to RGB24. </summary>
/// <param name="format"> The camera image format to map.</param>
/// <returns> The corresponding TextureFormat.</returns>
private TextureFormat GetFormatByCamraImageFormat(CameraImageFormat format)
{
    if (format == CameraImageFormat.YUV_420_888)
    {
        return TextureFormat.YUY2;
    }
    // RGB_888 and any unrecognized format are both rendered as RGB24.
    return TextureFormat.RGB24;
}
/// <summary>
/// Opens the camera using the preview size at the given index, then queries and
/// caches the preview size and image format actually reported by the native layer.
/// Any native failure closes the camera again.
/// </summary>
/// <param name="sizeId"> Index into the camera's SupportedPreviewSizes list.</param>
public void Open(int sizeId)
{
    if (this._handle == 0)
    {
        Error.ThrowNativeException(ObjectDisposed);
    }
    // BUG FIX: the old check used "Count < sizeId", which let sizeId == Count
    // through and indexed one element past the end of SupportedPreviewSizes.
    if (sizeId < 0 || sizeId >= this._cameraInfo.SupportedPreviewSizes.Count)
    {
        Error.ThrowNativeException(ArgumentOutOfRange);
    }

    int errorCode = Camera.OpenNative(this._handle, this._cameraInfo.SupportedPreviewSizes[sizeId]);
    if (errorCode < 0)
    {
        this.Close();
        return;
    }

    // Read back the preview size the native layer actually selected.
    CameraSize currentPreviewSize;
    errorCode = Camera.GetPreviewSizeNative(this._handle, out currentPreviewSize);
    if (errorCode < 0)
    {
        this.Close();
        return;
    }
    this._currentPreviewSize = currentPreviewSize;

    // Read back the preview image format the native layer actually selected.
    CameraImageFormat currentPreviewImageFormat;
    errorCode = Camera.GetPreviewImageFormatNative(this._handle, out currentPreviewImageFormat);
    if (errorCode < 0)
    {
        this.Close();
        return;
    }
    this._currentPreviewImageFormat = currentPreviewImageFormat;
}
/// <summary> Native entry point that requests an image format on the RGB camera. </summary>
/// <param name="rgb_camera_handle"> Handle to the native RGB camera instance.</param>
/// <param name="format"> The image format to request.</param>
/// <returns> A NativeResult code; callers compare it against NativeResult.Success.</returns>
public static extern NativeResult NRRGBCameraSetImageFormat( UInt64 rgb_camera_handle, CameraImageFormat format);
/// <summary> Attempts to acquire the most recent camera image by delegating
/// to the native ARCore entry point. </summary>
/// <param name="nativeHandle"> Receives the native image handle.</param>
/// <param name="dimensions"> Receives the image dimensions.</param>
/// <param name="planeCount"> Receives the number of image planes.</param>
/// <param name="timestamp"> Receives the capture timestamp.</param>
/// <param name="format"> Receives the image format.</param>
/// <returns> True if an image was acquired, false otherwise.</returns>
public bool TryAcquireLatestImage(out int nativeHandle, out Vector2Int dimensions, out int planeCount, out double timestamp, out CameraImageFormat format)
{
    bool acquired = Api.UnityARCore_cameraImage_tryAcquireLatestImage(
        out nativeHandle, out dimensions, out planeCount, out timestamp, out format);
    return acquired;
}
/// <summary> Native ARCore entry point that attempts to acquire the latest
/// camera image and its metadata. </summary>
/// <param name="nativeHandle"> Receives the native image handle.</param>
/// <param name="dimensions"> Receives the image dimensions.</param>
/// <param name="planeCount"> Receives the number of image planes.</param>
/// <param name="timestamp"> Receives the capture timestamp.</param>
/// <param name="format"> Receives the image format.</param>
/// <returns> True if an image was acquired, false otherwise.</returns>
static internal extern bool UnityARCore_cameraImage_tryAcquireLatestImage( out int nativeHandle, out Vector2Int dimensions, out int planeCount, out double timestamp, out CameraImageFormat format);
/// <summary> Sets image format. No-op implementation that always reports
/// success — presumably an editor/placeholder variant; confirm against the
/// interface's other implementations. </summary>
/// <param name="format"> Describes the format to use.</param>
/// <returns> Always true. </returns>
public bool SetImageFormat(CameraImageFormat format)
{
    return true;
}
/// <summary> Builds the view and wires up its RGB camera proxy with the
/// requested image format. </summary>
/// <param name="format"> Camera image format.</param>
public CameraModelView(CameraImageFormat format)
{
    CreateRGBCameraProxy(format);
}
/// <summary> Native call that reads the camera's current preview image format. </summary>
/// <param name="handle"> Native camera handle.</param>
/// <param name="format"> Receives the current preview image format.</param>
/// <returns> An error code; callers treat values below zero as failure.</returns>
private static extern int GetPreviewImageFormatNative(int handle, out CameraImageFormat format);
/// <summary> Stub implementation: never yields an image. All out parameters
/// are set to their default values and the call reports failure. </summary>
/// <param name="nativeHandle"> Always set to 0.</param>
/// <param name="dimensions"> Always set to the default value.</param>
/// <param name="planeCount"> Always set to 0.</param>
/// <param name="timestamp"> Always set to 0.</param>
/// <param name="format"> Always set to the default value.</param>
/// <returns> Always false.</returns>
public bool TryAcquireLatestImage(out int nativeHandle, out Vector2Int dimensions, out int planeCount, out double timestamp, out CameraImageFormat format)
{
    nativeHandle = 0;
    dimensions = default(Vector2Int);
    planeCount = 0;
    timestamp = 0.0;
    format = default(CameraImageFormat);
    return false;
}
/// <summary> Fallback implementation used when the native ARCore library is
/// not in play: reports that no image is available. </summary>
/// <param name="nativeHandle"> Always set to the default value.</param>
/// <param name="dimensions"> Always set to the default value.</param>
/// <param name="planeCount"> Always set to the default value.</param>
/// <param name="timestamp"> Always set to the default value.</param>
/// <param name="format"> Always set to the default value.</param>
/// <returns> Always false.</returns>
static internal bool UnityARCore_cameraImage_tryAcquireLatestImage(
    out int nativeHandle,
    out Vector2Int dimensions,
    out int planeCount,
    out double timestamp,
    out CameraImageFormat format)
{
    nativeHandle = default(int);
    dimensions = default(Vector2Int);
    planeCount = default(int);
    timestamp = default(double);
    format = default(CameraImageFormat);
    return false;
}
/// <summary>
/// Gets the latest image from the AR camera, converts it to the configured
/// color format, and pushes the raw bytes to the frame consumer. The
/// XRCameraImage and the temporary buffer are disposed once the frame has
/// been pushed, to avoid leaking native resources.
/// </summary>
/// <param name="eventArgs"> Frame data delivered with cameraFrameReceived.</param>
private unsafe void CaptureARBuffer(ARCameraFrameEventArgs eventArgs)
{
    // Get the image in the ARSubsystemManager.cameraFrameReceived callback.
    XRCameraImage image;
    if (!cameraManager.TryGetLatestImage(out image))
    {
        Debug.LogError("Capture AR Buffer returns nothing!!!!!!");
        return;
    }

    // NOTE(review): Position/Rotation are serialized with default values and
    // augmentByteArray is never consumed in this method — confirm whether the
    // camera pose should be captured here and where the payload is meant to go.
    Vector3 position1 = new Vector3();
    Quaternion rotation1 = new Quaternion();
    SerializedCameraData serializedCameraData = new SerializedCameraData()
    {
        Timestamp = eventArgs.timestampNs.Value,
        Position = position1,
        Rotation = rotation1,
        ProjectionMatrix = eventArgs.projectionMatrix.Value
    };
    byte[] augmentByteArray = serializedCameraData.Serialize();

    var conversionParams = new XRCameraImageConversionParams
    {
        // Convert the full image at its native resolution (no downsampling —
        // the previous "Downsample by 2" comment did not match the code).
        inputRect = new RectInt(0, 0, image.width, image.height),
        outputDimensions = new Vector2Int(image.width, image.height),
        // Target color format for the converted pixels.
        outputFormat = ConvertFormat,
        // Flip across the x axis.
        transformation = CameraImageTransformation.MirrorX
    };

    // Size of the buffer needed to hold the converted image.
    int size = image.GetConvertedDataSize(conversionParams);
    // BUG FIX: this is an informational trace, not an error condition — was Debug.LogError.
    Debug.Log("OnCameraFrameReceived, size == " + size + "w:" + image.width + " h:" + image.height + " planes=" + image.planeCount);

    // Allocate a temporary buffer and extract the converted image data into it.
    var buffer = new NativeArray <byte>(size, Allocator.Temp);
    image.Convert(conversionParams, new System.IntPtr(buffer.GetUnsafePtr()), buffer.Length);

    // The image was converted and written into the buffer, so the CameraImage
    // (and the buffer) must be disposed once the frame has been pushed, or
    // native resources leak.
    byte[] bytes = buffer.ToArray();
    monoProxy.StartCoroutine(PushFrame(bytes, image.width, image.height,
        () => { image.Dispose(); buffer.Dispose(); }));
}