/// <summary>
/// Initializes this instance by coroutine.
/// </summary>
protected virtual IEnumerator _Initialize()
{
    if (hasInitDone)
    {
        ReleaseResources();

        if (onDisposed != null) { onDisposed.Invoke(); }
    }

    isInitWaiting = true;

    bool hasFilePathCoroutineCompleted = false;
    string fullPath = string.Empty;

    getFilePathCoroutine = Utils.getFilePathAsync(requestedVideoFilePath, (result) =>
    {
        hasFilePathCoroutineCompleted = true;
        fullPath = result;
    });

    StartCoroutine(getFilePathCoroutine);

    int initFrameCount = 0;
    bool isTimeout = false;

    while (true)
    {
        if (initFrameCount > timeoutFrameCount)
        {
            isTimeout = true;
            break;
        }
        else if (hasFilePathCoroutineCompleted)
        {
            if (string.IsNullOrEmpty(fullPath))
            {
                videoCapture = null;
                isInitWaiting = false;
                initCoroutine = null;
                getFilePathCoroutine = null;

                if (onErrorOccurred != null) { onErrorOccurred.Invoke(ErrorCode.VIDEO_FILE_NOT_EXIST); }

                yield break;
            }

            videoCapture = new VideoCapture();
            videoCapture.open(fullPath);

            if (!videoCapture.isOpened())
            {
                videoCapture.Dispose();
                videoCapture = null;
                isInitWaiting = false;
                initCoroutine = null;
                getFilePathCoroutine = null;

                if (onErrorOccurred != null) { onErrorOccurred.Invoke(ErrorCode.VIDEO_FILE_CANT_OPEN); }

                yield break;
            }

            imageBufferMat = new Mat();
            videoCapture.read(imageBufferMat);
            videoCapture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            videoCapture.grab();

            baseMat = new Mat(imageBufferMat.rows(), imageBufferMat.cols(), imageBufferMat.type());
            imageBufferMat.copyTo(baseMat);

            if (baseColorFormat == outputColorFormat)
            {
                frameMat = baseMat;
            }
            else
            {
                frameMat = new Mat(baseMat.rows(), baseMat.cols(), CvType.CV_8UC(Channels(outputColorFormat)));
            }

            isPlaying = true;

            Debug.Log("VideoCaptureToMatHelper:: " + " filePath:" + requestedVideoFilePath + " width:" + frameMat.width() + " height:" + frameMat.height() + " fps:" + videoCapture.get(Videoio.CAP_PROP_FPS));

            isInitWaiting = false;
            hasInitDone = true;
            initCoroutine = null;
            getFilePathCoroutine = null;

            if (onInitialized != null) { onInitialized.Invoke(); }

            StartWaitFrameTimeThread();

            break;
        }
        else
        {
            initFrameCount++;
            yield return null;
        }
    }

    if (isTimeout)
    {
        videoCapture = null;
        isInitWaiting = false;
        initCoroutine = null;
        getFilePathCoroutine = null;

        if (onErrorOccurred != null) { onErrorOccurred.Invoke(ErrorCode.TIMEOUT); }
    }
}
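The coroutine above resolves the video file path asynchronously, then polls a completion flag once per frame and raises ErrorCode.TIMEOUT after timeoutFrameCount frames. A minimal, self-contained sketch of that poll-with-timeout pattern follows; the class and field names are illustrative and not part of the helper.

using System.Collections;
using UnityEngine;

// Illustrative only: mirrors the frame-count timeout loop used in _Initialize().
public class PollWithTimeoutSketch : MonoBehaviour
{
    int timeoutFrameCount = 300;   // how many frames to wait before giving up
    bool operationCompleted;       // would be set by an async callback

    IEnumerator WaitForOperation()
    {
        int initFrameCount = 0;
        while (!operationCompleted)
        {
            if (initFrameCount > timeoutFrameCount)
            {
                Debug.LogError("Timed out waiting for the async operation.");
                yield break;       // corresponds to the ErrorCode.TIMEOUT path above
            }
            initFrameCount++;
            yield return null;     // wait one frame, then poll again
        }
        Debug.Log("Async operation completed before the timeout.");
    }
}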
/// <summary>
/// Initializes this instance by coroutine.
/// </summary>
protected override IEnumerator _Initialize()
{
    if (hasInitDone)
    {
        StartCoroutine(_Stop());
        while (isChangeVideoModeWaiting)
        {
            yield return null;
        }

        ReleaseResources();

        if (onDisposed != null) { onDisposed.Invoke(); }
    }

    isInitWaiting = true;

    while (isChangeVideoModeWaiting)
    {
        yield return null;
    }

    isChangeVideoModeWaiting = true;

    if (videoCapture != null)
    {
        videoCapture.StopVideoModeAsync(result1 =>
        {
            cameraParams = CreateCameraParams(videoCapture);
            videoCapture.StartVideoModeAsync(cameraParams, result2 =>
            {
                if (!result2.success)
                {
                    isChangeVideoModeWaiting = false;
                    isInitWaiting = false;
                    CancelInitCoroutine();

                    if (onErrorOccurred != null) { onErrorOccurred.Invoke(ErrorCode.UNKNOWN); }
                }
                else
                {
                    isChangeVideoModeWaiting = false;
                }
            });
        });
    }
    else
    {
        // Fetch a pointer to Unity's spatial coordinate system if you need pixel mapping.
#if UNITY_2017_2_OR_NEWER
        spatialCoordinateSystemPtr = UnityEngine.XR.WSA.WorldManager.GetNativeISpatialCoordinateSystemPtr();
#else
        spatialCoordinateSystemPtr = UnityEngine.VR.WSA.WorldManager.GetNativeISpatialCoordinateSystemPtr();
#endif

        HoloLensCameraStream.VideoCapture.CreateAync(videoCapture =>
        {
            if (initCoroutine == null) return;

            if (videoCapture == null)
            {
                Debug.LogError("Did not find a video capture object. You may not be using the HoloLens.");

                isChangeVideoModeWaiting = false;
                isInitWaiting = false;
                CancelInitCoroutine();

                if (onErrorOccurred != null) { onErrorOccurred.Invoke(ErrorCode.CAMERA_DEVICE_NOT_EXIST); }

                return;
            }

            this.videoCapture = videoCapture;

            // Request the spatial coordinate ptr if you want to fetch the camera pose, and set it if you need it.
            videoCapture.WorldOriginPtr = spatialCoordinateSystemPtr;

            cameraParams = CreateCameraParams(videoCapture);

            videoCapture.FrameSampleAcquired -= OnFrameSampleAcquired;
            videoCapture.FrameSampleAcquired += OnFrameSampleAcquired;

            videoCapture.StartVideoModeAsync(cameraParams, result =>
            {
                if (!result.success)
                {
                    isChangeVideoModeWaiting = false;
                    isInitWaiting = false;
                    CancelInitCoroutine();

                    if (onErrorOccurred != null) { onErrorOccurred.Invoke(ErrorCode.UNKNOWN); }
                }
                else
                {
                    isChangeVideoModeWaiting = false;
                }
            });
        });
    }

    int initFrameCount = 0;
    bool isTimeout = false;

    while (true)
    {
        if (initFrameCount > _timeoutFrameCount)
        {
            isTimeout = true;
            break;
        }
        else if (didUpdateThisFrame)
        {
            Debug.Log("HololensCameraStreamToMatHelper:: " + "name:" + "" + " width:" + cameraParams.cameraResolutionWidth + " height:" + cameraParams.cameraResolutionHeight + " fps:" + cameraParams.frameRate);

            baseColorFormat = (outputColorFormat == ColorFormat.GRAY) ? ColorFormat.GRAY : ColorFormat.BGRA;

            baseMat = new Mat(cameraParams.cameraResolutionHeight, cameraParams.cameraResolutionWidth, CvType.CV_8UC(Channels(baseColorFormat)));

            if (baseColorFormat == outputColorFormat)
            {
                frameMat = baseMat;
            }
            else
            {
                frameMat = new Mat(baseMat.rows(), baseMat.cols(), CvType.CV_8UC(Channels(outputColorFormat)));
            }

            if (_rotate90Degree)
            {
                rotatedFrameMat = new Mat(frameMat.cols(), frameMat.rows(), CvType.CV_8UC(Channels(outputColorFormat)));
            }

            isInitWaiting = false;
            hasInitDone = true;
            initCoroutine = null;

            if (onInitialized != null) { onInitialized.Invoke(); }

            hasInitEventCompleted = true;

            break;
        }
        else
        {
            initFrameCount++;
            yield return null;
        }
    }

    if (isTimeout)
    {
        if (videoCapture != null)
        {
            videoCapture.FrameSampleAcquired -= OnFrameSampleAcquired;

            isChangeVideoModeWaiting = true;
            videoCapture.StopVideoModeAsync(result =>
            {
                videoCapture.Dispose();
                videoCapture = null;
                isChangeVideoModeWaiting = false;
            });

            isInitWaiting = false;
            initCoroutine = null;

            if (onErrorOccurred != null) { onErrorOccurred.Invoke(ErrorCode.TIMEOUT); }
        }
        else
        {
            isInitWaiting = false;
            initCoroutine = null;

            if (onErrorOccurred != null) { onErrorOccurred.Invoke(ErrorCode.TIMEOUT); }
        }
    }
}
/// <summary>
/// Initializes this instance by coroutine.
/// </summary>
protected virtual IEnumerator _Initialize()
{
    if (hasInitDone)
    {
        ReleaseResources();

        if (onDisposed != null) { onDisposed.Invoke(); }
    }

    isInitWaiting = true;

#if (UNITY_IOS || UNITY_ANDROID) && !UNITY_EDITOR
    // Checks camera permission state.
    IEnumerator coroutine = hasUserAuthorizedCameraPermission();
    yield return coroutine;

    if (!(bool)coroutine.Current)
    {
        isInitWaiting = false;
        initCoroutine = null;

        if (onErrorOccurred != null) { onErrorOccurred.Invoke(ErrorCode.CAMERA_PERMISSION_DENIED); }

        yield break;
    }
#endif

    float requestedFPS = this.requestedFPS;

    // Creates the camera
    var devices = WebCamTexture.devices;
    if (!String.IsNullOrEmpty(requestedDeviceName))
    {
        int requestedDeviceIndex = -1;
        if (Int32.TryParse(requestedDeviceName, out requestedDeviceIndex))
        {
            if (requestedDeviceIndex >= 0 && requestedDeviceIndex < devices.Length)
            {
                webCamDevice = devices[requestedDeviceIndex];

                if (avoidAndroidFrontCameraLowLightIssue && webCamDevice.isFrontFacing == true) requestedFPS = 15f;

                if (requestedFPS < 0)
                {
                    webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight);
                }
                else
                {
                    webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, (int)requestedFPS);
                }
            }
        }
        else
        {
            for (int cameraIndex = 0; cameraIndex < devices.Length; cameraIndex++)
            {
                if (devices[cameraIndex].name == requestedDeviceName)
                {
                    webCamDevice = devices[cameraIndex];

                    if (avoidAndroidFrontCameraLowLightIssue && webCamDevice.isFrontFacing == true) requestedFPS = 15f;

                    if (requestedFPS < 0)
                    {
                        webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight);
                    }
                    else
                    {
                        webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, (int)requestedFPS);
                    }
                    break;
                }
            }
        }

        if (webCamTexture == null) { Debug.Log("Cannot find camera device " + requestedDeviceName + "."); }
    }

    if (webCamTexture == null)
    {
        // Checks how many and which cameras are available on the device
        for (int cameraIndex = 0; cameraIndex < devices.Length; cameraIndex++)
        {
#if UNITY_2018_3_OR_NEWER
            if (devices[cameraIndex].kind != WebCamKind.ColorAndDepth && devices[cameraIndex].isFrontFacing == requestedIsFrontFacing)
#else
            if (devices[cameraIndex].isFrontFacing == requestedIsFrontFacing)
#endif
            {
                webCamDevice = devices[cameraIndex];

                if (avoidAndroidFrontCameraLowLightIssue && webCamDevice.isFrontFacing == true) requestedFPS = 15f;

                if (requestedFPS < 0)
                {
                    webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight);
                }
                else
                {
                    webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, (int)requestedFPS);
                }
                break;
            }
        }
    }

    if (webCamTexture == null)
    {
        if (devices.Length > 0)
        {
            webCamDevice = devices[0];

            if (avoidAndroidFrontCameraLowLightIssue && webCamDevice.isFrontFacing == true) requestedFPS = 15f;

            if (requestedFPS < 0)
            {
                webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight);
            }
            else
            {
                webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, (int)requestedFPS);
            }
        }
        else
        {
            isInitWaiting = false;
            initCoroutine = null;

            if (onErrorOccurred != null) { onErrorOccurred.Invoke(ErrorCode.CAMERA_DEVICE_NOT_EXIST); }

            yield break;
        }
    }

    // Starts the camera
    webCamTexture.Play();

    int initFrameCount = 0;
    bool isTimeout = false;

    while (true)
    {
        if (initFrameCount > timeoutFrameCount)
        {
            isTimeout = true;
            break;
        }
        else if (webCamTexture.didUpdateThisFrame)
        {
            Debug.Log("WebCamTextureToMatHelper:: " + "devicename:" + webCamTexture.deviceName + " name:" + webCamTexture.name + " width:" + webCamTexture.width + " height:" + webCamTexture.height + " fps:" + webCamTexture.requestedFPS
                + " videoRotationAngle:" + webCamTexture.videoRotationAngle + " videoVerticallyMirrored:" + webCamTexture.videoVerticallyMirrored + " isFrontFacing:" + webCamDevice.isFrontFacing);

            if (colors == null || colors.Length != webCamTexture.width * webCamTexture.height)
            {
                colors = new Color32[webCamTexture.width * webCamTexture.height];
            }

            baseMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

            if (baseColorFormat == outputColorFormat)
            {
                frameMat = baseMat;
            }
            else
            {
                frameMat = new Mat(baseMat.rows(), baseMat.cols(), CvType.CV_8UC(Channels(outputColorFormat)));
            }

            screenOrientation = Screen.orientation;
            screenWidth = Screen.width;
            screenHeight = Screen.height;

            bool isRotatedFrame = false;
#if !UNITY_EDITOR && !(UNITY_STANDALONE || UNITY_WEBGL)
            if (screenOrientation == ScreenOrientation.Portrait || screenOrientation == ScreenOrientation.PortraitUpsideDown)
            {
                if (!rotate90Degree) { isRotatedFrame = true; }
            }
            else if (rotate90Degree)
            {
                isRotatedFrame = true;
            }
#else
            if (rotate90Degree) { isRotatedFrame = true; }
#endif
            if (isRotatedFrame)
            {
                rotatedFrameMat = new Mat(frameMat.cols(), frameMat.rows(), CvType.CV_8UC(Channels(outputColorFormat)));
            }

            isInitWaiting = false;
            hasInitDone = true;
            initCoroutine = null;

            if (onInitialized != null) { onInitialized.Invoke(); }

            break;
        }
        else
        {
            initFrameCount++;
            yield return null;
        }
    }

    if (isTimeout)
    {
        webCamTexture.Stop();
        webCamTexture = null;
        isInitWaiting = false;
        initCoroutine = null;

        if (onErrorOccurred != null) { onErrorOccurred.Invoke(ErrorCode.TIMEOUT); }
    }
}
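For context, a typical consumer drives this helper by calling Initialize(), waiting for onInitialized, and then reading a Mat each frame. The sketch below is a hypothetical component; it assumes the helper's usual public surface (Initialize(), IsPlaying(), DidUpdateThisFrame(), GetMat(), the onInitialized UnityEvent) and the OpenCVForUnity namespaces, which may differ between package versions.

using UnityEngine;
using OpenCVForUnity.CoreModule;        // namespace assumed; older packages use OpenCVForUnity
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;

// Hypothetical consumer component; method/event names are assumptions based on the helper's usual public API.
[RequireComponent(typeof(WebCamTextureToMatHelper))]
public class WebCamMatConsumer : MonoBehaviour
{
    WebCamTextureToMatHelper helper;
    Texture2D texture;

    void Start()
    {
        helper = GetComponent<WebCamTextureToMatHelper>();
        helper.Initialize();   // kicks off the _Initialize() coroutine shown above
    }

    // Wired to the helper's onInitialized UnityEvent (e.g. in the Inspector).
    public void OnHelperInitialized()
    {
        Mat mat = helper.GetMat();
        texture = new Texture2D(mat.cols(), mat.rows(), TextureFormat.RGBA32, false);
        GetComponent<Renderer>().material.mainTexture = texture;
    }

    void Update()
    {
        if (helper.IsPlaying() && helper.DidUpdateThisFrame())
        {
            Mat mat = helper.GetMat();
            Utils.fastMatToTexture2D(mat, texture);   // copy the current frame into the display texture
        }
    }
}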
protected virtual void OnFrameSampleAcquired(VideoCaptureSample sample)
{
    lock (latestImageBytesLockObject)
    {
        // When copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
        // You can reuse this byte[] until you need to resize it (for whatever reason).
        if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
        {
            _latestImageBytes = new byte[sample.dataLength];
        }
        sample.CopyRawImageDataIntoBuffer(_latestImageBytes);
    }

    float[] cameraToWorldMatrixAsFloat;
    if (sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrixAsFloat) == false)
    {
        sample.Dispose();
        return;
    }

    float[] projectionMatrixAsFloat;
    if (sample.TryGetProjectionMatrix(out projectionMatrixAsFloat) == false)
    {
        sample.Dispose();
        return;
    }

    // Right now we pass things across the pipe as a float array then convert them back into UnityEngine.Matrix using a utility method.
    projectionMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(projectionMatrixAsFloat);
    cameraToWorldMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);

    cameraIntrinsics = sample.cameraIntrinsics;

    sample.Dispose();

    didUpdateThisFrame = true;
    didUpdateImageBufferInCurrentFrame = true;

    if (hasInitEventCompleted && frameMatAcquired != null)
    {
        Mat mat = new Mat(cameraParams.cameraResolutionHeight, cameraParams.cameraResolutionWidth, CvType.CV_8UC(Channels(outputColorFormat)));

        if (baseColorFormat == outputColorFormat)
        {
            MatUtils.copyToMat<byte>(latestImageBytes, mat);
        }
        else
        {
            Mat baseMat = new Mat(cameraParams.cameraResolutionHeight, cameraParams.cameraResolutionWidth, CvType.CV_8UC(Channels(baseColorFormat)));
            MatUtils.copyToMat<byte>(latestImageBytes, baseMat);
            Imgproc.cvtColor(baseMat, mat, ColorConversionCodes(baseColorFormat, outputColorFormat));
        }

        if (_rotate90Degree)
        {
            Mat rotatedFrameMat = new Mat(cameraParams.cameraResolutionWidth, cameraParams.cameraResolutionHeight, CvType.CV_8UC(Channels(outputColorFormat)));
            Core.rotate(mat, rotatedFrameMat, Core.ROTATE_90_CLOCKWISE);
            mat.Dispose();

            FlipMat(rotatedFrameMat, _flipVertical, _flipHorizontal);
            frameMatAcquired.Invoke(rotatedFrameMat, projectionMatrix, cameraToWorldMatrix, cameraIntrinsics);
        }
        else
        {
            FlipMat(mat, _flipVertical, _flipHorizontal);
            frameMatAcquired.Invoke(mat, projectionMatrix, cameraToWorldMatrix, cameraIntrinsics);
        }
    }
}
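OnFrameSampleAcquired runs on the capture plugin's frame thread, so a frameMatAcquired subscriber has to hand the delivered Mat over to the main thread itself. Below is a hedged sketch of such a subscriber: the delegate signature is inferred from the Invoke call above, and the CameraIntrinsics type/namespace is assumed from the HoloLensCameraStream plugin.

using UnityEngine;
using OpenCVForUnity.CoreModule;                                 // namespace assumed
using CameraIntrinsics = HoloLensCameraStream.CameraIntrinsics;  // type/namespace assumed

// Hypothetical subscriber: stores the latest frame from the capture thread and consumes it on the main thread.
public class FrameMatSubscriber : MonoBehaviour
{
    readonly object sync = new object();
    Mat pendingMat;

    // Signature inferred from frameMatAcquired.Invoke(mat, projectionMatrix, cameraToWorldMatrix, cameraIntrinsics) above.
    public void OnFrameMatAcquired(Mat mat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix, CameraIntrinsics cameraIntrinsics)
    {
        lock (sync)
        {
            if (pendingMat != null) pendingMat.Dispose();  // drop a frame that was never consumed
            pendingMat = mat;                              // the helper hands over a freshly allocated Mat, so the subscriber owns it
        }
    }

    void Update()
    {
        Mat mat;
        lock (sync)
        {
            mat = pendingMat;
            pendingMat = null;
        }
        if (mat == null) return;

        // ... process the frame on the main thread (e.g. convert it to a Texture2D) ...

        mat.Dispose();
    }
}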
// Update is called once per frame
protected virtual void Update()
{
    if (hasInitDone)
    {
        // Catch the orientation change of the screen and correct the mat image to the correct direction.
        if (screenOrientation != Screen.orientation && (screenWidth != Screen.width || screenHeight != Screen.height))
        {
            if (onDisposed != null) { onDisposed.Invoke(); }

            if (frameMat != null)
            {
                frameMat.Dispose();
                frameMat = null;
            }
            if (baseMat != null)
            {
                baseMat.Dispose();
                baseMat = null;
            }
            if (rotatedFrameMat != null)
            {
                rotatedFrameMat.Dispose();
                rotatedFrameMat = null;
            }

            baseMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4, new Scalar(0, 0, 0, 255));

            if (baseColorFormat == outputColorFormat)
            {
                frameMat = baseMat;
            }
            else
            {
                frameMat = new Mat(baseMat.rows(), baseMat.cols(), CvType.CV_8UC(Channels(outputColorFormat)));
            }

            screenOrientation = Screen.orientation;
            screenWidth = Screen.width;
            screenHeight = Screen.height;

            bool isRotatedFrame = false;
#if !UNITY_EDITOR && !(UNITY_STANDALONE || UNITY_WEBGL)
            if (screenOrientation == ScreenOrientation.Portrait || screenOrientation == ScreenOrientation.PortraitUpsideDown)
            {
                if (!rotate90Degree) { isRotatedFrame = true; }
            }
            else if (rotate90Degree)
            {
                isRotatedFrame = true;
            }
#else
            if (rotate90Degree) { isRotatedFrame = true; }
#endif
            if (isRotatedFrame)
            {
                rotatedFrameMat = new Mat(frameMat.cols(), frameMat.rows(), CvType.CV_8UC(Channels(outputColorFormat)), new Scalar(0, 0, 0, 255));
            }

            if (onInitialized != null) { onInitialized.Invoke(); }
        }
        else
        {
            screenWidth = Screen.width;
            screenHeight = Screen.height;
        }
    }
}