/// <summary>
/// Responds to privilege requester result.
/// </summary>
/// <param name="result">Result of the privilege request.</param>
private void HandlePrivilegesDone(MLResult result)
{
    if (!result.IsOk)
    {
        CheckPrivilegeDenied(result);
        Debug.LogErrorFormat("Error: RawVideoCaptureExample failed to get all requested privileges, disabling script. Reason: {0}", result);
        enabled = false;
        return;
    }

    Debug.Log("Succeeded in requesting all privileges");

    // Called here because it needs privileges to be granted first on resume by PrivilegeRequester.
    if (!_appPaused)
    {
        EnableCapture();
        return;
    }

    _appPaused = false;

    MLResult resumeResult = MLCamera.ApplicationPause(_appPaused);
    if (!resumeResult.IsOk)
    {
        Debug.LogErrorFormat("Error: RawVideoCaptureExample failed to resume MLCamera, disabling script. Reason: {0}", resumeResult);
        enabled = false;
        return;
    }

    _isCameraConnected = true;
    MLInput.OnControllerButtonDown += OnButtonDown;
}
// Drives the capture state machine each call: starts recording when ready,
// counts calls while recording, stops after 240 counts, then uploads.
void processVideo()
{
    bool readyToRecord = (videostate == VideoState.VIDEO_READY);
    if (readyToRecord)
    {
        videostate = VideoState.VIDEO_STARTED;
        MLog("Starting video");
        MLCamera.StartVideoCapture(filepath);
        MLog("Started video");
        count = 0;
    }

    count++;

    // ~240 calls after recording began, end the capture.
    bool recordingExpired = (videostate == VideoState.VIDEO_STARTED) && (count > 240);
    if (recordingExpired)
    {
        videostate = VideoState.VIDEO_ENDED;
        MLog("Stopping video");
        MLCamera.StopVideoCapture();
        MLog("Stopped video");
    }

    if (videostate == VideoState.VIDEO_ENDED)
    {
        sendVideo();
    }
}
/// <summary>
/// Unity Start: begins the long-poll notification coroutine, registers the
/// "sea" image target for tracking, and starts/connects the MLCamera.
/// </summary>
void Start()
{
    StartCoroutine(HttpLongPollNotification("http://vcm-12481.vm.duke.edu/push/"));

    MLImageTracker.Start();
    MLImageTracker.Enable();
    MLImageTracker.AddTarget("sea", imageTarget, 0.279f, (target, result) =>
    {
        // Only react the first time the target is tracked.
        if (!_imageObjectAdded && result.Status == MLImageTargetTrackingStatus.Tracked)
        {
            Debug.Log("Image recognized.");
            DisplayAssets();
            _imageObjectAdded = true;
        }
    }, true);

    MLResult camera = MLCamera.Start();
    if (camera.IsOk)
    {
        // Fix: the MLResult returned by Connect() was previously discarded, so a
        // failed connection was still logged as "Camera Enabled".
        MLResult connect = MLCamera.Connect();
        if (connect.IsOk)
        {
            Debug.Log("Camera Enabled");
        }
        else
        {
            Debug.LogErrorFormat("Error: failed to connect MLCamera. Reason: {0}", connect);
        }
    }
    else
    {
        Debug.LogErrorFormat("Error: failed to start MLCamera. Reason: {0}", camera);
    }
}
/// <summary>
/// Once privileges have been granted, enable the camera and callbacks,
/// and record the camera's intrinsic calibration parameters.
/// </summary>
private void StartCapture()
{
    if (_hasStarted)
    {
        return;
    }

    lock (_cameraLockObject)
    {
        EnableMLCamera();

        MLCVCameraIntrinsicCalibrationParameters cameraIntrinsics;
        // Fix: the MLResult of GetIntrinsicCalibrationParameters was ignored;
        // on failure the out-struct contents are not meaningful, so only
        // record the intrinsics when the call succeeds.
        MLResult result = MLCamera.GetIntrinsicCalibrationParameters(out cameraIntrinsics);
        if (result.IsOk)
        {
            Debug.Log("Camera is connected:" + cameraIntrinsics.FOV);
            _results.cameraIntrinsics = new SerializableCameraIntrinsics
            {
                Distortion = cameraIntrinsics.Distortion,
                FocalLength = cameraIntrinsics.FocalLength,
                FOV = cameraIntrinsics.FOV,
                Height = cameraIntrinsics.Height,
                PrincipalPoint = cameraIntrinsics.PrincipalPoint,
                Width = cameraIntrinsics.Width
            };
        }
        else
        {
            Debug.LogErrorFormat("Error: failed to GetIntrinsicCalibrationParameters. Reason: {0}", result);
        }

        MLCamera.OnRawImageAvailable += OnCaptureRawImageComplete;
        MLCamera.OnCaptureCompleted += OnCaptureCompleted;
    }

    MLInput.OnControllerButtonDown += OnButtonDown;
    _hasStarted = true;
}
/// <summary>
/// Connects the MLCamera component and instantiates a new instance
/// if it was never created.
/// </summary>
private void EnableMLCamera()
{
#if PLATFORM_LUMIN
    MLResult result = MLCamera.Start();
    if (!result.IsOk)
    {
        Debug.LogErrorFormat("Error: VideoCaptureExample failed starting MLCamera, disabling script. Reason: {0}", result);
        enabled = false;
        return;
    }

    // Fix: Connect()'s result was previously assigned but never checked, so a
    // failed connection still marked the camera as connected and registered the
    // YUV frame callback.
    result = MLCamera.Connect();
    if (!result.IsOk)
    {
        Debug.LogErrorFormat("Error: VideoCaptureExample failed connecting MLCamera, disabling script. Reason: {0}", result);
        enabled = false;
        return;
    }

    if (_rawVideoCaptureMode)
    {
        MLCamera.OnRawVideoFrameAvailableYUV += OnRawCaptureDataReceived;
    }
    else
    {
        _rawVideoCaptureVisualizer.OnRawCaptureEnded();
    }

    _isCameraConnected = true;
#endif
}
/// <summary>
/// Stop capturing video.
/// </summary>
public void EndCapture()
{
    if (!_isCapturing)
    {
        Debug.LogError("Error: VideoCaptureExample failed to end video capture because the camera is not recording.");
        return;
    }

    MLResult result = MLCamera.StopVideoCapture();
    if (result.IsOk)
    {
        // Reset recording state and notify listeners with the finished file path.
        _isCapturing = false;
        _captureStartTime = 0;
        OnVideoCaptureEnded.Invoke(_captureFilePath);
        _captureFilePath = null;
        return;
    }

    if (result.Code == MLResultCode.PrivilegeDenied)
    {
        Instantiate(Resources.Load("PrivilegeDeniedError"));
    }
    Debug.LogErrorFormat("Error: VideoCaptureExample failed to end video capture. Error Code: {0}", MLCamera.GetErrorCode().ToString());
}
/// <summary>
/// Start capturing video to input filename.
/// </summary>
/// <param name="fileName">File path to write the video to.</param>
public void StartCapture(string fileName)
{
    if (!_isCapturing && MLCamera.IsStarted && _isCameraConnected)
    {
        // Check file fileName extensions
        string extension = System.IO.Path.GetExtension(fileName);
        if (string.IsNullOrEmpty(extension) || !extension.Equals(_validFileFormat, System.StringComparison.OrdinalIgnoreCase))
        {
            Debug.LogErrorFormat("Invalid fileName extension '{0}' passed into Capture({1}).\n" +
                "Videos must be saved in {2} format.", extension, fileName, _validFileFormat);
            return;
        }

        string pathName = System.IO.Path.Combine(Application.persistentDataPath, fileName);
        MLResult result = MLCamera.StartVideoCapture(pathName);
        if (result.IsOk)
        {
            _isCapturing = true;
            _captureStartTime = Time.time;
            _captureFilePath = pathName;
            OnVideoCaptureStarted.Invoke();
        }
        else
        {
            Debug.LogErrorFormat("Failure: Could not start video capture for {0}. Error Code: {1}", fileName, MLCamera.GetErrorCode().ToString());
        }
    }
    else if (_isCapturing)
    {
        Debug.LogErrorFormat("Failure: Could not start video capture for {0} because '{1}' is already recording!", fileName, _captureFilePath);
    }
    else
    {
        // Fix: the original unconditionally reported "already recording" even when
        // the real cause was that the camera was not started or not connected.
        Debug.LogErrorFormat("Failure: Could not start video capture for {0} because the camera is not started or not connected.", fileName);
    }
}
/// <summary>
/// Disconnects the MLCamera if it was ever created or connected.
/// </summary>
public void DisableMLCamera()
{
    // Fix: guard on IsStarted so Disconnect()/Stop() are not invoked on a camera
    // that was never started, consistent with the other DisableMLCamera
    // implementations in this codebase.
    if (MLCamera.IsStarted)
    {
        MLCamera.Disconnect();
        // Explicitly set to false here as the disconnect was attempted.
        _isCameraConnected = false;
        MLCamera.Stop();
    }
    else
    {
        // Preserve the original unconditional reset of the connected flag.
        _isCameraConnected = false;
    }
}
/// <summary>
/// Cannot make the assumption that a privilege is still granted after
/// returning from pause. Return the application to the state where it
/// requests privileges needed and clear out the list of already granted
/// privileges. Also, disable the camera and unregister callbacks.
/// </summary>
void OnApplicationPause(bool pause)
{
    if (!pause)
    {
        return;
    }

    _appPaused = true;

    bool cameraActive = _isCameraConnected && MLCamera.IsStarted;
    if (cameraActive)
    {
        MLResult pauseResult = MLCamera.ApplicationPause(_appPaused);
        if (!pauseResult.IsOk)
        {
            Debug.LogErrorFormat("Error: VideoCaptureExample failed to pause MLCamera, disabling script. Reason: {0}", pauseResult);
            enabled = false;
            return;
        }

        // Notify listeners that any in-flight recording has ended.
        if (_isCapturing)
        {
            OnVideoCaptureEnded.Invoke(_captureFilePath);
            _captureFilePath = null;
        }

        _isCapturing = false;
        _captureStartTime = 0;
        _isCameraConnected = false;
    }

    MLInput.OnControllerButtonDown -= OnButtonDown;
}
// void ArucoDetection() { // // Detect ArUco markers // Dictionary dict = Aruco.getPredefinedDictionary(Aruco.DICT_4X4_1000); // Aruco.detectMarkers(cached_initMat, dict, corners, ids); // Aruco.drawDetectedMarkers(cached_initMat, corners, ids); // // Debug.Log("AD - 93: Markers Detected"); // // Debug.LogFormat("Corners: {0}", corners.Count); // // Get desired corner of marker // Point[] src_point_array = new Point[POINT_COUNT]; // for (int i = 0; i < corners.Count; i++) { // int aruco_id = (int) (ids.get(i, 0)[0]); // int src_i = arucoTosrc(aruco_id); // int corner_i = aruco_id % 4; // // Debug.LogFormat("AD - 101: aruco_id: {0}; corner_i: {1}; src_i: {2}", aruco_id, corner_i, src_i); // // Store corner[i] into spa[src_i] // src_point_array[src_i] = new Point(corners[i].get(0, corner_i)[0], corners[i].get(0, corner_i)[1]); // // Display the corner as circle on outMat. // Imgproc.circle(cached_initMat, src_point_array[src_i], 10, new Scalar(255, 255, 0)); // } // // Converting to Ray values for Raycast // Camera _cam = Camera.main; // if (_cam != null) { // for (int i = 0; i < POINT_COUNT; i++) { // if (src_point_array[i] != null) { // src_ray_array[i] = _cam.ScreenPointToRay( // new Vector3((float) src_point_array[i].x,(float) src_point_array[i].y, 0)).direction; // } // } // } // // Debug.LogFormat("Detected Direction: {0}", src_ray_array[0]); // // Debug.LogFormat("Camera Direction: {0}", _cam.transform.forward); // // Count non-null source points // bool spa_full = (count_src_nulls() == 7); // // Check if have valid faces // for (int i = 0; i < FACE_COUNT; i++) { // // faceX_full[i] = check_faces(i); // faceX_full[i] = check_faces(i); // } // // Core.flip(cached_initMat, outMat, 0); // } /// Sets the projected ScreenPoints of the world coordinate values in src_world_array /// from the PoV of the RGB Camera. 
/// <summary>
/// Sets the projected screen points (c1_point_array) for the world
/// coordinates in src_world_array, as seen from the RGB camera pose:
/// moves the proxy rgb_camera to camera_pose, applies the device camera
/// intrinsics to it, then projects each world point to screen space.
/// NOTE(review): the (x*2 - 128)/SCALE_FACTOR and (y*2)/SCALE_FACTOR mapping
/// assumes a specific capture resolution/crop — confirm against the image
/// pipeline that consumes c1_point_array.
/// </summary>
void SetC1ScreenPoints()
{
    Camera _camera = Camera.main;
    // Move the proxy RGB camera transform to the pose captured in camera_pose.
    MatrixToTransform(camera_pose, rgb_camera);
    Debug.LogFormat("Camera Pose: {0} \n old transform: {1}, {2}, {3} \n new transform: {4}, {5}, {6}", camera_pose, _camera.transform.position, _camera.transform.rotation, _camera.transform.localScale, rgb_camera.transform.position, rgb_camera.transform.rotation, rgb_camera.transform.localScale);
    // Query the device camera's intrinsic calibration (result code not checked here).
    MLCVCameraIntrinsicCalibrationParameters intrinsicParam;
    MLCamera.GetIntrinsicCalibrationParameters(out intrinsicParam);
    // Diagnostic dump comparing the main camera's parameters against the device intrinsics.
    Debug.LogFormat("Camera Pose: {0} \n Left Eye Pose: {1} \n Intrinsics: FOV -- {5} vs {2} \n Focal Length -- {6} vs. {3} \n Principal Point -- {7} vs. {4} \n Sensor Size {8} vs. {9} x {10} \n Camera Size: {11} x {12} \n Camera Rect: {13}", camera_pose, rgb_camera.GetStereoViewMatrix(Camera.StereoscopicEye.Left), intrinsicParam.FOV, intrinsicParam.FocalLength, intrinsicParam.PrincipalPoint, _camera.fieldOfView, _camera.focalLength, _camera.lensShift, _camera.sensorSize, intrinsicParam.Width, intrinsicParam.Height, rgb_camera.pixelWidth, rgb_camera.pixelHeight, rgb_camera.pixelRect);
    // Apply the physical camera intrinsics to the proxy camera so that
    // WorldToScreenPoint projects with the device's optics.
    rgb_camera.fieldOfView = intrinsicParam.FOV;
    rgb_camera.focalLength = intrinsicParam.FocalLength.x;
    rgb_camera.sensorSize = new Vector2(intrinsicParam.Width, intrinsicParam.Height);
    rgb_camera.usePhysicalProperties = true;
    // Project every tracked world point to the RGB camera's screen space,
    // then rescale into the coordinate frame expected by c1_point_array.
    for (int i = 0; i < POINT_COUNT; i++)
    {
        Vector3 world_pos = src_world_array[i];
        Vector3 c1_vector3 = rgb_camera.WorldToScreenPoint(world_pos);
        c1_point_array[i] = new Point(((c1_vector3.x * 2) - 128) / SCALE_FACTOR, (c1_vector3.y * 2) / SCALE_FACTOR);
    }
}
/// <summary>
/// Shuts the external camera down: stops the preview, disconnects and stops
/// the MLCamera, and clears the active flag.
/// </summary>
private void disableExternalCamera()
{
    MLog("disableExternalCamera :: External Camera Disabled");

    MLCamera.StopPreview();
    MLCamera.Disconnect();
    MLCamera.Stop();

    externalCameraActive = false;
}
/// <summary>
/// Brings the external camera up: starts and connects the MLCamera, begins
/// the preview, and sets the active flag.
/// </summary>
private void enableExternalCamera()
{
    MLog("enableExternalCamera :: External Camera Enabled");

    // Fix: the MLResults returned by Start()/Connect() were silently discarded;
    // on failure externalCameraActive was still set to true.
    MLResult result = MLCamera.Start();
    if (!result.IsOk)
    {
        MLog("enableExternalCamera :: failed to start MLCamera: " + result);
        return;
    }

    result = MLCamera.Connect();
    if (!result.IsOk)
    {
        MLog("enableExternalCamera :: failed to connect MLCamera: " + result);
        MLCamera.Stop();
        return;
    }

    MLCamera.StartPreview();
    externalCameraActive = true;
}
/// <summary>
/// Connects the MLCamera component and instantiates a new instance
/// if it was never created.
/// </summary>
public bool EnableMLCamera()
{
    bool cameraStarted = MLCamera.Start();
    if (cameraStarted)
    {
        _isCameraConnected = MLCamera.Connect();
    }
    return _isCameraConnected;
}
// Coroutine that endlessly requests an async raw image capture roughly
// every 0.1 seconds.
IEnumerator Capture()
{
    for (;;)
    {
        MLCamera.CaptureRawImageAsync();
        yield return new WaitForSeconds(0.1f);
    }
}
/// <summary>
/// Disconnects the MLCamera if it was ever created or connected.
/// Also stops any video recording if active.
/// </summary>
public void DisableMLCamera()
{
    // Fix: guard on IsStarted (consistent with the other DisableMLCamera
    // implementations) so Disconnect()/Stop() are not called on a camera
    // that was never started.
    if (MLCamera.IsStarted)
    {
        // End an in-flight recording before tearing the camera down.
        if (_isCapturing)
        {
            EndCapture();
        }
        MLCamera.Disconnect();
        _isCameraConnected = false;
        MLCamera.Stop();
    }
    else
    {
        // Preserve the original unconditional reset of the connected flag.
        _isCameraConnected = false;
    }
}
/// <summary>
/// Handles the event for raw capture data received, and forwards it to any listeners.
/// Sets the orientation of the framePoseTransform to the current frame pose.
/// </summary>
/// <param name="extras">Contains timestamp to use with GetFramePose, also forwarded to listeners.</param>
/// <param name="frameData">Forwarded to listeners.</param>
/// <param name="frameMetadata">Forwarded to listeners.</param>
private void OnRawCaptureDataReceived(MLCameraResultExtras extras, YUVFrameInfo frameData, MLCameraFrameMetadata frameMetadata)
{
    Matrix4x4 matrix = Matrix4x4.identity;
    // Fix: the MLResult of GetFramePose was ignored; on failure the identity
    // matrix silently snapped the pose transform to the origin. Only apply the
    // pose when the lookup succeeds. Timestamp converted from microseconds to
    // nanoseconds (* 1000).
    MLResult result = MLCamera.GetFramePose(extras.VcamTimestampUs * 1000, out matrix);
    if (result.IsOk)
    {
        _framePoseTransform.position = matrix.MultiplyPoint(Vector3.zero);
        _framePoseTransform.rotation = matrix.rotation;
    }
    else
    {
        Debug.LogErrorFormat("Error: failed to get frame pose. Reason: {0}", result);
    }

    OnRawVideoDataReceived?.Invoke(extras, frameData, frameMetadata);
}
/// <summary>
/// Captures a still image using the device's camera and returns
/// the data path where it is saved.
/// </summary>
/// <param name="fileName">The name of the file to be saved to.</param>
public void TriggerAsyncCapture()
{
    bool cameraReady = MLCamera.IsStarted && _isCameraConnected;
    if (!cameraReady)
    {
        return;
    }

    // Flag that a capture is in flight only if the request was accepted.
    if (MLCamera.CaptureRawImageAsync())
    {
        _isCapturing = true;
    }
}
/// <summary>
/// Disconnects the MLCamera if it was ever created or connected.
/// </summary>
private void DisableMLCamera()
{
    if (!MLCamera.IsStarted)
    {
        return;
    }

    MLCamera.Disconnect();
    // Explicitly set to false here as the disconnect was attempted.
    _isCameraConnected = false;
    MLCamera.Stop();
}
/// <summary>
/// Connects the MLCamera component and instantiates a new instance
/// if it was never created.
/// </summary>
public bool EnableMLCamera()
{
    MLResult startResult = MLCamera.Start();
    if (startResult.IsOk)
    {
        // Connected only when both Start and Connect report success.
        _isCameraConnected = MLCamera.Connect().IsOk;
    }
    return _isCameraConnected;
}
/// <summary>
/// Requests an async raw image capture when the camera is started and
/// connected; logs a message if the request fails.
/// </summary>
private void CaptureThreadWorker()
{
    if (!(MLCamera.IsStarted && isConnected))
    {
        return;
    }

    MLResult captureResult = MLCamera.CaptureRawImageAsync();
    if (!captureResult.IsOk)
    {
        Debug.Log("Error: Capture Failed.");
    }
}
/// <summary>
/// Captures a still image using the device's camera and returns
/// the data path where it is saved.
/// </summary>
/// <param name="fileName">The name of the file to be saved to.</param>
public void TriggerAsyncCapture()
{
    if (!MLCamera.IsStarted || !_isCameraConnected)
    {
        return;
    }

    MLResult captureResult = MLCamera.CaptureRawImageAsync();
    if (captureResult.IsOk)
    {
        // A raw image capture is now in flight.
        _isCapturing = true;
    }
}
/// <summary>
/// Start capturing video: begins the camera preview into _texture, mirrors it
/// onto all registered RawImages, and reports status via the _text field.
/// </summary>
public void StartCapture()
{
    _text.text = "StartPreview called";
    if (!_isCapturing && MLCamera.IsStarted && _isCameraConnected)
    {
        MLResult result = MLCamera.StartPreview(_texture);
        if (result.Code == MLResultCode.Ok)
        {
            _text.text = "StartPreview is OK";
            for (int i = 0; i < _rawImage.Count; i++)
            {
                _rawImage[i].texture = _texture;
            }
            _isCapturing = true;
            _captureStartTime = Time.time;
            OnVideoCaptureStarted.Invoke();
        }
        else
        {
            _text.text = "StartPreview is NOT OK";
            // Map the failure code to a human-readable status message.
            switch (result.Code)
            {
                case MLResultCode.InvalidParam:
                    _text.text = "Failed due to an invalid input parameter";
                    break;
                case MLResultCode.Pending:
                    _text.text = "PCF system isn't fully initialized";
                    break;
                case MLResultCode.UnspecifiedFailure:
                    _text.text = "Other internal error";
                    break;
                case MLResultCode.SnapshotPoseNotFound:
                    _text.text = "Coordinate Frame is valid, but not found";
                    break;
                case MLResultCode.PrivilegeDenied:
                    _text.text = "Privilege denied";
                    Instantiate(Resources.Load("PrivilegeDeniedError"));
                    break;
            }
            // Fix: the original format string had two placeholders ({0}, {1}) but
            // only one argument, which throws FormatException instead of logging.
            Debug.LogErrorFormat("Error: VideoCaptureExample failed to start video capture. Error Code: {0}", MLCamera.GetErrorCode().ToString());
        }
    }
    else
    {
        _text.text = "StartPreview failed";
        // Fix: the original passed NO arguments for its two placeholders, which
        // throws FormatException at runtime instead of logging.
        Debug.LogError("Error: VideoCaptureExample failed to start video capture because the camera is not ready or is already recording!");
    }
}
/// <summary>
/// Unity OnDisable: tears down the camera if it is running. The location
/// service shutdown is intentionally left disabled.
/// </summary>
void OnDisable()
{
    if (MLCamera.IsStarted)
    {
        MLCamera.Disconnect();
        MLCamera.Stop();
    }

    if (MLLocation.IsStarted)
    {
        //MLLocation.Stop();
    }
}
/// <summary>
/// Disconnects the MLCamera if it was ever created or connected.
/// Also stops any video recording if active.
/// </summary>
private void DisableMLCamera()
{
    if (!MLCamera.IsStarted)
    {
        return;
    }

    // End an in-flight recording before tearing the camera down.
    if (_isCapturing)
    {
        EndCapture();
    }

    MLCamera.Disconnect();
    _isCameraConnected = false;
    MLCamera.Stop();
}
/// <summary>
/// Once privileges have been granted, enable the camera, start the preview,
/// and allocate the preview texture plus OpenCV buffers sized to the preview.
/// </summary>
private void StartCapture()
{
    if (_hasStarted)
    {
        return;
    }

    lock (_cameraLockObject)
    {
        EnableMLCamera();
    }
    _hasStarted = true;

    MLCamera.StartPreview();

    int width = MLCamera.PreviewTextureWidth;
    int height = MLCamera.PreviewTextureHeight;

    texture = new Texture2D(width, height, TextureFormat.RGBA32, false);
    Debug.Log("WebCamTextureToMatHelper:: " + " width:" + width + " height:" + height);

    // (Re)allocate the pixel buffer only when the preview size changed.
    if (colors == null || colors.Length != width * height)
    {
        colors = new Color32[width * height];
    }

    frameMat = new Mat(height, width, CvType.CV_8UC4);

    screenOrientation = Screen.orientation;
    screenWidth = Screen.width;
    screenHeight = Screen.height;

    // Fix: collapsed the redundant isRotatedFrame local, which only mirrored
    // rotate90Degree. A rotated frame swaps width/height.
    if (rotate90Degree)
    {
        rotatedFrameMat = new Mat(width, height, CvType.CV_8UC4);
    }

    isInitWaiting = false;
    hasInitDone = true;
    initCoroutine = null;

    if (onInitialized != null)
    {
        onInitialized.Invoke();
    }
}
/// <summary>
/// Disconnects the MLCamera if it was ever created or connected.
/// Also stops any video recording if active.
/// </summary>
private void DisableMLCamera()
{
    if (!MLCamera.IsStarted)
    {
        return;
    }

    // End an in-flight recording before tearing the camera down.
    if (_isCapturing)
    {
        EndCapture();
    }

    MLCamera.Disconnect();
    _isCameraConnected = false;
    MLCamera.Stop();
    // Unhook the frame callback so this object no longer receives YUV frames.
    MLCamera.OnRawVideoFrameAvailableYUV -= OnRawCaptureDataReceived;
}
/// <summary>
/// Disconnects the MLCamera if it was ever created or connected.
/// </summary>
private void DisableMLCamera()
{
    lock (_cameraLockObject)
    {
        Debug.Log("DisableMLCamera()");
        if (!MLCamera.IsStarted)
        {
            return;
        }

        MLCamera.Disconnect();
        // Explicitly set to false here as the disconnect was attempted.
        _isCameraConnected = false;
        MLCamera.Stop();
    }
}
/// <summary>
/// Stops the active camera.
/// </summary>
public virtual void Stop()
{
    if (!hasInitDone)
    {
        return;
    }
#if !UNITY_EDITOR
    // On device, halt the MLCamera preview.
    MLCamera.StopPreview();
#else
    // In the editor, stop the webcam texture standing in for the device camera.
    webCamTexture.Stop();
#endif
}
/// <summary>
/// Setup the text field for camera intrinsic values.
/// Precondition: MLCamera must be successfully started.
/// </summary>
void SetupCameraIntrinsics()
{
    MLCVCameraIntrinsicCalibrationParameters parameters;
    MLResult queryResult = MLCamera.GetIntrinsicCalibrationParameters(out parameters);
    if (!queryResult.IsOk)
    {
        Debug.LogErrorFormat("Error: RawVideoCaptureExample failed to GetIntrinsicCalibrationParameters. Reason: {0}", queryResult);
        return;
    }

    _intrinsicValuesText.text = CalibrationParametersToString(parameters);
}
/// <summary>
/// Disconnects the MLCamera if it was ever created or connected.
/// </summary>
private void DisableMLCamera()
{
#if PLATFORM_LUMIN
    lock (_cameraLockObject)
    {
        if (!MLCamera.IsStarted)
        {
            return;
        }

        MLCamera.Disconnect();
        // Explicitly set to false here as the disconnect was attempted.
        _isCameraConnected = false;
        MLCamera.Stop();
    }
#endif
}