/// <summary>
/// Sets up photo capture: picks a camera resolution, configures the camera
/// parameters, allocates the texture/Mat pixel buffers, loads the dlib shape
/// predictor, and asynchronously creates the PhotoCapture object.
/// </summary>
void Initialize()
{
    Debug.Log("Initializing...");

    List<Resolution> resolutions = new List<Resolution>(PhotoCapture.SupportedResolutions);
    if (resolutions.Count == 0)
    {
        Debug.LogError("No supported camera resolutions found.");
        return;
    }
    foreach (var item in resolutions)
    {
        Debug.Log("resolution width " + item.width + " height " + item.height);
    }

    // The original indexed [1] unconditionally; clamp so a device reporting a
    // single resolution does not throw IndexOutOfRangeException.
    Resolution selectedResolution = resolutions[resolutions.Count > 1 ? 1 : 0];

    m_CameraParameters = new WSAWebCamCameraParameters(WebCamMode.PhotoMode);
    m_CameraParameters.cameraResolutionWidth = selectedResolution.width;
    m_CameraParameters.cameraResolutionHeight = selectedResolution.height;
    m_CameraParameters.hologramOpacity = 0.0f;
    m_CameraParameters.pixelFormat = CapturePixelFormat.BGRA32;

    // BGRA32 texture and matching 4-channel Mat share the selected dimensions.
    m_Texture = new Texture2D(selectedResolution.width, selectedResolution.height, TextureFormat.BGRA32, false);
    rgbaMat = new Mat(m_Texture.height, m_Texture.width, CvType.CV_8UC4);
    colors = new Color32[rgbaMat.cols() * rgbaMat.rows()];

    // Fail loudly (without throwing) when the shape-predictor asset is missing,
    // matching the check the sibling Initialize() overloads perform.
    string predictorPath = DlibFaceLandmarkDetector.Utils.getFilePath("sp_human_face_68.dat");
    if (string.IsNullOrEmpty(predictorPath))
    {
        Debug.LogError("shape predictor file \"sp_human_face_68.dat\" does not exist in StreamingAssets.");
    }
    faceLandmarkDetector = new FaceLandmarkDetector(predictorPath);

    PhotoCapture.CreateAsync(false, OnCreatedPhotoCaptureObject);
}
/// <summary>
/// Selects the highest-pixel-count resolution and its highest supported frame
/// rate, asynchronously creates a VideoCapture instance, and starts video mode
/// with application + microphone audio.
/// </summary>
void StartVideoCaptureTest()
{
    // Highest pixel-count resolution first.
    Resolution bestResolution = UnityEngine.XR.WSA.WebCam.VideoCapture.SupportedResolutions
        .OrderByDescending((res) => res.width * res.height)
        .First();
    Debug.Log(bestResolution);

    // Highest frame rate supported at that resolution.
    float bestFrameRate = UnityEngine.XR.WSA.WebCam.VideoCapture
        .GetSupportedFrameRatesForResolution(bestResolution)
        .OrderByDescending((fps) => fps)
        .First();
    Debug.Log(bestFrameRate);

    UnityEngine.XR.WSA.WebCam.VideoCapture.CreateAsync(false, delegate(UnityEngine.XR.WSA.WebCam.VideoCapture videoCapture)
    {
        // Guard clause instead of if/else: bail out when creation failed.
        if (videoCapture == null)
        {
            Debug.LogError("Failed to create VideoCapture Instance!");
            return;
        }

        m_VideoCapture = videoCapture;
        Debug.Log("Created VideoCapture Instance!");

        UnityEngine.XR.WSA.WebCam.CameraParameters parameters = new UnityEngine.XR.WSA.WebCam.CameraParameters();
        parameters.hologramOpacity = 0.0f;
        parameters.frameRate = bestFrameRate;
        parameters.cameraResolutionWidth = bestResolution.width;
        parameters.cameraResolutionHeight = bestResolution.height;
        parameters.pixelFormat = UnityEngine.XR.WSA.WebCam.CapturePixelFormat.BGRA32;

        m_VideoCapture.StartVideoModeAsync(
            parameters,
            UnityEngine.XR.WSA.WebCam.VideoCapture.AudioState.ApplicationAndMicAudio,
            OnStartedVideoCaptureMode);
    });
}
/// <summary>
/// Sets up photo capture: picks a camera resolution, configures the camera
/// parameters, allocates the texture/Mat pixel buffers, resolves and loads the
/// dlib shape predictor, and asynchronously creates the PhotoCapture object.
/// </summary>
void Initialize()
{
    Debug.Log("Initializing...");

    List<Resolution> resolutions = new List<Resolution>(PhotoCapture.SupportedResolutions);
    if (resolutions.Count == 0)
    {
        Debug.LogError("No supported camera resolutions found.");
        return;
    }
    foreach (var item in resolutions)
    {
        Debug.Log("resolution width " + item.width + " height " + item.height);
    }

    // The original indexed [1] unconditionally; clamp so a device reporting a
    // single resolution does not throw IndexOutOfRangeException.
    Resolution selectedResolution = resolutions[resolutions.Count > 1 ? 1 : 0];

    m_CameraParameters = new WSAWebCamCameraParameters(WebCamMode.PhotoMode);
    m_CameraParameters.cameraResolutionWidth = selectedResolution.width;
    m_CameraParameters.cameraResolutionHeight = selectedResolution.height;
    m_CameraParameters.hologramOpacity = 0.0f;
    m_CameraParameters.pixelFormat = CapturePixelFormat.BGRA32;

    // BGRA32 texture and matching 4-channel Mat share the selected dimensions.
    m_Texture = new Texture2D(selectedResolution.width, selectedResolution.height, TextureFormat.BGRA32, false);
    rgbaMat = new Mat(m_Texture.height, m_Texture.width, CvType.CV_8UC4);
    colors = new Color32[rgbaMat.cols() * rgbaMat.rows()];

    dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath(dlibShapePredictorFileName);
    if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
    {
        // Original message used garbled typographic quotes and a trailing space.
        Debug.LogError("shape predictor file does not exist. Please copy from \"DlibFaceLandmarkDetector/StreamingAssets/\" to \"Assets/StreamingAssets/\" folder.");
    }
    faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

    PhotoCapture.CreateAsync(false, OnCreatedPhotoCaptureObject);
}
/// <summary>
/// Sets up photo capture: picks a camera resolution, configures the camera
/// parameters, allocates the texture/Mat buffers (color + grayscale), loads
/// the Haar face cascade, and asynchronously creates the PhotoCapture object.
/// </summary>
void Initialize()
{
    Debug.Log("Initializing...");

    List<Resolution> resolutions = new List<Resolution>(PhotoCapture.SupportedResolutions);
    if (resolutions.Count == 0)
    {
        Debug.LogError("No supported camera resolutions found.");
        return;
    }
    foreach (var item in resolutions)
    {
        Debug.Log("resolution width " + item.width + " height " + item.height);
    }

    // The original indexed [1] unconditionally; clamp so a device reporting a
    // single resolution does not throw IndexOutOfRangeException.
    Resolution selectedResolution = resolutions[resolutions.Count > 1 ? 1 : 0];

    m_CameraParameters = new WSAWebCamCameraParameters(WebCamMode.PhotoMode);
    m_CameraParameters.cameraResolutionWidth = selectedResolution.width;
    m_CameraParameters.cameraResolutionHeight = selectedResolution.height;
    m_CameraParameters.hologramOpacity = 0.0f;
    m_CameraParameters.pixelFormat = CapturePixelFormat.BGRA32;

    m_Texture = new Texture2D(selectedResolution.width, selectedResolution.height, TextureFormat.BGRA32, false);
    rgbaMat = new Mat(m_Texture.height, m_Texture.width, CvType.CV_8UC4);
    colors = new Color32[rgbaMat.cols() * rgbaMat.rows()];

    // Grayscale working buffer for the cascade detector.
    grayMat = new Mat(rgbaMat.rows(), rgbaMat.cols(), CvType.CV_8UC1);
    faces = new MatOfRect();

    cascade = new CascadeClassifier();
    // The original ignored load()'s return value, so a missing cascade file
    // failed silently and detection just never found anything.
    if (!cascade.load(Utils.getFilePath("haarcascade_frontalface_alt.xml")))
    {
        Debug.LogError("cascade file \"haarcascade_frontalface_alt.xml\" could not be loaded from StreamingAssets.");
    }

    PhotoCapture.CreateAsync(false, OnCreatedPhotoCaptureObject);
}
/// <summary>
/// Starts the picture-capturing process: selects the highest supported
/// resolution, creates a PhotoCapture object, enters photo mode (JPEG), and
/// then takes pictures repeatedly every m_pictureInterval seconds.
/// </summary>
void Start()
{
    // Get the highest resolution (by pixel count).
    m_cameraResolution = UnityEngine.XR.WSA.WebCam.PhotoCapture.SupportedResolutions
        .OrderByDescending((res) => res.width * res.height)
        .First();

    UnityEngine.XR.WSA.WebCam.PhotoCapture.CreateAsync(false, delegate(UnityEngine.XR.WSA.WebCam.PhotoCapture captureObject)
    {
        // CreateAsync reports failure by passing null; the original dereferenced
        // captureObject unconditionally and would NRE when no camera is available.
        if (captureObject == null)
        {
            Debug.LogError("Failed to create PhotoCapture instance!");
            return;
        }

        // Assign capture object
        m_photoCapture = captureObject;

        // Configure camera
        UnityEngine.XR.WSA.WebCam.CameraParameters cameraParameters = new UnityEngine.XR.WSA.WebCam.CameraParameters();
        cameraParameters.hologramOpacity = 0.0f;
        cameraParameters.cameraResolutionWidth = m_cameraResolution.width;
        cameraParameters.cameraResolutionHeight = m_cameraResolution.height;
        cameraParameters.pixelFormat = UnityEngine.XR.WSA.WebCam.CapturePixelFormat.JPEG;

        // Start the photo mode and start taking pictures on a fixed interval.
        m_photoCapture.StartPhotoModeAsync(cameraParameters, delegate(UnityEngine.XR.WSA.WebCam.PhotoCapture.PhotoCaptureResult result)
        {
            Debug.Log("Photo Mode started");
            InvokeRepeating("ExecutePictureProcess", 0, m_pictureInterval);
        });
    });
}
// PhotoCapture creation callback: stores the capture object, selects the
// highest-pixel-count resolution, and enters photo mode with PNG output.
void OnPhotoCaptureCreated(UnityEngine.XR.WSA.WebCam.PhotoCapture captureObject)
{
    photo_capture_ = captureObject;

    camera_resolution_ = UnityEngine.XR.WSA.WebCam.PhotoCapture.SupportedResolutions
        .OrderByDescending((res) => res.width * res.height)
        .First();

    // Camera is configured to capture PNG frames with holograms hidden.
    UnityEngine.XR.WSA.WebCam.CameraParameters parameters = new UnityEngine.XR.WSA.WebCam.CameraParameters();
    parameters.hologramOpacity = 0.0f;
    parameters.cameraResolutionWidth = camera_resolution_.width;
    parameters.cameraResolutionHeight = camera_resolution_.height;
    parameters.pixelFormat = UnityEngine.XR.WSA.WebCam.CapturePixelFormat.PNG;

    captureObject.StartPhotoModeAsync(parameters, OnPhotoModeStarted);
}
/// <summary>
/// Prepares camera parameters and the photo Texture2DArray, clears slice 0 of
/// the array to transparent, and kicks off asynchronous PhotoCapture creation.
/// </summary>
void InitCamera()
{
    // First supported resolution (1280 x 720).
    List<Resolution> resolutions = new List<Resolution>(UnityEngine.XR.WSA.WebCam.PhotoCapture.SupportedResolutions);
    Resolution selectedResolution = resolutions[0];

    // Camera params: BGRA32 photo mode with holograms hidden.
    m_CameraParameters = new UnityEngine.XR.WSA.WebCam.CameraParameters(UnityEngine.XR.WSA.WebCam.WebCamMode.PhotoMode)
    {
        cameraResolutionWidth = selectedResolution.width,
        cameraResolutionHeight = selectedResolution.height,
        hologramOpacity = 0.0f,
        pixelFormat = UnityEngine.XR.WSA.WebCam.CapturePixelFormat.BGRA32
    };

    textureArray = new Texture2DArray(TEXTURE_WIDTH, TEXTURE_HEIGHT, maxPhotoNum, TextureFormat.DXT5, false);

    // Build a fully transparent, DXT5-compressed texture and copy it into
    // slice 0 so the array starts out cleared.
    var clearTexture = new Texture2D(TEXTURE_WIDTH, TEXTURE_HEIGHT, TextureFormat.ARGB32, false);
    var pixels = clearTexture.GetPixels();
    for (int i = 0; i < pixels.Length; i++)
    {
        pixels[i] = Color.clear;
    }
    clearTexture.SetPixels(pixels);
    clearTexture.Apply();
    clearTexture.Compress(true);
    Graphics.CopyTexture(clearTexture, 0, 0, textureArray, 0, 0);

    // Init the PhotoCapture object asynchronously.
    UnityEngine.XR.WSA.WebCam.PhotoCapture.CreateAsync(false, OnCreatedPhotoCaptureObject);
}
/// <summary>
/// Creates a PhotoCapture object, configures it at the highest supported
/// resolution (BGRA32, holograms hidden), and enters photo mode. The supplied
/// callback is stored and invoked later by the photo-mode pipeline.
/// </summary>
/// <param name="_callback">Invoked when a photo has been captured.</param>
public void CapturePhotoAsync(OnCaptured _callback)
{
    callback = _callback;

    UnityEngine.XR.WSA.WebCam.PhotoCapture.CreateAsync(false, (_photoCapture) =>
    {
        // CreateAsync reports failure by passing null.
        if (_photoCapture == null)
        {
            Debug.LogError("Failed to create PhotoCapture instance!");
            return;
        }

        Debug.Log("PhotoInput start");
        this.photoCapture = _photoCapture;

        Resolution cameraResolution = UnityEngine.XR.WSA.WebCam.PhotoCapture.SupportedResolutions
            .OrderByDescending((res) => res.width * res.height)
            .First();

        UnityEngine.XR.WSA.WebCam.CameraParameters c = new UnityEngine.XR.WSA.WebCam.CameraParameters();
        // Fully hide holograms in the captured image (the original assigned
        // this twice — once as 0.0f and again as 0).
        c.hologramOpacity = 0.0f;
        c.cameraResolutionWidth = cameraResolution.width;
        c.cameraResolutionHeight = cameraResolution.height;
        c.pixelFormat = UnityEngine.XR.WSA.WebCam.CapturePixelFormat.BGRA32;

        this.cameraParameters = c;
        photoCapture.StartPhotoModeAsync(cameraParameters, onPhotoModeStarted);
    });
}
/// <summary>
/// Initialization: caches the singleton instance, prepares camera parameters,
/// and wires a GestureRecognizer so a Select (tap) gesture starts photo mode
/// and takes a picture.
/// </summary>
void Start()
{
    Instance = this;

    // Create a PhotoCapture object.
    // NOTE(review): captureObject and cameraResolution are not defined in this
    // method — presumably fields set elsewhere (e.g. a CreateAsync callback);
    // confirm against the rest of the class.
    UnityEngine.XR.WSA.WebCam.PhotoCapture photoCaptureObject = captureObject;

    UnityEngine.XR.WSA.WebCam.CameraParameters cameraParameters = new UnityEngine.XR.WSA.WebCam.CameraParameters();
    cameraParameters.hologramOpacity = 0.0f;
    cameraParameters.cameraResolutionWidth = cameraResolution.width;
    cameraParameters.cameraResolutionHeight = cameraResolution.height;
    cameraParameters.pixelFormat = UnityEngine.XR.WSA.WebCam.CapturePixelFormat.BGRA32;

    // Set up a GestureRecognizer to detect Select gestures.
    // The original lambda declared a bogus 4th parameter (photoCapture) that
    // does not match TappedEventDelegate's (source, tapCount, headRay)
    // signature and cannot compile; close over photoCaptureObject instead.
    recognizer = new UnityEngine.XR.WSA.Input.GestureRecognizer();
    recognizer.TappedEvent += (source, tapCount, ray) =>
    {
        // Activate the camera
        photoCaptureObject.StartPhotoModeAsync(cameraParameters, delegate(UnityEngine.XR.WSA.WebCam.PhotoCapture.PhotoCaptureResult result)
        {
            // Take a picture
            photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
        });
    };
    recognizer.StartCapturingGestures();
}
/// <summary>
/// Native entry point that starts video mode with the given camera settings
/// and audio state; <paramref name="onVideoModeStartedCallback"/> fires when
/// the mode is up. NOTE(review): an extern method needs a [DllImport] or
/// [MethodImpl] attribute — presumably declared on a line outside this view;
/// confirm.
/// </summary>
private extern void StartVideoMode_Internal(CameraParameters cameraParameters, AudioState audioState, OnVideoModeStartedCallback onVideoModeStartedCallback);
/// <summary>
/// Native entry point that starts photo mode with the given setup parameters;
/// <paramref name="onPhotoModeStartedCallback"/> fires when the mode is up.
/// NOTE(review): an extern method needs a [DllImport] or [MethodImpl]
/// attribute — presumably declared on a line outside this view; confirm.
/// </summary>
private extern void StartPhotoMode_Internal(CameraParameters setupParams, OnPhotoModeStartedCallback onPhotoModeStartedCallback);