// Use this for initialization
void Start()
{
    // Locate a back-facing camera; without one there is nothing to start.
    WebCamDevice? device = GetBackCameraDevice();
    if (!device.HasValue)
    {
        // TODO:confirm to return
        return;
    }
    selected_device_ = device.Value;

    // Build a texture for the chosen device and begin streaming it.
    camera_texture_ = CreateCameraTexture(selected_device_.name, kCameraFPS);
    PlayCamera(camera_texture_);
}
/// <summary>
/// Initializes this instance by coroutine.
/// Selects a camera device (by requested name/index, facing direction, or the
/// first available), starts it, waits until real frames arrive, allocates the
/// working buffers, and raises onInitialized — or onErrorOccurred on failure.
/// </summary>
protected virtual IEnumerator _Initialize()
{
    // Re-initialization: tear down the previous session and notify listeners.
    if (hasInitDone)
    {
        ReleaseResources();
        if (onDisposed != null)
        {
            onDisposed.Invoke();
        }
    }

    isInitWaiting = true;

    // Creates the camera
    if (!String.IsNullOrEmpty(requestedDeviceName))
    {
        // The requested name may be a numeric device index or a literal device name.
        int requestedDeviceIndex = -1;
        if (Int32.TryParse(requestedDeviceName, out requestedDeviceIndex))
        {
            if (requestedDeviceIndex >= 0 && requestedDeviceIndex < WebCamTexture.devices.Length)
            {
                webCamDevice = WebCamTexture.devices [requestedDeviceIndex];
                // A negative requestedFPS means "let the platform pick the frame rate".
                if (requestedFPS < 0)
                {
                    webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight);
                }
                else
                {
                    webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, (int)requestedFPS);
                }
            }
        }
        else
        {
            // Not numeric: search for an exact device-name match.
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].name == requestedDeviceName)
                {
                    webCamDevice = WebCamTexture.devices [cameraIndex];
                    if (requestedFPS < 0)
                    {
                        webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight);
                    }
                    else
                    {
                        webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, (int)requestedFPS);
                    }
                    break;
                }
            }
        }
        if (webCamTexture == null)
        {
            Debug.Log("Cannot find camera device " + requestedDeviceName + ".");
        }
    }

    if (webCamTexture == null)
    {
        // Checks how many and which cameras are available on the device.
        // Fall back to the first camera whose facing matches the request.
        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
        {
            if (WebCamTexture.devices [cameraIndex].isFrontFacing == requestedIsFrontFacing)
            {
                webCamDevice = WebCamTexture.devices [cameraIndex];
                if (requestedFPS < 0)
                {
                    webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight);
                }
                else
                {
                    webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, (int)requestedFPS);
                }
                break;
            }
        }
    }

    if (webCamTexture == null)
    {
        // Last resort: first device of any kind, or fail with CAMERA_DEVICE_NOT_EXIST.
        if (WebCamTexture.devices.Length > 0)
        {
            webCamDevice = WebCamTexture.devices [0];
            if (requestedFPS < 0)
            {
                webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight);
            }
            else
            {
                webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, (int)requestedFPS);
            }
        }
        else
        {
            isInitWaiting = false;
            if (onErrorOccurred != null)
            {
                onErrorOccurred.Invoke(ErrorCode.CAMERA_DEVICE_NOT_EXIST);
            }
            yield break;
        }
    }

    // Starts the camera
    webCamTexture.Play();

    int initFrameCount = 0;
    bool isTimeout = false;

    while (true)
    {
        if (initFrameCount > timeoutFrameCount)
        {
            isTimeout = true;
            break;
        }
        // If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        else if (webCamTexture.width > 16 && webCamTexture.height > 16)
        {
#else
        else if (webCamTexture.didUpdateThisFrame)
        {
#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
            // Unity 5.2 iOS: keep pumping frames until the real resolution is
            // reported, still honoring the timeout budget.
            while (webCamTexture.width <= 16)
            {
                if (initFrameCount > timeoutFrameCount)
                {
                    isTimeout = true;
                    break;
                }
                else
                {
                    initFrameCount++;
                }
                webCamTexture.GetPixels32();
                yield return(new WaitForEndOfFrame());
            }
            if (isTimeout)
            {
                break;
            }
#endif
#endif
            Debug.Log("WebCamTextureToMatHelper:: " + "devicename:" + webCamTexture.deviceName + " name:" + webCamTexture.name + " width:" + webCamTexture.width + " height:" + webCamTexture.height + " fps:" + webCamTexture.requestedFPS + " videoRotationAngle:" + webCamTexture.videoRotationAngle + " videoVerticallyMirrored:" + webCamTexture.videoVerticallyMirrored + " isFrongFacing:" + webCamDevice.isFrontFacing);

            // Reuse the pixel buffer when the resolution has not changed.
            if (colors == null || colors.Length != webCamTexture.width * webCamTexture.height)
            {
                colors = new Color32[webCamTexture.width * webCamTexture.height];
            }

            frameMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            screenOrientation = Screen.orientation;
            screenWidth = Screen.width;
            screenHeight = Screen.height;

            // Decide whether frames must be rotated 90 degrees before use.
            bool isRotatedFrame = false;
#if !UNITY_EDITOR && !(UNITY_STANDALONE || UNITY_WEBGL)
            // On device, portrait orientation implies a rotated frame unless the
            // caller already requested rotation (the two cancel out).
            if (screenOrientation == ScreenOrientation.Portrait || screenOrientation == ScreenOrientation.PortraitUpsideDown)
            {
                if (!rotate90Degree)
                {
                    isRotatedFrame = true;
                }
            }
            else if (rotate90Degree)
            {
                isRotatedFrame = true;
            }
#else
            if (rotate90Degree)
            {
                isRotatedFrame = true;
            }
#endif
            if (isRotatedFrame)
            {
                // Width/height are swapped for the rotated buffer.
                rotatedFrameMat = new Mat(webCamTexture.width, webCamTexture.height, CvType.CV_8UC4);
            }

            isInitWaiting = false;
            hasInitDone = true;
            initCoroutine = null;

            if (onInitialized != null)
            {
                onInitialized.Invoke();
            }
            break;
        }
        else
        {
            initFrameCount++;
            yield return(null);
        }
    }

    // Initialization timed out: release the camera and report the error.
    if (isTimeout)
    {
        webCamTexture.Stop();
        webCamTexture = null;
        isInitWaiting = false;
        initCoroutine = null;
        if (onErrorOccurred != null)
        {
            onErrorOccurred.Invoke(ErrorCode.TIMEOUT);
        }
    }
}
// Start is called before the first frame update.
// Builds a lens-distortion lookup texture (RGFloat: each texel stores the
// normalized source UV to sample from) from the calibration constants
// (_FX/_FY/_CX/_CY, _K1-_K3, _P1/_P2), assigns it to the material, then starts
// the first webcam whose name begins with "USB2.0" as the camera feed.
void Start()
{
    // Calibration resolution of the physical camera.
    int camWidth = 640;
    int camHeight = 480;

    Debug.Log(Screen.width + "x" + Screen.height + ":" + SystemInfo.SupportsTextureFormat(TextureFormat.RGFloat));

    // The lookup map matches the screen resolution, one (u, v) pair per pixel.
    int width = Screen.width;
    int height = Screen.height;
    distortMap = new Texture2D(width, height, TextureFormat.RGFloat, false, true);
    distortMap.filterMode = FilterMode.Point;
    distortMap.anisoLevel = 1;
    distortMap.wrapMode = TextureWrapMode.Clamp;

    // -1 marks texels that receive no source mapping.
    float[] distortData = new float[width * height * 2];
    for (int i = 0; i < distortData.Length; i++)
    {
        distortData[i] = -1;
    }

    // Forward-map each camera coordinate (half-pixel steps, presumably to
    // reduce holes in the map — confirm) through the radial (k1..k3) and
    // tangential (p1, p2) distortion terms, and record at the distorted
    // location which normalized source coordinate to sample.
    for (double i = 0; i < camHeight; i += 0.5)
    {
        for (double j = 0; j < camWidth; j += 0.5)
        {
            // Normalized camera coordinates.
            double x = (j - _CX) / _FX;
            double y = (i - _CY) / _FY;
            double r2 = x * x + y * y;
            // Radial distortion factor.
            double distort = 1 + _K1 * r2 + _K2 * r2 * r2 + _K3 * r2 * r2 * r2;
            double x_distort = x * distort;
            double y_distort = y * distort;
            // Tangential distortion terms.
            x_distort += (2 * _P1 * x * y + _P2 * (r2 + 2 * x * x));
            y_distort += (_P1 * (r2 + 2 * y * y) + 2 * _P2 * x * y);
            // Back to pixel coordinates.
            x_distort = x_distort * _FX + _CX;
            y_distort = y_distort * _FY + _CY;
            //Debug.Log(x_distort + "," + y_distort);
            // Scale from camera resolution to map (screen) resolution.
            int idxU = (int)Math.Round(x_distort / camWidth * width);
            int idxV = (int)Math.Round(y_distort / camHeight * height);
            if (idxU >= 0 && idxV >= 0 && idxU < width && idxV < height)
            {
                int mapIdx = idxV * width * 2 + idxU * 2;
                //Debug.Log(mapIdx);
                distortData[mapIdx] = (float)j / camWidth;
                distortData[mapIdx + 1] = (float)i / camHeight;
            }
        }
    }

    // Disabled hole-filling pass (copies the previous texel into unmapped ones).
    /*for (int i = 0; i < distortData.Length; i++)
     * {
     * if (distortData[i] < 0)
     * {
     * distortData[i] = distortData[i - 1];
     * }
     * }*/

    distortMap.SetPixelData(distortData, 0);
    distortMap.Apply(false);
    mat.SetTexture("_DistortTex", distortMap);

    // Pick the first device whose name marks it as the external USB camera.
    WebCamDevice[] webCams = WebCamTexture.devices;
    foreach (WebCamDevice webCam in webCams)
    {
        if (webCam.name.StartsWith("USB2.0"))
        {
            Debug.Log("background camera:" + webCam.name);
            webcamTexture = new WebCamTexture(webCam.name);
            webcamTexture.Play();
            mat.SetTexture("_CamTex", webcamTexture);
            break;
        }
    }

    cam = GetComponent <Camera>();
}
// Grabs the default (first) webcam device; capture is not started here.
void Start()
{
    cam = new WebCamTexture();
}
// Bundles a webcam texture with the device index it was created from.
public CamInfo(WebCamTexture c, int idx)
{
    // The two assignments are independent; order is irrelevant.
    index = idx;
    Cam = c;
}
// Use this for initialization
void Start()
{
    // Create the default webcam feed, wire it to the window material,
    // then start capturing.
    var camera = new WebCamTexture();
    _webcam = camera;
    _window.material.mainTexture = camera;
    camera.Play();
}
// Derives the display transform for a webcam feed: a vertical flip when the
// platform reports a mirrored image, and a Z rotation matching the reported
// video rotation angle.
public static void WebCamTextureUpdate(WebCamTexture texture, out Vector3 scale, out Quaternion rotation)
{
    float yScale = 1f;
    if (texture.videoVerticallyMirrored)
    {
        yScale = -1f;
    }
    scale = new Vector3(1, yScale, 1);
    rotation = Quaternion.AngleAxis(texture.videoRotationAngle, Vector3.forward);
}
// Shows the given camera texture on this object's material and starts capture.
void PlayCamera(WebCamTexture texture)
{
    renderer.material.mainTexture = texture;
    texture.Play();
}
// Releases the camera when this component is destroyed.
private void OnDestroy()
{
    // FIX: stop the capture device before dropping the reference; merely
    // nulling the field can leave the webcam hardware active.
    if (camTexture != null)
    {
        camTexture.Stop();
    }
    camTexture = null;
}
// Camera-initialization coroutine: selects a device by facing, starts it,
// waits for the first real frame, then allocates the pixel buffer, the RGBA
// Mat and the display texture, and sizes the quad/camera to the feed.
private IEnumerator init()
{
    // Restarting: stop the previous feed and release its Mat.
    if (webCamTexture != null)
    {
        webCamTexture.Stop();
        initDone = false;
        rgbaMat.Dispose();
    }

    // Checks how many and which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
    {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing)
        {
            Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            break;
        }
    }

    // No device with the requested facing: fall back to the first camera.
    if (webCamTexture == null)
    {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
    }

    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play();

    while (true)
    {
        // If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#endif
            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            // Align the quad with the feed; mobile builds use a -90 degree roll.
            gameObject.transform.eulerAngles = new Vector3(0, 0, 0);
#if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
            gameObject.transform.eulerAngles = new Vector3(0, 0, -90);
#endif
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);
            gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);
            // bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            // if (webCamTexture.videoRotationAngle == 270)
            //     scaleY = -1.0f;
            // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

            // Fit the orthographic camera to the feed dimensions.
#if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
            Camera.main.orthographicSize = webCamTexture.width / 2;
#else
            Camera.main.orthographicSize = webCamTexture.height / 2;
#endif
            initDone = true;
            break;
        } else {
            // No frame yet — try again next tick.
            yield return(0);
        }
    }
}

// Update is called once per frame.
// Copies the latest camera frame into rgbaMat, applies orientation/mirror
// flips, and pushes the result to the display texture.
void Update()
{
    if (!initDone)
    {
        return;
    }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
    if (webCamTexture.didUpdateThisFrame) {
#endif
        Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

        // NOTE(review): the mirrored and non-mirrored branches below are
        // identical, so videoVerticallyMirrored currently has no effect —
        // confirm whether that is intentional.
        if (webCamTexture.videoVerticallyMirrored)
        {
            if (webCamDevice.isFrontFacing)
            {
                if (webCamTexture.videoRotationAngle == 0)
                {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
                else if (webCamTexture.videoRotationAngle == 90)
                {
                    Core.flip(rgbaMat, rgbaMat, 0);
                }
                else if (webCamTexture.videoRotationAngle == 270)
                {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
            }
            else
            {
                if (webCamTexture.videoRotationAngle == 90)
                {
                }
                else if (webCamTexture.videoRotationAngle == 270)
                {
                    Core.flip(rgbaMat, rgbaMat, -1);
                }
            }
        }
        else
        {
            if (webCamDevice.isFrontFacing)
            {
                if (webCamTexture.videoRotationAngle == 0)
                {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
                else if (webCamTexture.videoRotationAngle == 90)
                {
                    Core.flip(rgbaMat, rgbaMat, 0);
                }
                else if (webCamTexture.videoRotationAngle == 270)
                {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
            }
            else
            {
                if (webCamTexture.videoRotationAngle == 90)
                {
                }
                else if (webCamTexture.videoRotationAngle == 270)
                {
                    Core.flip(rgbaMat, rgbaMat, -1);
                }
            }
        }

        Utils.matToTexture2D(rgbaMat, texture, colors);
        gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
    }
}

// Releases the camera when the component is disabled.
void OnDisable()
{
    webCamTexture.Stop();
}

// Simple scaled IMGUI overlay: back button and camera switcher.
void OnGUI()
{
    // Scale the GUI so the layout is laid out against a 240-unit-wide screen.
    float screenScale = Screen.width / 240.0f;
    Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));
    GUI.matrix = scaledMatrix;

    GUILayout.BeginVertical();
    if (GUILayout.Button("back"))
    {
        Application.LoadLevel("OpenCVForUnitySample");
    }
    if (GUILayout.Button("change camera"))
    {
        // Toggle facing and re-run initialization with the other camera.
        isFrontFacing = !isFrontFacing;
        StartCoroutine(init());
    }
    GUILayout.EndVertical();
}
}
}
// Finds and starts a webcam, routes it to drawPixels, then invokes callback.
// NOTE(review): async void — exceptions thrown here are unobservable by
// callers; consider returning Task if call sites allow it.
public async void StartWebcam(System.Action callback)
{
    WebCamDevice[] devices = WebCamTexture.devices;
    if (devices.Length == 0)
    {
        Debug.Log("No camera detected");
        camAvailable = false;
        return;
    }

    if (webcamTexture == null)
    {
#if UNITY_STANDALONE_WIN
        // Windows: record every device name and use the configured WebcamIndex.
        for (int i = 0; i < devices.Length; i++)
        {
            WebcamList.Add(devices[i].name);
        }
        webcamTexture = new WebCamTexture(devices[WebcamIndex].name, Screen.width, Screen.height);
        Debug.Log($"Use Webcam : {devices[WebcamIndex].name}");
#else
        // Other platforms: prefer the first front-facing device.
        for (int i = 0; i < devices.Length; i++)
        {
            if (devices [i].isFrontFacing)
            {
                webcamTexture = new WebCamTexture(devices[i].name, Screen.width, Screen.height);
                Debug.Log($"Use Webcam : {devices[i].name}");
                break;
            }
            // Reached the last device without finding a front camera.
            // NOTE(review): hard-coded fallback to devices[1] looks suspicious
            // (why not 0 or the last index?) — confirm intent.
            if (i > 0 && i == devices.Length - 1)
            {
                webcamTexture = new WebCamTexture(devices[1].name, Screen.width, Screen.height);
                Debug.Log($"Guess Webcam : {devices[1].name}");
            }
        }
#endif
    }

    if (webcamTexture == null)
    {
        Debug.Log("Unable to find back camera");
        return;
    }

    // Already running: just report success.
    if (webcamTexture.isPlaying)
    {
        Debug.Log("Webcam are running.");
        callback?.Invoke();
        return;
    }

    webcamTexture.Play();
    drawPixels.enabled = true;
    drawPixels.color = Color.white;
    drawPixels.texture = webcamTexture;
    camAvailable = true;

    // Defer the callback one tick — presumably so the texture has a chance to
    // begin producing frames first; confirm.
    await Task.Yield();
    callback?.Invoke();
}
// Starts (or restarts) the camera at cameraIndex; a negative index means
// "reuse the previously selected device if it is still present". Also starts
// the background QR-decoding thread on first use.
void StartCamera(int cameraIndex = -1)
{
    var devices = WebCamTexture.devices;
    if (devices.Length == 0)
    {
        FlipButton.SetActive(false);
        return;
    }

    // Lazily allocate one texture slot per physical device.
    if (WebCamTextures == null)
    {
        WebCamTextures = new WebCamTexture[devices.Length];
    }
    // The flip button only makes sense with more than one camera.
    if (WebCamTextures.Length == 1)
    {
        FlipButton.SetActive(false);
    }
    else
    {
        FlipButton.SetActive(true);
    }

    OnDisable();

    CurrentCameraIndex = 0;
    if (cameraIndex < 0)
    {
        // Re-select the previously used device by name, if it still exists.
        var lastDevice = SelectedDevice;
        for (var i = 0; i < devices.Length; i++)
        {
            if (devices[i].name == lastDevice)
            {
                CurrentCameraIndex = i;
                break;
            }
        }
    }
    else if (cameraIndex < devices.Length)
    {
        CurrentCameraIndex = cameraIndex;
    }

    // in iOS, "Front Camera", "Back Camera", videoRotationAngle = 90
    var c = CurrentWebCamTexture;
    if (c == null)
    {
        c = WebCamTextures[CurrentCameraIndex] = new WebCamTexture(
            devices[CurrentCameraIndex].name, 640, 640, Application.targetFrameRate);
    }
    OnEnable();
    if (c.isPlaying)
    {
        CameraImage.texture = c;
    }
    WebCamTexrureIsChanged = false;

    // Spin up the background QR decoding thread exactly once.
    if (DecodeThread == null)
    {
        DecodeThread = new Thread(DecodeQR);
        DecodeThread.Start();
    }
}
/// <summary>
/// Camera-initialization coroutine: selects a device by facing, starts it,
/// waits for the first real frame, then allocates the OpenCV buffers, the
/// display texture, ROI/threshold state and ring objects, and sizes the
/// scene to the feed.
/// </summary>
private IEnumerator init()
{
    // Restarting: stop the old feed and drop buffers tied to its size.
    if (webCamTexture != null)
    {
        webCamTexture.Stop();
        initDone = false;
        frame.Dispose ();
    }

    // Checks how many and which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
    {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing)
        {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            break;
        }
    }

    // No device with the requested facing: fall back to the first camera.
    if (webCamTexture == null)
    {
        webCamDevice = WebCamTexture.devices[0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }

    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play ();

    while (true)
    {
        // If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        // FIX: this condition was commented out, which left an `else` without a
        // matching `if` (unbalanced braces) on the affected iOS configurations.
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#endif
            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];
            frame = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC3);
            // NOTE(review): the buffers below swap width/height relative to
            // `frame` (rows = width, cols = height) — confirm this is intended.
            frame_pot = new Mat (webCamTexture.width, webCamTexture.height, CvType.CV_8UC3);
            frame_hsv = new Mat (webCamTexture.width, webCamTexture.height, CvType.CV_8UC3);
            //final_thresh = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
            // FIX: allocate one binary plane per sample in a loop instead of
            // seven hard-coded statements, so the array is fully populated for
            // any NSAMPLES value.
            frame_thresh = new Mat[NSAMPLES];
            for (int i = 0; i < NSAMPLES; i++)
            {
                frame_thresh[i] = new Mat(webCamTexture.width, webCamTexture.height, CvType.CV_8UC1);
            }
            //Core.setIdentity (final_thresh, new Scalar(0,0,0));
            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGB24, false);
            roi = new List<My_ROI>();

            // Initial HSV threshold offsets (lower/upper per channel).
            c_lower[0,0] = 12; //12
            c_upper[0,0] = 7; //7
            c_lower[0,1] = 30; //30
            c_upper[0,1] = 40; //40
            c_lower[0,2] = 80; //80
            c_upper[0,2] = 80; //80

            // Show only the first ring; park all rings off-screen.
            ringObj[0].SetActive(true);
            ringObj[1].SetActive(false);
            ringObj[2].SetActive(false);
            ringObj[3].SetActive(false);
            ringObj[4].SetActive(false);
            ringObj[0].transform.position = new Vector3(-320, 500, 0.0f);
            ringObj[1].transform.position = new Vector3(-320, 500, 0.0f);
            ringObj[2].transform.position = new Vector3(-320, 500, 0.0f);
            ringObj[3].transform.position = new Vector3(-320, 500, 0.0f);
            ringObj[4].transform.position = new Vector3(-320, 500, 0.0f);

            // Align the quad with the feed; mobile builds use a -90 degree roll.
            gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
#if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
            gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
#endif
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);
            gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);
            // bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            // if (webCamTexture.videoRotationAngle == 270)
            //     scaleY = -1.0f;
            // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            // Fit the orthographic camera to the feed dimensions.
#if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
            Camera.main.orthographicSize = webCamTexture.width / 2;
#else
            Camera.main.orthographicSize = webCamTexture.height / 2;
#endif
            initDone = true;
            break;
        } else {
            // No frame yet — try again next tick.
            yield return 0;
        }
    }
}
// Builds a capture texture for the named device with the desired frame rate.
WebCamTexture CreateCameraTexture(string name, int fps)
{
    var texture = new WebCamTexture(name)
    {
        requestedFPS = fps
    };
    return texture;
}
// Camera-initialization coroutine for the color-object tracker: picks a device
// by facing, starts it, waits for the first real frame, then allocates the
// RGB/HSV/threshold Mats and the display texture and records the orientation.
private IEnumerator init()
{
    // Restarting: stop the previous feed and release its Mats.
    if (webCamTexture != null)
    {
        webCamTexture.Stop();
        initDone = false;
        rgbMat.Dispose();
        thresholdMat.Dispose();
        hsvMat.Dispose();
    }

    // Checks how many and which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
    {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing)
        {
            Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            break;
        }
    }

    // No device with the requested facing: fall back to the first camera.
    if (webCamTexture == null)
    {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
    }

    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play();

    while (true)
    {
        // If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
            // Unity 5.2 iOS: pump frames until the real resolution is reported.
            while (webCamTexture.width <= 16) {
                webCamTexture.GetPixels32();
                yield return(new WaitForEndOfFrame());
            }
#endif
#endif
            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC3);
            texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);
            thresholdMat = new Mat();
            hsvMat = new Mat();

            // Objects larger than ~2/3 of the frame area are treated as too big.
            MAX_OBJECT_AREA = (int)(webCamTexture.height * webCamTexture.width / 1.5);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

            updateLayout();

            screenOrientation = Screen.orientation;
            initDone = true;
            break;
        } else {
            // No frame yet — try again next tick.
            yield return(0);
        }
    }
}
// Stub for the internal engine call that pauses the given WebCamTexture.
// NOTE(review): empty body — presumably replaced by an engine/internal-call
// binding at runtime; confirm before relying on it.
private static void INTERNAL_CALL_Pause(WebCamTexture self){}
// Scene entry point: resets the global quiz state, requests webcam permission,
// starts (or reuses) a back-facing camera feed on this plane, scales the plane
// to the screen, and launches the background QR-decoding thread.
protected virtual void Start()
{
    GlobalState.Instance.SceneToSwitchTo = Config.Scenes.None;

    // Keep a static reference to this class to be able to display toast messages from other components (namely QrCodeCollection.cs).
    CameraScriptInstance = this;

    _coin = GetComponent <AudioSource>();
    _qrCodeCollection = new QrCodeCollection();

    // Reset the global state current question and coin every time we restart the camera scene.
    GlobalState.Instance.Reset();

    Application.RequestUserAuthorization(UserAuthorization.WebCam);
    // NOTE(review): the authorization request may be asynchronous on some
    // platforms; checking HasUserAuthorization immediately could race the
    // permission dialog — confirm.
    if (Application.HasUserAuthorization(UserAuthorization.WebCam))
    {
        Debug.Log("access to webcam granted!");
        Debug.Log("#WebCamDevices: " + WebCamTexture.devices.GetLength(0));

        if (GlobalState.Instance.WebCamTexture == null)
        {
            // Find a backfacing camera device.
            // (No break: the LAST back-facing device enumerated wins.)
            foreach (WebCamDevice device in WebCamTexture.devices)
            {
                Debug.Log("WebCamDevice: " + device.name);
                Debug.Log("FrontFacing? " + device.isFrontFacing);
                if (!device.isFrontFacing)
                {
                    _backFacing = device;
                }
            }

            // Try to obtain a 1024x768 texture from the webcam.
            _webcamTexture = new WebCamTexture(_backFacing.name, 1024, 768);
            _webcamTexture.Play();
            // The device might not support the requested resolution, so we try again with a lower one.
            if (_webcamTexture.width != 1024 || _webcamTexture.height != 768)
            {
                _webcamTexture.Stop();
                _webcamTexture = new WebCamTexture(_backFacing.name, 640, 480);
                _webcamTexture.Play();
            }
            // Keep a global reference to the WebCamTexture to speed up scene initialization next time.
            GlobalState.Instance.WebCamTexture = _webcamTexture;
        }
        else
        {
            _webcamTexture = GlobalState.Instance.WebCamTexture;
            _webcamTexture.Play();
        }

        GetComponent <Renderer>().material.SetTexture("_MainTex", _webcamTexture);
        Debug.Log(string.Format("Actual camera dimens: {0}x{1}", _webcamTexture.width, _webcamTexture.height));
        GlobalState.Instance.CamWidth = _webcamTexture.width;
        GlobalState.Instance.CamHeight = _webcamTexture.height;

        float camRatio = (float)_webcamTexture.width / _webcamTexture.height;
        float screenRatio = (float)Screen.width / Screen.height;

        // Scale plane so it fills the screen while keeping the camera's aspect ratio.
        // If the camera's aspect ratio differs from the screen's,
        // one side will match exactly and the other side will be larger than the screen's dimension.
        var idealHeight = 0.7f;
        if (screenRatio > camRatio)
        {
            gameObject.transform.localScale = new Vector3(screenRatio * idealHeight, 1, screenRatio * idealHeight / camRatio);
        }
        else
        {
            gameObject.transform.localScale = new Vector3(camRatio * idealHeight, 1, idealHeight);
        }

        // x10: presumably converts plane scale to world units (Unity's default
        // plane mesh) — confirm.
        GlobalState.Instance.PlaneWidth = gameObject.transform.localScale.x * 10;
        GlobalState.Instance.PlaneHeight = gameObject.transform.localScale.z * 10;

        // Decode QR codes on a background thread so the render loop stays smooth.
        _qrCodeThread = new Thread(DecodeQr);
        _qrCodeThread.Start();
    }
    else
    {
        Debug.LogError("No User Authorization for Camera Device.");
    }

    // Check win condition: if the user has already collected all the coins, show a toast.
    if (GlobalState.Instance.AllQuestions.questions.Length == GlobalState.Instance.CollectedCoinCount())
    {
        SetToastToShow(StringResources.WinToastMessage, ToastLengthLong);
    }
}
// Scene setup: loads (or seeds) the word lists from persistent storage,
// prepares the HTTP headers for the cloud-vision API, validates the API key,
// and starts the first available webcam as the UI background.
private void Start()
{
    lockTime = System.DateTime.Now.Ticks;

    // Seed the random-word pool on first run; afterwards load the saved pool.
    if (!File.Exists(Path.Combine(Application.persistentDataPath, "randomPool.txt")))
    {
        randomPoolList = new List <string> { "leg", "tree", "flower", "arm", "furniture", "finger", "nail", "chin", "nose", "head", "eye", "girl", "room", "glasses", "ceiling", "floor", "hand", "shoe", "sock", "water" };
        File.WriteAllLines(Path.Combine(Application.persistentDataPath, "randomPool.txt"), randomPoolList.ToArray());
    }
    else
    {
        randomPoolList = new List <string>(File.ReadAllLines(Path.Combine(Application.persistentDataPath, "randomPool.txt")));
    }

    // Same first-run/seed pattern for the player's current word list.
    if (!File.Exists(Path.Combine(Application.persistentDataPath, "currentWords.txt")))
    {
        FillCurrentWords();
        File.WriteAllLines(Path.Combine(Application.persistentDataPath, "currentWords.txt"), currentWordsList.ToArray());
    }
    else
    {
        currentWordsList = new List <string>(File.ReadAllLines(Path.Combine(Application.persistentDataPath, "currentWords.txt")));
    }

    UpdateTextFields();

    headers = new Dictionary <string, string>();
    headers.Add("Content-Type", "application/json; charset=UTF-8");

    // FIX: idiomatic null-or-empty check instead of `== null || == ""`.
    if (string.IsNullOrEmpty(apiKey))
    {
        Debug.LogError("No API key. Please set your API key into the \"Web Cam Texture To Cloud Vision(Script)\" component.");
    }

    defaultBackground = background.texture;

    WebCamDevice[] devices = WebCamTexture.devices;
    if (devices.Length == 0)
    {
        Debug.Log("No Camera detected");
        camAvailable = false;
        return;
    }

    // Always use the first enumerated device at screen resolution.
    // (The constructor never returns null, so no post-construction null check
    // is needed here.)
    webcamTexture = new WebCamTexture(devices[0].name, Screen.width, Screen.height);
    webcamTexture.Play();
    background.texture = webcamTexture;
    camAvailable = true;
}
/// <summary>
/// Init this instance by coroutine.
/// Selects a camera (by requested name, facing direction, or first available),
/// starts it, waits for real frames, allocates buffers, and raises
/// OnInitedEvent — or OnErrorOccurredEvent on failure/timeout.
/// </summary>
private IEnumerator init()
{
    // Re-initialization: release previous resources first.
    if (initDone)
    {
        dispose();
    }

    initWaiting = true;

    if (!String.IsNullOrEmpty(requestDeviceName))
    {
        //Debug.Log ("deviceName is "+requestDeviceName);
        webCamTexture = new WebCamTexture(requestDeviceName, requestWidth, requestHeight);
    }
    else
    {
        //Debug.Log ("deviceName is null");
        // Checks how many and which cameras are available on the device
        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
        {
            if (WebCamTexture.devices [cameraIndex].isFrontFacing == requestIsFrontFacing)
            {
                //Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
                webCamDevice = WebCamTexture.devices [cameraIndex];
                webCamTexture = new WebCamTexture(webCamDevice.name, requestWidth, requestHeight);
                break;
            }
        }
    }

    if (webCamTexture == null)
    {
        // Fall back to the first device, or fail when none exist.
        if (WebCamTexture.devices.Length > 0)
        {
            webCamDevice = WebCamTexture.devices [0];
            webCamTexture = new WebCamTexture(webCamDevice.name, requestWidth, requestHeight);
        }
        else
        {
            //Debug.Log("Camera device does not exist.")
            initWaiting = false;
            if (OnErrorOccurredEvent != null)
            {
                OnErrorOccurredEvent.Invoke(ErrorCode.CAMERA_DEVICE_NOT_EXIST);
            }
            yield break;
        }
    }

    //Debug.Log ("name " + webCamTexture.name + " width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play();

    int initCount = 0;
    bool isTimeout = false;

    while (true)
    {
        if (initCount > timeoutFrameCount)
        {
            isTimeout = true;
            break;
        }
        // If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        else if (webCamTexture.width > 16 && webCamTexture.height > 16)
        {
#else
        else if (webCamTexture.didUpdateThisFrame)
        {
#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
            // Unity 5.2 iOS: pump frames until the real resolution is reported,
            // still honoring the timeout budget.
            while (webCamTexture.width <= 16)
            {
                if (initCount > timeoutFrameCount)
                {
                    isTimeout = true;
                    break;
                }
                else
                {
                    initCount++;
                }
                webCamTexture.GetPixels32();
                yield return(new WaitForEndOfFrame());
            }
            if (isTimeout)
            {
                break;
            }
#endif
#endif
            Debug.Log("name " + webCamTexture.name + " width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

            // Reuse the pixel buffer when the resolution has not changed.
            if (colors == null || colors.Length != webCamTexture.width * webCamTexture.height)
            {
                colors = new Color32[webCamTexture.width * webCamTexture.height];
            }

            rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

            //Debug.Log ("Screen.orientation " + Screen.orientation);
            screenOrientation = Screen.orientation;
#if !UNITY_EDITOR && !(UNITY_STANDALONE || UNITY_WEBGL)
            // Portrait on device: frames need rotation, so allocate a
            // swapped-dimension Mat for the rotated copy.
            if (screenOrientation == ScreenOrientation.Portrait || screenOrientation == ScreenOrientation.PortraitUpsideDown)
            {
                rotatedRgbaMat = new Mat(webCamTexture.width, webCamTexture.height, CvType.CV_8UC4);
            }
#endif
            initWaiting = false;
            initDone = true;

            if (OnInitedEvent != null)
            {
                OnInitedEvent.Invoke();
            }
            break;
        }
        else
        {
            initCount++;
            yield return(0);
        }
    }

    // Timed out: release the camera and report the error.
    if (isTimeout)
    {
        //Debug.Log("Init time out.")
        webCamTexture.Stop();
        webCamTexture = null;
        initWaiting = false;
        if (OnErrorOccurredEvent != null)
        {
            OnErrorOccurredEvent.Invoke(ErrorCode.TIMEOUT);
        }
    }
}
// Start the default webcam and display it through the image renderer.
void Start()
{
    var feed = new WebCamTexture();
    webCamTexture = feed;
    imageRenderer.material.mainTexture = feed;
    feed.Play();
}
// (Re)creates _webCamTexture according to the configured device, resolution
// and FPS modes, then resolves the matching _webCamDevice entry.
private void CreateWebCamTexture()
{
    // Destroy any texture from a previous call before building a new one.
    if (_webCamTexture != null)
    {
        DestroyImmediate(_webCamTexture);
    }

    if (_deviceMode == DeviceMode.Auto)
    {
        // Auto device: let Unity pick; constructor overload depends on the
        // resolution/FPS modes.
        if (_resolutionMode == ResolutionMode.Auto)
        {
            _webCamTexture = new WebCamTexture();
        }
        else if (_fpsMode == FPSMode.Auto)
        {
            _webCamTexture = new WebCamTexture(_requestedWidth, _requestedHeight);
        }
        else
        {
            _webCamTexture = new WebCamTexture(_requestedWidth, _requestedHeight, _requestedFPS);
        }
    }
    else
    {
        // Explicit device: resolve its name either directly or via index.
        string requestedName = null;
        if (_deviceMode == DeviceMode.DeviceName)
        {
            requestedName = _deviceName;
        }
        else if (_deviceMode == DeviceMode.DeviceIndex)
        {
            if (_deviceIndex < WebCamTexture.devices.Length)
            {
                requestedName = WebCamTexture.devices[_deviceIndex].name;
            }
            else
            {
                Debug.LogError("WebCamStream | Cannot find webcam device at index " + _deviceIndex);
            }
        }

        if (_resolutionMode == ResolutionMode.Auto)
        {
            _webCamTexture = new WebCamTexture(requestedName);
        }
        else if (_fpsMode == FPSMode.Auto)
        {
            _webCamTexture = new WebCamTexture(requestedName, _requestedWidth, _requestedHeight);
        }
        else
        {
            _webCamTexture = new WebCamTexture(requestedName, _requestedWidth, _requestedHeight, _requestedFPS);
        }
    }

    // Record the device descriptor behind the texture we just created.
    _webCamDevice = WebCamTexture.devices.FirstOrDefault(x => x.name == _webCamTexture.deviceName);
}
// Use this for initialization: connect the default webcam feed to the RawImage material.
void Start()
{
    var feed = new WebCamTexture();
    webCamTexture = feed;
    rawImage.material.mainTexture = feed;
    feed.Play();
}
/// <summary>
/// Processes the current camera texture.
/// This function is intended to be overridden by sub-classes.
/// </summary>
/// <param name="input">Input WebCamTexture object (the live camera feed).</param>
/// <param name="output">Output Texture2D object; passed by reference so implementations can re-use its buffer.</param>
/// <returns>True if anything has been processed, false if output didn't change.</returns>
protected abstract bool ProcessTexture(WebCamTexture input, ref Texture2D output);
// Constructs the camera wrapper: stores the camera id and opens a
// default-parameter WebCamTexture feed.
internal JMCamera(int camId)
{
    _camTex = new WebCamTexture();
    _camId = camId;
}
// Grabs the latest frame from the camera and runs the measurement pipeline on it.
public void Process(WebCamTexture texture)
{
    // Copy the current frame's pixels into the shared buffer.
    imageBuffer = texture.GetPixels();
    // NOTE(review): order matters — GetFocusLocation/ReadHeartRate presumably
    // consume the imageBuffer filled above; confirm before reordering.
    GetFocusLocation();
    ReadHeartRate();
}
/// <summary>
/// Initializes the webcam texture by coroutine.
/// Requests camera permission on iOS/Android first, then resolves the device
/// (numeric name treated as a device index, then exact name match, then
/// requested facing direction, then the first available device) and waits
/// until the texture delivers its first real frame before invoking OnInited().
/// </summary>
private IEnumerator _Initialize()
{
    // Re-initialization: tear down any previous session first.
    if (hasInitDone)
    {
        Dispose();
    }

    isInitWaiting = true;

    // Checks camera permission state.
#if UNITY_IOS && UNITY_2018_1_OR_NEWER
    UserAuthorization mode = UserAuthorization.WebCam;
    if (!Application.HasUserAuthorization(mode))
    {
        isUserRequestingPermission = true;
        yield return Application.RequestUserAuthorization(mode);

        // The permission dialog pauses the app; wait a short grace period after
        // resuming so HasUserAuthorization reflects the user's answer.
        float timeElapsed = 0;
        while (isUserRequestingPermission)
        {
            if (timeElapsed > 0.25f)
            {
                isUserRequestingPermission = false;
                break;
            }
            timeElapsed += Time.deltaTime;
            yield return null;
        }
    }

    if (!Application.HasUserAuthorization(mode))
    {
        if (fpsMonitor != null)
        {
            fpsMonitor.consoleText = "Camera permission is denied.";
        }
        isInitWaiting = false;
        yield break;
    }
#elif UNITY_ANDROID && UNITY_2018_3_OR_NEWER
    string permission = UnityEngine.Android.Permission.Camera;
    if (!UnityEngine.Android.Permission.HasUserAuthorizedPermission(permission))
    {
        isUserRequestingPermission = true;
        UnityEngine.Android.Permission.RequestUserPermission(permission);

        // Same grace-period wait as on iOS (see above).
        float timeElapsed = 0;
        while (isUserRequestingPermission)
        {
            if (timeElapsed > 0.25f)
            {
                isUserRequestingPermission = false;
                break;
            }
            timeElapsed += Time.deltaTime;
            yield return null;
        }
    }

    if (!UnityEngine.Android.Permission.HasUserAuthorizedPermission(permission))
    {
        if (fpsMonitor != null)
        {
            fpsMonitor.consoleText = "Camera permission is denied.";
        }
        isInitWaiting = false;
        yield break;
    }
#endif

    // Creates the camera
    var devices = WebCamTexture.devices;
    if (!String.IsNullOrEmpty(requestedDeviceName))
    {
        int requestedDeviceIndex = -1;
        if (Int32.TryParse(requestedDeviceName, out requestedDeviceIndex))
        {
            // A purely numeric "name" is interpreted as an index into the device list.
            if (requestedDeviceIndex >= 0 && requestedDeviceIndex < devices.Length)
            {
                webCamDevice = devices[requestedDeviceIndex];
                webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, requestedFPS);
            }
        }
        else
        {
            // Otherwise look for an exact device-name match.
            for (int cameraIndex = 0; cameraIndex < devices.Length; cameraIndex++)
            {
                if (devices[cameraIndex].name == requestedDeviceName)
                {
                    webCamDevice = devices[cameraIndex];
                    webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, requestedFPS);
                    break;
                }
            }
        }
        if (webCamTexture == null)
        {
            Debug.Log("Cannot find camera device " + requestedDeviceName + ".");
        }
    }

    if (webCamTexture == null)
    {
        // Checks how many and which cameras are available on the device
        for (int cameraIndex = 0; cameraIndex < devices.Length; cameraIndex++)
        {
#if UNITY_2018_3_OR_NEWER
            // Skip ColorAndDepth devices: they can't be driven as a plain color WebCamTexture.
            if (devices[cameraIndex].kind != WebCamKind.ColorAndDepth && devices[cameraIndex].isFrontFacing == requestedIsFrontFacing)
#else
            if (devices[cameraIndex].isFrontFacing == requestedIsFrontFacing)
#endif
            {
                webCamDevice = devices[cameraIndex];
                webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, requestedFPS);
                break;
            }
        }
    }

    if (webCamTexture == null)
    {
        // Last resort: fall back to the first device, regardless of facing.
        if (devices.Length > 0)
        {
            webCamDevice = devices[0];
            webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, requestedFPS);
        }
        else
        {
            Debug.LogError("Camera device does not exist.");
            isInitWaiting = false;
            yield break;
        }
    }

    // Starts the camera
    webCamTexture.Play();

    while (true)
    {
        if (webCamTexture.didUpdateThisFrame)
        {
            Debug.Log("name:" + webCamTexture.deviceName + " width:" + webCamTexture.width + " height:" + webCamTexture.height + " fps:" + webCamTexture.requestedFPS);
            Debug.Log("videoRotationAngle:" + webCamTexture.videoRotationAngle + " videoVerticallyMirrored:" + webCamTexture.videoVerticallyMirrored + " isFrongFacing:" + webCamDevice.isFrontFacing);

            screenOrientation = Screen.orientation;
            screenWidth = Screen.width;
            screenHeight = Screen.height;
            isInitWaiting = false;
            hasInitDone = true;

            OnInited();
            break;
        }
        else
        {
            // Wait one frame for the first real camera frame.
            // (Was "yield return(0)": an int literal is boxed every frame;
            // "yield return null" is the idiomatic one-frame wait.)
            yield return null;
        }
    }
}
// Cache the shared camera feed from the parent initializer and the AR root transform.
void Start()
{
    _webCamTexture = GetComponentInParent <InitWebCamera>().GetCamera();
    _ARRootTransform = GameObject.FindWithTag("ARRoot").transform;
}
/// <summary>
/// Coroutine that selects a camera (preferring a back-facing device, falling
/// back to the first available one), starts its feed, and detects whether the
/// camera sensor is mounted rotated by 180 degrees (e.g. Nexus 5X).
/// </summary>
protected IEnumerator Initialize()
{
    // Waiting for a frame can help on some devices, especially when initializing the camera when returning from background
    yield return null;

    WebCamDevice? selectedDevice = null;

    // First search for a back-facing device
    foreach (var device in WebCamTexture.devices)
    {
        if (!device.isFrontFacing)
        {
            selectedDevice = device;
            break;
        }
    }

    // If no back-facing device was found, search again for a front facing device
    if (selectedDevice == null)
    {
        if (WebCamTexture.devices.Length > 0)
        {
            selectedDevice = WebCamTexture.devices[0];
        }
    }

    if (selectedDevice != null)
    {
        _feed = new WebCamTexture(selectedDevice.Value.name, FrameWidth, FrameHeight);
        _feed.Play();
    }

    if (_feed == null)
    {
        Debug.LogError("Could not find any cameras on the device.");
        // BUGFIX: stop here instead of falling through — the code below
        // dereferences _feed (videoRotationAngle) and would throw a
        // NullReferenceException when no camera exists.
        yield break;
    }

    ResetBuffers(FrameWidth, FrameHeight, 4);

    // Wait a frame before getting the camera rotation, otherwise it might not be initialized yet
    yield return null;

    if (Application.platform == RuntimePlatform.Android)
    {
        // Compare the reported rotation angle against the value expected for
        // each screen orientation to detect a 180-degree-mounted sensor.
        bool rotatedSensor = false;
        switch (Screen.orientation)
        {
        case ScreenOrientation.Portrait:
        {
            rotatedSensor = _feed.videoRotationAngle == 270;
            break;
        }

        case ScreenOrientation.LandscapeLeft:
        {
            rotatedSensor = _feed.videoRotationAngle == 180;
            break;
        }

        case ScreenOrientation.LandscapeRight:
        {
            rotatedSensor = _feed.videoRotationAngle == 0;
            break;
        }

        case ScreenOrientation.PortraitUpsideDown:
        {
            rotatedSensor = _feed.videoRotationAngle == 90;
            break;
        }
        }

        if (rotatedSensor)
        {
            // Normally, we use InvertedFrame = true, because textures in Unity are mirrored vertically, when compared with the ones the camera provides.
            // However, when we detect that the camera sensor is rotated by 180 degrees, as is the case for the Nexus 5X for example,
            // We turn off inverted frame and enable mirrored frame, which has the effect of rotating the frame upside down.
            // We use the MirroredFrame property and not the EnableMirroring property because the first one actually changes the data that
            // is being processed, while the second one only changes how the frame is rendered, leaving the frame data intact.
            // WikitudeCam.InvertedFrame = false;
            // WikitudeCam.MirroredFrame = true;
        }
    }
}
// Use this for initialization: open the default camera and start streaming into `tex`.
void Start()
{
    tex = new WebCamTexture();
    tex.Play();
}
/// <summary>
/// Returns the width-to-height aspect ratio of the given webcam feed.
/// </summary>
public static float GetRatio(WebCamTexture webcam)
{
    float width = webcam.width;
    float height = webcam.height;
    return width / height;
}
/**
 * Converts a Unity WebCamTexture to an OpenCV Mat.
 * <p>
 * <br>This function converts a Unity WebCamTexture image to an OpenCV Mat.
 * <br>The output Mat object has to be of the same size as the input WebCamTexture (width * height).
 * <br>The output Mat object has to be of type 'CV_8UC4' (RGBA), 'CV_8UC3' (RGB) or 'CV_8UC1' (GRAY).
 *
 * @param webCamTexture the source camera texture; must not be null.
 * @param mat The output Mat object has to be of the same size as the input WebCamTexture (width * height)
 * and of type 'CV_8UC4' (RGBA), 'CV_8UC3' (RGB) or 'CV_8UC1' (GRAY).
 * @param bufferColors Optional array to receive pixel data.
 * You can optionally pass in an array of Color32s to avoid allocating new memory each frame.
 * The array needs to be initialized to a length matching width * height of the texture.(http://docs.unity3d.com/ScriptReference/WebCamTexture.GetPixels32.html)
 */
public static void webCamTextureToMat(WebCamTexture webCamTexture, Mat mat, Color32[] bufferColors = null)
{
    // Validate the arguments up front; use the real parameter names so
    // ArgumentNullException.ParamName is meaningful (the original passed
    // strings like "webCamTexture == null" as the parameter name).
    if (webCamTexture == null)
    {
        throw new ArgumentNullException("webCamTexture");
    }
    if (mat == null)
    {
        throw new ArgumentNullException("mat");
    }
    mat.ThrowIfDisposed();
    if (mat.cols() != webCamTexture.width || mat.rows() != webCamTexture.height)
    {
        throw new ArgumentException("The output Mat object has to be of the same size");
    }

#if (UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_5_0_0 || UNITY_5_0_1))
    if (mat.type() == CvType.CV_8UC4)
    {
        // Fast path on affected iOS/Unity versions: copy straight from the native texture.
        OpenCVForUnity_LowLevelTextureToMat(webCamTexture.GetNativeTexturePtr(), webCamTexture.width, webCamTexture.height, mat.nativeObj);
        // NOTE(review): magic numbers — presumably device type 16 / texture format 80
        // identify a BGRA texture needing a channel swap; confirm against Utils.
        if (Utils.getLowLevelGraphicsDeviceType() == 16 && Utils.getLowLevelTextureFormat(webCamTexture) == 80)
        {
            Imgproc.cvtColor(mat, mat, Imgproc.COLOR_BGRA2RGBA);
        }
        Core.flip(mat, mat, 0);
        return;
    }
#endif

#if UNITY_PRO_LICENSE || ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR) || UNITY_5
    // Pin a managed Color32 buffer and hand its address to the native converter.
    // When the caller supplies bufferColors, re-use it to avoid a per-frame allocation.
    GCHandle colorsHandle;
    if (bufferColors == null)
    {
        Color32[] colors = webCamTexture.GetPixels32();
        colorsHandle = GCHandle.Alloc(colors, GCHandleType.Pinned);
    }
    else
    {
        webCamTexture.GetPixels32(bufferColors);
        colorsHandle = GCHandle.Alloc(bufferColors, GCHandleType.Pinned);
    }
    OpenCVForUnity_TextureToMat(colorsHandle.AddrOfPinnedObject(), mat.nativeObj);
    colorsHandle.Free();
#else
    // Unsupported platform/licence combination: conversion is silently a no-op (original behavior).
    return;
#endif
}
/// <summary>
/// Initializes webcam texture by coroutine.
/// Resolution order for the device: numeric name treated as a device index,
/// then exact name match, then requested facing direction, then the first
/// device found. Invokes OnInited() once the first real frame has arrived.
/// </summary>
private IEnumerator _Initialize()
{
    // Re-initialization: tear down any previous session first.
    if (hasInitDone)
    {
        Dispose();
    }
    isInitWaiting = true;

    // Creates the camera
    if (!String.IsNullOrEmpty(requestedDeviceName))
    {
        int requestedDeviceIndex = -1;
        if (Int32.TryParse(requestedDeviceName, out requestedDeviceIndex))
        {
            // A purely numeric "name" is interpreted as an index into the device list.
            if (requestedDeviceIndex >= 0 && requestedDeviceIndex < WebCamTexture.devices.Length)
            {
                webCamDevice = WebCamTexture.devices [requestedDeviceIndex];
                // requestedFPS <= 0 means "let the driver pick the frame rate".
                if (requestedFPS <= 0)
                {
                    webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight);
                }
                else
                {
                    webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, requestedFPS);
                }
            }
        }
        else
        {
            // Otherwise look for an exact device-name match.
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].name == requestedDeviceName)
                {
                    webCamDevice = WebCamTexture.devices [cameraIndex];
                    if (requestedFPS <= 0)
                    {
                        webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight);
                    }
                    else
                    {
                        webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, requestedFPS);
                    }
                    break;
                }
            }
        }
        if (webCamTexture == null)
        {
            Debug.Log("Cannot find camera device " + requestedDeviceName + ".");
        }
    }
    if (webCamTexture == null)
    {
        // Checks how many and which cameras are available on the device
        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
        {
            if (WebCamTexture.devices [cameraIndex].isFrontFacing == requestedIsFrontFacing)
            {
                webCamDevice = WebCamTexture.devices [cameraIndex];
                if (requestedFPS <= 0)
                {
                    webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight);
                }
                else
                {
                    webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, requestedFPS);
                }
                break;
            }
        }
    }
    if (webCamTexture == null)
    {
        // Last resort: fall back to the first device, regardless of facing.
        if (WebCamTexture.devices.Length > 0)
        {
            webCamDevice = WebCamTexture.devices [0];
            if (requestedFPS <= 0)
            {
                webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight);
            }
            else
            {
                webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, requestedFPS);
            }
        }
        else
        {
            Debug.LogError("Camera device does not exist.");
            isInitWaiting = false;
            yield break;
        }
    }

    // Starts the camera.
    webCamTexture.Play();

    while (true)
    {
        // If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/).
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16)
        {
#else
        if (webCamTexture.didUpdateThisFrame)
        {
#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
            // On Unity 5.2 iOS, pump GetPixels32 until the texture reports its real size.
            while (webCamTexture.width <= 16)
            {
                webCamTexture.GetPixels32();
                yield return(new WaitForEndOfFrame());
            }
#endif
#endif
            Debug.Log("name:" + webCamTexture.deviceName + " width:" + webCamTexture.width + " height:" + webCamTexture.height + " fps:" + webCamTexture.requestedFPS);
            Debug.Log("videoRotationAngle:" + webCamTexture.videoRotationAngle + " videoVerticallyMirrored:" + webCamTexture.videoVerticallyMirrored + " isFrongFacing:" + webCamDevice.isFrontFacing);

            isInitWaiting = false;
            hasInitDone = true;

            OnInited();
            break;
        }
        else
        {
            // Wait a frame and poll again.
            yield return(null);
        }
    }
}
/// <summary>
/// Our main function to process the tracking:
/// 1. If there is no active dragging and no area - it does nothing useful, just renders the image
/// 2. If there is active dragging - it renders the image with the dragging rectangle over it (red color)
/// 3. If there is no dragging, but there is a tracker and it has a result - it draws the image with the tracked object rect over it (green color)
/// </summary>
/// <param name="input">Live camera texture to process.</param>
/// <param name="output">Texture the rendered frame is written to; its buffer is re-used when non-null.</param>
/// <returns>Always true: the output texture is re-rendered every frame.</returns>
protected override bool ProcessTexture(WebCamTexture input, ref Texture2D output)
{
    Mat image = Unity.TextureToMat(input, TextureParameters);
    // Track on a downscaled copy to keep the tracker fast.
    Mat downscaled = image.Resize(Size.Zero, downScale, downScale);

    // screen space -> image space
    Vector2 sp = ConvertToImageSpace(startPoint, image.Size());
    Vector2 ep = ConvertToImageSpace(endPoint, image.Size());
    Point location = new Point(Math.Min(sp.x, ep.x), Math.Min(sp.y, ep.y));
    Size size = new Size(Math.Abs(ep.x - sp.x), Math.Abs(ep.y - sp.y));
    var areaRect = new OpenCvSharp.Rect(location, size);
    Rect2d obj = Rect2d.Empty;

    // If not dragging - show the tracking data
    if (!isDragging)
    {
        // drop the tracker if the frame's size has changed; this is necessary as the tracker doesn't handle it well
        if (frameSize.Height != 0 && frameSize.Width != 0 && downscaled.Size() != frameSize)
        {
            DropTracking();
        }

        // we have no tracker yet - let's initialize one
        if (null == tracker)
        {
            // but only if we have a big enough "area of interest"; this check avoids "tracking" some 1x2 pixel areas
            if ((ep - sp).magnitude >= minimumAreaDiagonal)
            {
                obj = new Rect2d(areaRect.X, areaRect.Y, areaRect.Width, areaRect.Height);

                // initialize the tracker with the current image and the given rect; one can play with tracker types here
                tracker = Tracker.Create(TrackerTypes.MedianFlow);
                tracker.Init(downscaled, obj);
                frameSize = downscaled.Size();
            }
        }
        // if we already have an active tracker - try to update it with the new frame and check whether it still tracks the object
        else
        {
            if (!tracker.Update(downscaled, ref obj))
            {
                obj = Rect2d.Empty;
            }
        }

        // save the tracked object's location
        if (0 != obj.Width && 0 != obj.Height)
        {
            areaRect = new OpenCvSharp.Rect((int)obj.X, (int)obj.Y, (int)obj.Width, (int)obj.Height);
        }
    }

    // render the rect we've tracked, or the one being drawn by the user
    if (isDragging || (null != tracker && obj.Width != 0))
    {
        // the rect is in downscaled coordinates - scale it back up for drawing on the full image
        Cv2.Rectangle((InputOutputArray)image, areaRect * (1.0 / downScale), isDragging? Scalar.Red : Scalar.LightGreen);
    }

    // result; passing the output texture as a parameter allows re-use of its buffer
    // should the output texture be null, a new texture will be created
    output = Unity.MatToTexture(image, output);
    return(true);
}
// Intentionally empty managed body for WebCamTexture.Stop.
// NOTE(review): presumably a decompiled / reference-assembly stub whose real
// implementation lives in the native engine — confirm before relying on it.
private static void INTERNAL_CALL_Stop(WebCamTexture self){}
// Creates the webcam texture on startup. Note: Play() is NOT called here —
// presumably the feed is started elsewhere; confirm against the callers.
public void Start()
{
    webcamTexture = new WebCamTexture();
}