private void Run() { Debug.Log("Run"); if (webCamTextureToMatHelper == null) { //webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>(); //webCamTextureToMatHelper = GetComponent<WebCamTextureToMatHelper>(); //webCamTextureToMatHelper = new WebCamTextureToMatHelper(); webCamTextureToMatHelper = gameObject.AddComponent <WebCamTextureToMatHelper>() as WebCamTextureToMatHelper; } live2DModel.textureFiles = new Texture2D[texture_filepath.Length]; for (int i = 0; i < texture_filepath.Length; i++) { if (string.IsNullOrEmpty(texture_filepath[i])) { continue; } Texture2D tex = new Texture2D(2, 2); tex.LoadImage(File.ReadAllBytes(texture_filepath[i])); live2DModel.textureFiles[i] = tex; } if (!string.IsNullOrEmpty(shizuku_moc_filepath)) { live2DModel.setMocFileFromBytes(File.ReadAllBytes(shizuku_moc_filepath)); } if (!string.IsNullOrEmpty(shizuku_physics_filepath)) { live2DModel.setPhysicsFileFromBytes(File.ReadAllBytes(shizuku_physics_filepath)); } if (!string.IsNullOrEmpty(shizuku_pose_filepath)) { live2DModel.setPoseFileFromBytes(File.ReadAllBytes(shizuku_pose_filepath)); } Debug.Log(shape_predictor_68_face_landmarks_dat_filepath); faceLandmarkDetector = new FaceLandmarkDetector(shape_predictor_68_face_landmarks_dat_filepath); Debug.Log(faceLandmarkDetector); frontalFaceParam = new FrontalFaceParam(); webCamTextureToMatHelper.Initialize(null, webCamTextureToMatHelper.requestWidth2(), webCamTextureToMatHelper.requestHeight2(), !webCamTextureToMatHelper.requestIsFrontFacing2()); webCamTextureToMatHelper.onInitialized.AddListener(OnWebCamTextureToMatHelperInited); webCamTextureToMatHelper.Initialize(null, webCamTextureToMatHelper.requestWidth2(), webCamTextureToMatHelper.requestHeight2(), !webCamTextureToMatHelper.requestIsFrontFacing2()); isInitedWCT2M = false; webCamTextureToMatHelper.onInitialized.AddListener(OnWebCamTextureToMatHelperInited); //webCamTextureToMatHelper.Initialize (); }
void Start()
{
    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
    webCamTextureToMatHelper.Initialize();

    frame = new Mat();
    img_orig = new Mat();
    img_lab = new Mat();
    img_gray = new Mat();
    img_edges = new Mat();
    img_segmented = new Mat();
    drawing = new Mat();
    imgLab = new Mat();
    // alignment = ALIGNMENT.DRAWING;

    nowRectPoints = new List<Point>();
    dstRectPoints = new List<Point>();
    dstRectPoints.Add(new Point(0, 0));
    dstRectPoints.Add(new Point(1120, 0));
    dstRectPoints.Add(new Point(1120, 860));
    dstRectPoints.Add(new Point(0, 860));

    gameObject.GetComponent<Renderer>().enabled = showTextureOnScreen;
}
// Start is called before the first frame update
void Start()
{
    camera = GameObject.FindWithTag("MainCamera");
    //cube = GameObject.FindWithTag("Player");
    sphereColor = GameObject.FindWithTag("GameController");
    fireEffect = GameObject.FindWithTag("Finish");
    audioSmoke = GameObject.FindWithTag("Sound").GetComponent<AudioSource>();
    buttonsTransforms = gameObject.transform.GetChild(0).GetComponentsInChildren<Transform>();

    InteractionManager.InteractionSourceDetected += InteractionManager_InteractionSourceDetected;
    InteractionManager.InteractionSourceUpdated += InteractionManager_InteractionSourceUpdated;
    InteractionManager.InteractionSourceLost += InteractionManager_InteractionSourceLost;

    HidePlane();

    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
    webCamTextureToMatHelper.Initialize();

    detector = new ColorBlobDetector();
    spectrumMat = new Mat();
    blobColorHsv = new Scalar(255);
    SPECTRUM_SIZE = new Size(200, 64);
    CONTOUR_COLOR = new Scalar(255, 0, 0, 255);
    CONTOUR_COLOR_WHITE = new Scalar(255, 255, 255, 255);
    BIGGEST_CONTOUR_COLOR = new Scalar(0, 255, 0, 255);

    // set color in image
    Scalar hand_color = new Scalar(16, 92, 177, 0);
    detector.SetHsvColor(hand_color);
    Imgproc.resize(detector.GetSpectrum(), spectrumMat, SPECTRUM_SIZE);
}
// Use this for initialization
void Start()
{
    // fpsMonitor = GetComponent<FpsMonitor> ();
    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();

#if UNITY_ANDROID && !UNITY_EDITOR
    // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
    webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
    webCamTextureToMatHelper.Initialize();

    // Calculates the start position based on the focus square
    if (start_rect != null)
    {
        var corners = new Vector3[4];
        start_rect.GetWorldCorners(corners);
        var center = GetCenter(corners);
        var screen_point = Camera.main.WorldToScreenPoint(center);
        var world_point = Camera.main.ScreenToWorldPoint(center);
        Debug.Log($"WorldCenter: {center}, ScreenCenter: {screen_point}, World: {world_point}");
        ball_tracker.SetStartPosition(world_point);
    }
    else
    {
        Enable();
    }

    // Event bindings
    Events.ON_START_DETECTING.AddListener(this.Enable);
}
void Start()
{
    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
    webCamTextureToMatHelper.Initialize();

    Run();
}
// Use this for initialization
void Start()
{
    GameObject cameraAR = GameObject.Find("ARCamera");
    ARCamera = cameraAR.GetComponent<Camera>();

    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();

#if UNITY_ANDROID && !UNITY_EDITOR
    // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
    webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
    webCamTextureToMatHelper.Initialize();

    Texture2D originalTexture = LoadTexture2D();
    Texture2D transparentTexture = GetTransparentTexture(originalTexture);

    RawImage imageTransparent = FindObjectOfType<RawImage>();
    imageTransparent.texture = transparentTexture;

    Canvas canvas = FindObjectOfType<Canvas>();
    RectTransform canvasRectTransform = canvas.GetComponent<RectTransform>();
    imageTransparent.rectTransform.sizeDelta = new Vector2(canvasRectTransform.rect.width, canvasRectTransform.rect.height);
}
private void Start()
{
    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
    webCamTextureToMatHelper.Initialize();

    grayMat = new Mat();
    makerGrayMat = new Mat(originMakerTexture.height, originMakerTexture.width, CvType.CV_8UC1);
    makerTexture = new Texture2D(originMakerTexture.width, originMakerTexture.height);
    Graphics.CopyTexture(originMakerTexture, makerTexture);

    detector = ORB.create();
    extractor = ORB.create();

    // Get the key points and descriptors of the marker image.
    makerMat = new Mat(originMakerTexture.height, originMakerTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(makerTexture, makerMat, false);
    makerKeyPoints = new MatOfKeyPoint();
    makerDescriptors = new Mat();
    Imgproc.cvtColor(makerMat, makerGrayMat, Imgproc.COLOR_BGR2GRAY);
    detector.detect(makerGrayMat, makerKeyPoints);
    extractor.compute(makerGrayMat, makerKeyPoints, makerDescriptors);

    matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
}
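// A minimal per-frame matching sketch to complement the setup above. The MatchFrame name and
// the assumption that the current camera frame is already grayscale are illustrative; only
// standard OpenCVForUnity calls (ORB detect/compute, DescriptorMatcher.match) are used, and
// the field names come from the example above.
private void MatchFrame(Mat frameGray)
{
    MatOfKeyPoint frameKeyPoints = new MatOfKeyPoint();
    Mat frameDescriptors = new Mat();

    // Detect and describe features in the live frame with the same ORB instances.
    detector.detect(frameGray, frameKeyPoints);
    extractor.compute(frameGray, frameKeyPoints, frameDescriptors);

    if (frameDescriptors.empty())
        return;

    // Match the frame descriptors against the precomputed marker descriptors.
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(frameDescriptors, makerDescriptors, matches);

    Debug.Log("matches: " + matches.toArray().Length);
}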
void Start()
{
    graph_filepath = Utils.getFilePath("dnn/graph1.pb");

    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
    webCamTextureToMatHelper.Initialize();

    Mat img = webCamTextureToMatHelper.GetMat();

    if (!string.IsNullOrEmpty(graph_filepath))
    {
        net = Dnn.readNetFromTensorflow(graph_filepath);
    }
    if (net == null)
    {
        Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
    }

    // Adjust the orthographic camera size so that the camera image fills the screen.
    float imageWidth = img.width();
    float imageHeight = img.height();
    float widthScale = (float)Screen.width / imageWidth;
    float heightScale = (float)Screen.height / imageHeight;
    if (widthScale < heightScale)
    {
        Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
    }
    else
    {
        Camera.main.orthographicSize = imageHeight / 2;
    }
}
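// A rough inference sketch for the TensorFlow graph loaded above. The 224x224 input size,
// the 1/255 scale factor and the zero mean are placeholders, not values taken from graph1.pb;
// Dnn.blobFromImage, Net.setInput and Net.forward are standard calls, everything else is
// illustrative.
private Mat RunInference(Mat rgbaMat)
{
    Mat bgrMat = new Mat();
    Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);

    // Pack the frame into a 4D blob and run a forward pass.
    Mat blob = Dnn.blobFromImage(bgrMat, 1.0 / 255.0, new Size(224, 224), new Scalar(0, 0, 0), true, false);
    net.setInput(blob);
    Mat output = net.forward();

    blob.Dispose();
    bgrMat.Dispose();
    return output;
}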
void Start()
{
    Input.backButtonLeavesApp = true;

    _fpsMonitor = GetComponent<FpsMonitor>();
    _webCamTextureToMatHelper = GetComponent<WebCamTextureToMatHelper>();
    _webCamTextureToMatHelper.Initialize();

    _fire = Instantiate(_firePrefab, new Vector3(0f, 0f, FIRE_Z_POS), Quaternion.identity);
    _fire.transform.localScale = new Vector3(FIRE_SCALE, FIRE_SCALE, FIRE_SCALE);
}
// Use this for initialization
void Start()
{
    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
    detector = new QRCodeDetector();

#if UNITY_ANDROID && !UNITY_EDITOR
    // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
    webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
    webCamTextureToMatHelper.Initialize();
}
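// A hedged sketch of how the detector created above is typically driven each frame.
// IsPlaying/DidUpdateThisFrame/GetMat come from WebCamTextureToMatHelper and detectAndDecode
// from QRCodeDetector; the grayscale conversion and placing this in Update are assumptions.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        Mat grayMat = new Mat();
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        // points receives the four corners of the detected QR code, if any.
        Mat points = new Mat();
        string decoded = detector.detectAndDecode(grayMat, points);
        if (!string.IsNullOrEmpty(decoded))
            Debug.Log("QR code: " + decoded);

        points.Dispose();
        grayMat.Dispose();
    }
}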
void Start()
{
    _WebCamTextureToMatHelper.onInitialized.AddListener(OnWebCamTextureToMatHelperInitialized);
    _WebCamTextureToMatHelper.onDisposed.AddListener(OnWebCamTextureToMatHelperDisposed);
    _WebCamTextureToMatHelper.onErrorOccurred.AddListener(OnWebCamTextureToMatHelperErrorOccurred);

#if UNITY_ANDROID && !UNITY_EDITOR
    // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
    _WebCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
    _WebCamTextureToMatHelper.Initialize();
}
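// Minimal companion handlers for the listeners registered above, following the common
// OpenCVForUnity example pattern: on initialization the camera Mat is mirrored into a
// Texture2D shown by this object's Renderer, and on disposal the texture is released.
// The _texture field and the use of Utils.matToTexture2D here are assumptions of this sketch.
private Texture2D _texture;

public void OnWebCamTextureToMatHelperInitialized()
{
    Mat webCamTextureMat = _WebCamTextureToMatHelper.GetMat();

    _texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(webCamTextureMat, _texture);
    gameObject.GetComponent<Renderer>().material.mainTexture = _texture;
}

public void OnWebCamTextureToMatHelperDisposed()
{
    if (_texture != null)
    {
        Texture2D.Destroy(_texture);
        _texture = null;
    }
}

public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
{
    Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}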
// Use this for initialization
IEnumerator Start()
{
    yield return StartCoroutine(SwitchToVR());

    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();

#if UNITY_ANDROID && !UNITY_EDITOR
    // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
    webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
    webCamTextureToMatHelper.Initialize();
}
void Start()
{
    fpsMonitor = GetComponent<FpsMonitor>();

    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();

#if UNITY_ANDROID && !UNITY_EDITOR
    // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
    webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
    webCamTextureToMatHelper.Initialize();

    m_MyAudioSource = GetComponent<AudioSource>();
}
public void Run()
{
    //anim = GetComponent<Animator>();

    //initialize FaceTracker
    faceTracker = new FaceTracker(tracker_model_json_filepath);
    //initialize FaceTrackerParams
    faceTrackerParams = new FaceTrackerParams();

    cascade = new CascadeClassifier();
    cascade.load(haarcascade_frontalface_alt_xml_filepath);
    // if (cascade.empty())
    // {
    //     Debug.LogError("cascade file is not loaded. Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    // }

#if UNITY_ANDROID && !UNITY_EDITOR
    // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
    webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
    webCamTextureToMatHelper.Initialize();

    if (SpeechRecognizer.ExistsOnDevice())
    {
        resultText.text = "I am running run";
        SpeechRecognizerListener listener = GameObject.FindObjectOfType<SpeechRecognizerListener>();
        listener.onAuthorizationStatusFetched.AddListener(OnAuthorizationStatusFetched);
        listener.onAvailabilityChanged.AddListener(OnAvailabilityChange);
        listener.onErrorDuringRecording.AddListener(OnError);
        listener.onErrorOnStartRecording.AddListener(OnError);
        listener.onFinalResults.AddListener(OnFinalResult);
        listener.onPartialResults.AddListener(OnPartialResult);
        listener.onEndOfSpeech.AddListener(OnEndOfSpeech);
        //startRecordingButton.enabled = false;
        SpeechRecognizer.RequestAccess();
        SpeechRecognizer.StartRecording(true);
        resultText.text = "Say something :-)";
    }
    else
    {
        resultText.text = "Sorry, but this device doesn't support speech recognition";
        Debug.Log("Next Command is crossfade from run function");
        //GameObject.FindGameObjectWithTag("twohand)").GetComponent<Animator>().CrossFade("V", -1);
        //startRecordingButton.enabled = false;
    }
}
// Use this for initialization
void Run()
{
    //if true, the error log of the Native side OpenCV will be displayed on the Unity Editor Console.
    Utils.setDebugMode(true);

    if (!string.IsNullOrEmpty(classes))
    {
        classNames = readClassNames(classes_filepath);
        if (classNames == null)
        {
            Debug.LogError(classes_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
        }
    }
    else if (classesList.Count > 0)
    {
        classNames = classesList;
    }

    if (string.IsNullOrEmpty(config_filepath) || string.IsNullOrEmpty(model_filepath))
    {
        Debug.LogError(config_filepath + " or " + model_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
    }
    else
    {
        //! [Initialize network]
        net = Dnn.readNet(model_filepath, config_filepath);
        //! [Initialize network]

        outBlobNames = getOutputsNames(net);
        //for (int i = 0; i < outBlobNames.Count; i++)
        //{
        //    Debug.Log("names [" + i + "] " + outBlobNames[i]);
        //}

        outBlobTypes = getOutputsTypes(net);
        //for (int i = 0; i < outBlobTypes.Count; i++)
        //{
        //    Debug.Log("types [" + i + "] " + outBlobTypes[i]);
        //}
    }

    webCamTextureToMatHelper.Initialize();
}
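// A hedged sketch of the forward pass that the setup above prepares for. blobFromImage,
// setInput and the multi-output forward overload are standard Dnn/Net calls; the 416x416
// input size, the 1/255 scale factor and the Forward method itself are placeholders.
private List<Mat> Forward(Mat bgrMat)
{
    Mat blob = Dnn.blobFromImage(bgrMat, 1.0 / 255.0, new Size(416, 416), new Scalar(0, 0, 0), true, false);
    net.setInput(blob);

    // Collect one output Mat per output layer name gathered in Run().
    List<Mat> outs = new List<Mat>();
    net.forward(outs, outBlobNames);

    blob.Dispose();
    return outs;
}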
// Use this for initialization
void Start()
{
    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();

    webCamTextureToMatHelper.onInitialized.AddListener(() => { OnWebCamTextureToMatHelperInitialized(); });
    webCamTextureToMatHelper.onErrorOccurred.AddListener((WebCamTextureToMatHelper.ErrorCode errorCode) => { OnWebCamTextureToMatHelperErrorOccurred(errorCode); });
    webCamTextureToMatHelper.onDisposed.AddListener(() => { OnWebCamTextureToMatHelperDisposed(); });

    webCamTextureToMatHelper.Initialize();
}
IEnumerator Start()
{
    Input.backButtonLeavesApp = true;
    Screen.sleepTimeout = SleepTimeout.NeverSleep;

    _detector = GetComponent<FaceDetector>();
    _apiManager = GetComponent<FaceApiManager>();
    _toMatHelperMgr = GetComponent<WebCamTextureToMatHelperManager>();
    _toMatHelper = GetComponent<WebCamTextureToMatHelper>();

    // Get the image size after the camera and related components finish initializing.
    _toMatHelper.Initialize();
    yield return WaitInitialization();
    var imgSize = new Size(_toMatHelper.GetWidth(), _toMatHelper.GetHeight());

    _zeroMat = new ZeroMat(imgSize);
    _volume.profile.TryGetSettings(out _distortion);
}
private void Run()
{
    // Set the 3D face model object points.
    objectPoints = new MatOfPoint3f(
        new Point3(-31, 72, 86),   //l eye
        new Point3(31, 72, 86),    //r eye
        new Point3(0, 40, 114),    //nose
        new Point3(-20, 15, 90),   //l mouth
        //new Point3(-22, 17, 90), //l mouth
        new Point3(20, 15, 90),    //r mouth
        //new Point3(22, 17, 90),  //r mouth
        new Point3(-69, 76, -2),   //l ear
        new Point3(69, 76, -2)     //r ear
    );
    imagePoints = new MatOfPoint2f();
    rotMat = new Mat(3, 3, CvType.CV_64FC1);

    // Load the shape predictor (.dat) file.
    faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);

    // Get the camera Mat via the helper.
    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
    webCamTextureToMatHelper.Initialize();
}
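// A sketch of the pose estimation that the object/image point setup above feeds into.
// Calib3d.solvePnP and Calib3d.Rodrigues are standard calls; the EstimateHeadPose method and
// its camera matrix / distortion parameters are illustrative assumptions (a real camera
// matrix is usually built from the frame size and an assumed focal length).
private void EstimateHeadPose(Mat camMatrix, MatOfDouble distCoeffs)
{
    Mat rvec = new Mat();
    Mat tvec = new Mat();

    // imagePoints must already contain the 2D landmarks in the same order as objectPoints.
    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

    // Convert the rotation vector into the 3x3 rotation matrix declared in Run().
    Calib3d.Rodrigues(rvec, rotMat);
}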
void Start()
{
    Input.backButtonLeavesApp = true;
    Screen.sleepTimeout = SleepTimeout.NeverSleep;

    _toMatHelper = GetComponent<WebCamTextureToMatHelper>();
    _invCvtr = new InvisibleConverter(_text);
    _cameraSwitcher = new CameraSwitcher(_toMatHelper, _invCvtr, _mainCanvas, _camSwitchDialog);
    _fpsMonitor = GetComponent<FpsMonitor>();
    _recordSound = GetComponent<AudioSource>();

    // Read the rear/front camera setting from PlayerPrefs.
    _toMatHelper.requestedIsFrontFacing = _cameraSwitcher.UseCamera;
    _toMatHelperMgr = new ToMatHelperManager(gameObject, _toMatHelper, _fpsMonitor);
    _toMatHelper.Initialize();

    // On mobile, wait the specified number of seconds for the camera to start.
#if !UNITY_EDITOR && UNITY_ANDROID
    Task.Run(WaitCamStartup).Wait();
#endif
}
public void OnChangeCameraButtonClick()
{
    // Re-initialize the helper with the opposite (front/back) camera while keeping the requested resolution.
    webCamTextureToMatHelper.Initialize(null, webCamTextureToMatHelper.requestedWidth, webCamTextureToMatHelper.requestedHeight, !webCamTextureToMatHelper.requestedIsFrontFacing);
}
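// For completeness, the other button handlers that usually accompany OnChangeCameraButtonClick
// in WebCamTextureToMatHelper-based examples. Play, Pause and Stop are existing helper methods,
// but wiring them to UI buttons like this is an assumption of this sketch.
public void OnPlayButtonClick()
{
    webCamTextureToMatHelper.Play();
}

public void OnPauseButtonClick()
{
    webCamTextureToMatHelper.Pause();
}

public void OnStopButtonClick()
{
    webCamTextureToMatHelper.Stop();
}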