Example #1
    // Start is called before the first frame update
    void Start()
    {
        camera = GameObject.FindWithTag("MainCamera");
        //cube = GameObject.FindWithTag("Player");
        sphereColor       = GameObject.FindWithTag("GameController");
        fireEffect        = GameObject.FindWithTag("Finish");
        audioSmoke        = GameObject.FindWithTag("Sound").GetComponent <AudioSource>();
        buttonsTransforms = gameObject.transform.GetChild(0).GetComponentsInChildren <Transform>();

        InteractionManager.InteractionSourceDetected += InteractionManager_InteractionSourceDetected;
        InteractionManager.InteractionSourceUpdated  += InteractionManager_InteractionSourceUpdated;
        InteractionManager.InteractionSourceLost     += InteractionManager_InteractionSourceLost;

        HidePlane();

        webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper>();

        webCamTextureToMatHelper.Initialize();

        detector              = new ColorBlobDetector();
        spectrumMat           = new Mat();
        blobColorHsv          = new Scalar(255);
        SPECTRUM_SIZE         = new Size(200, 64);
        CONTOUR_COLOR         = new Scalar(255, 0, 0, 255);
        CONTOUR_COLOR_WHITE   = new Scalar(255, 255, 255, 255);
        BIGGEST_CONTOUR_COLOR = new Scalar(0, 255, 0, 255);

        // set color in image
        Scalar hand_color = new Scalar(16, 92, 177, 0);

        detector.SetHsvColor(hand_color);
        Imgproc.resize(detector.GetSpectrum(), spectrumMat, SPECTRUM_SIZE);
    }
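The detector configured in this Start() is normally driven once per frame. The sketch below is an assumed usage, reusing the fields declared for this example and presuming the ColorBlobDetector class exposes Process() and GetContours() as in the OpenCV color-blob sample it is usually based on; none of this code appears in the original excerpt.

    // Minimal per-frame sketch (assumed usage; Process/GetContours come from the
    // ColorBlobDetector sample class, not from this excerpt).
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            // Find blobs matching the HSV color chosen in Start() and outline them.
            detector.Process(rgbaMat);
            List<MatOfPoint> contours = detector.GetContours();
            Imgproc.drawContours(rgbaMat, contours, -1, CONTOUR_COLOR);
        }
    }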
Example #2
    void Start()
    {
        graph_filepath           = Utils.getFilePath("dnn/graph1.pb");
        webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper>();
        webCamTextureToMatHelper.Initialize();
        Mat img = webCamTextureToMatHelper.GetMat();

        if (!string.IsNullOrEmpty(graph_filepath))
        {
            net = Dnn.readNetFromTensorflow(graph_filepath);
        }

        if (net == null)
        {
            Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
        }

        float imageWidth  = img.width();
        float imageHeight = img.height();
        float widthScale  = (float)Screen.width / imageWidth;
        float heightScale = (float)Screen.height / imageHeight;

        if (widthScale < heightScale)
        {
            Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
        }
        else
        {
            Camera.main.orthographicSize = imageHeight / 2;
        }
    }
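The TensorFlow graph loaded above is usually queried frame by frame. The following is a minimal sketch of that inference step; the 128x128 input size, the 1/255 scale factor, and the zero mean are placeholder assumptions, since the original excerpt does not show them.

    void Update()
    {
        if (net == null || !webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
            return;

        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        // Pack the current frame into a 4D blob and run a forward pass.
        // Input size, scale and mean below are placeholders, not values from graph1.pb.
        using (Mat blob = Dnn.blobFromImage(rgbaMat, 1.0 / 255.0, new Size(128, 128), new Scalar(0, 0, 0), true, false))
        {
            net.setInput(blob);
            using (Mat prob = net.forward())
            {
                // ... interpret "prob" according to the outputs of graph1.pb ...
            }
        }
    }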
Example #3
    // Use this for initialization
    void Start()
    {
        if (Instance == null)
        {
            Instance = this;
        }

        fpsMonitor = GetComponent <OpenCVForUnityExample.FpsMonitor>();

        webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper>();


        if (!string.IsNullOrEmpty(classes))
        {
            classes_filepath = Utils.getFilePath("dnn/" + classes);
        }
        if (!string.IsNullOrEmpty(config))
        {
            config_filepath = Utils.getFilePath("dnn/" + config);
        }
        if (!string.IsNullOrEmpty(model))
        {
            model_filepath = Utils.getFilePath("dnn/" + model);
        }
        Run();
    }
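Run() is not included in this excerpt. The sketch below shows one plausible implementation under stated assumptions: fields named net and classNames exist, and the model/config pair is in a format Dnn.readNet understands.

    // Assumed sketch of the Run() called above: load the network and class names.
    void Run()
    {
        if (!string.IsNullOrEmpty(model_filepath))
        {
            net = Dnn.readNet(model_filepath, config_filepath);
        }

        if (!string.IsNullOrEmpty(classes_filepath))
        {
            // One class label per line in the classes file.
            classNames = new List<string>(File.ReadAllLines(classes_filepath));
        }

        webCamTextureToMatHelper.Initialize();
    }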
Example #4
    private void Start()
    {
        webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper>();
        webCamTextureToMatHelper.Initialize();

        grayMat      = new Mat();
        makerGrayMat = new Mat(originMakerTexture.height, originMakerTexture.width, CvType.CV_8UC1);

        makerTexture = new Texture2D(originMakerTexture.width, originMakerTexture.height);
        Graphics.CopyTexture(originMakerTexture, makerTexture);

        detector  = ORB.create();
        extractor = ORB.create();

        // Get Key Points of Maker
        makerMat = new Mat(originMakerTexture.height, originMakerTexture.width, CvType.CV_8UC3);
        Utils.texture2DToMat(makerTexture, makerMat, false);
        makerKeyPoints   = new MatOfKeyPoint();
        makerDescriptors = new Mat();

        Imgproc.cvtColor(makerMat, makerGrayMat, Imgproc.COLOR_BGR2GRAY);

        detector.detect(makerGrayMat, makerKeyPoints);
        extractor.compute(makerGrayMat, makerKeyPoints, makerDescriptors);

        matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    }
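Once the marker descriptors are prepared, each camera frame is typically matched against them. The loop below is a minimal sketch reusing the fields from this example; the distance threshold is an arbitrary assumption.

    // Assumed per-frame matching against the marker prepared in Start().
    void Update()
    {
        if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
            return;

        Mat rgbaMat = webCamTextureToMatHelper.GetMat();
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        MatOfKeyPoint frameKeyPoints = new MatOfKeyPoint();
        Mat frameDescriptors = new Mat();
        detector.detect(grayMat, frameKeyPoints);
        extractor.compute(grayMat, frameKeyPoints, frameDescriptors);
        if (frameDescriptors.rows() == 0)
            return;

        // Brute-force Hamming matching (marker -> frame), keeping only close matches.
        MatOfDMatch matches = new MatOfDMatch();
        matcher.match(makerDescriptors, frameDescriptors, matches);
        List<DMatch> good = new List<DMatch>();
        foreach (DMatch m in matches.toArray())
        {
            if (m.distance < 50) // arbitrary threshold
            {
                good.Add(m);
            }
        }
        // ... estimate a homography from "good" if enough matches survive ...
    }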
Example #5
    // Use this for initialization
    void Start()
    {
        // fpsMonitor = GetComponent<FpsMonitor> ();

        webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper> ();

            #if UNITY_ANDROID && !UNITY_EDITOR
        // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
        webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
            #endif
        webCamTextureToMatHelper.Initialize();


        //Calculates the start position based on the focus square
        if (start_rect != null)
        {
            var corners = new Vector3[4];
            start_rect.GetWorldCorners(corners);
            var center       = GetCenter(corners);
            var screen_point = Camera.main.WorldToScreenPoint(center);
            var world_point  = Camera.main.ScreenToWorldPoint(center);

            Debug.Log($"WorldCenter: {center}, ScreenCenter: {screen_point}, World: {world_point}");
            ball_tracker.SetStartPosition(world_point);
        }
        else
        {
            Enable();
        }

        //Event bindings
        Events.ON_START_DETECTING.AddListener(this.Enable);
    }
Example #6
    void Start()
    {
        webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper>();
        webCamTextureToMatHelper.Initialize();

        frame         = new Mat();
        img_orig      = new Mat();
        img_lab       = new Mat();
        img_gray      = new Mat();
        img_edges     = new Mat();
        img_segmented = new Mat();
        drawing       = new Mat();
        imgLab        = new Mat();

        // alignment = ALIGNMENT.DRAWING;
        nowRectPoints = new List <Point>();
        dstRectPoints = new List <Point>();
        dstRectPoints.Add(new Point(0, 0));
        dstRectPoints.Add(new Point(1120, 0));
        dstRectPoints.Add(new Point(1120, 860));
        dstRectPoints.Add(new Point(0, 860));


        gameObject.GetComponent <Renderer>().enabled = showTextureOnScreen;
    }
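dstRectPoints above describes a 1120x860 target rectangle, so the usual next step is to warp a detected quadrilateral (nowRectPoints) onto it. A minimal sketch of that warp, assuming nowRectPoints has been filled with four corner points elsewhere:

    // Assumed use of nowRectPoints/dstRectPoints prepared in Start().
    void WarpToDrawing(Mat src)
    {
        if (nowRectPoints.Count != 4)
            return;

        using (MatOfPoint2f srcQuad = new MatOfPoint2f(nowRectPoints.ToArray()))
        using (MatOfPoint2f dstQuad = new MatOfPoint2f(dstRectPoints.ToArray()))
        using (Mat transform = Imgproc.getPerspectiveTransform(srcQuad, dstQuad))
        {
            // Rectify the region into the 1120x860 "drawing" canvas.
            Imgproc.warpPerspective(src, drawing, transform, new Size(1120, 860));
        }
    }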
Example #7
        // Use this for initialization
        void Start()
        {
            Debug.Log("Start func called !");

            if (webCamTextureToMatHelper == null)
            {
                //webCamTextureToMatHelper = transform.root.gameObject.GetComponent<WebCamTextureToMatHelper>();
                //webCamTextureToMatHelper = GetComponent<WebCamTextureToMatHelper>();
                webCamTextureToMatHelper = gameObject.AddComponent <WebCamTextureToMatHelper>() as WebCamTextureToMatHelper;
            }

            #if UNITY_WEBGL && !UNITY_EDITOR
            webCamTextureToMatHelper.flipHorizontal = true;
            StartCoroutine(getFilePathCoroutine());
            #else
            shape_predictor_68_face_landmarks_dat_filepath = DlibFaceLandmarkDetector.Utils.getFilePath("shape_predictor_68_face_landmarks.dat");
            shizuku_moc_filepath     = OpenCVForUnity.Utils.getFilePath("shizuku/shizuku.moc.bytes");
            shizuku_physics_filepath = OpenCVForUnity.Utils.getFilePath("shizuku/shizuku.physics.json");
            shizuku_pose_filepath    = OpenCVForUnity.Utils.getFilePath("shizuku/shizuku.pose.json");
            for (int i = 0; i < texture_filepath.Length; i++)
            {
                texture_filepath [i] = OpenCVForUnity.Utils.getFilePath("shizuku/shizuku.1024/texture_0" + i + ".png");
            }
            Run();
            #endif

            server = new WebSocketServer(8001);

            msender = new MotionSender();
            Func <MotionSender> fn = getMotionSender;
            server.AddWebSocketService <MotionSender>("/", fn);
            server.Start();
        }
Example #8
    // Use this for initialization
    void Start()
    {
        //anim = GetComponent<Animator>();
        //GameObject.Find("twohand)").transform.localScale = new Vector3(0, 0, 0); //initialize hands to zero scale until a face is recognized
        //speechrec
        if (SpeechRecognizer.ExistsOnDevice())
        {
            resultText.text = "I exist";
        }
        else
        {
            resultText.text = "Sorry, but this device doesn't support speech recognition";
            //anim.transform.localScale = new Vector3(0, 0, 0);
            //GameObject hand = GameObject.FindGameObjectWithTag("(twohand)");
            //GameObject.Find("(twohand)").transform.localScale = new Vector3(0, 0, 0);
            //startRecordingButton.enabled = false;
        }

        //facerec
        webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper> ();

        isAutoResetModeToggle.isOn = isAutoResetMode;

        #if UNITY_WEBGL && !UNITY_EDITOR
        getFilePath_Coroutine = GetFilePath();
        StartCoroutine(getFilePath_Coroutine);
        #else
        tracker_model_json_filepath = Utils.getFilePath("tracker_model.json");
        haarcascade_frontalface_alt_xml_filepath = Utils.getFilePath("haarcascade_frontalface_alt.xml");
        Run();
        #endif
    }
Example #9
    void Start()
    {
        webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper>();
        webCamTextureToMatHelper.Initialize();

        Run();
    }
Example #10
        // Use this for initialization
        void Start()
        {
            GameObject cameraAR = GameObject.Find("ARCamera");

            ARCamera = cameraAR.GetComponent <Camera>();

            webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper>();

#if UNITY_ANDROID && !UNITY_EDITOR
            // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
            webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
            webCamTextureToMatHelper.Initialize();

            Texture2D originalTexture    = LoadTexture2D();
            Texture2D transparentTexture = GetTransparentTexture(originalTexture);

            RawImage imageTransparent = FindObjectOfType <RawImage>();
            imageTransparent.texture = transparentTexture;

            Canvas        canvas = FindObjectOfType <Canvas>();
            RectTransform canvasRectTransform = canvas.GetComponent <RectTransform>();

            imageTransparent.rectTransform.sizeDelta = new Vector2(
                canvasRectTransform.rect.width, canvasRectTransform.rect.height);
        }
Example #11
    // Start is called before the first frame update
    void Start()
    {
        WebCamTextureToMatHelper webCamTextureToMatHelper = quad.GetComponent <WebCamTextureToMatHelper>();

        x = webCamTextureToMatHelper.requestedWidth / 2;
        y = webCamTextureToMatHelper.requestedHeight / 2;
    }
Example #12
    void Start()
    {
        Input.backButtonLeavesApp = true;

        _fpsMonitor = GetComponent <FpsMonitor>();
        _webCamTextureToMatHelper = GetComponent <WebCamTextureToMatHelper>();
        _webCamTextureToMatHelper.Initialize();

        _fire = Instantiate(_firePrefab, new Vector3(0f, 0f, FIRE_Z_POS), Quaternion.identity);
        _fire.transform.localScale = new Vector3(FIRE_SCALE, FIRE_SCALE, FIRE_SCALE);
    }
Example #13
    // Use this for initialization
    void Start()
    {
        webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper>();

        detector = new QRCodeDetector();

        #if UNITY_ANDROID && !UNITY_EDITOR
        // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
        webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
        #endif
        webCamTextureToMatHelper.Initialize();
    }
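The QRCodeDetector created here is normally polled every frame. A minimal sketch of that step is given below; converting to grayscale first is an assumption (detectAndDecode also accepts color input).

    // Assumed per-frame decoding with the detector created in Start().
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            using (Mat grayMat = new Mat())
            using (Mat points = new Mat())
            {
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                string decoded = detector.detectAndDecode(grayMat, points);
                if (!string.IsNullOrEmpty(decoded))
                {
                    Debug.Log("QR code: " + decoded);
                }
            }
        }
    }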
Example #14
    public CameraSwitcher(
        WebCamTextureToMatHelper toMatHelper,
        InvisibleConverter invCvtr,
        GameObject mainCanvas, GameObject camSwitchDialog)
    {
        _toMatHelper     = toMatHelper;
        _invCvtr         = invCvtr;
        _mainCanvas      = mainCanvas;
        _camSwitchDialog = camSwitchDialog;

        UseCamera = Convert.ToBoolean(PlayerPrefs.GetInt(USE_CAMERA_KEY, 0));
    }
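PlayerPrefs is only read in this constructor; a switch action usually writes the choice back. The following is a minimal sketch of such a method under stated assumptions: the class keeps the fields shown above, UseCamera is settable from inside the class, and re-initializing the helper is acceptable where this is called.

    // Assumed companion method: flip the camera choice and persist it.
    public void SwitchCamera()
    {
        UseCamera = !UseCamera;
        PlayerPrefs.SetInt(USE_CAMERA_KEY, Convert.ToInt32(UseCamera));
        PlayerPrefs.Save();

        // Re-initialize the helper with the newly selected camera.
        _toMatHelper.requestedIsFrontFacing = UseCamera;
        _toMatHelper.Initialize();
    }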
Example #15
        // Use this for initialization
        IEnumerator Start()
        {
            yield return(StartCoroutine(SwitchToVR()));


            webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper> ();

            #if UNITY_ANDROID && !UNITY_EDITOR
            // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
            webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
            #endif
            webCamTextureToMatHelper.Initialize();
        }
Example #16
    void Start()
    {
        fpsMonitor = GetComponent <FpsMonitor>();

        webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper>();

#if UNITY_ANDROID && !UNITY_EDITOR
        // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
        webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
        webCamTextureToMatHelper.Initialize();
        m_MyAudioSource = GetComponent <AudioSource>();
    }
Example #17
        private void Run()
        {
            Debug.Log("Run");
            if (webCamTextureToMatHelper == null)
            {
                //webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
                //webCamTextureToMatHelper = GetComponent<WebCamTextureToMatHelper>();
                //webCamTextureToMatHelper = new WebCamTextureToMatHelper();
                webCamTextureToMatHelper = gameObject.AddComponent <WebCamTextureToMatHelper>() as WebCamTextureToMatHelper;
            }
            live2DModel.textureFiles = new Texture2D[texture_filepath.Length];
            for (int i = 0; i < texture_filepath.Length; i++)
            {
                if (string.IsNullOrEmpty(texture_filepath[i]))
                {
                    continue;
                }

                Texture2D tex = new Texture2D(2, 2);
                tex.LoadImage(File.ReadAllBytes(texture_filepath[i]));
                live2DModel.textureFiles[i] = tex;
            }
            if (!string.IsNullOrEmpty(shizuku_moc_filepath))
            {
                live2DModel.setMocFileFromBytes(File.ReadAllBytes(shizuku_moc_filepath));
            }
            if (!string.IsNullOrEmpty(shizuku_physics_filepath))
            {
                live2DModel.setPhysicsFileFromBytes(File.ReadAllBytes(shizuku_physics_filepath));
            }
            if (!string.IsNullOrEmpty(shizuku_pose_filepath))
            {
                live2DModel.setPoseFileFromBytes(File.ReadAllBytes(shizuku_pose_filepath));
            }

            Debug.Log(shape_predictor_68_face_landmarks_dat_filepath);
            faceLandmarkDetector = new FaceLandmarkDetector(shape_predictor_68_face_landmarks_dat_filepath);
            Debug.Log(faceLandmarkDetector);


            frontalFaceParam = new FrontalFaceParam();

            // Register the initialization callback before initializing the helper once.
            isInitedWCT2M = false;
            webCamTextureToMatHelper.onInitialized.AddListener(OnWebCamTextureToMatHelperInited);
            webCamTextureToMatHelper.Initialize(null, webCamTextureToMatHelper.requestWidth2(), webCamTextureToMatHelper.requestHeight2(), !webCamTextureToMatHelper.requestIsFrontFacing2());

            //webCamTextureToMatHelper.Initialize ();
        }
Example #18
    // Use this for initialization
    void Start()
    {
        webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper>();

        webCamTextureToMatHelper.onInitialized.AddListener(() =>
        {
            OnWebCamTextureToMatHelperInitialized();
        });
        webCamTextureToMatHelper.onErrorOccurred.AddListener((WebCamTextureToMatHelper.ErrorCode errorCode) =>
        {
            OnWebCamTextureToMatHelperErrorOccurred(errorCode);
        });
        webCamTextureToMatHelper.onDisposed.AddListener(() =>
        {
            OnWebCamTextureToMatHelperDisposed();
        });

        webCamTextureToMatHelper.Initialize();
    }
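The three handlers registered above are where the display texture is typically created and released. The sketch below follows the common OpenCVForUnity pattern (a Texture2D backed by the helper's Mat, shown on this object's Renderer); it is an assumption about how these handlers look, not code taken from this example.

    Texture2D texture; // assumed field for this sketch

    void OnWebCamTextureToMatHelperInitialized()
    {
        Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

        // Create a texture matching the camera Mat and show it on this object.
        texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
        Utils.matToTexture2D(webCamTextureMat, texture);
        gameObject.GetComponent <Renderer>().material.mainTexture = texture;
    }

    void OnWebCamTextureToMatHelperDisposed()
    {
        if (texture != null)
        {
            Texture2D.Destroy(texture);
            texture = null;
        }
    }

    void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
    {
        Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
    }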
Example #19
        // Use this for initialization
        void Start()
        {
            webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper> ();

            #if UNITY_WEBGL && !UNITY_EDITOR
            webCamTextureToMatHelper.flipHorizontal = true;
            StartCoroutine(getFilePathCoroutine());
            #else
            shape_predictor_68_face_landmarks_dat_filepath = DlibFaceLandmarkDetector.Utils.getFilePath("shape_predictor_68_face_landmarks.dat");
            shizuku_moc_filepath     = OpenCVForUnity.Utils.getFilePath("shizuku/shizuku.moc.bytes");
            shizuku_physics_filepath = OpenCVForUnity.Utils.getFilePath("shizuku/shizuku.physics.json");
            shizuku_pose_filepath    = OpenCVForUnity.Utils.getFilePath("shizuku/shizuku.pose.json");
            for (int i = 0; i < texture_filepath.Length; i++)
            {
                texture_filepath [i] = OpenCVForUnity.Utils.getFilePath("shizuku/shizuku.1024/texture_0" + i + ".png");
            }
            Run();
            #endif
        }
Example #20
    IEnumerator Start()
    {
        Input.backButtonLeavesApp = true;
        Screen.sleepTimeout       = SleepTimeout.NeverSleep;

        _detector       = GetComponent <FaceDetector>();
        _apiManager     = GetComponent <FaceApiManager>();
        _toMatHelperMgr = GetComponent <WebCamTextureToMatHelperManager>();
        _toMatHelper    = GetComponent <WebCamTextureToMatHelper>();

        //After the camera and related components finish initializing, get the image size
        _toMatHelper.Initialize();
        yield return(WaitInitialization());

        var imgSize = new Size(_toMatHelper.GetWidth(), _toMatHelper.GetHeight());

        _zeroMat = new ZeroMat(imgSize);

        _volume.profile.TryGetSettings(out _distortion);
    }
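WaitInitialization() is not part of this excerpt; a minimal sketch of what it presumably does is to spin until the helper reports readiness.

    // Assumed implementation of the coroutine awaited above.
    IEnumerator WaitInitialization()
    {
        // IsInitialized() becomes true once WebCamTextureToMatHelper has opened the camera.
        while (!_toMatHelper.IsInitialized())
        {
            yield return null;
        }
    }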
Example #21
 private void Run()
 {
     //set 3d face object points
     objectPoints = new MatOfPoint3f(
         new Point3(-31, 72, 86),  //l eye
         new Point3(31, 72, 86),   //r eye
         new Point3(0, 40, 114),   //nose
         new Point3(-20, 15, 90),  //l mouth //new Point3(-22, 17, 90),//l mouth
         new Point3(20, 15, 90),   //r mouth //new Point3(22, 17, 90),//r mouth
         new Point3(-69, 76, -2),  //l ear
         new Point3(69, 76, -2)    //r ear
         );
     imagePoints = new MatOfPoint2f();
     rotMat      = new Mat(3, 3, CvType.CV_64FC1);
     //set up the face landmark model (.dat)
     faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);
     //get the camera Mat helper
     webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper>();
     webCamTextureToMatHelper.Initialize();
 }
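The object/image point pair prepared in Run() is intended for Calib3d.solvePnP. A minimal sketch of that pose step follows, assuming camMatrix and distCoeffs are built elsewhere from the camera parameters (they are not set up in this excerpt).

 // Assumed pose estimation using the Mats prepared in Run().
 void EstimateHeadPose(Mat camMatrix, MatOfDouble distCoeffs)
 {
     if (imagePoints.rows() != objectPoints.rows())
     {
         return;
     }

     using (Mat rvec = new Mat())
     using (Mat tvec = new Mat())
     {
         // Solve for the rotation/translation that maps the 3D face points
         // onto the detected 2D landmarks.
         Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

         // Convert the rotation vector into the 3x3 matrix declared in Run().
         Calib3d.Rodrigues(rvec, rotMat);
     }
 }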
Example #22
    void Start()
    {
        Input.backButtonLeavesApp = true;
        Screen.sleepTimeout       = SleepTimeout.NeverSleep;

        _toMatHelper    = GetComponent <WebCamTextureToMatHelper>();
        _invCvtr        = new InvisibleConverter(_text);
        _cameraSwitcher = new CameraSwitcher(_toMatHelper, _invCvtr, _mainCanvas, _camSwitchDialog);
        _fpsMonitor     = GetComponent <FpsMonitor>();
        _recordSound    = GetComponent <AudioSource>();

        //Load the rear/front camera selection from PlayerPrefs
        _toMatHelper.requestedIsFrontFacing = _cameraSwitcher.UseCamera;
        _toMatHelperMgr = new ToMatHelperManager(gameObject, _toMatHelper, _fpsMonitor);
        _toMatHelper.Initialize();

        //On smartphones, wait the specified number of seconds for the camera to start
        #if !UNITY_EDITOR && UNITY_ANDROID
        Task.Run(WaitCamStartup).Wait();
        #endif
    }
Example #23
        // Use this for initialization
        void Start()
        {
            startBtn  = GameObject.Find("StartButton");
            finishBtn = GameObject.Find("FinishButton");
            startBtn.SetActive(true);
            finishBtn.SetActive(false);

            webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper>();

#if UNITY_WEBGL && !UNITY_EDITOR
            webCamTextureToMatHelper.flipHorizontal = true;
            StartCoroutine(getFilePathCoroutine());
#else
            // FaceLandmark model filepath
            shape_predictor_68_face_landmarks_dat_filepath = DlibFaceLandmarkDetector.Utils.getFilePath("shape_predictor_68_face_landmarks.dat");

            // Load Texture filepath
            LoadTexture();

            Run();
#endif
        }
Example #24
 public ToMatHelperManager(GameObject quad, WebCamTextureToMatHelper texToMatHelper, FpsMonitor fpsMonitor)
 {
     _quad           = quad;
     _texToMatHelper = texToMatHelper;
     _fpsMonitor     = fpsMonitor;
 }
Example #25
    const string USE_CAMERA_KEY = "USE CAMERA"; //PlayerPrefs key (rear/front)

    void Start()
    {
        _toMatHelper = GetComponent <WebCamTextureToMatHelper>();
    }
Example #26
 void Start()
 {
     _toMatHelper = GetComponent <WebCamTextureToMatHelper>();
     _fpsMonitor  = GetComponent <FpsMonitor>();
 }