Example #1
    void Update()
    {
        // only run face detection when running on an actual device (not in the editor)
        if (CoreXT.IsDevice)
        {
            if (webCam.didUpdateThisFrame)
            {
                // map the webcam's rotation angle to the matching Core Image orientation
                CGImageOrientation orientation = CGImageOrientation.RotatedLeft;
                switch (webCam.videoRotationAngle)
                {
                case 0:
                    orientation = CGImageOrientation.Default;
                    break;

                case 90:
                    orientation = CGImageOrientation.RotatedLeft;
                    break;

                case 180:
                    orientation = CGImageOrientation.UpsideDown;
                    break;

                case 270:
                    orientation = CGImageOrientation.RotatedRight;
                    break;
                }

                // wrap the current webcam frame in a Core Image image and run face detection
                var ciimage = new CIImage(CGImage.FromWebCamTexture(webCam));
                // cast to float so the projected scale is not truncated by integer division
                faceDetector.ProjectedScale = (float)Screen.width / webCam.width;
                faces = faceDetector.DetectInImage(ciimage, orientation);
                //foreach (var face in faces) {
                //    Log("face: " + face.Bounds + ", " + face.HasMouthPosition + ", " + face.LeftEyePosition + ", " + face.RightEyePosition);
                //}

                // with exactly one face detected, rotate the arm toward the side of the screen the face is on
                if (faces.Length == 1)
                {
                    var face = faces[0];
                    if (face.Bounds.center.x < (Screen.width / 2))
                    {
                        GameObject.Find("Main Camera").GetComponent<Main>().rotateArmToLeft();
                    }
                    else
                    {
                        GameObject.Find("Main Camera").GetComponent<Main>().rotateArmToRight();
                    }

                    //var newX = (face.Bounds.center.x / Screen.width) * diffX + minX;
                    //var newZ = (face.Bounds.center.y / Screen.height) * diffY + minY;
                    //var newY = (face.Bounds.width / diffFaceWidth) * diffFaceWidth + maxFaceWidth;
                    //GameObject.Find("Main Camera").camera.transform.position = new Vector3(newX, newY, newZ);
                    //GameObject.Find("Main Camera").GetComponent<Main>().Log("face: " + face.Bounds.center.x + ", " + face.Bounds.center.y + ", " + face.Bounds.width + ", " + face.Bounds.height
                    //    + "; " + newX + ", " + newY + ", " + newZ);
                }
            }
        }
    }
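Example #1 drives a Main component on the "Main Camera" object through rotateArmToLeft() and rotateArmToRight(), which are not shown in the snippet. A minimal sketch of what such a component could look like is given below; the arm reference, rotation axis and step size are assumptions made purely for illustration.

    using UnityEngine;

    // hypothetical Main component assumed by Example #1 (not part of the original snippet)
    public class Main : MonoBehaviour
    {
        // arm object to rotate; would be assigned in the Inspector (assumption)
        public Transform arm;

        // degrees rotated per call (assumption)
        public float step = 2.0f;

        public void rotateArmToLeft()
        {
            // turn the arm around the vertical axis toward the left
            arm.Rotate(Vector3.up, -step);
        }

        public void rotateArmToRight()
        {
            // turn the arm around the vertical axis toward the right
            arm.Rotate(Vector3.up, step);
        }

        // simple logging helper referenced by the commented-out debug code
        public void Log(string message)
        {
            Debug.Log(message);
        }
    }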
Example #2
    void Update()
    {
        // only run face detection when running on an actual device (not in the editor)
        if (CoreXT.IsDevice)
        {
            if (webCam.didUpdateThisFrame)
            {
                // map the webcam's rotation angle to the matching Core Image orientation
                CGImageOrientation orientation = CGImageOrientation.RotatedLeft;
                switch (webCam.videoRotationAngle)
                {
                case 0:
                    orientation = CGImageOrientation.Default;
                    break;

                case 90:
                    orientation = CGImageOrientation.RotatedLeft;
                    break;

                case 180:
                    orientation = CGImageOrientation.UpsideDown;
                    break;

                case 270:
                    orientation = CGImageOrientation.RotatedRight;
                    break;
                }

                // wrap the current webcam frame in a Core Image image and run face detection
                var ciimage = new CIImage(CGImage.FromWebCamTexture(webCam));
                // cast to float so the projected scale is not truncated by integer division
                faceDetector.ProjectedScale = (float)Screen.width / webCam.width;
                faces = faceDetector.DetectInImage(ciimage, orientation);

                // log the bounds and feature positions of every detected face
                foreach (var face in faces)
                {
                    Log("face: " + face.Bounds + ", " + face.HasMouthPosition + ", " + face.LeftEyePosition + ", " + face.RightEyePosition);
                }
            }
        }
    }
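Both Update() examples assume fields such as webCam, faceDetector and faces that are set up elsewhere in the class. A rough sketch of how they might be initialized is shown below; the WebCamTexture calls are standard Unity API, while the detector and result types and the detector constructor are assumptions that depend on the plugin version actually used.

    // fields assumed by the Update() examples above (names taken from the snippets)
    WebCamTexture webCam;
    FaceDetector faceDetector;   // detector type name and construction are assumptions
    Face[] faces;                // element type is an assumption; elements expose Bounds, eye and mouth positions

    void Start()
    {
        // start the default device camera (standard Unity API)
        webCam = new WebCamTexture();
        webCam.Play();

        // create the face detector; constructor arguments, if any, are an assumption
        faceDetector = new FaceDetector();
    }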
Example #3
    void OnMediaPicked(object sender, MediaPickedEventArgs e)
    {
        // clean up old stuff
        if (_photo != null)
        {
            Texture2D.Destroy(_photo);
        }

        if (_scrambledFaces != null)
        {
            foreach (var face in _scrambledFaces)
            {
                Texture2D.Destroy(face);
            }
            _scrambledFaces = null;
        }

        //_photo = e.image.ToTexture2D(true, 0.25f);

        System.Random random = new System.Random();

        // set input
        _imageFilter.SetInput(e.image);

        // randomly apply some filter to the image first
        switch (random.Next(7))
        {
        case 0:
            Log("Applying auto-adjust.");
            _imageFilter.AutoAdjust();
            break;

        case 1:
            Log("Applying sepia and sharpen.");
            _imageFilter.SepiaTone(1.0f);
            break;

        case 2:
            Log("Applying bloom and vignette.");
            // chaining multiple filters together
            _imageFilter.Bloom(10.0f, 1.0f)
                .Filter("CIVignette", new Dictionary<string, object> {
                    { "inputRadius", 1.0f },
                    { "inputIntensity", 0.5f }
                });
            break;

        case 3:
            Log("Applying color invert.");
            _imageFilter.ColorInvert();
            break;

        case 4:
            Log("Applying red monochrome.");
            _imageFilter.ColorMonochrome(new Color32(0xff, 0x00, 0x00, 0xff), 1.0f);
            break;

        case 5:
            Log("Applying yellow monochrome.");
            _imageFilter.ColorMonochrome(new Color32(0xff, 0xff, 0x00, 0xff), 1.0f);
            break;

        case 6:
            Log("Applying blue monochrome.");
            _imageFilter.ColorMonochrome(new Color32(0x00, 0x00, 0xff, 0xff), 1.0f);
            break;
        }

        // render the image
        _photo = _imageFilter.Render(
            new Rect(0, 0, e.image.size.Width, e.image.size.Height),
            null, CONVERT_SCALE, e.image.imageOrientation.ToCorrectedRotateAngle());

        // detect faces
        _faces = _faceDetector.DetectInImage(e.image);

        if (_faces.Length > 0)
        {
            _scrambledFaces = new Texture2D[_faces.Length];

            for (int i = 0; i < _faces.Length; i++)
            {
                var face = _faces[i];
                //Log("face: " + face.bounds + ", " + face.hasMouthPosition + ", " + face.leftEyePosition + ", " + face.rightEyePosition);

                // randomly scramble the faces
                _imageFilter.SetInput(_photo);

                switch (random.Next(3))
                {
                case 0:
                    Log("Pixellating face.");
                    _imageFilter.Pixellate(new float[] { 0, 0 }, 10);
                    break;

                case 1:
                    Log("Applying blur to face.");
                    _imageFilter.GaussianBlur(30);
                    break;

                case 2:
                    Log("Applying vortex distortion to face.");
                    _imageFilter.VortexDistortion(
                        new float[] { face.bounds.x * CONVERT_SCALE, face.bounds.y * CONVERT_SCALE },
                        3000, 9000
                        );
                    break;
                }

                // render the face only
                _scrambledFaces[i] = _imageFilter.Render(
                    new Rect(
                        face.bounds.x * CONVERT_SCALE,
                        face.bounds.y * CONVERT_SCALE,
                        face.bounds.width * CONVERT_SCALE,
                        face.bounds.height * CONVERT_SCALE
                        ));
            }
        }
    }
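Example #3 keeps the filtered photo in _photo and the per-face render results in _scrambledFaces, but does not show how they are displayed. One possible way to draw them with standard Unity OnGUI calls is sketched below; the assumption that the face bounds scaled by CONVERT_SCALE line up with screen coordinates is made only for illustration.

    void OnGUI()
    {
        if (_photo == null)
        {
            return;
        }

        // draw the filtered photo across the whole screen
        GUI.DrawTexture(new Rect(0, 0, Screen.width, Screen.height), _photo, ScaleMode.ScaleToFit);

        if (_scrambledFaces == null)
        {
            return;
        }

        // overlay each scrambled face at its detected bounds
        // (assumes CONVERT_SCALE maps detector coordinates to screen pixels)
        for (int i = 0; i < _scrambledFaces.Length; i++)
        {
            var b = _faces[i].bounds;
            GUI.DrawTexture(
                new Rect(b.x * CONVERT_SCALE, b.y * CONVERT_SCALE,
                         b.width * CONVERT_SCALE, b.height * CONVERT_SCALE),
                _scrambledFaces[i]);
        }
    }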