Example #1
0
        /// <summary>
        /// Initializes a new instance of the FusionImageFrame class by asking the
        /// native Fusion runtime to allocate an image frame of the given type and size.
        /// </summary>
        /// <param name="imageType">The type of image frame to create.</param>
        /// <param name="width">The width, in pixels, of the image to create.</param>
        /// <param name="height">The height, in pixels, of the image to create.</param>
        /// <param name="cameraParameters">The camera parameters of the image.</param>
        internal FusionImageFrame(FusionImageType imageType, int width, int height, CameraParameters cameraParameters)
        {
            NativeFrameHandle frameHandle;

            // Create the frame natively; any failure HRESULT is converted into a
            // managed exception before the handle is stored.
            var result = NativeMethods.NuiFusionCreateImageFrame(
                imageType,
                (uint)width,
                (uint)height,
                cameraParameters,
                out frameHandle);
            ExceptionHelper.ThrowIfFailed(result);

            Handle = frameHandle;
        }
Example #2
0
            // ArucoCamera methods

            /// <summary>
            /// Configure the VR input tracking, the Ovrvision plugin, and auto-start the cameras. The cameras need to be stopped before configured.
            /// </summary>
            public override void Configure()
            {
                // The cameras must be stopped before they can be (re)configured.
                if (IsStarted)
                {
                    return;
                }

                // Reset state
                IsConfigured = false;

                // Update VR tracking
                UnityEngine.VR.InputTracking.Recenter();

                // Try to load the camera parameters from an XML file, resolved against
                // the editor data path in the editor or the persistent data path in builds.
                // (string.IsNullOrEmpty replaces the manual null + Length check.)
                if (!string.IsNullOrEmpty(CameraParametersFilePath))
                {
                    string fullCameraParametersFilePath = Path.Combine((Application.isEditor) ? Application.dataPath : Application.persistentDataPath, CameraParametersFilePath);
                    CameraParameters = CameraParameters.LoadFromXmlFile(fullCameraParametersFilePath);
                }

                base.Configure();
            }
Example #3
0
    // Use this for initialization: pick the best resolution, create the capture
    // object, then start photo mode and take a single picture.
    void Start()
    {
        // Highest-resolution photo mode the device supports.
        Resolution highestResolution = PhotoCapture.SupportedResolutions.OrderByDescending(res => res.width * res.height).First();

        targetTexture = new Texture2D(highestResolution.width, highestResolution.height);

        // Create a PhotoCapture object
        PhotoCapture.CreateAsync(false, captureObject =>
        {
            photoCaptureObject = captureObject;

            CameraParameters cameraParameters = new CameraParameters
            {
                hologramOpacity        = 0.0f,
                cameraResolutionWidth  = highestResolution.width,
                cameraResolutionHeight = highestResolution.height,
                pixelFormat            = CapturePixelFormat.BGRA32
            };

            // Activate the camera
            photoCaptureObject.StartPhotoModeAsync(cameraParameters, result =>
            {
                // Take a picture
                photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
            });
        });
    }
Example #4
0
        /// <summary>
        /// Initializes the capture file path and, on UWP (non-editor) builds,
        /// prepares the target texture and camera parameters for photo capture.
        /// </summary>
        public PhotoCaptureController()
        {
            filePath = System.IO.Path.Combine(Application.persistentDataPath, filename);
#if UNITY_METRO && !UNITY_EDITOR
            try
            {
                // Highest supported photo resolution.
                Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
                width         = cameraResolution.width;
                height        = cameraResolution.height;
                targetTexture = new Texture2D(width, height);

                cameraParameters = new CameraParameters();
                cameraParameters.hologramOpacity        = 1.0f;
                cameraParameters.cameraResolutionWidth  = width;
                cameraParameters.cameraResolutionHeight = height;
                cameraParameters.pixelFormat            = CapturePixelFormat.BGRA32;
            }
            catch (System.Exception e)
            {
                // Camera setup is best-effort, but the previous empty catch hid
                // real failures entirely — at least record the exception.
                Debug.LogException(e);
            }
#endif
        }
    /// <summary>
    /// On tap: create a PhotoCapture object, start photo mode, and take a picture.
    /// </summary>
    /// <param name="eventData">Click event data (unused).</param>
    public void OnInputClicked(InputClickedEventData eventData)
    {
        // Creation completes asynchronously in the delegate below.
        PhotoCapture.CreateAsync(false, delegate(PhotoCapture captureObject)
        {
            photoCaptureObj = captureObject;

            CameraParameters cameraParameters       = new CameraParameters();
            cameraParameters.hologramOpacity        = 0.0f;
            cameraParameters.cameraResolutionWidth  = cameraResolution.width;
            cameraParameters.cameraResolutionHeight = cameraResolution.height;
            cameraParameters.pixelFormat            = CapturePixelFormat.BGRA32;

            // Bug fix: previously this was logged unconditionally right after
            // initiating the async creation, before success was actually known.
            Debug.Log("Photo Capture CreateAsync Succeed!");

            photoCaptureObj.StartPhotoModeAsync(cameraParameters, delegate(PhotoCapture.PhotoCaptureResult result)
            {
                photoCaptureObj.TakePhotoAsync(OnCapturedPhotoToMemory);
            });
        });
    }
Example #6
0
            /// <summary>
            /// <see cref="ArucoObjectTracker.Draw(int, Dictionary, HashSet{ArucoObject})"/>
            /// Draws the diamonds detected for this camera/dictionary pair onto the
            /// camera image and, when possible, the axes of their estimated poses.
            /// </summary>
            public override void Draw(int cameraId, Aruco.Dictionary dictionary)
            {
                // Nothing to draw while the tracker is deactivated.
                if (!IsActivated)
                {
                    return;
                }

                // Tracks whether anything was drawn so the image write-back below
                // only happens when an image was actually modified.
                bool updatedCameraImage = false;

                Cv.Core.Mat[]    cameraImages     = arucoTracker.ArucoCamera.Images;
                CameraParameters cameraParameters = arucoTracker.ArucoCamera.CameraParameters;

                if (DetectedDiamonds[cameraId][dictionary] > 0)
                {
                    // Draw detected diamonds
                    if (arucoTracker.DrawDetectedDiamonds)
                    {
                        Aruco.DrawDetectedDiamonds(cameraImages[cameraId], DiamondCorners[cameraId][dictionary], DiamondIds[cameraId][dictionary]);
                        updatedCameraImage = true;
                    }

                    // Draw axes of detected diamonds — requires calibrated camera
                    // parameters and pose (rotation) vectors for this dictionary.
                    if (arucoTracker.DrawAxes && cameraParameters != null && DiamondRvecs[cameraId][dictionary] != null)
                    {
                        for (uint i = 0; i < DetectedDiamonds[cameraId][dictionary]; i++)
                        {
                            Aruco.DrawAxis(cameraImages[cameraId], cameraParameters.CamerasMatrix[cameraId], cameraParameters.DistCoeffs[cameraId],
                                           DiamondRvecs[cameraId][dictionary].At(i), DiamondTvecs[cameraId][dictionary].At(i), DRAW_AXIS_LENGTH);
                            updatedCameraImage = true;
                        }
                    }
                }

                // Re-assign the array — presumably the Images setter notifies the
                // camera of the modified frames (TODO confirm setter semantics).
                if (updatedCameraImage)
                {
                    arucoTracker.ArucoCamera.Images = cameraImages;
                }
            }
Example #7
0
        /// <summary>
        /// Callback for PhotoCapture.CreateAsync: configures capture parameters at
        /// the highest supported resolution and starts photo mode.
        /// </summary>
        /// <param name="captureObject">The created PhotoCapture instance.</param>
        private void OnPhotoCaptureCreated(PhotoCapture captureObject)
        {
            _photoCapture = captureObject;

            // Highest supported photo resolution.
            Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();

            CameraParameters c = new CameraParameters
            {
                hologramOpacity        = 0.0f, // exclude holograms from the capture
                cameraResolutionWidth  = cameraResolution.width,
                cameraResolutionHeight = cameraResolution.height,
                pixelFormat            = CapturePixelFormat.BGRA32
            };

            try
            {
                captureObject.StartPhotoModeAsync(c, OnPhotoModeStarted);
            }
            catch (Exception e)
            {
                // Log the full exception (message + stack trace); the previous
                // Debug.Log(e.Message) discarded the stack trace.
                Debug.LogException(e);
            }
        }
Example #8
0
    /// <summary>
    /// Callback for PhotoCapture.CreateAsync: picks a small capture resolution
    /// (fast to upload to the server) and starts photo mode in PNG format.
    /// </summary>
    /// <param name="captureObject">The created PhotoCapture instance.</param>
    void OnPhotoCaptureCreated(PhotoCapture captureObject)
    {
        photoCaptureObject = captureObject;

        // Typical HoloLens photo resolutions, largest first:
        //Resolution: 2048x1152 at 0Hz.
        //Resolution: 1408x792 at 0Hz.
        //Resolution: 1344x756 at 0Hz.
        //Resolution: 1280x720 at 0Hz.
        //Resolution: 896x504 at 0Hz.

        // Pick the 5th-largest resolution: may affect recognition accuracy, but
        // is fast to upload to the server. Robustness fix: fall back to the
        // smallest available mode when fewer than five are reported, instead of
        // letting Skip(4).First() throw on the empty remainder.
        var orderedResolutions = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height);
        Resolution cameraResolution = orderedResolutions.Skip(4).Any()
            ? orderedResolutions.Skip(4).First()
            : orderedResolutions.Last();

        CameraParameters c = new CameraParameters();

        c.hologramOpacity        = 0.0f;
        c.cameraResolutionWidth  = cameraResolution.width;
        c.cameraResolutionHeight = cameraResolution.height;
        c.pixelFormat            = CapturePixelFormat.PNG;

        photoCaptureObject.StartPhotoModeAsync(c, false, OnPhotoModeStartedForBinary);
    }
Example #9
0
    /// <summary>
    /// Plays the shutter sound (when assigned), starts photo mode, and takes a
    /// picture into memory. Does nothing if the capture object is not ready.
    /// </summary>
    public void TakePicture()
    {
        // Hide holograms and capture raw BGRA32 frames.
        var cameraParameters = new CameraParameters
        {
            hologramOpacity        = 0.0f,
            cameraResolutionWidth  = cameraResolution.width,
            cameraResolutionHeight = cameraResolution.height,
            pixelFormat            = CapturePixelFormat.BGRA32
        };

        // Activate the camera only if the capture object exists.
        if (photoCaptureObject == null)
        {
            return;
        }

        if (shutterSound != null)
        {
            shutterSound.Play();
        }

        photoCaptureObject.StartPhotoModeAsync(cameraParameters, result =>
        {
            // Take a picture
            photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
        });
    }
Example #10
0
    /// <summary>
    /// Captures a photo at the highest supported resolution, saves it to disk,
    /// and hands the file path to the text recognizer.
    /// </summary>
    private void ExecuteImageCaptureAndAnalysis()
    {
        // Use the resolution struct directly. The previous version allocated a
        // throwaway Texture2D (never destroyed — a native-memory leak) solely to
        // read back these same dimensions.
        Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();

        PhotoCapture.CreateAsync(false, delegate(PhotoCapture captureObject)
        {
            photoCaptureObject                   = captureObject;
            CameraParameters camParameters       = new CameraParameters();
            camParameters.hologramOpacity        = 0.0f; // for MR 0.9f
            camParameters.cameraResolutionWidth  = cameraResolution.width;
            camParameters.cameraResolutionHeight = cameraResolution.height;
            camParameters.pixelFormat            = CapturePixelFormat.BGRA32;

            captureObject.StartPhotoModeAsync(camParameters, delegate(PhotoCapture.PhotoCaptureResult result)
            {
                // Save under a per-tap file name and publish the path for analysis.
                string filename = string.Format(@"CapturedImage{0}.jpg", tapsCount);
                string filePath = Path.Combine(Application.persistentDataPath, filename);
                RecognizeTextManager.instance.imagePath = filePath;
                photoCaptureObject.TakePhotoAsync(filePath, PhotoCaptureFileOutputFormat.JPG, OnCapturedPhotoToDisk);
                currentlyCapturing = false;
            });
        });
    }
Example #11
0
        /// <summary>
        /// Run logic for initializing Hololens web camera for taking pictures
        /// for texturing the room mesh.
        /// </summary>
        private static void InitCamera()
        {
#if UNITY_WSA_10_0
            // Set camera resolution (documented default of 1280 x 720)
            List <Resolution> resolutions = new List <Resolution>(PhotoCapture.SupportedResolutions);
            CameraResolution = resolutions[0];

            // Set other camera parameters. Consistency fix: the width/height/opacity
            // assignments below read state that is only assigned on WSA, yet were
            // previously outside the guard and ran on every platform.
            m_CameraParameters = new CameraParameters(WebCamMode.PhotoMode);
            m_CameraParameters.cameraResolutionWidth  = CameraResolution.width;
            m_CameraParameters.cameraResolutionHeight = CameraResolution.height;
            m_CameraParameters.hologramOpacity        = 0.0f;
            m_CameraParameters.pixelFormat = CapturePixelFormat.BGRA32;
#endif

            // Set the Texture2DArray-compatible texture settings
            UtilizedResolution = DowngradeTextureResolution(CameraResolution);

            // Create texture array
            CreateTexture2DArray();
        }
        // Creates the NRPhotoCapture object (once) and starts photo mode with
        // blended hologram capture at the highest supported resolution.
        void Create()
        {
            if (photoCaptureObject != null)
            {
                Debug.LogError("The NRPhotoCapture has already been created.");
                return;
            }

            // Create a PhotoCapture object
            NRPhotoCapture.CreateAsync(false, delegate(NRPhotoCapture captureObject)
            {
                cameraResolution = NRPhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
                targetTexture    = new Texture2D(cameraResolution.width, cameraResolution.height);

                if (captureObject != null)
                {
                    photoCaptureObject = captureObject;
                }
                else
                {
                    Debug.LogError("Can not get a captureObject.");
                    // Bug fix: previously execution fell through after this error
                    // and dereferenced the null photoCaptureObject below.
                    return;
                }

                CameraParameters cameraParameters       = new CameraParameters();
                cameraParameters.hologramOpacity        = 0.0f;
                cameraParameters.cameraResolutionWidth  = cameraResolution.width;
                cameraParameters.cameraResolutionHeight = cameraResolution.height;
                cameraParameters.pixelFormat            = CapturePixelFormat.BGRA32;
                cameraParameters.blendMode = BlendMode.Blend;

                // Activate the camera
                photoCaptureObject.StartPhotoModeAsync(cameraParameters, delegate(NRPhotoCapture.PhotoCaptureResult result)
                {
                    Debug.Log("Start PhotoMode Async");
                });
            });
        }
        // Take a photo (guarded so only one capture runs at a time).
        public void TakePhoto()
        {
            // Re-entrancy guard: ignore requests while a capture is in flight.
            if (photoTaking)
            {
                Debug.Log("写真撮影中");
                return;
            }
            photoTaking = true;
            debug.text  = "写真撮影開始";

            try
            {
                // Create the PhotoCapture object.
                PhotoCapture.CreateAsync(false, delegate(PhotoCapture captureObject)
                {
                    photoCaptureObject = captureObject;
                    CameraParameters cameraParameters       = new CameraParameters();
                    cameraParameters.hologramOpacity        = 0.0f;
                    cameraParameters.cameraResolutionWidth  = cameraResolution.width;
                    cameraParameters.cameraResolutionHeight = cameraResolution.height;
                    cameraParameters.pixelFormat            = CapturePixelFormat.BGRA32;

                    // Activate the camera.
                    photoCaptureObject.StartPhotoModeAsync(cameraParameters,
                                                           delegate(PhotoCapture.PhotoCaptureResult result)
                    {
                        // Take the picture.
                        photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
                    });
                });
            }
            catch (Exception ex)
            {
                Debug.Log(ex.Message);
                debug.text = ex.Message;
                // Bug fix: reset the guard flag. Previously an exception here left
                // photoTaking == true forever, blocking all future captures.
                photoTaking = false;
            }
        }
Example #14
0
    /// <summary>
    /// Begin process of Image Capturing and send To Azure Custom Vision Service.
    /// </summary>
    private void ExecuteImageCaptureAndAnalysis()
    {
        // Update camera status to analysis.
        SceneOrganiser.Instance.SetCameraStatus("Analysis");

        // Create a label in world space using the SceneOrganiser class
        // Invisible at this point but correctly positioned where the image was taken
        SceneOrganiser.Instance.PlaceAnalysisLabel();

        // Set the camera resolution to be the highest possible. The resolution is
        // used directly; the old throwaway Texture2D (allocated per call and never
        // destroyed — a native-memory leak) only existed to echo these dimensions.
        Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();

        // Begin capture process, set the image format
        PhotoCapture.CreateAsync(false, delegate(PhotoCapture captureObject)
        {
            photoCaptureObject = captureObject;

            CameraParameters camParameters = new CameraParameters
            {
                hologramOpacity        = 0.0f,
                cameraResolutionWidth  = cameraResolution.width,
                cameraResolutionHeight = cameraResolution.height,
                pixelFormat            = CapturePixelFormat.BGRA32
            };

            // Capture the image from the camera and save it in the App internal folder
            captureObject.StartPhotoModeAsync(camParameters, delegate(PhotoCapture.PhotoCaptureResult result)
            {
                string filename = string.Format(@"CapturedImage{0}.jpg", captureCount);
                filePath        = Path.Combine(Application.persistentDataPath, filename);
                captureCount++;
                photoCaptureObject.TakePhotoAsync(filePath, PhotoCaptureFileOutputFormat.JPG, OnCapturedPhotoToDisk);
            });
        });
    }
    /// <summary>
    /// Creates a PhotoCapture object and saves a timestamped JPG into the media
    /// folder; reports an error through OnPhotoFinished when creation fails.
    /// </summary>
    public void TakePicture()
    {
        // Create a PhotoCapture object
        PhotoCapture.CreateAsync(false, delegate(PhotoCapture captureObject)
        {
            // Guard clause: bail out and report when creation failed.
            if (captureObject == null)
            {
                Debug.LogError("Failed to create PhotoCapture Instance!");
                OnPhotoFinished?.Invoke("Error");
                return;
            }

            _photoCaptureObject = captureObject;

            var cameraParameters = new CameraParameters
            {
                hologramOpacity        = 0.0f,
                cameraResolutionWidth  = _cameraResolution.width,
                cameraResolutionHeight = _cameraResolution.height,
                pixelFormat            = CapturePixelFormat.BGRA32
            };

            // Make sure the output directory exists before capturing.
            if (!Directory.Exists(_mediaPath))
            {
                Directory.CreateDirectory(_mediaPath);
            }

            // Timestamped file name, e.g. "2020-01-31-09-30-59.jpg".
            _fileName         = DateTime.Now.ToString("yyyy-MM-dd-hh-mm-ss") + ".jpg";
            _fullPathFileName = Path.Combine(_mediaPath, _fileName);

            _cameraBorderController.SetRecordingMaterial();

            // Activate the camera and write the photo straight to disk.
            _photoCaptureObject.StartPhotoModeAsync(cameraParameters, delegate(PhotoCapture.PhotoCaptureResult result)
            {
                _photoCaptureObject.TakePhotoAsync(_fullPathFileName, PhotoCaptureFileOutputFormat.JPG, OnCapturedPhotoToDisk);
            });
        });
    }
        // Unity entry point: sets up the camera, logging, token control, file
        // loading, and the network connection, then marks the component ready.
        async void Start()
        {
            // Camera: smallest supported resolution, JPEG frames.
            _captureResolution = PhotoCapture.SupportedResolutions.OrderBy(res => res.width * res.height).First();

            _cameraPara = new CameraParameters
            {
                hologramOpacity        = 0.0f,
                cameraResolutionWidth  = _captureResolution.width,
                cameraResolutionHeight = _captureResolution.height,
                pixelFormat            = CapturePixelFormat.JPEG
            };

            if (Const.HOLO_CAPTURE)
            {
                PhotoCapture.CreateAsync(true, OnPhotoCaptureCreatedHOLO);
            }
            else
            {
                PhotoCapture.CreateAsync(false, OnPhotoCaptureCreated);
            }

            // Latency logger, named after the target server and token size.
            _myLogger = new MyLogger("latency-" + Const.SERVER_IP + "-" + Const.TOKEN_SIZE + ".txt");
            await _myLogger.InitializeLogger();

            // Flow-control tokens.
            _tokenController = new TokenController(Const.TOKEN_SIZE, _myLogger);

            // File loaders.
            await InitializeFileLoading();

            // Network connection and server clock offset.
            await InitializeNetworkAsync();
            await GetServerTimeOffsetAsync();

            _isInitialized = true;
            _startTime     = GetTimeMillis();
        }
Example #17
0
    /// <summary>
    /// Begin process of Image Capturing and send To Azure Custom Vision Service.
    /// Caches the best supported camera resolution on the first invocation.
    /// </summary>
    private void ExecuteImageCaptureAndAnalysis()
    {
        // Resolve (and cache) the highest supported resolution once.
        if (!cameraResolutionSet)
        {
            cameraResolution    = PhotoCapture.SupportedResolutions.OrderByDescending(res => res.width * res.height).First();
            cameraResolutionSet = true;
        }

        // Begin capture process, set the image format.
        PhotoCapture.CreateAsync(false, delegate(PhotoCapture captureObject)
        {
            photoCaptureObject = captureObject;

            var camParameters = new CameraParameters
            {
                hologramOpacity        = 0.0f,
                cameraResolutionWidth  = cameraResolution.width,
                cameraResolutionHeight = cameraResolution.height,
                pixelFormat            = CapturePixelFormat.BGRA32
            };

            // Capture the image and save it under the app's "bathroom" folder.
            captureObject.StartPhotoModeAsync(camParameters, delegate(PhotoCapture.PhotoCaptureResult result)
            {
                string filename = string.Format(@"CapturedImage{0}.jpg", captureCount);
                filePath        = Path.Combine(Path.Combine(Application.persistentDataPath, "bathroom"), filename);
                captureCount++;
                photoCaptureObject.TakePhotoAsync(filePath, PhotoCaptureFileOutputFormat.JPG, OnCapturedPhotoToDisk);
            });
        });
    }
Example #18
0
        // Sets up the Kinect Fusion reconstruction volume and allocates all the
        // image frames used by the reconstruction pipeline.
        void InitializeFusion()
        {
            // Reconstruction volume: 256 voxels per meter over a 512 x 384 x 512 grid.
            const float voxelPerMeter = 256;
            const int   voxelsX       = 512;
            const int   voxelsY       = 384;
            const int   voxelsZ       = 512;

            reconstructionParameters = new ReconstructionParameters(voxelPerMeter, voxelsX, voxelsY, voxelsZ);

            // Start the camera at the world origin.
            worldToCameraTransform = Matrix4.Identity;

            // Create the Fusion reconstruction object (AMP processor, device index -1).
            reconstruction = ColorReconstruction.FusionCreateReconstruction(reconstructionParameters, ReconstructionProcessor.Amp, -1, worldToCameraTransform);

            // Allocate the Fusion image frames, all sized to the depth stream.
            cameraParameters      = CameraParameters.Defaults;
            depthImageFrame       = new FusionFloatImageFrame(depthWidth, depthHeight, cameraParameters);
            smoothDepthImageFrame = new FusionFloatImageFrame(depthWidth, depthHeight, cameraParameters);
            colorImageFrame       = new FusionColorImageFrame(depthWidth, depthHeight, cameraParameters);
            pointCloudImageFrame  = new FusionPointCloudImageFrame(depthWidth, depthHeight, cameraParameters);
            surfaceImageFrame     = new FusionColorImageFrame(depthWidth, depthHeight, cameraParameters);
        }
Example #19
0
    /*
     * Initializes the settings that the web camera will use and starts photo mode.
     * Called by TakePhoto once the PhotoCapture object has been created.
     */
    void OnPhotoCaptureCreated(PhotoCapture captureObject)
    {
        Debug.Log("Photocapture");

        photoCaptureObject = captureObject;

        /* highest available photo resolution */
        Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending(res => res.width * res.height).First();

        /* hide holograms and capture raw BGRA32 frames at full resolution */
        CameraParameters c = new CameraParameters
        {
            hologramOpacity        = 0.0f,
            cameraResolutionWidth  = cameraResolution.width,
            cameraResolutionHeight = cameraResolution.height,
            pixelFormat            = CapturePixelFormat.BGRA32
        };

        /* activate photo mode; OnPhotoModeStarted takes the actual picture */
        captureObject.StartPhotoModeAsync(c, OnPhotoModeStarted);
    }
Example #20
0
    /// <summary>
    /// Begin process of Image Capturing and send To Azure
    /// Computer Vision service.
    /// </summary>
    private void ExecuteImageCaptureAndAnalysis()
    {
        // Set the camera resolution to be the highest possible. The resolution is
        // used directly below; the old throwaway Texture2D (allocated every call
        // and never destroyed — a native-memory leak) only echoed these values.
        Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();

        // Begin capture process, set the image format
        PhotoCapture.CreateAsync(false, delegate(PhotoCapture captureObject)
        {
            photoCaptureObject                   = captureObject;
            CameraParameters camParameters       = new CameraParameters();
            camParameters.hologramOpacity        = 0.0f;
            camParameters.cameraResolutionWidth  = cameraResolution.width;
            camParameters.cameraResolutionHeight = cameraResolution.height;
            camParameters.pixelFormat            = CapturePixelFormat.BGRA32;

            // Capture the image from the camera and save it in the App internal folder
            captureObject.StartPhotoModeAsync(camParameters, delegate(PhotoCapture.PhotoCaptureResult result)
            {
                // Fixed file name: each capture overwrites the previous image.
                string filename = @"current_image.jpg";
                string filePath = Path.Combine(Application.persistentDataPath, filename);

                // Publish the path so the analysis manager can pick it up.
                VisionManager.instance.imagePath = filePath;

                photoCaptureObject.TakePhotoAsync(filePath, PhotoCaptureFileOutputFormat.JPG, OnCapturedPhotoToDisk);

                currentlyCapturing = false;
            });
        });
    }
Example #21
0
    /// <summary>
    /// Begin process of Image Capturing and send To Azure Computer Vision service.
    /// </summary>
    private void ExecuteImageCaptureAndAnalysis()
    {
        // NOTE(review): ElementAt(1) picks the second resolution in the platform's
        // unspecified enumeration order and throws when fewer than two modes
        // exist — consider ordering by size as the other capture paths do.
        Resolution cameraResolution = PhotoCapture.SupportedResolutions.ElementAt(1);

        PhotoCapture.CreateAsync(false, delegate(PhotoCapture captureObject)
        {
            photoCaptureObject = captureObject;

            // The resolution is used directly; the previous throwaway Texture2D
            // (never destroyed — a native-memory leak) existed only to echo these
            // same dimensions back through .width/.height.
            CameraParameters c       = new CameraParameters();
            c.hologramOpacity        = 0.0f;
            c.cameraResolutionWidth  = cameraResolution.width;
            c.cameraResolutionHeight = cameraResolution.height;
            c.pixelFormat            = CapturePixelFormat.BGRA32;

            Debug.Log("before start photoasync");

            captureObject.StartPhotoModeAsync(c, delegate(PhotoCapture.PhotoCaptureResult result)
            {
                string filename = string.Format(@"CapturedImages{0}.jpg", tapsCount);
                string filePath = Path.Combine(Application.persistentDataPath, filename);

                // Set the image path on the FaceAnalysis class
                FaceAnalysis.Instance.imagePath = filePath;

                photoCaptureObject.TakePhotoAsync(filePath, PhotoCaptureFileOutputFormat.JPG, OnCapturedPhotoToDisk);
            });
        });
    }
Example #22
0
    /// <summary>
    /// Callback for PhotoCapture.CreateAsync: selects the smallest supported
    /// resolution, prepares the target texture, and starts photo mode.
    /// </summary>
    /// <param name="photoCapture">The created PhotoCapture instance.</param>
    private void OnPhotoCaptureCreated(PhotoCapture photoCapture)
    {
        _photoCapture = photoCapture;

        // Order resolutions from largest to smallest and take the smallest one.
        // (Closest match to the HoloLens' 896x504 when running on a webcam.)
        Resolution[] orderedResolutions = PhotoCapture.SupportedResolutions.OrderByDescending(res => res.width * res.height).ToArray();
        Resolution   cameraResolution   = orderedResolutions.Last();

        Debug.Log(string.Format("Camera Resolution: {0}x{1}", cameraResolution.width, cameraResolution.height));

        // Target texture for the captured frames (RGB24, no mipmaps).
        _sourceTexture = new Texture2D(cameraResolution.width, cameraResolution.height, TextureFormat.RGB24, false);

        CameraParameters c = new CameraParameters
        {
            hologramOpacity        = 0.0f,
            cameraResolutionWidth  = cameraResolution.width,
            cameraResolutionHeight = cameraResolution.height,
            pixelFormat            = CapturePixelFormat.BGRA32
        };

        _photoCapture.StartPhotoModeAsync(c, false, OnPhotoModeStarted);
    }
    /// <summary>
    /// Callback for VideoCapture creation: wires the frame-sample event, builds
    /// the camera parameters from the lowest resolution and the UI-requested
    /// frame rate, and starts video mode.
    /// </summary>
    /// <param name="videoCapture">The created capture object, or null on failure.</param>
    void OnVideoCaptureCreated(VideoCapture videoCapture)
    {
        if (videoCapture == null)
        {
            Debug.LogError("Did not find a video capture object. You may not be using the HoloLens.");
            return;
        }

        this._videoCapture = videoCapture;

        _resolution = CameraStreamHelper.Instance.GetLowestResolution();

        videoCapture.FrameSampleAcquired += OnFrameSampleAcquired;

        //You don't need to set all of these params.
        //I'm just adding them to show you that they exist.
        CameraParameters cameraParams = new CameraParameters();

        cameraParams.cameraResolutionHeight = _resolution.height;
        cameraParams.cameraResolutionWidth  = _resolution.width;

        // Cleanup: the frame rate previously queried from CameraStreamHelper was
        // immediately overwritten by the UI-requested rate, and RoundToInt ran
        // twice. Take the panel's rate directly and round exactly once.
        int finalFrameRate = Mathf.RoundToInt(_videoPanelUI.requestedFrameRate);
        cameraParams.frameRate = finalFrameRate;

        cameraParams.pixelFormat           = CapturePixelFormat.BGRA32;
        cameraParams.rotateImage180Degrees = true;         //If your image is upside down, remove this line.
        cameraParams.enableHolograms       = false;

        _videoPanelUI.SetResolution(_resolution.width, _resolution.height, finalFrameRate);

        videoCapture.StartVideoModeAsync(cameraParams, OnVideoModeStarted);
    }
    // Configures the camera at the highest supported resolution, records the
    // image dimensions, and takes a photo that is forwarded to ROS.
    void Start_take_picture(PhotoCapture captureObject)
    {
        photoCaptureObject = captureObject;

        // Pick the highest supported photo resolution.
        Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending(res => res.width * res.height).First();

        // Camera parameters: hide holograms, capture raw BGRA32 frames.
        CameraParameters c = new CameraParameters
        {
            hologramOpacity        = 0.0f,
            cameraResolutionWidth  = cameraResolution.width,
            cameraResolutionHeight = cameraResolution.height,
            pixelFormat            = CapturePixelFormat.BGRA32
        };

        // Remember the image dimensions for later processing.
        Width  = cameraResolution.width;
        Height = cameraResolution.height;

        // Take the photo and publish it to ROS.
        photoCaptureObject.StartPhotoModeAsync(c, result =>
        {
            photoCaptureObject.TakePhotoAsync(Publish_to_ROS);
        });
    }
            /// <summary>
            /// <see cref="ArucoObjectTracker.Draw(int, Dictionary, HashSet{ArucoObject})"/>
            /// Draws the detected ChArUco corners and, when a valid pose exists, the
            /// axes of each tracked ChArUco board onto the camera image.
            /// </summary>
            public override void Draw(int cameraId, Aruco.Dictionary dictionary)
            {
                // Skip when the tracker is off or no markers were detected this frame.
                if (!IsActivated || arucoTracker.MarkerTracker.DetectedMarkers[cameraId][dictionary] <= 0)
                {
                    return;
                }

                // Tracks whether anything was drawn so the image write-back below
                // only happens when an image was actually modified.
                bool updatedCameraImage = false;

                Cv.Core.Mat[]    cameraImages     = arucoTracker.ArucoCamera.Images;
                CameraParameters cameraParameters = arucoTracker.ArucoCamera.CameraParameters;

                // Draw every ChArUco board registered for this dictionary.
                foreach (var arucoCharucoBoard in arucoTracker.GetArucoObjects <ArucoCharucoBoard>(dictionary))
                {
                    // Only boards with interpolated corners and an estimated pose.
                    if (arucoCharucoBoard.InterpolatedCorners > 0 && arucoCharucoBoard.Rvec != null)
                    {
                        if (arucoTracker.DrawDetectedCharucoMarkers)
                        {
                            Aruco.DrawDetectedCornersCharuco(cameraImages[cameraId], arucoCharucoBoard.DetectedCorners, arucoCharucoBoard.DetectedIds);
                            updatedCameraImage = true;
                        }

                        // Axes need calibrated camera parameters and a valid transform.
                        if (arucoTracker.DrawAxes && cameraParameters != null && arucoCharucoBoard.ValidTransform)
                        {
                            Aruco.DrawAxis(cameraImages[cameraId], cameraParameters.CamerasMatrix[cameraId], cameraParameters.DistCoeffs[cameraId],
                                           arucoCharucoBoard.Rvec, arucoCharucoBoard.Tvec, arucoCharucoBoard.AxisLength);
                            updatedCameraImage = true;
                        }
                    }
                }

                // Re-assign the array — presumably the Images setter notifies the
                // camera of the modified frames (TODO confirm setter semantics).
                if (updatedCameraImage)
                {
                    arucoTracker.ArucoCamera.Images = cameraImages;
                }
            }
Example #26
0
    /// <summary>
    /// Callback for PhotoCapture.CreateAsync: remembers the capture object,
    /// configures it for the largest supported resolution, then activates the
    /// camera and captures a single photo into memory.
    /// </summary>
    /// <param name="captureObject">The freshly created PhotoCapture instance.</param>
    void OnPhotoCaptureCreated(PhotoCapture captureObject)
    {
        photoCaptureObject = captureObject;

        // Use the largest resolution the device supports.
        Resolution bestResolution = PhotoCapture.SupportedResolutions
            .OrderByDescending(res => res.width * res.height)
            .First();

        // Transparent holograms, BGRA32 pixel format.
        CameraParameters parameters = new CameraParameters();
        parameters.hologramOpacity        = 0.0f;
        parameters.cameraResolutionWidth  = bestResolution.width;
        parameters.cameraResolutionHeight = bestResolution.height;
        parameters.pixelFormat            = CapturePixelFormat.BGRA32;

        // Keep the image dimensions for later processing.
        Width  = bestResolution.width;
        Height = bestResolution.height;

        // Activate the camera, then take a picture into memory.
        photoCaptureObject.StartPhotoModeAsync(parameters, result => {
            photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
        });
    }
    /// <summary>
    /// Creates a PhotoCapture object at the highest supported resolution and
    /// starts photo mode. The actual TakePhotoAsync call is currently
    /// commented out, so this only activates the camera.
    /// </summary>
    /// <returns>Always true; creation and capture run asynchronously.</returns>
    private bool TakeScreenshotHololens()
    {
        // Set the camera resolution to be the highest possible.
        // (The original allocated a Texture2D only to read these dimensions
        // back; the resolution is used directly instead.)
        Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();

        // Begin capture process, set the image format
        PhotoCapture.CreateAsync(false, delegate(PhotoCapture captureObject)
        {
            photoCaptureObject                   = captureObject;
            CameraParameters camParameters       = new CameraParameters();
            camParameters.hologramOpacity        = 0.0f;
            camParameters.cameraResolutionWidth  = cameraResolution.width;
            camParameters.cameraResolutionHeight = cameraResolution.height;
            camParameters.pixelFormat            = CapturePixelFormat.BGRA32;

            // Capture the image from the camera and save it in the App internal folder
            captureObject.StartPhotoModeAsync(camParameters, delegate(PhotoCapture.PhotoCaptureResult result)
            {
                string filename = "CapturedImage.jpg";

                string filePath = Path.Combine(Application.persistentDataPath, filename);

                //Set the filePath for the Api call to find
                //VisionManager.instance.imagePath = filePath;

                //take picture and save to file.
                //photoCaptureObject.TakePhotoAsync(filePath, PhotoCaptureFileOutputFormat.JPG, OnCapturedPhotoToDisk);

                //stop capturing
                //currentlyCapturing = false;

                // NOTE(review): photo mode is started but never stopped here, and no
                // photo is taken while the TakePhotoAsync call above is commented
                // out — confirm this is intentional before shipping.
            });
        });
        return(true);
    }
Example #28
0
 /// <inheritdoc />
 public void Visit(CameraParameters parameters)
 {
     _cameraParameters = parameters;
 }
 /// <summary>
 /// Initializes a new instance of the CameraDrag class.
 /// </summary>
 /// <param name="cameraParameters">The camera parameters, forwarded to the base constructor.</param>
 public CameraDrag(CameraParameters cameraParameters)
     : base(cameraParameters)
 {
 }
Example #30
0
        /// <summary>
        /// Initializes this instance by coroutine.
        /// If already initialized, resources are released first. When a video
        /// capture object exists, video mode is restarted with fresh camera
        /// parameters; otherwise a new HoloLensCameraStream VideoCapture is
        /// created and started. The coroutine then waits (counting frames) for
        /// the first frame, or reports TIMEOUT.
        /// </summary>
        protected override IEnumerator _Initialize()
        {
            // Re-initialization: tear down the previous session and notify listeners.
            if (hasInitDone)
            {
                ReleaseResources();

                if (onDisposed != null)
                {
                    onDisposed.Invoke();
                }
            }

            isInitWaiting = true;

            // Wait for any in-flight video-mode change to finish before starting a new one.
            while (isChangeVideoModeWaiting)
            {
                yield return(null);
            }

            isChangeVideoModeWaiting = true;
            if (videoCapture != null)
            {
                // A capture object already exists: stop video mode, recompute the
                // camera parameters, and restart.
                videoCapture.StopVideoModeAsync(result1 => {
                    cameraParams = CreateCameraParams(videoCapture);
                    videoCapture.StartVideoModeAsync(cameraParams, result2 => {
                        if (!result2.success)
                        {
                            isChangeVideoModeWaiting = false;
                            isInitWaiting            = false;
                            CancelInitCoroutine();

                            if (onErrorOccurred != null)
                            {
                                onErrorOccurred.Invoke(ErrorCode.UNKNOWN);
                            }
                        }
                        else
                        {
                            isChangeVideoModeWaiting = false;
                        }
                    });
                });
            }
            else
            {
                //Fetch a pointer to Unity's spatial coordinate system if you need pixel mapping
                spatialCoordinateSystemPtr = UnityEngine.VR.WSA.WorldManager.GetNativeISpatialCoordinateSystemPtr();

                // No capture object yet: create one asynchronously.
                HoloLensCameraStream.VideoCapture.CreateAync(videoCapture => {
                    // The init coroutine may have been cancelled while we waited.
                    if (initCoroutine == null)
                    {
                        return;
                    }

                    if (videoCapture == null)
                    {
                        Debug.LogError("Did not find a video capture object. You may not be using the HoloLens.");

                        isChangeVideoModeWaiting = false;
                        isInitWaiting            = false;
                        CancelInitCoroutine();

                        if (onErrorOccurred != null)
                        {
                            onErrorOccurred.Invoke(ErrorCode.CAMERA_DEVICE_NOT_EXIST);
                        }

                        return;
                    }

                    this.videoCapture = videoCapture;

                    //Request the spatial coordinate ptr if you want fetch the camera and set it if you need to
                    videoCapture.WorldOriginPtr = spatialCoordinateSystemPtr;

                    cameraParams = CreateCameraParams(videoCapture);

                    // Unsubscribe first so the handler can never be registered twice.
                    videoCapture.FrameSampleAcquired -= OnFrameSampleAcquired;
                    videoCapture.FrameSampleAcquired += OnFrameSampleAcquired;
                    videoCapture.StartVideoModeAsync(cameraParams, result => {
                        if (!result.success)
                        {
                            isChangeVideoModeWaiting = false;
                            isInitWaiting            = false;
                            CancelInitCoroutine();

                            if (onErrorOccurred != null)
                            {
                                onErrorOccurred.Invoke(ErrorCode.UNKNOWN);
                            }
                        }
                        else
                        {
                            isChangeVideoModeWaiting = false;
                        }
                    });
                });
            }

            // Wait for the first frame, counting elapsed frames toward the timeout.
            int  initFrameCount = 0;
            bool isTimeout      = false;

            while (true)
            {
                if (initFrameCount > _timeoutFrameCount)
                {
                    isTimeout = true;
                    break;
                }
                else if (didUpdateThisFrame)
                {
                    // First frame arrived: allocate the pixel buffer and Mats.
                    Debug.Log("HololensCameraStreamToMatHelper:: " + "name:" + "" + " width:" + cameraParams.cameraResolutionWidth + " height:" + cameraParams.cameraResolutionHeight + " fps:" + cameraParams.frameRate);

                    // Reuse the color buffer when its size already matches.
                    if (colors == null || colors.Length != cameraParams.cameraResolutionWidth * cameraParams.cameraResolutionHeight)
                    {
                        colors = new Color32[cameraParams.cameraResolutionWidth * cameraParams.cameraResolutionHeight];
                    }

                    frameMat          = new Mat(cameraParams.cameraResolutionHeight, cameraParams.cameraResolutionWidth, CvType.CV_8UC4);
                    screenOrientation = Screen.orientation;

                    // Pre-allocate a buffer with swapped dimensions when a
                    // 90-degree rotation was requested.
                    if (_requestedRotate90Degree)
                    {
                        rotatedFrameMat = new Mat(cameraParams.cameraResolutionWidth, cameraParams.cameraResolutionHeight, CvType.CV_8UC4);
                    }

                    isInitWaiting = false;
                    initCoroutine = null;

                    if (onInitialized != null)
                    {
                        onInitialized.Invoke();
                    }

                    hasInitDone = true;

                    break;
                }
                else
                {
                    initFrameCount++;
                    yield return(null);
                }
            }

            // Timeout: stop and dispose the capture (if any) and report TIMEOUT.
            if (isTimeout)
            {
                if (videoCapture != null)
                {
                    videoCapture.FrameSampleAcquired -= OnFrameSampleAcquired;

                    isChangeVideoModeWaiting = true;
                    videoCapture.StopVideoModeAsync(result => {
                        videoCapture.Dispose();
                        videoCapture             = null;
                        isChangeVideoModeWaiting = false;
                    });

                    isInitWaiting = false;
                    initCoroutine = null;

                    if (onErrorOccurred != null)
                    {
                        onErrorOccurred.Invoke(ErrorCode.TIMEOUT);
                    }
                }
                else
                {
                    isInitWaiting = false;
                    initCoroutine = null;

                    if (onErrorOccurred != null)
                    {
                        onErrorOccurred.Invoke(ErrorCode.TIMEOUT);
                    }
                }
            }
        }
 /// <summary>
 /// Initializes a new instance of the CameraZoom class.
 /// </summary>
 /// <param name="cameraParameters">The camera parameters, forwarded to the base constructor.</param>
 public CameraZoom(CameraParameters cameraParameters)
     : base(cameraParameters)
 {
 }
Example #32
0
 /// <summary>
 /// Copies the Dir and InitialGain values from <paramref name="other"/> into
 /// the bound controls, after copying the fields handled by the base class.
 /// </summary>
 /// <param name="other">The camera parameters to read from.</param>
 public override void CopyFrom(CameraParameters other)
 {
     base.CopyFrom(other);
     Dir.Control.Text          = other.Dir;
     InitialGain.Control.Value = other.InitialGain;
 }
Example #33
0
 /// <summary>
 /// Writes the Dir and InitialGain control values back into
 /// <paramref name="other"/>, after the fields handled by the base class.
 /// </summary>
 /// <param name="other">The camera parameters to write to.</param>
 public override void CopyTo(CameraParameters other)
 {
     base.CopyTo(other);
     other.Dir         = Dir.Control.Text;
     other.InitialGain = (int)InitialGain.Control.Value;
 }
 /// <summary>
 /// Initializes a new instance of the CameraRotate class.
 /// </summary>
 /// <param name="cameraParameters">The camera parameters, forwarded to the base constructor.</param>
 public CameraRotate(CameraParameters cameraParameters)
     : base(cameraParameters)
 {
 }
Example #35
0
 /// <summary>
 /// Initializes a new instance of the FusionFloatImageFrame class.
 /// Delegates to the base constructor with <c>FusionImageType.Float</c>.
 /// </summary>
 /// <param name="width">Image width.</param>
 /// <param name="height">Image height.</param>
 /// <param name="cameraParameters">The camera parameters.</param>
 public FusionFloatImageFrame(int width, int height, CameraParameters cameraParameters)
     : base(FusionImageType.Float, width, height, cameraParameters)
 {
 }
Example #36
0
 /// <summary>
 /// Initializes a new instance of the FusionColorImageFrame class.
 /// Delegates to the base constructor with <c>FusionImageType.Color</c>.
 /// </summary>
 /// <param name="width">Image width.</param>
 /// <param name="height">Image height.</param>
 /// <param name="cameraParameters">The camera parameters.</param>
 public FusionColorImageFrame(int width, int height, CameraParameters cameraParameters)
     : base(FusionImageType.Color, width, height, cameraParameters)
 {
 }
 /// <summary>
 /// Initializes a new instance of the CameraObject class, storing the camera
 /// parameters for use by derived camera operations.
 /// </summary>
 /// <param name="cameraParameters">The camera parameters to store.</param>
 protected CameraObject(CameraParameters cameraParameters)
 {
     cameraParams = cameraParameters;
 }
 /// <summary>
 /// Initializes a new instance of the FusionPointCloudImageFrame class.
 /// Delegates to the base constructor with <c>FusionImageType.PointCloud</c>.
 /// </summary>
 /// <param name="width">Image width.</param>
 /// <param name="height">Image height.</param>
 /// <param name="cameraParameters">The camera parameters.</param>
 public FusionPointCloudImageFrame(int width, int height, CameraParameters cameraParameters)
     : base(FusionImageType.PointCloud, width, height, cameraParameters)
 {
 }
Example #39
0
        /// <summary>
        /// Creates the Kinect Fusion reconstruction volume and the image
        /// frames used during reconstruction, all sized to the depth stream.
        /// </summary>
        void InitializeFusion()
        {
            // Reconstruction volume: 256 voxels per meter, 512 x 384 x 512 voxels.
            float voxelsPerMeter = 256;
            reconstructionParameters = new ReconstructionParameters( voxelsPerMeter, 512, 384, 512 );

            // Start the camera pose at the world origin.
            worldToCameraTransform = Matrix4.Identity;

            // Create the Fusion reconstruction object (AMP processor, device index -1).
            reconstruction = ColorReconstruction.FusionCreateReconstruction( reconstructionParameters, ReconstructionProcessor.Amp, -1, worldToCameraTransform );

            // Create the Fusion image frames with default camera parameters.
            cameraParameters      = CameraParameters.Defaults;
            depthImageFrame       = new FusionFloatImageFrame( depthWidth, depthHeight, cameraParameters );
            smoothDepthImageFrame = new FusionFloatImageFrame( depthWidth, depthHeight, cameraParameters );
            colorImageFrame       = new FusionColorImageFrame( depthWidth, depthHeight, cameraParameters );
            pointCloudImageFrame  = new FusionPointCloudImageFrame( depthWidth, depthHeight, cameraParameters );
            surfaceImageFrame     = new FusionColorImageFrame( depthWidth, depthHeight, cameraParameters );
        }