/// <summary>
/// Tap handler: starts a photo capture (holograms rendered into the frame,
/// first argument true) unless the user is gazing at an object tagged
/// "Interaction", which is reserved for UI interaction.
/// </summary>
/// <param name="eventData">Click event payload (unused).</param>
public void OnInputClicked(InputClickedEventData eventData)
{
    // Fix: GazeManager.HitObject is null when the gaze ray hits nothing;
    // the original dereferenced .tag unconditionally and could throw a
    // NullReferenceException. No hit => not an interaction => capture.
    GameObject hit = GazeManager.Instance.HitObject;
    if (hit == null || hit.tag != "Interaction")
    {
        PhotoCapture.CreateAsync(true, OnPhotoCaptureCreated);
    }
}
// Coroutine: requests webcam permission, then creates a PhotoCapture
// instance and enters photo mode at the highest supported resolution.
// Sets IsCamInited once the camera is ready.
IEnumerator OpenCamera()
{
    yield return Application.RequestUserAuthorization(UserAuthorization.WebCam);

    PhotoCapture.CreateAsync(false, created =>
    {
        capture = created;

        // Largest resolution by pixel count.
        Resolution best = PhotoCapture.SupportedResolutions
            .OrderByDescending(r => r.width * r.height)
            .First();

        CameraParameters parameters = new CameraParameters
        {
            hologramOpacity = 0.0f,
            cameraResolutionWidth = best.width,
            cameraResolutionHeight = best.height,
            pixelFormat = CapturePixelFormat.BGRA32
        };

        capture.StartPhotoModeAsync(parameters, startResult =>
        {
            if (startResult.success)
            {
                IsCamInited = true;
                Debug.Log("Cam Inited");
            }
        });
    });
}
/// <summary>
/// Begin process of Image Capturing and send To Azure
/// Computer Vision service.
/// </summary>
private void ExecuteImageCaptureAndAnalysis()
{
    // Set the camera resolution to be the highest possible.
    Resolution cameraResolution = PhotoCapture.SupportedResolutions
        .OrderByDescending(res => res.width * res.height)
        .First();

    // The texture is only used as a carrier for the chosen dimensions.
    Texture2D targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);

    // Begin capture process, set the image format.
    PhotoCapture.CreateAsync(false, captureObject =>
    {
        photoCaptureObject = captureObject;

        CameraParameters camParameters = new CameraParameters
        {
            hologramOpacity = 0.0f,
            cameraResolutionWidth = targetTexture.width,
            cameraResolutionHeight = targetTexture.height,
            pixelFormat = CapturePixelFormat.BGRA32
        };

        // Capture the image from the camera and save it in the App internal folder.
        captureObject.StartPhotoModeAsync(camParameters, result =>
        {
            string filename = string.Format(@"CapturedImage{0}.jpg", tapsCount);
            string filePath = Path.Combine(Application.persistentDataPath, filename);

            // Hand the destination path to the analysis service before capture.
            VisionManager.instance.imagePath = filePath;

            photoCaptureObject.TakePhotoAsync(filePath, PhotoCaptureFileOutputFormat.JPG, OnCapturedPhotoToDisk);

            // NOTE(review): cleared as soon as the capture is *requested*,
            // not when it completes — matches the original behavior.
            currentlyCapturing = false;
        });
    });
}
/// <summary>
/// Starts an asynchronous photo capture: picks the second-highest supported
/// resolution, creates a PhotoCapture object, enters photo mode, and takes
/// a picture into memory (handled by OnCapturedPhotoToMemory).
/// </summary>
public void TakePhoto()
{
    taking = true;

    // Resolutions sorted largest-first. Fix: the original indexed [1]
    // unconditionally, which throws IndexOutOfRangeException when the device
    // reports only one resolution; fall back to the highest in that case.
    Resolution[] resolutions = PhotoCapture.SupportedResolutions
        .OrderByDescending(res => res.width * res.height)
        .ToArray();
    Resolution cameraResolution = resolutions.Length > 1 ? resolutions[1] : resolutions[0];

    targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);

    PhotoCapture.CreateAsync(false, delegate(PhotoCapture captureObject)
    {
        Debug.Log("createasync");
        photoCaptureObject = captureObject;

        CameraParameters cameraParameters = new CameraParameters();
        cameraParameters.hologramOpacity = 0.0f;
        cameraParameters.cameraResolutionWidth = cameraResolution.width;
        cameraParameters.cameraResolutionHeight = cameraResolution.height;
        cameraParameters.pixelFormat = CapturePixelFormat.BGRA32;

        // Activate the camera
        photoCaptureObject.StartPhotoModeAsync(cameraParameters, delegate(PhotoCapture.PhotoCaptureResult result)
        {
            // Take a picture
            Debug.Log("create, photo is null: " + (photoCaptureObject == null).ToString());
            Debug.Log("here");
            photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
        });
    });
}
// Use this for initialization.
// Caches a temp-file path, selects the highest camera resolution, wires up
// the stimulus manager, and pre-creates a PhotoCapture object in photo mode.
// NOTE(review): both TakePhotoAsync calls are commented out, so the camera is
// activated but no picture is ever taken here — confirm capture is triggered
// elsewhere (e.g. from Update or an input handler).
void Start()
{
    filename = "tmp.jpg";
    filePath = System.IO.Path.Combine(Application.persistentDataPath, filename);

    // Highest supported resolution by pixel count.
    Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
    targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);

    // Create a PhotoCapture object
    var stimulusManagerObject = GameObject.Find("Stimulus Manager");
    stimulusManager = stimulusManagerObject.GetComponent <StimulusManager>();
    targetTime = 1f;
    state = 0;
    PhotoCapture.CreateAsync(false, delegate(PhotoCapture captureObject)
    {
        photoCaptureObject = captureObject;
        CameraParameters cameraParameters = new CameraParameters();
        cameraParameters.hologramOpacity = 0.0f;
        cameraParameters.cameraResolutionWidth = cameraResolution.width;
        cameraParameters.cameraResolutionHeight = cameraResolution.height;
        cameraParameters.pixelFormat = CapturePixelFormat.BGRA32;
        // Activate the camera
        photoCaptureObject.StartPhotoModeAsync(cameraParameters, delegate(PhotoCapture.PhotoCaptureResult result)
        {
            // Take a picture
            //photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
            //photoCaptureObject.TakePhotoAsync(filePath, PhotoCaptureFileOutputFormat.JPG, OnCapturedPhotoToDisk);
        });
    });
}
/// <summary>
/// Begin process of image capturing and send to Azure Custom Vision Service.
/// </summary>
private void ExecuteImageCaptureAndAnalysis()
{
    // Create a label in world space using the ResultsLabel class.
    // Invisible at this point but correctly positioned where the image was taken.
    SceneOrganiser.Instance.StartAnalysisLabel();

    Debug.Log("Execute Image Capture and Analysis");

    // Set the camera resolution to be the highest possible.
    Resolution cameraResolution = PhotoCapture.SupportedResolutions
        .OrderByDescending(res => res.width * res.height)
        .First();
    Texture2D targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);

    // Begin capture process; holograms are rendered into the image (true).
    PhotoCapture.CreateAsync(true, created =>
    {
        photoCaptureObject = created;

        CameraParameters camParameters = new CameraParameters
        {
            hologramOpacity = 1.0f,
            cameraResolutionWidth = targetTexture.width,
            cameraResolutionHeight = targetTexture.height,
            pixelFormat = CapturePixelFormat.BGRA32
        };

        // Capture the image from the camera and save it in the App internal folder.
        created.StartPhotoModeAsync(camParameters, result =>
        {
            string filename = string.Format(@"CapturedImage{0}.jpg", captureCount);
            filePath = Path.Combine(Application.persistentDataPath, filename);
            captureCount++;
            photoCaptureObject.TakePhotoAsync(filePath, PhotoCaptureFileOutputFormat.JPG, OnCapturedPhotoToDisk);
        });
    });
}
/// <summary>
/// Begin process of image capturing and send to Azure Custom Vision Service.
/// </summary>
private void ExecuteImageCaptureAndAnalysis()
{
    // Highest-resolution mode, carried via the target texture's dimensions.
    cameraResolution = PhotoCapture.SupportedResolutions
        .OrderByDescending(res => res.width * res.height)
        .First();
    targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);

    // Holograms are rendered into the captured frame (first argument true).
    PhotoCapture.CreateAsync(true, created =>
    {
        photoCaptureObject = created;

        CameraParameters camParameters = new CameraParameters
        {
            hologramOpacity = 0f,
            cameraResolutionWidth = targetTexture.width,
            cameraResolutionHeight = targetTexture.height,
            pixelFormat = CapturePixelFormat.BGRA32
        };

        created.StartPhotoModeAsync(camParameters, result =>
        {
            captureCount++;
            photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemoryCallback);
        });
    });
}
/// <summary>
/// Kicks off creation of the photo-capture pipeline, unless a PhotoCapture
/// instance already exists (only one may be active at a time).
/// </summary>
public static void BeginCapture()
{
    if (photoCaptureObject != null)
    {
        return;
    }
    PhotoCapture.CreateAsync(false, OnPhotoCaptureCreated);
}
/// <summary>
/// Begins an in-memory photo capture for the given controller, flashing the
/// screen as feedback. Re-entrant calls while a capture is in flight are ignored.
/// </summary>
/// <param name="mci">Controller to receive the captured photo.</param>
public void StartTakePhoto(MainController mci)
{
    if (taking)
    {
        return;
    }
    taking = true;

    FindObjectOfType <Flasher>().Flash();
    //Debug.Log("starting to take photo");
    mc = mci;

    // Highest supported resolution by pixel count.
    Resolution cameraResolution = PhotoCapture.SupportedResolutions
        .OrderByDescending(res => res.width * res.height)
        .First();
    targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);

    PhotoCapture.CreateAsync(false, created =>
    {
        Debug.Log("createasync");
        photoCaptureObject = created;

        CameraParameters cameraParameters = new CameraParameters
        {
            hologramOpacity = 0.0f,
            cameraResolutionWidth = cameraResolution.width,
            cameraResolutionHeight = cameraResolution.height,
            pixelFormat = CapturePixelFormat.BGRA32
        };

        // Activate the camera, then take a single picture into memory.
        photoCaptureObject.StartPhotoModeAsync(cameraParameters, result =>
        {
            //Debug.Log("create, photo is null: " + (photoCaptureObject == null).ToString());
            //Debug.Log("here");
            photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
        });
    });
}
// Update is called once per frame.
// Polls a cross-thread capture request and, once signalled, runs a visual
// flash countdown before creating the PhotoCapture object on frame 30.
void Update()
{
    // Capture Photo
    // WaitOne(0) is a non-blocking poll: true exactly when the wait handle
    // was signalled (presumably by another thread/component — TODO confirm).
    if (m_cameraCapture.WaitOne(0))
    {
        beginSequence = true;
        AAR.AARCameraProjectorRig.Instance.Follow(m_projectorFocus);
    }
    if (beginSequence)
    {
        // (sequenceCounter >> 2) % 2 alternates the flash color every 4 frames.
        if ((sequenceCounter >> 2) % 2 == 0)
        {
            m_currentFlashColor = FlashColor;
        }
        else
        {
            m_currentFlashColor = Color.black;
        }
        // Kick off the actual capture exactly once, on the 30th tick.
        // NOTE(review): sequenceCounter keeps incrementing afterwards and
        // beginSequence is never reset here — confirm reset happens elsewhere.
        if (sequenceCounter == 30)
        {
            PhotoCapture.CreateAsync(false, OnPhotoCaptureCreated);
        }
        sequenceCounter++;
    }
}
// Tap handler: creates a PhotoCapture object with holograms rendered into
// the frame, enters photo mode at the configured resolution, and takes one
// photo into memory.
public void OnInputClicked(InputClickedEventData eventData)
{
    // Clicks anywhere in the scene are effective.
    //InputManager.Instance.AddGlobalListener(gameObject);
    PhotoCapture.CreateAsync(true, delegate(PhotoCapture captureObject)
    {
        photoCaptureObj = captureObject;
        CameraParameters cameraParameters = new CameraParameters();
        // Full hologram opacity: holograms are composited into the photo.
        cameraParameters.hologramOpacity = 1.0f;
        cameraParameters.cameraResolutionWidth = cameraResolution.width;
        cameraParameters.cameraResolutionHeight = cameraResolution.height;
        cameraParameters.pixelFormat = CapturePixelFormat.BGRA32;
        photoCaptureObj.StartPhotoModeAsync(cameraParameters, delegate(PhotoCapture.PhotoCaptureResult result)
        {
            photoCaptureObj.TakePhotoAsync(OnCapturedPhotoToMemory);
        });
    });
    capturingPhoto = true;
    // NOTE(review): logged immediately after the async request is issued,
    // before creation has actually completed — the message may be premature.
    Debug.Log("Photo Capture CreateAsync Succeed!");
}
// Initializes UI references and the camera: finds the status Text and the
// target Image object, verifies its material, determines the best camera
// resolution, then creates the PhotoCapture instance.
void Start()
{
    gui = GameObject.FindObjectOfType <Text>();

    image = GameObject.Find("Image");
    if (image == null)
    {
        gui.text = "Could not find image object";
        return;
    }

    mat = image.GetComponent <Renderer>().material;
    if (mat == null)
    {
        gui.text = "Could not find renderer or material";
        return;
    }

    // FirstOrDefault on an empty sequence yields a zero-size Resolution
    // struct, so the width check below is the effective "no camera" test.
    Resolution? cameraResolution = PhotoCapture.SupportedResolutions
        .OrderByDescending(res => res.width * res.height)
        .FirstOrDefault();
    if ((cameraResolution == null) || (cameraResolution.Value.width == 0))
    {
        gui.text = "Could not determine camera resolution - are you running on HoloLens?";
        return;
    }

    ImageTexture = new Texture2D(cameraResolution.Value.width, cameraResolution.Value.height);
    PhotoCapture.CreateAsync(false, OnPhotoCaptureCreated);
}
// Initialization: configures photo capture at the LOWEST supported
// resolution (descending sort + Last) and activates the camera, taking one
// picture into memory once photo mode is running.
void Start()
{
    // Descending sort + Last() == worst (smallest) resolution.
    Resolution cameraResolution = PhotoCapture.SupportedResolutions
        .OrderByDescending(res => res.width * res.height)
        .Last();
    targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);

    // Create a PhotoCapture object; holograms are excluded (false) and the
    // callback fires when the instance is ready to be used.
    PhotoCapture.CreateAsync(false, created =>
    {
        // Needed for calling PhotoCapture.StartPhotoModeAsync.
        photoCaptureObject = created;

        CameraParameters cameraParameters = new CameraParameters
        {
            hologramOpacity = 0.0f,
            cameraResolutionWidth = cameraResolution.width,
            cameraResolutionHeight = cameraResolution.height,
            pixelFormat = CapturePixelFormat.BGRA32
        };

        // Activate the web camera, then take a screenshot into memory.
        photoCaptureObject.StartPhotoModeAsync(cameraParameters, result =>
        {
            photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
        });
    });
}
// Tap handler: when no capture is in flight, creates a PhotoCapture object
// (holograms hidden), enters photo mode, and takes one photo into memory.
public void OnInputClicked(InputClickedEventData eventData)
{
    if (!capturingPhoto)
    {
        PhotoCapture.CreateAsync(false, delegate(PhotoCapture captureObject)
        {
            photoCaptureObj = captureObject;
            CameraParameters cameraParameters = new CameraParameters();
            cameraParameters.hologramOpacity = 0.0f;
            cameraParameters.cameraResolutionWidth = cameraResolution.width;
            cameraParameters.cameraResolutionHeight = cameraResolution.height;
            cameraParameters.pixelFormat = CapturePixelFormat.BGRA32;
            // When capturing starts, capturingPhoto becomes true and
            // capturingSucceed becomes false.
            photoCaptureObj.StartPhotoModeAsync(cameraParameters, delegate(PhotoCapture.PhotoCaptureResult result)
            {
                photoCaptureObj.TakePhotoAsync(OnCapturedPhotoToMemoryAsync);
                // NOTE(review): these flags are set only after photo mode
                // starts; a rapid second tap before then re-enters — confirm
                // this race is acceptable.
                capturingPhoto = true;
                evaluating = false;
            });
        });
        UnityEngine.Debug.Log("Photo Capture CreateAsync Succeed!");
    }
}
// Captures one photo at the highest supported resolution and saves it to
// the app's persistent data folder for the VisionManager to analyze.
private void ExecuteImageCaptureAndAnalysis()
{
    Resolution cameraResolution = PhotoCapture.SupportedResolutions
        .OrderByDescending(res => res.width * res.height)
        .First();
    // The texture only carries the chosen dimensions.
    Texture2D targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);

    PhotoCapture.CreateAsync(false, created =>
    {
        photoCaptureObject = created;

        CameraParameters c = new CameraParameters
        {
            hologramOpacity = 0.0f,
            cameraResolutionWidth = targetTexture.width,
            cameraResolutionHeight = targetTexture.height,
            pixelFormat = CapturePixelFormat.BGRA32
        };

        created.StartPhotoModeAsync(c, result =>
        {
            string filename = string.Format(@"CapturedImage{0}.jpg", tapsCount);
            string filepath = Path.Combine(Application.persistentDataPath, filename);

            // Publish the destination path before requesting the capture.
            VisionManager.instance.imagePath = filepath;
            Debug.Log("Saving Photo into file:" + filepath);

            photoCaptureObject.TakePhotoAsync(filepath, PhotoCaptureFileOutputFormat.JPG, OnCapturePhotoToDisk);
            currentlyCapturing = false;
        });
    });
}
// Initialization: configures the camera at its highest resolution, records
// the frame dimensions, and takes one picture into memory.
public void Start()
{
    Resolution cameraResolution = PhotoCapture.SupportedResolutions
        .OrderByDescending(res => res.width * res.height)
        .First();
    targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);

    // Create a PhotoCapture object.
    PhotoCapture.CreateAsync(false, created =>
    {
        photoCaptureObject = created;

        CameraParameters cameraParameters = new CameraParameters
        {
            hologramOpacity = 0.0f,
            cameraResolutionWidth = cameraResolution.width,
            cameraResolutionHeight = cameraResolution.height,
            pixelFormat = CapturePixelFormat.BGRA32
        };

        // Publish the chosen frame size for other consumers.
        width = cameraResolution.width;
        height = cameraResolution.height;

        // Activate the camera, then take a picture.
        photoCaptureObject.StartPhotoModeAsync(cameraParameters, result =>
        {
            photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
        });
    });
}
/// <summary>
/// Starts a single in-memory photo capture (NV12 format).
/// Returns a completed task: false when the camera was never initialized
/// (frame size unknown), true once the capture request has been issued.
/// </summary>
public Task <bool> StartCapture()
{
    // Guard: frame dimensions are only set after camera initialization.
    if (FrameHeight == 0 && FrameWidth == 0)
    {
        Debug.LogError("StartCapture() invoked before camera initialized.");
        return Task.FromResult(false);
    }

    PhotoCapture.CreateAsync(false, created =>
    {
        _photoCaptureObject = created;

        var cameraParameters = new UnityEngine.Windows.WebCam.CameraParameters
        {
            hologramOpacity = 0.0f,
            cameraResolutionWidth = FrameWidth,
            cameraResolutionHeight = FrameHeight,
            pixelFormat = CapturePixelFormat.NV12
        };

        _photoCaptureObject?.StartPhotoModeAsync(cameraParameters, delegate
        {
            _photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
        });
    });

    return Task.FromResult(true);
}
// Creates the PhotoCapture object and takes one in-memory picture.
// Guarded: only one active PhotoCapture is allowed at a time.
private void StartCapture()
{
    // There can only be one active PhotoCapture.
    if (captureStarted)
    {
        return;
    }
    captureStarted = true;

    // Create a PhotoCapture object.
    PhotoCapture.CreateAsync(false, created =>
    {
        photoCaptureObject = created;

        CameraParameters cameraParameters = new CameraParameters
        {
            hologramOpacity = 0.0f,
            cameraResolutionWidth = cameraResolution.width,
            cameraResolutionHeight = cameraResolution.height,
            pixelFormat = CapturePixelFormat.BGRA32
        };

        // Activate the camera; take the picture only if photo mode started.
        photoCaptureObject.StartPhotoModeAsync(cameraParameters, result =>
        {
            if (result.success)
            {
                photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
            }
        });
    });
}
// Initializes OpenCV face detection and the HoloLens photo-capture pipeline:
// picks a supported camera resolution, allocates the texture and Mats,
// loads the Haar cascade, and creates the PhotoCapture object.
void Initialize()
{
    Debug.Log("Initializing...");

    List <Resolution> resolutions = new List <Resolution> (PhotoCapture.SupportedResolutions);
    // Fix: the original indexed [1] unconditionally and threw
    // IndexOutOfRangeException when fewer than two resolutions were
    // reported; fall back to the first entry in that case.
    Resolution selectedResolution = resolutions.Count > 1 ? resolutions [1] : resolutions [0];
    foreach (var item in resolutions)
    {
        Debug.Log("resolution width " + item.width + " height " + item.height);
    }

    m_CameraParameters = new WSAWebCamCameraParameters(WebCamMode.PhotoMode);
    m_CameraParameters.cameraResolutionWidth = selectedResolution.width;
    m_CameraParameters.cameraResolutionHeight = selectedResolution.height;
    m_CameraParameters.hologramOpacity = 0.0f;
    m_CameraParameters.pixelFormat = CapturePixelFormat.BGRA32;

    m_Texture = new Texture2D(selectedResolution.width, selectedResolution.height, TextureFormat.BGRA32, false);
    rgbaMat = new Mat(m_Texture.height, m_Texture.width, CvType.CV_8UC4);
    colors = new Color32[rgbaMat.cols() * rgbaMat.rows()];
    grayMat = new Mat(rgbaMat.rows(), rgbaMat.cols(), CvType.CV_8UC1);
    faces = new MatOfRect();

    // Haar cascade for frontal-face detection.
    cascade = new CascadeClassifier();
    cascade.load(Utils.getFilePath("haarcascade_frontalface_alt.xml"));

    PhotoCapture.CreateAsync(false, OnCreatedPhotoCaptureObject);
}
// Plays a shutter sound and takes one JPEG-format photo into memory at the
// lowest supported resolution; 'holograms' controls hologram compositing.
void TakePhoto()
{
    Vector3 position = new Vector3(newHeadPosition.x, newHeadPosition.y, newHeadPosition.z);
    // Debug.Log("New head position: " + newHeadPosition.ToString());
    Debug.Log("Taking picture");
    // Debug.Log("Rotation:" + Camera.main.transform.rotation.ToString());
    audioData.Play(0);
    photoCount++;

    // Create a PhotoCapture object.
    PhotoCapture.CreateAsync(holograms, created =>
    {
        photoCaptureObject = created;

        // Descending sort + Last() == smallest supported resolution.
        m_cameraResolution = PhotoCapture.SupportedResolutions
            .OrderByDescending(res => res.width * res.height)
            .Last();

        CameraParameters cameraParameters = new CameraParameters
        {
            hologramOpacity = 0.0f,
            cameraResolutionWidth = m_cameraResolution.width,
            cameraResolutionHeight = m_cameraResolution.height,
            pixelFormat = CapturePixelFormat.JPEG
        };

        // Activate the camera, then take a picture into memory.
        photoCaptureObject.StartPhotoModeAsync(cameraParameters, result =>
        {
            photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
        });
    });
}
// Initializes the Dlib face-landmark pipeline and HoloLens photo capture:
// picks a supported camera resolution, allocates texture/Mat buffers,
// loads the shape-predictor data file, and creates the PhotoCapture object.
void Initialize()
{
    Debug.Log("Initializing...");

    List <Resolution> resolutions = new List <Resolution>(PhotoCapture.SupportedResolutions);
    // Fix: the original indexed [1] unconditionally and threw
    // IndexOutOfRangeException when fewer than two resolutions were
    // reported; fall back to the first entry in that case.
    Resolution selectedResolution = resolutions.Count > 1 ? resolutions[1] : resolutions[0];
    foreach (var item in resolutions)
    {
        Debug.Log("resolution width " + item.width + " height " + item.height);
    }

    m_CameraParameters = new WSAWebCamCameraParameters(WebCamMode.PhotoMode);
    m_CameraParameters.cameraResolutionWidth = selectedResolution.width;
    m_CameraParameters.cameraResolutionHeight = selectedResolution.height;
    m_CameraParameters.hologramOpacity = 0.0f;
    m_CameraParameters.pixelFormat = CapturePixelFormat.BGRA32;

    m_Texture = new Texture2D(selectedResolution.width, selectedResolution.height, TextureFormat.BGRA32, false);
    rgbaMat = new Mat(m_Texture.height, m_Texture.width, CvType.CV_8UC4);
    colors = new Color32[rgbaMat.cols() * rgbaMat.rows()];

    dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath(dlibShapePredictorFileName);
    if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
    {
        Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }
    faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

    PhotoCapture.CreateAsync(false, OnCreatedPhotoCaptureObject);
}
/// <summary>
/// Initializes socket communication: connects a StreamSocket to the TX2
/// vision server, sets up little-endian reader/writer streams, then starts
/// HoloLens photo capture (and, when IS_PC_NEEDED is defined, a PC link).
/// On any failure it logs and retries by awaiting itself again.
/// NOTE(review): the retry has no delay and no attempt cap, so an
/// unreachable server causes an unbounded retry loop — confirm acceptable.
/// </summary>
async Task InitializeNetworkAsync()
{
    Debug.Log("初始化网络");
    HostName serverHost = new HostName(HOSTIP_TX2);
    socket_vstream = new StreamSocket();
    try
    {
        await socket_vstream.ConnectAsync(serverHost, PORT_TX2);
        // Little-endian framing on both directions to match the TX2 side.
        writer_vstream = new DataWriter(socket_vstream.OutputStream)
        {
            ByteOrder = ByteOrder.LittleEndian,
        };
        reader_receiving = new DataReader(socket_vstream.InputStream)
        {
            ByteOrder = ByteOrder.LittleEndian
        };
        Debug.Log("视觉模块连接就绪,准备接收数据");
        ShowMsg.UpdateCubeMsg("就绪");
        // Holograms are rendered into the captured frames (first arg true).
        PhotoCapture.CreateAsync(true, OnCaptureCreated_HOLO);
#if IS_PC_NEEDED
        await InitNetForPC();
#endif
    }
    catch (Exception e)
    {
        MyLog.DebugLog("初始化网络连接错误" + e.Message);
        await InitializeNetworkAsync();
    }
}
/// <summary>
/// Begin process of Image Capturing and send To Azure Computer Vision
/// service (via the FaceAnalysis class).
/// </summary>
private void ExecuteImageCaptureAndAnalysis()
{
    // Highest supported resolution; the texture only carries the dimensions.
    Resolution cameraResolution = PhotoCapture.SupportedResolutions
        .OrderByDescending(res => res.width * res.height)
        .First();
    Texture2D targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);

    PhotoCapture.CreateAsync(false, created =>
    {
        photoCaptureObject = created;

        CameraParameters c = new CameraParameters
        {
            hologramOpacity = 0.0f,
            cameraResolutionWidth = targetTexture.width,
            cameraResolutionHeight = targetTexture.height,
            pixelFormat = CapturePixelFormat.BGRA32
        };

        created.StartPhotoModeAsync(c, result =>
        {
            string filename = string.Format(@"CapturedImage{0}.jpg", tapsCount);
            string filePath = Path.Combine(Application.persistentDataPath, filename);

            // Set the image path on the FaceAnalysis class.
            FaceAnalysis.Instance.imagePath = filePath;

            photoCaptureObject.TakePhotoAsync(filePath, PhotoCaptureFileOutputFormat.JPG, OnCapturedPhotoToDisk);
        });
    });
}
// Wires up a Select-gesture listener that triggers a photo capture (with a
// busy guard), then immediately starts an initial capture at startup.
void Awake()
{
    Instance = this;

    // Set up a GestureRecognizer to detect Select gestures.
    recognizer = new GestureRecognizer();
    recognizer.TappedEvent += (source, tapCount, ray) =>
    {
        Debug.Log("tap");
        if (_busy)
        {
            status.GetComponent <TextMesh>().text = "busy...";
            status.SetActive(true);
            return;
        }
        _busy = true;
        status.GetComponent <TextMesh>().text = "taking photo...";
        status.SetActive(true);
        PhotoCapture.CreateAsync(false, OnPhotoCaptureCreated);
    };
    recognizer.StartCapturingGestures();

    // Kick off an initial capture right away.
    status.GetComponent <TextMesh>().text = "taking photo...";
    _busy = true;
    PhotoCapture.CreateAsync(false, OnPhotoCaptureCreated);
}
// Initializes the Dlib face-landmark detector and HoloLens photo capture:
// picks a supported camera resolution, allocates texture/Mat buffers, loads
// the 68-point shape predictor, and creates the PhotoCapture object.
void Initialize()
{
    Debug.Log("Initializing...");

    List <Resolution> resolutions = new List <Resolution> (PhotoCapture.SupportedResolutions);
    // Fix: the original indexed [1] unconditionally and threw
    // IndexOutOfRangeException when fewer than two resolutions were
    // reported; fall back to the first entry in that case.
    Resolution selectedResolution = resolutions.Count > 1 ? resolutions [1] : resolutions [0];
    foreach (var item in resolutions)
    {
        Debug.Log("resolution width " + item.width + " height " + item.height);
    }

    m_CameraParameters = new CameraParameters(WebCamMode.PhotoMode);
    m_CameraParameters.cameraResolutionWidth = selectedResolution.width;
    m_CameraParameters.cameraResolutionHeight = selectedResolution.height;
    m_CameraParameters.hologramOpacity = 0.0f;
    m_CameraParameters.pixelFormat = CapturePixelFormat.BGRA32;

    m_Texture = new Texture2D(selectedResolution.width, selectedResolution.height, TextureFormat.BGRA32, false);
    rgbaMat = new Mat(m_Texture.height, m_Texture.width, CvType.CV_8UC4);
    colors = new Color32[rgbaMat.cols() * rgbaMat.rows()];

    faceLandmarkDetector = new FaceLandmarkDetector(DlibFaceLandmarkDetector.Utils.getFilePath("shape_predictor_68_face_landmarks.dat"));

    PhotoCapture.CreateAsync(false, OnCreatedPhotoCaptureObject);
}
// Switches from Vuforia tracking to HoloLens photo capture: stops the object
// tracker and camera device, then takes one in-memory photo at the highest
// supported resolution.
public void change()
{
    Log.Info("change");
    // Renderer r = gameObject.GetComponent<Renderer>();
    // r.material.SetColor(1, Color.yellow);

    // Vuforia must release the camera before PhotoCapture can use it.
    TrackerManager.Instance.GetTracker <ObjectTracker>().Stop();
    CameraDevice.Instance.Stop();

    Resolution cameraResolution = PhotoCapture.SupportedResolutions
        .OrderByDescending(res => res.width * res.height)
        .First();
    targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);

    // Create a PhotoCapture object.
    PhotoCapture.CreateAsync(false, created =>
    {
        photoCaptureObject = created;

        CameraParameters cameraParameters = new CameraParameters
        {
            hologramOpacity = 0.0f,
            cameraResolutionWidth = cameraResolution.width,
            cameraResolutionHeight = cameraResolution.height,
            pixelFormat = CapturePixelFormat.BGRA32
        };

        // Activate the camera, then take a picture.
        photoCaptureObject.StartPhotoModeAsync(cameraParameters, result =>
        {
            photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
        });
    });
}
// Registers a tap gesture that triggers a photo capture, starts an initial
// capture, and loads API keys/URLs from config.cfg when present.
void Awake()
{
    Instance = this;

    // Set up a GestureRecognizer to detect Select gestures.
    recognizer = new GestureRecognizer();
    recognizer.TappedEvent += (source, tapCount, ray) =>
    {
        Debug.Log("tap");
        status.GetComponent <TextMesh>().text = "taking photo...";
        status.SetActive(true);
        PhotoCapture.CreateAsync(false, OnPhotoCaptureCreated);
    };
    recognizer.StartCapturingGestures();

    PhotoCapture.CreateAsync(false, OnPhotoCaptureCreated);

    // Optional settings file, resolved relative to the working directory.
    if (File.Exists("config.cfg"))
    {
        var cfg = Configuration.LoadFromFile("config.cfg");
        var apiSettings = cfg["API"];
        FaceAPIKey = apiSettings["FaceAPIKey"].StringValue;
        EmotionAPIKey = apiSettings["EmotionAPIKey"].StringValue;
        OpenFaceUrl = apiSettings["OpenFaceUrl"].StringValue;
        Debug.Log("loaded settings from config.cfg");
    }
}
// Startup: configures JPEG photo capture at the LOWEST supported resolution
// (ascending sort + First), creates the PhotoCapture object (with or without
// holograms per Const.HOLO_CAPTURE), then initializes file loading and the
// websocket connection before marking the component ready.
async void Start()
{
    // Initialize camera and camera parameters.
    _captureResolution = PhotoCapture.SupportedResolutions
        .OrderBy(res => res.width * res.height)
        .First();
    _cameraPara = new CameraParameters();
    _cameraPara.hologramOpacity = 0.0f;
    _cameraPara.cameraResolutionWidth = _captureResolution.width;
    _cameraPara.cameraResolutionHeight = _captureResolution.height;
    _cameraPara.pixelFormat = CapturePixelFormat.JPEG;

    if (Const.HOLO_CAPTURE)
    {
        PhotoCapture.CreateAsync(true, OnPhotoCaptureCreatedHOLO);
    }
    else
    {
        PhotoCapture.CreateAsync(false, OnPhotoCaptureCreated);
    }

    // Initialize file loaders.
    await InitializeFileLoading();

    // Initialize network.
    await SetupWebsocket();

    _isInitialized = true;
    _startTime = GetTimeMillis();
}
// Invoked on click/tap. A second click within 15 frames of the previous one
// (while the previous run has finished) triggers a new point-cloud capture
// cycle; the click time is always recorded for the next double-tap check.
void IInputClickHandler.OnInputClicked(InputClickedEventData eventData)
{
    // Fix: the original used the non-short-circuiting '&' between booleans;
    // '&&' is the intended logical AND (same result here, but idiomatic and
    // short-circuiting).
    if (Time.frameCount - time < 15 && finished)
    {
        Debug.Log("ara:clicked");
        finished = false;
        if (!isCapturing)
        {
            audioSource.PlayOneShot(try_get_pcl);
            // Reset all accumulated capture state before a fresh run.
            PointAll.Clear();
            ColorAll.Clear();
            not_hitted_pixel.Clear();
            // Only one PhotoCapture may exist at a time; dispose any leftover.
            if (photoCaptureObject != null)
            {
                photoCaptureObject.Dispose();
                photoCaptureObject = null;
            }
            PhotoCapture.CreateAsync(false, Start_take_picture);
            isCapturing = true;
        }
    }
    // Remember this frame so the next click can detect a double tap.
    time = Time.frameCount;
}
// ############################################# UNITY
/// <summary>
/// One-time setup: records the server endpoint, configures photo capture at
/// the highest supported resolution (BGRA32, holograms hidden), and creates
/// the PhotoCapture object.
/// </summary>
void Start()
{
    ipEndPoint = "http://128.173.236.208:9005";
    imageBufferBytesArray = null;

    // Photo Capture. Fix: this entire initialization block appeared twice
    // verbatim in the original; the redundant second copy was removed (it
    // only re-assigned the same fields with identical values).
    cameraResolution = PhotoCapture.SupportedResolutions
        .OrderByDescending(res => res.width * res.height)
        .First();
    targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height, TextureFormat.BGRA32, false);
    m_CameraParameters = new CameraParameters(WebCamMode.PhotoMode)
    {
        hologramOpacity = 0.0f,
        cameraResolutionWidth = cameraResolution.width,
        cameraResolutionHeight = cameraResolution.height,
        pixelFormat = CapturePixelFormat.BGRA32
    };

    PhotoCapture.CreateAsync(false, OnPhotoCaptureCreated);

    // Debugging
    time_before_send = 0.0f;
}