// Callback invoked by CameraStreamHelper once a VideoCapture instance exists.
// Configures (but does not start) the capture: NOTE(review) — unlike the other
// handlers in this file, this one neither subscribes OnFrameSampleAcquired
// (line is commented out) nor calls StartVideoModeAsync; presumably recording
// is kicked off elsewhere ("Ready to record") — confirm that is intentional.
void OnVideoCaptureCreated(VideoCapture videoCapture)
{
    if (videoCapture == null)
    {
        Debug.LogError("Did not find a video capture object. You may not be using the HoloLens.");
        return;
    }

    this._videoCapture = videoCapture;

    //Request the spatial coordinate ptr if you want fetch the camera and set it if you need to
    CameraStreamHelper.Instance.SetNativeISpatialCoordinateSystemPtr(_spatialCoordinateSystemPtr);

    // Lowest resolution, at that resolution's highest supported frame rate.
    _resolution = CameraStreamHelper.Instance.GetLowestResolution();
    float frameRate = CameraStreamHelper.Instance.GetHighestFrameRate(_resolution);

    //videoCapture.FrameSampleAcquired += OnFrameSampleAcquired;

    //You don't need to set all of these params.
    //I'm just adding them to show you that they exist.
    _cameraParams = new CameraParameters();
    _cameraParams.cameraResolutionHeight = _resolution.height;
    _cameraParams.cameraResolutionWidth = _resolution.width;
    _cameraParams.frameRate = Mathf.RoundToInt(frameRate);
    _cameraParams.pixelFormat = CapturePixelFormat.BGRA32;
    _cameraParams.rotateImage180Degrees = true; //If your image is upside down, remove this line.
    _cameraParams.enableHolograms = false;
    // Fixed exposure; 0.1 is presumably in the plugin's normalized units — TODO confirm.
    _cameraParams.AutoExposureEnabled = false;
    _cameraParams.ManualExposureAmount = 0.1f;

    // UI updates must run on the Unity app thread.
    UnityEngine.WSA.Application.InvokeOnAppThread(() => { _videoPanelUI.SetResolution(_resolution.width, _resolution.height); }, false);

    Debug.Log("Set up video capture. Ready to record.");
}
/// <summary>
/// CameraStreamHelper callback: stores the capture instance, wires the frame
/// handler, prepares the preview texture on the app thread, and starts video mode.
/// </summary>
private void OnVideoCaptureCreated(HoloLensCameraStream.VideoCapture v)
{
    if (v == null)
    {
        Debug.LogError("No VideoCapture found");
        return;
    }

    _videoCapture = v;

    //Request the spatial coordinate ptr if you want fetch the camera and set it if you need to
    CameraStreamHelper.Instance.SetNativeISpatialCoordinateSystemPtr(_spatialCoordinateSystemPtr);

    // Lowest resolution at its highest supported frame rate.
    _resolution = CameraStreamHelper.Instance.GetLowestResolution();
    float frameRate = CameraStreamHelper.Instance.GetHighestFrameRate(_resolution);

    _videoCapture.FrameSampleAcquired += OnFrameSampleAcquired;

    var captureParams = new HoloLensCameraStream.CameraParameters
    {
        cameraResolutionHeight = _resolution.height,
        cameraResolutionWidth = _resolution.width,
        frameRate = Mathf.RoundToInt(frameRate),
        pixelFormat = HoloLensCameraStream.CapturePixelFormat.BGRA32
    };

    // Texture2D creation is only legal on the Unity app thread.
    UnityEngine.WSA.Application.InvokeOnAppThread(
        () => _pictureTexture = new Texture2D(_resolution.width, _resolution.height, TextureFormat.BGRA32, false),
        false);

    _videoCapture.StartVideoModeAsync(captureParams, OnVideoModeStarted);
}
/// <summary>
/// CameraStreamHelper callback: configures the lowest-resolution BGRA32 stream,
/// resizes the preview panel on the app thread, and starts video mode.
/// </summary>
void OnVideoCaptureCreated(VideoCapture videoCapture)
{
    if (videoCapture == null)
    {
        Debug.LogError("Did not find a video capture object. You may not be using the HoloLens.");
        return;
    }

    this._videoCapture = videoCapture;

    _resolution = CameraStreamHelper.Instance.GetLowestResolution();
    float frameRate = CameraStreamHelper.Instance.GetHighestFrameRate(_resolution);

    videoCapture.FrameSampleAcquired += OnFrameSampleAcquired;

    // Only a subset of CameraParameters needs to be set; the rest keep defaults.
    var parameters = new CameraParameters
    {
        cameraResolutionHeight = _resolution.height,
        cameraResolutionWidth = _resolution.width,
        frameRate = Mathf.RoundToInt(frameRate),
        pixelFormat = CapturePixelFormat.BGRA32,
        rotateImage180Degrees = true, // If your image is upside down, remove this.
        enableHolograms = false
    };

    // The panel can only be resized from the Unity app thread.
    UnityEngine.WSA.Application.InvokeOnAppThread(
        () => _videoPanelUI.SetResolution(_resolution.width, _resolution.height),
        false);

    videoCapture.StartVideoModeAsync(parameters, OnVideoModeStarted);
}
// Cannot be called on multiple threads!
/// <summary>
/// CameraStreamHelper callback: resets the frame-processing guard, registers the
/// frame handler, and starts video mode at the lowest resolution / highest rate.
/// </summary>
private void OnVideoCaptureCreated(VideoCapture v)
{
    if (v == null)
    {
        Debug.LogError("No VideoCapture found");
        return;
    }

    videoCapture = v;
    processingFrame = false; // clear the frame-in-flight guard before frames arrive

    //Request the spatial coordinate ptr if you want fetch the camera and set it if you need to
    CameraStreamHelper.Instance.SetNativeISpatialCoordinateSystemPtr(spatialCoordinateSystemPtr);

    _resolution = CameraStreamHelper.Instance.GetLowestResolution();
    float frameRate = CameraStreamHelper.Instance.GetHighestFrameRate(_resolution);

    videoCapture.FrameSampleAcquired += OnFrameSampleAcquired;

    cameraParams = new CameraParameters
    {
        cameraResolutionHeight = _resolution.height,
        cameraResolutionWidth = _resolution.width,
        frameRate = Mathf.RoundToInt(frameRate),
        pixelFormat = CapturePixelFormat.BGRA32
    };

    videoCapture.StartVideoModeAsync(cameraParams, OnVideoModeStarted);
}
/// <summary>
/// CameraStreamHelper callback: logs the chosen frame rate, registers the frame
/// handler, and starts video mode (no image rotation, holograms disabled).
/// </summary>
private void OnVideoCaptureCreated(VideoCapture videoCapture)
{
    if (videoCapture == null)
    {
        Debug.LogError("Did not find a video capture object. You may not be using the HoloLens.");
        return;
    }

    _videoCapture = videoCapture;

    //Request the spatial coordinate ptr if you want fetch the camera and set it if you need to
    CameraStreamHelper.Instance.SetNativeISpatialCoordinateSystemPtr(_spatialCoordinateSystemPtr);

    _resolution = CameraStreamHelper.Instance.GetLowestResolution();
    float frameRate = CameraStreamHelper.Instance.GetHighestFrameRate(_resolution);
    Debug.Log("Frame rate: " + frameRate);

    videoCapture.FrameSampleAcquired += OnFrameSampleAcquired;

    var cameraSetup = new CameraParameters
    {
        cameraResolutionHeight = _resolution.height,
        cameraResolutionWidth = _resolution.width,
        frameRate = Mathf.RoundToInt(frameRate),
        pixelFormat = CapturePixelFormat.BGRA32,
        rotateImage180Degrees = false,
        enableHolograms = false
    };

    videoCapture.StartVideoModeAsync(cameraSetup, OnVideoModeStarted);
}
/// <summary>
/// Restores a previously saved hologram from disk: recreates the picture quad
/// from the stored image and camera matrices, then reattaches the saved YOLO
/// predictions and head position.
/// </summary>
/// <param name="path">Path of the serialized hologram file.</param>
/// <returns>The reconstructed <see cref="HoloPicture"/> instance.</returns>
public static HoloPicture RestoreHologram(string path)
{
    var holo = HoloSaver.Instance.RestoreHologram(path);
    var resolution = new HoloLensCameraStream.Resolution(holo.width, holo.height);

    // Fix: the original also built a position Vector3 and rotation Quaternion from
    // holo.x..holo.qw that were never used — CreateHologram derives placement from
    // the camera-to-world matrix itself. Dead locals removed.
    var picture = CreateHologram(holo.image, resolution, holo.cameraToWorldMatrix, holo.projectionMatrix);

    // Materialize once: the saved rects/labels/confidences are indexed in parallel,
    // and a deferred query would re-read them on every enumeration of Predictions.
    picture.Predictions = Enumerable.Range(0, holo.predictedRects.Count)
        .Select(i => new YoloBoundingBox()
        {
            Label = holo.labels[i],
            Confidence = holo.confidences[i],
            X = holo.predictedRects[i].xMin,
            Y = holo.predictedRects[i].yMin,
            Width = holo.predictedRects[i].width,
            Height = holo.predictedRects[i].height,
        })
        .ToList();

    picture.HeadPos = new Vector3(holo.headX, holo.headY, holo.headZ);

    return picture;
}
// Applies a captured camera frame to this picture object: stores the camera
// matrices, uploads the raw BGRA32 bytes into a texture, and binds texture +
// matrices to the HolographicImageBlend material so the quad renders the image
// re-projected from the capture pose.
// NOTE(review): the 'setPostion' parameter (sic) is never read in this body —
// confirm whether it is vestigial or used by an overload elsewhere.
void ApplyCapture(byte[] data, HoloLensCameraStream.Resolution size, float[] camera2WorldFloat, float[] projectionFloat, bool setPostion = false)
{
    // Keep both the raw float arrays and the converted Matrix4x4 forms.
    this.camera2WorldFloat = camera2WorldFloat;
    this.projectionFloat = projectionFloat;

    this.camera2WorldMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(camera2WorldFloat);
    this.projectionMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(projectionFloat);

    var pictureRenderer = gameObject.GetComponent <Renderer>();
    // Assigning .material instantiates a per-renderer copy; the subsequent
    // sharedMaterial calls therefore mutate that same instance — presumably
    // intentional, but worth confirming against Unity's material semantics.
    pictureRenderer.material = new Material(Shader.Find("AR/HolographicImageBlend"));

    var pictureTexture = new Texture2D(size.width, size.height, TextureFormat.BGRA32, false);

    // Upload bytes to texture
    pictureTexture.LoadRawTextureData(data);
    pictureTexture.wrapMode = TextureWrapMode.Clamp;
    pictureTexture.Apply();

    // Set material parameters
    pictureRenderer.sharedMaterial.SetTexture("_MainTex", pictureTexture);
    pictureRenderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", camera2WorldMatrix.inverse);
    pictureRenderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
    pictureRenderer.sharedMaterial.SetFloat("_VignetteScale", 0f);

    this.Resolution = new HoloLensCameraStream.Resolution(pictureTexture.width, pictureTexture.height);
    // Record the wearer's head position at capture time.
    this.HeadPos = Camera.main.transform.position;

    // time to enable tap-to-place
    pictureRenderer.enabled = true;
}
/// <summary>
/// CameraStreamHelper callback: configures the stream, logs the chosen setup,
/// queues the panel resize onto the main thread, and starts video mode.
/// </summary>
void OnVideoCaptureCreated(VideoCapture videoCapture)
{
    if (videoCapture == null)
    {
        Debug.LogError("Did not find a video capture object. You may not be using the HoloLens.");
        return;
    }

    this._videoCapture = videoCapture;

    //Request the spatial coordinate ptr if you want fetch the camera and set it if you need to
    CameraStreamHelper.Instance.SetNativeISpatialCoordinateSystemPtr(_spatialCoordinateSystemPtr);

    _resolution = CameraStreamHelper.Instance.GetLowestResolution();
    float frameRate = CameraStreamHelper.Instance.GetHighestFrameRate(_resolution);

    videoCapture.FrameSampleAcquired += OnFrameSampleAcquired;

    // Only a subset of CameraParameters needs to be set; the rest keep defaults.
    var captureParams = new CameraParameters
    {
        cameraResolutionHeight = _resolution.height,
        cameraResolutionWidth = _resolution.width,
        frameRate = Mathf.RoundToInt(frameRate),
        pixelFormat = CapturePixelFormat.BGRA32,
        rotateImage180Degrees = true, // If your image is upside down, remove this.
        enableHolograms = false
    };

    Debug.Log("Configuring camera: " + _resolution.width + "x" + _resolution.height + " | " + captureParams.pixelFormat);

    // Defer the UI resize to the main-thread work queue.
    Enqueue(() => _videoPanelUI.SetResolution(_resolution.width, _resolution.height));

    videoCapture.StartVideoModeAsync(captureParams, OnVideoModeStarted);
}
/// <summary>
/// CameraStreamHelper callback: minimal setup — lowest resolution, highest
/// frame rate, BGRA32 — then resize the panel and start video mode.
/// </summary>
void OnVideoCaptureCreated(VideoCapture videoCapture)
{
    if (videoCapture == null)
    {
        Debug.LogError("Did not find a video capture object. You may not be using the HoloLens.");
        return;
    }

    this._videoCapture = videoCapture;

    _resolution = CameraStreamHelper.Instance.GetLowestResolution();
    float frameRate = CameraStreamHelper.Instance.GetHighestFrameRate(_resolution);

    videoCapture.FrameSampleAcquired += OnFrameSampleAcquired;

    var streamParams = new CameraParameters
    {
        cameraResolutionHeight = _resolution.height,
        cameraResolutionWidth = _resolution.width,
        frameRate = Mathf.RoundToInt(frameRate),
        pixelFormat = CapturePixelFormat.BGRA32
    };

    // Panel resize must happen on the Unity app thread.
    UnityEngine.WSA.Application.InvokeOnAppThread(
        () => _videoPanelUI.SetResolution(_resolution.width, _resolution.height),
        false);

    videoCapture.StartVideoModeAsync(streamParams, OnVideoModeStarted);
}
/// <summary>
/// CameraStreamHelper callback. Note: this variant runs at the LOWEST supported
/// frame rate for the lowest resolution, and the preview-panel update is disabled.
/// </summary>
void OnVideoCaptureCreated(VideoCapture videoCapture)
{
    if (videoCapture == null)
    {
        Debug.LogError("Did not find a video capture object. You may not be using the HoloLens.");
        return;
    }

    this.videoCapture = videoCapture;

    //Request the spatial coordinate ptr if you want fetch the camera and set it if you need to
    CameraStreamHelper.Instance.SetNativeISpatialCoordinateSystemPtr(spatialCoordinateSystemPtr);

    resolution = CameraStreamHelper.Instance.GetLowestResolution();
    float frameRate = CameraStreamHelper.Instance.GetLowestFrameRate(resolution);

    videoCapture.FrameSampleAcquired += OnFrameSampleAcquired;

    // Only a subset of CameraParameters needs to be set; the rest keep defaults.
    var captureParams = new CameraParameters
    {
        cameraResolutionHeight = resolution.height,
        cameraResolutionWidth = resolution.width,
        frameRate = Mathf.RoundToInt(frameRate),
        pixelFormat = CapturePixelFormat.BGRA32,
        enableHolograms = false
    };

    // Preview panel update intentionally left disabled:
    // UnityEngine.WSA.Application.InvokeOnAppThread(() => { videoPanelUI.SetResolution(resolution.width, resolution.height); }, false);

    videoCapture.StartVideoModeAsync(captureParams, OnVideoModeStarted);
}
/// <summary>
/// Returns the lowest frame rate the device supports at the given resolution,
/// or 0 if the device reports no supported rates.
/// </summary>
/// <param name="forResolution">Resolution to query supported rates for.</param>
/// <exception cref="Exception">Thrown when no VideoCapture instance exists yet.</exception>
public float GetLowestFrameRate(HoloLensCameraStream.Resolution forResolution)
{
    if (videoCapture == null)
    {
        throw new Exception("Please call this method after a VideoCapture instance has been created.");
    }

    var supportedRates = videoCapture.GetSupportedFrameRatesForResolution(forResolution);

    // Equivalent to OrderBy(r => r).FirstOrDefault(): minimum rate, 0f when empty.
    return supportedRates.DefaultIfEmpty().Min();
}
/// <summary>
/// Instantiates the HoloPicture prefab (located by tag) at the supplied pose
/// and applies the captured image plus camera matrices to the new instance.
/// </summary>
/// <param name="data">Raw bytes of the captured image.</param>
/// <param name="size">Resolution of the captured image.</param>
/// <param name="camera2WorldFloat">Camera -> World matrix as a float array.</param>
/// <param name="projectionFloat">Camera projection matrix as a float array.</param>
/// <param name="positionRotation">World-space position (Item1) and rotation (Item2) for the quad.</param>
protected static HoloPicture CreateHologram(byte[] data, HoloLensCameraStream.Resolution size, float[] camera2WorldFloat, float[] projectionFloat, Tuple <Vector3, Quaternion> positionRotation)
{
    var prefab = GameObject.FindGameObjectWithTag(VideoCaptureTag).GetComponent<HoloPicture>();

    var picture = Instantiate(prefab, positionRotation.Item1, positionRotation.Item2);
    picture.ApplyCapture(data, size, camera2WorldFloat, projectionFloat);

    return picture;
}
/// <summary>
/// Converts pixel coordinates to screen-space coordinates that span from -1 to 1 on both axes.
/// This is the format that is required to determine the z-depth of a given pixel taken by the HoloLens camera.
/// </summary>
/// <param name="pixelCoords">The coordinate of the pixel that should be converted to screen-space.</param>
/// <param name="resolution">The resolution of the image that the pixel came from.</param>
/// <returns>A 2D vector with values between -1 and 1, representing the left-to-right scale within the image dimensions.</returns>
static Vector2 ConvertPixelCoordsToScaledCoords(Vector2 pixelCoords, HoloLensCameraStream.Resolution resolution)
{
    float halfWidth = (float)resolution.width / 2f;
    float halfHeight = (float)resolution.height / 2f;

    //Translate registration to image center;
    pixelCoords.x -= halfWidth;
    pixelCoords.y -= halfHeight;

    //Scale pixel coords to percentage coords (-1 to 1); y is negated because
    //pixel rows grow downward while screen-space y grows upward.
    pixelCoords = new Vector2(pixelCoords.x / halfWidth, pixelCoords.y / halfHeight * -1f);

    return(pixelCoords);
}
// Callback invoked by CameraStreamHelper once a VideoCapture instance exists.
// Sets up the image buffer and chess-pose controller, configures (but does not
// start) the capture — NOTE(review): the FrameSampleAcquired subscription is
// commented out and StartVideoModeAsync is never called here; confirm recording
// is started elsewhere. Ends by enqueuing a MOCK top-down-camera message.
void OnVideoCaptureCreated(VideoCapture videoCapture)
{
    if (videoCapture == null)
    {
        Debug.LogError("Did not find a video capture object. You may not be using the HoloLens.");
        return;
    }

    this._videoCapture = videoCapture;

    //Request the spatial coordinate ptr if you want fetch the camera and set it if you need to
    CameraStreamHelper.Instance.SetNativeISpatialCoordinateSystemPtr(_spatialCoordinateSystemPtr);

    _resolution = CameraStreamHelper.Instance.GetLowestResolution();

    // 4 bytes per pixel (BGRA32) — buffer reused for each processed frame.
    processedImageData = new byte[_resolution.height * _resolution.width * 4];
    initChessPoseController();
    setImageSize(_resolution.height, _resolution.width);

    float frameRate = CameraStreamHelper.Instance.GetHighestFrameRate(_resolution);

    //videoCapture.FrameSampleAcquired += OnFrameSampleAcquired;

    //You don't need to set all of these params.
    //I'm just adding them to show you that they exist.
    _cameraParams = new CameraParameters();
    _cameraParams.cameraResolutionHeight = _resolution.height;
    _cameraParams.cameraResolutionWidth = _resolution.width;
    _cameraParams.frameRate = Mathf.RoundToInt(frameRate);
    _cameraParams.pixelFormat = CapturePixelFormat.BGRA32;
    _cameraParams.rotateImage180Degrees = true; //If your image is upside down, remove this line.
    _cameraParams.enableHolograms = false;
    // Auto exposure currently on; the manual-exposure experiment is kept below for reference.
    _cameraParams.AutoExposureEnabled = true;
    //_cameraParams.AutoExposureEnabled = false;
    //_cameraParams.ManualExposureAmount = 0.1f;

    // UI updates must run on the Unity app thread.
    UnityEngine.WSA.Application.InvokeOnAppThread(() => { _videoPanelUI.SetResolution(_resolution.width, _resolution.height); }, false);

    Debug.Log("Set up video capture. Ready to record.");
    Debug.Log("DUMMY INIT: mocking input coming in from top-down cam");

    // DUMMY:
    // act as if we have received the mock message...
    incomingMessageQueue.Enqueue(TEST_RECEIVED_MSG_FROM_TOPDOWN); // COMMENT THIS LINE OUT if you are not wanting to use mock incoming top-down camera app data
}
/// <summary>
/// Returns the highest frame rate the device supports at the given resolution,
/// or 0 if the device reports no supported rates. Logs each supported rate.
/// </summary>
/// <param name="forResolution">Resolution to query supported rates for.</param>
/// <exception cref="Exception">Thrown when no VideoCapture instance exists yet.</exception>
public float GetHighestFrameRate(HoloLensCameraStream.Resolution forResolution)
{
    if (videoCapture == null)
    {
        throw new Exception("Please call this method after a VideoCapture instance has been created.");
    }

    // Fix: materialize once and reuse — the original called
    // GetSupportedFrameRatesForResolution a second time for the return value,
    // querying the device again (and a lazy sequence would enumerate twice).
    var frameRates = videoCapture.GetSupportedFrameRatesForResolution(forResolution).ToList();

    Debug.Log("support frames");
    foreach (var rate in frameRates)
    {
        Debug.Log(rate);
    }

    return frameRates.OrderByDescending(r => r).FirstOrDefault();
}
/// <summary>
/// Builds camera parameters by choosing the supported resolution whose pixel
/// count is closest to the requested width*height, then the supported frame
/// rate closest to the requested FPS. BGRA32, holograms disabled.
/// </summary>
/// <param name="videoCapture">Capture instance used to query device capabilities.</param>
/// <returns>Configured camera parameters for StartVideoModeAsync.</returns>
protected virtual HoloLensCameraStream.CameraParameters CreateCameraParams(HoloLensCameraStream.VideoCapture videoCapture)
{
    // Fix: the original enumerated GetSupportedResolutions twice (Min, then a
    // First scan for the element matching that min) and likewise for frame
    // rates. A stable OrderBy().First() selects the identical element
    // (first one achieving the minimum difference) in a single pass each.
    HoloLensCameraStream.Resolution resolution = videoCapture.GetSupportedResolutions()
        .OrderBy(r => Mathf.Abs((r.width * r.height) - (_requestedWidth * _requestedHeight)))
        .First();

    float frameRate = videoCapture.GetSupportedFrameRatesForResolution(resolution)
        .OrderBy(f => Mathf.Abs(f - _requestedFPS))
        .First();

    HoloLensCameraStream.CameraParameters cameraParams = new HoloLensCameraStream.CameraParameters();
    cameraParams.cameraResolutionHeight = resolution.height;
    cameraParams.cameraResolutionWidth = resolution.width;
    cameraParams.frameRate = Mathf.RoundToInt(frameRate);
    cameraParams.pixelFormat = CapturePixelFormat.BGRA32;
    cameraParams.enableHolograms = false;

    return(cameraParams);
}
/// <summary>
/// Builds camera parameters by choosing the supported resolution whose pixel
/// count is closest to the requested width*height, then the supported frame
/// rate closest to the requested FPS. Pixel format follows the requested
/// output color format (NV12 for grayscale, otherwise BGRA32); rotation,
/// holograms, stabilization and the recording indicator are all disabled.
/// </summary>
/// <param name="videoCapture">Capture instance used to query device capabilities.</param>
/// <returns>Configured camera parameters for StartVideoModeAsync.</returns>
protected virtual HoloLensCameraStream.CameraParameters CreateCameraParams(HoloLensCameraStream.VideoCapture videoCapture)
{
    // Fix: the original enumerated GetSupportedResolutions twice (Min, then a
    // First scan for the matching element) and likewise for frame rates. A
    // stable OrderBy().First() selects the identical element in one pass each.
    HoloLensCameraStream.Resolution resolution = videoCapture.GetSupportedResolutions()
        .OrderBy(r => Mathf.Abs((r.width * r.height) - (_requestedWidth * _requestedHeight)))
        .First();

    float frameRate = videoCapture.GetSupportedFrameRatesForResolution(resolution)
        .OrderBy(f => Mathf.Abs(f - _requestedFPS))
        .First();

    HoloLensCameraStream.CameraParameters cameraParams = new HoloLensCameraStream.CameraParameters();
    cameraParams.cameraResolutionHeight = resolution.height;
    cameraParams.cameraResolutionWidth = resolution.width;
    cameraParams.frameRate = Mathf.RoundToInt(frameRate);
    cameraParams.pixelFormat = (outputColorFormat == ColorFormat.GRAY) ? CapturePixelFormat.NV12 : CapturePixelFormat.BGRA32;
    cameraParams.rotateImage180Degrees = false;
    cameraParams.enableHolograms = false;
    cameraParams.enableVideoStabilization = false;
    cameraParams.recordingIndicatorVisible = false;

    return(cameraParams);
}
//TODO: This whole function is in progress and needs to be understood and fixed. It doesn't work properly.
// Attempts to project a pixel into world space: builds a camera-space ray for
// the pixel, rotates it into world space, then scales it by a depth obtained
// either from a physics raycast or from a caller-supplied reference point.
public static Vector3 PixelCoordToWorldCoord(Matrix4x4 viewTransform, Matrix4x4 projectionTransform, HoloLensCameraStream.Resolution resolution, Vector2 pixelCoordinates, float depthOffset = 0, Vector3? referenceDepthPoint = null)
{
    pixelCoordinates = ConvertPixelCoordsToScaledCoords(pixelCoordinates, resolution);

    float focalLengthX = projectionTransform.GetColumn(0).x;
    float focalLengthY = projectionTransform.GetColumn(1).y;
    //NOTE(review): m20/m21 are unusual picks for principal-point offsets — the
    //sibling overload uses column 2 normalized by its z; this may be part of
    //why the function doesn't work. Confirm against the projection layout.
    float centerOffsetX = projectionTransform.m20;
    float centerOffsetY = projectionTransform.m21;

    var dirRay = new Vector3(pixelCoordinates.x / focalLengthX, pixelCoordinates.y / focalLengthY, 1.0f).normalized; //Direction is in camera space

    var cameraPositionOffset = new Vector3(centerOffsetX / 2f, centerOffsetY / 2f);
    Vector3 centerPosition = viewTransform.MultiplyPoint(cameraPositionOffset);
    //centerPosition += frameSample.worldPosition;

    // Rotate the camera-space ray into world space (dot with view-transform rows).
    var direction = new Vector3(Vector3.Dot(dirRay, viewTransform.GetRow(0)), Vector3.Dot(dirRay, viewTransform.GetRow(1)), Vector3.Dot(dirRay, viewTransform.GetRow(2)));

    //Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

    var ray = new Ray(centerPosition, direction);

    // Depth: physics raycast along the pixel ray unless a reference point is given.
    var depth = 1f;
    if (referenceDepthPoint.HasValue == false)
    {
        RaycastHit hit;
        if (Physics.Raycast(ray, out hit))
        {
            depth = Vector3.Magnitude(hit.point - centerPosition);
        }
    }
    else
    {
        depth = Vector3.Magnitude(referenceDepthPoint.Value - centerPosition);
    }

    // Pull the result toward the camera by depthOffset before projecting.
    depth -= depthOffset;

    return(centerPosition + direction * depth);
}
/// <summary>
/// Stub: enumerating supported frame rates is not implemented in this build
/// (e.g. non-HoloLens / editor platform).
/// </summary>
/// <param name="resolution">Resolution whose frame rates would be queried.</param>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public IEnumerable <float> GetSupportedFrameRatesForResolution(Resolution resolution)
{
    throw new NotImplementedException();
}
/// <summary>
/// Creates a quad hologram to display image capture. Positions it in front of the camera
/// </summary>
/// <param name="data">Raw bytes of the image</param>
/// <param name="size">Resolution of the captured image</param>
/// <param name="camera2WorldFloat">Camera -> World matrix as a float array</param>
/// <param name="projectionFloat">Camera projection matrix as a float array</param>
/// <returns>The instantiated HoloPicture with the capture applied</returns>
public static HoloPicture CreateHologram(byte[] data, HoloLensCameraStream.Resolution size, float[] camera2WorldFloat, float[] projectionFloat)
{
    // Derive the placement pose from the capture's camera-to-world matrix.
    var positionRotation = GetPositionFromCamera(camera2WorldFloat);

    return(CreateHologram(data, size, camera2WorldFloat, projectionFloat, positionRotation));
}
/// <summary>
/// Helper method for pixel projection into Unity3D world space.
/// Returns a Vector3 whose direction runs from the camera's optical center to the given pixel.
/// Based on: https://developer.microsoft.com/en-us/windows/mixed-reality/locatable_camera#pixel_to_application-specified_coordinate_system
/// </summary>
/// <param name="cameraToWorldMatrix">The camera to Unity world matrix.</param>
/// <param name="projectionMatrix">Projection Matrix.</param>
/// <param name="cameraResolution">The resolution of the image that the pixel came from.</param>
/// <param name="pixelCoordinates">The coordinate of the pixel that should be converted to world-space.</param>
/// <returns>Vector3 with direction: optical center to camera world-space coordinates</returns>
public static Vector3 PixelCoordToWorldCoord(Matrix4x4 cameraToWorldMatrix, Matrix4x4 projectionMatrix, HoloLensCameraStream.Resolution cameraResolution, Vector2 pixelCoordinates)
{
    // Normalize the pixel into the -1..1 image-plane range.
    pixelCoordinates = ConvertPixelCoordsToScaledCoords(pixelCoordinates, cameraResolution);

    float focalLengthX = projectionMatrix.GetColumn(0).x;
    float focalLengthY = projectionMatrix.GetColumn(1).y;

    // Principal-point offsets are normalized by the projection's third-column z
    // (the Microsoft docs express the centers in normalized form).
    float normFactor = projectionMatrix.GetColumn(2).z;
    float centerX = projectionMatrix.GetColumn(2).x / normFactor;
    float centerY = projectionMatrix.GetColumn(2).y / normFactor;

    // Ray through the pixel, expressed in camera space.
    var cameraSpaceRay = new Vector3(
        (pixelCoordinates.x - centerX) / focalLengthX,
        (pixelCoordinates.y - centerY) / focalLengthY,
        1.0f / normFactor);

    // Rotate into world space: dot the camera-space ray with each row of camera-to-world.
    return new Vector3(
        Vector3.Dot(cameraToWorldMatrix.GetRow(0), cameraSpaceRay),
        Vector3.Dot(cameraToWorldMatrix.GetRow(1), cameraSpaceRay),
        Vector3.Dot(cameraToWorldMatrix.GetRow(2), cameraSpaceRay));
}
//This method is still in progress
//NOTE(review): intersects the pixel ray with a caller-supplied depth plane; the
//ray is cast with direction * -1 and the result uses centerPosition - direction * depth.
//The sign convention is unverified (method is explicitly unfinished) — confirm.
public static Vector3 PixelCoordToWorldCoord(Matrix4x4 cameraToWorldMatrix, Matrix4x4 projectionMatrix, HoloLensCameraStream.Resolution cameraResolution, Vector2 pixelCoordinates, Plane depthPlane)
{
    pixelCoordinates = ConvertPixelCoordsToScaledCoords(pixelCoordinates, cameraResolution);

    float focalLengthX = projectionMatrix.GetColumn(0).x;
    float focalLengthY = projectionMatrix.GetColumn(1).y;

    Vector3 dirRay = new Vector3(pixelCoordinates.x / focalLengthX, pixelCoordinates.y / focalLengthY, 1.0f).normalized; //Direction is in camera space

    // Camera origin in world space (translation component of camera-to-world).
    Vector3 centerPosition = cameraToWorldMatrix.MultiplyPoint(Vector3.zero);

    // Rotate the camera-space ray into world space (dot with camera-to-world rows).
    Vector3 direction = new Vector3(Vector3.Dot(dirRay, cameraToWorldMatrix.GetRow(0)), Vector3.Dot(dirRay, cameraToWorldMatrix.GetRow(1)), Vector3.Dot(dirRay, cameraToWorldMatrix.GetRow(2)));

    // Plane.Raycast overwrites depth with the hit distance (0 when the ray misses),
    // so the 1f initializer is effectively a placeholder.
    float depth = 1f;
    Ray ray = new Ray(centerPosition, direction * -1);
    depthPlane.Raycast(ray, out depth);

    return(centerPosition - direction * depth);
}