/// <summary>
/// Loads the projector/camera calibration ensemble from the given XML file and
/// creates one rendering form per projector defined in it.
/// </summary>
/// <param name="path">Path to the ensemble calibration XML file.</param>
public ProjectorFormLoader(String path)
{
    Forms = new List<ProjectorForm>();

    // Load ensemble.xml describing the projector/camera setup.
    // (The original also computed Path.GetDirectoryName(path) into an unused local; removed.)
    var ensemble = ProjectorCameraEnsemble.FromFile(path);

    // Create a D3D11 device on the primary adapter.
    // When using DeviceCreationFlags.Debug on Windows 10, ensure that "Graphics Tools" are
    // installed via Settings/System/Apps & features/Manage optional features.
    // Also, when debugging in VS, "Enable native code debugging" must be selected on the project.
    var factory = new Factory1();
    var adapter = factory.Adapters[0];
    var device = new SharpDX.Direct3D11.Device(adapter, DeviceCreationFlags.None);

    // Shared lock handed to every form so they serialize access to the device.
    Object renderLock = new Object();

    // Create a form for each projector.
    foreach (var projector in ensemble.projectors)
    {
        var form = new ProjectorForm(factory, device, renderLock, projector);
        form.FullScreen = FULLSCREEN_ENABLED; // TODO: fix this so can be called after Show
        form.Show();
        Forms.Add(form);
    }
}
/// <summary>
/// Deserializes a ProjectorCameraEnsemble from a stream of calibration XML.
/// The stream is always closed before returning — even when deserialization fails.
/// </summary>
/// <param name="stream">Stream positioned at the start of the calibration XML.</param>
/// <returns>The deserialized ensemble, or null if reading failed.</returns>
public static ProjectorCameraEnsemble ReadCalibration(Stream stream)
{
    ProjectorCameraEnsemble room = null;
    try
    {
        // Kinect2Calibration appears polymorphically in the XML, so the serializer
        // must be told about it as a known type.
        var knownTypeList = new List<Type>();
        knownTypeList.Add(typeof(ProjectorCameraEnsemble.Camera.Kinect2Calibration));
        var serializer = new DataContractSerializer(typeof(ProjectorCameraEnsemble), knownTypeList);
        room = (ProjectorCameraEnsemble)serializer.ReadObject(stream);
    }
    catch (Exception e)
    {
        Debug.LogError("Error loading configuration file: " + e.Message);
    }
    finally
    {
        // The original only closed on success; close unconditionally so the underlying
        // file handle is not leaked when ReadObject throws.
        stream.Close();
    }
    return (room);
}
// Handler for File > New: asks for projector/camera counts, prompts for a save
// location for the new ensemble XML, then swaps the new ensemble in under the
// render lock and notifies listeners.
private void newToolStripMenuItem_Click(object sender, EventArgs e)
{
    var newDialog = new NewDialog();
    if (newDialog.ShowDialog(this) != DialogResult.OK)
        return;

    var newEnsemble = new ProjectorCameraEnsemble(newDialog.NumProjectors, newDialog.NumCameras);

    var saveFileDialog = new SaveFileDialog
    {
        Filter = "xml files (*.xml)|*.xml|All files (*.*)|*.*",
        FilterIndex = 0,
        RestoreDirectory = true,
    };
    if (saveFileDialog.ShowDialog() != DialogResult.OK)
        return;

    try
    {
        path = saveFileDialog.FileName;
        directory = Path.GetDirectoryName(path);
        newEnsemble.Save(path);
    }
    catch (Exception ex)
    {
        Console.WriteLine("Could not save file to disk.\n" + ex);
    }

    // The new ensemble is published even if the save failed (matching the original
    // flow) — the in-memory state is still valid.
    lock (renderLock)
    {
        ensemble = newEnsemble;
        EnsembleChanged();
    }
}
// Parses the calibration text asset (when one is assigned) into the ensemble and
// marks the loader as done either way. The Unity-overloaded != null check is kept
// so destroyed assets are treated as missing.
public void LoadAsset()
{
    var asset = calibration;
    if (asset != null)
        ensemble = ProjectorCameraEnsemble.ReadCalibration(asset.text);
    loaded = true;
}
/// Loads this projector's calibration (intrinsics and pose) from the ensemble
/// configuration — matched by name — and applies it to the attached Unity Camera.
/// Falls back to the default camera settings when no matching entry is found.
internal void LoadCalibrationData()
{
    cam = this.GetComponent<Camera>();

    // Find this projector's entry in the ensemble by name.
    projConfig = null;
    if (hasCalibration)
    {
        ProjectorCameraEnsemble ensembleConfig = calibrationData.GetEnsemble();
        foreach (ProjectorCameraEnsemble.Projector pc in ensembleConfig.projectors)
        {
            if (pc.name == nameInConfiguration)
            {
                projConfig = pc;
                break; // stop at the first match; names are assumed unique in the configuration
            }
        }
    }

    if (projConfig != null)
    {
        if (displayIndex < 0)
        {
            displayIndex = projConfig.displayIndex;
        }
        //Debug.Log("Projective Rendering - Loading projector calibration information.");
        imageWidth = projConfig.width;
        imageHeight = projConfig.height;

        // Vertical field of view derived from fy (cameraMatrix[1,1]); used by shadows etc.
        cam.aspect = (float)imageWidth / imageHeight;
        float fieldOfViewRad = 2.0f * (float)Math.Atan((((double)(imageHeight)) / 2.0) / projConfig.cameraMatrix[1, 1]);
        float fieldOfViewDeg = fieldOfViewRad * (float)(180.0 / Math.PI);
        cam.fieldOfView = fieldOfViewDeg;

        // OpenCV-style projection matrix, converted right-handed -> left-handed for Unity.
        Matrix4x4 opencvProjMat = GetProjectionMatrix(projConfig.cameraMatrix, cam.nearClipPlane, cam.farClipPlane);
        cam.projectionMatrix = UnityUtilities.ConvertRHtoLH(opencvProjMat);

        //var irCoef = projConfig.lensDistortion.AsFloatArray();
        //! jolaur -- looks like this is not being used and is now 2 elements instead of four in the new xml format
        //! lensDist = new Vector4(irCoef[0], irCoef[1], irCoef[2], irCoef[3]);
        lensDist = new Vector4();

        // Projector pose: RoomAlive (right-handed) -> Unity (left-handed).
        Matrix4x4 worldToLocal = RAT2Unity.Convert(projConfig.pose);
        worldToLocal = UnityUtilities.ConvertRHtoLH(worldToLocal);
        this.transform.localPosition = worldToLocal.ExtractTranslation();
        this.transform.localRotation = worldToLocal.ExtractRotation();
    }
    else
    {
        Debug.Log("Projective Rendering - Using default camera calibration information.");
        lensDist = new Vector4();
    }
}
/// <summary>
/// Deserializes a ProjectorCameraEnsemble from a calibration XML string.
/// </summary>
/// <param name="calibrationString">The calibration XML document as a string.</param>
/// <returns>The deserialized ensemble, or null if reading failed.</returns>
public static ProjectorCameraEnsemble ReadCalibration(string calibrationString)
{
    // Use UTF-8 unconditionally: it is available on every platform profile (the
    // original #if existed because Encoding.ASCII is missing on WSA), and unlike
    // ASCII it does not mangle non-ASCII characters that may appear in the XML.
    byte[] byteArray = Encoding.UTF8.GetBytes(calibrationString);
    ProjectorCameraEnsemble ensemble = null;
    using (MemoryStream stream = new MemoryStream(byteArray))
    {
        ensemble = ReadCalibration(stream);
    }
    return (ensemble);
}
// Reloads the ensemble from 'path' under the render lock. On failure the current
// ensemble is left untouched and listeners are not notified.
void LoadEnsemble()
{
    lock (renderLock)
    {
        ProjectorCameraEnsemble loaded;
        try
        {
            loaded = ProjectorCameraEnsemble.FromFile(path);
        }
        catch (Exception ex)
        {
            Console.WriteLine("Could not read file from disk.\n" + ex);
            return;
        }
        ensemble = loaded;
        Console.WriteLine("Loaded " + path);
        EnsembleChanged();
    }
}
/// <summary>
/// Loads a ProjectorCameraEnsemble from a calibration XML file on disk.
/// </summary>
/// <param name="filename">Path to the calibration XML file.</param>
/// <returns>The deserialized ensemble, or null if the file could not be read.</returns>
public static ProjectorCameraEnsemble Load(string filename)
{
    ProjectorCameraEnsemble room = null;
    try
    {
        // ReadCalibration closes the stream itself, but the using block additionally
        // guarantees the file handle is released if reading throws. (The original
        // passed a bare FileStream, leaking the handle on failure.)
        using (var fileStream = new FileStream(filename, FileMode.Open))
        {
            room = ReadCalibration(fileStream);
        }
    }
    catch (Exception e)
    {
        Debug.LogError("Error loading configuration file: " + e.Message);
    }
    return (room);
}
// encapsulates d3d resources for a camera: textures for the Kinect depth and color
// streams, and a static vertex buffer triangulating the depth-image grid. Initial
// texture contents are loaded from the calibration capture on disk
// (directory/camera<name>/color.tiff and mean.tiff).
public CameraDeviceResource(SharpDX.Direct3D11.Device device, ProjectorCameraEnsemble.Camera camera, Object renderLock, string directory)
{
    this.device = device;
    this.camera = camera;
    this.renderLock = renderLock;

    // Kinect depth image: raw 16-bit depth, CPU-writable (Dynamic/Write) so new
    // frames can be mapped and copied in from the CPU.
    var depthImageTextureDesc = new Texture2DDescription()
    {
        Width = Kinect2Calibration.depthImageWidth,
        Height = Kinect2Calibration.depthImageHeight,
        MipLevels = 1,
        ArraySize = 1,
        Format = SharpDX.DXGI.Format.R16_UInt,
        SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
        Usage = ResourceUsage.Dynamic,
        BindFlags = BindFlags.ShaderResource,
        CpuAccessFlags = CpuAccessFlags.Write,
    };
    depthImageTexture = new Texture2D(device, depthImageTextureDesc);
    depthImageTextureRV = new ShaderResourceView(device, depthImageTexture);

    // Float depth textures, bindable as both render target and shader resource.
    // Two identical textures are created — presumably ping-pong targets for a
    // multi-pass depth filter (TODO confirm against the render loop).
    var floatDepthImageTextureDesc = new Texture2DDescription()
    {
        Width = Kinect2Calibration.depthImageWidth,
        Height = Kinect2Calibration.depthImageHeight,
        MipLevels = 1,
        ArraySize = 1,
        Format = SharpDX.DXGI.Format.R32_Float,
        SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
        Usage = ResourceUsage.Default,
        BindFlags = BindFlags.RenderTarget | BindFlags.ShaderResource,
        CpuAccessFlags = CpuAccessFlags.None,
    };
    floatDepthImageTexture = new Texture2D(device, floatDepthImageTextureDesc);
    floatDepthImageRV = new ShaderResourceView(device, floatDepthImageTexture);
    floatDepthImageRenderTargetView = new RenderTargetView(device, floatDepthImageTexture);

    floatDepthImageTexture2 = new Texture2D(device, floatDepthImageTextureDesc);
    floatDepthImageRV2 = new ShaderResourceView(device, floatDepthImageTexture2);
    floatDepthImageRenderTargetView2 = new RenderTargetView(device, floatDepthImageTexture2);

    // Kinect color image: CPU-writable staging texture for uploads...
    var colorImageStagingTextureDesc = new Texture2DDescription()
    {
        Width = Kinect2Calibration.colorImageWidth,
        Height = Kinect2Calibration.colorImageHeight,
        MipLevels = 1,
        ArraySize = 1,
        Format = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
        SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
        Usage = ResourceUsage.Dynamic,
        BindFlags = BindFlags.ShaderResource,
        CpuAccessFlags = CpuAccessFlags.Write
    };
    colorImageStagingTexture = new Texture2D(device, colorImageStagingTextureDesc);

    // ...and the GPU-side texture sampled by shaders. MipLevels = 0 requests a full
    // mip chain, generated on demand via ResourceOptionFlags.GenerateMipMaps.
    var colorImageTextureDesc = new Texture2DDescription()
    {
        Width = Kinect2Calibration.colorImageWidth,
        Height = Kinect2Calibration.colorImageHeight,
        MipLevels = 0,
        ArraySize = 1,
        Format = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
        SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
        Usage = ResourceUsage.Default,
        BindFlags = BindFlags.ShaderResource | BindFlags.RenderTarget,
        CpuAccessFlags = CpuAccessFlags.None,
        OptionFlags = ResourceOptionFlags.GenerateMipMaps
    };
    colorImageTexture = new Texture2D(device, colorImageTextureDesc);
    colorImageTextureRV = new ShaderResourceView(device, colorImageTexture);

    // Vertex buffer: two triangles (6 vertices) per depth-pixel quad. Each vertex
    // stores (point.X, point.Y) from the depth-to-camera-space table in xy and the
    // integer depth-image pixel coordinates in zw.
    var table = camera.calibration.ComputeDepthFrameToCameraSpaceTable();
    int numVertices = 6 * (Kinect2Calibration.depthImageWidth - 1) * (Kinect2Calibration.depthImageHeight - 1);
    var vertices = new VertexPosition[numVertices];

    // Corner offsets for the two triangles covering one quad.
    Int3[] quadOffsets = new Int3[]
    {
        new Int3(0, 0, 0),
        new Int3(1, 0, 0),
        new Int3(0, 1, 0),
        new Int3(1, 0, 0),
        new Int3(1, 1, 0),
        new Int3(0, 1, 0),
    };

    int vertexIndex = 0;
    for (int y = 0; y < Kinect2Calibration.depthImageHeight - 1; y++)
    {
        for (int x = 0; x < Kinect2Calibration.depthImageWidth - 1; x++)
        {
            for (int i = 0; i < 6; i++)
            {
                int vertexX = x + quadOffsets[i].X;
                int vertexY = y + quadOffsets[i].Y;

                var point = table[Kinect2Calibration.depthImageWidth * vertexY + vertexX];

                var vertex = new VertexPosition();
                vertex.position = new SharpDX.Vector4(point.X, point.Y, vertexX, vertexY);
                vertices[vertexIndex++] = vertex;
            }
        }
    }

    // Copy the vertex data into an immutable-usage GPU buffer via a DataStream.
    var stream = new DataStream(numVertices * VertexPosition.SizeInBytes, true, true);
    stream.WriteRange(vertices);
    stream.Position = 0;

    var vertexBufferDesc = new BufferDescription()
    {
        BindFlags = BindFlags.VertexBuffer,
        CpuAccessFlags = CpuAccessFlags.None,
        Usage = ResourceUsage.Default,
        SizeInBytes = numVertices * VertexPosition.SizeInBytes,
    };
    vertexBuffer = new SharpDX.Direct3D11.Buffer(device, stream, vertexBufferDesc);

    vertexBufferBinding = new VertexBufferBinding(vertexBuffer, VertexPosition.SizeInBytes, 0);

    stream.Dispose();

    // Seed the textures with the calibration-time capture from disk.
    var colorImage = new RoomAliveToolkit.ARGBImage(Kinect2Calibration.colorImageWidth, Kinect2Calibration.colorImageHeight);
    ProjectorCameraEnsemble.LoadFromTiff(imagingFactory, colorImage, directory + "/camera" + camera.name + "/color.tiff");

    var depthImage = new RoomAliveToolkit.ShortImage(Kinect2Calibration.depthImageWidth, Kinect2Calibration.depthImageHeight);
    ProjectorCameraEnsemble.LoadFromTiff(imagingFactory, depthImage, directory + "/camera" + camera.name + "/mean.tiff");

    lock (renderLock) // necessary?
    {
        UpdateColorImage(device.ImmediateContext, colorImage.DataIntPtr);
        UpdateDepthImage(device.ImmediateContext, depthImage.DataIntPtr);
    }
    colorImage.Dispose();
    depthImage.Dispose();
}
// Form that renders the output for one projector; the window title identifies the
// projector by its configured name.
public ProjectorForm(Factory factory, SharpDX.Direct3D11.Device device, Object renderLock, ProjectorCameraEnsemble.Projector projector)
    : base(factory, device, renderLock)
{
    this.projector = projector;
    Text = $"Projector {projector.name}";
}
// Builds the Unity scene from the calibration data: one Kinect client (with a child
// depth-mesh renderer) per camera and one camera GameObject per projector, then a
// projection manager whose viewports tile the screen horizontally, one per projector.
public void BuildSceneComponents()
{
    if (calibrationData.IsValid())
    {
        ProjectorCameraEnsemble ensemble = calibrationData.GetEnsemble();

        // One GameObject per Kinect camera in the ensemble.
        foreach (ProjectorCameraEnsemble.Camera cam in ensemble.cameras)
        {
            GameObject kinectGameObject = new GameObject("Kinect_" + cam.name);
            kinectGameObject.transform.parent = transform;
            RATKinectClient kinect = kinectGameObject.AddComponent<RATKinectClient>();
            kinect.calibrationData = calibrationData;
            kinect.nameInConfiguration = cam.name;
            kinect.UpdateFromCalibrationData();

            // Child object rendering this Kinect's depth data as a surface mesh.
            GameObject deptMeshGameObject = new GameObject("DepthMesh");
            deptMeshGameObject.transform.parent = kinectGameObject.transform;
            deptMeshGameObject.AddComponent<RATDepthMesh>();
            RATDepthMesh dm = deptMeshGameObject.GetComponent<RATDepthMesh>();
            dm.kinectClient = kinect;
            Shader s = Shader.Find("RoomAlive/DepthMeshSurfaceShader");
            dm.surfaceMaterial = new Material(s);
            deptMeshGameObject.transform.localPosition = Vector3.zero;
            deptMeshGameObject.transform.localRotation = Quaternion.identity;

            // this is purely for visualization purposes
            if (kinectModel != null)
            {
                GameObject model = Instantiate(kinectModel);
                model.name = "Kinect3DModel";
                model.transform.parent = kinectGameObject.transform;
                model.transform.localPosition = Vector3.zero;
                model.transform.localRotation = Quaternion.identity;
            }
        }

        // One camera GameObject per projector, configured from its calibration.
        foreach (ProjectorCameraEnsemble.Projector proj in ensemble.projectors)
        {
            GameObject projectorGameObject = new GameObject("Projector_" + proj.name);
            //Instantiate(projectorGameObject);
            projectorGameObject.transform.parent = transform;
            Camera cam = projectorGameObject.AddComponent<Camera>();
            cam.clearFlags = CameraClearFlags.SolidColor;
            cam.backgroundColor = Color.black;
            cam.cullingMask = 0; //should likely be set to render only the real world
            RATProjector projrend = projectorGameObject.AddComponent<RATProjector>();
            projrend.calibrationData = calibrationData;
            projrend.nameInConfiguration = proj.name;
            projrend.LoadCalibrationData();
            //uncomment this if you want to add the option of Dynamic Masking the projection output
            //projectorGameObject.AddComponent<RATDynamicMask>();

            //this is purely for visualization purposes
            if (projectorModel != null)
            {
                GameObject model = Instantiate(projectorModel);
                model.name = "Projector3DModel";
                model.transform.parent = projectorGameObject.transform;
                model.transform.localPosition = Vector3.zero;
                model.transform.localRotation = Quaternion.identity;
            }
        }

        //add projection manager and setup appropriate viewports
        RATProjectionManager projManager = transform.gameObject.AddComponent<RATProjectionManager>();
        projManager.FindProjectionsDepthMeshesAndUsers();

        // Equal-width horizontal strips, one per projector.
        int n = ensemble.projectors.Count;
        float dx = 1f / n;
        projManager.screenViewports = new Rect[ensemble.projectors.Count];
        for (int i = 0; i < n; i++)
        {
            projManager.screenViewports[i] = new Rect(i * dx, 0, dx, 1); // this is a default configuration and it needs to be edited manually if the displays are aranged differently
        }

        // Assign each discovered projection camera its strip, in discovery order.
        int cnt = 0;
        foreach (RATProjector proj in projManager.projections)
        {
            proj.GetComponent<Camera>().rect = projManager.screenViewports[cnt++];
        }
    }
}
// Loads the ensemble configuration from the XML file; returns false (after printing
// a message) when the file is missing or cannot be parsed.
static bool LoadXML()
{
    try
    {
        using (var fileStream = new FileStream(XMLfilename, FileMode.Open))
        {
            // Kinect2Calibration must be registered as a known type for the
            // DataContractSerializer to resolve it from the XML.
            var knownTypeList = new List<Type> { typeof(Kinect2Calibration) };
            var serializer = new DataContractSerializer(typeof(ProjectorCameraEnsemble), knownTypeList);
            ensemble = (ProjectorCameraEnsemble)serializer.ReadObject(fileStream);
        }
        return true;
    }
    catch (FileNotFoundException)
    {
        cout.Print("Could not find XML configuration file.");
    }
    catch (Exception e)
    {
        cout.Print("Could not open configuration file.");
        cout.DebugPrint(e.Message);
    }
    return false;
}
// Discovers Kinect and projector servers on the network (both searches run in
// parallel) and builds a fresh ensemble with one entry per discovered endpoint.
static void DiscoverServers()
{
    cout.VerbosePrint("Finding Kinect and projector servers...");
    var findKServers = Task<Collection<EndpointDiscoveryMetadata>>.Factory.StartNew(DiscoverCameras);
    var findPServers = Task<Collection<EndpointDiscoveryMetadata>>.Factory.StartNew(DiscoverProjectors);
    var kServers = findKServers.Result;
    var pServers = findPServers.Result;

    ensemble = new ProjectorCameraEnsemble(pServers.Count, kServers.Count);

    int index = 0;
    foreach (var server in kServers)
    {
        ensemble.cameras[index].name = index.ToString();
        ensemble.cameras[index].hostNameOrAddress = server.Address.Uri.DnsSafeHost;
        ++index;
    }

    index = 0;
    foreach (var server in pServers)
    {
        ensemble.projectors[index].name = index.ToString();
        ensemble.projectors[index].hostNameOrAddress = server.Address.Uri.DnsSafeHost;
        ensemble.projectors[index].displayIndex = 1; // Projectors are indexed 1 by default
        ++index;
    }

    cout.VerbosePrint("Server search complete.");
}
// Sets up the whole projection-mapping pipeline: loads the ensemble, creates the
// D3D11 device and shaders, per-camera GPU resources, the user-view depth buffer,
// and one output form per projector, then starts the render thread.
public ProjectionMappingSample(string[] args)
{
    // load ensemble.xml
    string path = args[0];
    string directory = Path.GetDirectoryName(path);
    ensemble = RoomAliveToolkit.ProjectorCameraEnsemble.FromFile(path);

    // Create the D3D11 device on the primary adapter.
    // When using DeviceCreationFlags.Debug on Windows 10, ensure that "Graphics Tools"
    // are installed via Settings/System/Apps & features/Manage optional features.
    // Also, when debugging in VS, "Enable native code debugging" must be selected on the project.
    var factory = new Factory1();
    var adapter = factory.Adapters[0];
    device = new SharpDX.Direct3D11.Device(adapter, DeviceCreationFlags.None);

    // shaders
    depthAndColorShader = new DepthAndColorShader(device);
    projectiveTexturingShader = new ProjectiveTexturingShader(device);
    fromUIntPS = new FromUIntPS(device, Kinect2Calibration.depthImageWidth, Kinect2Calibration.depthImageHeight);
    bilateralFilter = new BilateralFilter(device, Kinect2Calibration.depthImageWidth, Kinect2Calibration.depthImageHeight);

    // create device objects for each camera
    foreach (var camera in ensemble.cameras)
        cameraDeviceResources[camera] = new CameraDeviceResource(device, camera, renderLock, directory);

    // Depth buffer for the user's view.
    var userViewDepthBufferDesc = new Texture2DDescription()
    {
        Width = userViewTextureWidth,
        Height = userViewTextureHeight,
        MipLevels = 1,
        ArraySize = 1,
        Format = Format.D32_Float, // necessary?
        SampleDescription = new SampleDescription(1, 0),
        Usage = ResourceUsage.Default,
        BindFlags = BindFlags.DepthStencil,
        CpuAccessFlags = CpuAccessFlags.None
    };
    var userViewDepthStencil = new Texture2D(device, userViewDepthBufferDesc);
    userViewDepthStencilView = new DepthStencilView(device, userViewDepthStencil);

    // One output window per projector.
    foreach (var projector in ensemble.projectors)
    {
        var form = new ProjectorForm(factory, device, renderLock, projector);
        if (fullScreenEnabled)
            form.FullScreen = true; // TODO: fix this so can be called after Show
        form.Show();
        projectorForms.Add(form);
    }

    clock.Start();

    if (liveDepthEnabled)
    {
        foreach (var cameraDeviceResource in cameraDeviceResources.Values)
            cameraDeviceResource.StartLive();
    }

    new System.Threading.Thread(RenderLoop).Start();
}