public void TestPointLightRenderer1()
{
    // Scene: a point light at (10,10,10) placed inside a sphere centered on the
    // same point, so the light is completely shielded, plus a small sphere in
    // front of the camera. With the light blocked, the hit pixel must show the
    // renderer's ambient color and every miss its background color.
    World world = new World();
    world.addPointLight(new PointLight(new Point(10f, 10f, 10f), Constant.White));
    world.addShape(new Sphere(Transformation.Translation(new Vec(10f, 10f, 10f))));

    Sphere sphere = new Sphere(
        transformation: Transformation.Translation(new Vec(2f, 0f, 0f))
                        * Transformation.Scaling(new Vec(0.2f, 0.2f, 0.2f)),
        material: new Material(Brdf: new DiffuseBRDF(pig: new UniformPigment(Constant.White))));
    world.addShape(sphere);

    HdrImage image = new HdrImage(3, 3);
    OrthogonalCamera camera = new OrthogonalCamera();
    ImageTracer tracer = new ImageTracer(i: image, c: camera);
    PointLightRender renderer = new PointLightRender(world);
    tracer.fireAllRays(renderer);

    // Only the central pixel (1,1) hits the small sphere; all others miss.
    int assertNumber = 1;
    for (int row = 0; row < 3; row++)
    {
        for (int col = 0; col < 3; col++)
        {
            Color expected = (col == 1 && row == 1) ? renderer.ambientColor : renderer.backgroundColor;
            Assert.True(image.getPixel(col, row).isClose(expected),
                        "TestPointLight rendered failed (light source shielded by a sphere) - Assert " + assertNumber + "/9");
            assertNumber++;
        }
    }
}
public void TestFlatRender()
{
    // A small sphere with a uniform (1,2,3) pigment sits in front of an
    // orthogonal camera. The flat renderer must paint the single hit pixel
    // with the sphere's color and every miss pixel black.
    Color sphereColor = new Color(1f, 2f, 3f);
    Sphere sphere = new Sphere(
        transformation: Transformation.Translation(new Vec(2f, 0f, 0f))
                        * Transformation.Scaling(new Vec(0.2f, 0.2f, 0.2f)),
        material: new Material(Brdf: new DiffuseBRDF(pig: new UniformPigment(sphereColor))));

    World world = new World();
    world.addShape(sphere);

    HdrImage image = new HdrImage(3, 3);
    OrthogonalCamera camera = new OrthogonalCamera();
    ImageTracer tracer = new ImageTracer(image, camera);
    FlatRender renderer = new FlatRender(world);
    tracer.fireAllRays(renderer);

    // Only the central pixel (1,1) hits the sphere; all others stay black.
    int assertNumber = 1;
    for (int row = 0; row < 3; row++)
    {
        for (int col = 0; col < 3; col++)
        {
            Color expected = (col == 1 && row == 1) ? sphereColor : Constant.Black;
            Assert.True(image.getPixel(col, row).isClose(expected),
                        "TestFlatRender failed - Assert " + assertNumber + "/9");
            assertNumber++;
        }
    }
}
/// <summary>
/// Creates the project view-model: wires every mouse/keyboard command to its
/// handler, creates the selection manager, camera and D3D11 editor surface,
/// registers the available tools, and initializes the shared GPU data.
/// </summary>
/// <param name="mainWindow">The owning main-window view-model.</param>
/// <param name="dxElement">The WPF element hosting the DirectX surface.</param>
public ProjectMV(MainWindowVM mainWindow, DXElement dxElement)
    : base("Unbenanntes Project") // German for "untitled project"
{
    isDisposed = false;
    _mainWindow = mainWindow;
    _dxElement = dxElement;

    // Relay all view input events to the corresponding handlers. MouseMove is
    // the only command with a can-execute guard.
    MouseDownCommand = new RelayCommand(p => handleMouseDown(p as MouseEventArgs));
    MouseMoveCommand = new RelayCommand(p => handleMouseMove(p as MouseEventArgs), p => canHandleMouseMove(p as MouseEventArgs));
    MouseUpCommand = new RelayCommand(p => handleMouseUp(p as MouseEventArgs));
    MouseWheelCommand = new RelayCommand(p => handleMouseWheel(p as MouseWheelEventArgs));
    PreviewMouseWheelCommand = new RelayCommand(p => handlePreviewMouseWheel(p as MouseWheelEventArgs));
    KeyDownCommand = new RelayCommand(p => handleKeyDown(p as KeyEventArgs));
    KeyUpCommand = new RelayCommand(p => handleKeyUp(p as KeyEventArgs));
    MouseEnterCommand = new RelayCommand(p => handleMouseEnter(p as MouseEventArgs));
    MouseLeaveCommand = new RelayCommand(p => handleMouseLeave(p as MouseEventArgs));

    //_models = new List<Models.BaseModel>();
    //Elements = new ObservableCollection<Models.BaseModel>();
    Items = new SelectionManager();
    _camera = new OrthogonalCamera();
    // The editor surface shares this project's camera instance.
    D3D11 = new D3D11EditorVM(this) { Camera = _camera };
    //CurrentTool = new Tools.Rectangle(D3D11, _models, _camera);
    Tools = new ObservableCollection <Tools.BaseTool>();
    Tools.Add(new Tools.Rectangle(D3D11, Items, _camera));
    Tools.Add(new Tools.Selection(Items, _camera));
    SelectedTool = Tools[0]; // the rectangle tool is active by default

    // Initialize the GPU Data (must run after D3D11 is created).
    Models.Polygon.Init(D3D11);
    Models.BaseModel.Init(D3D11, dxElement);
}
void TestOrthogonalCameraTransform()
{
    // Translate the camera by -2 along y and rotate it 90 degrees around z,
    // then check that the central ray lands on the transformed origin.
    OrthogonalCamera transformedCamera = new OrthogonalCamera(
        transformation: Transformation.Translation(-2.0f * Constant.VEC_Y)
                        * Transformation.RotationZ((float)Math.PI / 2.0f));

    Ray centralRay = transformedCamera.fireRay(0.5f, 0.5f);
    Assert.True(centralRay.at(1.0f).isClose(new Point(0.0f, -2.0f, 0.0f)),
                "testOrthogonalCameraTransform failed - Assert 1/1");
}
/// <summary>
/// Gets the camera of the BCF viewpoint. It returns the value within an optional,
/// which is None, if the BCF viewpoint has no camera set. When both camera kinds
/// are present, the orthogonal camera wins (it is assigned last).
/// </summary>
/// <returns>The optional containing the camera.</returns>
public Option <Camera> GetCamera()
{
    Camera camera = null;

    var bcfPerspective = Viewpoint?.Perspective_camera;
    if (bcfPerspective != null)
    {
        camera = new PerspectiveCamera
        {
            FieldOfView = bcfPerspective.Field_of_view.ToDecimal(),
            Position = new Position(
                bcfPerspective.Camera_view_point.ToVector3(),
                bcfPerspective.Camera_direction.ToVector3(),
                bcfPerspective.Camera_up_vector.ToVector3())
        };
    }

    var bcfOrthogonal = Viewpoint?.Orthogonal_camera;
    if (bcfOrthogonal != null)
    {
        camera = new OrthogonalCamera
        {
            ViewToWorldScale = bcfOrthogonal.View_to_world_scale.ToDecimal(),
            Position = new Position(
                bcfOrthogonal.Camera_view_point.ToVector3(),
                bcfOrthogonal.Camera_direction.ToVector3(),
                bcfOrthogonal.Camera_up_vector.ToVector3())
        };
    }

    return(camera.SomeNotNull());
}
/// <summary>
/// Renders all items.
/// </summary>
/// <param name="deviceContext">The device context.</param>
/// <param name="camera">The camera.</param>
public void Render(DeviceContext deviceContext, OrthogonalCamera camera)
{
    // First pass: draw the content of every item.
    foreach (var item in AllItems)
    {
        item.RenderContent(deviceContext, camera);
    }

    // Second pass: overlay the bounding boxes of the current selection on top.
    foreach (var selectedItem in SelectedItems)
    {
        selectedItem.RenderBoundingBox(deviceContext, camera);
    }
}
/// <summary>
/// Asserts that two orthogonal cameras are equal: same view-to-world scale and
/// identical direction, up-vector and view-point components.
/// </summary>
/// <param name="expected">The expected camera.</param>
/// <param name="actual">The camera under test.</param>
public static void CompareOrthogonalCameras(OrthogonalCamera expected, OrthogonalCamera actual)
{
    Assert.Equal(expected.ViewToWorldScale, actual.ViewToWorldScale);

    var expectedDirection = expected.CameraDirection;
    var actualDirection = actual.CameraDirection;
    Assert.Equal(expectedDirection.X, actualDirection.X);
    Assert.Equal(expectedDirection.Y, actualDirection.Y);
    Assert.Equal(expectedDirection.Z, actualDirection.Z);

    var expectedUp = expected.CameraUpVector;
    var actualUp = actual.CameraUpVector;
    Assert.Equal(expectedUp.X, actualUp.X);
    Assert.Equal(expectedUp.Y, actualUp.Y);
    Assert.Equal(expectedUp.Z, actualUp.Z);

    var expectedViewPoint = expected.CameraViewPoint;
    var actualViewPoint = actual.CameraViewPoint;
    Assert.Equal(expectedViewPoint.X, actualViewPoint.X);
    Assert.Equal(expectedViewPoint.Y, actualViewPoint.Y);
    Assert.Equal(expectedViewPoint.Z, actualViewPoint.Z);
}
/// <summary>
/// Returns true if this camera has any values set and should therefore be serialized
/// </summary>
/// <param name="camera">The orthogonal camera to inspect.</param>
/// <returns>True when any component exceeds the 0.01 zero-tolerance.</returns>
public static bool IsAnyValueSet(this OrthogonalCamera camera)
{
    // Bug fix: the original compared CameraUpVector.Z and CameraDirection.Z
    // three times each (copy-paste error) instead of the X, Y and Z components,
    // so a camera whose only non-zero up/direction values were in X or Y was
    // wrongly reported as unset.
    return(Math.Abs(camera.ViewToWorldScale) > 0.01 ||
           Math.Abs(camera.CameraViewPoint.X) > 0.01 ||
           Math.Abs(camera.CameraViewPoint.Y) > 0.01 ||
           Math.Abs(camera.CameraViewPoint.Z) > 0.01 ||
           Math.Abs(camera.CameraUpVector.X) > 0.01 ||
           Math.Abs(camera.CameraUpVector.Y) > 0.01 ||
           Math.Abs(camera.CameraUpVector.Z) > 0.01 ||
           Math.Abs(camera.CameraDirection.X) > 0.01 ||
           Math.Abs(camera.CameraDirection.Y) > 0.01 ||
           Math.Abs(camera.CameraDirection.Z) > 0.01);
}
/// <summary>
/// Renders the rubber-band rectangle spanned between the mouse-down position
/// and the current mouse position. Does nothing while the rectangle is hidden.
/// </summary>
/// <param name="deviceContext">The device context to issue draw calls on.</param>
/// <param name="camera">The camera supplying view and projection matrices.</param>
public override void Render(DeviceContext deviceContext, OrthogonalCamera camera)
{
    if (!_isVisible)
    {
        return;
    }

    Matrix view = camera.View;
    Matrix projection = camera.Projection;

    // Normalize the two drag corners so the rectangle is valid regardless of
    // the drag direction.
    float top = Math.Max(_downPos.Y, _curPos.Y);
    float bottom = Math.Min(_downPos.Y, _curPos.Y);
    float left = Math.Min(_downPos.X, _curPos.X);
    float right = Math.Max(_downPos.X, _curPos.X);

    // The vertex buffer holds a unit quad; scale it to the rectangle size and
    // translate it to the bottom-left corner via the world matrix.
    Matrix world = Matrix.Identity;
    world.M11 = right - left; // width
    world.M22 = top - bottom; // height
    world.M41 = left;         // x offset
    world.M42 = bottom;       // y offset

    Matrix worldViewProjection;
    Matrix.Multiply(ref world, ref view, out worldViewProjection);
    Matrix.Multiply(ref worldViewProjection, ref projection, out worldViewProjection);
    // Transpose local Matrices before uploading (presumably to match the
    // shader's matrix layout — verify against Tools.Rectangle.hlsl).
    Matrix.Transpose(ref worldViewProjection, out worldViewProjection);

    // Bind the rectangle's own shaders, buffers and layout.
    deviceContext.VertexShader.SetConstantBuffers(0, _globalBuffer);
    deviceContext.PixelShader.SetConstantBuffers(0, _globalBuffer);
    deviceContext.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
    deviceContext.InputAssembler.InputLayout = _inputLayout;
    deviceContext.VertexShader.Set(_vertexShader);
    deviceContext.HullShader.Set(null);
    deviceContext.DomainShader.Set(null);
    deviceContext.PixelShader.Set(_pixelShader);
    deviceContext.InputAssembler.SetIndexBuffer(_indexBuffer, Format.R32_UInt, 0);
    deviceContext.InputAssembler.SetVertexBuffers(0, _bufferBinding);

    // Upload the transform and fill color into the constant buffer.
    DataStream dataStream;
    deviceContext.MapSubresource(_globalBuffer, 0, MapMode.WriteDiscard, MapFlags.None, out dataStream);
    dataStream.Write(worldViewProjection);
    dataStream.Write(_color.Vector4);
    deviceContext.UnmapSubresource(_globalBuffer, 0);

    // Two triangles (6 indices) form the quad.
    deviceContext.DrawIndexed(6, 0, 0);
}
/// <summary>
/// Creates the selection tool and loads its eight scale cursors
/// (one per resize handle) from the application resources.
/// </summary>
/// <param name="items">The selection manager holding the selectable items.</param>
/// <param name="camera">The camera of the editor surface.</param>
public Selection(SelectionManager items, OrthogonalCamera camera)
{
    _items = items;
    _camera = camera;
    _cursors = new List <Cursor>();

    for (int cursorIndex = 0; cursorIndex < 8; cursorIndex++)
    {
        Uri cursorUri = new Uri(string.Format("pack://application:,,,/Content/Cursors/scale/{0}.cur", cursorIndex));
        using (Stream cursorStream = Application.GetResourceStream(cursorUri).Stream)
        {
            _cursors.Add(new Cursor(cursorStream));
        }
    }
}
/// <summary>
/// Creates the rectangle tool: compiles its vertex and pixel shaders from an
/// embedded resource and builds the GPU buffers (constant buffer, unit-quad
/// vertex buffer and its index buffer).
/// </summary>
/// <param name="d3d">The D3D11 wrapper providing the device.</param>
/// <param name="items">The selection manager holding the editor items.</param>
/// <param name="camera">The camera of the editor surface.</param>
public Rectangle(D3D11 d3d, SelectionManager items, OrthogonalCamera camera)
{
    //_models = items;
    _items = items;
    _camera = camera;
    Device device = d3d.Device;

    //_color = new Vector4(0.0f, 0.0f, 0.0f, 0.1f);
    _color = new ColorVM(0.9f, 0.4f, 0.0f, 0.4f); // semi-transparent orange fill
    //_color = new Vector4(0.9f, 0.4f, 0.0f, 0.4f);
    _isVisible = false;

    // Compile the vertex shader and derive the input layout from its bytecode.
    // NOTE(review): the pack URI below was redacted in this copy of the source.
    using (var bytecode = ShaderBytecodeExtension.CompileFromResource("pack://*****:*****@"Shader\Tools.Rectangle.hlsl", "VS", "vs_5_0", shaderFlags, EffectFlags.None))
    {
        _vertexShader = ToDispose(new VertexShader(device, bytecode));
        _inputLayout = ToDispose(new InputLayout(device, bytecode, new[]
        {
            new InputElement("POSITION", 0, Format.R32G32B32_Float, 0, 0, InputClassification.PerVertexData, 0)
        }));
    }

    // Compile the pixel shader from the same resource.
    using (var bytecode = ShaderBytecodeExtension.CompileFromResource("pack://*****:*****@"Shader\Tools.Rectangle.hlsl", "PS", "ps_5_0", shaderFlags, EffectFlags.None))
    {
        _pixelShader = ToDispose(new PixelShader(device, bytecode));
    }

    // CPU-writable constant buffer: one matrix plus one color vector.
    _globalBuffer = ToDispose(new Buffer(device, Matrix.SizeInBytes + Vector4.SizeInBytes, ResourceUsage.Dynamic, BindFlags.ConstantBuffer, CpuAccessFlags.Write, ResourceOptionFlags.None, 0));

    // Vertex buffer: a unit quad (scaled/positioned at render time via the world matrix).
    using (var dataStream = new DataStream(Vector3.SizeInBytes * 4, true, true))
    {
        dataStream.Write(new Vector3(0.0f, 0.0f, 1.0f));
        dataStream.Write(new Vector3(0.0f, 1.0f, 1.0f));
        dataStream.Write(new Vector3(1.0f, 1.0f, 1.0f));
        dataStream.Write(new Vector3(1.0f, 0.0f, 1.0f));
        dataStream.Position = 0;
        _buffer = ToDispose(new Buffer(device, dataStream, Vector3.SizeInBytes * 4, ResourceUsage.Default, BindFlags.VertexBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0));
        _bufferBinding = new VertexBufferBinding(_buffer, Vector3.SizeInBytes, 0);
    }

    // Index buffer: two triangles (0-1-2 and 2-3-0) forming the quad.
    using (var dataStream = new DataStream(sizeof(int) * 6, true, true))
    {
        dataStream.Write(0);
        dataStream.Write(1);
        dataStream.Write(2);
        dataStream.Write(2);
        dataStream.Write(3);
        dataStream.Write(0);
        dataStream.Position = 0;
        _indexBuffer = ToDispose(new Buffer(device, dataStream, sizeof(int) * 6, ResourceUsage.Default, BindFlags.IndexBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0));
    }
}
public void testOrthogonalCamera()
{
    // With aspect ratio 2 the screen spans y in [-2, 2] and z in [-1, 1].
    OrthogonalCamera camera = new OrthogonalCamera(aspectRatio: 2.0f);

    // Fire one ray through each corner of the screen.
    Ray bottomLeft = camera.fireRay(0.0f, 0.0f);
    Ray bottomRight = camera.fireRay(1.0f, 0.0f);
    Ray topLeft = camera.fireRay(0.0f, 1.0f);
    Ray topRight = camera.fireRay(1.0f, 1.0f);

    // All rays of an orthogonal camera are parallel: cross products vanish.
    Assert.True(Utility.areClose(0.0f, bottomLeft.dir.crossProd(bottomRight.dir).getSquaredNorm()),
                "testOrthogonalCamera failed - assert 1/7");
    Assert.True(Utility.areClose(0.0f, bottomLeft.dir.crossProd(topLeft.dir).getSquaredNorm()),
                "testOrthogonalCamera failed - assert 2/7");
    Assert.True(Utility.areClose(0.0f, bottomLeft.dir.crossProd(topRight.dir).getSquaredNorm()),
                "testOrthogonalCamera failed - assert 3/7");

    // Each corner ray must reach its screen corner at t = 1.
    Assert.True(bottomLeft.at(1.0f).isClose(new Point(0.0f, 2.0f, -1.0f)),
                "testOrthogonalCamera failed - assert 4/7");
    Assert.True(bottomRight.at(1.0f).isClose(new Point(0.0f, -2.0f, -1.0f)),
                "testOrthogonalCamera failed - assert 5/7");
    Assert.True(topLeft.at(1.0f).isClose(new Point(0.0f, 2.0f, 1.0f)),
                "testOrthogonalCamera failed - assert 6/7");
    Assert.True(topRight.at(1.0f).isClose(new Point(0.0f, -2.0f, 1.0f)),
                "testOrthogonalCamera failed - assert 7/7");
}
/// <summary>
/// Maps an <see cref="OrthogonalCamera"/> to its BCF serialization form,
/// copying direction, up vector, view point and the view-to-world scale.
/// </summary>
/// <param name="GivenOrthogonalCamera">The camera to convert.</param>
/// <returns>The populated serialization object.</returns>
private static orthogonal_camera GetViewpointOrthogonalCamera(OrthogonalCamera GivenOrthogonalCamera)
{
    var viewpointCamera = new orthogonal_camera
    {
        camera_direction = new PointOrVector
        {
            x = GivenOrthogonalCamera.CameraDirection.X,
            y = GivenOrthogonalCamera.CameraDirection.Y,
            z = GivenOrthogonalCamera.CameraDirection.Z
        },
        camera_up_vector = new PointOrVector
        {
            x = GivenOrthogonalCamera.CameraUpVector.X,
            y = GivenOrthogonalCamera.CameraUpVector.Y,
            z = GivenOrthogonalCamera.CameraUpVector.Z
        },
        camera_view_point = new PointOrVector
        {
            x = GivenOrthogonalCamera.CameraViewPoint.X,
            y = GivenOrthogonalCamera.CameraViewPoint.Y,
            z = GivenOrthogonalCamera.CameraViewPoint.Z
        },
        view_to_world_scale = GivenOrthogonalCamera.ViewToWorldScale
    };
    return viewpointCamera;
}
/// <summary>
/// Renders this model's triangle mesh using the shared shaders and constant
/// buffer from the ShaderManager singleton.
/// </summary>
/// <param name="deviceContext">The device context to issue draw calls on.</param>
/// <param name="camera">The camera supplying view and projection matrices.</param>
public override void RenderContent(DeviceContext deviceContext, OrthogonalCamera camera)
{
    Matrix view = camera.View;
    Matrix projection = camera.Projection;

    //Matrix world = Matrix.Identity;
    //world.M41 = - camera.Width / 2.0f;
    //world.M42 = - camera.Height / 2.0f;
    //world.M42 = + camera.Height / 2.0f;

    // Combine this model's world matrix with the camera transforms.
    Matrix worldViewProjection;
    Matrix.Multiply(ref _world, ref view, out worldViewProjection);
    Matrix.Multiply(ref worldViewProjection, ref projection, out worldViewProjection);
    // Transpose local Matrices before uploading (presumably to match the
    // shader's matrix layout — verify against the shared shader source).
    Matrix.Transpose(ref worldViewProjection, out worldViewProjection);

    // Bind the shared pipeline state from the ShaderManager singleton.
    deviceContext.VertexShader.SetConstantBuffers(0, ShaderManager.Instance.GlobalBuffer);
    deviceContext.PixelShader.SetConstantBuffers(0, ShaderManager.Instance.GlobalBuffer);
    deviceContext.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
    deviceContext.InputAssembler.InputLayout = ShaderManager.Instance.InputLayout;
    deviceContext.VertexShader.Set(ShaderManager.Instance.VertexShader);
    deviceContext.HullShader.Set(null);
    deviceContext.DomainShader.Set(null);
    deviceContext.PixelShader.Set(ShaderManager.Instance.PixelShader);
    deviceContext.InputAssembler.SetIndexBuffer(_indexBuffer, Format.R32_UInt, 0);
    deviceContext.InputAssembler.SetVertexBuffers(0, _bufferBinding);

    // Upload the transform and this model's color to the shared constant buffer.
    DataStream dataStream;
    deviceContext.MapSubresource(ShaderManager.Instance.GlobalBuffer, 0, MapMode.WriteDiscard, MapFlags.None, out dataStream);
    dataStream.Write(worldViewProjection);
    dataStream.Write(Color.Vector4);
    deviceContext.UnmapSubresource(ShaderManager.Instance.GlobalBuffer, 0);

    // Three indices per triangle.
    deviceContext.DrawIndexed(3 * _triangles.Count, 0, 0);
}
/// <summary>
/// Applies a BCF orthogonal camera to a Revit 3D view: sets the view
/// orientation inside a transaction, then zooms the UI view to a square
/// window derived from the camera's view-to-world scale.
/// </summary>
/// <param name="bcfView">The Revit 3D view to orient.</param>
/// <param name="camera">The BCF orthogonal camera to apply.</param>
/// <returns>True when orientation and zoom were applied without error.</returns>
private bool SetOrthogonalView(View3D bcfView, OrthogonalCamera camera)
{
    var result = false;
    try
    {
        // Convert the scale into Revit units (ToFeet — presumably BCF meters
        // to Revit internal feet; confirm against the ToFeet extension).
        var zoom = camera.ViewToWorldScale.ToFeet();
        var direction = RevitUtils.GetRevitXYZ(camera.CameraDirection);
        var upVector = RevitUtils.GetRevitXYZ(camera.CameraUpVector);
        var viewPoint = RevitUtils.GetRevitXYZ(camera.CameraViewPoint);
        var orientation = RevitUtils.ConvertBasePoint(ActiveDoc, viewPoint, direction, upVector, true);

        using (var trans = new Transaction(ActiveDoc))
        {
            trans.Start("Set Orientation");
            try
            {
                bcfView.SetOrientation(orientation);
                trans.Commit();
            }
            catch (Exception ex)
            {
                // Best effort: roll back and still attempt the zoom below.
                trans.RollBack();
                var message = ex.Message;
            }
        }

        // Zoom window corners: top-left and bottom-right, offset from the view
        // origin by the zoom distance along the up and right directions.
        var m_xyzTl = bcfView.Origin.Add(bcfView.UpDirection.Multiply(zoom)).Subtract(bcfView.RightDirection.Multiply(zoom));
        var m_xyzBr = bcfView.Origin.Subtract(bcfView.UpDirection.Multiply(zoom)).Add(bcfView.RightDirection.Multiply(zoom));
        BCFUIView.ZoomAndCenterRectangle(m_xyzTl, m_xyzBr);
        result = true;
    }
    catch (Exception ex)
    {
        // NOTE(review): failure is silently reduced to result = false; the
        // message is captured but never logged — consider logging it.
        var message = ex.Message;
    }
    return(result);
}
/// <summary>
/// Renders the screen-filling background quad (tiled via the shader using
/// _tileSize and the two alternating colors).
/// </summary>
/// <param name="deviceContext">The device context to issue draw calls on.</param>
/// <param name="camera">The camera supplying the projection and viewport size.</param>
public void Render(DeviceContext deviceContext, OrthogonalCamera camera)
{
    Matrix projection = camera.Projection;

    // Pass the viewport half-extents (negated) to the shader via the Z/W
    // components of the tile-size vector.
    _tileSize.Z = -camera.Width / 2.0f;
    _tileSize.W = -camera.Height / 2.0f;

    // Scale the unit quad to cover the whole viewport; the view matrix is
    // deliberately skipped for the background.
    Matrix backgroundWorld = Matrix.Scaling(camera.Width, camera.Height, 1.0f);
    Matrix worldViewProjection;
    //Matrix.Multiply(ref backgroundWorld, ref view, out worldViewProjection);
    Matrix.Multiply(ref backgroundWorld, ref projection, out worldViewProjection);
    // Transpose local Matrices before uploading (presumably to match the
    // shader's matrix layout — verify against the background shader source).
    Matrix.Transpose(ref worldViewProjection, out worldViewProjection);

    // Bind the background's own shaders and buffers.
    deviceContext.VertexShader.SetConstantBuffers(0, _globalBuffer);
    deviceContext.PixelShader.SetConstantBuffers(0, _globalBuffer);
    deviceContext.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
    deviceContext.InputAssembler.InputLayout = _inputLayout;
    deviceContext.VertexShader.Set(_vertexShader);
    deviceContext.HullShader.Set(null);
    deviceContext.DomainShader.Set(null);
    deviceContext.PixelShader.Set(_pixelShader);
    deviceContext.InputAssembler.SetIndexBuffer(_indexBuffer, Format.R32_UInt, 0);
    deviceContext.InputAssembler.SetVertexBuffers(0, _bufferBinding);

    // Upload transform, the two tile colors and the tile metrics.
    DataStream dataStream;
    deviceContext.MapSubresource(_globalBuffer, 0, MapMode.WriteDiscard, MapFlags.None, out dataStream);
    dataStream.Write(worldViewProjection);
    dataStream.Write(_color1);
    dataStream.Write(_color2);
    dataStream.Write(_tileSize);
    deviceContext.UnmapSubresource(_globalBuffer, 0);

    // Two triangles (6 indices) form the quad.
    deviceContext.DrawIndexed(6, 0, 0);
}
/// <summary>
/// Draws a number at the given position by formatting it with the numeric
/// ("n") format and delegating to the string overload of Draw.
/// </summary>
/// <param name="number">The value to render.</param>
/// <param name="posX">Horizontal position passed through to Draw.</param>
/// <param name="posY">Vertical position passed through to Draw.</param>
/// <param name="deviceContext">The device context to issue draw calls on.</param>
/// <param name="camera">The camera supplying the projection.</param>
/// <param name="digits">Number of decimal digits to display (default 2).</param>
/// <param name="vertical">True to render the text rotated vertically.</param>
public void Draw(float number, float posX, float posY, DeviceContext deviceContext, OrthogonalCamera camera, int digits = 2, bool vertical = false)
{
    //string text = string.Format("{0:n1}", number);
    // Equivalent to the previous nested string.Format calls, which built the
    // composite format "{0:n<digits>}" at runtime and then applied it; calling
    // ToString with "n<digits>" directly is the same (current culture) and
    // far easier to read.
    string text = number.ToString("n" + digits);
    Draw(text, posX, posY, deviceContext, camera, vertical);
}
/// <summary>
/// Builds a BCF orthogonal camera from a Revit 3D view: derives the view
/// center and zoom from the UI view's zoom corners and converts the Revit
/// orientation into BCF camera vectors. Returns a default camera on any error.
/// </summary>
/// <param name="view">The Revit 3D view to read the camera from.</param>
/// <param name="viewpoint_Guid">The GUID of the owning BCF viewpoint.</param>
/// <returns>The populated (or default, on failure) orthogonal camera.</returns>
private OrthogonalCamera GetOrthogonalCamera(View3D view, string viewpoint_Guid)
{
    var camera = new OrthogonalCamera();
    try
    {
        var uiView = FindDefaultUIView(view);
        if (null == uiView)
        {
            // No UI view for this 3D view: return the empty default camera.
            return(camera);
        }

        // Center of the current zoom window.
        var topLeft = uiView.GetZoomCorners()[0];
        var bottomRight = uiView.GetZoomCorners()[1];
        var x = (topLeft.X + bottomRight.X) / 2;
        var y = (topLeft.Y + bottomRight.Y) / 2;
        var z = (topLeft.Z + bottomRight.Z) / 2;
        var viewCenter = new XYZ(x, y, z);

        // Zoom: half the window diagonal projected onto the view's vertical,
        // converted via ToMeters (presumably Revit feet -> BCF meters; verify).
        var diagVector = topLeft.Subtract(bottomRight);
        var dist = topLeft.DistanceTo(bottomRight) / 2;
        var zoomValue = dist * Math.Sin(diagVector.AngleTo(view.RightDirection)).ToMeters();

        var orientation = RevitUtils.ConvertBasePoint(ActiveDoc, viewCenter, view.ViewDirection, view.UpDirection, false);
        var c = orientation.EyePosition;
        var vi = orientation.ForwardDirection;
        var up = orientation.UpDirection;

        camera.Guid = Guid.NewGuid().ToString();
        camera.ViewPointGuid = viewpoint_Guid;

        var viewPoint = new Schemas.Point
        {
            Guid = Guid.NewGuid().ToString(),
            X = c.X.ToMeters(),
            Y = c.Y.ToMeters(),
            Z = c.Z.ToMeters()
        };
        camera.CameraViewPoint = viewPoint;

        var upVector = new Direction
        {
            Guid = Guid.NewGuid().ToString(),
            X = up.X.ToMeters(),
            Y = up.Y.ToMeters(),
            Z = up.Z.ToMeters()
        };
        camera.CameraUpVector = upVector;

        // The view direction is negated (BCF direction points the opposite
        // way from Revit's forward direction here).
        var direction = new Direction
        {
            Guid = Guid.NewGuid().ToString(),
            X = -(vi.X.ToMeters()),
            Y = -(vi.Y.ToMeters()),
            Z = -(vi.Z.ToMeters())
        };
        camera.CameraDirection = direction;
        camera.ViewToWorldScale = zoomValue;
    }
    catch (Exception)
    {
        // ignored
    }
    return(camera);
}
/// <summary>
/// Draws a text string glyph by glyph from the font-atlas texture, either
/// horizontally or rotated vertically. Characters without a glyph entry are
/// skipped.
/// </summary>
/// <param name="text">The text to draw.</param>
/// <param name="posX">Horizontal screen position of the text start.</param>
/// <param name="posY">Vertical screen position of the text start.</param>
/// <param name="deviceContext">The device context to issue draw calls on.</param>
/// <param name="camera">The camera supplying projection and viewport size.</param>
/// <param name="vertical">True to render the text rotated vertically.</param>
public void Draw(string text, float posX, float posY, DeviceContext deviceContext, OrthogonalCamera camera, bool vertical = false)
{
    Matrix view = Matrix.Identity;
    Matrix projection = camera.Projection;

    // Bind the text shaders, the atlas texture and the shared buffers once;
    // only the per-glyph constant buffer changes inside the loop.
    deviceContext.PixelShader.SetShaderResource(0, _texture);
    deviceContext.VertexShader.SetConstantBuffers(0, _globalBuffer);
    deviceContext.PixelShader.SetConstantBuffers(0, _globalBuffer);
    deviceContext.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
    deviceContext.InputAssembler.InputLayout = _inputLayout;
    deviceContext.VertexShader.Set(_vertexShader);
    deviceContext.HullShader.Set(null);
    deviceContext.DomainShader.Set(null);
    deviceContext.PixelShader.Set(_pixelShader);
    deviceContext.PixelShader.SetSampler(0, _sampleState);
    deviceContext.InputAssembler.SetIndexBuffer(_indexBuffer, Format.R32_UInt, 0);
    deviceContext.InputAssembler.SetVertexBuffers(0, _bufferBinding);

    // Running pen offset along the text direction.
    float shift = 0.0f;
    //if (vertical)
    //    posY += CaclulateWidth(text);
    foreach (char c in text)
    {
        if (!_glyphs.ContainsKey(c))
        {
            // No glyph for this character: skip it.
            continue;
        }
        var glyph = _glyphs[c];

        // Atlas rectangle of this glyph in normalized texture coordinates.
        Vector4 texPosition = new Vector4(glyph.Position / texWidth, 1.0f / texWidth, (glyph.Width + 1.0f) / texWidth, (glyph.Height + 1.0f) / texHeight);

        Matrix world = Matrix.Identity;
        if (!vertical)
        {
            // Scale the unit quad to the glyph size and place it at the pen
            // position, shifted so (0,0) is the viewport center.
            world.M11 = glyph.Width + 1.0f;
            world.M22 = glyph.Height + 1.0f;
            world.M41 = (float)Math.Floor(posX + shift - camera.Width / 2.0f);
            world.M42 = (float)Math.Floor(posY - camera.Height / 2.0f);
        }
        else
        {
            // Vertical text: sample half a texel earlier and build a 90-degree
            // rotation by swapping the scale into the off-diagonal entries.
            texPosition = new Vector4((glyph.Position - 0.5f) / texWidth, 1.0f / texWidth, (glyph.Width + 1.0f) / texWidth, (glyph.Height + 1.0f) / texHeight);
            world.M11 = 0.0f;
            world.M22 = 0.0f;
            world.M21 = -(glyph.Height + 1.0f);
            world.M12 = glyph.Width + 1.0f;
            world.M41 = (float)Math.Floor(posX - camera.Width / 2.0f);
            world.M42 = (float)Math.Floor(posY + shift - camera.Height / 2.0f);
        }
        // Advance the pen by the glyph width plus one pixel of spacing.
        shift += glyph.Width + 1.0f;

        Matrix worldViewProjection;
        Matrix.Multiply(ref world, ref view, out worldViewProjection);
        Matrix.Multiply(ref worldViewProjection, ref projection, out worldViewProjection);
        // Transpose local Matrices before uploading (presumably to match the
        // shader's matrix layout — verify against the text shader source).
        Matrix.Transpose(ref worldViewProjection, out worldViewProjection);

        // Upload this glyph's transform, color and atlas rectangle, then draw
        // the quad (two triangles, 6 indices).
        DataStream dataStream;
        deviceContext.MapSubresource(_globalBuffer, 0, MapMode.WriteDiscard, MapFlags.None, out dataStream);
        dataStream.Write(worldViewProjection);
        dataStream.Write(_color);
        dataStream.Write(texPosition);
        deviceContext.UnmapSubresource(_globalBuffer, 0);
        deviceContext.DrawIndexed(6, 0, 0);
    }
}
/// <summary>
/// Builds an orthogonal camera (and a snapshot image) for a set of Revit
/// elements: computes a bounding box around all elements, creates a temporary
/// isometric 3D view sectioned to that box, reads its orientation into the
/// camera, exports a snapshot PNG, then rolls the view creation back.
/// Shows a warning dialog and returns a default camera on failure.
/// </summary>
/// <param name="elementDictionary">Elements (by id) to frame in the view.</param>
/// <param name="imagePath">Target path for the exported snapshot image.</param>
/// <returns>The populated (or default, on failure) orthogonal camera.</returns>
private OrthogonalCamera GetOrthogonalCamera(Dictionary <int, ElementProperties> elementDictionary, string imagePath)
{
    OrthogonalCamera orthoCamera = new OrthogonalCamera();
    try
    {
        // Prepare an enabled bounding box with all bounds active.
        BoundingBoxXYZ boundingBox = new BoundingBoxXYZ();
        boundingBox.Enabled = true;
        for (int i = 0; i < 3; i++)
        {
            boundingBox.set_MinEnabled(i, true);
            boundingBox.set_MaxEnabled(i, true);
            boundingBox.set_BoundEnabled(0, i, true);
            boundingBox.set_BoundEnabled(1, i, true);
        }

        // Seed the min/max extents from the first element's bounding box.
        BoundingBoxXYZ tempBoundingBox = elementDictionary.First().Value.RevitElement.get_BoundingBox(null);
        tempBoundingBox.Enabled = true;
        double maxX = tempBoundingBox.Max.X;
        double maxY = tempBoundingBox.Max.Y;
        double maxZ = tempBoundingBox.Max.Z;
        double minX = tempBoundingBox.Min.X;
        double minY = tempBoundingBox.Min.Y;
        double minZ = tempBoundingBox.Min.Z;

        // Grow the extents over all elements, collecting their categories so
        // they can be made visible in the temporary view.
        List <ElementId> elementIds = new List <ElementId>();
        Dictionary <int, Category> categories = new Dictionary <int, Category>();
        foreach (ElementProperties ep in elementDictionary.Values)
        {
            Element element = ep.RevitElement;
            if (null != element)
            {
                try
                {
                    if (!categories.ContainsKey(element.Category.Id.IntegerValue))
                    {
                        categories.Add(element.Category.Id.IntegerValue, element.Category);
                    }
                    BoundingBoxXYZ bbBox = element.get_BoundingBox(null);
                    bbBox.Enabled = true;
                    elementIds.Add(element.Id);
                    if (null != boundingBox)
                    {
                        if (bbBox.Max.X > maxX) { maxX = bbBox.Max.X; }
                        if (bbBox.Max.Y > maxY) { maxY = bbBox.Max.Y; }
                        if (bbBox.Max.Z > maxZ) { maxZ = bbBox.Max.Z; }
                        if (bbBox.Min.X < minX) { minX = bbBox.Min.X; }
                        if (bbBox.Min.Y < minY) { minY = bbBox.Min.Y; }
                        if (bbBox.Min.Z < minZ) { minZ = bbBox.Min.Z; }
                    }
                }
                catch
                {
                    // Best effort: elements without usable category/bounds are skipped.
                    continue;
                }
            }
        }
        XYZ xyzMax = new XYZ(maxX, maxY, maxZ);
        XYZ xyzMin = new XYZ(minX, minY, minZ);
        boundingBox.set_Bounds(0, xyzMin);
        boundingBox.set_Bounds(1, xyzMax);

        // Find a 3D view family type to create the temporary view from.
        ViewFamilyType view3dFamilyType = null;
        FilteredElementCollector collector = new FilteredElementCollector(m_doc);
        List <Element> elements = collector.OfClass(typeof(ViewFamilyType)).ToElements().ToList();
        foreach (Element element in elements)
        {
            ViewFamilyType viewfamilytype = element as ViewFamilyType;
            if (viewfamilytype.ViewFamily == ViewFamily.ThreeDimensional)
            {
                view3dFamilyType = viewfamilytype;
                break;
            }
        }

        if (null != view3dFamilyType)
        {
            // The whole view creation is wrapped in a transaction group that
            // is rolled back at the end, so the temporary view never persists.
            using (TransactionGroup transGroup = new TransactionGroup(m_doc))
            {
                transGroup.Start("Start Creating View 3D");
                using (Transaction trans = new Transaction(m_doc))
                {
                    trans.Start("Create View");
                    View3D view3d = View3D.CreateIsometric(m_doc, view3dFamilyType.Id);
                    view3d.SetSectionBox(boundingBox);
                    view3d.GetSectionBox().Enabled = true;
                    view3d.DetailLevel = ViewDetailLevel.Fine;

                    // Make every collected category visible in the new view.
                    foreach (Category category in categories.Values)
                    {
                        if (category.get_AllowsVisibilityControl(view3d))
                        {
#if RELEASE2017 || RELEASE2018
                            view3d.SetCategoryHidden(category.Id, false);
#else
                            view3d.SetVisibility(category, true);
#endif
                        }
                    }
                    // Magic value 4: the MODEL_GRAPHICS_STYLE parameter setting
                    // used here — presumably "shaded with edges"; verify.
                    view3d.get_Parameter(BuiltInParameter.MODEL_GRAPHICS_STYLE).Set(4);
                    //m_app.ActiveUIDocument.ActiveView = view3d;
                    //m_app.ActiveUIDocument.RefreshActiveView();

                    // Read the view orientation into the BCF camera fields.
                    XYZ eyePostion = view3d.GetOrientation().EyePosition;
                    Point viewPoint = new Point();
                    viewPoint.X = eyePostion.X;
                    viewPoint.Y = eyePostion.Y;
                    viewPoint.Z = eyePostion.Z;
                    orthoCamera.CameraViewPoint = viewPoint;

                    XYZ forwardDirection = view3d.GetOrientation().ForwardDirection;
                    Direction fDirection = new Direction();
                    fDirection.X = forwardDirection.X;
                    fDirection.Y = forwardDirection.Y;
                    fDirection.Z = forwardDirection.Z;
                    orthoCamera.CameraDirection = fDirection;

                    XYZ upDirection = view3d.GetOrientation().UpDirection;
                    Direction uDirection = new Direction();
                    uDirection.X = upDirection.X;
                    uDirection.Y = upDirection.Y;
                    uDirection.Z = upDirection.Z;
                    orthoCamera.CameraUpVector = uDirection;

                    orthoCamera.ViewToWorldScale = view3d.Scale;
                    m_app.ActiveUIDocument.RefreshActiveView();
                    trans.Commit();

                    trans.Start("Export Image");
                    //create snapshot.png
                    ImageExportOptions option = new ImageExportOptions();
                    option.HLRandWFViewsFileType = ImageFileType.PNG;
                    option.ImageResolution = ImageResolution.DPI_300;
                    option.ShouldCreateWebSite = false;
                    option.ExportRange = ExportRange.SetOfViews;
                    option.FilePath = imagePath;
                    List <ElementId> viewIds = new List <ElementId>();
                    viewIds.Add(view3d.Id);
                    option.SetViewsAndSheets(viewIds);
                    if (ImageExportOptions.IsValidFileName(option.FilePath))
                    {
                        m_doc.ExportImage(option);
                    }
                    trans.Commit();
                }
                // Discard the temporary view; the exported file remains on disk.
                transGroup.RollBack();
            }

            // Revit appends the view name to the exported file; rename the
            // generated "snapshot*" file to the requested image path.
            if (File.Exists(imagePath))
            {
                File.Delete(imagePath);
            }
            string[] fileNames = Directory.GetFiles(Path.GetDirectoryName(imagePath), "snapshot*");
            foreach (string fName in fileNames)
            {
                if (Path.GetExtension(fName) == ".png" || Path.GetExtension(fName) == ".jpg")
                {
                    File.Move(fName, imagePath);
                    if (File.Exists(fName))
                    {
                        File.Delete(fName);
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show("Failed to get the orthogonal camera.\n" + ex.Message, "Get Orthogonal Camera", MessageBoxButton.OK, MessageBoxImage.Warning);
    }
    return(orthoCamera);
}
/// <summary>
/// Renders this object using the given device context and camera.
/// Implemented by each concrete subclass.
/// </summary>
/// <param name="deviceContext">The device context to issue draw calls on.</param>
/// <param name="camera">The camera supplying view and projection.</param>
public abstract void Render(DeviceContext deviceContext, OrthogonalCamera camera);
/// <summary>
/// Renders the selection decoration for this model: the boundary rectangle
/// (line strip) followed by the eight resize-handle nodes (borders as lines,
/// fills as triangles), all via the shared ShaderManager resources.
/// </summary>
/// <param name="deviceContext">The device context to issue draw calls on.</param>
/// <param name="camera">The camera supplying view and projection matrices.</param>
public void RenderBoundingBox(DeviceContext deviceContext, OrthogonalCamera camera)
{
    Matrix view = camera.View;
    Matrix projection = camera.Projection;
    //Matrix linesWorld;
    Matrix worldViewProjection;
    DataStream dataStream;

    // Scale/offset the model's world matrix so the unit rect maps onto the
    // model's local bounds (_left/_right/_bottom/_top).
    Matrix world = _world;
    world.M41 += _left * world.M11;
    world.M42 += _bottom * world.M22;
    world.M11 *= _right - _left;
    world.M22 *= _top - _bottom;

    #region Boundary Rect
    // Correct
    deviceContext.InputAssembler.InputLayout = ShaderManager.Instance.InputLayoutBoundingRect;
    deviceContext.VertexShader.Set(ShaderManager.Instance.VertexShaderBoundingRect);
    deviceContext.HullShader.Set(null);
    deviceContext.DomainShader.Set(null);
    deviceContext.PixelShader.Set(ShaderManager.Instance.PixelShaderBoundingRect);
    deviceContext.InputAssembler.PrimitiveTopology = PrimitiveTopology.LineStrip;
    deviceContext.InputAssembler.SetVertexBuffers(0, ShaderManager.Instance.BufferBindingBoundingRect);
    deviceContext.VertexShader.SetConstantBuffers(0, ShaderManager.Instance.GlobalBufferBoundingRect);
    deviceContext.PixelShader.SetConstantBuffers(0, ShaderManager.Instance.GlobalBufferBoundingRect);

    Matrix.Multiply(ref world, ref view, out worldViewProjection);
    Matrix.Multiply(ref worldViewProjection, ref projection, out worldViewProjection);

    // Upload transform and border color, then draw the rectangle outline
    // (5 vertices: 4 corners plus the closing vertex).
    dataStream = null;
    deviceContext.MapSubresource(ShaderManager.Instance.GlobalBufferBoundingRect, 0, MapMode.WriteDiscard, MapFlags.None, out dataStream);
    dataStream.Write(worldViewProjection);
    dataStream.Write(ShaderManager.Instance.ColorBoundingRect);
    deviceContext.UnmapSubresource(ShaderManager.Instance.GlobalBufferBoundingRect, 0);
    deviceContext.Draw(5, 0);
    #endregion

    #region Nodes
    deviceContext.InputAssembler.InputLayout = ShaderManager.Instance.InputLayoutNodes;
    deviceContext.VertexShader.Set(ShaderManager.Instance.VertexShaderNodes);
    deviceContext.HullShader.Set(null);
    deviceContext.DomainShader.Set(null);
    deviceContext.PixelShader.Set(ShaderManager.Instance.PixelShaderNodes);
    deviceContext.InputAssembler.SetVertexBuffers(0, ShaderManager.Instance.BufferBindingNodes);
    deviceContext.VertexShader.SetConstantBuffers(0, ShaderManager.Instance.GlobalBufferNodes);
    deviceContext.PixelShader.SetConstantBuffers(0, ShaderManager.Instance.GlobalBufferNodes);

    //Vector2 position = camera.Project(_position);
    //float posX = camera.ProjectX(_position.X);
    //float posY = camera.ProjectY(_position.Y);

    // The node shader receives world*view and projection separately, plus a
    // metric (handle size / clamped extents) and the rect center/extents.
    Matrix worldView;
    world = _world;
    //world.M11 *= _right - _left;
    //world.M22 *= _top - _bottom;
    //float width = (float)Math.Floor(_right - _left);
    //float height = (float)Math.Floor(_top - _bottom);
    float width = _right - _left;
    float height = _top - _bottom;
    Vector4 metric = new Vector4(6.0f, 6.0f, Math.Max(18.0f, width), Math.Max(18.0f, height));
    Vector4 rect = new Vector4(_left + width / 2.0f, _bottom + height / 2.0f, Math.Max(18.0f, width), Math.Max(18.0f, height));
    Matrix.Multiply(ref world, ref view, out worldView);
    //Matrix.Multiply(ref worldViewProjection, ref projection, out worldViewProjection);

    #region Lines
    // Handle borders: 8 handles, 8 line indices each.
    deviceContext.InputAssembler.PrimitiveTopology = PrimitiveTopology.LineList;
    deviceContext.InputAssembler.SetIndexBuffer(ShaderManager.Instance.IndexBufferNodesLines, Format.R32_UInt, 0);
    dataStream = null;
    deviceContext.MapSubresource(ShaderManager.Instance.GlobalBufferNodes, 0, MapMode.WriteDiscard, MapFlags.None, out dataStream);
    dataStream.Write(worldView);
    dataStream.Write(projection);
    dataStream.Write(ShaderManager.Instance.ColorNodesBorder);
    dataStream.Write(metric);
    dataStream.Write(rect);
    deviceContext.UnmapSubresource(ShaderManager.Instance.GlobalBufferNodes, 0);
    deviceContext.DrawIndexed(8 * 8, 0, 0);
    #endregion

    // Handle fills: shrink the handle size by one pixel so the border stays
    // visible, then draw 8 handles with 6 triangle indices each.
    deviceContext.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
    deviceContext.InputAssembler.SetIndexBuffer(ShaderManager.Instance.IndexBufferNodesTriangles, Format.R32_UInt, 0);
    metric.X -= 1.0f;
    metric.Y -= 1.0f;
    dataStream = null;
    deviceContext.MapSubresource(ShaderManager.Instance.GlobalBufferNodes, 0, MapMode.WriteDiscard, MapFlags.None, out dataStream);
    dataStream.Write(worldView);
    dataStream.Write(projection);
    dataStream.Write(ShaderManager.Instance.ColorNodesFill);
    dataStream.Write(metric);
    dataStream.Write(rect);
    deviceContext.UnmapSubresource(ShaderManager.Instance.GlobalBufferNodes, 0);
    deviceContext.DrawIndexed(6 * 8, 0, 0);
    #endregion
}
public void Render(DeviceContext deviceContext, OrthogonalCamera camera)
{
    // Draw the scene items first, then the currently selected tool so the
    // tool's visuals are composited on top of the items.
    Items.Render(deviceContext, camera);
    SelectedTool.Render(deviceContext, camera);
}
// Intentionally a no-op: this type fulfils the base-class rendering contract
// but draws nothing itself. NOTE(review): presumably rendering is delegated
// to child objects or simply not needed here — confirm against the base class.
public override void Render(DeviceContext deviceContext, OrthogonalCamera camera) { }
/// <summary>
/// Renders one of the built-in demo scenes, saves the result as an HDR (PFM)
/// file, then tone-maps it to an LDR file via <c>Convert.ExecuteConvert</c>.
/// </summary>
/// <param name="width">Image width in pixels.</param>
/// <param name="height">Image height in pixels.</param>
/// <param name="angle">Camera angle in degrees (printed only; the current camera transform ignores it).</param>
/// <param name="orthogonal">True for an orthogonal camera, false for a perspective camera.</param>
/// <param name="pfmFile">Output path of the PFM image.</param>
/// <param name="ldrFile">Output path of the tone-mapped LDR image.</param>
/// <param name="scene">Scene selector (1-5); any other value renders an empty world.</param>
/// <param name="luminosity">Optional luminosity override forwarded to the LDR conversion.</param>
/// <param name="spp">Samples per pixel (square-rooted for the tracer's per-side sample count).</param>
/// <param name="rendType">Renderer selector: 'o' on/off, 'f' flat, 'p' point-light, 'r' path tracer; anything else falls back to on/off.</param>
public static void ExecuteDemo(int width, int height, int angle, bool orthogonal, string pfmFile, string ldrFile, int scene, float? luminosity, int spp, char rendType)
{
    Stopwatch sw = new Stopwatch();
    sw.Start();

    Console.WriteLine("Starting Demo with these parameters:\n");
    Console.WriteLine("Width: " + width + " pixels");
    Console.WriteLine("Height: " + height + " pixels");
    Console.WriteLine("Angle: " + angle + " degrees");
    Console.WriteLine(orthogonal ? "Orthogonal Camera" : "Perspective Camera");
    Console.WriteLine("pfmFile: " + pfmFile);
    Console.WriteLine("ldrFile: " + ldrFile);
    Console.WriteLine("Samples per pixel: " + spp);
    // BUGFIX: dictRend[rendType] threw KeyNotFoundException for any unknown
    // renderer char, even though the renderer switch below deliberately
    // tolerates unknowns (null -> OnOffRender fallback). Keep the two consistent.
    Console.WriteLine("Render type: " + (dictRend.TryGetValue(rendType, out var rendName)
        ? rendName.ToString()
        : "unknown (defaults to on/off)"));
    Console.WriteLine("\n");

    HdrImage image = new HdrImage(width, height);

    // Camera initialization: fixed transform, two meters behind the origin.
    Console.WriteLine("Creating the camera...");
    var cameraTransf = Transformation.Translation(-2.0f, 0.0f, 0.0f);
    Camera camera;
    if (orthogonal)
    {
        camera = new OrthogonalCamera(aspectRatio: (float)width / height, transformation: cameraTransf);
    }
    else
    {
        camera = new PerspectiveCamera(aspectRatio: (float)width / height, transformation: cameraTransf);
    }

    // Null means "no renderer chosen"; resolved to OnOffRender before tracing.
    Render? renderer = null;

    Console.WriteLine("Creating the scene...");
    World world = new World();
    // Coordinates of the cube vertices used by scene 1.
    List<float> Vertices = new List<float>() { -0.5f, 0.5f };

    switch (scene)
    {
        case 1:
            // One small sphere on each of the eight vertices of the cube.
            foreach (var x in Vertices)
            {
                foreach (var y in Vertices)
                {
                    foreach (var z in Vertices)
                    {
                        world.addShape(new Sphere(Tsf.Translation(new Vec(x, y, z)) * Tsf.Scaling(new Vec(0.1f, 0.1f, 0.1f))));
                    }
                }
            }
            // Two more spheres on face centers to break the symmetry.
            world.addShape(new Sphere(Tsf.Translation(new Vec(0f, 0f, -0.5f)) * Tsf.Scaling(0.1f)));
            world.addShape(new Sphere(Tsf.Translation(new Vec(0f, 0.5f, 0f)) * Tsf.Scaling(0.1f)));
            break;
        case 2:
            // Two texture-mapped cylinders leaning over a checkered ground plane.
            HdrImage img = new HdrImage();
            string inputpfm = "Texture/CokeTexture.pfm";
            using (FileStream inputStream = File.OpenRead(inputpfm))
            {
                img.readPfm(inputStream);
                Console.WriteLine($"Texture {inputpfm} has been correctly read from disk.");
            }
            Material groundM = new Material(new DiffuseBRDF(new CheckeredPigment(CC.BrightGreen, CC.Orange, 4)), new UniformPigment(CC.Black));
            world.addShape(CC.SKY);
            world.addShape(new Plane(Tsf.Translation(0f, 0f, -3f), groundM));
            world.addShape(
                new Cylinder(
                    transformation: Tsf.Translation(.5f, -1f, -1f) * Transformation.Scaling(.6f, 0.6f, 1.3f) * Tsf.RotationY(Utility.DegToRad(45)),
                    material: new Material(Brdf: new DiffuseBRDF(new ImagePigment(img)))));
            world.addShape(
                new Cylinder(
                    transformation: Tsf.Translation(.5f, 1f, -1f) * Transformation.Scaling(.6f, 0.6f, 1.3f) * Tsf.RotationY(Utility.DegToRad(-45)),
                    material: new Material(Brdf: new DiffuseBRDF(new ImagePigment(img)))));
            break;
        case 3:
            // Mixed scene: CSG union, reflective ellipsoid, random-colored spheres.
            // NOTE(review): removed an unused `PCG pcg` local that was never read.
            Material sph1Mat = new Material(new DiffuseBRDF(new UniformPigment(CC.BlueChill)));
            Material sph2Mat = new Material(new DiffuseBRDF(new UniformPigment(Color.random())));
            Material boxMat = new Material(new DiffuseBRDF(new UniformPigment(CC.BrightGreen)));
            world.addShape(new Sphere(Tsf.Scaling(500f), CC.skyMat));
            world.addShape(new Plane(Tsf.Translation(0f, 0f, -1f), CC.groundMat));
            world.addShape(new CSGUnion(
                new Sphere(Transformation.Translation(0.5f, -2.6f, 1f) * Transformation.Scaling(0.6f), sph2Mat),
                new Box(new Point(0f, -2.25f, 0.9f), new Point(1f, -3.25f, 1.8f), null, boxMat)));
            world.addShape(new Sphere(Tsf.Translation(3f, 5f, 1.6f) * Tsf.Scaling(2.0f, 4.0f, 2.0f), CC.refMat));
            world.addShape(new Sphere(Tsf.Translation(4f, -1f, 1.3f) * Tsf.Scaling(1.0f), sph1Mat));
            world.addShape(new Sphere(Tsf.Translation(-4f, -0.5f, 1f) * Tsf.Scaling(2f), sph2Mat));
            break;
        case 4:
            // Brightly emitting backdrop plane behind the "wiki" showcase shape.
            Material mat = new Material(null, new UniformPigment(new Color(10f, 10f, 10f)));
            world.addShape(CC.SKY);
            world.addShape(new Plane(Tsf.Scaling(-3f, 0f, 0f) * Tsf.RotationY(Utility.DegToRad(270)), mat));
            world.addShape(CC.wikiShape(Tsf.RotationZ(Utility.DegToRad(23))));
            break;
        case 5:
            // Checkered cylinder and cone on a checkered ground under a sky dome.
            Material skyM = new Material(new DiffuseBRDF(new UniformPigment(CC.SkyBlue)), new UniformPigment(CC.SkyBlue));
            Material checkered = new Material(new DiffuseBRDF(new CheckeredPigment(CC.Blue, CC.Yellow)), new UniformPigment(CC.Black));
            Material ground = new Material(new DiffuseBRDF(new CheckeredPigment(CC.LightRed, CC.Orange)), new UniformPigment(CC.Black));
            world.addShape(new Sphere(Tsf.Scaling(500f), skyM));
            world.addShape(new Cylinder(Tsf.Translation(0f, 2f, -0.5f) * Tsf.Scaling(0.5f), checkered));
            world.addShape(new Cone(r: 0.5f, material: checkered));
            world.addShape(new Plane(Tsf.Translation(0f, 0f, -1f), ground));
            break;
        default:
            // Unknown scene number: render an empty world.
            break;
    }

    // Map the renderer selector char onto a concrete renderer.
    switch (rendType)
    {
        case 'o': renderer = new OnOffRender(world); break;
        case 'f': renderer = new FlatRender(world); break;
        case 'p': renderer = new PointLightRender(world); break;
        case 'r': renderer = new PathTracer(world, CC.Black, new PCG()); break;
        default: break;
    }

    // Ray tracing. spp is interpreted as a perfect square: sqrt(spp) samples per side.
    Console.WriteLine("Rendering the scene...");
    var rayTracer = new ImageTracer(image, camera, (int)Math.Sqrt(spp));
    renderer ??= new OnOffRender(world);
    rayTracer.fireAllRays(renderer);

    // Write PFM image.
    // BUGFIX: File.OpenWrite does not truncate an existing file, so writing a
    // shorter image left stale trailing bytes and produced a corrupt PFM.
    // File.Create opens with FileMode.Create, which truncates.
    Console.WriteLine("Saving in pfm format...");
    using (FileStream outpfmstream = File.Create(pfmFile))
    {
        image.savePfm(outpfmstream);
        Console.WriteLine($"Image saved in {pfmFile}");
    }

    // Tone-map the PFM to the requested LDR format.
    Convert.ExecuteConvert(pfmFile, ldrFile, Default.factor, Default.gamma, luminosity);

    sw.Stop();
    TimeSpan ts = sw.Elapsed;
    string elapsedTime = String.Format("{0:00}:{1:00}:{2:00}.{3:00}",
        ts.Hours, ts.Minutes, ts.Seconds, ts.Milliseconds / 10);
    Console.WriteLine("RunTime " + elapsedTime);
}//Demo