/// <summary>
/// Creates a stroke interaction bound to the given controller.
/// </summary>
/// <param name="scene">Scene the stroke is drawn into.</param>
/// <param name="devIndex">Controller index that owns this interaction.</param>
public Stroke(ref Scene scene, uint devIndex) : base(ref scene)
{
    // Red stroke geometry/material; the interaction starts idle.
    stroke_m = new Material.SingleColorMaterial(1, 0, 0, 1);
    stroke_g = new Geometry.GeometryStroke(ref scene);
    primaryDeviceIndex = devIndex;
    currentState = State.READY;
}
//addRhinoObject-pointOnObjRef (where curve on)
//drawPoint,editPointSN in PointMarkers-addSceneNode
/// <summary>
/// Resets all per-interaction drawing state to its initial values so a new
/// point/curve interaction can start from a clean slate.
/// </summary>
public void resetVariables()
{
    point_g = new Geometry.PointMarker(new Vector3());
    currentState = State.READY;
    targetPRhObjID = Guid.Empty;
    //pointOnObjRef = null;
    drawPoint = null;
    projectP = new Point3d();
    // Fresh scratch collections for snapping, ray casting and point markers.
    snapPointsList = new List <Point3d>();
    rayCastingObjs = new List <ObjRef>();
    pointsList = new List <Point3d>();
    pointMarkers = new List <SceneNode>();
    contourCurve = null;
    curvePlane = new Plane();
    // Snap tuning constants — units are unclear from here (presumably document
    // units for tolerance, pixels for distance); confirm against the snap logic.
    toleranceMax = 100;
    snapDistance = 40;
    isSnap = false;
    shouldSnap = false;
    moveControlerOrigin = new Point3d();
    movePlaneRef = null;
    planeNormal = new Rhino.Geometry.Vector3d();
    lastTranslate = 0.0f;
}
/// <summary>
/// Reads the source ADT and folds its terrain data into a fresh geometry container.
/// </summary>
public void Build()
{
    Source.Read();
    var geometry = new Geometry.Geometry();
    geometry.AddADT(Source);
}
/// <summary>
/// Point-adding interaction: sets up the marker geometry/material and starts idle.
/// </summary>
public AddPoint(ref Scene scene) : base(ref scene)
{
    mScene = scene;
    currentState = State.READY;
    // Light-blue, fully opaque marker for the point preview.
    point_m = new Material.SingleColorMaterial(0f, .5f, 1f, 1f);
    point_g = new Geometry.PointMarker(new Vector3());
}
/// <summary>
/// Draws textured geometry with the depth test forced to always pass (renders
/// on top of everything), then restores the default depth function.
/// </summary>
public override void draw(ref Geometry.Geometry g, ref Matrix4 model, ref Matrix4 vp)
{
    ErrorCode e; // reused for GL error polling below; values never inspected — debug leftover
    mShader.bind();
    GL.ActiveTexture(TextureUnit.Texture0);
    GL.BindTexture(TextureTarget.Texture2D, m_iTexture);
    GL.DepthFunc(DepthFunction.Always); // render regardless of depth-buffer contents
    //TODO- what's is dim for? compSize is because UnsignedInt 4 bytes?
    mShader.uploadAttrib <int>("indices", g.mGeometryIndices.Length, 3, 4, VertexAttribPointerType.UnsignedInt, false, ref g.mGeometryIndices, 0);
    e = GL.GetError();
    mShader.uploadAttrib <float>("position", g.mGeometry.Count(), 3, 4, VertexAttribPointerType.Float, false, ref g.mGeometry, 0);
    e = GL.GetError();
    mShader.uploadAttrib <float>("uvs", g.mUvs.Count(), 2, 4, VertexAttribPointerType.Float, false, ref g.mUvs, 0);
    e = GL.GetError();
    // NOTE(review): model is uploaded transposed (true) but vp is not — this
    // matches the other draw methods in this file; confirm it is intentional.
    GL.UniformMatrix4(mShader.uniform("modelTransform"), true, ref model);
    e = GL.GetError();
    GL.UniformMatrix4(mShader.uniform("viewProjTransform"), false, ref vp);
    //GL.UniformMatrix4(mShader.uniform("viewProjTransform"), false, ref vp);
    GL.Uniform1(mShader.uniform("tex"), 0); // sampler bound to texture unit 0
    //for debugging
    float[] funMatrix = new float[16];
    GL.GetUniform(mShader.mProgramShader, mShader.uniform("viewProjTransform"), funMatrix);
    e = GL.GetError();
    mShader.drawIndexed(g.primitiveType, 0, g.mNumPrimitives);
    GL.DepthFunc(DepthFunction.Less); // restore default depth test
}
/// <summary>
/// Parses "x,y;x,y;..." coordinate text, flips Y into panel space, builds a
/// Geometry from the first four points and reports whether it is parallel.
/// On any parse/format failure the input box is reset with a prompt.
/// </summary>
private void button3_Click(object sender, EventArgs e)
{
    try
    {
        List<double> points = new List<double>();
        string coordinates = Points.Text;
        foreach (string point in coordinates.Split(';'))
        {
            // Split each point once instead of once per component.
            string[] parts = point.Split(',');
            // NOTE(review): Convert.ToDouble uses the current culture; locales
            // with "," as decimal separator clash with the component delimiter —
            // consider CultureInfo.InvariantCulture.
            double x = Convert.ToDouble(parts[0]);
            double y = Convert.ToDouble(parts[1]);
            points.Add(x);
            // Flip Y so the origin matches the drawing panel's coordinate system.
            points.Add(panel1.Height - y);
        }
        double[] pointsArray = points.ToArray();
        Geometry.Geometry geom = new Geometry.Geometry(pointsArray[0], pointsArray[1], pointsArray[2], pointsArray[3], pointsArray[4], pointsArray[5], pointsArray[6], pointsArray[7]);
        // Single Clear + conditional text replaces the duplicated if/else branches.
        Log.Clear();
        Log.AppendText(geom.IsParalel ? "True" : "False");
    }
    catch
    {
        // Any malformed input (missing separators, non-numeric text, fewer than
        // four points) lands here; reset the input with a prompt.
        Points.Clear();
        Points.AppendText("Please enter points");
    }
}
/// <summary>
/// Minimal draw path: uploads a fixed single-triangle attribute set and renders
/// it. Counts are hard-coded (3 indices, 9 position floats, 3 vertices drawn);
/// the model and vp matrices are ignored by this material.
/// </summary>
public override void draw(ref Geometry.Geometry g, ref Matrix4 model, ref Matrix4 vp)
{
    mShader.bind();
    // The two attribute uploads are independent, so their order is arbitrary.
    mShader.uploadAttrib <float>("position", 9, 3, 4, VertexAttribPointerType.Float, false, ref g.mGeometry, 0);
    mShader.uploadAttrib <int>("indices", 3, 3, 4, VertexAttribPointerType.UnsignedInt, false, ref g.mGeometryIndices, 0);
    mShader.drawIndexed(BeginMode.Triangles, 0, 3);
}
/// <summary>
/// A feature couples an identifier with its schema, geometry and attribute bag.
/// </summary>
public Feature(int id, Schema schema, Geometry.Geometry geometry, Dictionary <string, Object> attributes)
{
    this.featureID = id;
    this.attributes = attributes;
    this.geometry = geometry;
    this.schema = schema;
}
// this function checks whether the current extent needs to be normalized
// because wrap-around is turned on and a maximum extent has been set.
/// <summary>
/// Normalizes <paramref name="extent"/> across the central meridian when the
/// map wraps around and a maximum extent is configured; otherwise returns the
/// extent unchanged.
/// </summary>
private Envelope NormalizeExtent(Envelope extent)
{
    // No MaximumExtent or no wrap-around: nothing to normalize.
    if (MaximumExtent == null || !Map.WrapAroundIsActive)
    {
        return extent;
    }
    Geometry.Geometry normExtent = Envelope.NormalizeCentralMeridian(extent);
    if (normExtent is Envelope)
    {
        // The entire extent lies within a single frame.
        return (Envelope)normExtent;
    }
    if (normExtent is Polygon)
    {
        // The extent straddles the date line: half exists in one frame and half
        // in the other. A two-ring polygon represents each side; convert it.
        return CreateDateLineExtent((Polygon)normExtent);
    }
    // Unexpected geometry type — fall back to the original extent.
    return extent;
}
/// <summary>
/// Radial marking menu attached to the controller. Selection radii differ
/// between Oculus and other HMDs (different thumbstick/touchpad ranges).
/// </summary>
public MarkingMenu(ref Scene scene, MenuLayout2 layout = MenuLayout2.MainMenu) : base(ref scene)
{
    mScene = scene;
    mLayout = layout;
    mNumSectors = getNumSectors(layout);
    mFirstSectorOffsetAngle = getAngularMenuOffset(mNumSectors);
    mCurrentSelection = -1; // nothing highlighted yet
    if (scene.isOculus)
    {
        mMinSelectionRadius = 0.2f;
        mOuterSelectionRadius = 0.8f;
    }
    else
    {
        mMinSelectionRadius = 0.4f;
        mOuterSelectionRadius = 0.6f;
    }
    // TODO(review): absolute dev-machine path — load from a resource lookup or
    // configuration instead of a hard-coded C:\ path.
    Geometry.Geometry g = new Geometry.Geometry("C:\\workspace\\SparrowHawk\\src\\resources\\circle.obj");
    // (Removed an empty "switch (mLayout) { }" that had no cases and no effect.)
    radialMenuMat = new Material.RadialMenuMaterial(mScene.rhinoDoc, getTexturePath(mLayout));
    mSceneNode = new SceneNode("MarkingMenu", ref g, ref radialMenuMat);
    // Scale-and-rotate transform that lays the menu disc flat facing the user.
    mSceneNode.transform = new OpenTK.Matrix4(2, 0, 0, 0, 0, 0, -2, 0, 0, 2, 0, 0, 0, 0, 0, 1);
    UtilOld.showLaser(ref mScene, false);
}
/// <summary>
/// Loads the target tile's geometry plus the eight surrounding tiles (tile
/// geometry can overlap its neighbours) and computes the target tile's bounds.
/// </summary>
public void LoadTileData()
{
    s_Geometry = new Geometry.Geometry();
    // First read target tile to verify we have geometry and get bounding box
    s_Tile.Read();
    // Generate relevant geometry
    s_Tile.Generate();
    // Append to our geometry
    s_Geometry.AddADT(s_Tile, false);
    // Generate the bounding box for the tile
    // (Because we need to cut the surrounding tiles later)
    Bounds = Geometry.Geometry.GetBoundingBox(X, Y, s_Geometry.Vertices);
    // We need to load the surrounding tiles because there sometimes is overlap
    // in geometry to and from different tiles.
    for (var y = Y - 1; y <= Y + 1; y++)
    {
        // BUG FIX: the inner loop previously tested "y <= X + 1" instead of
        // "x <= X + 1", which either skipped all neighbours or looped forever
        // depending on the relative values of X and Y.
        for (var x = X - 1; x <= X + 1; x++)
        {
            // Skip target tile as we already added it
            if (X == x && Y == y)
            {
                continue;
            }
            // Add a surrounding tile
            var tile = new ADT(World, x, y);
            tile.Read();
            tile.Generate();
            s_Geometry.AddADT(tile, false);
        }
    }
}
/// <summary>
/// Converts a WKT string into the matching Geometry subtype and tags it with
/// the corresponding OGR geometry type. Any WKT that is not POINT, LINESTRING,
/// POLYGON or MULTILINESTRING is treated as a multi-polygon.
/// </summary>
public static Geometry.Geometry WKT2Geometry(String wkt)
{
    string trimmed = wkt.Trim();
    Geometry.Geometry geometry;
    if (trimmed.StartsWith("POINT"))
    {
        geometry = WKT2PointD(trimmed);
        geometry.geometryType = OSGeo.OGR.wkbGeometryType.wkbPoint;
        return geometry;
    }
    if (trimmed.StartsWith("LINESTRING"))
    {
        geometry = WKT2SimplePolyline(trimmed);
        geometry.geometryType = OSGeo.OGR.wkbGeometryType.wkbLineString;
        return geometry;
    }
    if (trimmed.StartsWith("POLYGON"))
    {
        geometry = WKT2SimplePolygon(trimmed);
        geometry.geometryType = OSGeo.OGR.wkbGeometryType.wkbPolygon;
        return geometry;
    }
    if (trimmed.StartsWith("MULTILINESTRING"))
    {
        geometry = WKT2Polyline(trimmed);
        geometry.geometryType = OSGeo.OGR.wkbGeometryType.wkbMultiLineString;
        return geometry;
    }
    // Fallback: everything else is routed through WKT2Polygon and tagged as a
    // multi-polygon, mirroring the original behaviour.
    geometry = WKT2Polygon(trimmed);
    geometry.geometryType = OSGeo.OGR.wkbGeometryType.wkbMultiPolygon;
    return geometry;
}
/// <summary>
/// Draws single-color, alpha-blended geometry with the depth test disabled.
/// Lines use 2 indices per primitive, triangles use 3.
/// </summary>
public override void draw(ref Geometry.Geometry g, ref Matrix4 model, ref Matrix4 vp)
{
    GL.Disable(EnableCap.DepthTest); // draw on top of everything
    GL.BlendFunc(BlendingFactorSrc.SrcAlpha, BlendingFactorDest.OneMinusSrcAlpha);
    GL.Enable(EnableCap.Blend);
    int dim; // indices per primitive
    if (g.primitiveType == BeginMode.Lines)
    {
        dim = 2;
    }
    else
    {
        dim = 3;
    }
    mShader.bind();
    mShader.uploadAttrib <int>("indices", g.mGeometryIndices.Length, dim, 4, VertexAttribPointerType.UnsignedInt, false, ref g.mGeometryIndices, 0);
    mShader.uploadAttrib <float>("position", g.mGeometry.Count(), 3, 4, VertexAttribPointerType.Float, false, ref g.mGeometry, 0);
    GL.Uniform4(mShader.uniform("color"), mColor);
    // model uploaded transposed (true), vp as-is — consistent with the other
    // draw methods in this file.
    GL.UniformMatrix4(mShader.uniform("modelTransform"), true, ref model);
    GL.UniformMatrix4(mShader.uniform("viewProjTransform"), false, ref vp); // TODO: Fix this
    mShader.drawIndexed(g.primitiveType, 0, g.mNumPrimitives);
    GL.Disable(EnableCap.Blend);
    GL.Enable(EnableCap.DepthTest); // restore default state
}
/// <summary>
/// Alpha-blended draw with per-face normals: lazily generates flat normals
/// when missing, uploads model/view-proj/inverse-transpose matrices, renders.
/// </summary>
override public void draw(ref Geometry.Geometry g, ref Matrix4 model, ref Matrix4 vp)
{
    // bind shader
    GL.Disable(EnableCap.DepthTest);
    GL.BlendFunc(BlendingFactorSrc.SrcAlpha, BlendingFactorDest.OneMinusSrcAlpha);
    mShader.bind();
    // Inverse-transpose of the model matrix: the correct transform for normals
    // under non-uniform scaling.
    Matrix4 modelIT = model.Inverted();
    modelIT.Transpose();
    GL.Enable(EnableCap.Blend);
    mShader.uploadAttrib <int>("indices", g.mGeometryIndices.Length, 3, 4, VertexAttribPointerType.UnsignedInt, false, ref g.mGeometryIndices, 0);
    mShader.uploadAttrib <float>("position", g.mGeometry.Count(), 3, 4, VertexAttribPointerType.Float, false, ref g.mGeometry, 0);
    if (g.mNormals == null)
    {
        // Lazily generate flat-shading normals (rebuilds the vertex arrays).
        Math.addNormalsToMesh(g); // TODO: I literally hate this, but it was easier than patching Eric's code.
    }
    if (g.mNormals != null)
    {
        mShader.uploadAttrib <float>("normal", g.mNormals.Count(), 3, 4, VertexAttribPointerType.Float, false, ref g.mNormals, 0);
    }
    GL.Uniform1(mShader.uniform("alpha"), mAlpha);
    GL.ProgramUniformMatrix4(mShader.mProgramShader, mShader.uniform("viewProjTransform"), false, ref vp);
    GL.ProgramUniformMatrix4(mShader.mProgramShader, mShader.uniform("modelTransform"), true, ref model);
    GL.ProgramUniformMatrix4(mShader.mProgramShader, mShader.uniform("modelInvTrans"), true, ref modelIT);
    mShader.drawIndexed(g.primitiveType, 0, g.mNumPrimitives);
    GL.Disable(EnableCap.Blend);
    // NOTE(review): depth test is disabled (again) here, whereas the sibling
    // draw methods re-enable it on exit — confirm this asymmetry is intended.
    GL.Disable(EnableCap.DepthTest);
}
/// <summary>
/// Stroke interaction that appends to an already-existing piece of geometry
/// rather than creating its own. The interaction pops itself off the stack
/// (and therefore disappears) once the stroke completes. It can start in
/// either state; releasing the grip is the only thing that completes the stroke.
/// </summary>
/// <param name="scene">Scene being drawn into.</param>
/// <param name="target">The geometry to populate. NOTE: This should be a
/// GeometryStroke but C# will not allow this because of type safety.</param>
/// <param name="state">The starting state. Probably == State.Paint</param>
/// <param name="devIndex">The controller index responsible for this interaction.</param>
public Stroke(ref Scene scene, ref Geometry.Geometry target, State state, uint devIndex) : base(ref scene)
{
    mScene = scene;
    primaryDeviceIndex = devIndex;
    currentState = state;
    stroke_g = target; // external geometry; this interaction does not own it
    mPopAfterStroke = true; // remove this interaction once the stroke completes
}
/// <summary>
/// Wraps a geometry in a GPU mesh, building static-draw buffers immediately.
/// </summary>
public GeometryMesh(
    Geometry.Geometry geometry,
    NormalStyle normalStyle,
    VertexFormat vertexFormat
)
{
    this.Geometry = geometry;
    BuildMeshFromGeometry(BufferUsageHint.StaticDraw, normalStyle, vertexFormat);
}
/// <summary>
/// Curve-creation interaction for one of the two profile curves. Reads the
/// modeling function and drawn-on surface from the scene's selection
/// dictionary and records which dynamic preview ("Revolve", "Extrude",
/// "Loft" or "Sweep") should be rendered while drawing.
/// </summary>
public CreateCurve(ref Scene scene, bool _isClosed, CurveID curveID) : base(ref scene)
{
    beforeCurveCount = mScene.iCurveList.Count;
    stroke_g = new Geometry.GeometryStroke(ref mScene);
    stroke_m = new Material.SingleColorMaterial(1, 0, 0, 1);
    mesh_m = new Material.RGBNormalMaterial(0.5f);
    // Forest-green, semi-transparent rail plane material.
    railPlane_m = new Material.SingleColorMaterial(34f / 255f, 139f / 255f, 34f / 255f, 0.4f);
    isClosed = _isClosed;
    rayCastingObjs = new List <ObjRef>();
    resetVariables();
    FunctionType modelFun = (FunctionType)mScene.selectionDic[SelectionKey.ModelFun];
    //0:3D, 1:onDPlanes, 2: onSurfaces, 3: onTargets
    if (curveID == CurveID.ProfileCurve1)
    {
        drawnType = (DrawnType)mScene.selectionDic[SelectionKey.Profile1On];
        //Revolve only needs 1 profilecurve in our case
        if (modelFun == FunctionType.Revolve)
        {
            dynamicRender = "Revolve";
        }
    }
    else if (curveID == CurveID.ProfileCurve2)
    {
        drawnType = (DrawnType)mScene.selectionDic[SelectionKey.Profile2On];
        //need to visualize the model
        switch (modelFun)
        {
        case FunctionType.Extrude:
            dynamicRender = "Extrude";
            break;
        case FunctionType.Loft:
            dynamicRender = "Loft";
            break;
        case FunctionType.Sweep:
            dynamicRender = "Sweep";
            break;
        }
    }
    //testing — NOTE(review): shares the scene's curve list by reference (no
    // copy is made; a commented-out copy loop was removed here).
    localListCurve = mScene.iCurveList;
}
/// <summary>
/// Attempt to set the viewpoint to the given geometry and padding.
/// Fire-and-forget: async void is tolerable here only because every exception
/// is caught and logged inside the method, so none can escape unobserved.
/// </summary>
/// <param name="geometry">Geometry to zoom to, must not be a point</param>
/// <param name="padding">Padding around the target geometry</param>
private async void TrySetViewpoint(Geometry.Geometry geometry, double padding)
{
    try
    {
        await _mapView.SetViewpointGeometryAsync(geometry, padding);
    }
    catch (Exception ex)
    {
        ErrorLogger.Instance.LogException(ex);
    }
}
/// <summary>
/// Oculus trigger press: when idle, locks the drawing plane, starts a fresh
/// stroke and switches into paint mode. Repeated presses while painting are
/// ignored (other than updating the primary device index).
/// </summary>
protected override void onClickOculusTrigger(ref VREvent_t vrEvent)
{
    Rhino.RhinoApp.WriteLine("oculus grip click event test");
    primaryDeviceIndex = vrEvent.trackedDeviceIndex;
    if (currentState != State.READY)
    {
        return; // already painting
    }
    lockPlane = true;
    reducePoints = new List <Vector3>();
    stroke_g = new Geometry.GeometryStroke(ref mScene);
    currentState = State.PAINT;
}
/// <summary>
/// Opens the Ovrvision stereo camera at full resolution, builds the
/// fullscreen quad used to composite each eye's camera image, and sets up the
/// projection/model matrices. Logs an error when the camera cannot be opened.
/// </summary>
public void initOVrvision()
{
    Ovrvision = new COvrvision();
    Ovrvision.useProcessingQuality = 0; //DEMOSAIC & REMAP
    //OV_CAMVR_FULL 1280x960 @45fps x2
    //OV_CAMVR_WIDE 960x950 @60fps x2
    //OV_CAMVR_VGA 1280x800 @60fps x2
    //OV_CAMVR_QVGA 640x480 @90fps x2
    if (Ovrvision.Open(COvrvision.OV_CAMVR_FULL))
    {
        camWidth = Ovrvision.imageSizeW;
        camHeight = Ovrvision.imageSizeH;
        initScene();
        //create the fullscreen quad here
        fs_quad_g = new Geometry.Geometry();
        // Quad spanning NDC [-1,1], two triangles (0-1-2 and 2-3-0).
        fs_quad_g.mGeometry = new float[12] { -1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f, -1.0f, 0.0f, -1.0f, -1.0f, 0.0f };
        fs_quad_g.mGeometryIndices = new int[6] { 0, 1, 2, 2, 3, 0 };
        fs_quad_g.mUvs = new float[8] { 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f };
        fs_quad_g.mNumPrimitives = 2;
        fs_quad_g.primitiveType = BeginMode.Triangles;
        //left eye texture material (and a matching one for the right eye);
        // BGR byte textures sized to the camera image.
        fs_quad_m_L = new Material.TextureMaterial(mScene.rhinoDoc, Ovrvision.imageSizeW, Ovrvision.imageSizeH, OpenTK.Graphics.OpenGL4.PixelFormat.Bgr, PixelType.UnsignedByte);
        fs_quad_m_R = new Material.TextureMaterial(mScene.rhinoDoc, Ovrvision.imageSizeW, Ovrvision.imageSizeH, OpenTK.Graphics.OpenGL4.PixelFormat.Bgr, PixelType.UnsignedByte);
        // Near plane 0.1, far plane 30 — presumably meters; confirm.
        BuildProjectionMatrix(0.1f, 30, 0);
        BuildModelMatrix();
    }
    else
    {
        Rhino.RhinoApp.WriteLine("State: Open Error.");
    }
}
/// <summary>
/// Displays the measurement result: geodetic length for polylines, geodetic
/// area for polygons/envelopes, or a mode-appropriate instruction when there
/// is nothing to measure yet.
/// </summary>
/// <param name="geometry">geometry to measure; when null it is pulled from the
/// active measuring mode's sketch editor or feature overlay</param>
private void DisplayResult(Geometry.Geometry geometry = null)
{
    if (_measureResultTextBlock != null)
    {
        double measurement = 0;
        // No explicit geometry: pull it from whichever measuring mode is active.
        if (geometry == null)
        {
            switch (Mode)
            {
            case MeasureToolbarMode.Line:
                {
                    geometry = LineSketchEditor.Geometry;
                    break;
                }
            case MeasureToolbarMode.Area:
                {
                    geometry = AreaSketchEditor.Geometry;
                    break;
                }
            case MeasureToolbarMode.Feature:
                {
                    // First selected feature, if any.
                    geometry = _measureFeatureResultOverlay.Graphics.FirstOrDefault()?.Geometry;
                    break;
                }
            }
        }
        // Geodetic, shape-preserving measurement in the currently selected unit.
        if (geometry is Polyline)
        {
            measurement = GeometryEngine.LengthGeodetic(geometry, SelectedLinearUnit, GeodeticCurveType.ShapePreserving);
        }
        else if (geometry is Polygon || geometry is Envelope)
        {
            measurement = GeometryEngine.AreaGeodetic(geometry, SelectedAreaUnit, GeodeticCurveType.ShapePreserving);
        }
        if (geometry == null)
        {
            // Still nothing to measure: show an instruction instead of a number.
            var instruction = Mode == MeasureToolbarMode.None ? "Toggle a measure mode" : (Mode == MeasureToolbarMode.Feature ? "Tap a feature" : "Tap to sketch");
            _measureResultTextBlock.Text = instruction;
        }
        else
        {
            _measureResultTextBlock.Text = string.Format("{0:0,0.00}", measurement);
        }
    }
}
/// <summary>
/// Shows the area-unit picker when measuring polygons/envelopes and the
/// linear-unit picker otherwise; only one of the two is visible at a time.
/// </summary>
/// <param name="geometry">geometry to measure</param>
private void PrepareUnitSelector(Geometry.Geometry geometry)
{
    bool measuringArea = geometry is Polygon || geometry is Envelope;
    if (_areaUnitsSelector != null)
    {
        _areaUnitsSelector.Visibility = measuringArea ? Visibility.Visible : Visibility.Collapsed;
    }
    if (_linearUnitsSelector != null)
    {
        _linearUnitsSelector.Visibility = measuringArea ? Visibility.Collapsed : Visibility.Visible;
    }
}
/// <summary>
/// Builds the OpenCV calibration point sets (checkerboard grid, axis and cube
/// corners), the OpenGL cube geometry/materials, and caches the per-eye head
/// transforms from the HMD.
/// </summary>
private void initScene()
{
    //OpenCV Cube init
    objectList = new List <MCvPoint3D32f>();
    // Checkerboard grid on the Z=0 plane, spaced by the physical square size.
    for (int i = 0; i < _height; i++)
    {
        for (int j = 0; j < _width; j++)
        {
            objectList.Add(new MCvPoint3D32f(j * _squareSize, i * _squareSize, 0.0F));
        }
    }
    // Origin plus one point along each axis (Z negated).
    axisPoints = new List <MCvPoint3D32f>();
    axisPoints.Add(new MCvPoint3D32f(0.0f, 0.0f, 0.0f));
    axisPoints.Add(new MCvPoint3D32f(3.0f, 0.0f, 0.0f));
    axisPoints.Add(new MCvPoint3D32f(0.0f, 3.0f, 0.0f));
    axisPoints.Add(new MCvPoint3D32f(0.0f, 0.0f, -3.0f));
    // Eight corners of a 3x3x3 cube (Z negated).
    cubePoints = new List <MCvPoint3D32f>();
    cubePoints.Add(new MCvPoint3D32f(0.0f, 0.0f, 0.0f));
    cubePoints.Add(new MCvPoint3D32f(0.0f, 3.0f, 0.0f));
    cubePoints.Add(new MCvPoint3D32f(3.0f, 3.0f, 0.0f));
    cubePoints.Add(new MCvPoint3D32f(3.0f, 0.0f, 0.0f));
    cubePoints.Add(new MCvPoint3D32f(0.0f, 0.0f, -3.0f));
    cubePoints.Add(new MCvPoint3D32f(0.0f, 3.0f, -3.0f));
    cubePoints.Add(new MCvPoint3D32f(3.0f, 3.0f, -3.0f));
    cubePoints.Add(new MCvPoint3D32f(3.0f, 0.0f, -3.0f));
    //OpenGL objects init
    marker_cube_g = new Geometry.CubeGeometry(3.0f, 3.0f, -3.0f);
    marker_cube_m = new Material.TextureMaterial(mScene.rhinoDoc, "texture.jpg", false);
    controller_cube_g = new Geometry.CubeGeometry(0.05f, 0.05f, -0.05f);
    controller_cube_m = new Material.TextureMaterial(mScene.rhinoDoc, "texture.jpg", false);
    //TODO- how to deal with controllerPose ?
    //SceneNode controller_cube = new SceneNode("controller_cube", ref controller_cube_g, ref controller_cube_m); ;
    //mScene.staticGeometry.add(ref controller_cube);
    //we need eyepose before calibration
    if (mHMD == null)
    {
        // BUG FIX: the original set mEyePosLeft and then fell through to
        // mHMD.GetEyeToHeadTransform anyway, guaranteeing a NullReferenceException.
        // Default both eye poses to identity and bail out.
        mEyePosLeft = new Matrix4();
        mEyePosRight = new Matrix4();
        return;
    }
    Valve.VR.HmdMatrix34_t M_L = mHMD.GetEyeToHeadTransform(Valve.VR.EVREye.Eye_Left);
    mEyePosLeft = UtilOld.steamVRMatrixToMatrix4(M_L).Inverted();
    Valve.VR.HmdMatrix34_t M_R = mHMD.GetEyeToHeadTransform(Valve.VR.EVREye.Eye_Right);
    mEyePosRight = UtilOld.steamVRMatrixToMatrix4(M_R).Inverted();
}
//TODO- check if the x,y axis of the plane will change whenever we call tryGetPlane
//railPlaneSN-addRhinoObjSceneNode(draw on referece), curveOnObjRef-addRhinoObj(!=In3D)
//drawPoint, strokeSN-addSceneNode, renderObjSN-updateSceneNode(Revolve or Curve2)
/// <summary>
/// Resets the full curve-drawing interaction state — stroke geometry, Rhino
/// curve scratch data, snapping, plane/move tracking and background-modeling
/// flags — so a new curve can be drawn from scratch.
/// </summary>
public void resetVariables()
{
    stroke_g = new Geometry.GeometryStroke(ref mScene);
    currentState = State.READY;
    reducePoints = new List <Vector3>();
    targetPRhObjID = Guid.Empty;
    drawPoint = null;
    snapPointSN = null;
    projectP = new Point3d();
    // Raw and simplified curve scratch state.
    rhinoCurvePoints = new List <Point3d>();
    rhinoCurve = null;
    proj_plane = new Plane();
    simplifiedCurvePoints = new List <Point3d>();
    simplifiedCurve = null;
    editCurve = null;
    //for extrude
    //curveOnObjRef = null;
    backgroundStart = false;
    displacement = 0;
    dynamicBrep = null;
    modelName = "tprint";
    //dynamicRender = "none"; // need to save same as drawType and shapeType
    snapPointsList = new List <Point3d>();
    rayCastingObjs = new List <ObjRef>();
    // NOTE(review): toleranceMax is 100000 here but 100 in the point variant of
    // resetVariables — confirm the difference is intentional.
    toleranceMax = 100000;
    snapDistance = 40;
    isSnap = false;
    shouldSnap = false;
    moveControlerOrigin = new Point3d();
    movePlaneRef = null;
    planeNormal = new Rhino.Geometry.Vector3d();
    curvePlane = new Plane();
    lastTranslate = 0.0f;
    d = null;
    // Shares the scene's curve list by reference (no copy).
    localListCurve = mScene.iCurveList;
    oldCurveOnObjID = "";
    oldPlaneOrigin = "";
    oldPlaneNormal = "";
}
/// <summary>
/// Finishes the in-progress geometry: appends the final point, commits it to
/// the geometry list and clears the temporary buffer. No-op when nothing is
/// being drawn.
/// </summary>
public void TemporaryGeometry_End(Point point)
{
    var pending = TemporaryGeometry;
    if (pending == null)
    {
        return;
    }
    pending.Points.Add(point);
    // Note: the committed geometry shares the Points list with the temporary
    // one; the temporary reference is dropped immediately afterwards.
    var committed = new Geometry.Geometry()
    {
        GeometryType = pending.GeometryType,
        Points = pending.Points
    };
    Geometrys.Add(committed);
    TemporaryGeometry = null;
}
/// <summary>
/// Mesh viewer window: camera distance/speed are derived from the geometry's
/// bounding-box diagonal, and the window icon is loaded from embedded resources.
/// </summary>
public GeometryDisplayWindow(Geometry.Geometry geometry) :
    base(800, 600, new GraphicsMode(32, 24, 8, 8), "Mesh Viewer",
         GameWindowFlags.Default, DisplayDevice.Default, 3, 3,
         GraphicsContextFlags.ForwardCompatible)
{
    this.geometry = geometry;
    // Length of the bounding-box diagonal drives camera placement and speed.
    float diagonal = (geometry.BoundingBox.Upper - geometry.BoundingBox.Lower).Length;
    camera.distanceFromCenter = diagonal * 1.0f;
    camera.moveSpeed = diagonal * 0.01f;
    camera.rotationSpeed = 0.01f;
    controller = new CameraController(camera);
    controller.Attach(this);
    // Load the window icon from this assembly's embedded resources.
    var assembly = System.Reflection.Assembly.GetExecutingAssembly();
    var iconStream = assembly.GetManifestResourceStream("GeometryModes.Resources.Icon.ico");
    Icon = new System.Drawing.Icon(iconStream);
}
/// <summary>
/// Serializes a geometry back to WKT, dispatching on its OGR geometry type.
/// Types other than point/linestring/polygon/multilinestring are serialized
/// as (multi-)polygons, mirroring WKT2Geometry's fallback.
/// </summary>
public static string Geometry2WKT(Geometry.Geometry geom)
{
    if (geom.geometryType == OSGeo.OGR.wkbGeometryType.wkbPoint)
    {
        return PointD2WKT((PointD)geom);
    }
    if (geom.geometryType == OSGeo.OGR.wkbGeometryType.wkbLineString)
    {
        return SimplePolyline2WKT((SimplePolyline)geom);
    }
    if (geom.geometryType == OSGeo.OGR.wkbGeometryType.wkbPolygon)
    {
        return SimplePolygon2WKT((SimplePolygon)geom);
    }
    if (geom.geometryType == OSGeo.OGR.wkbGeometryType.wkbMultiLineString)
    {
        return Polyline2WKT((Polyline)geom);
    }
    return Polygon2WKT((Polygon)geom);
}
/// <summary>
/// Lit, alpha-blended draw: depth-sorts primitives back-to-front for correct
/// blending, lazily generates flat normals, and uploads light arrays.
/// </summary>
public override void draw(ref Geometry.Geometry g, ref Matrix4 model, ref Matrix4 vp)
{
    // Sort primitives by depth under the combined transform so alpha blending
    // composites back-to-front.
    UtilOld.depthSort(model * vp, g);
    GL.Disable(EnableCap.DepthTest);
    GL.BlendFunc(BlendingFactorSrc.SrcAlpha, BlendingFactorDest.OneMinusSrcAlpha);
    GL.Enable(EnableCap.Blend);
    // Inverse-transpose of the model matrix for normal transformation.
    Matrix4 modelIT = model.Inverted();
    modelIT.Transpose();
    int dim; // indices per primitive: 2 for lines, 3 for triangles
    if (g.primitiveType == BeginMode.Lines)
    {
        dim = 2;
    }
    else
    {
        dim = 3;
    }
    mShader.bind();
    mShader.uploadAttrib <int>("indices", g.mGeometryIndices.Length, dim, 4, VertexAttribPointerType.UnsignedInt, false, ref g.mGeometryIndices, 0);
    mShader.uploadAttrib <float>("position", g.mGeometry.Count(), 3, 4, VertexAttribPointerType.Float, false, ref g.mGeometry, 0);
    if (g.mNormals == null)
    {
        // Lazily generate flat-shading normals (rebuilds the vertex arrays).
        Math.addNormalsToMesh(g);
    }
    if (g.mNormals != null)
    {
        mShader.uploadAttrib <float>("normal", g.mNormals.Count(), 3, 4, VertexAttribPointerType.Float, false, ref g.mNormals, 0);
    }
    GL.Uniform4(mShader.uniform("color"), mColor);
    GL.UniformMatrix4(mShader.uniform("modelTransform"), true, ref model);
    GL.UniformMatrix4(mShader.uniform("viewProjTransform"), false, ref vp); // TODO: Fix this
    GL.ProgramUniformMatrix4(mShader.mProgramShader, mShader.uniform("modelInvTrans"), true, ref modelIT);
    // NOTE(review): GL.Uniform3's count is normally the number of vec3s, not
    // floats — "3 * MAX_LIGHT_COUNT" may over-read; confirm against the shader.
    GL.Uniform3(mShader.uniform("lightInt"), 3 * MAX_LIGHT_COUNT, ref lightIntensities[0].X);
    GL.Uniform3(mShader.uniform("lightPos"), 3 * MAX_LIGHT_COUNT, ref lightPositions[0].X);
    mShader.drawIndexed(g.primitiveType, 0, g.mNumPrimitives);
    GL.Disable(EnableCap.Blend);
    GL.Enable(EnableCap.DepthTest);
}
/// <summary>
/// Stroke interaction, optionally constrained to a drawing plane. When drawing
/// on a plane, any previous "drawPoint" marker is removed from the table
/// geometry, a fresh one is added, and the primary controller is chosen based
/// on handedness.
/// </summary>
public Stroke(ref Scene scene, bool drawOnP) : base(ref scene)
{
    mScene = scene;
    stroke_g = new Geometry.GeometryStroke(ref mScene);
    stroke_m = new Material.SingleColorMaterial(1, 0, 0, 1);
    currentState = State.READY;
    onPlane = drawOnP;
    if (onPlane)
    {
        //clear previous drawpoint
        if (mScene.tableGeometry.children.Count > 0)
        {
            foreach (SceneNode sn in mScene.tableGeometry.children)
            {
                if (sn.name == "drawPoint")
                {
                    // Removing during enumeration is tolerated here only
                    // because we break out immediately afterwards.
                    mScene.tableGeometry.children.Remove(sn);
                    break;
                }
            }
        }
        Geometry.Geometry geo = new Geometry.PointMarker(new OpenTK.Vector3(0, 0, 0));
        // BUG FIX: the color components previously used integer division
        // (250 / 255 == 0), which made the marker black instead of salmon.
        Material.Material m = new Material.SingleColorMaterial(250f / 255f, 128f / 255f, 128f / 255f, 1);
        drawPoint = new SceneNode("drawPoint", ref geo, ref m);
        drawPoint.transform = new OpenTK.Matrix4(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1);
        mScene.tableGeometry.add(ref drawPoint);
        //mScene.staticGeometry.add(ref drawPoint);
        //TODO-support both controllers
        if (mScene.mIsLefty)
        {
            primaryDeviceIndex = (uint)mScene.leftControllerIdx;
        }
        else
        {
            primaryDeviceIndex = (uint)mScene.rightControllerIdx;
        }
    }
}
/// <summary>
/// Adds normals to the mesh appropriate for flat shading.
/// Note: depending on the mesh, this may increase the number of
/// vertices by a factor of three!
/// Must be a triangulated mesh
/// </summary>
/// <param name="mesh">Triangle mesh to de-index and annotate with flat normals;
/// non-triangle meshes are left untouched.</param>
public static void addNormalsToMesh(Geometry.Geometry mesh)
{
    if (mesh.primitiveType != OpenTK.Graphics.OpenGL4.BeginMode.Triangles)
    {
        return; // only triangulated meshes are supported
    }
    // De-indexed copies: every face gets its own three vertices so each can
    // carry the face normal (this is the factor-of-three blow-up noted above).
    float[] mGeometry = new float[3 * 3 * mesh.mNumPrimitives];
    int[] mGeometryIndices = new int[3 * mesh.mNumPrimitives];
    float[] mNormals = new float[3 * 3 * mesh.mNumPrimitives];
    OpenTK.Vector3[] faceVertices = new OpenTK.Vector3[3];
    for (int f = 0; f < mesh.mNumPrimitives; f++)
    {
        for (int v = 0; v < 3; v++)
        {
            // Copy vertex v of face f out of the indexed arrays (x, y, z).
            mGeometry[9 * f + 3 * v + 0] = mesh.mGeometry[3 * mesh.mGeometryIndices[3 * f + v] + 0];
            mGeometry[9 * f + 3 * v + 1] = mesh.mGeometry[3 * mesh.mGeometryIndices[3 * f + v] + 1];
            mGeometry[9 * f + 3 * v + 2] = mesh.mGeometry[3 * mesh.mGeometryIndices[3 * f + v] + 2];
            faceVertices[v] = new OpenTK.Vector3(mGeometry[9 * f + 3 * v + 0], mGeometry[9 * f + 3 * v + 1], mGeometry[9 * f + 3 * v + 2]);
        }
        OpenTK.Vector3 n = calculateFaceNormal(faceVertices[0], faceVertices[1], faceVertices[2]);
        // Same flat normal for all three vertices of the face.
        for (int v = 0; v < 3; v++)
        {
            mNormals[9 * f + 3 * v + 0] = n.X;
            mNormals[9 * f + 3 * v + 1] = n.Y;
            mNormals[9 * f + 3 * v + 2] = n.Z;
        }
    }
    // Trivial index buffer (0..N-1) now that vertices are de-indexed.
    for (int i = 0; i < mGeometryIndices.Count(); i++)
    {
        mGeometryIndices[i] = i;
    }
    mesh.mGeometry = mGeometry;
    mesh.mGeometryIndices = mGeometryIndices;
    mesh.mNormals = mNormals;
}
/// <summary>
/// Point-adding interaction for a specific profile curve: pulls the drawn-on
/// surface and shape type from the scene's selection dictionary for the given
/// curve slot, then resets interaction state.
/// </summary>
public AddPoint(ref Scene scene, int num, CurveID curveID) : base(ref scene)
{
    mScene = scene;
    beforeCurveCount = mScene.iCurveList.Count;
    maxNumPoint = num;
    point_g = new Geometry.PointMarker(new Vector3());
    point_m = new Material.SingleColorMaterial(0f, .5f, 1f, 1f);
    // Semi-transparent dark-red profile preview material.
    profile_m = new Material.SingleColorMaterial(0.5f, 0, 0, 0.4f);
    if (curveID == CurveID.ProfileCurve1)
    {
        drawnType = (DrawnType)mScene.selectionDic[SelectionKey.Profile1On];
        shapeType = (ShapeType)mScene.selectionDic[SelectionKey.Profile1Shape];
    }
    else if (curveID == CurveID.ProfileCurve2)
    {
        drawnType = (DrawnType)mScene.selectionDic[SelectionKey.Profile2On];
        shapeType = (ShapeType)mScene.selectionDic[SelectionKey.Profile2Shape];
    }
    rayCastingObjs = new List <ObjRef>();
    resetVariables(); // must run last: it re-initializes shared drawing state
}
/// <summary>
/// Builds a Detour navmesh for a dungeon (global WMO model): loads the WDT,
/// extracts geometry, runs the full Recast pipeline (heightfield, compact
/// heightfield, regions, contours, poly mesh, detail mesh) and writes the
/// resulting .dmesh to disk. Returns null when the WDT is not a valid global
/// model or navmesh serialization fails.
/// </summary>
public byte[] Build()
{
    var wdt = new WDT("World\\maps\\" + Dungeon + "\\" + Dungeon + ".wdt");
    if (!wdt.IsGlobalModel || !wdt.IsValid)
        return null;
    Geometry = new Geometry.Geometry {};
    var model = new WMORoot(wdt.ModelFile);
    Geometry.AddDungeon(model, wdt.ModelDefinition);
    if (Geometry.Vertices.Count == 0 && Geometry.Indices.Count == 0)
        throw new InvalidOperationException("Can't build mesh with empty geometry");
    Geometry.SaveWavefrontObject($"{Dungeon}.obj");
    Context = new RecastContext();
    // get raw geometry - lots of slowness here
    float[] vertices;
    int[] triangles;
    byte[] areas;
    Geometry.GetRawData(out vertices, out triangles, out areas);
    Geometry.Indices.Clear();
    float[] bmin, bmax;
    Geometry.CalculateBoundingBox(out bmin, out bmax);
    // Source lists are cleared as soon as the raw arrays exist, to free memory.
    Geometry.Vertices.Clear();
    // Allocate voxel heightfield where we rasterize our input data to.
    Heightfield hf;
    int width, height;
    Recast.CalcGridSize(bmin, bmax, Config.CellSize, out width, out height);
    if (!Context.CreateHeightfield(out hf, width, height, bmin, bmax, Config.CellSize, Config.CellHeight))
        throw new OutOfMemoryException("CreateHeightfield ran out of memory");
    // Find triangles which are walkable based on their slope and rasterize them.
    Context.ClearUnwalkableTriangles(Config.WalkableSlopeAngle, ref vertices, ref triangles, areas);
    Context.RasterizeTriangles(ref vertices, ref triangles, ref areas, hf, Config.WalkableClimb);
    // Once all geometry is rasterized, we do initial pass of filtering to
    // remove unwanted overhangs caused by the conservative rasterization
    // as well as filter spans where the character cannot possibly stand.
    Context.FilterLowHangingWalkableObstacles(Config.WalkableClimb, hf);
    Context.FilterLedgeSpans(Config.WalkableHeight, Config.WalkableClimb, hf);
    Context.FilterWalkableLowHeightSpans(Config.WalkableHeight, hf);
    // Compact the heightfield so that it is faster to handle from now on.
    // This will result in more cache coherent data as well as the neighbours
    // between walkable cells will be calculated.
    CompactHeightfield chf;
    if (!Context.BuildCompactHeightfield(Config.WalkableHeight, Config.WalkableClimb, hf, out chf))
        throw new OutOfMemoryException("BuildCompactHeightfield ran out of memory");
    hf.Delete(); // heightfield no longer needed once compacted
    // Erode the walkable area by agent radius.
    if (!Context.ErodeWalkableArea(Config.WalkableRadius, chf))
        throw new OutOfMemoryException("ErodeWalkableArea ran out of memory");
    // Prepare for region partitioning, by calculating distance field along the walkable surface.
    if (!Context.BuildDistanceField(chf))
        throw new OutOfMemoryException("BuildDistanceField ran out of memory");
    // Partition the walkable surface into simple regions without holes.
    if (!Context.BuildRegions(chf, Config.BorderSize, Config.MinRegionArea, Config.MergeRegionArea))
        throw new OutOfMemoryException("BuildRegions ran out of memory");
    // Create contours.
    ContourSet cset;
    if (!Context.BuildContours(chf, Config.MaxSimplificationError, Config.MaxEdgeLength, out cset))
        throw new OutOfMemoryException("BuildContours ran out of memory");
    // Build polygon navmesh from the contours.
    PolyMesh pmesh;
    if (!Context.BuildPolyMesh(cset, Config.MaxVertsPerPoly, out pmesh))
        throw new OutOfMemoryException("BuildPolyMesh ran out of memory");
    // Build detail mesh.
    PolyMeshDetail dmesh;
    if (!Context.BuildPolyMeshDetail(pmesh, chf, Config.DetailSampleDistance, Config.DetailSampleMaxError, out dmesh))
        throw new OutOfMemoryException("BuildPolyMeshDetail ran out of memory");
    chf.Delete();
    cset.Delete();
    // Set flags according to area types (e.g. Swim for Water)
    pmesh.MarkAll();
    byte[] meshData;
    // Tile coordinates are (0, 0): a dungeon is a single global mesh.
    if (!Detour.CreateNavMeshData(out meshData, pmesh, dmesh, 0, 0, bmin, bmax, Config.WorldWalkableHeight, Config.WorldWalkableRadius, Config.WorldWalkableClimb, Config.CellSize, Config.CellHeight, Config.TileWidth, null))
    {
        pmesh.Delete();
        dmesh.Delete();
        return null;
    }
    pmesh.Delete();
    dmesh.Delete();
    // Replace any previous output directory wholesale before writing.
    if (Directory.Exists(Dungeon))
        Directory.Delete(Dungeon, true);
    Directory.CreateDirectory(Dungeon);
    if (meshData != null)
        File.WriteAllBytes(Dungeon + "\\" + Dungeon + ".dmesh", meshData);
    return meshData;
}
/// <summary>
/// Builds a Detour navmesh tile for one ADT map tile: loads the tile's
/// geometry, expands the bounds by the Recast border, runs the Recast pipeline
/// and serializes the tile data. Returns null when navmesh serialization fails.
/// </summary>
public byte[] Build()
{
    Geometry = new Geometry.Geometry {Transform = true };
    {
        var main = new ADT(World, X, Y);
        main.Read();
        // main.Generate();
        Geometry.AddADT(main);
    }
    if (Geometry.Vertices.Count == 0 && Geometry.Indices.Count == 0)
        throw new InvalidOperationException("Can't build tile with empty geometry");
    float[] bbMin, bbMax;
    CalculateTileBounds(out bbMin, out bbMax);
    // Y (index 1) holds height; replace the placeholder with the real min/max.
    Geometry.CalculateMinMaxHeight(out bbMin[1], out bbMax[1]);
    // again, we load everything - wasteful but who cares
    // (A commented-out loop that also loaded the 8 neighbouring tiles lived
    // here; it swallowed FileNotFoundException for missing tiles.)
    Context = new RecastContext();
    // Context.SetContextHandler(Log);
    // get raw geometry - lots of slowness here
    float[] vertices;
    int[] triangles;
    byte[] areas;
    Geometry.GetRawData(out vertices, out triangles, out areas);
    Geometry.SaveWavefrontObject($"{World}_{X}_{Y}.obj");
    // Source lists are cleared as soon as the raw arrays exist, to free memory.
    Geometry.Indices.Clear();
    Geometry.Vertices.Clear();
    // add border (expand X/Z bounds so neighbouring-tile geometry rasterizes correctly)
    bbMin[0] -= Config.BorderSize * Config.CellSize;
    bbMin[2] -= Config.BorderSize * Config.CellSize;
    bbMax[0] += Config.BorderSize * Config.CellSize;
    bbMax[2] += Config.BorderSize * Config.CellSize;
    Heightfield hf;
    int width = Config.TileWidth + (Config.BorderSize * 2);
    if (!Context.CreateHeightfield(out hf, width, width, bbMin, bbMax, Config.CellSize, Config.CellHeight))
        throw new OutOfMemoryException("CreateHeightfield ran out of memory");
    // NOTE(review): "out areas" discards the areas produced by GetRawData,
    // unlike the commented-out ClearUnwalkableTriangles path which kept them —
    // confirm this is the intended marking semantics.
    Context.MarkWalkableTriangles(Config.WalkableSlopeAngle, ref vertices, ref triangles,out areas);
    // Context.ClearUnwalkableTriangles(Config.WalkableSlopeAngle, ref vertices, ref triangles, areas);
    Context.RasterizeTriangles(ref vertices, ref triangles, ref areas, hf, Config.WalkableClimb);
    // Once all geometry is rasterized, we do initial pass of filtering to
    // remove unwanted overhangs caused by the conservative rasterization
    // as well as filter spans where the character cannot possibly stand.
    Context.FilterLowHangingWalkableObstacles(Config.WalkableClimb, hf);
    Context.FilterLedgeSpans(Config.WalkableHeight, Config.WalkableClimb, hf);
    Context.FilterWalkableLowHeightSpans(Config.WalkableHeight, hf);
    // Compact the heightfield so that it is faster to handle from now on.
    // This will result in more cache coherent data as well as the neighbours
    // between walkable cells will be calculated.
    CompactHeightfield chf;
    if (!Context.BuildCompactHeightfield(Config.WalkableHeight, Config.WalkableClimb, hf, out chf))
        throw new OutOfMemoryException("BuildCompactHeightfield ran out of memory");
    hf.Delete();
    // Erode the walkable area by agent radius.
    if (!Context.ErodeWalkableArea(Config.WalkableRadius, chf))
        throw new OutOfMemoryException("ErodeWalkableArea ran out of memory");
    // Prepare for region partitioning, by calculating distance field along the walkable surface.
    if (!Context.BuildDistanceField(chf))
        throw new OutOfMemoryException("BuildDistanceField ran out of memory");
    // Partition the walkable surface into simple regions without holes.
    // (Message says "BuildRegionsMonotone" but the call is BuildRegions.)
    if (!Context.BuildRegions(chf, Config.BorderSize, Config.MinRegionArea, Config.MergeRegionArea))
        throw new OutOfMemoryException("BuildRegionsMonotone ran out of memory");
    // Create contours.
    ContourSet cset;
    if (!Context.BuildContours(chf, Config.MaxSimplificationError, Config.MaxEdgeLength, out cset))
        throw new OutOfMemoryException("BuildContours ran out of memory");
    // Build polygon navmesh from the contours.
    PolyMesh pmesh;
    if (!Context.BuildPolyMesh(cset, Config.MaxVertsPerPoly, out pmesh))
        throw new OutOfMemoryException("BuildPolyMesh ran out of memory");
    // Build detail mesh.
    PolyMeshDetail dmesh;
    if (
        !Context.BuildPolyMeshDetail(pmesh, chf, Config.DetailSampleDistance, Config.DetailSampleMaxError, out dmesh))
        throw new OutOfMemoryException("BuildPolyMeshDetail ran out of memory");
    chf.Delete();
    cset.Delete();
    // Remove padding from the polymesh data. (Remove this odditity)
    pmesh.RemovePadding(Config.BorderSize);
    // Set flags according to area types (e.g. Swim for Water)
    pmesh.MarkAll();
    // get original bounds
    float[] tilebMin, tilebMax;
    CalculateTileBounds(out tilebMin, out tilebMax);
    tilebMin[1] = bbMin[1];
    tilebMax[1] = bbMax[1];
    // build off mesh connections for flightmasters
    // bMax and bMin are switched here because of the coordinate system transformation
    var connections = new List<OffMeshConnection>();
    byte[] tileData;
    if (!Detour.CreateNavMeshData(out tileData, pmesh, dmesh, X, Y, tilebMin, tilebMax, Config.WorldWalkableHeight, Config.WorldWalkableRadius, Config.WorldWalkableClimb, Config.CellSize, Config.CellHeight, Config.TileWidth, connections.ToArray()))
    {
        pmesh.Delete();
        dmesh.Delete();
        return null;
    }
    pmesh.Delete();
    dmesh.Delete();
    // NOTE(review): explicit GC.Collect() after each tile build — presumably to
    // cap peak memory across a long batch run; confirm it is still needed.
    GC.Collect();
    return tileData;
}