// Mouse-release handler: scales every transform on the active selection
// list by (mousePointGlName + 1) per axis.
// Simple implementation that does not support undo.
public override bool doRelease(M3dView view)
{
    MSelectionList activeList = new MSelectionList();
    MGlobal.getActiveSelectionList(activeList);

    MObject dependNode = new MObject();
    for (MItSelectionList it = new MItSelectionList(activeList); !it.isDone; it.next())
    {
        it.getDependNode(dependNode);

        MFnTransform transformFn;
        try
        {
            transformFn = new MFnTransform(dependNode);
        }
        catch (System.Exception)
        {
            // Node is not a transform -- skip it.
            continue;
        }

        double[] scale = new double[3];
        scale[0] = mousePointGlName.x + 1;
        scale[1] = mousePointGlName.y + 1;
        scale[2] = mousePointGlName.z + 1;
        transformFn.setScale(scale);
    }

    return true;
}
// Description:
//   Draw every UV coordinate of this surface as a GL point in the UV
//   texture editor; optionally label each one with its UV index.
private void drawUVMapCoordNum(apiMeshGeom geom, M3dView view, MTextureEditorDrawInfo info, bool drawNumbers)
{
    view.beginGL();

    // Remember the current point size so it can be restored afterwards.
    float[] savedPointSize = new float[1];
    OpenGL.glGetFloatv(OpenGL.GL_POINT_SIZE, savedPointSize);
    OpenGL.glPointSize(UV_POINT_SIZE);

    uint numUVs = geom.uvcoords.uvcount();
    for (int index = 0; index < numUVs; index++)
    {
        float u = 0.0f;
        float v = 0.0f;
        geom.uvcoords.getUV(index, ref u, ref v);
        drawUVMapCoord(view, index, u, v, drawNumbers);
    }

    OpenGL.glPointSize(savedPointSize[0]);
    view.endGL();
}
// for userInteraction example code
//
// Description:
//   Simple very fast draw routine: paints one large red point at the origin.
//
// Arguments:
//   request - request to be drawn (unused here)
//   view    - view to draw into
//
public void drawRedPointAtCenter(MDrawRequest request, M3dView view)
{
    view.beginGL();

    // Save colour / point state, draw the point, then restore.
    OpenGL.glPushAttrib(OpenGL.GL_CURRENT_BIT | OpenGL.GL_POINT_BIT);

    OpenGL.glPointSize(20.0f);
    OpenGL.glBegin(OpenGL.GL_POINTS);
    OpenGL.glColor3f(1.0f, 0.0f, 0.0f);
    OpenGL.glVertex3f(0.0f, 0.0f, 0.0f);
    OpenGL.glEnd();

    OpenGL.glPopAttrib();
    view.endGL();
}
// Mouse-press handler: reset the accumulated mouse-point offset for a
// new press and refresh the drag state.
public override bool doPress(M3dView view)
{
    mousePointGlName.assign(MPoint.origin);
    updateDragInformation();
    return true;
}
// Description:
//   Emit one UV value as a GL vertex in the port view; when drawNum is
//   set, also draw the UV id centred at that position.
private void drawUVMapCoord(M3dView view, int uv, float u, float v, bool drawNum)
{
    if (drawNum)
    {
        view.drawText(uv.ToString(), new MPoint(u, v, 0), M3dView.TextPosition.kCenter);
    }
    OpenGL.glVertex3f(u, v, 0.0f);
}
// Description:
//   Draws the UV layout in wireframe mode: for each face, one GL_LINES
//   batch containing every edge of the face's UV polygon.
private void drawUVWireframe(apiMeshGeom geom, M3dView view, MTextureEditorDrawInfo info)
{
    view.beginGL();

    // Draw the polygons.
    // vid walks the flat face-vertex list across all faces; vid_start
    // remembers the first face-vertex of the current face so the closing
    // edge can wrap back to it.
    int vid = 0;
    int vid_start = 0;
    for (int i = 0; i < geom.faceCount; i++)
    {
        OpenGL.glBegin(OpenGL.GL_LINES);
        uint v;
        float du1 = 0.0f;
        float dv1 = 0.0f;
        float du2 = 0.0f;
        float dv2 = 0.0f;
        int uvId1, uvId2;
        vid_start = vid;
        // Edges between consecutive face-vertices (all but the closing edge).
        for (v = 0; v < geom.face_counts[i] - 1; v++)
        {
            uvId1 = geom.uvcoords.uvId(vid);
            uvId2 = geom.uvcoords.uvId(vid + 1);
            geom.uvcoords.getUV(uvId1, ref du1, ref dv1);
            geom.uvcoords.getUV(uvId2, ref du2, ref dv2);
            OpenGL.glVertex3f(du1, dv1, 0.0f);
            OpenGL.glVertex3f(du2, dv2, 0.0f);
            vid++;
        }
        // Closing edge: last face-vertex back to the face's first one.
        uvId1 = geom.uvcoords.uvId(vid);
        uvId2 = geom.uvcoords.uvId(vid_start);
        geom.uvcoords.getUV(uvId1, ref du1, ref dv1);
        geom.uvcoords.getUV(uvId2, ref du2, ref dv2);
        OpenGL.glVertex3f(du1, dv1, 0.0f);
        OpenGL.glVertex3f(du2, dv2, 0.0f);
        vid++;
        OpenGL.glEnd();
    }

    view.endGL();
}
// Fill in a shaded draw request: evaluate the material (and its texture
// when present), flag transparency, and queue a wireframe-on-shaded
// companion request when the object is active/lead/hilited.
public void getDrawRequestsShaded(MDrawRequest request, MDrawInfo info, MDrawRequestQueue queue, MDrawData data)
{
    // Need to get the material info.
    MDagPath path = info.multiPath;   // path to your dag object
    M3dView view = info.view;         // view to draw to
    MMaterial material = base.material(path);
    M3dView.DisplayStatus displayStatus = info.displayStatus;

    // Evaluate the material and if necessary, the texture.
    material.evaluateMaterial(view, path);

    bool drawTexture = true;
    if (drawTexture && material.materialIsTextured)
    {
        material.evaluateTexture(data);
    }

    request.material = material;

    bool materialTransparent = false;
    material.getHasTransparency(ref materialTransparent);
    if (materialTransparent)
    {
        request.isTransparent = true;
    }

    // Create a draw request for wireframe on shaded if necessary.
    bool needsWire =
        displayStatus == M3dView.DisplayStatus.kActive ||
        displayStatus == M3dView.DisplayStatus.kLead ||
        displayStatus == M3dView.DisplayStatus.kHilite;
    if (needsWire)
    {
        MDrawRequest wireRequest = info.getPrototype(this);
        wireRequest.setDrawData(data);
        getDrawRequestsWireframe(wireRequest, info);
        wireRequest.token = (int)DrawShapeStyle.kDrawWireframeOnShaded;
        wireRequest.displayStyle = M3dView.DisplayStyle.kWireFrame;
        queue.add(wireRequest);
    }
}
// Draw the manipulator via the base implementation, then overlay a
// fixed text caption at the origin.
public override void draw(M3dView view, MDagPath path, M3dView.DisplayStyle style, M3dView.DisplayStatus status)
{
    base.draw(view, path, style, status);

    MPoint labelPosition = new MPoint(0, 0, 0);
    view.drawText("Two custom line manipulators", labelPosition, M3dView.TextPosition.kLeft);
}
// Main draw routine. Gets called by Maya with draw requests.
//
// The request token encodes which representation to draw; dispatch to
// the matching helper routine.
public override void draw(MDrawRequest request, M3dView view)
{
    switch ((DrawToken)request.token)
    {
        case DrawToken.kDrawWireframe:
        case DrawToken.kDrawWireframeOnShaded:
            drawWireframe(request, view);
            break;

        case DrawToken.kDrawSmoothShaded:
            drawShaded(request, view);
            break;

        case DrawToken.kDrawFlatShaded:
            // Not implemented.
            break;

        case DrawToken.kDrawVertices:
            drawVertices(request, view);
            break;

        case DrawToken.kDrawBoundingBox:
            drawBoundingBox(request, view);
            break;

        case DrawToken.kDrawRedPointAtCenter:
            // for userChangingViewContext example code
            drawRedPointAtCenter(request, view);
            break;
    }
}
// Mouse-press handler for the move tool. When an object is already
// selected (i.e. we are not in selecting mode), record the press
// position, classify which window the camera corresponds to, and
// create the tool command that will accumulate the translation.
public override void doPress(MEvent eventArg)
{
    base.doPress(eventArg);

    if (_isSelecting())
        return;

    eventArg.getPosition(ref startPos_x, ref startPos_y);
    view = M3dView.active3dView;

    MDagPath camera = view.Camera;
    MFnCamera fnCamera = new MFnCamera(camera);
    MVector upDir = fnCamera.upDirection(MSpace.Space.kWorld);
    MVector rightDir = fnCamera.rightDirection(MSpace.Space.kWorld);

    // Determine the camera used in the current view: only the
    // orthographic top/front/side views are supported.
    if (fnCamera.isOrtho)
    {
        if (upDir.isEquivalent(MVector.zNegAxis, 1e-3))
            currWin = 0;   // TOP
        else if (rightDir.isEquivalent(MVector.xAxis, 1e-3))
            currWin = 1;   // FRONT
        else
            currWin = 2;   // SIDE
    }
    else
    {
        currWin = 3;       // PERSP
        MGlobal.displayWarning("moveTool only works in top, front and side views");
    }

    // Create an instance of the move tool command.
    cmd = _newToolCommand() as moveCmd;
    cmd.setVector(0.0, 0.0, 0.0);
}
// Main draw routine for the UV editor. Called by Maya when the shape
// is 'active' and the UV texture window is visible.
public override void drawUV(M3dView view, MTextureEditorDrawInfo info)
{
    apiMesh meshNode = (apiMesh)surfaceShape;
    apiMeshGeom geom = meshNode.meshGeom();

    uint uv_len = geom.uvcoords.uvcount();
    if (uv_len == 0)
        return;

    view.setDrawColor(new MColor(1.0f, 0.0f, 0.0f));

    switch (info.drawingFunction)
    {
        case MTextureEditorDrawInfo.DrawingFunction.kDrawEverything:
        case MTextureEditorDrawInfo.DrawingFunction.kDrawUVForSelect:
            drawUVWireframe(geom, view, info);
            drawUVMapCoordNum(geom, view, info, false);
            break;

        case MTextureEditorDrawInfo.DrawingFunction.kDrawWireframe:
        case MTextureEditorDrawInfo.DrawingFunction.kDrawVertexForSelect:
        case MTextureEditorDrawInfo.DrawingFunction.kDrawEdgeForSelect:
        case MTextureEditorDrawInfo.DrawingFunction.kDrawFacetForSelect:
        default:
            // All remaining modes just draw the wireframe layout.
            drawUVWireframe(geom, view, info);
            break;
    }
}
// Main draw routine. Gets called by Maya with draw requests.
//
// This simple shape draws its vertices for the wireframe tokens as
// well; the shaded modes are not implemented (left as an exercise).
public override void draw(MDrawRequest request, M3dView view)
{
    DrawShapeStyle token = (DrawShapeStyle)request.token;

    if (token == DrawShapeStyle.kDrawWireframe ||
        token == DrawShapeStyle.kDrawWireframeOnShaded ||
        token == DrawShapeStyle.kDrawVertices)
    {
        drawVertices(request, view);
    }
    // kDrawSmoothShaded / kDrawFlatShaded: not implemented, left as exercise.
}
// Begin marquee drawing (using OpenGL): decode the pressed modifier
// keys into the selection-list adjustment mode, then record the
// marquee anchor position.
public override void doPress(MEvent eventArg)
{
    bool shift = eventArg.isModifierShift;
    bool control = eventArg.isModifierControl;

    if (shift && control)
        listAdjustment = MGlobal.ListAdjustment.kAddToList;      // merge new selections
    else if (shift)
        listAdjustment = MGlobal.ListAdjustment.kXORWithList;    // xor with previous ones
    else if (control)
        listAdjustment = MGlobal.ListAdjustment.kRemoveFromList; // remove from previous list
    else
        listAdjustment = MGlobal.ListAdjustment.kReplaceList;

    // Extract the event information.
    eventArg.getPosition(ref start_x, ref start_y);
    view = M3dView.active3dView;
    fsDrawn = false;
}
//
// From the given draw request, get the draw data and determine
// which quadric to draw and with what values.
//
public override void draw(MDrawRequest request, M3dView view)
{
    MDrawData data = request.drawData();
    quadricGeom geom = data.geometry() as quadricGeom;

    DrawShapeStyle token = (DrawShapeStyle)request.token;
    bool drawTexture = false;

    view.beginGL();

    // Shaded modes need polygon offset, the request's material bound,
    // and (when textured) the texture applied to the view.
    if ((token == DrawShapeStyle.kDrawSmoothShaded) || (token == DrawShapeStyle.kDrawFlatShaded))
    {
        OpenGL.glEnable((uint)OpenGL.GL_POLYGON_OFFSET_FILL);

        // Set up the material.
        MMaterial material = request.material;
        material.setMaterial(request.multiPath, request.isTransparent);

        // Enable texturing.
        drawTexture = material.materialIsTextured;
        if (drawTexture) OpenGL.glEnable((uint)OpenGL.GL_TEXTURE_2D);

        // Apply the texture to the current view.
        if (drawTexture)
        {
            material.applyTexture(view, data);
        }
    }

    // Configure a GLU quadric object for the requested draw style.
    IntPtr qobj = GLUFunctionInvoker.gluNewQuadric();
    switch (token)
    {
        case DrawShapeStyle.kDrawWireframe:
        case DrawShapeStyle.kDrawWireframeOnShaded:
            GLUFunctionInvoker.gluQuadricDrawStyle(qobj, GLU_LINE);
            break;
        case DrawShapeStyle.kDrawSmoothShaded:
            GLUFunctionInvoker.gluQuadricNormals(qobj, GLU_SMOOTH);
            GLUFunctionInvoker.gluQuadricTexture(qobj, GLtrue);
            GLUFunctionInvoker.gluQuadricDrawStyle(qobj, GLU_FILL);
            break;
        case DrawShapeStyle.kDrawFlatShaded:
            GLUFunctionInvoker.gluQuadricNormals(qobj, GLU_FLAT);
            GLUFunctionInvoker.gluQuadricTexture(qobj, GLtrue);
            GLUFunctionInvoker.gluQuadricDrawStyle(qobj, GLU_FILL);
            break;
    }

    // Emit the quadric described by the geometry's shape type
    // (sphere is the fall-through default).
    switch (geom.shapeType)
    {
        case (short)DrawShapeType.kDrawCylinder:
            GLUFunctionInvoker.gluCylinder(qobj, geom.radius1, geom.radius2, geom.height, geom.slices, geom.stacks);
            break;
        case (short)DrawShapeType.kDrawDisk:
            GLUFunctionInvoker.gluDisk(qobj, geom.radius1, geom.radius2, geom.slices, geom.loops);
            break;
        case (short)DrawShapeType.kDrawPartialDisk:
            GLUFunctionInvoker.gluPartialDisk(qobj, geom.radius1, geom.radius2, geom.slices, geom.loops, geom.startAngle, geom.sweepAngle);
            break;
        case (short)DrawShapeType.kDrawSphere:
        default:
            GLUFunctionInvoker.gluSphere(qobj, geom.radius1, geom.slices, geom.stacks);
            break;
    }

    // Turn off texture mode.
    if (drawTexture) OpenGL.glDisable((uint)OpenGL.GL_TEXTURE_2D);

    view.endGL();
}
// Draw the manipulator, then label it with a fixed caption placed at
// the world origin.
public override void draw(M3dView view, MDagPath path, M3dView.DisplayStyle style, M3dView.DisplayStatus status)
{
    base.draw(view, path, style, status);

    String distanceText = "Two custom line manipulators";
    view.drawText(distanceText, new MPoint(0, 0, 0), M3dView.TextPosition.kLeft);
}
// Drag handler: refresh the cached mouse-delta information and report
// the drag as handled.
public override bool doDrag(M3dView view)
{
    updateDragInformation();
    return true;
}
// Draw the manipulator: a single pickable line between the line
// geometry's top and bottom points, offset by the accumulated mouse
// delta while this line's GL pick name is the active one.
public override void draw(M3dView view, MDagPath path, M3dView.DisplayStyle style, M3dView.DisplayStatus status)
{
    // Are we in the right view? Only draw when the camera name contains
    // "persp" or "front".
    MDagPath dpath = new MDagPath();
    view.getCamera(dpath);
    MFnCamera viewCamera = new MFnCamera(dpath);
    string nameBuffer = viewCamera.name;
    if (nameBuffer == null)
        return;
    if (nameBuffer.IndexOf("persp") == -1 && nameBuffer.IndexOf("front") == -1)
        return;

    // bool rightLine = !affectTranslate;

    // Populate the point arrays which are in local space.
    MPoint top = lineGeometry.topPoint();
    MPoint bottom = lineGeometry.bottomPoint();

    // Depending on what's active, we modify the end points with mouse
    // deltas in local space.
    uint active = 0;
    try
    {
        glActiveName(ref active);
    }
    catch (System.Exception)
    {
        // No active GL pick name available -- nothing to draw.
        return;
    }

    if (active == lineName && active != 0)
    {
        top[0] += (float)mousePointGlName.x;
        top[1] += (float)mousePointGlName.y;
        top[2] += (float)mousePointGlName.z;
        bottom[0] += (float)mousePointGlName.x;
        bottom[1] += (float)mousePointGlName.y;
        bottom[2] += (float)mousePointGlName.z;
    }

    // Begin the drawing.
    view.beginGL();

    // Get the starting value of the pickable items.
    uint glPickableItem = 0;
    glFirstHandle(ref glPickableItem);

    // Top
    lineName = glPickableItem;

    // Place before you draw the manipulator component that can
    // be pickable.
    colorAndName(view, glPickableItem, true, mainColor());

    OpenGL.glBegin((uint)libOpenMayaRenderNet.MGL_LINES);
    OpenGL.glVertex3d(top.x, top.y, top.z);
    OpenGL.glVertex3d(bottom.x, bottom.y, bottom.z);
    OpenGL.glEnd();

    // End the drawing.
    view.endGL();
}
// Begin marquee drawing (using OpenGL): translate the pressed modifier
// keys into the selection-list adjustment mode, then capture the
// marquee anchor.
public override void doPress(MEvent eventArg)
{
    if (eventArg.isModifierShift)
    {
        // shift+control merges new selections; shift alone xors them
        // with the previous selection.
        listAdjustment = eventArg.isModifierControl
            ? MGlobal.ListAdjustment.kAddToList
            : MGlobal.ListAdjustment.kXORWithList;
    }
    else if (eventArg.isModifierControl)
    {
        // control only: remove new selections from the previous list.
        listAdjustment = MGlobal.ListAdjustment.kRemoveFromList;
    }
    else
    {
        listAdjustment = MGlobal.ListAdjustment.kReplaceList;
    }

    // Extract the event information.
    eventArg.getPosition(ref start_x, ref start_y);
    view = M3dView.active3dView;
    fsDrawn = false;
}
// Draw the shape's control points as GL points. When the draw request
// carries a component, only its elements are drawn (each labelled with
// its index); otherwise every point is drawn. Lighting and point size
// are saved and restored around the draw.
public void drawVertices(MDrawRequest request, M3dView view)
{
    MDrawData data = request.drawData();
    MVectorArray geom = data.geometry() as MVectorArray;

    view.beginGL();

    // Query current state so it can be restored.
    bool lightingWasOn = OpenGL.glIsEnabled(OpenGL.GL_LIGHTING) != 0;
    if (lightingWasOn)
    {
        OpenGL.glDisable(OpenGL.GL_LIGHTING);
    }
    float lastPointSize;
    getLastPointSize(out lastPointSize);

    // Set the point size of the vertices.
    OpenGL.glPointSize(POINT_SIZE);

    // If a component is specified by the draw request, loop over it and
    // draw only the active vertices; otherwise draw all vertices.
    MObject comp = request.component;
    if (!comp.isNull)
    {
        MFnSingleIndexedComponent fnComponent = new MFnSingleIndexedComponent(comp);
        for (int i = 0; i < fnComponent.elementCount; i++)
        {
            int index = fnComponent.element(i);

            OpenGL.glBegin(OpenGL.GL_POINTS);
            MVector point = geom[index];
            OpenGL.glVertex3f((float)point[0], (float)point[1], (float)point[2]);
            OpenGL.glEnd();

            // Label the active vertex with its index.
            view.drawText(String.Format("{0}", index), new MPoint(point));
        }
    }
    else
    {
        for (int i = 0; i < geom.length; i++)
        {
            OpenGL.glBegin(OpenGL.GL_POINTS);
            MVector point = geom[i];
            OpenGL.glVertex3f((float)point[0], (float)point[1], (float)point[2]);
            OpenGL.glEnd();
        }
    }

    // Restore the state.
    if (lightingWasOn)
    {
        OpenGL.glEnable(OpenGL.GL_LIGHTING);
    }
    OpenGL.glPointSize(lastPointSize);

    view.endGL();
}
public override void draw(MDrawRequest request, M3dView view)
//
// From the given draw request, get the draw data and determine
// which quadric to draw and with what values.
//
{
    MDrawData data = request.drawData();
    quadricGeom geom = data.geometry() as quadricGeom;

    DrawShapeStyle token = (DrawShapeStyle)request.token;
    bool drawTexture = false;

    view.beginGL();

    // Shaded modes need polygon offset, the request's material bound,
    // and (when textured) the texture applied to the view.
    if ((token == DrawShapeStyle.kDrawSmoothShaded) || (token == DrawShapeStyle.kDrawFlatShaded))
    {
        OpenGL.glEnable((uint)OpenGL.GL_POLYGON_OFFSET_FILL);

        // Set up the material.
        MMaterial material = request.material;
        material.setMaterial(request.multiPath, request.isTransparent);

        // Enable texturing.
        drawTexture = material.materialIsTextured;
        if (drawTexture)
        {
            OpenGL.glEnable((uint)OpenGL.GL_TEXTURE_2D);
        }

        // Apply the texture to the current view.
        if (drawTexture)
        {
            material.applyTexture(view, data);
        }
    }

    // Configure a GLU quadric object for the requested draw style.
    IntPtr qobj = GLUFunctionInvoker.gluNewQuadric();
    switch (token)
    {
        case DrawShapeStyle.kDrawWireframe:
        case DrawShapeStyle.kDrawWireframeOnShaded:
            GLUFunctionInvoker.gluQuadricDrawStyle(qobj, GLU_LINE);
            break;
        case DrawShapeStyle.kDrawSmoothShaded:
            GLUFunctionInvoker.gluQuadricNormals(qobj, GLU_SMOOTH);
            GLUFunctionInvoker.gluQuadricTexture(qobj, GLtrue);
            GLUFunctionInvoker.gluQuadricDrawStyle(qobj, GLU_FILL);
            break;
        case DrawShapeStyle.kDrawFlatShaded:
            GLUFunctionInvoker.gluQuadricNormals(qobj, GLU_FLAT);
            GLUFunctionInvoker.gluQuadricTexture(qobj, GLtrue);
            GLUFunctionInvoker.gluQuadricDrawStyle(qobj, GLU_FILL);
            break;
    }

    // Emit the quadric described by the geometry's shape type
    // (sphere is the fall-through default).
    switch (geom.shapeType)
    {
        case (short)DrawShapeType.kDrawCylinder:
            GLUFunctionInvoker.gluCylinder(qobj, geom.radius1, geom.radius2, geom.height, geom.slices, geom.stacks);
            break;
        case (short)DrawShapeType.kDrawDisk:
            GLUFunctionInvoker.gluDisk(qobj, geom.radius1, geom.radius2, geom.slices, geom.loops);
            break;
        case (short)DrawShapeType.kDrawPartialDisk:
            GLUFunctionInvoker.gluPartialDisk(qobj, geom.radius1, geom.radius2, geom.slices, geom.loops, geom.startAngle, geom.sweepAngle);
            break;
        case (short)DrawShapeType.kDrawSphere:
        default:
            GLUFunctionInvoker.gluSphere(qobj, geom.radius1, geom.slices, geom.stacks);
            break;
    }

    // Turn off texture mode.
    if (drawTexture)
    {
        OpenGL.glDisable((uint)OpenGL.GL_TEXTURE_2D);
    }

    view.endGL();
}
// Description:
//   Add draw requests for this simple shape to the draw queue, based on
//   the current display style and display status. Also queues vertex
//   component requests when components should be shown.
public override void getDrawRequests(MDrawInfo info, bool objectAndActiveOnly, MDrawRequestQueue queue)
{
    apiSimpleShape shapeNode = surfaceShape as apiSimpleShape;
    if (shapeNode == null)
    {
        return;
    }

    // This call creates a prototype draw request that we can fill
    // in and then add to the draw queue.
    MDrawRequest request = info.getPrototype(this);
    MDrawData data;
    MVectorArray geomPtr = shapeNode.controlPoints;

    // Stuff our data into the draw request, it'll be used when the
    // drawing actually happens.
    getDrawData(geomPtr, out data);
    request.setDrawData(data);

    // Decode the draw info and determine what needs to be drawn.
    M3dView.DisplayStyle appearance = info.displayStyle;
    M3dView.DisplayStatus displayStatus = info.displayStatus;

    switch (appearance)
    {
        case M3dView.DisplayStyle.kWireFrame:
        {
            request.token = (int)DrawShapeStyle.kDrawWireframe;
            M3dView.ColorTable activeColorTable = M3dView.ColorTable.kActiveColors;
            M3dView.ColorTable dormantColorTable = M3dView.ColorTable.kDormantColors;
            // Pick the wireframe colour from the display status.
            switch (displayStatus)
            {
                case M3dView.DisplayStatus.kLead:
                    request.setColor(LEAD_COLOR, (int)activeColorTable);
                    break;
                case M3dView.DisplayStatus.kActive:
                    request.setColor(ACTIVE_COLOR, (int)activeColorTable);
                    break;
                case M3dView.DisplayStatus.kActiveAffected:
                    request.setColor(ACTIVE_AFFECTED_COLOR, (int)activeColorTable);
                    break;
                case M3dView.DisplayStatus.kDormant:
                    request.setColor(DORMANT_COLOR, (int)dormantColorTable);
                    break;
                case M3dView.DisplayStatus.kHilite:
                    request.setColor(HILITE_COLOR, (int)activeColorTable);
                    break;
                default:
                    break;
            }
            queue.add(request);
            break;
        }
        case M3dView.DisplayStyle.kGouraudShaded:
        {
            // Create the smooth shaded draw request.
            request.token = (int)DrawShapeStyle.kDrawSmoothShaded;

            // Need to get the material info.
            MDagPath path = info.multiPath;   // path to your dag object
            M3dView view = info.view;         // view to draw to
            MMaterial material = base.material(path);

            // Evaluate the material and if necessary, the texture.
            material.evaluateMaterial(view, path);

            bool drawTexture = true;
            if (drawTexture && material.materialIsTextured)
            {
                material.evaluateTexture(data);
            }
            request.material = material;

            bool materialTransparent = false;
            material.getHasTransparency(ref materialTransparent);
            if (materialTransparent)
            {
                request.isTransparent = true;
            }
            queue.add(request);

            // create a draw request for wireframe on shaded if
            // necessary.
            if ((displayStatus == M3dView.DisplayStatus.kActive) ||
                (displayStatus == M3dView.DisplayStatus.kLead) ||
                (displayStatus == M3dView.DisplayStatus.kHilite))
            {
                // NOTE(review): this aliases the request that was just
                // queued rather than creating a fresh prototype (the
                // apiMesh version calls getDrawRequest(info) here);
                // verify MDrawRequest copy semantics make this safe.
                MDrawRequest wireRequest = request;
                wireRequest.setDrawData(data);
                wireRequest.token = (int)DrawShapeStyle.kDrawWireframeOnShaded;
                wireRequest.displayStyle = M3dView.DisplayStyle.kWireFrame;
                M3dView.ColorTable activeColorTable = M3dView.ColorTable.kActiveColors;
                switch (displayStatus)
                {
                    case M3dView.DisplayStatus.kLead:
                        wireRequest.setColor(LEAD_COLOR, (int)activeColorTable);
                        break;
                    case M3dView.DisplayStatus.kActive:
                        wireRequest.setColor(ACTIVE_COLOR, (int)activeColorTable);
                        break;
                    case M3dView.DisplayStatus.kHilite:
                        wireRequest.setColor(HILITE_COLOR, (int)activeColorTable);
                        break;
                    default:
                        break;
                }
                queue.add(wireRequest);
            }
            break;
        }
        case M3dView.DisplayStyle.kFlatShaded:
            // NOTE(review): unlike the other styles this request is not
            // queued -- confirm whether the omission of queue.add is
            // intentional (flat shading unimplemented) or a bug.
            request.token = (int)DrawShapeStyle.kDrawFlatShaded;
            break;
        default:
            break;
    }

    // Add draw requests for components.
    if (!objectAndActiveOnly)
    {
        // Inactive components.
        if ((appearance == M3dView.DisplayStyle.kPoints) ||
            (displayStatus == M3dView.DisplayStatus.kHilite))
        {
            // NOTE(review): vertexRequest also aliases `request` -- see
            // the note in the shaded branch above.
            MDrawRequest vertexRequest = request;
            vertexRequest.setDrawData(data);
            vertexRequest.token = (int)DrawShapeStyle.kDrawVertices;
            vertexRequest.setColor(DORMANT_VERTEX_COLOR, (int)M3dView.ColorTable.kActiveColors);
            queue.add(vertexRequest);
        }

        // Active components.
        if (shapeNode.hasActiveComponents)
        {
            MDrawRequest activeVertexRequest = request;
            activeVertexRequest.setDrawData(data);
            activeVertexRequest.token = (int)DrawShapeStyle.kDrawVertices;
            activeVertexRequest.setColor(ACTIVE_VERTEX_COLOR, (int)M3dView.ColorTable.kActiveColors);
            MObjectArray clist = shapeNode.activeComponents;
            MObject vertexComponent = clist[0]; // Should filter list
            activeVertexRequest.component = vertexComponent;
            queue.add(activeVertexRequest);
        }
    }
}
// Delegate all drawing to the base manipulator implementation; this
// manipulator adds no extra geometry of its own.
public override void draw(M3dView view, MDagPath path, M3dView.DisplayStyle style, M3dView.DisplayStatus status)
{
    base.draw(view, path, style, status);
}
/////////////////////////////////////////////////////////////////////
//
// Overrides
//
/////////////////////////////////////////////////////////////////////
public override void getDrawRequests(MDrawInfo info, bool objectAndActiveOnly, MDrawRequestQueue queue)
//
// Description:
//
//     Add draw requests to the draw queue
//
// Arguments:
//
//     info - current drawing state
//     objectsAndActiveOnly - no components if true
//     queue - queue of draw requests to add to
//
{
    // Get the data necessary to draw the shape.
    MDrawData data = new MDrawData();
    apiMesh meshNode = (apiMesh)surfaceShape;
    apiMeshGeom geom = meshNode.meshGeom();
    if ((null == geom) || (0 == geom.faceCount))
    {
        MGlobal.displayInfo("NO DrawRequest for apiMesh");
        return;
    }

    // This call creates a prototype draw request that we can fill
    // in and then add to the draw queue.
    MDrawRequest request = getDrawRequest(info); // info.getPrototype(this);
    getDrawData(geom, out data);
    request.setDrawData(data);

    // Decode the draw info and determine what needs to be drawn.
    M3dView.DisplayStyle appearance = info.displayStyle;
    M3dView.DisplayStatus displayStatus = info.displayStatus;

    // Are we displaying meshes?
    if (!info.objectDisplayStatus(M3dView.DisplayObjects.kDisplayMeshes))
    {
        return;
    }

    // Use this code to help speed up drawing.
    // inUserInteraction() is true for any interaction with
    // the viewport, including object or component TRS and camera changes.
    // userChangingViewContext() is true only when the user is using view
    // context tools (tumble, dolly, track, etc.)
    if (info.inUserInteraction || info.userChangingViewContext)
    {
        // User is using view context tools so
        // request fast draw and get out.
        request.token = (int)DrawToken.kDrawRedPointAtCenter;
        queue.add(request);
        return;
    }

    switch (appearance)
    {
        case M3dView.DisplayStyle.kWireFrame:
        {
            request.token = (int)DrawToken.kDrawWireframe;
            int activeColorTable = (int)M3dView.ColorTable.kActiveColors;
            int dormantColorTable = (int)M3dView.ColorTable.kDormantColors;
            // Wireframe colour depends on the selection/display status.
            switch (displayStatus)
            {
                case M3dView.DisplayStatus.kLead:
                    request.setColor(LEAD_COLOR, activeColorTable);
                    break;
                case M3dView.DisplayStatus.kActive:
                    request.setColor(ACTIVE_COLOR, activeColorTable);
                    break;
                case M3dView.DisplayStatus.kActiveAffected:
                    request.setColor(ACTIVE_AFFECTED_COLOR, activeColorTable);
                    break;
                case M3dView.DisplayStatus.kDormant:
                    request.setColor(DORMANT_COLOR, dormantColorTable);
                    break;
                case M3dView.DisplayStatus.kHilite:
                    request.setColor(HILITE_COLOR, activeColorTable);
                    break;
                default:
                    break;
            }
            queue.add(request);
            break;
        }
        case M3dView.DisplayStyle.kGouraudShaded:
        {
            // Create the smooth shaded draw request.
            request.token = (int)DrawToken.kDrawSmoothShaded;

            // Need to get the material info.
            MDagPath path = info.multiPath;   // path to your dag object
            M3dView view = info.view;         // view to draw to
            MMaterial material = base.material(path);

            // If the user currently has the default material enabled on the
            // view then use the default material for shading.
            if (view.usingDefaultMaterial)
            {
                material = MMaterial.defaultMaterial;
            }

            // Evaluate the material and if necessary, the texture.
            material.evaluateMaterial(view, path);

            bool drawTexture = true;
            if (drawTexture && material.materialIsTextured)
            {
                material.evaluateTexture(data);
            }
            request.material = material;
            // request.setDisplayStyle( appearance );

            bool materialTransparent = false;
            material.getHasTransparency(ref materialTransparent);
            if (materialTransparent)
            {
                request.isTransparent = true;
            }
            queue.add(request);

            // create a draw request for wireframe on shaded if
            // necessary.
            if ((displayStatus == M3dView.DisplayStatus.kActive) ||
                (displayStatus == M3dView.DisplayStatus.kLead) ||
                (displayStatus == M3dView.DisplayStatus.kHilite))
            {
                MDrawRequest wireRequest = getDrawRequest(info); // info.getPrototype(this);
                wireRequest.setDrawData(data);
                wireRequest.token = (int)DrawToken.kDrawWireframeOnShaded;
                wireRequest.displayStyle = M3dView.DisplayStyle.kWireFrame;
                int activeColorTable = (int)M3dView.ColorTable.kActiveColors;
                switch (displayStatus)
                {
                    case M3dView.DisplayStatus.kLead:
                        wireRequest.setColor(LEAD_COLOR, activeColorTable);
                        break;
                    case M3dView.DisplayStatus.kActive:
                        wireRequest.setColor(ACTIVE_COLOR, activeColorTable);
                        break;
                    case M3dView.DisplayStatus.kHilite:
                        wireRequest.setColor(HILITE_COLOR, activeColorTable);
                        break;
                    default:
                        break;
                }
                queue.add(wireRequest);
            }
            break;
        }
        case M3dView.DisplayStyle.kFlatShaded:
            request.token = (int)DrawToken.kDrawFlatShaded;
            queue.add(request);
            break;
        case M3dView.DisplayStyle.kBoundingBox:
            request.token = (int)DrawToken.kDrawBoundingBox;
            queue.add(request);
            break;
        default:
            break;
    }

    // Add draw requests for components.
    if (!objectAndActiveOnly)
    {
        // Inactive components.
        if ((appearance == M3dView.DisplayStyle.kPoints) ||
            (displayStatus == M3dView.DisplayStatus.kHilite))
        {
            MDrawRequest vertexRequest = getDrawRequest(info); // info.getPrototype(this);
            vertexRequest.setDrawData(data);
            vertexRequest.token = (int)DrawToken.kDrawVertices;
            vertexRequest.setColor(DORMANT_VERTEX_COLOR, (int)M3dView.ColorTable.kActiveColors);
            queue.add(vertexRequest);
        }

        // Active components.
        if (((MPxSurfaceShape)surfaceShape).hasActiveComponents)
        {
            MDrawRequest activeVertexRequest = getDrawRequest(info); // info.getPrototype(this);
            activeVertexRequest.setDrawData(data);
            activeVertexRequest.token = (int)DrawToken.kDrawVertices;
            activeVertexRequest.setColor(ACTIVE_VERTEX_COLOR, (int)M3dView.ColorTable.kActiveColors);
            MObjectArray clist = ((MPxSurfaceShape)surfaceShape).activeComponents;
            MObject vertexComponent = clist[0]; // Should filter list
            activeVertexRequest.component = vertexComponent;
            queue.add(activeVertexRequest);
        }
    }
}
public bool selectVertices(MSelectInfo selectInfo, MSelectionList selectionList, MPointArray worldSpaceSelectPts)
//
// Description:
//
//     Vertex selection: tests every mesh vertex against the current GL
//     pick region and adds the hits (or, for a single click, only the
//     vertex nearest the camera) to the selection list.
//
// Arguments:
//
//     selectInfo - the selection state information
//     selectionList - the list of selected items to add to
//     worldSpaceSelectPts - world-space points matching selectionList entries
//
{
    bool selected = false;
    M3dView view = selectInfo.view;

    MPoint xformedPoint = new MPoint();
    MPoint selectionPoint = new MPoint();
    double z = 0.0;
    double previousZ = 0.0;
    int closestPointVertexIndex = -1;

    MDagPath path = selectInfo.multiPath;

    // Create a component that will store the selected vertices.
    MFnSingleIndexedComponent fnComponent = new MFnSingleIndexedComponent();
    MObject surfaceComponent = fnComponent.create(MFn.Type.kMeshVertComponent);
    uint vertexIndex;

    // If the user did a single mouse click and we find > 1 selection
    // we will use the alignmentMatrix to find out which is the closest.
    MMatrix alignmentMatrix = new MMatrix();
    MPoint singlePoint = new MPoint();
    bool singleSelection = selectInfo.singleSelection;
    if (singleSelection)
    {
        alignmentMatrix = selectInfo.alignmentMatrix;
    }

    // Get the geometry information.
    apiMesh meshNode = (apiMesh)surfaceShape;
    apiMeshGeom geom = meshNode.meshGeom();

    // Loop through all vertices of the mesh and
    // see if they lie within the selection area.
    uint numVertices = geom.vertices.length;
    for (vertexIndex = 0; vertexIndex < numVertices; vertexIndex++)
    {
        MPoint currentPoint = geom.vertices[(int)vertexIndex];

        // Sets OpenGL's render mode to select and stores
        // selected items in a pick buffer.
        view.beginSelect();

        OpenGL.glBegin(OpenGL.GL_POINTS);
        OpenGL.glVertex3f((float)currentPoint[0], (float)currentPoint[1], (float)currentPoint[2]);
        OpenGL.glEnd();

        if (view.endSelect() > 0) // Hit count > 0
        {
            selected = true;
            if (singleSelection)
            {
                // Track the hit with the largest aligned z value
                // (i.e. the vertex nearest the camera).
                xformedPoint = currentPoint;
                xformedPoint.homogenize();
                xformedPoint.multiplyEqual(alignmentMatrix);
                z = xformedPoint.z;
                if (closestPointVertexIndex < 0 || z > previousZ)
                {
                    closestPointVertexIndex = (int)vertexIndex;
                    singlePoint = currentPoint;
                    previousZ = z;
                }
            }
            else
            {
                // Multiple selection: store all elements.
                fnComponent.addElement((int)vertexIndex);
            }
        }
    }

    // If single selection, insert the closest point into the array.
    if (selected && selectInfo.singleSelection)
    {
        fnComponent.addElement(closestPointVertexIndex);

        // Need to get world space position for this vertex.
        selectionPoint = singlePoint;
        selectionPoint.multiplyEqual(path.inclusiveMatrix);
    }

    // Add the selected component to the selection list.
    if (selected)
    {
        MSelectionList selectionItem = new MSelectionList();
        selectionItem.add(path, surfaceComponent);
        MSelectionMask mask = new MSelectionMask(MSelectionMask.SelectionType.kSelectComponentsMask);
        selectInfo.addSelection(
            selectionItem, selectionPoint,
            selectionList, worldSpaceSelectPts,
            mask, true);
    }

    return (selected);
}
/////////////////////////////////////////////////////////////////////
//
// Helper routines
//
/////////////////////////////////////////////////////////////////////

// Wireframe drawing routine: renders every face of the mesh as a
// GL_LINE_LOOP. When drawing wireframe-on-shaded, depth writes are
// disabled so the lines overlay the shaded surface.
//
//   request - request to be drawn
//   view    - view to draw into
//
public void drawWireframe(MDrawRequest request, M3dView view)
{
    MDrawData data = request.drawData();
    apiMeshGeom geom = (apiMeshGeom)data.geometry();
    if (geom == null)
    {
        return;
    }

    bool wireFrameOnShaded = (int)DrawToken.kDrawWireframeOnShaded == request.token;

    view.beginGL();

    // Save the bits of GL state we are about to change.
    bool lightingWasOn = OpenGL.glIsEnabled(OpenGL.GL_LIGHTING) != 0;
    if (lightingWasOn)
    {
        OpenGL.glDisable(OpenGL.GL_LIGHTING);
    }
    if (wireFrameOnShaded)
    {
        OpenGL.glDepthMask(0);
    }

    // One line loop per face; vertexCursor walks the flat
    // face_connects array across all faces.
    int vertexCursor = 0;
    for (int face = 0; face < geom.faceCount; face++)
    {
        OpenGL.glBegin(OpenGL.GL_LINE_LOOP);
        for (int corner = 0; corner < geom.face_counts[face]; corner++)
        {
            MPoint vertex = geom.vertices[geom.face_connects[vertexCursor++]];
            OpenGL.glVertex3f((float)vertex[0], (float)vertex[1], (float)vertex[2]);
        }
        OpenGL.glEnd();
    }

    // Restore the saved state.
    if (lightingWasOn)
    {
        OpenGL.glEnable(OpenGL.GL_LIGHTING);
    }
    if (wireFrameOnShaded)
    {
        OpenGL.glDepthMask(1);
    }

    view.endGL();
}
// Bounding box drawing routine.
//
//   request - draw request describing what to render
//   view    - the 3d view to render into
//
// Strategy: draw the two z-facing rectangles of the box, then
// connect their matching corners with line segments.
public void drawBoundingBox(MDrawRequest request, M3dView view)
{
    // Get the surface shape
    MPxSurfaceShape shape = (MPxSurfaceShape)surfaceShape;
    if (shape == null)
    {
        return;
    }

    // Box extents
    MBoundingBox box = shape.boundingBox();
    float w = (float)box.width;
    float h = (float)box.height;
    float d = (float)box.depth;

    view.beginGL();

    MPoint nearCorner = box.min;
    MPoint farCorner = nearCorner.plus(new MVector(0, 0, d));

    // Near rectangle (closing back at the first corner).
    OpenGL.glBegin(OpenGL.GL_LINE_LOOP);
    OpenGL.glVertex3f((float)nearCorner[0], (float)nearCorner[1], (float)nearCorner[2]);
    OpenGL.glVertex3f((float)nearCorner[0] + w, (float)nearCorner[1], (float)nearCorner[2]);
    OpenGL.glVertex3f((float)nearCorner[0] + w, (float)nearCorner[1] + h, (float)nearCorner[2]);
    OpenGL.glVertex3f((float)nearCorner[0], (float)nearCorner[1] + h, (float)nearCorner[2]);
    OpenGL.glVertex3f((float)nearCorner[0], (float)nearCorner[1], (float)nearCorner[2]);
    OpenGL.glEnd();

    // Far rectangle.
    OpenGL.glBegin(OpenGL.GL_LINE_LOOP);
    OpenGL.glVertex3f((float)farCorner[0], (float)farCorner[1], (float)farCorner[2]);
    OpenGL.glVertex3f((float)farCorner[0] + w, (float)farCorner[1], (float)farCorner[2]);
    OpenGL.glVertex3f((float)farCorner[0] + w, (float)farCorner[1] + h, (float)farCorner[2]);
    OpenGL.glVertex3f((float)farCorner[0], (float)farCorner[1] + h, (float)farCorner[2]);
    OpenGL.glVertex3f((float)farCorner[0], (float)farCorner[1], (float)farCorner[2]);
    OpenGL.glEnd();

    // Connect the corresponding corners of the two rectangles.
    OpenGL.glBegin(OpenGL.GL_LINES);
    OpenGL.glVertex3f((float)farCorner[0], (float)farCorner[1], (float)farCorner[2]);
    OpenGL.glVertex3f((float)nearCorner[0], (float)nearCorner[1], (float)nearCorner[2]);

    OpenGL.glVertex3f((float)farCorner[0] + w, (float)farCorner[1], (float)farCorner[2]);
    OpenGL.glVertex3f((float)nearCorner[0] + w, (float)nearCorner[1], (float)nearCorner[2]);

    OpenGL.glVertex3f((float)farCorner[0] + w, (float)farCorner[1] + h, (float)farCorner[2]);
    OpenGL.glVertex3f((float)nearCorner[0] + w, (float)nearCorner[1] + h, (float)nearCorner[2]);

    OpenGL.glVertex3f((float)farCorner[0], (float)farCorner[1] + h, (float)farCorner[2]);
    OpenGL.glVertex3f((float)nearCorner[0], (float)nearCorner[1] + h, (float)nearCorner[2]);
    OpenGL.glEnd();

    view.endGL();
}
// Emit a single UV as a GL vertex in the port view; when drawNum is
// true the UV id is also drawn as centered text at that position.
private void drawUVMapCoord(M3dView view, int uv, float u, float v, bool drawNum)
{
    if (drawNum)
    {
        view.drawText(uv.ToString(), new MPoint(u, v, 0), M3dView.TextPosition.kCenter);
    }
    OpenGL.glVertex3f(u, v, 0.0f);
}
// Component (vertex) drawing routine.
//
//   request - draw request describing what to render
//   view    - the 3d view to render into
//
public void drawVertices(MDrawRequest request, M3dView view)
{
    MDrawData data = request.drawData();
    apiMeshGeom geom = (apiMeshGeom)data.geometry();
    if (geom == null)
    {
        return;
    }

    view.beginGL();

    // Save GL state that will be modified so it can be restored.
    bool restoreLighting = OpenGL.glIsEnabled(OpenGL.GL_LIGHTING) != 0;
    if (restoreLighting)
    {
        OpenGL.glDisable(OpenGL.GL_LIGHTING);
    }
    float[] savedPointSize = new float[1];
    OpenGL.glGetFloatv(OpenGL.GL_POINT_SIZE, savedPointSize);

    // Vertices are drawn with a fixed, larger point size.
    OpenGL.glPointSize(POINT_SIZE);

    // If the draw request carries a component, draw (and annotate)
    // only the active vertices listed in it; otherwise draw all
    // vertices of all faces.
    MObject comp = request.component;
    if (!comp.isNull)
    {
        MFnSingleIndexedComponent fnComponent = new MFnSingleIndexedComponent(comp);
        for (int e = 0; e < fnComponent.elementCount; e++)
        {
            int index = fnComponent.element(e);
            OpenGL.glBegin(OpenGL.GL_POINTS);
            MPoint vertex = geom.vertices[index];
            OpenGL.glVertex3f((float)vertex[0], (float)vertex[1], (float)vertex[2]);
            OpenGL.glEnd();
            // Label each active vertex with its index.
            view.drawText(index.ToString(), vertex);
        }
    }
    else
    {
        int connectIdx = 0;
        for (int face = 0; face < geom.faceCount; face++)
        {
            OpenGL.glBegin(OpenGL.GL_POINTS);
            for (int corner = 0; corner < geom.face_counts[face]; corner++)
            {
                MPoint vertex = geom.vertices[geom.face_connects[connectIdx++]];
                OpenGL.glVertex3f((float)vertex[0], (float)vertex[1], (float)vertex[2]);
            }
            OpenGL.glEnd();
        }
    }

    // Restore the previous GL state.
    if (restoreLighting)
    {
        OpenGL.glEnable(OpenGL.GL_LIGHTING);
    }
    OpenGL.glPointSize(savedPointSize[0]);

    view.endGL();
}
// Bounding box drawing routine.
//
//   request - draw request describing what to render
//   view    - the 3d view to render into
//
public void drawBoundingBox(MDrawRequest request, M3dView view)
{
    // Get the surface shape
    MPxSurfaceShape shape = (MPxSurfaceShape)surfaceShape;
    if (shape == null)
    {
        return;
    }

    // Box extents
    MBoundingBox box = shape.boundingBox();
    float w = (float)box.width;
    float h = (float)box.height;
    float d = (float)box.depth;

    view.beginGL();

    // Front/back faces are drawn as line loops over the same four
    // (dx, dy) corner offsets; the connecting edges reuse them too.
    MPoint front = box.min;
    MPoint back = front.plus(new MVector(0, 0, d));

    // Corner offsets in loop order, closing back at the first corner.
    float[,] corner = { { 0, 0 }, { w, 0 }, { w, h }, { 0, h }, { 0, 0 } };

    // Front face
    OpenGL.glBegin(OpenGL.GL_LINE_LOOP);
    for (int i = 0; i < 5; i++)
    {
        OpenGL.glVertex3f((float)front[0] + corner[i, 0], (float)front[1] + corner[i, 1], (float)front[2]);
    }
    OpenGL.glEnd();

    // Back face
    OpenGL.glBegin(OpenGL.GL_LINE_LOOP);
    for (int i = 0; i < 5; i++)
    {
        OpenGL.glVertex3f((float)back[0] + corner[i, 0], (float)back[1] + corner[i, 1], (float)back[2]);
    }
    OpenGL.glEnd();

    // Connecting edges: each back corner to the matching front corner.
    OpenGL.glBegin(OpenGL.GL_LINES);
    for (int i = 0; i < 4; i++)
    {
        OpenGL.glVertex3f((float)back[0] + corner[i, 0], (float)back[1] + corner[i, 1], (float)back[2]);
        OpenGL.glVertex3f((float)front[0] + corner[i, 0], (float)front[1] + corner[i, 1], (float)front[2]);
    }
    OpenGL.glEnd();

    view.endGL();
}
// Tool press handler. When an object has already been selected
// (i.e. we are no longer in selecting mode) this records the press
// position, classifies the current view by its camera orientation,
// and creates the move tool command for the drag that follows.
public override void doPress(MEvent eventArg)
{
    base.doPress(eventArg);

    // Still in selecting mode: nothing further to set up.
    if (_isSelecting())
    {
        return;
    }

    eventArg.getPosition(ref startPos_x, ref startPos_y);
    view = M3dView.active3dView;

    MDagPath camera = view.Camera;
    MFnCamera fnCamera = new MFnCamera(camera);
    MVector upDir = fnCamera.upDirection(MSpace.Space.kWorld);
    MVector rightDir = fnCamera.rightDirection(MSpace.Space.kWorld);

    // Determine which orthographic window the camera corresponds to.
    if (fnCamera.isOrtho)
    {
        if (upDir.isEquivalent(MVector.zNegAxis, 1e-3))
        {
            currWin = 0; // TOP
        }
        else if (rightDir.isEquivalent(MVector.xAxis, 1e-3))
        {
            currWin = 1; // FRONT
        }
        else
        {
            currWin = 2; // SIDE
        }
    }
    else
    {
        currWin = 3; // PERSP
        MGlobal.displayWarning("moveTool only works in top, front and side views");
    }

    // Create an instance of the move tool command.
    cmd = _newToolCommand() as moveCmd;
    cmd.setVector(0.0, 0.0, 0.0);
}
// for userInteraction example code
//
// Very fast draw routine: renders one large red point at the local
// origin.
//
//   request - draw request to be drawn
//   view    - view to draw into
//
public void drawRedPointAtCenter(MDrawRequest request, M3dView view)
{
    view.beginGL();

    // Preserve the current color and point state while we change them.
    OpenGL.glPushAttrib(OpenGL.GL_CURRENT_BIT | OpenGL.GL_POINT_BIT);

    OpenGL.glPointSize(20.0f);
    OpenGL.glBegin(OpenGL.GL_POINTS);
    OpenGL.glColor3f(1.0f, 0.0f, 0.0f);
    OpenGL.glVertex3f(0.0f, 0.0f, 0.0f);
    OpenGL.glEnd();

    // Restore the saved state.
    OpenGL.glPopAttrib();

    view.endGL();
}
//
// Description:
//
//     Shaded drawing routine
//
// Arguments:
//
//     request - request to be drawn
//     view - view to draw into
//
public void drawShaded( MDrawRequest request, M3dView view )
{
    MDrawData data = request.drawData();
    apiMeshGeom geom = (apiMeshGeom)data.geometry();
    if (geom == null)
        return;

    view.beginGL();

    // NOTE(review): polygon offset is presumably enabled so shaded fill
    // does not z-fight with a wireframe overlay — confirm.
    OpenGL.glEnable(OpenGL.GL_POLYGON_OFFSET_FILL);

    // Set up the material
    //
    MMaterial material = request.material;
    material.setMaterial( request.multiPath, request.isTransparent );

    // Enable texturing ...
    //
    // Note, Maya does not enable texturing if useDefaultMaterial is enabled.
    // However, you can choose to ignore this in your draw routine.
    //
    bool drawTexture = material.materialIsTextured && !view.usingDefaultMaterial;
    if (drawTexture) {
        OpenGL.glEnable(OpenGL.GL_TEXTURE_2D);
    }

    // Apply the texture to the current view
    //
    if ( drawTexture ) {
        material.applyTexture( view, data );
    }

    // Draw the polygons: one GL_POLYGON per face, emitting per-vertex
    // uv (when texturing), normal, and position.
    //
    int vid = 0;
    uint uv_len = geom.uvcoords.uvcount();
    for ( int i=0; i<geom.faceCount; i++ )
    {
        OpenGL.glBegin(OpenGL.GL_POLYGON);
        for ( int v=0; v < geom.face_counts[i]; v++ )
        {
            MPoint vertex = geom.vertices[ geom.face_connects[vid] ];
            MVector normal = geom.normals[ geom.face_connects[vid] ];
            if (uv_len > 0)
            {
                // If we are drawing the texture, make sure the coord
                // arrays are in bounds.
                if ( drawTexture )
                {
                    int uvId1 = geom.uvcoords.uvId(vid);
                    if ( uvId1 < uv_len )
                    {
                        float tu = 0.0f;
                        float tv = 0.0f;
                        geom.uvcoords.getUV( uvId1, ref tu, ref tv );
                        OpenGL.glTexCoord2f( tu, tv );
                    }
                }
            }
            OpenGL.glNormal3f((float)normal[0], (float)normal[1], (float)normal[2]);
            OpenGL.glVertex3f((float)vertex[0], (float)vertex[1], (float)vertex[2]);
            vid++;
        }
        OpenGL.glEnd();
    }

    // Turn off texture mode
    //
    if (drawTexture) {
        OpenGL.glDisable(OpenGL.GL_TEXTURE_2D);
    }

    view.endGL();
}
// Main draw routine for UV editor. This is called by maya when the
// shape is selected and the UV texture window is visible.
//
// Dispatches to the wireframe / coordinate-number helpers depending
// on the requested drawing function.
public override void drawUV(M3dView view, MTextureEditorDrawInfo info)
{
    apiMesh meshNode = (apiMesh)surfaceShape;
    apiMeshGeom geom = meshNode.meshGeom();

    // Nothing to draw without UVs.
    if (geom.uvcoords.uvcount() == 0)
    {
        return;
    }

    view.setDrawColor(new MColor(1.0f, 0.0f, 0.0f));

    switch (info.drawingFunction)
    {
        case MTextureEditorDrawInfo.DrawingFunction.kDrawEverything:
        case MTextureEditorDrawInfo.DrawingFunction.kDrawUVForSelect:
            drawUVWireframe(geom, view, info);
            drawUVMapCoordNum(geom, view, info, false);
            break;
        case MTextureEditorDrawInfo.DrawingFunction.kDrawWireframe:
        case MTextureEditorDrawInfo.DrawingFunction.kDrawVertexForSelect:
        case MTextureEditorDrawInfo.DrawingFunction.kDrawEdgeForSelect:
        case MTextureEditorDrawInfo.DrawingFunction.kDrawFacetForSelect:
        default:
            drawUVWireframe(geom, view, info);
            break;
    }
}
// Manipulator press handler: clear any stale mouse delta and refresh
// the drag state for the new interaction.
public override bool doPress(M3dView view)
{
    mousePointGlName.assign(MPoint.origin);
    updateDragInformation();
    return true;
}
//
// Description:
//
//     Component (vertex) drawing routine
//
// Arguments:
//
//     request - request to be drawn
//     view - view to draw into
//
public void drawVertices( MDrawRequest request, M3dView view )
{
    MDrawData data = request.drawData();
    apiMeshGeom geom = (apiMeshGeom)data.geometry();
    if (geom == null)
        return;

    view.beginGL();

    // Query current state so it can be restored
    //
    bool lightingWasOn = OpenGL.glIsEnabled(OpenGL.GL_LIGHTING) != 0;
    if ( lightingWasOn ) {
        OpenGL.glDisable(OpenGL.GL_LIGHTING);
    }
    float[] lastPointSize = new float[1];
    OpenGL.glGetFloatv(OpenGL.GL_POINT_SIZE, lastPointSize);

    // Set the point size of the vertices
    //
    OpenGL.glPointSize(POINT_SIZE);

    // If there is a component specified by the draw request
    // then loop over comp (using an MFnComponent class) and draw the
    // active vertices, otherwise draw all vertices.
    //
    MObject comp = request.component;
    if ( ! comp.isNull ) {
        MFnSingleIndexedComponent fnComponent = new MFnSingleIndexedComponent( comp );
        for ( int i=0; i<fnComponent.elementCount; i++ )
        {
            int index = fnComponent.element( i );
            OpenGL.glBegin(OpenGL.GL_POINTS);
            MPoint vertex = geom.vertices[ index ];
            OpenGL.glVertex3f((float)vertex[0], (float)vertex[1], (float)vertex[2] );
            OpenGL.glEnd();
            // Label each active vertex with its index so it can be
            // identified in the viewport.
            string annotation = index.ToString();
            view.drawText( annotation, vertex );
        }
    } else {
        int vid = 0;
        for ( int i=0; i<geom.faceCount; i++ )
        {
            OpenGL.glBegin(OpenGL.GL_POINTS);
            for ( int v=0; v<geom.face_counts[i]; v++ )
            {
                MPoint vertex = geom.vertices[ geom.face_connects[vid++] ];
                OpenGL.glVertex3f((float)vertex[0], (float)vertex[1], (float)vertex[2] );
            }
            OpenGL.glEnd();
        }
    }

    // Restore the state
    //
    if ( lightingWasOn ) {
        OpenGL.glEnable(OpenGL.GL_LIGHTING);
    }
    OpenGL.glPointSize(lastPointSize[0]);

    view.endGL();
}
// Manipulator release handler: applies the accumulated mouse delta
// as a scale to every transform on the active selection list.
// Simple implementation that does not support undo.
public override bool doRelease(M3dView view)
{
    MSelectionList activeList = new MSelectionList();
    MGlobal.getActiveSelectionList(activeList);

    MObject dependNode = new MObject();
    for (MItSelectionList it = new MItSelectionList(activeList); !it.isDone; it.next())
    {
        it.getDependNode(dependNode);

        // Skip anything on the list that is not a transform.
        MFnTransform xform;
        try
        {
            xform = new MFnTransform(dependNode);
        }
        catch (System.Exception)
        {
            continue;
        }

        // Scale factor is 1 plus the drag delta on each axis.
        double[] scale =
        {
            mousePointGlName.x + 1,
            mousePointGlName.y + 1,
            mousePointGlName.z + 1
        };
        xform.setScale(scale);
    }
    return true;
}
//
// Description:
//
//     Wireframe drawing routine
//
// Arguments:
//
//     request - request to be drawn
//     view - view to draw into
//
/////////////////////////////////////////////////////////////////////
//
// Helper routines
//
/////////////////////////////////////////////////////////////////////
public void drawWireframe( MDrawRequest request, M3dView view )
{
    MDrawData data = request.drawData();
    apiMeshGeom geom = (apiMeshGeom)data.geometry();
    if (geom == null)
        return;

    // Wireframe-on-shaded requests disable depth writes below so the
    // lines overlay the shaded surface.
    int token = request.token;
    bool wireFrameOnShaded = false;
    if ((int)DrawToken.kDrawWireframeOnShaded == token) {
        wireFrameOnShaded = true;
    }

    view.beginGL();

    // Query current state so it can be restored
    //
    bool lightingWasOn = OpenGL.glIsEnabled(OpenGL.GL_LIGHTING) != 0;
    if ( lightingWasOn ) {
        OpenGL.glDisable(OpenGL.GL_LIGHTING);
    }
    if ( wireFrameOnShaded ) {
        OpenGL.glDepthMask(0);
    }

    // Draw the wireframe mesh: one line loop per face.
    //
    int vid = 0;
    for ( int i=0; i<geom.faceCount; i++ )
    {
        OpenGL.glBegin(OpenGL.GL_LINE_LOOP);
        for ( int v=0; v<geom.face_counts[i]; v++ )
        {
            MPoint vertex = geom.vertices[ geom.face_connects[vid++] ];
            OpenGL.glVertex3f((float)vertex[0], (float)vertex[1], (float)vertex[2]);
        }
        OpenGL.glEnd();
    }

    // Restore the state
    //
    if ( lightingWasOn ) {
        OpenGL.glEnable(OpenGL.GL_LIGHTING);
    }
    if ( wireFrameOnShaded ) {
        OpenGL.glDepthMask(1);
    }

    view.endGL();
}
// Draw the given uv value into the port view as a GL vertex; when
// drawNum is true also draw the UV id as centered text at the same
// location.
private void drawUVMapCoord(M3dView view, int uv, float u, float v, bool drawNum)
{
    if (drawNum)
    {
        string label = uv.ToString();
        view.drawText(label, new MPoint(u, v, 0), M3dView.TextPosition.kCenter);
    }
    OpenGL.glVertex3f(u, v, 0.0f);
}
// Manipulator draw routine: draws the manipulator's line geometry,
// offset by any accumulated mouse delta, and registers it as a
// pickable GL component.
public override void draw(M3dView view, MDagPath path, M3dView.DisplayStyle style, M3dView.DisplayStatus status)
{
    // Are we in the right view
    MDagPath dpath = new MDagPath();
    view.getCamera(dpath);
    MFnCamera viewCamera = new MFnCamera(dpath);
    string nameBuffer = viewCamera.name;
    if (nameBuffer == null)
    {
        return;
    }
    // Only draw in views whose camera name contains "persp" or "front".
    if (nameBuffer.IndexOf("persp") == -1 && nameBuffer.IndexOf("front") == -1)
    {
        return;
    }
    // bool rightLine = !affectTranslate;

    // Populate the point arrays which are in local space
    MPoint top = lineGeometry.topPoint();
    MPoint bottom = lineGeometry.bottomPoint();

    // Depending on what's active, we modify the
    // end points with mouse deltas in local
    // space
    uint active = 0;
    try
    {
        glActiveName(ref active);
    }
    catch (System.Exception)
    {
        // NOTE(review): failing to query the active handle aborts the
        // draw silently — confirm this is intended.
        return;
    }
    if (active == lineName && active != 0)
    {
        top[0] += (float)mousePointGlName.x;
        top[1] += (float)mousePointGlName.y;
        top[2] += (float)mousePointGlName.z;
        bottom[0] += (float)mousePointGlName.x;
        bottom[1] += (float)mousePointGlName.y;
        bottom[2] += (float)mousePointGlName.z;
    }

    // Begin the drawing
    view.beginGL();

    // Get the starting value of the pickable items
    uint glPickableItem = 0;
    glFirstHandle(ref glPickableItem);

    // Top
    lineName = glPickableItem;

    // Place before you draw the manipulator component that can
    // be pickable.
    colorAndName(view, glPickableItem, true, mainColor());
    OpenGL.glBegin((uint)libOpenMayaRenderNet.MGL_LINES);
    OpenGL.glVertex3d(top.x, top.y, top.z);
    OpenGL.glVertex3d(bottom.x, bottom.y, bottom.z);
    OpenGL.glEnd();

    // End the drawing
    view.endGL();
}
// Draw the UV points for all uvs on this surface shape, optionally
// with their id numbers.
private void drawUVMapCoordNum(apiMeshGeom geom, M3dView view,
                               MTextureEditorDrawInfo info, bool drawNumbers)
{
    view.beginGL();

    // Remember the current point size so it can be restored.
    float[] savedPointSize = new float[1];
    OpenGL.glGetFloatv(OpenGL.GL_POINT_SIZE, savedPointSize);
    OpenGL.glPointSize(UV_POINT_SIZE);

    uint total = geom.uvcoords.uvcount();
    for (int id = 0; id < total; id++)
    {
        float u = 0.0f;
        float v = 0.0f;
        geom.uvcoords.getUV(id, ref u, ref v);
        drawUVMapCoord(view, id, u, v, drawNumbers);
    }

    OpenGL.glPointSize(savedPointSize[0]);
    view.endGL();
}
// Manipulator drag handler: refresh the drag state and continue.
public override bool doDrag(M3dView view)
{
    updateDragInformation();
    return true;
}
//
// Description:
//   Draws the UV layout in wireframe mode.
//
// Each face is emitted as GL_LINES: one segment per consecutive pair
// of face-vertices, then a closing segment from the face's last
// vertex back to its first.
//
private void drawUVWireframe( apiMeshGeom geom, M3dView view, MTextureEditorDrawInfo info)
{
    view.beginGL();

    // Draw the polygons
    //
    int vid = 0;
    int vid_start = 0;
    for ( int i=0; i<geom.faceCount; i++ )
    {
        OpenGL.glBegin(OpenGL.GL_LINES);
        uint v;
        float du1 = 0.0f;
        float dv1 = 0.0f;
        float du2 = 0.0f;
        float dv2 = 0.0f;
        int uvId1, uvId2;

        // Remember where this face's vertices start so the loop can
        // be closed after the pairwise segments below.
        vid_start = vid;
        for ( v=0; v<geom.face_counts[i]-1; v++ )
        {
            uvId1 = geom.uvcoords.uvId(vid);
            uvId2 = geom.uvcoords.uvId(vid + 1);
            geom.uvcoords.getUV( uvId1, ref du1, ref dv1 );
            geom.uvcoords.getUV( uvId2, ref du2, ref dv2 );
            OpenGL.glVertex3f( du1, dv1, 0.0f );
            OpenGL.glVertex3f( du2, dv2, 0.0f );
            vid++;
        }

        // Closing segment: last face-vertex back to the first.
        uvId1 = geom.uvcoords.uvId(vid);
        uvId2 = geom.uvcoords.uvId(vid_start);
        geom.uvcoords.getUV( uvId1, ref du1, ref dv1 );
        geom.uvcoords.getUV( uvId2, ref du2, ref dv2 );
        OpenGL.glVertex3f(du1, dv1, 0.0f);
        OpenGL.glVertex3f(du2, dv2, 0.0f);
        vid ++ ;
        OpenGL.glEnd();
    }
    view.endGL();
}
// for userInteraction example code
//
public void drawRedPointAtCenter( MDrawRequest request, M3dView view )
//
// Description:
//
//     Simple very fast draw routine: renders one large red point at
//     the local origin.
//
// Arguments:
//
//     request - request to be drawn
//     view - view to draw into
//
{
    // Draw point
    //
    view.beginGL();

    // save state (current color and point attributes)
    //
    OpenGL.glPushAttrib(OpenGL.GL_CURRENT_BIT | OpenGL.GL_POINT_BIT);
    OpenGL.glPointSize(20.0f);
    OpenGL.glBegin(OpenGL.GL_POINTS);
    OpenGL.glColor3f(1.0f, 0.0f, 0.0f);
    OpenGL.glVertex3f(0.0f, 0.0f, 0.0f);
    OpenGL.glEnd();

    // restore state
    //
    OpenGL.glPopAttrib();
    view.endGL();
}
// Main (OpenGL) draw routine. Gets called by Maya with draw requests;
// the request token selects which specialized routine handles it.
//
//   request - request to be drawn
//   view    - view to draw into
//
public override void draw(MDrawRequest request, M3dView view)
{
    switch ((DrawToken)request.token)
    {
        case DrawToken.kDrawWireframe:
        case DrawToken.kDrawWireframeOnShaded:
            drawWireframe(request, view);
            break;

        case DrawToken.kDrawSmoothShaded:
            drawShaded(request, view);
            break;

        case DrawToken.kDrawFlatShaded:
            // Not implemented
            break;

        case DrawToken.kDrawVertices:
            drawVertices(request, view);
            break;

        case DrawToken.kDrawBoundingBox:
            drawBoundingBox(request, view);
            break;

        // for userChangingViewContext example code
        case DrawToken.kDrawRedPointAtCenter:
            drawRedPointAtCenter(request, view);
            break;
    }
}
//
// Description:
//   Main entry point for UV drawing. This method is called by the UV
//   texture editor when the shape is 'active'.
//
// Input:
//   A 3dView.
//
// Main draw routine for UV editor. This is called by maya when the
// shape is selected and the UV texture window is visible.
//
public override void drawUV( M3dView view, MTextureEditorDrawInfo info )
{
    apiMesh meshNode = (apiMesh)surfaceShape;
    apiMeshGeom geom = meshNode.meshGeom();
    // Only draw when the shape actually has UVs.
    uint uv_len = geom.uvcoords.uvcount();
    if (uv_len > 0)
    {
        view.setDrawColor( new MColor( 1.0f, 0.0f, 0.0f ) );
        // Dispatch on what the UV editor asked for; everything that is
        // not "everything / UV select" falls back to plain wireframe.
        switch( info.drawingFunction )
        {
            case MTextureEditorDrawInfo.DrawingFunction.kDrawWireframe:
                drawUVWireframe( geom, view, info );
                break;
            case MTextureEditorDrawInfo.DrawingFunction.kDrawEverything:
            case MTextureEditorDrawInfo.DrawingFunction.kDrawUVForSelect:
                drawUVWireframe( geom, view, info );
                drawUVMapCoordNum( geom, view, info, false );
                break;
            case MTextureEditorDrawInfo.DrawingFunction.kDrawVertexForSelect:
            case MTextureEditorDrawInfo.DrawingFunction.kDrawEdgeForSelect:
            case MTextureEditorDrawInfo.DrawingFunction.kDrawFacetForSelect:
            default:
                drawUVWireframe( geom, view, info );
                break;
        };
    }
}
// Shaded drawing routine.
//
//   request - draw request describing what to render
//   view    - the 3d view to render into
//
public void drawShaded(MDrawRequest request, M3dView view)
{
    MDrawData data = request.drawData();
    apiMeshGeom geom = (apiMeshGeom)data.geometry();
    if (geom == null)
    {
        return;
    }

    view.beginGL();
    OpenGL.glEnable(OpenGL.GL_POLYGON_OFFSET_FILL);

    // Bind the request's material for the current path.
    MMaterial material = request.material;
    material.setMaterial(request.multiPath, request.isTransparent);

    // Texturing is only used when the material is textured and the
    // view is not forcing the default material (Maya itself would not
    // enable texturing with useDefaultMaterial on).
    bool useTexture = material.materialIsTextured && !view.usingDefaultMaterial;
    if (useTexture)
    {
        OpenGL.glEnable(OpenGL.GL_TEXTURE_2D);
        material.applyTexture(view, data);
    }

    // Emit one GL_POLYGON per face: per-vertex uv (when texturing),
    // normal, and position.
    uint uvTotal = geom.uvcoords.uvcount();
    int connectIdx = 0;
    for (int face = 0; face < geom.faceCount; face++)
    {
        OpenGL.glBegin(OpenGL.GL_POLYGON);
        for (int corner = 0; corner < geom.face_counts[face]; corner++)
        {
            MPoint position = geom.vertices[geom.face_connects[connectIdx]];
            MVector normal = geom.normals[geom.face_connects[connectIdx]];
            if (uvTotal > 0 && useTexture)
            {
                // Guard against out-of-range uv ids.
                int uvId = geom.uvcoords.uvId(connectIdx);
                if (uvId < uvTotal)
                {
                    float tu = 0.0f;
                    float tv = 0.0f;
                    geom.uvcoords.getUV(uvId, ref tu, ref tv);
                    OpenGL.glTexCoord2f(tu, tv);
                }
            }
            OpenGL.glNormal3f((float)normal[0], (float)normal[1], (float)normal[2]);
            OpenGL.glVertex3f((float)position[0], (float)position[1], (float)position[2]);
            connectIdx++;
        }
        OpenGL.glEnd();
    }

    // Turn texturing back off before leaving.
    if (useTexture)
    {
        OpenGL.glDisable(OpenGL.GL_TEXTURE_2D);
    }

    view.endGL();
}