//
// Description:
//
//    Tells Maya that this surface shape supports uv drawing.
//
public override bool canDrawUV()
{
    // There is something to draw in the UV editor only when the
    // geometry actually carries uv coordinates.
    apiMeshGeom geom = ((apiMesh)surfaceShape).meshGeom();
    return geom.uvcoords.uvcount() > 0;
}
// Main selection routine
//
public override bool select(MSelectInfo selectInfo, MSelectionList selectionList, MPointArray worldSpaceSelectPts)
//
// Description:
//
//     Main selection routine
//
// Arguments:
//
//     selectInfo           - the selection state information
//     selectionList        - the list of selected items to add to
//     worldSpaceSelectPts  -
//
{
    bool selected = false;

    // When the shape is hilited we are in component mode, so give
    // vertex picking the first chance at the selection.
    if (selectInfo.displayStatus == M3dView.DisplayStatus.kHilite)
    {
        selected = selectVertices(selectInfo, selectionList, worldSpaceSelectPts);
    }

    if (selected)
    {
        return true;
    }

    // Fall back to object-level selection.
    //
    // NOTE: If the geometry has an intersect routine it should
    // be called here with the selection ray to determine if the
    // the object was selected.
    apiMesh meshNode = (apiMesh)surfaceShape;

    MSelectionMask priorityMask = new MSelectionMask(MSelectionMask.SelectionType.kSelectNurbsSurfaces);
    MSelectionList item = new MSelectionList();
    item.add(selectInfo.selectPath);

    MPoint xformedPt = new MPoint();
    if (selectInfo.singleSelection)
    {
        // Report the world-space bounding box center as the hit point.
        xformedPt = meshNode.boundingBox().center;
        xformedPt.multiplyEqual(selectInfo.selectPath.inclusiveMatrix);
    }

    selectInfo.addSelection(item, xformedPt, selectionList, worldSpaceSelectPts, priorityMask, false);
    return true;
}
// Main draw routine for UV editor. This is called by maya when the
// shape is selected and the UV texture window is visible.
//
public override void drawUV(M3dView view, MTextureEditorDrawInfo info)
//
// Description:
//   Main entry point for UV drawing. This method is called by the UV
//   texture editor when the shape is 'active'.
//
// Input:
//   A 3dView.
//
{
    apiMeshGeom geom = ((apiMesh)surfaceShape).meshGeom();
    if (geom.uvcoords.uvcount() == 0)
    {
        return; // nothing to draw without uv coordinates
    }

    view.setDrawColor(new MColor(1.0f, 0.0f, 0.0f));

    // Every drawing mode renders the uv wireframe; only the
    // "everything" and "uv select" modes also draw the coordinate
    // numbers (without labels).
    drawUVWireframe(geom, view, info);

    if (info.drawingFunction == MTextureEditorDrawInfo.DrawingFunction.kDrawEverything ||
        info.drawingFunction == MTextureEditorDrawInfo.DrawingFunction.kDrawUVForSelect)
    {
        drawUVMapCoordNum(geom, view, info, false);
    }
}
public bool selectVertices(MSelectInfo selectInfo, MSelectionList selectionList, MPointArray worldSpaceSelectPts)
//
// Description:
//
//     Vertex selection.
//
// Arguments:
//
//     selectInfo           - the selection state information
//     selectionList        - the list of selected items to add to
//     worldSpaceSelectPts  -
//
{
    M3dView view = selectInfo.view;
    MDagPath path = selectInfo.multiPath;
    bool singleSelection = selectInfo.singleSelection;

    // Component object that accumulates the picked vertex indices.
    //
    MFnSingleIndexedComponent fnComponent = new MFnSingleIndexedComponent();
    MObject surfaceComponent = fnComponent.create(MFn.Type.kMeshVertComponent);

    // if the user did a single mouse click and we find > 1 selection
    // we will use the alignmentMatrix to find out which is the closest
    //
    MMatrix alignmentMatrix = new MMatrix();
    if (singleSelection)
    {
        alignmentMatrix = selectInfo.alignmentMatrix;
    }

    MPoint selectionPoint = new MPoint();
    MPoint closestPoint = new MPoint();
    double closestZ = 0.0;
    int closestIndex = -1;
    bool anyHit = false;

    // Get the geometry information
    //
    apiMeshGeom geom = ((apiMesh)surfaceShape).meshGeom();
    uint vertexCount = geom.vertices.length;

    // Loop through all vertices of the mesh and
    // see if they lie within the selection area
    //
    for (uint i = 0; i < vertexCount; i++)
    {
        MPoint vertex = geom.vertices[(int)i];

        // Sets OpenGL's render mode to select and stores
        // selected items in a pick buffer
        //
        view.beginSelect();
        OpenGL.glBegin(OpenGL.GL_POINTS);
        OpenGL.glVertex3f((float)vertex[0], (float)vertex[1], (float)vertex[2]);
        OpenGL.glEnd();

        if (view.endSelect() <= 0)
        {
            continue; // hit count is zero for this vertex
        }

        anyHit = true;

        if (!singleSelection)
        {
            // multiple selection, store all elements
            //
            fnComponent.addElement((int)i);
            continue;
        }

        // Single click: keep only the hit nearest the camera,
        // measured in alignment-matrix space.
        MPoint aligned = vertex;
        aligned.homogenize();
        aligned.multiplyEqual(alignmentMatrix);
        double z = aligned.z;
        if (closestIndex < 0 || z > closestZ)
        {
            closestIndex = (int)i;
            closestPoint = vertex;
            closestZ = z;
        }
    }

    // If single selection, insert the closest point into the component
    // and compute its world space position.
    //
    if (anyHit && selectInfo.singleSelection)
    {
        fnComponent.addElement(closestIndex);
        selectionPoint = closestPoint;
        selectionPoint.multiplyEqual(path.inclusiveMatrix);
    }

    // Add the selected component to the selection list
    //
    if (anyHit)
    {
        MSelectionList selectionItem = new MSelectionList();
        selectionItem.add(path, surfaceComponent);
        MSelectionMask mask = new MSelectionMask(MSelectionMask.SelectionType.kSelectComponentsMask);
        selectInfo.addSelection(selectionItem, selectionPoint, selectionList, worldSpaceSelectPts, mask, true);
    }

    return anyHit;
}
/////////////////////////////////////////////////////////////////////
//
// Overrides
//
/////////////////////////////////////////////////////////////////////

public override void getDrawRequests(MDrawInfo info, bool objectAndActiveOnly, MDrawRequestQueue queue)
//
// Description:
//
//     Add draw requests to the draw queue
//
// Arguments:
//
//     info                 - current drawing state
//     objectsAndActiveOnly - no components if true
//     queue                - queue of draw requests to add to
//
{
    // Get the data necessary to draw the shape
    //
    MDrawData data = new MDrawData();
    apiMesh meshNode = (apiMesh)surfaceShape;
    apiMeshGeom geom = meshNode.meshGeom();
    // Nothing to queue when there is no geometry or it has no faces.
    if ((null == geom) || (0 == geom.faceCount))
    {
        MGlobal.displayInfo("NO DrawRequest for apiMesh");
        return;
    }

    // This call creates a prototype draw request that we can fill
    // in and then add to the draw queue.
    //
    MDrawRequest request = getDrawRequest(info); // info.getPrototype(this);
    getDrawData(geom, out data);
    request.setDrawData(data);

    // Decode the draw info and determine what needs to be drawn
    //
    M3dView.DisplayStyle appearance = info.displayStyle;
    M3dView.DisplayStatus displayStatus = info.displayStatus;

    // Are we displaying meshes?
    if (!info.objectDisplayStatus(M3dView.DisplayObjects.kDisplayMeshes))
    {
        return;
    }

    // Use this code to help speed up drawing.
    // inUserInteraction() is true for any interaction with
    // the viewport, including object or component TRS and camera changes.
    // userChangingViewContext() is true only when the user is using view
    // context tools (tumble, dolly, track, etc.)
    //
    if (info.inUserInteraction || info.userChangingViewContext)
    {
        // User is using view context tools so
        // request fast draw and get out
        //
        request.token = (int)DrawToken.kDrawRedPointAtCenter;
        queue.add(request);
        return;
    }

    switch (appearance)
    {
        case M3dView.DisplayStyle.kWireFrame:
        {
            request.token = (int)DrawToken.kDrawWireframe;

            // Pick the wireframe color from the display status of the
            // object (lead/active/dormant/etc.).
            int activeColorTable = (int)M3dView.ColorTable.kActiveColors;
            int dormantColorTable = (int)M3dView.ColorTable.kDormantColors;

            switch (displayStatus)
            {
                case M3dView.DisplayStatus.kLead:
                    request.setColor(LEAD_COLOR, activeColorTable);
                    break;
                case M3dView.DisplayStatus.kActive:
                    request.setColor(ACTIVE_COLOR, activeColorTable);
                    break;
                case M3dView.DisplayStatus.kActiveAffected:
                    request.setColor(ACTIVE_AFFECTED_COLOR, activeColorTable);
                    break;
                case M3dView.DisplayStatus.kDormant:
                    request.setColor(DORMANT_COLOR, dormantColorTable);
                    break;
                case M3dView.DisplayStatus.kHilite:
                    request.setColor(HILITE_COLOR, activeColorTable);
                    break;
                default:
                    break;
            }

            queue.add(request);
            break;
        }

        case M3dView.DisplayStyle.kGouraudShaded:
        {
            // Create the smooth shaded draw request
            //
            request.token = (int)DrawToken.kDrawSmoothShaded;

            // Need to get the material info
            //
            MDagPath path = info.multiPath;   // path to your dag object
            M3dView view = info.view;         // view to draw to
            MMaterial material = base.material(path);

            // If the user currently has the default material enabled on the
            // view then use the default material for shading.
            //
            if (view.usingDefaultMaterial)
            {
                material = MMaterial.defaultMaterial;
            }

            // Evaluate the material and if necessary, the texture.
            //
            material.evaluateMaterial(view, path);

            bool drawTexture = true;
            if (drawTexture && material.materialIsTextured)
            {
                material.evaluateTexture(data);
            }

            request.material = material;
            // request.setDisplayStyle( appearance );

            // Mark the request transparent so it is drawn in the
            // transparency pass.
            bool materialTransparent = false;
            material.getHasTransparency(ref materialTransparent);
            if (materialTransparent)
            {
                request.isTransparent = true;
            }

            queue.add(request);

            // create a draw request for wireframe on shaded if
            // necessary.
            //
            if ((displayStatus == M3dView.DisplayStatus.kActive) ||
                (displayStatus == M3dView.DisplayStatus.kLead) ||
                (displayStatus == M3dView.DisplayStatus.kHilite))
            {
                MDrawRequest wireRequest = getDrawRequest(info); // info.getPrototype(this);
                wireRequest.setDrawData(data);
                wireRequest.token = (int)DrawToken.kDrawWireframeOnShaded;
                wireRequest.displayStyle = M3dView.DisplayStyle.kWireFrame;

                int activeColorTable = (int)M3dView.ColorTable.kActiveColors;

                switch (displayStatus)
                {
                    case M3dView.DisplayStatus.kLead:
                        wireRequest.setColor(LEAD_COLOR, activeColorTable);
                        break;
                    case M3dView.DisplayStatus.kActive:
                        wireRequest.setColor(ACTIVE_COLOR, activeColorTable);
                        break;
                    case M3dView.DisplayStatus.kHilite:
                        wireRequest.setColor(HILITE_COLOR, activeColorTable);
                        break;
                    default:
                        break;
                }

                queue.add(wireRequest);
            }
            break;
        }

        case M3dView.DisplayStyle.kFlatShaded:
            request.token = (int)DrawToken.kDrawFlatShaded;
            queue.add(request);
            break;

        case M3dView.DisplayStyle.kBoundingBox:
            request.token = (int)DrawToken.kDrawBoundingBox;
            queue.add(request);
            break;

        default:
            break;
    }

    // Add draw requests for components
    //
    if (!objectAndActiveOnly)
    {
        // Inactive components
        //
        if ((appearance == M3dView.DisplayStyle.kPoints) ||
            (displayStatus == M3dView.DisplayStatus.kHilite))
        {
            MDrawRequest vertexRequest = getDrawRequest(info); // info.getPrototype(this);
            vertexRequest.setDrawData(data);
            vertexRequest.token = (int)DrawToken.kDrawVertices;
            vertexRequest.setColor(DORMANT_VERTEX_COLOR, (int)M3dView.ColorTable.kActiveColors);

            queue.add(vertexRequest);
        }

        // Active components
        //
        if (((MPxSurfaceShape)surfaceShape).hasActiveComponents)
        {
            MDrawRequest activeVertexRequest = getDrawRequest(info); // info.getPrototype(this);
            activeVertexRequest.setDrawData(data);
            activeVertexRequest.token = (int)DrawToken.kDrawVertices;
            activeVertexRequest.setColor(ACTIVE_VERTEX_COLOR, (int)M3dView.ColorTable.kActiveColors);

            MObjectArray clist = ((MPxSurfaceShape)surfaceShape).activeComponents;
            MObject vertexComponent = clist[0]; // Should filter list
            activeVertexRequest.component = vertexComponent;

            queue.add(activeVertexRequest);
        }
    }
}