Example #1
        public void Should_be_disabled_by_default()
        {
            var feature = new MyFeature();

            Assert.False(feature.IsEnabledByDefault);
        }
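For orientation, a minimal sketch of the kind of class this test could exercise; only the MyFeature name and the IsEnabledByDefault member come from the test itself, the default of false is simply what the assertion expects, and everything else is assumed for illustration:

        // Hypothetical feature-toggle class (illustrative only, not the project's actual type):
        // the test above only requires that IsEnabledByDefault starts out as false.
        public class MyFeature
        {
            public bool IsEnabledByDefault { get; set; } = false;
        }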
Example #2
    //public static void Dimensioning()
    //{
    //   Part workPart=theSession.Parts.Work;

    //   NXOpen.Annotations.DimensionData dimeData1;
    //   dimeData1 = workPart.Annotations.NewDimensionData();

    //   BodyCollection bodis = workPart.Bodies;
    //   Body[] bodys = bodis.ToArray();

    //   Face[] faces = bodys[0].GetFaces();
    //   int Faces_num = faces.Length;

    //   NXOpen.Annotations.Associativity ass1;
    //   ass1 = workPart.Annotations.NewAssociativity();

    //   ass1.FirstObject=faces[0];
    //   NXObject nullNXObject = null;
    //   ass1.SecondObject = nullNXObject;


    //   Expression expression1;
    //   expression1 = workPart.Expressions.CreateSystemExpressionWithUnits("p1=0.5", null);
    //   Scalar scalar1;
    //   scalar1 = workPart.Scalars.CreateScalarExpression(expression1, NXOpen.Scalar.DimensionalityType.None, NXOpen.SmartObject.UpdateOption.WithinModeling);
    //   Point point1=workPart.Points.CreatePoint(faces[0], scalar1, scalar1, NXOpen.SmartObject.UpdateOption.WithinModeling);


    //   Point3d pickPoint1 = new Point3d(point1.Coordinates.X, point1.Coordinates.Y, point1.Coordinates.Z); // center point coordinates of the face
    //   ass1.PickPoint = pickPoint1;

    //   NXOpen.Annotations.Associativity[] associativity2 = new NXOpen.Annotations.Associativity[1];
    //   associativity2[0] = ass1;
    //   dimeData1.SetAssociativity(1, associativity2);

    //   ass1.Dispose();

    //   NXOpen.Annotations.Associativity ass3;
    //   ass3 = workPart.Annotations.NewAssociativity();


    ////        NXOpen.UF.UFSf ufsf1 = new NXOpen.UF.UFSf();
    ////        int num_adjacent;
    ////        Tag [] adjacentFaces;
    ////        ufsf1.FaceAskAdjacentFaces(faces[0].Tag,out num_adjacent,out adjacentFaces);

    //   int num_adjacent=0;
    //   Face [] adjacentFaces;
    //   //GetAdjacentFace(faces[0],num_adjacent,adjacentFaces);

    //    Edge[] edges = faces[0].GetEdges();

    //    Face[] AdFaces = new Face[4];
    //    for (int i = 0; i<edges.Length;++i )
    //    {
    //        Face[] tmpFace=edges[i].GetFaces();
    //        for(int j=0;j<tmpFace.Length;++j)
    //            if (tmpFace[j].Tag != faces[0].Tag)
    //            {
    //                AdFaces[num_adjacent] = tmpFace[j];
    //                num_adjacent++;
    //            }
    //    }
    //    Face parallelface;
    //    for (int i=0;i<faces.Length;++i)
    //    {
    //        if(faces[i]==)
    //    }
    //   ass3.FirstObject = faces[1];
    //   ass3.SecondObject = nullNXObject;

    //   Point3d pickPoint2 = new Point3d(17.7852174562136, 0.0, 28.080186384977);
    //   ass3.PickPoint = pickPoint2;

    //   NXOpen.Annotations.Associativity[] associativity4 = new NXOpen.Annotations.Associativity[1];
    //   associativity4[0] = ass3;
    //   dimeData1.SetAssociativity(2, associativity4);

    //   ass3.Dispose();

    //   NXOpen.Annotations.PmiData pmiData1;
    //   pmiData1 = workPart.Annotations.NewPmiData();


    //   Xform xform2;
    //   xform2 = dimeData1.GetInferredPlane(NXOpen.Annotations.PmiDefaultPlane.ModelView, NXOpen.Annotations.DimensionType.Perpendicular);

    //   Point3d origin1 = new Point3d(0.0, 0.0, 0.0);
    //   NXOpen.Annotations.PmiPerpendicularDimension pmiPerpendicularDimension1;
    //   pmiPerpendicularDimension1 = workPart.Dimensions.CreatePmiPerpendicularDimension(dimeData1, pmiData1, xform2, origin1);

    //   dimeData1.Dispose();
    //   pmiData1.Dispose();

    //   Xform xform3;
    //   xform3 = pmiPerpendicularDimension1.GetInferredAnnotationPlane(NXOpen.Annotations.PmiDefaultPlane.YzOfWcs);

    //   pmiPerpendicularDimension1.AnnotationPlane = xform3;

    //   pmiPerpendicularDimension1.IsOriginCentered = true;

    //   Point3d origin4 = new Point3d(0.0, 54.5601856649006, 75.4465181592287);
    //   pmiPerpendicularDimension1.AnnotationOrigin = origin4;

    //   pmiPerpendicularDimension1.LeaderOrientation = NXOpen.Annotations.LeaderOrientation.FromLeft;
    //}

    public static MenuBarManager.CallbackStatus FeatureRecognition(MenuButtonEvent buttonEvent)
    {
        // Example of calling functions exported from the C++ DLL
        //int a = 1;
        //int b = 2;
        //int c = Add(a, b);

        //double [] num={0.0,0.0,0.0};
        //Add(3,num);

        List<List<Tag>> Face_of_Feature;

        MyFace.GetFaceArray(out Face_of_Feature); // should a check for adjacent features be added here?

        SelfDefFeatureType[] Def_Types;
        MyFeature.AskTypeOfFeature(Face_of_Feature, out Def_Types);

        string TypeMessage = "";

        foreach (SelfDefFeatureType Deftemp in Def_Types)
        {
            TypeMessage += Enum.GetName(typeof(SelfDefFeatureType), Deftemp) + "\n";
        }
        NXMessageBox box = theUI.NXMessageBox;

        box.Show("特征类型显示", NXMessageBox.DialogType.Information, TypeMessage); // dialog title: "Feature type display"

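        // Colour every face that belongs to a recognised feature (colour index 106).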
        foreach (List<Tag> Face_list in Face_of_Feature)
        {
            for (int i = 0; i < Face_list.Count; ++i)
            {
                theUfobj.SetColor(Face_list[i], 106); // red would be 186
            }
        }
        //for (int i = 0; i < Face_of_Feature.Count; i++)
        //{
        //    if (Def_Types[i] == SelfDefFeatureType.RECT_SOLT)
        //    {
        //        foreach (Tag Face_tag in Face_of_Feature[i])
        //        {
        //             theUfobj.SetColor(Face_tag, 106); // red would be 186
        //        }

        //    }
        //}

        Part workPart = theSession.Parts.Work;

        BodyCollection bodis = workPart.Bodies;

        Body[] bodys = bodis.ToArray();

        Body body = bodys[0];

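        // Query the first solid body of the work part for its faces and its feature history.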
        NXOpen.Face[]             faces    = body.GetFaces();
        NXOpen.Features.Feature[] features = body.GetFeatures();

        Tag[] body_faces_tag;
        theUfmodel.AskFeatFaces(features[0].Tag, out body_faces_tag);

        //theUfobj.SetColor(body_faces_tag[0], 106);
        //theUfobj.SetColor(body_faces_tag[5], 106);

        double Arcangle;

        MyFace.AskAngleOfFace(body_faces_tag[0], body_faces_tag[5], out Arcangle);

        int fLength = features.Length;

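        // For every dovetail-slot feature, colour its first two faces and measure the angle between them.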
        foreach (NXOpen.Features.Feature tempF in features)
        {
            string type = tempF.FeatureType;
            if (type == "DOVE_TAIL_SLOT")
            {
                Tag[] VSlot_face_Tag;
                theUfmodel.AskFeatFaces(tempF.Tag, out VSlot_face_Tag);
                theUfobj.SetColor(VSlot_face_Tag[0], 106);
                theUfobj.SetColor(VSlot_face_Tag[1], 106);

                MyFace.AskAngleOfFace(VSlot_face_Tag[0], VSlot_face_Tag[1], out Arcangle);

                //    obj.SetColor(tempF.Tag, 106);
                Tag[] faces_tag, edges_tag, adjacFace_tag;
                theUfmodel.AskFeatEdges(tempF.Tag, out edges_tag);

                theUfmodel.AskFeatFaces(tempF.Tag, out faces_tag);

                // highlight the specified face
                //NXOpen.UF.UFDisp Disp = theUfSession.Disp;
                //Disp.SetHighlight(faces_tag[0], 1);
            }
        }
        //String tmpString = faces_tag[0].ToString();
        //Face face=(Face)feature.FindObject("FACE 0");
        //face.Highlight();

        return MenuBarManager.CallbackStatus.Continue;
    }
Example #3
 /// <summary>
 /// Creates the scenario.
 /// </summary>
 /// <param name="text">The scenario description text.</param>
 /// <returns>The scenario created from the given text.</returns>
 protected virtual Scenario WithScenario(string text)
 {
     return LastOutcome == null ? MyFeature.WithScenario(text) : LastOutcome.WithScenario(text);
 }
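The null check keeps fluent chaining intact: the first scenario is created through the MyFeature entry point, and every later call continues from the outcome of the previous step. A hypothetical call site is sketched below; the fixture method and scenario text are illustrative and not taken from the source:

 // Hypothetical usage inside a fixture that inherits WithScenario;
 // the scenario text and follow-up steps are assumptions, not part of the source.
 public void Transferring_between_accounts()
 {
     var scenario = WithScenario("Transfer funds between two accounts");
     // ...further steps of the scenario would be chained from 'scenario' here
 }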