/// <summary>
/// Walks the faces of the first surface body and assigns each conical or
/// cylindrical face, in iteration order, as the Side_face of the
/// corresponding Section in var_es._list.
/// </summary>
internal static void Assign_all_faces()
{
    sidefaces = var_es.InventorApp.TransientObjects.CreateFaceCollection();
    try
    {
        int i = -1;
        foreach (Face s in partDef.SurfaceBodies[1].Faces)
        {
            // Only side faces (cones/cylinders) map to sections; planar faces
            // are handled elsewhere (see Assign_All_Faces).
            // FIX(review): removed an empty else branch and dead commented-out code.
            if (s.SurfaceType == SurfaceTypeEnum.kConeSurface ||
                s.SurfaceType == SurfaceTypeEnum.kCylinderSurface)
            {
                i++;
                // NOTE(review): assumes _list has at least as many entries as
                // there are cone/cylinder faces — TODO confirm with callers.
                var_es._list[i].Side_face = (Face)s;
            }
        }
    }
    catch (Exception e1)
    {
        MessageBox.Show(e1.ToString());
    }
}
/// <summary>
/// Detects the faces in the given image by POSTing the bytes to the cloud
/// face-detection service.
/// </summary>
/// <returns>Array of detected faces, or null when the service reports an error.</returns>
/// <param name="imageBytes">Image bytes.</param>
/// <exception cref="InvalidOperationException">The face-subscription key is not set.</exception>
public Face[] DetectFaces(byte[] imageBytes)
{
    if (string.IsNullOrEmpty(faceSubscriptionKey))
    {
        // FIX(review): throw a specific exception type rather than bare Exception
        // (still caught by any existing catch (Exception) in callers).
        throw new InvalidOperationException("The face-subscription key is not set.");
    }

    string requestUrl = string.Format(
        "{0}/detect?returnFaceId={1}&returnFaceLandmarks={2}&returnFaceAttributes={3}",
        GetFaceServiceUrl(), true, false, "age,gender,smile,headPose");

    Dictionary<string, string> headers = new Dictionary<string, string>();
    headers.Add("ocp-apim-subscription-key", faceSubscriptionKey);

    HttpWebResponse response = CloudWebTools.DoWebRequest(
        requestUrl, "POST", "application/octet-stream", imageBytes, headers, true, false);

    Face[] faces = null;
    if (!CloudWebTools.IsErrorStatus(response))
    {
        // FIX(review): dispose the reader (and its underlying response stream)
        // deterministically instead of leaking it.
        using (StreamReader reader = new StreamReader(response.GetResponseStream()))
        {
            // JsonUtility cannot parse a bare top-level JSON array, so wrap the
            // payload in an object with a single "faces" member first.
            string newJson = "{ \"faces\": " + reader.ReadToEnd() + "}";
            FaceCollection facesCollection = JsonUtility.FromJson<FaceCollection>(newJson);
            faces = facesCollection.faces;
        }
    }
    else
    {
        ProcessFaceError(response);
    }

    return faces;
}
/// <summary>
/// Creates a vertical prismal surface: the given base polyline extruded over a z range.
/// </summary>
/// <param name="base">Base polyline; must be a closed polyline when <paramref name="isClosed"/> is true.</param>
/// <param name="zRange">Vertical extent of the prism; must not be empty.</param>
/// <param name="isClosed">Whether the prism is closed.</param>
/// <exception cref="ArgumentException">The z range is empty, or a closed prism was requested with a non-closed base.</exception>
public VerticalPrismalSurface(IPolyline @base, Interval zRange, bool isClosed = false)
{
    if (zRange.IsEmpty)
    {
        throw new ArgumentException("z range should not be empty", "zRange");
    }
    _base = @base;
    _zRange = zRange;
    _isClosed = isClosed;
    if (isClosed)
    {
        // FIX(review): the original condition was corrupted in the source
        // ("[email protected]"); a closed prism requires a closed base, so
        // validate the IClosedPolyline cast performed just below.
        if (!(@base is IClosedPolyline))
        {
            throw new ArgumentException("Closed prism has to have closed base");
        }
        _closedBase = (IClosedPolyline)@base;
        // Positive signed area means the base winds counterclockwise.
        _isCounterclockwise = _closedBase.SignedArea() > 0;
    }
    _vertices = new VertexCollection(this);
    _edges = new EdgeCollection(this);
    _faces = new FaceCollection(this);
    _undirectedComparer = new UndirectedEdgeComparerImpl(this);
}
/// <summary>
/// Triangulate this polygon: replaces every n-gon face with a fan of
/// (n - 2) triangles anchored at the face's first vertex.
/// </summary>
public void Triangulate()
{
    FaceCollection newFaces = new FaceCollection();

    // Go through each face...
    foreach (Face face in faces)
    {
        // Number of triangles in a fan = vertices - 2.
        int triangles = face.Indices.Count - 2;

        // Already a triangle — keep it unchanged.
        if (triangles == 1)
        {
            newFaces.Add(face);
            continue;
        }

        // Build the triangle fan.
        for (int i = 0; i < triangles; i++)
        {
            // FIX(review): the original appended FIVE indices per "triangle"
            // (duplicating i+2 and i+1), producing degenerate 5-index faces,
            // inconsistent with the 3-index pass-through branch above.
            Face triangle = new Face();
            triangle.Indices.Add(new Index(face.Indices[0]));
            triangle.Indices.Add(new Index(face.Indices[i + 1]));
            triangle.Indices.Add(new Index(face.Indices[i + 2]));
            newFaces.Add(triangle);
        }
    }

    faces.Clear();
    faces = newFaces;
}
/// <summary>
/// Initializes a new, empty half-edge mesh with empty element collections,
/// each backed by this mesh instance.
/// </summary>
public HalfEdgeMesh()
{
    // Independent element views; construction order does not matter.
    Vertices = new VertexCollection(this);
    HalfEdges = new HalfedgeCollection(this);
    Edges = new EdgeCollection(this);
    Faces = new FaceCollection(this);
}
/// <summary>
/// Collects, per face, the machinings (lavorazioni) identified on the faces
/// tangentially connected to the first bend's front face. Only faces with
/// more than one edge loop are analyzed.
/// </summary>
/// <param name="oDoc">Sheet-metal part document to analyze.</param>
/// <returns>Map from each face to its non-empty list of machinings.</returns>
public static IDictionary<Face, List<Lavorazione>> detectLavorazioni(PartDocument oDoc)
{
    IDictionary<Face, List<Lavorazione>> result = new Dictionary<Face, List<Lavorazione>>();
    SheetMetalComponentDefinition oCompDef = (SheetMetalComponentDefinition)oDoc.ComponentDefinition;

    // The tangent chain of the first bend's front face, plus that face itself.
    FaceCollection oFaceColl = oCompDef.Bends[1].FrontFaces[1].TangentiallyConnectedFaces;
    oFaceColl.Add(oCompDef.Bends[1].FrontFaces[1]);

    foreach (Face f in oFaceColl)
    {
        // A single edge loop means no interior cut-outs: nothing to identify.
        if (f.EdgeLoops.Count <= 1)
        {
            continue;
        }
        List<Lavorazione> lavorazione = IdentificazioneEntita.main(f.EdgeLoops, iApp);
        if (lavorazione.Count > 0)
        {
            result.Add(f, lavorazione);
        }
    }
    return result;
}
// ! Setta la texture
/// <summary>
/// Applies the "RawSide" appearance asset to one side of the sheet metal,
/// chosen by comparing the total area of the front and back tangent face chains.
/// </summary>
/// <param name="oDoc">Sheet-metal part document.</param>
public static void setTexture(PartDocument oDoc)
{
    SheetMetalComponentDefinition oCompDef = (SheetMetalComponentDefinition)oDoc.ComponentDefinition;

    FaceCollection fcFront = oCompDef.Bends[1].FrontFaces[1].TangentiallyConnectedFaces;
    fcFront.Add(oCompDef.Bends[1].FrontFaces[1]);
    FaceCollection fcBack = oCompDef.Bends[1].BackFaces[1].TangentiallyConnectedFaces;
    fcBack.Add(oCompDef.Bends[1].BackFaces[1]);

    // Total area of each side's face chain.
    double area0 = 0;
    foreach (Face oFace in fcFront)
    {
        area0 += oFace.Evaluator.Area;
    }
    double area1 = 0;
    foreach (Face oFace in fcBack)
    {
        area1 += oFace.Evaluator.Area;
    }

    Asset oAsset;
    try
    {
        oAsset = oDoc.Assets["RawSide"];
    }
    catch (System.ArgumentException)
    {
        // FIX(review): removed the unused exception variable and the unused
        // local 'oAssets'. Asset missing in the document: copy from library.
        AssetLibrary oAssetsLib = iApp.AssetLibraries["3D_Pisa_Col"];
        Asset oAssetLib = oAssetsLib.AppearanceAssets["RawSide"];
        oAsset = oAssetLib.CopyTo(oDoc);
    }

    // NOTE(review): the BACK chain is chosen when the FRONT area is larger;
    // the other setTexture overload in this file does the opposite — confirm
    // which orientation is intended.
    FaceCollection fc;
    if (area0 > area1)
    {
        fc = fcBack;
    }
    else
    {
        fc = fcFront;
    }
    foreach (Face f in fc)
    {
        f.Appearance = oAsset;
    }
}
/// <summary>
/// Wraps a polysurface defined in local coordinates together with the
/// roto-translation that maps it into global coordinates.
/// </summary>
/// <param name="localSurface">Surface expressed in local coordinates.</param>
/// <param name="localToGlobal">Transform from local to global coordinates.</param>
public TransformedPolysurface(IPolysurface localSurface, RotoTranslation3 localToGlobal)
{
    _localToGlobal = localToGlobal;
    _localSurface = localSurface;

    // Element collections and comparer backed by this instance.
    _vertexCollection = new VertexCollection(this);
    _edgeCollection = new EdgeCollection(this);
    _faceCollection = new FaceCollection(this);
    _undirectedComparer = new UndirectedEdgeComparerImpl(this);
}
/// <summary>
/// Creates a multi-polygon consisting of a single component polygon.
/// </summary>
/// <param name="uniqueCompoenent">The only component of this multi-polygon.</param>
public MultiPolygon3(IPolygon3 uniqueCompoenent)
{
    // Single-component case: counts are known without enumeration.
    _componentCount = 1;
    _components = uniqueCompoenent.AsSingleton();
    _totalVertexCount = uniqueCompoenent.InPlane.CountVertices();

    // Element collections and comparer backed by this instance.
    _vertices = new VertexCollection(this);
    _edges = new EdgeColllection(this);
    _faces = new FaceCollection(this);
    _undirectedEdgeComparer = new UndirectedEdgeComparerImpl(this);
}
/// <summary>
/// Creates a multi-polygon from a sequence of component polygons.
/// </summary>
/// <param name="components">Component polygons; materialized once into a read-only list.</param>
public MultiPolygon3(IEnumerable<IPolygon3> components)
{
    _components = components.ToReadOnlyList();
    // NOTE(review): Count() re-enumerates unless the list implements
    // ICollection<T>; kept as-is because the field's declared type is not
    // visible here — confirm and switch to the Count property if possible.
    _componentCount = _components.Count();
    // FIX(review): Sum with a selector instead of Select(...).Sum() — same
    // result, no intermediate sequence.
    _totalVertexCount = _components.Sum(p => p.InPlane.CountVertices());
    _vertices = new VertexCollection(this);
    _edges = new EdgeColllection(this);
    _faces = new FaceCollection(this);
    _undirectedEdgeComparer = new UndirectedEdgeComparerImpl(this);
}
// ! Aggiunge il piano in mezzo alla lamiera
// (Adds a work plane in the middle of the sheet-metal bounding box.)
/// <summary>
/// Returns a work plane halfway between two bounding-box faces parallel to the
/// first work plane: builds a transient solid block over the body's range box,
/// collects its faces parallel to WorkPlanes[1], and creates a mid-plane
/// between the first two. Returns null when fewer than two such faces exist.
/// </summary>
public static WorkPlane addPlaneInTheMiddleOfBox(PartDocument oDoc)
{
    SheetMetalComponentDefinition oComp = (SheetMetalComponentDefinition)oDoc.ComponentDefinition;
    // Reuse an existing plane; fall through silently when it does not exist.
    // NOTE(review): the lookup uses "Manual" but the plane created below is
    // named "wpWorkReference", so this cache can never hit — confirm which
    // name is intended.
    try
    {
        return(oComp.WorkPlanes["Manual"]);
    }
    catch { }
    Box oRb = oComp.SurfaceBodies[1].RangeBox;
    TransientBRep oTransientBRep = iApp.TransientBRep;
    // Temporary solid block spanning the body's bounding box.
    SurfaceBody oBody = oTransientBRep.CreateSolidBlock(oRb);
    NonParametricBaseFeature oBaseFeature = oComp.Features.NonParametricBaseFeatures.Add(oBody);
    FaceCollection oFaceColl = iApp.TransientObjects.CreateFaceCollection();
    foreach (Face f in oBaseFeature.SurfaceBodies[1].Faces)
    {
        // A throwaway work plane is created per face only to test parallelism
        // against the first (origin) work plane, then deleted.
        WorkPlane tmpWp = oComp.WorkPlanes.AddByPlaneAndOffset(f, 0);
        //if (tmpWp.Plane.IsParallelTo[oComp.WorkPlanes[1].Plane])
        if (tmpWp.Plane.IsParallelTo[oComp.WorkPlanes[1].Plane])
        {
            oFaceColl.Add(f);
        }
        tmpWp.Delete();
    }
    WorkPlane wpWork = null;
    if (oFaceColl.Count >= 2)
    {
        // Mid-plane between the first two parallel faces (Inventor COM
        // collections are 1-based).
        WorkPlane wp1 = oComp.WorkPlanes.AddByPlaneAndOffset(oFaceColl[1], 0);
        WorkPlane wp2 = oComp.WorkPlanes.AddByPlaneAndOffset(oFaceColl[2], 0);
        wpWork = oComp.WorkPlanes.AddByTwoPlanes(wp1, wp2);
        wpWork.Name = "wpWorkReference";
        wpWork.Grounded = true;
        wpWork.Visible = false;
        // Clean up the temporary base feature and the two helper planes.
        oBaseFeature.Delete(false, true, true);
        wp1.Delete();
        wp2.Delete();
    }
    return(wpWork);
}
// ! Elimina tutti i fillet
/// <summary>
/// Deletes the fillet (cylindrical) faces that belong to the tangent chain of
/// the first bend's front face, editing the non-parametric base feature.
/// </summary>
/// <param name="oDoc">Sheet-metal part document.</param>
/// <returns>
/// Internal names of the faces in the kept chain, or null when the face
/// deletion fails.
/// </returns>
public static List<string> deleteFillet_(PartDocument oDoc)
{
    SheetMetalComponentDefinition oCompDef = (SheetMetalComponentDefinition)oDoc.ComponentDefinition;
    List<string> faceCollToKeep = new List<string>();
    if (oCompDef.Bends.Count > 0)
    {
        Bend oBend = oCompDef.Bends[1];
        FaceCollection oFaceColl = oBend.FrontFaces[1].TangentiallyConnectedFaces;
        oFaceColl.Add(oBend.FrontFaces[1]);
        foreach (Face oFace in oFaceColl)
        {
            faceCollToKeep.Add(oFace.InternalName);
        }
        // FIX(review): O(1) membership lookups instead of List.Contains
        // inside the face loop (was O(n*m)).
        HashSet<string> keepSet = new HashSet<string>(faceCollToKeep);

        NonParametricBaseFeature oBaseFeature = oCompDef.Features.NonParametricBaseFeatures[1];
        oBaseFeature.Edit();
        SurfaceBody basebody = oBaseFeature.BaseSolidBody;
        ObjectCollection oColl = iApp.TransientObjects.CreateObjectCollection();
        foreach (Face f in basebody.Faces)
        {
            // Cylindrical faces within the chain are the fillets to remove.
            if (keepSet.Contains(f.InternalName) &&
                f.SurfaceType == SurfaceTypeEnum.kCylinderSurface)
            {
                oColl.Add(f);
            }
        }
        try
        {
            oBaseFeature.DeleteFaces(oColl);
            oBaseFeature.ExitEdit();
        }
        catch
        {
            // Best-effort: always leave edit mode, signal failure via null.
            oBaseFeature.ExitEdit();
            return null;
        }
    }
    return faceCollToKeep;
}
/// <summary>
/// Creates a thicken feature (join, negative direction) over every face of
/// the first surface body, using the sheet thickness rounded to two decimals.
/// </summary>
/// <param name="oDoc">Sheet-metal part document.</param>
public static void createProfile(PartDocument oDoc)
{
    SheetMetalComponentDefinition oCompDef = (SheetMetalComponentDefinition)oDoc.ComponentDefinition;

    // Collect every face of the body into a transient collection.
    FaceCollection oFaceColl = iApp.TransientObjects.CreateFaceCollection();
    foreach (Face f in oCompDef.SurfaceBodies[1].Faces)
    {
        oFaceColl.Add(f);
    }

    // Thickness rounded to 2 decimal places.
    double thickness = Math.Round(oCompDef.Thickness.Value * 100) / 100;
    PartFeatures oFeat = oCompDef.Features;
    oFeat.ThickenFeatures.Add(
        oFaceColl,
        thickness,
        PartFeatureExtentDirectionEnum.kNegativeExtentDirection,
        PartFeatureOperationEnum.kJoinOperation,
        false);
}
/// <summary>
/// Asks the user to pick a planar face, extends the selection to its
/// tangentially connected faces, colors them red, and returns the collection.
/// </summary>
/// <param name="oDoc">Part document the picked face belongs to.</param>
/// <returns>The picked face plus its tangent chain.</returns>
public static FaceCollection manualFaceSelect(PartDocument oDoc)
{
    Face testFace = iApp.CommandManager.Pick(
        SelectionFilterEnum.kPartFacePlanarFilter, "Seleziona una faccia planare");
    // FIX(review): the original allocated a transient FaceCollection and then
    // immediately overwrote the reference with TangentiallyConnectedFaces.
    FaceCollection fc = testFace.TangentiallyConnectedFaces;
    fc.Add(testFace);
    foreach (Face f in fc)
    {
        // Highlight the selection in red.
        coloroEntita(oDoc, 255, 0, 0, f);
    }
    return fc;
}
/// <summary>
/// Subdivides each triangular face of this polygon into three triangles by
/// inserting the face centroid as a new vertex. Non-triangular faces are not
/// carried over into the result.
/// </summary>
/// <returns>The number of faces in the new subdivided polygon.</returns>
public int Subdivide()
{
    FaceCollection newFaces = new FaceCollection();
    foreach (Face face in Faces)
    {
        // Only triangles are subdivided; other faces are dropped.
        if (face.Count != 3)
        {
            continue;
        }

        // The three corner vertices of the triangle.
        Vertex v1 = Vertices[face.Indices[0].Vertex];
        Vertex v2 = Vertices[face.Indices[1].Vertex];
        Vertex v3 = Vertices[face.Indices[2].Vertex];

        // FIX(review): (v1 + v2 + v3) / 3 is the triangle CENTROID — the
        // original comment incorrectly called it an edge midpoint. Also
        // removed the stale <param name="smooth"> doc for a parameter that
        // does not exist.
        Vertex vCentroid = (v1 + v2 + v3) / 3;
        Index iCentroid = new Index(Vertices.Add(vCentroid));

        // Connect each edge of the original triangle to the centroid.
        Face newFace = new Face();
        newFace.Indices.Add(new Index(face.Indices[0]));
        newFace.Indices.Add(new Index(face.Indices[1]));
        newFace.Indices.Add(iCentroid);
        newFaces.Add(newFace);

        newFace = new Face();
        newFace.Indices.Add(new Index(face.Indices[1]));
        newFace.Indices.Add(new Index(face.Indices[2]));
        newFace.Indices.Add(iCentroid);
        newFaces.Add(newFace);

        newFace = new Face();
        newFace.Indices.Add(new Index(face.Indices[2]));
        newFace.Indices.Add(new Index(face.Indices[0]));
        newFace.Indices.Add(iCentroid);
        newFaces.Add(newFace);
    }
    faces = newFaces;
    return faces.Count;
}
/// <summary>
/// Deletes every face of the first surface body whose internal name is not
/// listed in <paramref name="faceCollToKeep"/>.
/// </summary>
/// <param name="oDoc">Sheet-metal part document.</param>
/// <param name="faceCollToKeep">Internal names of the faces to preserve.</param>
public static void deleteFace(PartDocument oDoc, List<string> faceCollToKeep)
{
    SheetMetalComponentDefinition oCompDef = (SheetMetalComponentDefinition)oDoc.ComponentDefinition;
    // FIX(review): O(1) lookups instead of List.Contains for every face
    // (was O(n*m)).
    HashSet<string> keepSet = new HashSet<string>(faceCollToKeep);
    FaceCollection faceCollToDelete = iApp.TransientObjects.CreateFaceCollection();
    foreach (Face oFace in oCompDef.SurfaceBodies[1].Faces)
    {
        if (!keepSet.Contains(oFace.InternalName))
        {
            faceCollToDelete.Add(oFace);
        }
    }
    PartFeatures oFeat = oCompDef.Features;
    oFeat.DeleteFaceFeatures.Add(faceCollToDelete);
}
/// <summary>
/// Collects every planar face of the first surface body into the transient
/// collection 'qwe', then walks var_es._list assigning start/end faces to
/// each section, consuming faces from the collection as it goes.
/// </summary>
internal static void Assign_All_Faces()
{
    qwe = var_es.InventorApp.TransientObjects.CreateFaceCollection();
    foreach (Face s in partDef.SurfaceBodies[1].Faces)
    {
        try
        {
            // Only planar faces are candidates for section start/end faces.
            if (s.SurfaceType == SurfaceTypeEnum.kPlaneSurface)
            {
                qwe.Add(s);
            }
        }
        catch (Exception e1)
        {
            MessageBox.Show(e1.ToString());
        }
    }
    //MessageBox.Show(qwe.Count.ToString());
    try
    {
        // NOTE(review): Inventor COM collections are 1-based, so qwe[qwe.Count]
        // is the LAST collected face and qwe[1] the first — confirm this is
        // the intended face ordering.
        var_es._list[0].Start_face = qwe[qwe.Count] as Face;
        qwe.Remove(qwe.Count);
        for (int i = 0; i < var_es._list.Count; i++)
        {
            // Sections flagged Same_as_next share their boundary face with
            // the following section, so no face is consumed for them.
            if (!var_es._list[i].Same_as_next)
            {
                var_es._list[i].End_face = (Face)qwe[1];
                if (i < var_es._list.Count - 1)
                {
                    // The next section starts on the face this one ends on.
                    var_es._list[i + 1].Start_face = (Face)qwe[1];
                }
                // Consume the face just assigned.
                qwe.Remove(1);
            }
        }
    }
    catch (Exception e1)
    {
        MessageBox.Show(e1.ToString());
    }
}
// --- initialization and deinitialization ---
/// <summary>Initializes the renderer: creates the transparent world face list
/// and applies the global OpenGL state (clear color, depth test, blending,
/// performance hints, smooth lighting and front-face culling).</summary>
internal static void Initialize()
{
    TransparentWorldFaces = new FaceCollection(Program.CurrentOptions.FacesPerDisplayList, Program.CurrentOptions.SortInterval);
    // TODO: Isolate global settings from dynamic settings.
    // Global settings should be initialized here, dynamic ones via OpenGlState.Initialize().
    Gl.glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
    Gl.glClear(Gl.GL_COLOR_BUFFER_BIT | Gl.GL_DEPTH_BUFFER_BIT);
    Gl.glEnable(Gl.GL_DEPTH_TEST);
    Gl.glBlendFunc(Gl.GL_SRC_ALPHA, Gl.GL_ONE_MINUS_SRC_ALPHA);
    Gl.glDepthFunc(Gl.GL_LEQUAL);
    Gl.glHint(Gl.GL_FOG_HINT, Gl.GL_FASTEST);
    Gl.glHint(Gl.GL_LINE_SMOOTH_HINT, Gl.GL_FASTEST);
    Gl.glHint(Gl.GL_PERSPECTIVE_CORRECTION_HINT, Gl.GL_FASTEST);
    Gl.glHint(Gl.GL_POINT_SMOOTH_HINT, Gl.GL_FASTEST);
    Gl.glHint(Gl.GL_POLYGON_SMOOTH_HINT, Gl.GL_FASTEST);
    Gl.glHint(Gl.GL_GENERATE_MIPMAP_HINT, Gl.GL_NICEST);
    Gl.glDisable(Gl.GL_DITHER);
    // FIX(review): removed the constant `if (true)` wrapper and its
    // unreachable flat-shading else branch, plus a duplicated
    // glCullFace(GL_FRONT) call (idempotent, executed twice).
    Gl.glLightModelfv(Gl.GL_LIGHT_MODEL_AMBIENT, new float[] { 0.0f, 0.0f, 0.0f, 1.0f });
    Gl.glEnable(Gl.GL_LIGHTING);
    Gl.glEnable(Gl.GL_LIGHT0);
    Gl.glEnable(Gl.GL_COLOR_MATERIAL);
    Gl.glColorMaterial(Gl.GL_FRONT_AND_BACK, Gl.GL_AMBIENT_AND_DIFFUSE);
    Gl.glShadeModel(Gl.GL_SMOOTH);
    Gl.glCullFace(Gl.GL_FRONT);
    Gl.glEnable(Gl.GL_CULL_FACE);
    OpenGlState.Initialize();
}
/// <summary>
/// Imports known faces from a file, merging them into the current collection.
/// </summary>
/// <param name="knownFacesFile">The face-collection file to import.</param>
private void ImportKnownFaces(string knownFacesFile)
{
    // Lazily create the collection on first import.
    if (knownFaces == null)
    {
        knownFaces = new FaceCollection();
    }
    knownFaces.Import(knownFacesFile);
    OnFacesLoaded();
}
/// <summary>
/// Analyzes the current frame for available faces.
/// If faces are found it tries to find a match record for each face.
/// </summary>
/// <param name="detectedFaces">Collection of detected faces</param>
/// <returns>True when at least one face was detected and processed; false otherwise.</returns>
protected bool Detect(out FaceCollection detectedFaces)
{
    Bitmap bmp;
    detectedFaces = new FaceCollection();
    // Bail out unless the VeriLook extractor/matcher are licensed.
    if (!VLExtractor.IsRegistered)
        return false;
    if (!VLMatcher.IsRegistered)
        return false;
    sleepCapture = false;
    //NImage image = capturedImages.Consume();
    // Grab the next frame (100 ms timeout) from the image source.
    if ((bmp = imageSource.GetImage(100)) == null)
        return false;
    NImage image = NImage.FromBitmap(bmp);
    if (image == null)
        return false;
    VleFace[] vlFaces;
    NGrayscaleImage grayImage;
    // Stores the succeeded recognition result for the single face detected
    RecognitionResult sRecoResult;
    // Stores all the recognition results (successful and failed) for the single face detected
    RecognitionResult[] sRecoResults;
    // Stores the succeeded recognition result for all the detected faces
    RecognitionResult[] mRecoResult;
    // Stores all the recognition results (successful and failed) for all the detected faces
    RecognitionResult[][] mRecoResults;
    // Image recognition result
    Bitmap lciBmp;
    // Get gray image for face detection
    try
    {
        grayImage = (NGrayscaleImage)NImage.FromImage(NPixelFormat.Grayscale, 0, image);
    }
    catch
    {
        // Conversion failed: release the color frame and give up on this frame.
        image.Dispose();
        return false;
    }
    UseResources();
    // Find all faces in frame
    vlFaces = vlExtractor.DetectFaces(grayImage);
    ReleaseResources();
    #region No faces detected
    if ((vlFaces == null) || (vlFaces.Length < 1))
    {
        grayImage.Dispose();
        image.Dispose();
        this.lastDetectedFaces.Clear();
        return false;
    }
    #endregion
    lciBmp = image.ToBitmap();
    // RecognitionResultUpdate(lciBmp, vlFaces);
    #region Only one face detected
    if (vlFaces.Length == 1)
    {
        Face detectedFace;
        sRecoResult = RecognizeFace(image, vlFaces[0], out detectedFace, out sRecoResults);
        if (detectedFace == null)
        {
            // Detection region could not be re-extracted; show raw detections.
            RecognitionResultUpdate(lciBmp, vlFaces);
            image.Dispose();
            grayImage.Dispose();
            return false;
        }
        detectedFace.CalculateFovAndCoords((int)image.Width, (int)image.Height);
        RecognitionResultUpdate(lciBmp, detectedFace);
        detectedFaces.Add(detectedFace);
        if (sRecoResult == null)
        {
            // Face detected but no recognition match found.
            isLastRecoResultMultiple = false;
            lastRecognitionSucceded = false;
            lastRecognitionResult = null;
            ShowDetectionResults(lastDetectedFaces);
        }
        else
        {
            // NOTE(review): lastRecognitionResult stays null even on success
            // in this single-face path (unlike the multi-face path below) —
            // confirm this is intentional.
            isLastRecoResultMultiple = false;
            lastRecognitionSucceded = true;
            lastRecognitionResult = null;
            ShowRecognitionResults(sRecoResults);
        }
    }
    #endregion
    #region Multiple faces detected
    else
    {
        // Array of detected face objects used during recognition
        mRecoResult = RecognizeMultipleFaces(image, vlFaces, out detectedFaces, out mRecoResults);
        if ((mRecoResult == null) || (mRecoResult.Length < 1))
        {
            isLastRecoResultMultiple = true;
            lastRecognitionSucceded = false;
            lastRecognitionResult = null;
            ShowDetectionResults(detectedFaces);
            //ShowRecognitionResults(mRecoResult);
        }
        else
        {
            isLastRecoResultMultiple = true;
            lastRecognitionSucceded = true;
            lastRecognitionResult = mRecoResult;
            ShowRecognitionResults(mRecoResult);
        }
    }
    #endregion
    this.lastDetectedFaces = detectedFaces;
    image.Dispose();
    grayImage.Dispose();
    return true;
}
/// <summary>
/// Raises the FaceDetected event
/// </summary>
/// <param name="faces">The collection of detected faces</param>
public void OnFaceDetected(FaceCollection faces)
{
    // FIX(review): copy the delegate to a local so a subscriber removing
    // itself between the null check and the invocation cannot cause a
    // NullReferenceException.
    var handler = this.FaceDetected;
    if (handler != null)
        handler(this, faces);
}
/// <summary>
/// Initiates a new instance of HumanRecognizer
/// </summary>
/// <param name="imageSource">Source of frames to analyze; must not be null.</param>
/// <exception cref="ArgumentNullException">imageSource is null.</exception>
public HumanRecognizer(IImageSource imageSource)
{
    // FIX(review): include the offending parameter name in the exception.
    if (imageSource == null)
        throw new ArgumentNullException("imageSource");
    this.imageSource = imageSource;
    LoadKnownFaces();
    lastDetectedFaces = new FaceCollection();
    // Fall back to default settings when no settings file is available.
    settings = RecoHumanSettigs.Load("Settings.xml");
    if (settings == null)
        settings = RecoHumanSettigs.Default;
    //capturedImages = new ProducerConsumer<NImage>(10);
    // NOTE(review): the worker thread is created but not started here —
    // presumably started elsewhere (e.g. a Start() method); confirm.
    mainThread = new Thread(new ThreadStart(MainThreadTask));
    mainThread.IsBackground = true;
}
/// <summary>
/// Recognizes multiple faces from a single image frame
/// </summary>
/// <param name="image">Neurotec image on which the face recognition is based</param>
/// <param name="vlFaces">Array of faces detected</param>
/// <param name="detectedFaces">Collection of Face objects built during recognition</param>
/// <param name="MultipleRecognitionResults">An array containing all recognition results for each recognized face</param>
/// <returns>An array containing best match in all known faces.</returns>
private RecognitionResult[] RecognizeMultipleFaces(NImage image, VleFace[] vlFaces, out FaceCollection detectedFaces, out RecognitionResult[][] MultipleRecognitionResults)
{
    #region Variables
    // Stores the original image as bitmap
    Bitmap bmp;
    // Bitmap to draw in the detected face region
    Bitmap croppedBitmap;
    // Graphics used to copy the face detected region
    Graphics g;
    // Rectangle used to copy the scaled region of face detected
    Rectangle rect;
    // Nurotec Image required in the process of recognize the face detected region
    NGrayscaleImage gray;
    // Verilook Detetion Details as result of face recognition
    VleDetectionDetails detectionDetails;
    // The face template result of a face recognition
    // NOTE(review): 'templates' is allocated but never written or read below —
    // looks like dead code; confirm before removing.
    byte[][] templates = new byte[vlFaces.Length][];
    // The face features result of a face recognition
    byte[] features;
    // Stores the current recognition face
    Face currentFace;
    // Stores the recognized faces
    //FaceCollection recognizedFaces = new FaceCollection(vlFaces.Length);
    detectedFaces = new FaceCollection(vlFaces.Length);
    // Stores the best recognition result for current face
    RecognitionResult currentResult;
    // Stores the recognition results for current face
    RecognitionResult[] currentRecognitionResults;
    // Stores all Recognition results
    List<RecognitionResult[]> recognitionResults = new List<RecognitionResult[]>();
    // Stores the best recognition result matches
    List<RecognitionResult> selectedResults = new List<RecognitionResult>();
    #endregion

    // Get the original image as bitmap
    bmp = new Bitmap(image.ToBitmap());
    // Extract each face, and get its template
    foreach (VleFace vlFace in vlFaces)
    {
        // Get a rectangle a bit larger than the one the face has been recognized.
        // Its because some times in the exact area of the face the face cannot be recognized again
        //rect = new Rectangle(vlFace.Rectangle.X - 50, vlFace.Rectangle.Y - 50, vlFace.Rectangle.Width + 100, vlFace.Rectangle.Height + 100);
        rect = new Rectangle(vlFace.Rectangle.X - vlFace.Rectangle.Width / 2, vlFace.Rectangle.Y - vlFace.Rectangle.Height / 2, vlFace.Rectangle.Width * 2, vlFace.Rectangle.Height * 2);
        // Get the face bitmap
        croppedBitmap = new Bitmap(rect.Width, rect.Height);
        g = Graphics.FromImage(croppedBitmap);
        g.DrawImage(bmp, 0, 0, rect, GraphicsUnit.Pixel);
        // Get gray image for face detection
        // NOTE(review): the intermediate NImage.FromBitmap(croppedBitmap) is
        // never disposed — potential native-resource leak; confirm.
        gray = (NGrayscaleImage)NImage.FromImage(NPixelFormat.Grayscale, 0, NImage.FromBitmap(croppedBitmap));
        // Extract the face and extract its template
        currentFace = new Face(vlFace);
        features = vlExtractor.Extract(gray, out detectionDetails);
        // Skip crops where the extractor could not find a face again.
        if (!detectionDetails.FaceAvailable)
            continue;
        UseResources();
        currentFace.SetRecognitionData(features, detectionDetails, croppedBitmap);
        ReleaseResources();
        currentFace.CalculateFovAndCoords((int)image.Width, (int)image.Height);
        detectedFaces.Add(currentFace);
        Console("Found face: location = (" + detectionDetails.Face.Rectangle.X + ", " + detectionDetails.Face.Rectangle.Y + "), width = " + detectionDetails.Face.Rectangle.Width + ", height = " + detectionDetails.Face.Rectangle.Height + ", confidence = " + detectionDetails.Face.Confidence);
        // Best-effort cleanup of per-face resources.
        try
        {
            croppedBitmap.Dispose();
            g.Dispose();
            gray.Dispose();
        }
        catch { }
    }
    if (detectedFaces.Count > 0)
        Console(detectedFaces.Count.ToString() + " faces found.");
    if (knownFaces.Count > 0)
    {
        Console("Initializing recognition");
        // Recognize each detected face
        for (int i = 0; i < detectedFaces.Count; ++i)
        {
            // Faces that failed feature extraction cannot be matched.
            if (detectedFaces[i].Features == null)
                continue;
            currentFace = detectedFaces[i];
            // Start recognition
            currentResult = Recognize(currentFace, out currentRecognitionResults);
            if (currentResult == null)
                continue;
            selectedResults.Add(currentResult);
            recognitionResults.Add(currentRecognitionResults);
        }
    }
    MultipleRecognitionResults = recognitionResults.ToArray();
    return selectedResults.ToArray();
}
/// <summary>
/// Loads known faces from file
/// </summary>
/// <param name="knownFacesFile">The file collection to load</param>
private void LoadKnownFaces(string knownFacesFile)
{
    // Fall back to an empty collection when the file cannot be loaded.
    knownFaces = FaceCollection.Load(knownFacesFile) ?? new FaceCollection();
    UpdateKnownFacesPanel();
}
/// <summary>
/// Applies the "RawSide" appearance to one side of the sheet (chosen by
/// comparing front/back chain areas), falling back to a manual face-selection
/// loop when the automatic detection throws.
/// </summary>
/// <param name="oDoc">Sheet-metal part document.</param>
public static void setTexture(PartDocument oDoc)
{
    SheetMetalComponentDefinition oCompDef = (SheetMetalComponentDefinition)oDoc.ComponentDefinition;
    Asset oAsset;
    try
    {
        oAsset = oDoc.Assets["RawSide"];
    }
    catch (System.ArgumentException)
    {
        // FIX(review): removed the unused exception variable and unused local
        // 'oAssets'. Asset missing in the document: copy from the library.
        AssetLibrary oAssetsLib = iApp.AssetLibraries["3D_Pisa_Col"];
        Asset oAssetLib = oAssetsLib.AppearanceAssets["RawSide"];
        oAsset = oAssetLib.CopyTo(oDoc);
    }
    try
    {
        FaceCollection fcFront = oCompDef.Bends[1].FrontFaces[1].TangentiallyConnectedFaces;
        fcFront.Add(oCompDef.Bends[1].FrontFaces[1]);
        FaceCollection fcBack = oCompDef.Bends[1].BackFaces[1].TangentiallyConnectedFaces;
        fcBack.Add(oCompDef.Bends[1].BackFaces[1]);
        // Total area of each side's face chain.
        double area0 = 0;
        foreach (Face oFace in fcFront)
        {
            area0 += oFace.Evaluator.Area;
        }
        double area1 = 0;
        foreach (Face oFace in fcBack)
        {
            area1 += oFace.Evaluator.Area;
        }
        // NOTE(review): here the FRONT chain is chosen when its area is
        // larger; the other setTexture overload in this file picks the BACK
        // chain in that case — confirm which orientation is intended.
        FaceCollection fc;
        if (area0 > area1)
        {
            fc = fcFront;
        }
        else
        {
            fc = fcBack;
        }
        foreach (Face f in fc)
        {
            f.Appearance = oAsset;
        }
    }
    catch
    {
        // Automatic detection failed (e.g. no bends): let the user pick faces.
        while (true)
        {
            // FIX(review): MessageBox.Show takes (text, caption); the original
            // passed the question as the caption and the title as the text.
            var form = MessageBox.Show("Selezionare facce manualmente?", "Colorazione lato bello", MessageBoxButtons.YesNoCancel, MessageBoxIcon.Warning);
            if (form != DialogResult.Yes)
            {
                break;
            }
            FaceCollection fc = manualFaceSelect(oDoc);
            var result = MessageBox.Show("Sicuro di impostare l'asset Raw Side per le facce selezionate?", "Colorazione lato bello", MessageBoxButtons.YesNoCancel, MessageBoxIcon.Warning);
            if (result == DialogResult.Yes)
            {
                foreach (Face f in fc)
                {
                    f.Appearance = oAsset;
                }
            }
        }
    }
}
/// <summary>
/// Triangulate this polygon: replaces every n-gon face with a fan of
/// (n - 2) triangles anchored at the face's first vertex.
/// </summary>
public void Triangulate()
{
    FaceCollection newFaces = new FaceCollection();

    // Go through each face...
    foreach (Face face in faces)
    {
        // Number of triangles in a fan = vertices - 2.
        int triangles = face.Indices.Count - 2;

        // Already a triangle — keep it unchanged.
        if (triangles == 1)
        {
            newFaces.Add(face);
            continue;
        }

        // Build the triangle fan.
        for (int i = 0; i < triangles; i++)
        {
            // FIX(review): the original appended FIVE indices per "triangle"
            // (duplicating i+2 and i+1), producing degenerate 5-index faces,
            // inconsistent with the 3-index pass-through branch above.
            Face triangle = new Face();
            triangle.Indices.Add(new Index(face.Indices[0]));
            triangle.Indices.Add(new Index(face.Indices[i + 1]));
            triangle.Indices.Add(new Index(face.Indices[i + 2]));
            newFaces.Add(triangle);
        }
    }

    faces.Clear();
    faces = newFaces;
}
/// <summary>
/// Subdivides each triangular face of this polygon into three triangles by
/// inserting the face centroid as a new vertex. Non-triangular faces are not
/// carried over into the result.
/// </summary>
/// <returns>The number of faces in the new subdivided polygon.</returns>
public int Subdivide()
{
    FaceCollection newFaces = new FaceCollection();
    foreach (Face face in Faces)
    {
        // Only triangles are subdivided; other faces are dropped.
        if (face.Count != 3)
            continue;

        // The three corner vertices of the triangle.
        Vertex v1 = Vertices[face.Indices[0].Vertex];
        Vertex v2 = Vertices[face.Indices[1].Vertex];
        Vertex v3 = Vertices[face.Indices[2].Vertex];

        // FIX(review): (v1 + v2 + v3) / 3 is the triangle CENTROID — the
        // original comment incorrectly called it an edge midpoint. Also
        // removed the stale <param name="smooth"> doc for a parameter that
        // does not exist.
        Vertex vCentroid = (v1 + v2 + v3) / 3;
        Index iCentroid = new Index(Vertices.Add(vCentroid));

        // Connect each edge of the original triangle to the centroid.
        Face newFace = new Face();
        newFace.Indices.Add(new Index(face.Indices[0]));
        newFace.Indices.Add(new Index(face.Indices[1]));
        newFace.Indices.Add(iCentroid);
        newFaces.Add(newFace);

        newFace = new Face();
        newFace.Indices.Add(new Index(face.Indices[1]));
        newFace.Indices.Add(new Index(face.Indices[2]));
        newFace.Indices.Add(iCentroid);
        newFaces.Add(newFace);

        newFace = new Face();
        newFace.Indices.Add(new Index(face.Indices[2]));
        newFace.Indices.Add(new Index(face.Indices[0]));
        newFace.Indices.Add(iCentroid);
        newFaces.Add(newFace);
    }
    faces = newFaces;
    return faces.Count;
}
/// <summary>
/// Creates more features based on the start face of the first extrude feature:
/// sketches a circle on that face, extrudes it as a join, fillets the joint
/// between the start face and the new cylinder's side face, then fits the view.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event data (unused).</param>
private void button4_Click(object sender, EventArgs e)
{
    // create a new document with an extrude feature.
    createFeature();
    PartDocument oPartDoc = mApp.ActiveDocument as PartDocument;
    // get a start face of the extrude feature
    ExtrudeFeature oExtrudeF = oPartDoc.ComponentDefinition.Features.ExtrudeFeatures[1];
    Face oFirstFace = oExtrudeF.StartFaces[1];
    // add a new sketch on the basis of the start face
    PlanarSketch oSketch = oPartDoc.ComponentDefinition.Sketches.Add(oFirstFace, false);
    TransientGeometry oTG = mApp.TransientGeometry;
    // create a circle and make a profile from the sketch
    oSketch.SketchCircles.AddByCenterRadius(oTG.CreatePoint2d(0, -5), 1);
    Profile oProfile = oSketch.Profiles.AddForSolid(true, null, null);
    // get ExtrudeFeatures collection
    ExtrudeFeatures extrudes = oPartDoc.ComponentDefinition.Features.ExtrudeFeatures;
    // Create an extrude definition in the new surface body
    ExtrudeDefinition extrudeDef = extrudes.CreateExtrudeDefinition(oProfile, PartFeatureOperationEnum.kJoinOperation);
    // Modify the extent: distance 2 in the positive direction.
    extrudeDef.SetDistanceExtent(2, PartFeatureExtentDirectionEnum.kPositiveExtentDirection);
    // Create the extrusion.
    ExtrudeFeature extrude = extrudes.Add(extrudeDef);
    //
    FilletFeatures oFilletFs = oPartDoc.ComponentDefinition.Features.FilletFeatures;
    //create fillet definition
    FilletDefinition oFilletDef = oFilletFs.CreateFilletDefinition();
    // FaceCollection: one face set holds the planar start face, the other the
    // new extrusion's cylindrical side face; fillet radius 0.1.
    FaceCollection oFacesCollOne = mApp.TransientObjects.CreateFaceCollection();
    oFacesCollOne.Add(oFirstFace);
    FaceCollection oFacesCollTwo = mApp.TransientObjects.CreateFaceCollection();
    oFacesCollTwo.Add(extrude.SideFaces[1]);//cylinder face
    oFilletDef.AddFaceSet(oFacesCollOne, oFacesCollTwo, 0.1);
    oFilletFs.Add(oFilletDef);
    //Fit the view programmatically
    Camera oCamera = mApp.ActiveView.Camera;
    oCamera.ViewOrientationType = ViewOrientationTypeEnum.kIsoTopRightViewOrientation;
    oCamera.Apply();
    mApp.ActiveView.Fit(true);
}
/// <summary>
/// Loads known faces from file
/// </summary>
/// <param name="knownFacesFile">The file collection to load</param>
private void LoadKnownFaces(string knownFacesFile)
{
    // Fall back to an empty collection when the file cannot be loaded.
    knownFaces = FaceCollection.Load(knownFacesFile) ?? new FaceCollection();
    OnFacesLoaded();
}
/// <summary>
/// Handles the recognizer's FaceDetected event.
/// </summary>
/// <param name="sender">The recognizer that raised the event.</param>
/// <param name="faces">The collection of detected faces.</param>
private void engine_FaceDetected(HumanRecognizer sender, FaceCollection faces)
{
    // NOTE(review): both the loop body and the if-body are empty — this looks
    // like an unfinished handler stub; confirm the intended behavior before
    // removing (the foreach still enumerates 'faces').
    foreach (Face f in faces)
    {
    }
    if (sender.AutoFindHuman)
    {
    }
}