Example #1
0
        /// <summary>
        /// Applies tracked face data to the mesh: mirrors the parent transform to
        /// match the webcam, stores the head pose (converted to cm), and copies the
        /// tracked vertices into the mesh.
        /// </summary>
        public unsafe void UseFaceData(ref ImageData image, ref FaceData face)
        {
            // Lazily resolve the background quad; skip this frame while it is missing.
            if (quad == null)
            {
                quad = FindObjectOfType <AutoBackgroundQuad>();
                return;
            }

            // Mirror the parent transform according to the webcam's mirror settings.
            ReadWebcam.instance.GetMirrorValue(out int mirrorX, out int mirrorY);
            mirrorX *= ReadWebcam.instance.mirror3D;
            transform.parent.localScale = new Vector3(mirrorX, mirrorY, -1.0f);

            // Store the head pose, scaling position from meters to centimeters.
            var pose = face.HeadPose;
            pose.position *= 100;
            HeadPose = pose;

            // Copy the tracked vertices into the local buffer.
            // NOTE(review): 1220 is assumed to be the tracker's fixed vertex count — TODO confirm.
            const int trackedVertexCount = 1220;
            for (int index = 0; index < trackedVertexCount; ++index)
            {
                vertices[index] = face.Vertices[index];
            }

            mesh.vertices = vertices;
            mesh.RecalculateBounds();
            mesh.RecalculateNormals();
        }
 /// <summary>
 /// Walks every face of the tooling box's first body and points the linear
 /// dimension's handle at the face whose direction lines up with <paramref name="vec"/>.
 /// </summary>
 /// <param name="ld">Dimension whose handle origin/orientation is updated in place.</param>
 /// <param name="vec">Direction to match faces against.</param>
 public override void SetDimForFace(ref LinearDimension ld, Vector3d vec)
 {
     foreach (Face face in this.ToolingBox.GetBodies()[0].GetFaces())
     {
         if (face.SolidFaceType == Face.FaceType.Cylindrical)
         {
             // Cylindrical faces: use the face's own origin and axis normal.
             Point3d  originPt = new Point3d(0, 0, 0);
             Vector3d normal   = new Vector3d(0, 0, 0);
             FaceUtils.AskFaceOriginAndNormal(face, out originPt, out normal);
             // NOTE(review): the angle is measured against the fixed vector (1,1,1),
             // not against the face normal — looks suspicious; confirm intended.
             double angle1 = UMathUtils.Angle(vec, new Vector3d(1, 1, 1));
             if (UMathUtils.IsEqual(angle1, 0))
             {
                 ld.HandleOrientation = normal;
                 ld.HandleOrigin      = originPt;
             }
         }
         else
         {
             // Other faces: compare vec against the face direction transformed
             // into the current coordinate system.
             FaceData fd   = FaceUtils.AskFaceData(face);
             Vector3d temp = fd.Dir;
             this.Matr.ApplyVec(ref temp);
             double angle = UMathUtils.Angle(vec, temp);
             if (UMathUtils.IsEqual(angle, 0))
             {
                 // Handle uses the untransformed direction/point.
                 ld.HandleOrientation = fd.Dir;
                 ld.HandleOrigin      = fd.Point;
             }
         }
     }
 }
Example #3
0
        /// <summary>
        /// Factory: wraps an NX face in the matching AbstractCircleFace subtype,
        /// or returns null when the face is not circle-like.
        /// </summary>
        /// <param name="face">Solid face to classify.</param>
        /// <returns>A concrete AbstractCircleFace, or null when unsupported.</returns>
        public static AbstractCircleFace Create(Face face)
        {
            FaceData           data = FaceUtils.AskFaceData(face);
            AbstractCircleFace abs  = null;

            switch (face.SolidFaceType)
            {
            case Face.FaceType.Cylindrical:
                abs = new CylinderFace(data);
                break;

            case Face.FaceType.Conical:
                abs = new CircularConeFace(data);
                break;

            case Face.FaceType.Planar:
                // A planar face only qualifies when it is a circular annulus;
                // IsCircleAnnylus also yields the arc edges the ctor needs.
                // FIX: use an out var — the previously pre-allocated list was
                // immediately discarded by the out call (dead allocation).
                if (CircleAnnylusFace.IsCircleAnnylus(face, out List <ArcEdgeData> edge))
                {
                    abs = new CircleAnnylusFace(data, edge);
                }
                break;

            default:
                break;
            }
            return(abs);
        }
Example #4
0
        /// <summary>
        /// Maps the tracked 2D face landmarks from image space into the quad's
        /// local space and smoothly moves the landmark point transforms there.
        /// </summary>
        /// <param name="image">Frame metadata (provides OffsetX/OffsetY).</param>
        /// <param name="face">Tracked face data (provides the landmark pointer).</param>
        public unsafe void SetPoints(ref ImageData image, ref FaceData face)
        {
            var ptr = face.Landmark;

            var   height     = quad.texture.height < 16 ? 1 : quad.texture.height; // guard against divide by zero
            float adjustment = System.Math.Abs(quad.transform.localScale.y / height);

            // Image-space center. NOTE(review): integer division truncates odd
            // dimensions by half a pixel — confirm that is acceptable.
            float centerX = quad.texture.width / 2;
            float centerY = quad.texture.height / 2;

            // When the video is rotated 90/270 degrees, width and height swap.
            if (ReadWebcam.instance.GetAdjustedVideoRotationAngle() % 180 != 0)
            {
                centerX = quad.texture.height / 2;
                centerY = quad.texture.width / 2;
            }
            ReadWebcam.instance.GetMirrorValue(out int mirrorX, out int mirrorY);

            for (int p = 0; p < FaceData.NumLandmark; ++p)
            {
                var posX = mirrorX * (ptr[p].x - centerX + image.OffsetX) * adjustment;
                var posY = mirrorY * (ptr[p].y - centerY + image.OffsetY) * adjustment; // OpenCV and Unity use opposite image y axes
                var posZ = quad.transform.localPosition.z;

                // Lerp toward the new position to smooth landmark jitter.
                var newPos = Vector3.Lerp(facePoints[p].localPosition, new Vector3(posX, posY, posZ), 0.8f);
                facePoints[p].localPosition = newPos;
                facePoints[p].localScale    = Vector3.one * adjustment * 6; // scale markers to a sensible size
            }
        }
        /// <summary>
        /// Inserts a FaceData row and returns the database-generated Id.
        /// </summary>
        /// <param name="faceData">Record whose CustomerID, FaceFolder and FaceImage are stored.</param>
        /// <returns>The OUTPUT INSERTED.Id value of the new row.</returns>
        public int InsertData(FaceData faceData)
        {
            // SECURITY NOTE(review): values are concatenated into the SQL text because
            // connectDB exposes no parameter API from here. Doubling single quotes
            // blocks quote-breakout injection; switch to parameterized queries if
            // connectDB ever supports them.
            string customerId = (Convert.ToString(faceData.CustomerID) ?? "").Replace("'", "''");
            string faceFolder = (Convert.ToString(faceData.FaceFolder) ?? "").Replace("'", "''");
            string faceImage  = (Convert.ToString(faceData.FaceImage) ?? "").Replace("'", "''");

            string query = @"INSERT INTO FaceData (CustomerID, FaceFolder, FaceImage) OUTPUT INSERTED.Id VALUES (N'" + customerId + "',N'" +
                           faceFolder + "',N'" + faceImage + "')";

            return(connectDB.ExecuteScalar(query));
        }
Example #6
0
        /// <summary>
        /// Sums the overlap area of matching interference face pairs between the
        /// electrode body and the workpiece body, and stores the total in the
        /// electrode part's "Area" attribute.
        /// </summary>
        public void GetInterferenceOfArea()
        {
            Body        eleBody       = GetOccsInBods(this.eleModel.PartTag)[0];
            Body        workpieceBody = GetOccsInBods(this.workpiece)[0];
            // Faces come back as consecutive pairs: (2i) and (2i+1) belong together.
            List <Face> faces         = AnalysisUtils.SetInterferenceOutFace(eleBody, workpieceBody);
            double      minArea       = 0;

            // BUGFIX: the previous bound (faces.Count / 2 - 1) always skipped the
            // last pair; iterate over every complete pair instead.
            for (int i = 0; i < faces.Count / 2; i++)
            {
                FaceData data1 = FaceUtils.AskFaceData(faces[i * 2]);
                FaceData data2 = FaceUtils.AskFaceData(faces[i * 2 + 1]);
                if (data1.Equals(data2))
                {
                    // Count the smaller of the two face areas as the shared overlap.
                    double area1 = FaceUtils.GetFaceArea(faces[i * 2]);
                    double area2 = FaceUtils.GetFaceArea(faces[i * 2 + 1]);
                    minArea += System.Math.Min(area1, area2);
                }
            }
            AttributeUtils.AttributeOperation("Area", minArea, this.eleModel.PartTag);
        }
Example #7
0
 /// <summary>
 /// Circular-annulus (ring) face: a planar face bounded by arc edges.
 /// Marks itself as both a hole and a step feature.
 /// </summary>
 /// <param name="data">Geometric data of the underlying face.</param>
 /// <param name="edge">Arc edges that bound the annulus.</param>
 public CircleAnnylusFace(FaceData data, List <ArcEdgeData> edge) : base(data)
 {
     this.edgeData = edge;
     // Derive the characteristic point; relies on edgeData being assigned first.
     GetFacePoint();
     this.IsHole = true;
     this.IsStep = true;
 }
Example #8
0
        /// <summary>
        /// Maps tracked 2D face landmarks from image space into the quad's local
        /// space, flags the scene as having a detected face, and smoothly moves
        /// the landmark point transforms.
        /// </summary>
        /// <param name="image">Frame metadata (provides OffsetX/OffsetY).</param>
        /// <param name="face">Tracked face data (provides the landmark pointer).</param>
        public unsafe void SetPoints(ref ImageData image, ref FaceData face)
        {
            var ptr = face.Landmark;

            var   height     = quad.texture.height < 16 ? 1 : quad.texture.height; // guard against divide by zero
            float adjustment = System.Math.Abs(quad.transform.localScale.y / height);

            float centerX = quad.texture.width / 2;
            float centerY = quad.texture.height / 2;

            // When the video is rotated 90/270 degrees, width and height swap.
            if (ReadWebcam.instance.GetAdjustedVideoRotationAngle() % 180 != 0)
            {
                centerX = quad.texture.height / 2;
                centerY = quad.texture.width / 2;
            }
            ReadWebcam.instance.GetMirrorValue(out int mirrorX, out int mirrorY);

            // PERF FIX: the scene lookup was previously repeated for every landmark
            // inside the loop; do it once per call instead.
            if (FaceData.NumLandmark > 0)
            {
                GameObject.Find("World").GetComponent <FaceSceneBehavior>().faceDetected = true;
            }

            for (int p = 0; p < FaceData.NumLandmark; ++p)
            {
                var posX = mirrorX * (ptr[p].x - centerX + image.OffsetX) * adjustment;
                var posY = mirrorY * (ptr[p].y - centerY + image.OffsetY) * adjustment; // OpenCV and Unity use opposite image y axes
                var posZ = quad.transform.localPosition.z;

                // Lerp toward the new position to smooth landmark jitter.
                var newPos = Vector3.Lerp(facePoints[p].localPosition, new Vector3(posX, posY, posZ), 0.8f);
                facePoints[p].localPosition = newPos;
                facePoints[p].localScale    = Vector3.one * adjustment * 6; // scale markers to a sensible size
            }
        }
        } //EndMethod

        /// <summary>
        /// Writes the color-space and depth-space face bounding boxes of a face
        /// shot to a semicolon-separated CSV file named "{path}-{source}.csv".
        /// Write failures are reported to the console rather than thrown.
        /// </summary>
        /// <param name="faceData">Face record providing the two bounding boxes.</param>
        /// <param name="_path">Base path of the output file.</param>
        /// <param name="_source">Source tag appended to the file name.</param>
        public static void SaveFaceShot(FaceData faceData, string _path, string _source)
        {
            // The original single-argument Path.Combine was an identity call;
            // the file name is just the concatenation.
            string filename = _path + "-" + _source + ".csv";

            try
            {
                using (StreamWriter sw = new StreamWriter(filename))
                {
                    // One line per coordinate space: label; topX; topY; width; height
                    sw.WriteLine($"{"ColorSpace"}; {faceData.boxColor.topX}; {faceData.boxColor.topY}; {faceData.boxColor.width}; {faceData.boxColor.height}");
                    sw.WriteLine($"{"DepthSpace"}; {faceData.boxDepth.topX}; {faceData.boxDepth.topY}; {faceData.boxDepth.width}; {faceData.boxDepth.height}");
                }
            }
            catch (IOException)
            {
                Console.WriteLine("Cannot write {0}", filename);
            }
        } //EndMethod
Example #10
0
        /// <summary>
        /// Measures how far this face sits below its neighbours: returns
        /// (highest adjacent-face max Z) - (this face's max Z), i.e. 0 when this
        /// face is the highest, positive when a neighbour rises above it.
        /// </summary>
        /// <param name="faceData">Face to test, with its bounding-box corner data.</param>
        /// <returns>Height difference in rounded units, or 0 when edges cannot be queried.</returns>
        private double GetFaceIsHighst(FaceData faceData)
        {
            double zMax  = Math.Round(faceData.BoxMaxCorner.Z, 3);
            double faceZ = zMax;

            Edge[] egs = null;
            try
            {
                egs = faceData.Face.GetEdges();
            }
            catch
            {
                // Best-effort: a face whose edges cannot be queried is treated as highest.
                return(0);
            }
            if (egs != null)
            {
                // Scan every face adjacent through this face's edges for a higher max Z.
                foreach (Edge eg in egs)
                {
                    foreach (Face fa in eg.GetFaces())
                    {
                        if (fa.Tag != faceData.Face.Tag) // skip the face itself
                        {
                            FaceData data = FaceUtils.AskFaceData(fa);
                            double   z    = Math.Round(data.BoxMaxCorner.Z, 3);
                            if (z > zMax)
                            {
                                zMax = z;
                            }
                        }
                    }
                }
            }
            return(zMax - faceZ);
        }
        /// <summary>
        /// Factory: wraps a face in the slope-and-diameter helper matching its
        /// surface type (planar, cylindrical, conical, or swept for anything else).
        /// </summary>
        /// <param name="face">Face to wrap.</param>
        /// <returns>A concrete AbstractFaceSlopeAndDia; never null.</returns>
        public static AbstractFaceSlopeAndDia CreateFaceSlopeAndDia(Face face)
        {
            FaceData data = FaceUtils.AskFaceData(face);

            if (face.SolidFaceType == Face.FaceType.Planar)
            {
                return new PlaneFaceSlopeAndDia(data);
            }
            if (face.SolidFaceType == Face.FaceType.Cylindrical)
            {
                return new CylinderFaceSlopeAndDia(data);
            }
            if (face.SolidFaceType == Face.FaceType.Conical)
            {
                return new CircularConeFaceSlopeAndDia(data);
            }
            // Every other surface type is treated as a swept face.
            return new SweptFaceFaceSlopeAndDia(data);
        }
        /// <summary>
        /// For each body, finds the face whose bounding-box center has the lowest
        /// Z and pulls it downward by (face max Z + electrode height).
        /// </summary>
        /// <param name="bodys">Bodies whose lowest faces should be pulled.</param>
        private void PullFaceForWave(List <Body> bodys)
        {
            foreach (Body body in bodys)
            {
                FaceData lowestFace = null;
                double   lowestZ    = 9999; // sentinel: any real center Z is expected to be smaller

                // Locate the face with the lowest bounding-box center.
                foreach (Face face in body.GetFaces())
                {
                    FaceData data   = FaceUtils.AskFaceData(face);
                    Point3d  center = UMathUtils.GetMiddle(data.BoxMaxCorner, data.BoxMinCorner);
                    if (center.Z < lowestZ)
                    {
                        lowestZ    = center.Z;
                        lowestFace = data;
                    }
                }

                if (lowestFace == null)
                {
                    continue;
                }

                double pullDistance = lowestFace.BoxMaxCorner.Z + this.datum.EleHeight;
                if (pullDistance > 0)
                {
                    try
                    {
                        SynchronousUtils.CreatePullFace(new Vector3d(0, 0, -1), pullDistance, lowestFace.Face);
                    }
                    catch
                    {
                        // Best-effort: a face that cannot be pulled is skipped silently.
                    }
                }
            }
        }
        /// <summary>
        /// Updates the CustomerID, FaceFolder and FaceImage columns of FaceData.
        /// WARNING(review): the statement has no WHERE clause, so it updates EVERY
        /// row in the table — confirm this is intended (e.g. a single-row table).
        /// </summary>
        /// <param name="faceData">Record providing the new column values.</param>
        public void UpdateData(FaceData faceData)
        {
            // SECURITY NOTE(review): values are concatenated because connectDB exposes
            // no parameter API from here. Doubling single quotes blocks quote-breakout
            // injection; prefer parameterized queries if connectDB supports them.
            string customerId = (Convert.ToString(faceData.CustomerID) ?? "").Replace("'", "''");
            string faceFolder = (Convert.ToString(faceData.FaceFolder) ?? "").Replace("'", "''");
            string faceImage  = (Convert.ToString(faceData.FaceImage) ?? "").Replace("'", "''");

            string query = @"UPDATE FaceData SET CustomerID =N'" + customerId + "', FaceFolder =N'" + faceFolder +
                           "', FaceImage =N'" + faceImage + "'";

            connectDB.ExecuteNonQuery(query);
        }
Example #14
0
    /// <summary>
    /// Fills a FaceData with a randomly generated character portrait: random
    /// layer indices from the gender-appropriate library plus random colours.
    /// </summary>
    /// <param name="faceData">Face record to populate in place.</param>
    private static void CreateCharacterImage(FaceData faceData)
    {
        // Pick the layer library matching the character's gender.
        FaceLayerLibrary faceLibrary = faceData.gender == Gender.Female ? femaleFaces : maleFaces;

        // Draw a random index into each feature layer list.
        faceData.faceImageNumber     = Random.Range(0, faceLibrary.face.Count);
        faceData.eyesImageNumber     = Random.Range(0, faceLibrary.eyes.Count);
        faceData.hairImageNumber     = Random.Range(0, faceLibrary.hair.Count);
        faceData.mouthImageNumber    = Random.Range(0, faceLibrary.mouth.Count);
        faceData.noseImageNumber     = Random.Range(0, faceLibrary.nose.Count);
        faceData.eyebrowsImageNumber = Random.Range(0, faceLibrary.eyebrows.Count);
        faceData.robeImageNumber     = Random.Range(0, faceLibrary.robe.Count);
        faceData.hatImageNumber      = Random.Range(0, faceLibrary.hat.Count);

        // Random colour choices for skin, hair, eyes and clothing.
        faceData.skinColour     = ColourMaths.ReturnRandomSkinColour(0.1f, 1f, 0.05f);
        faceData.hairColour     = ColourMaths.ReturnRandomHairColour(0.0f, 1f, Random.Range(0.0f, 1.0f));
        faceData.eyeColour      = ColourMaths.ReturnRandomEyeColour();
        faceData.clothingColour = ColourMaths.ReturnRandomClothingColour();
    }
Example #15
0
        /// <summary>
        /// Builds the interference solid between the electrode and the workpiece:
        /// for each matching pair of interference faces, waves both faces into
        /// bodies, intersects them, and sews the resulting bodies together.
        /// </summary>
        public void GetInterferenceOfFace()
        {
            List <Tag>  outFace       = new List <Tag>();
            Body        eleBody       = GetOccsInBods(this.eleModel.PartTag)[0];
            Body        workpieceBody = GetOccsInBods(this.workpiece)[0];
            // Faces come back as consecutive pairs: (2i) and (2i+1) belong together.
            List <Face> faces         = AnalysisUtils.SetInterferenceOutFace(eleBody, workpieceBody);

            // BUGFIX: the previous bound (faces.Count / 2 - 1) always skipped the
            // last pair; iterate over every complete pair instead.
            for (int i = 0; i < faces.Count / 2; i++)
            {
                FaceData data1 = FaceUtils.AskFaceData(faces[i * 2]);
                FaceData data2 = FaceUtils.AskFaceData(faces[i * 2 + 1]);
                if (data1.Equals(data2))
                {
                    // (Removed unused local 'Tag face3'.)
                    NXOpen.Features.Feature feat1 = AssmbliesUtils.WaveFace(faces[i * 2]);
                    NXOpen.Features.Feature feat2 = AssmbliesUtils.WaveFace(faces[i * 2 + 1]);
                    Body[] bodys1 = (feat1 as NXOpen.Features.BodyFeature).GetBodies();
                    Body[] bodys2 = (feat2 as NXOpen.Features.BodyFeature).GetBodies();

                    // Keep only non-null intersection bodies.
                    Tag bodyTag1 = Intersect(bodys1[0], bodys2[0]);
                    if (bodyTag1 != Tag.Null)
                    {
                        outFace.Add(bodyTag1);
                    }
                }
            }
            SewSolidBody(outFace);
        }
        /// <summary>
        /// Per-frame sensor callback: caches the latest frame buffers, advances
        /// the recording counters while recording, and refreshes the displays.
        /// </summary>
        private void OnFrame(byte[] frameColor32, ushort[] frameDepth16, byte[] frameBodyIndex8,
                             byte[] frameMapDepthToColor32, IList <Body> listBodies, FaceData faceData)
        {
            // Cache references to the current frame's data buffers.
            this.ptrColorImage32      = frameColor32;
            this.ptrDepthImage16      = frameDepth16;
            this.ptrBodyIndexImage8   = frameBodyIndex8;
            this.ptrMapDepthToColor32 = frameMapDepthToColor32;
            this.ptrBodies            = listBodies;
            this.ptrFaceData          = faceData;

            // While recording, advance the frame and elapsed-seconds counters.
            if (this.depthSensor.IsRecording == true)
            {
                this.CounterOfFrames();
                this.UpdateCounterOfSeconds();
            }

            // Display frames
            this.DisplayFrames(frameColor32, frameDepth16, frameMapDepthToColor32);

            // Display 2D Skeleton
            //this.DisplayArm(listBodies, frameDepth16); //Elias

            // Show the grabber's frames-per-second in the UI.
            this.lblFrameRate.Content = "Frame Rate: " + this.depthSensor.FPS;

            // Update this Function Frame Rate
            //this.UpdateFunctionFrameRate();
        }
Example #17
0
 /// <summary>
 /// Detects faces in a local image stream and identifies them against the
 /// configured person group. Returns null when nothing is identified or the
 /// Face API reports an error.
 /// </summary>
 /// <param name="faceClient">Authenticated Face API client.</param>
 /// <param name="stream">Image stream to analyze.</param>
 /// <returns>FaceData for the identified person, or null.</returns>
 private static async Task<FaceData> DetectStream(FaceClient faceClient, Stream stream)
 {
     try
     {
         var faceList =
             await faceClient.Face.DetectWithStreamAsync(
                 stream, true, false, FaceAttributes);
         var faces = faceList.Where(w => w.FaceId != null)
             .Select(s => s.FaceId.Value).ToList();
         var identify = await faceClient.Face.IdentifyWithHttpMessagesAsync(faces, Group);
         var result = identify.Body.FirstOrDefault();
         if (result == null)
         {
             return null;
         }
         var data = new FaceData();
         // NOTE(review): when several faces are detected, the loop keeps only the
         // LAST face's age/gender — confirm that is the intended behavior.
         foreach (var face in faceList)
         {
             data.Age    = face.FaceAttributes.Age;
             data.Gender = face.FaceAttributes.Gender.ToString();
             data.FaceId = result.Candidates.First().PersonId.ToString();
         }
         return data;
     }
     catch (APIErrorException)
     {
         // FIX: the caught exception was previously bound to an unused variable.
         // Best-effort: any API failure is reported as "no face identified".
         return null;
     }
 }
Example #18
0
        /// <summary>
        /// Applies tracked face data to the animoji model: mirrors the parent
        /// transform, stores the head pose (converted to cm), and drives the
        /// blendshape animation weights including the custom eye blinks.
        /// </summary>
        public unsafe void UseFaceData(ref ImageData image, ref FaceData face)
        {
            // Nothing to animate without a SkinnedMeshRenderer.
            if (smr == null)
            {
                return;
            }
            // Lazily resolve the background quad; skip this frame while it is missing.
            if (quad == null)
            {
                quad = FindObjectOfType <AutoBackgroundQuad>();
                return;
            }

            // Mirror the parent transform according to the webcam's mirror settings.
            ReadWebcam.instance.GetMirrorValue(out int mirrorX, out int mirrorY);
            mirrorX *= ReadWebcam.instance.mirror3D;

            transform.parent.localScale = new Vector3(mirrorX, mirrorY, -1.0f);

            var pose = face.HeadPose;

            // Scale the head pose position from meters to centimeters.
            pose.position *= 100;
            HeadPose       = pose;

            // Fetch the animation weights into a stack buffer and apply them.
            float *weights = stackalloc float[FaceData.NumAnimationWeights];

            face.GetAnimation(weights);
            SetAnimation(weights);

            SetCustomEyeBlink(ref face);
        }
Example #19
0
 /// <summary>
 /// Base type for circle-like faces: stores the face data and builds a
 /// transform aligning the face's axis with the Z axis.
 /// </summary>
 /// <param name="data">Geometric data of the underlying face.</param>
 public AbstractCircleFace(FaceData data)
 {
     this.Data = data;
     dir       = data.Dir;
     matr      = new Matrix4();
     matr.Identity();
     // Align the face's center/direction with the Z axis.
     // NOTE(review): assumes CenterPt is already valid at this point — TODO confirm.
     matr.TransformToZAxis(this.CenterPt, data.Dir);
 }
 /// <summary>
 /// Initializes the module: creates the face-recognition data holder, runs
 /// the base initialization, then starts the ID-card verification thread.
 /// </summary>
 public override void Init()
 {
     // Initialize the face-recognition module.
     Face = new FaceData();
     base.Init();
     // Start the second-generation ID-card verification thread.
     OpenCvr();
 }
Example #21
0
        /// <summary>
        /// Drives the AnimojiModel's eye-blink blendshapes from tracked eye
        /// landmarks; GetEyeValue yields a 0-100 weight per eye.
        /// </summary>
        /// <param name="face">Tracked face data the eye values are read from.</param>
        unsafe void SetCustomEyeBlink(ref FaceData face)
        {
            // NOTE(review): landmark indices (52/72/55/73 and 58/75/61/76) are
            // assumed to be the eyelid landmark pairs per eye — TODO confirm.
            var left  = GetEyeValue(ref face, 52, 72, 55, 73); //0~100
            var right = GetEyeValue(ref face, 58, 75, 61, 76); //0~100

            SetBlendShapeWeight("PandaBlendshape.eyeBlinkLeft", left);
            SetBlendShapeWeight("PandaBlendshape.eyeBlinkRight", right);
        }
Example #22
0
    /// <summary>
    /// Generates a face with a random gender. Delegates to the gender-specific
    /// overload so both entry points share one implementation (removes the
    /// duplicated construct-and-populate sequence).
    /// </summary>
    /// <returns>A freshly generated FaceData.</returns>
    public static FaceData GenerateFace()
    {
        return GenerateFace(ReturnRandomGender());
    }
Example #23
0
 /// <summary>
 /// Re-rolls this witch: generates a fresh female name and a fresh face.
 /// </summary>
 public void RandomiseWitch()
 {
     // Prepare the name generator, then draw a first name and surname.
     NameGenerator.NameFactory();
     firstName = NameGenerator.GenerateFemaleFirstName();
     surname   = NameGenerator.GenerateSurname();
     fullName  = string.Concat(firstName, " ", surname);
     faceData  = FaceGenerator.GenerateFace(Gender.Female);
 }
Example #24
0
    /// <summary>
    /// Generates a random face for the given gender.
    /// </summary>
    /// <param name="presetGender">Gender the generated face should use.</param>
    /// <returns>A freshly generated FaceData.</returns>
    public static FaceData GenerateFace(Gender presetGender)
    {
        FaceData generated = new FaceData(presetGender);
        CreateCharacterImage(generated);
        return generated;
    }
Example #25
0
        /// <summary>
        /// Shows the detected face's attributes in the form's text boxes.
        /// </summary>
        /// <param name="face">Face attributes to display.</param>
        void showFaceData(FaceData face)
        {
            Age_TextBox.Text    = face.Age;
            Gender_TextBox.Text = face.Gender;
            // The description combines the smile and glasses attributes.
            Description_TextBox.Text = face.Smile + face.Glasses;
        }
Example #26
0
    /// <summary>
    /// Initializes the RealSense pipeline: the face-tracking module, the color
    /// stream reader, the raw-stream texture output, and the smoothing filters
    /// for body/head/eye/expression values. Failures are shown via ErrorLog.
    /// </summary>
    protected void Init()
    {
        try
        {
            // RealSense initialization
            // See: https://software.intel.com/sites/landingpage/realsense/camera-sdk/v2016r3/documentation/html/index.html?doc_face_general_procedure.html
            // See: .\Intel\RSSDK\sample\core\RawStreams.unity
            SenseManager = SenseManager.CreateInstance();

            FaceModule = FaceModule.Activate(SenseManager);
            FaceModule.FrameProcessed += FaceModule_FrameProcessed;
            FaceData = FaceModule.CreateOutput();

            // Track faces from the color stream and enable expression output.
            FaceConfig = FaceModule.CreateActiveConfiguration();
            FaceConfig.TrackingMode = TrackingModeType.FACE_MODE_COLOR;
            FaceConfig.Expressions.Properties.Enabled = true;
            FaceConfig.ApplyChanges();

            // Read the color stream at 640x480 @ 30fps.
            SampleReader = SampleReader.Activate(SenseManager);
            SampleReader.EnableStream(StreamType.STREAM_TYPE_COLOR, 640, 480, 30);
            SampleReader.SampleArrived += SampleReader_SampleArrived;

            SenseManager.Init();
            SenseManager.StreamFrames(false);

            // RawStreams: native texture the camera image is rendered into.
            Texture = NativeTexturePlugin.Activate();
            Material.mainTexture      = new Texture2D(640, 480, TextureFormat.BGRA32, false);
            Material.mainTextureScale = new Vector2(-1, -1); // flip both axes
            TexPtr = Material.mainTexture.GetNativeTexturePtr();

            // Query the actual color-stream resolution.
            StreamProfileSet profile;
            SenseManager.CaptureManager.Device.QueryStreamProfileSet(out profile);
            Resolution = profile.color.imageInfo;

            // Smoothing initialization
            // See: https://software.intel.com/sites/landingpage/realsense/camera-sdk/v2016r3/documentation/html/index.html?doc_utils_the_smoother_utility.html
            Smoother = Smoother.CreateInstance(SenseManager.Session);

            SmoothBody      = Smoother.Create3DWeighted(BodyPosSmoothWeight);
            SmoothHead      = Smoother.Create3DWeighted(HeadAngSmoothWeight);
            SmoothEyes      = Smoother.Create2DWeighted(EyesPosSmoothWeight);
            SmoothEyesClose = Smoother.Create1DWeighted(EyesCloseSmoothWeight);
            SmoothBrowRai   = Smoother.Create1DWeighted(FaceSmoothWeight);
            SmoothBrowLow   = Smoother.Create1DWeighted(FaceSmoothWeight);
            SmoothSmile     = Smoother.Create1DWeighted(FaceSmoothWeight);
            SmoothKiss      = Smoother.Create1DWeighted(FaceSmoothWeight);
            SmoothMouth     = Smoother.Create1DWeighted(FaceSmoothWeight);
            SmoothTongue    = Smoother.Create1DWeighted(FaceSmoothWeight);
        }
        catch (Exception e)
        {
            // Surface initialization failures in the on-screen error log.
            ErrorLog.text  = "RealSense Error\n";
            ErrorLog.text += e.Message;
        }
    }
Example #27
0
        /// <summary>
        /// Editor utility: creates the FaceData ScriptableObject asset under
        /// Assets/BundleRes/Config when it does not exist yet.
        /// </summary>
        static void CreateFaceData()
        {
            const string path = "Assets/BundleRes/Config/FaceData.asset";

            // Nothing to do when the asset already exists.
            if (File.Exists(path))
            {
                return;
            }

            var asset = ScriptableObject.CreateInstance <FaceData>();
            CommonAssets.CreateAsset <FaceData>("Assets/BundleRes/Config", "FaceData", ".asset", asset);
        }
Example #28
0
 /// <summary>
 /// Fills the faces list with generated female test faces and previews the first.
 /// </summary>
 private void GenerateTestFace()
 {
     faces = new List <FaceData>();
     for (int count = 0; count < generateThisNumber; count++)
     {
         faces.Add(FaceGenerator.GenerateFace(Gender.Female));
     }
     // Show the first generated face.
     fd.DisplayFace(faces[0]);
 }
Example #29
0
        /// <summary>
        /// Thread-pool entry point: locks the FaceData's bitmap region, runs the
        /// native face detector over it (updating the face rectangle fields in
        /// place), and unlocks the bitmap.
        /// </summary>
        /// <param name="obj">Boxed FaceData (thread-pool state object).</param>
        public static void drawRect(object obj)
        {
            // FIX: fail fast with a clear error instead of a NullReferenceException
            // when the state object is not a FaceData ('as' cast had no null check).
            if (!(obj is FaceData faceData))
            {
                throw new ArgumentException("Expected a FaceData state object.", nameof(obj));
            }

            BitmapData bmpData = faceData.bitmap.LockBits(faceData.rect, ImageLockMode.ReadWrite, faceData.bitmap.PixelFormat);
            try
            {
                Program.detectFace(bmpData.Scan0, bmpData.Width, bmpData.Height,
                                   ref faceData.faceTopLeftX, ref faceData.faceTopLeftY,
                                   ref faceData.faceWidth, ref faceData.faceHeight);
            }
            finally
            {
                // FIX: always unlock, even when the native detector throws.
                faceData.bitmap.UnlockBits(bmpData);
            }
        }
Example #30
0
            /// <summary>
            /// Updates the face tracking information for this skeleton: lazily
            /// creates the FaceTracker, runs one tracking pass over the current
            /// color/depth frames, and writes the encoded result to stdout.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this._skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this._skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                // Lazily create the face tracker on the first tracked frame.
                if (this._faceTracker == null)
                {
                    try
                    {
                        this._faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException e)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Console.WriteLine(Converter.EncodeError("FaceTracker.OnFrameReady - creating a new FaceTracker threw an InvalidOperationException: " + e.Message));
                        this._faceTracker = null;
                    }
                }

                if (this._faceTracker != null)
                {
                    FaceTrackFrame frame = this._faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this._lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this._lastFaceTrackSucceeded)
                    {
                        // Copy the animation units (AU0-AU5) and the head pose
                        // (translation + rotation) into a serializable FaceData.
                        var animUnits = frame.GetAnimationUnitCoefficients();
                        var pos       = frame.Translation;
                        var rot       = frame.Rotation;
                        var faceData  = new FaceData();
                        faceData.Au0  = animUnits[0];
                        faceData.Au1  = animUnits[1];
                        faceData.Au2  = animUnits[2];
                        faceData.Au3  = animUnits[3];
                        faceData.Au4  = animUnits[4];
                        faceData.Au5  = animUnits[5];
                        faceData.PosX = pos.X;
                        faceData.PosY = pos.Y;
                        faceData.PosZ = pos.Z;
                        faceData.RotX = rot.X;
                        faceData.RotY = rot.Y;
                        faceData.RotZ = rot.Z;
                        // Emit the encoded tracking data on stdout for the host process.
                        var data = Converter.EncodeFaceTrackingData(faceData);
                        Console.WriteLine(data);
                    }
                }
            }
Example #31
0
        /// <summary>
        /// Builds the mesh viewer: wires up the trackball widget, creates the
        /// printer-bed cube textured with a centimeter grid, and lowers the bed
        /// by one unit before adding it to the scene.
        /// </summary>
        /// <param name="bedXSize">Bed size along X (presumably mm — TODO confirm).</param>
        /// <param name="bedYSize">Bed size along Y (presumably mm — TODO confirm).</param>
        /// <param name="scale">Scale factor stored for loaded parts.</param>
        public MeshViewerWidget(double bedXSize, double bedYSize, double scale)
        {
            ShowWireFrame = false;
            RenderBed = true;
            PartColor = RGBA_Bytes.White;

            this.partScale = scale;
            trackballTumbleWidget = new TrackballTumbleWidget();
            trackballTumbleWidget.DrawRotationHelperCircle = false;
            trackballTumbleWidget.DrawGlContent += trackballTumbleWidget_DrawGlContent;

            AddChild(trackballTumbleWidget);

            // One grid cell per 10 units of bed size.
            CreateBedGridImage((int)(bedXSize / 10), (int)(bedYSize / 10));

            // Build the bed as a thin cube and texture one face with the grid.
            printerBed = PlatonicSolids.CreateCube(bedXSize, bedYSize, 2);
            Face face = printerBed.Faces[0];
            {
                FaceData faceData = new FaceData();
                faceData.Textures.Add(bedCentimeterGridImage);
                face.Data = faceData;
                // Map each face-edge vertex into [0,1] UV space across the bed.
                foreach (FaceEdge faceEdge in face.FaceEdgeIterator())
                {
                    FaceEdgeData edgeUV = new FaceEdgeData();
                    edgeUV.TextureUV.Add(new Vector2((bedXSize / 2 + faceEdge.vertex.Position.x) / bedXSize,
                        (bedYSize / 2 + faceEdge.vertex.Position.y) / bedYSize));
                    faceEdge.Data = edgeUV;
                }
            }

            // Sink the whole bed down by one unit (presumably so its top surface
            // sits at Z = 0 — TODO confirm against the cube's origin convention).
            foreach (Vertex vertex in printerBed.Vertices)
            {
                vertex.Position = vertex.Position - new Vector3(0, 0, 1);
            }

            trackballTumbleWidget.AnchorAll();
        }
Example #32
0
            /// <summary>
            /// Builds per-vertex UV coordinates for the six faces of a textured box.
            /// Each FaceData supplies the pixel rectangle of that face's sub-texture
            /// inside one atlas of totalTextureWidth x totalTextureHeight; the
            /// resulting 36 UVs (6 faces x 2 triangles x 3 verts) go to m_uvs and
            /// each face's normalized atlas rectangle goes to Bounds.
            /// </summary>
            public BoxTextureCoords(int totalTextureWidth, int totalTextureHeight,
                FaceData front, FaceData back, FaceData top, FaceData bottom, FaceData left, FaceData right)
            {
                m_texWidth = totalTextureWidth;
                m_texHeight = totalTextureHeight;

                // Unit-square corner UVs used to assemble each face's triangles.
                Vector2 textureTopLeft = new Vector2(0.0f, 0.0f);
                Vector2 textureTopRight = new Vector2(1.0f, 0.0f);
                Vector2 textureBottomLeft = new Vector2(0.0f, 1.0f);
                Vector2 textureBottomRight = new Vector2(1.0f, 1.0f);

                // Face order is fixed: front, back, top, bottom, left, right.
                List<FaceData> cells = new List<FaceData>
                {
                    front,
                    back,
                    top,
                    bottom,
                    left,
                    right
                };

                // 36 unit-square corners: 6 faces x 6 vertices (two triangles each).
                // Each group of six below is one face's triangle winding.
                List<Vector2> baseCoords = new List<Vector2>
                {
                    textureTopLeft,
                    textureBottomLeft,
                    textureTopRight,
                    textureBottomLeft,
                    textureBottomRight,
                    textureTopRight,

                    textureTopRight,
                    textureTopLeft,
                    textureBottomRight,
                    textureBottomRight,
                    textureTopLeft,
                    textureBottomLeft,

                    textureBottomLeft,
                    textureTopRight,
                    textureTopLeft,
                    textureBottomLeft,
                    textureBottomRight,
                    textureTopRight,

                    textureTopLeft,
                    textureBottomLeft,
                    textureBottomRight,
                    textureTopLeft,
                    textureBottomRight,
                    textureTopRight,

                    textureTopRight,
                    textureBottomLeft,
                    textureBottomRight,
                    textureTopLeft,
                    textureBottomLeft,
                    textureTopRight,

                    textureTopLeft,
                    textureBottomLeft,
                    textureBottomRight,
                    textureTopRight,
                    textureTopLeft,
                    textureBottomRight
                };

                for(int face = 0; face < 6; face++)
                {
                    // Convert the face's pixel rectangle to normalized atlas coords.
                    Vector2 pixelCoords = new Vector2(cells[face].Rect.X, cells[face].Rect.Y);
                    float normalizeX = (float) (cells[face].Rect.Width) / (float) (totalTextureWidth);
                    float normalizeY = (float) (cells[face].Rect.Height) / (float) (totalTextureHeight);
                    Vector2 normalizedCoords = new Vector2(pixelCoords.X / (float) totalTextureWidth, pixelCoords.Y / (float) totalTextureHeight);
                    // Inset the bounds by 0.001 on every side (presumably to avoid
                    // sampling bleed from adjacent atlas cells — TODO confirm).
                    Bounds[face] = new Vector4(normalizedCoords.X + 0.001f, normalizedCoords.Y + 0.001f, normalizedCoords.X + normalizeX - 0.001f, normalizedCoords.Y + normalizeY - 0.001f);

                    for(int vert = 0; vert < 6; vert++)
                    {
                        int index = vert + face * 6;

                        // FlipXY == false swaps the X/Y of the base corner UV;
                        // FlipXY == true keeps them as-is.
                        if(!cells[face].FlipXY)
                        {
                            m_uvs[index] = new Vector2(normalizedCoords.X + baseCoords[index].Y * normalizeX, normalizedCoords.Y + baseCoords[index].X * normalizeY);
                        }
                        else
                        {
                            m_uvs[index] = new Vector2(normalizedCoords.X + baseCoords[index].X * normalizeX, normalizedCoords.Y + baseCoords[index].Y * normalizeY);
                        }
                    }
                }
            }
        /// <summary>
        /// Build the renderable structures for a newly seen entity and add them to the
        /// per-region render lists. Avatars bypass meshing and go straight onto the
        /// avatar display list; prims are meshed (sculpted or faceted) and get per-face
        /// OpenGL-friendly data attached via <c>FaceData</c>.
        /// </summary>
        /// <param name="ent">the entity to create render information for</param>
        private void CreateNewPrim(LLEntityBase ent)
        {
            m_log.Log(LogLevel.DRENDERDETAIL, "Create new prim {0}", ent.Name.Name);
            // entity render info is kept per region. Get the region prim structure
            RegionRenderInfo rri = GetRegionRenderInfo(ent.RegionContext);

            IEntityAvatar av;
            if (ent.TryGet<IEntityAvatar>(out av)) {
                // if this entity is an avatar, just put it on the display list
                AddAvatarToRenderList(rri, av);
                return;
            }

            OMV.Primitive prim = ent.Prim;
            /* don't do foliage yet
            if (prim.PrimData.PCode == OMV.PCode.Grass
                    || prim.PrimData.PCode == OMV.PCode.Tree
                    || prim.PrimData.PCode == OMV.PCode.NewTree) {
            lock (renderFoliageList)
                renderFoliageList[prim.LocalID] = prim;
            return;
            }
             */

            RenderablePrim render = new RenderablePrim();
            render.Prim = prim;
            render.acontext = ent.AssetContext;
            render.rcontext = ent.RegionContext;
            render.Position = prim.Position;
            render.Rotation = prim.Rotation;
            render.isVisible = true;    // initially assume visible

            if (m_meshMaker == null) {
                m_meshMaker = new Renderer.Mesher.MeshmerizerR();
                m_meshMaker.ShouldScaleMesh = false;
            }

            if (!TryGenerateMesh(ent, prim, ref render)) {
                // either generation failed (already logged) or a sculpt texture is
                // still downloading and this method will be re-run when it arrives
                return;
            }

            BuildFaceData(ent, prim, ref render);

            lock (rri.renderPrimList) {
                rri.renderPrimList[prim.LocalID] = render;
            }
        }

        /// <summary>
        /// Register an avatar entity on the region's avatar display list (once).
        /// </summary>
        /// <param name="rri">render info for the region the avatar is in</param>
        /// <param name="av">the avatar to register</param>
        private void AddAvatarToRenderList(RegionRenderInfo rri, IEntityAvatar av)
        {
            lock (rri.renderAvatarList) {
                if (!rri.renderAvatarList.ContainsKey(av.LGID)) {
                    RenderableAvatar ravv = new RenderableAvatar();
                    ravv.avatar = av;
                    rri.renderAvatarList.Add(av.LGID, ravv);
                }
            }
        }

        /// <summary>
        /// Generate the mesh for a prim into <c>render.Mesh</c>.
        /// </summary>
        /// <param name="ent">entity being meshed (used for asset context and logging)</param>
        /// <param name="prim">the underlying primitive</param>
        /// <param name="render">receives the generated mesh</param>
        /// <returns>false when no mesh was produced: generation failed, or the sculpt
        /// texture is not downloaded yet (in which case a download is requested and
        /// <c>CreateNewPrim</c> is invoked again from the completion callback)</returns>
        private bool TryGenerateMesh(LLEntityBase ent, OMV.Primitive prim, ref RenderablePrim render)
        {
            if (prim.Sculpt != null) {
                EntityNameLL textureEnt = EntityNameLL.ConvertTextureWorldIDToEntityName(ent.AssetContext, prim.Sculpt.SculptTexture);
                System.Drawing.Bitmap textureBitmap = ent.AssetContext.GetTexture(textureEnt);
                if (textureBitmap == null) {
                    // the texture is not available. Request it.
                    // Note that we just call this routine again when it is available. Hope it's not recursive
                    ent.AssetContext.DoTextureLoad(textureEnt, AssetContextBase.AssetType.SculptieTexture,
                                delegate(string name, bool trans) {
                                    CreateNewPrim(ent);
                                    return;
                                }
                    );
                    return false;
                }
                render.Mesh = m_meshMaker.GenerateSculptMesh(textureBitmap, prim, OMVR.DetailLevel.Medium);
                textureBitmap.Dispose();
            }
            else {
                render.Mesh = m_meshMaker.GenerateFacetedMesh(prim, OMVR.DetailLevel.High);
            }

            if (render.Mesh == null) {
                // mesh generation failed
                m_log.Log(LogLevel.DBADERROR, "FAILED MESH GENERATION: not generating new prim {0}", ent.Name.Name);
                return false;
            }
            return true;
        }

        /// <summary>
        /// Create a FaceData struct for each face of the generated mesh that stores the
        /// 3D data in an OpenGL friendly format (flat float arrays of vertices,
        /// texcoords and normals), and request downloads for any face textures not yet
        /// in the texture table.
        /// </summary>
        /// <param name="ent">entity the mesh belongs to (for asset context)</param>
        /// <param name="prim">the underlying primitive (source of face textures)</param>
        /// <param name="render">holds the mesh whose faces are annotated</param>
        private void BuildFaceData(LLEntityBase ent, OMV.Primitive prim, ref RenderablePrim render)
        {
            for (int j = 0; j < render.Mesh.Faces.Count; j++) {
                OMVR.Face face = render.Mesh.Faces[j];
                FaceData data = new FaceData();

                // Vertices for this face
                data.Vertices = new float[face.Vertices.Count * 3];
                for (int k = 0; k < face.Vertices.Count; k++) {
                    data.Vertices[k * 3 + 0] = face.Vertices[k].Position.X;
                    data.Vertices[k * 3 + 1] = face.Vertices[k].Position.Y;
                    data.Vertices[k * 3 + 2] = face.Vertices[k].Position.Z;
                }

                // Indices for this face
                data.Indices = face.Indices.ToArray();

                // Texture transform for this face. Must run before the texcoords are
                // copied out since it rewrites the vertices' TexCoord values.
                OMV.Primitive.TextureEntryFace teFace = prim.Textures.GetFace((uint)j);
                m_meshMaker.TransformTexCoords(face.Vertices, face.Center, teFace);

                // Texcoords for this face
                data.TexCoords = new float[face.Vertices.Count * 2];
                for (int k = 0; k < face.Vertices.Count; k++) {
                    data.TexCoords[k * 2 + 0] = face.Vertices[k].TexCoord.X;
                    data.TexCoords[k * 2 + 1] = face.Vertices[k].TexCoord.Y;
                }

                // Normals for this face
                data.Normals = new float[face.Vertices.Count * 3];
                for (int k = 0; k < face.Vertices.Count; k++) {
                    data.Normals[k * 3 + 0] = face.Vertices[k].Normal.X;
                    data.Normals[k * 3 + 1] = face.Vertices[k].Normal.Y;
                    data.Normals[k * 3 + 2] = face.Vertices[k].Normal.Z;
                }

                // Texture for this face
                RequestFaceTexture(ent, teFace);

                // Set the UserData for this face to our FaceData struct and write the
                // face back (in case OMVR.Face is a value type the local is a copy)
                face.UserData = data;
                render.Mesh.Faces[j] = face;
            }
        }

        /// <summary>
        /// If the face references a real (non-zero, non-default-white) texture that is
        /// not yet in the texture table, enter a placeholder and request its download.
        /// </summary>
        /// <param name="ent">entity the face belongs to (for asset context)</param>
        /// <param name="teFace">the face's texture entry</param>
        private void RequestFaceTexture(LLEntityBase ent, OMV.Primitive.TextureEntryFace teFace)
        {
            if (teFace.TextureID != OMV.UUID.Zero &&
                        teFace.TextureID != OMV.Primitive.TextureEntry.WHITE_TEXTURE) {
                lock (Textures) {
                    if (!Textures.ContainsKey(teFace.TextureID)) {
                        // temporarily add the entry to the table so we don't request it multiple times
                        Textures.Add(teFace.TextureID, new TextureInfo(0, true));
                        // We haven't constructed this image in OpenGL yet, get ahold of it
                        AssetContextBase.RequestTextureLoad(
                            EntityNameLL.ConvertTextureWorldIDToEntityName(ent.AssetContext, teFace.TextureID),
                            AssetContextBase.AssetType.Texture,
                            OnTextureDownloadFinished);
                    }
                }
            }
        }