Code Example #1
        private void drawCamera(RegisteredCamera thisCamera)
        {
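            // Renders a small marker (a single triangle from the shared Vertices buffer)
            // at this camera's registered position, using the control's current ViewMatrix.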
            // Set transform matrices.
            float aspect = GraphicsDevice.Viewport.AspectRatio;

            effect.World = Matrix.Identity;                                                    //Matrix.CreateScale(cameraModelScaling) * thisCamera.RotationMatrix;

            effect.World = effect.World * Matrix.CreateTranslation(thisCamera.PositionVector); // *Matrix.CreateTranslation(viewTranslationVector);

            //effect.World = effect.World * Matrix.CreateFromYawPitchRoll(yaw * dragSensitivity, pitch * dragSensitivity, roll * dragSensitivity);

            //effect.World = effect.World * Matrix.CreateScale(globalScaling);

            //effect.View = Matrix.CreateLookAt(new Vector3(0, 0, -5),
            //                                  Vector3.Zero, Vector3.Up);

            effect.View = ViewMatrix;
            //effect.View = Matrix.CreateFromYawPitchRoll(yaw * dragSensitivity, pitch * dragSensitivity, roll * dragSensitivity) * Matrix.CreateLookAt(new Vector3(0, 0, -5 * globalScaling), Vector3.Zero, Vector3.Up);


            effect.Projection = Matrix.CreatePerspectiveFieldOfView(1, aspect, 0.01f, 100);

            // Set renderstates.
            GraphicsDevice.RasterizerState = RasterizerState.CullNone;

            // Draw the triangle.
            effect.CurrentTechnique.Passes[0].Apply();

            GraphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleList,
                                              Vertices, 0, 1);
        }
Code Example #2
        /// <summary>
        /// Draws the control.
        /// </summary>
        protected override void Draw()
        {
            if (cameraConfig != null)
            {
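                // Centre the default view on the centroid of the registered camera positions.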
                float averageX = cameraConfig.Cameras.Select(x => x.PositionVector.X).Average();
                float averageY = cameraConfig.Cameras.Select(x => x.PositionVector.Y).Average();
                float averageZ = cameraConfig.Cameras.Select(x => x.PositionVector.Z).Average();

                if (IRLocationRender)
                {
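                    // IR-tracking view: render from the currently tracked position, either looking
                    // at the model or with the drag yaw/pitch/roll rotation applied.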
                    //ViewMatrix = Matrix.CreateTranslation(-currentTrackX, -currentTrackY, -currentTrackZ);
                    //ViewMatrix = ViewMatrix;// * Matrix.CreateFromYawPitchRoll(yaw * dragSensitivity, roll * dragSensitivity, pitch * dragSensitivity);
                    ViewMatrix = Matrix.Identity;

                    if (ViewTrackModel)
                    {
                        ViewMatrix = ViewMatrix * Matrix.CreateLookAt(new Vector3(currentTrackX, currentTrackY, currentTrackZ), new Vector3(ModelX, ModelY, ModelZ), new Vector3(customUpX, customUpY, customUpZ));
                    }
                    else
                    {
                        ViewMatrix = Matrix.CreateTranslation(-currentTrackX, -currentTrackY, -currentTrackZ);
                        ViewMatrix = ViewMatrix * Matrix.CreateFromYawPitchRoll(yaw * dragSensitivity, roll * dragSensitivity, pitch * dragSensitivity);
                    }
                }
                else
                {
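                    // Orbit-style view: translate the camera centroid to the origin, apply the
                    // yaw/pitch/roll rotation scaled by dragSensitivity, then back off along Z by 5 * globalScaling.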
                    ViewMatrix = Matrix.CreateTranslation(-averageX, -averageY, -averageZ);
                    ViewMatrix = ViewMatrix * Matrix.CreateFromYawPitchRoll(yaw * dragSensitivity, roll * dragSensitivity, pitch * dragSensitivity);
                    ViewMatrix = ViewMatrix * Matrix.CreateTranslation(0, 0, -(5 * globalScaling));// *Matrix.CreateReflection(new Plane(new Vector4(1, 1, 0, 0)));
                }
                GraphicsDevice.Clear(Color.CornflowerBlue);

                try
                {
                    if (cameraConfig != null)
                    {
                        IntersectionLines.Clear();

                        for (int i = 0; i < cameraConfig.Cameras.Count; i++)
                        {
                            RegisteredCamera thisCamera = cameraConfig.Cameras[i];
                            drawCamera(thisCamera);
                            //drawCamera2(thisCamera);
                            drawRays(thisCamera, i);
                        }
                        //drawPoints(cameraConfig);
                        drawIntersections();
                        if (ShowModel)
                        {
                            drawModel(ModelX, ModelY, ModelZ);
                        }
                    }
                }
                catch (Exception)
                {
                    // Swallow rendering exceptions so a single bad frame does not crash the control.
                }
            }
        }
Code Example #3
File: CameraConfig.cs  Project: onarf/Free3DTrack
        private void CreateFromBundlerFile(string configFile)
        {
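            // Bundler .out layout assumed by this parser:
            //   lines[0] is the "# Bundle file" header, lines[1] holds "<num_cameras> <num_points>";
            //   each camera then takes five lines (<f> <k1> <k2>, three rotation-matrix rows, one translation row),
            //   and each reconstructed 3D point takes three lines, of which only the position line is read (stride of 3).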
            string text = System.IO.File.ReadAllText(configFile);
            string[] lines = text.Split('\r');

            int numCameras = int.Parse(lines[1].Split(' ')[0]);
            int numPoints = int.Parse(lines[1].Split(' ')[1]);

            for (int i = 0; i < numCameras; i++)
            {
                //Extract the focal length and the radial distortion coefficients
                string[] line1split = lines[2 + i*5].Split(' ');
                float focalLength = float.Parse(line1split[0]);
                float k1 = float.Parse(line1split[1]);
                float k2 = float.Parse(line1split[2]);

                //Camera Rotation matrix
                //m1, m2, m3
                //m4, m5, m6
                //m7, m8, m9

                string[] line2split = lines[3 + i * 5].Split(' ');
                float m1 = float.Parse(line2split[0]);
                float m2 = float.Parse(line2split[1]);
                float m3 = float.Parse(line2split[2]);

                string[] line3split = lines[4 + i * 5].Split(' ');
                float m4 = float.Parse(line3split[0]);
                float m5 = float.Parse(line3split[1]);
                float m6 = float.Parse(line3split[2]);

                string[] line4split = lines[5 + i * 5].Split(' ');
                float m7 = float.Parse(line4split[0]);
                float m8 = float.Parse(line4split[1]);
                float m9 = float.Parse(line4split[2]);

                //Camera translation vector
                string[] line5split = lines[6 + i * 5].Split(' ');
                float t1 = float.Parse(line5split[0]);
                float t2 = float.Parse(line5split[1]);
                float t3 = float.Parse(line5split[2]);

                //Create the new camera and store:
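                //The 3x3 Bundler rotation is embedded in the upper-left of a 4x4 XNA matrix.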
                Matrix RotationMatrix = new Matrix(m1, m2, m3, 0, m4, m5, m6, 0, m7, m8, m9, 0, 0, 0, 0, 1);
                Vector3 TranslationVector = new Vector3(t1, t2, t3);
                RegisteredCamera thisCamera = new RegisteredCamera(focalLength, k1, k2, RotationMatrix, TranslationVector, 480, 640);
                //thisCamera.TrackedPoints.Add(new TrackedImagePoint(320, 240));
                //thisCamera.TrackedPoints.Add(new TrackedImagePoint(320, -240));
                //thisCamera.TrackedPoints.Add(new TrackedImagePoint(-320, 240));
                //thisCamera.TrackedPoints.Add(new TrackedImagePoint(-320, -240));
                //thisCamera.TrackedPoints.Add(new TrackedImagePoint(0, 0));

                //Add some matched image points, to check the calibration...
                System.IO.DirectoryInfo thisDIR = System.IO.Directory.GetParent(configFile);
                System.IO.DirectoryInfo upDIR = System.IO.Directory.GetParent(thisDIR.ToString());

                string sDIR = upDIR.ToString();
                string keyText = System.IO.File.ReadAllText(sDIR + "\\" + i + ".key");

                string[] keyLines = keyText.Split('\r');

                for (int y = 0; y < 20; y++)
                {
                    string[] matchcoords = keyLines[1+(y*8)].Split(' ');
                    float matchY = float.Parse(matchcoords[0]);
                    float matchX = float.Parse(matchcoords[1]);
                    thisCamera.TrackedPoints.Add(new TrackedImagePoint(matchX, matchY));
                }

                Cameras.Add(thisCamera);
            }

            int startLine = 1 + 6 + (numCameras - 1) * 5;
            CameraMatches cameraMatches = new CameraMatches();

            //Get the model points
            for (int i = 0; i < numPoints; i++)
            {
                string[] linesplit = lines[startLine + i * 3].Split(' ');
                float x = float.Parse(linesplit[0]);
                float y = float.Parse(linesplit[1]);
                float z = float.Parse(linesplit[2]);
                Point3D thisPoint = new Point3D(x, y, z);
                cameraMatches.points.Add(thisPoint);
            }

            modelPoints = cameraMatches;
        }
Code Example #4
        private void drawRays(RegisteredCamera thisCamera, int i)
        {
            //For each tracked point, project a ray from the camera centre out through the point:
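            //Each ray starts at the camera's position and ends at the back-projection of the (averaged)
            //tracked image point; it is stored in IntersectionLines for later intersection and drawn as a red line.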

            float aspect = GraphicsDevice.Viewport.AspectRatio;

            effect.World = Matrix.Identity * Matrix.CreateTranslation(viewTranslationVector);

            //effect.World = effect.World * Matrix.CreateTranslation(thisCamera.PositionVector);

            //effect.World = effect.World * Matrix.CreateFromYawPitchRoll(yaw * dragSensitivity, pitch * dragSensitivity, roll * dragSensitivity);

            //effect.World = effect.World * Matrix.CreateScale(globalScaling);

            //effect.View = Matrix.CreateFromYawPitchRoll(yaw * dragSensitivity, pitch * dragSensitivity, roll * dragSensitivity) * Matrix.CreateLookAt(new Vector3(0, 0, -5 * globalScaling),
            //                                  Vector3.Zero, Vector3.Up);

            effect.View = ViewMatrix;

            effect.Projection = Matrix.CreatePerspectiveFieldOfView(1, aspect, 0.01f, 100);

            //for (int i = 0; i < thisCamera.TrackedPoints.Count; i++)
            //{

            //int i = (int)rotAngleX;
            //if (i < thisCamera.TrackedPoints.Count)
            //{

            //TrackedImagePoint thisPoint = thisCamera.TrackedPoints[i];
            int testy = 0;

            //if (WebCamPermute != null)
            //{
            //int thisCamIndex = WebCamPermute[i];
            int thisCamIndex = i;

            if (thisCamIndex < WebCamsEye.Count)
            {
                if (WebCamsEye[thisCamIndex].FilteredTrackedPoints != null)
                {
                    if (WebCamsEye[thisCamIndex].FilteredTrackedPoints.TrackedPoints != null)
                    {
                        if (WebCamsEye[thisCamIndex].FilteredTrackedPoints.TrackedPoints.Count > 0)
                        {
                            //if (WebCamPermute.Count > 0)
                            //{
                            try
                            {
                                for (int r = 0; r < WebCamsEye[thisCamIndex].FilteredTrackedPoints.TrackedPoints.Count; r++)
                                {
                                    WebCamTrack thisTrack = WebCamsEye[thisCamIndex].FilteredTrackedPoints.TrackedPoints[r];

                                    if (thisTrack.Points.Count > 0)
                                    {
                                        Vector3 WordCoordinates = thisCamera.ImageToWorld(thisTrack.Points.Select(x => x.X).Average(), thisTrack.Points.Select(x => x.Y).Average(), -100, transposeRot, negateRot, invertRot, invertZ, rotAngleX, rotAngleY, rotAngleZ);
                                        //Vector3 WordCoordinates = thisCamera.ImageToWorld(thisTrack.Points[0].X, thisTrack.Points[0].Y, -100, transposeRot, negateRot, invertRot, invertZ, rotAngleX, rotAngleY, rotAngleZ);

                                        Vector3 startPoint = new Vector3(thisCamera.PositionVector.X, thisCamera.PositionVector.Y, thisCamera.PositionVector.Z);
                                        Vector3 endPoint = new Vector3(WordCoordinates.X, (float)WordCoordinates.Y, (float)WordCoordinates.Z);

                                        //trackInCamera.Add(thisCamIndex);
                                        IntersectionLines.Add(new Line3D { LineStart = new Point3D(startPoint.X, startPoint.Y, startPoint.Z), LineEnd = new Point3D(endPoint.X, endPoint.Y, endPoint.Z) });

                                        effect.CurrentTechnique.Passes[0].Apply();
                                        var vertices = new[] { new VertexPositionColor(startPoint, Microsoft.Xna.Framework.Color.Red), new VertexPositionColor(endPoint, Microsoft.Xna.Framework.Color.Red) };
                                        effect.GraphicsDevice.DrawUserPrimitives(PrimitiveType.LineList, vertices, 0, 1);
                                    }
                                    else
                                    {
                                        //trackInCamera[thisCamIndex] = false;
                                    }
                                }
                            }
                            catch (Exception test)
                            {
                                testy = 1;
                            }
                            return;
                            //}
                        }
                        else
                        {
                            //trackInCamera[thisCamIndex] = false;
                        }
                    }
                }
                //}
                //IntersectionLines[thisCamIndex].Displ
            }
        }
Code Example #5
        private void drawCamera2(RegisteredCamera thisCamera)
        {
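            // Variant of drawCamera: places the marker using the camera's own RotationMatrix and
            // TranslationVector, with a fixed look-at view instead of the shared ViewMatrix.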
            // Set transform matrices.
            float aspect = GraphicsDevice.Viewport.AspectRatio;

            effect.World = Matrix.CreateScale(cameraModelScaling) * thisCamera.RotationMatrix;

            effect.World = effect.World * Matrix.CreateTranslation(thisCamera.TranslationVector);

            effect.World = effect.World * Matrix.CreateFromYawPitchRoll(yaw * dragSensitivity, pitch * dragSensitivity, roll * dragSensitivity);

            effect.World = effect.World * Matrix.CreateScale(globalScaling);

            effect.View = Matrix.CreateLookAt(new Vector3(0, 0, -5),
                                              Vector3.Zero, Vector3.Up);

            effect.Projection = Matrix.CreatePerspectiveFieldOfView(1, aspect, 0.01f, 100);

            // Set renderstates.
            GraphicsDevice.RasterizerState = RasterizerState.CullNone;

            // Draw the triangle.
            effect.CurrentTechnique.Passes[0].Apply();

            GraphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleList,
                                              Vertices, 0, 1);
        }