public trackedPointsViewer(BaseTypes.trackingHistory TrackingHistory)
        {
            //
            // Required for Windows Form Designer support
            //
            this.TrackingHistory=TrackingHistory; // keep the tracking history handed in by the caller
            InitializeComponent();
            refreshData();

            //
            // TODO: Add any constructor code after InitializeComponent call
            //
        }
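        // A minimal usage sketch, not part of the original source: assuming trackedPointsViewer
        // is a Form and trackingHist holds the current BaseTypes.trackingHistory (as in the
        // other examples), the main form could open the viewer like this.
        private void showTrackedPoints()
        {
            if (trackingHist == null)
            {
                Message("No tracking history available yet...");
                return;
            }

            using (trackedPointsViewer viewer = new trackedPointsViewer(trackingHist))
            {
                viewer.ShowDialog(this); // modal; disposed when the using block ends
            }
        }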
Example #2
        // Handles closing the current project (to be re-implemented)
        private void CloseCurrentProject()
        {
            if (app_status.GetStatus()>=_app_status.project_active)
            {
                saveQuery();
                trackingPoints=null;
                projectSettings=null;

                viewportXY.Dispose();
                viewportXZ.Dispose();
                viewportYZ.Dispose();
                tempTrackingPoint=null;
                tf=null;
                trackingPointsBasicInfo=null;
                totalTrackingPoints=0;

                trackingHist = null;
                viewportXY=null;
                viewportXZ=null;
                viewportYZ=null;
                trackingPointsLB.Items.Clear();
                app_status.SetStatus(_app_status.started);
            }
            else Message("No active project detected");
        }
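        // Hypothetical sketch of saveQuery() as called above; its body is not among these
        // examples. Assumed behaviour: prompt to save unsaved work before the project is torn
        // down. projectSaved and Message() are taken from the other examples; saveCurrentProject()
        // is an assumed name for the save routine.
        private void saveQuery()
        {
            if (!projectSaved &&
                MessageBox.Show("Save changes to the current project?", "Unsaved changes",
                    MessageBoxButtons.YesNo, MessageBoxIcon.Question) == DialogResult.Yes)
            {
                saveCurrentProject(); // assumed save routine
            }
        }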
Example #3
 private void trackFullMovie()
 {
     if (app_status.GetStatus()>=_app_status.points_trained)
     {
         TimeStart = DateTime.Now;
         trackingCompleted=false;
         framesToTrack=0; // 0 = track until the end of the movie
         trackingHist=new BaseTypes.trackingHistory(projectSettings.projectName, trackingPointsBasicInfo,(int)viewportXY.getStartFrame(),(int)viewportXZ.getStartFrame(),(int)viewportYZ.getStartFrame() );
         tf = new BaseTypes.timeFrame(0);
         totalTrackingPoints=trackingPoints.Count;
         Message("Points to track : "+Convert.ToString(totalTrackingPoints));
         app_status.SetStatus(_app_status.tracking);
         trackCurrentFrame();
     }
     else Message("Unable to begin tracking! Some points are not trained!");
 }
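 // Hypothetical helper, not part of the original examples: once tracking finishes
 // (trackingCompleted set to true), the elapsed time since TimeStart could be reported
 // the same way training time is reported in InitializeTracking below.
 private void reportTrackingTime()
 {
     TimeSpan total = DateTime.Now - TimeStart;
     Message("Tracking time: " + total.Minutes + "m " + total.Seconds + "s " + total.Milliseconds + "msec");
 }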
Example #4
        private void trackCertainMovieLen()
        {
            if (app_status.GetStatus()>=_app_status.points_trained)
            {
                Message("Number of frames querying...");
                using (framesToTrackDialog xForm = new framesToTrackDialog())
                {
                    if (xForm.ShowDialog(this) == DialogResult.OK)
                    {
                        TimeStart = DateTime.Now;
                        trackingCompleted=false;
                        framesToTrack=xForm.framesCount;
                        trackingHist=new BaseTypes.trackingHistory(projectSettings.projectName, trackingPointsBasicInfo,(int)viewportXY.getStartFrame(),(int)viewportXZ.getStartFrame(),(int)viewportYZ.getStartFrame() );
                        tf = new BaseTypes.timeFrame(0);
                        totalTrackingPoints=trackingPoints.Count;
                        Message("Frames to track :"+Convert.ToString(framesToTrack));
                        Message("Points to track : "+Convert.ToString(totalTrackingPoints));
                        app_status.SetStatus(_app_status.tracking);
                        trackCurrentFrame();

                    }
                    else Message("Tracking certain movie length cancelled...");
                }
            }
            else Message("Unable to begin tracking! Some points are not trained!");
        }
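        // Hypothetical sketch of the framesToTrackDialog used above; its real implementation is
        // not among these examples. Assumed layout: a NumericUpDown plus OK/Cancel buttons,
        // exposing the chosen value through the framesCount property read by trackCertainMovieLen().
        public class framesToTrackDialog : Form
        {
            private NumericUpDown framesUpDown = new NumericUpDown();
            private Button okBtn = new Button();
            private Button cancelBtn = new Button();

            // number of frames selected by the user
            public int framesCount
            {
                get { return (int)framesUpDown.Value; }
            }

            public framesToTrackDialog()
            {
                Text = "Frames to track";
                framesUpDown.Minimum = 1;
                framesUpDown.Maximum = 100000;
                framesUpDown.Location = new System.Drawing.Point(12, 12);

                okBtn.Text = "OK";
                okBtn.DialogResult = DialogResult.OK;
                okBtn.Location = new System.Drawing.Point(12, 44);

                cancelBtn.Text = "Cancel";
                cancelBtn.DialogResult = DialogResult.Cancel;
                cancelBtn.Location = new System.Drawing.Point(92, 44);

                Controls.AddRange(new Control[] { framesUpDown, okBtn, cancelBtn });
                AcceptButton = okBtn;
                CancelButton = cancelBtn;
                StartPosition = FormStartPosition.CenterParent;
            }
        }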
Example #5
        // Handles loading a project from an .mcp file
        private void LoadProject()
        {
            Message("Loading project");

            loadProjectD.Filter = "Motion capture project files (*.mcp)|*.mcp";

            if((loadProjectD.ShowDialog() == System.Windows.Forms.DialogResult.OK) &&
                (loadProjectD.FileName!=""))
            {

                if (viewportXY!=null || viewportXZ!=null || viewportYZ!=null)
                    CloseCurrentProject();

                InitializeSettings("temp");
                IFormatter formatter = new BinaryFormatter();
                Stream stream = new FileStream(loadProjectD.FileName, FileMode.Open, FileAccess.Read, FileShare.Read);
                projectSettings = (BaseTypes.Settings)formatter.Deserialize(stream);
                stream.Close();
                InitializeViewports();
                trackingPoints = new ArrayList();
                app_status.SetStatus(_app_status.project_active);

                if (projectSettings.viewportXYScalingType==BaseTypes.scalingType.Custom)
                    viewportXY.loadMedia(projectSettings.viewportXYFileName,(int)projectSettings.viewportXYHorizontalRes,(int)projectSettings.viewportXYVerticalRes);
                else if (projectSettings.viewportXYScalingType==BaseTypes.scalingType.ScaleBy)
                    viewportXY.loadMedia(projectSettings.viewportXYFileName,projectSettings.viewportXYScalingFactor);
                else viewportXY.loadMedia(projectSettings.viewportXYFileName);

                if (projectSettings.viewportXZScalingType==BaseTypes.scalingType.Custom)
                    viewportXZ.loadMedia(projectSettings.viewportXZFileName,(int)projectSettings.viewportXZHorizontalRes,(int)projectSettings.viewportXZVerticalRes);
                else if (projectSettings.viewportXZScalingType==BaseTypes.scalingType.ScaleBy)
                    viewportXZ.loadMedia(projectSettings.viewportXZFileName,projectSettings.viewportXZScalingFactor);
                else viewportXZ.loadMedia(projectSettings.viewportXZFileName);

                if (projectSettings.viewportYZScalingType==BaseTypes.scalingType.Custom)
                    viewportYZ.loadMedia(projectSettings.viewportYZFileName,(int)projectSettings.viewportYZHorizontalRes,(int)projectSettings.viewportYZVerticalRes);
                else if (projectSettings.viewportYZScalingType==BaseTypes.scalingType.ScaleBy)
                    viewportYZ.loadMedia(projectSettings.viewportYZFileName,projectSettings.viewportYZScalingFactor);
                else viewportYZ.loadMedia(projectSettings.viewportYZFileName);

                viewportXY.setStartFrame(projectSettings.viewportXYStartFrame);
                viewportXZ.setStartFrame(projectSettings.viewportXZStartFrame);
                viewportYZ.setStartFrame(projectSettings.viewportYZStartFrame);
                viewportXY.grayscale=projectSettings.viewportXYGreyScale;
                viewportXY.brightnessCorrection=projectSettings.viewportXYBrightnessCorrectionValue;
                viewportXY.contrastCorrection=projectSettings.viewportXYContrastCorrectionValue;
                viewportXZ.grayscale=projectSettings.viewportXZGreyScale;
                viewportXZ.brightnessCorrection=projectSettings.viewportXZBrightnessCorrectionValue;
                viewportXZ.contrastCorrection=projectSettings.viewportXZContrastCorrectionValue;
                viewportYZ.grayscale=projectSettings.viewportYZGreyScale;
                viewportYZ.brightnessCorrection=projectSettings.viewportYZBrightnessCorrectionValue;
                viewportYZ.contrastCorrection=projectSettings.viewportYZContrastCorrectionValue;
                app_status.SetStatus(_app_status.viewports_ready);

                if (File.Exists(loadProjectD.FileName+"th"))
                {
                    formatter = new BinaryFormatter();
                    stream = new FileStream(loadProjectD.FileName+"th", FileMode.Open, FileAccess.Read, FileShare.Read);
                    trackingHist = (BaseTypes.trackingHistory)formatter.Deserialize(stream);
                    stream.Close();
                    foreach (BaseTypes.trackingPointLite tpp in trackingHist.initialTrackingPointsSet)
                    {

                        trackingPoint tempTrackingPoint  = new trackingPoint(tpp.pName,tpp.xyCoordX,tpp.xyCoordY,tpp.xzCoordX,tpp.xzCoordZ,tpp.yzCoordY,tpp.yzCoordZ,tpp.xyRadius,tpp.xzRadius,tpp.yzRadius, tpp.pColor);
                        trackingPoints.Add(tempTrackingPoint);

                        if (trackingPointsBasicInfo==null) trackingPointsBasicInfo = new ArrayList();
                        trackingPointsBasicInfo.Add(tpp);

                    }
                    vprtsChngMode(); //visualizePoints() doesn't work in playback mode
                    visualizePoints();
                    trackingPointsLB.Items.Clear();
                    foreach (trackingPoint tp in trackingPoints)
                        trackingPointsLB.Items.Add(tp.PName);

                    app_status.SetStatus(_app_status.distance_calibrated);
                    app_status.SetStatus(_app_status.points_defined);

                }
                else if (projectSettings.viewportXY1pxDistance!=-1 && projectSettings.viewportXZ1pxDistance!=-1 && projectSettings.viewportYZ1pxDistance!=-1)
                    app_status.SetStatus(_app_status.distance_calibrated);

                this.Text="LW3d Motion Capture Utility : "+projectSettings.projectName;

                Message("New project loaded: "+loadProjectD.FileName);

            }
            else Message("New project aborted");
        }
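        // Sketch of the matching save path, assumed rather than taken from these examples:
        // LoadProject above deserializes projectSettings from the selected .mcp file and
        // trackingHist from a companion file with a "th" suffix, so a save routine would
        // serialize them the same way (saveProjectSketch is an assumed name).
        private void saveProjectSketch(string fileName)
        {
            IFormatter formatter = new BinaryFormatter();

            // project settings -> .mcp file
            using (Stream stream = new FileStream(fileName, FileMode.Create, FileAccess.Write, FileShare.None))
                formatter.Serialize(stream, projectSettings);

            // tracking history -> companion "th" file, only if any tracking has been recorded
            if (trackingHist != null)
                using (Stream historyStream = new FileStream(fileName + "th", FileMode.Create, FileAccess.Write, FileShare.None))
                    formatter.Serialize(historyStream, trackingHist);

            projectSaved = true;
        }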
Example #6
        private void InitializeTracking()
        {
            if (app_status.GetStatus()>=_app_status.points_defined)
            {
                if (MessageBox.Show("Attempting to train neural networks. Application may be not responsive during training.", "ANN Training", MessageBoxButtons.YesNo, MessageBoxIcon.Question) == DialogResult.Yes)
                {

                    DateTime start = DateTime.Now;
                   // string timeStarted = DateTime.Now.ToString("mm:ss:fff");
                    trackingHist = new BaseTypes.trackingHistory(projectSettings.projectName, trackingPointsBasicInfo, (int)viewportXY.getStartFrame(), (int)viewportXZ.getStartFrame(), (int)viewportYZ.getStartFrame());

                    foreach (trackingPoint tp in trackingPoints)

                        tp.FrameProcessed += new trackingPoint.FrameProcessedEventHandler(frameTrackingCompletedHandler);

                    if ((viewportXY.getStartFrame() == viewportXY.currentFrame) &&
                        (viewportXZ.getStartFrame() == viewportXZ.currentFrame) &&
                        (viewportYZ.getStartFrame() == viewportYZ.currentFrame))
                    {

                        foreach (trackingPoint tp in trackingPoints)
                        {

                            tp.initPointBitmaps(viewportXY.GetFrame(true), viewportXZ.GetFrame(true), viewportYZ.GetFrame(true));//just in case

                            tp.initAndTrainANN(projectSettings.neuralNetworkTrainingIterations,
                                projectSettings.neuralNetworkTrainingRate,
                                4,//const
                                projectSettings.neuralNetworkHiddenLayersCount,
                                projectSettings.neuralNetworkHiddenLayersSize,
                                projectSettings.neuralNetworkMinimumWeight,
                                projectSettings.neuralNetworkMaximumWeight,
                                viewportXY.GetFrame(true),
                                viewportXZ.GetFrame(true),
                                viewportYZ.GetFrame(true));
                        }
                        TimeSpan total = DateTime.Now - start;

                        Message("Training completed!");
                        Message("Time: " + total.Minutes + "m " + total.Seconds + "s " + total.Milliseconds + "msec");

                        MessageBox.Show("Training sucessful :)","Training completed",MessageBoxButtons.OK,MessageBoxIcon.Information);
                        projectSaved = false;
                        app_status.SetStatus(_app_status.points_trained);

                    }
                    else Message("Unable to proceed with training. Check if viewports are in first frame...");
                }
             } else Message("No points defined yet...");
        }