        protected override void Initialize()
        {
            // Add some objects as services
            Services.AddService(typeof(GraphicsDeviceManager), graphics);
            Services.AddService(typeof(GraphicsDevice), this.GraphicsDevice);

            // Initialize vr components
            render = new RenderSubsystem(this);
            update = new UpdateSubsystem(this);
            Components.Add(render);
            Components.Add(update);

            // Load virtual world
            GetStimulus();
            pType           = new VRProtocol();
            pType.tDuration = EP.durationTrial;
            pType.trials    = EP.stimTypes;
            Services.AddService(typeof(VRProtocol), pType);

            // Initialize frame acquisition objects
            pulsePal = new PulsePal();
            cam1     = new uEyeCamera(0, "C:\\Users\\Chiappee\\Desktop\\p1 600x600.ini", true, true, 850);
            cam2     = new uEyeCamera(1, "C:\\Users\\Chiappee\\Desktop\\p2 - 1024x544.ini", false, true, 250, pulsePal);
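            // Note that only cam2 receives the PulsePal instance (the extra constructor argument above).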

            if (cam1.m_IsLive)
            {
                // Initialize objects for online tracking
                ft  = new FastBlobTracking();
                kft = new KalmanFilterTrack();
                Services.AddService(typeof(KalmanFilterTrack), kft);

                // Initialize file for data storage
                filestream = File.OpenWrite("C:\\Users\\Chiappee\\Desktop\\Cameras.txt");
                textWriter = new StreamWriter(filestream);
                textWriter.Flush();

                // Calibration values
                c[0]  = 0.0011f;
                c[1]  = -0.7660f;
                c[2]  = 0.0000f;
                c[3]  = 0.0004f;
                c[4]  = 1.0000f;
                c[5]  = 0.0012f;
                c[6]  = -0.0000f;
                c[7]  = -0.3676f;
                c[8]  = 12.5535f;
                c[9]  = 4.6354f + 0.07f;
                c[10] = -12.9056f;
                c[11] = -1.0711f;
            }
            base.Initialize();
        }
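The override above assumes a number of fields on the hosting Game subclass. A minimal sketch of those declarations, with the class name and XNA/MonoGame base type inferred from usage rather than taken from the original project:

using System.IO;
using Microsoft.Xna.Framework;

public class VRGame : Game              // hypothetical name for the hosting class
{
    GraphicsDeviceManager graphics;     // typically created in the Game constructor
    RenderSubsystem render;             // VR render component
    UpdateSubsystem update;             // VR update component
    VRProtocol pType;                   // trial protocol, registered as a service
    PulsePal pulsePal;                  // pulse generator handed to cam2
    uEyeCamera cam1, cam2;              // frame acquisition cameras
    FastBlobTracking ft;                // online blob tracker
    KalmanFilterTrack kft;              // tracking filter, registered as a service
    FileStream filestream;              // file handle for the camera log
    StreamWriter textWriter;            // text writer over filestream
    float[] c = new float[12];          // calibration coefficients set in Initialize()

    // EP, GetStimulus(), and the remaining members are defined elsewhere in the project.
}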
Example #2
        public void Initialize(int Size)
        {
            // Initialize vr components
            render = new RenderSubsystem(this, new StaticCamera(Size), Size);
            update = new UpdateSubsystem(this);

            // Load virtual world
            if (protName == null)
            {
                GetStimulus();
            }
            else
            {
                GetStimulus(protName);
            }

            // Initialize Photodiode & Kalman filter
            if (vRProtocol.usePhotodiode)
            {
                pd = new Photodiode(vRProtocol.portPhotodiode);
                pd.StartPhotodiode();
            }
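            // The Kalman filter is always created; pd stays null when the photodiode is disabled.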
            kft = new KalmanFilterTrack<MovementData>(false, vRProtocol.twoFlies, pd);

            // Initialize data acquisition objects
            if (vRProtocol.usePulsePal)
            {
                pp = new PulsePal<MovementData>(vRProtocol.portPulsePal);
                pp.StartPulsePal();
            }
            if (vRProtocol.useCam1)
            {
                cam1 = new uEyeCamera(0, vRProtocol.paramsPathCam1, vRProtocol.trackCam1, vRProtocol.dispCam1, 0, vRProtocol.fpsCam1, null);
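                // Block until the camera driver reports a live feed (busy-wait with no timeout).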
                while (!cam1.m_IsLive)
                {
                }
                if (cam1.m_IsLive)
                {
                    cam1.Start();
                    if (vRProtocol.trackCam1)
                    {
                        fastT = new FastTracking<Frame>(this, cam1.firstFrame, 1, 100, 50, 0, vRProtocol.twoFlies, true);
                        fastT.Start();
                        kft.Start();
                    }
                }
            }
            if (vRProtocol.useCam2)
            {
                if (vRProtocol.usePulsePal)
                {
                    cam2 = new uEyeCamera(1, vRProtocol.paramsPathCam2, vRProtocol.trackCam2, vRProtocol.dispCam2, 800, vRProtocol.fpsCam2, pp);
                }
                else
                {
                    cam2 = new uEyeCamera(1, vRProtocol.paramsPathCam2, vRProtocol.trackCam2, vRProtocol.dispCam2, 800, vRProtocol.fpsCam2, null);
                }

                if (cam2.m_IsLive)
                {
                    cam2.Start();
                    if (vRProtocol.trackCam2)
                    {
                        fastT = new FastTracking<Frame>(this, cam2.firstFrame, 10, 5000, 35, 0, vRProtocol.twoFlies, false);
                        fastT.Start();
                        kft.Start();
                    }
                }
            }

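            // Optional recorders for tracking, stimulus, and photodiode data; each is handed cam2 when the second camera is in use.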
            if (vRProtocol.recordTracking && vRProtocol.useCam2)
            {
                dataRecorder = new DataRecorder<FilteredData>(vRProtocol.recordPathTracking, cam2, true, vRProtocol.twoFlies, this);
                dataRecorder.Start();
            }
            else if (vRProtocol.recordTracking && !vRProtocol.useCam2)
            {
                dataRecorder = new DataRecorder<FilteredData>(vRProtocol.recordPathTracking, true, vRProtocol.twoFlies, this);
                dataRecorder.Start();
            }

            if (vRProtocol.recordStimulus && vRProtocol.useCam2)
            {
                stimRecorder = new StimRecorder<StimData>(vRProtocol.recordPathStimulus, cam2, true, this);
                stimRecorder.Start();
            }
            else if (vRProtocol.recordStimulus && !vRProtocol.useCam2)
            {
                stimRecorder = new StimRecorder<StimData>(vRProtocol.recordPathStimulus, true, this);
                stimRecorder.Start();
            }

            if (vRProtocol.recordPhotodiode && vRProtocol.useCam2)
            {
                photoRecorder = new PhotoRecorder<PhotoData>(vRProtocol.recordPathPhotodiode, cam2, true, this);
                photoRecorder.Start();
            }
            else if (vRProtocol.recordPhotodiode && !vRProtocol.useCam2)
            {
                photoRecorder = new PhotoRecorder<PhotoData>(vRProtocol.recordPathPhotodiode, true, this);
                photoRecorder.Start();
            }
        }
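A hypothetical call site for this overload; the receiver name, protocol string, and size value are illustrative and not taken from the original code, and it assumes protName is settable from outside, which the snippet does not show:

engine.protName = "openLoop";   // illustrative; leaving this null makes Initialize call GetStimulus() with no argument
engine.Initialize(600);         // Size is forwarded to the RenderSubsystem and StaticCamera constructors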