Example #1
    //public Renderer renderer;

    /// <summary>
    /// Unity lifecycle hook: opens the default Azure Kinect device, starts the
    /// cameras, creates a body tracker, and spawns one debug cube per joint.
    /// </summary>
    private void OnEnable()
    {
        this.device = Device.Open(0);

        var deviceConfig = new DeviceConfiguration();
        deviceConfig.ColorResolution = ColorResolution.r720p;
        deviceConfig.ColorFormat     = ImageFormat.ColorBGRA32;
        deviceConfig.DepthMode       = DepthMode.NFOV_Unbinned;

        device.StartCameras(deviceConfig);

        // Calibration for the selected depth/color modes feeds the body tracker.
        var sensorCalibration = device.GetCalibration(deviceConfig.DepthMode, deviceConfig.ColorResolution);

        var trackerConfig = new TrackerConfiguration
        {
            SensorOrientation = SensorOrientation.OrientationDefault,
            CpuOnlyMode       = false
        };

        this.tracker = BodyTracker.Create(sensorCalibration, trackerConfig);

        // One debug cube per skeleton joint, named after the joint it visualizes.
        debugObjects = new GameObject[(int)JointType.Count];
        for (var jointIndex = 0; jointIndex < (int)JointType.Count; jointIndex++)
        {
            var marker = GameObject.CreatePrimitive(PrimitiveType.Cube);
            marker.name = Enum.GetName(typeof(JointType), jointIndex);
            marker.transform.localScale = Vector3.one * 0.4f;
            debugObjects[jointIndex] = marker;
        }
    }
        /// <summary>Creates a body tracker for the given sensor calibration.</summary>
        /// <param name="sensorCalibration">Calibration of the depth/color sensor pair.</param>
        /// <param name="trackerConfiguration">Tracker settings applied at creation time.</param>
        /// <returns>A new <see cref="BodyTracker"/> wrapping the native handle.</returns>
        public static BodyTracker Create(Calibration sensorCalibration, TrackerConfiguration trackerConfiguration)
        {
            BodyTrackingNativeMethods.k4abt_tracker_t handle;

            // Throws when the native call reports a non-success result.
            AzureKinectException.ThrowIfNotSuccess(
                BodyTrackingNativeMethods.k4abt_tracker_create(sensorCalibration, trackerConfiguration, out handle));

            return new BodyTracker(handle);
        }
Example #3
        /// <summary>
        /// Opens the first Azure Kinect device, starts its cameras at 720p/30fps,
        /// and creates a GPU-backed body tracker from the device calibration.
        /// </summary>
        private void StartAzureKinect()
        {
            device = Device.Open(0);

            var deviceConfig = new DeviceConfiguration();
            deviceConfig.CameraFPS       = FPS.FPS30;
            deviceConfig.ColorResolution = ColorResolution.R720p;
            deviceConfig.ColorFormat     = ImageFormat.ColorBGRA32;
            deviceConfig.DepthMode       = DepthMode.NFOV_Unbinned;
            deviceConfig.WiredSyncMode   = WiredSyncMode.Standalone;

            device.StartCameras(deviceConfig);
            Debug.Log("Open K4A device successful. sn:" + device.SerialNum);

            // Full device calibration (depth + color) drives the tracker.
            var calibration = device.GetCalibration();

            this.tracker = Tracker.Create(calibration, new TrackerConfiguration
            {
                ProcessingMode    = TrackerProcessingMode.Gpu,
                SensorOrientation = SensorOrientation.Default
            });
            Debug.Log("Body tracker created.");
        }
 // Static constructor: registers fluent change-tracking rules for the Comment
 // entity once per AppDomain (TrackerEnabledDbContext-style configuration).
 static ApplicationDbContext()
 {
     TrackerConfiguration <Comment>
     .EnableTableTracking()
     // Id and ParentBlogId changes are excluded from the audit trail.
     .SkipTrackingForColumn(x => x.Id)
     .SkipTrackingForColumn(x => x.ParentBlogId);
 }
Example #5
 /// <summary>Builds the main page, creating the usage tracker and wiring the privacy-change event.</summary>
 public PageMain(Helper aHelper, OptionPagePrivacy aOptionPagePrivacy, HelperAutoUpdate aHelperAutoUpdate, IStartAtLoginOption aStartAtLoginOption)
     : base()
 {
     // The usage tracker is keyed by the account configured for this helper.
     iTracker = new OpenHome.Xapp.Tracker(TrackerConfiguration.TrackerAccount(aHelper), this);

     iOptionPagePrivacy  = aOptionPagePrivacy;
     iHelperAutoUpdate   = aHelperAutoUpdate;
     iStartAtLoginOption = aStartAtLoginOption;

     // React whenever the user toggles usage-data collection.
     aOptionPagePrivacy.EventUsageDataChanged += HandleEventUsageDataChanged;
 }
        /// <summary>
        /// Test fixture setup: deserializes the embedded league resource and builds a
        /// tracker configuration that tracks its first ladder entry.
        /// </summary>
        public void TestSetup()
        {
            // The embedded resource is raw UTF-8 JSON describing a league with entries.
            byte[] rawLeague  = POEToolsTestsBase.Properties.Resources.LeagueWithEntries;
            string leagueJson = Encoding.UTF8.GetString(rawLeague);

            league = JsonConvert.DeserializeObject <League>(leagueJson, GetJsonSettings());

            // Track the first ladder entry under the current culture.
            config = new TrackerConfiguration();
            config.League      = league;
            config.Entry       = league.Ladder.Entries[0];
            config.AccountName = accountName;
            config.Culture     = CultureInfo.CurrentCulture;
        }
Example #7
        /// <summary>Applies a new tracker configuration and resets the per-session tracking state.</summary>
        /// <param name="configuration">Non-null configuration describing the tracked entry, template and interaction options.</param>
        public void SetConfiguration(TrackerConfiguration configuration)
        {
            Contract.Requires(configuration != null);

            this.configuration = configuration;
            InitializeTranslations();

            // Reset counters and cache values derived from the new configuration.
            secondsPlayed = 0;
            initialExperience = configuration.Entry.Character.Experience;
            formInteractionsDisabled = configuration.InteractionsDisabled;
            templateContent = htmlService.GetTemplate(configuration.Template);
            initialLoading = true;

            // A blank page forces a clean reload before the real template renders.
            webBrowser.Navigate(new Uri("about:blank"));
            SetHotkey();
            SetCursor();
        }
Example #8
    /// <summary>
    /// Handles KinectSensor capture events: lazily creates the body tracker from the
    /// sensor calibration on the first capture, then feeds captures into it.
    /// </summary>
    /// <param name="sender">Event source (unused).</param>
    /// <param name="e">Event arguments carrying the new capture.</param>
    private void KinectSensor_CaptureReady(object sender, CaptureEventArgs e)
    {
        if (!IsInitializationComplete)
        {
            // First capture seen: build the tracker once.
            var calibration = kinectSensor.GetCalibration();
            var trackerConfig = TrackerConfiguration.Default;
            trackerConfig.ProcessingMode = processingMode;
            trackerConfig.ModelPath      = modelPath;
            tracker = new Tracker(in calibration, trackerConfig);
            IsInitializationComplete = true;
        }

        if (IsInitializationComplete && IsAvailable)
        {
            // Best-effort enqueue; the boolean result is deliberately ignored.
            tracker.TryEnqueueCapture(e.Capture);
        }
    }
        /// <summary>Creates a tracker with the given orientation/smoothing and starts the tracking loop on a background thread.</summary>
        /// <param name="calibration">Sensor calibration used to create the tracker.</param>
        /// <param name="sensorOrientation">Physical orientation of the sensor.</param>
        /// <param name="smoothingFactor">Temporal smoothing factor applied to tracker output.</param>
        public BackgroundTrackingLoop(ref Calibration calibration, SensorOrientation sensorOrientation, float smoothingFactor)
        {
            var trackerConfig = new TrackerConfiguration
            {
                SensorOrientation = sensorOrientation
            };

            tracker = new Tracker(ref calibration, trackerConfig);
            tracker.TemporalSmoothingFactor = smoothingFactor;

            isRunning = true;

            // Background thread so the tracking loop never keeps the process alive.
            backgroundThread = new Thread(BackgroundLoop) { IsBackground = true };
            backgroundThread.Start();
        }
        /// <summary>
        /// Confirms the selection: hides this form, creates the app semaphore, and
        /// opens the tracker form with the chosen league/entry and persisted settings.
        /// </summary>
        private void OkButton_Click(object sender, EventArgs e)
        {
            Hide();

            semaphoreService.CreateSemaphore();

            // Snapshot the user's choices and saved settings into one configuration.
            var trackerConfiguration = new TrackerConfiguration();
            trackerConfiguration.League               = selectedLeague;
            trackerConfiguration.Entry                = selectedEntry;
            trackerConfiguration.AccountName          = accountNameTextBox.Text;
            trackerConfiguration.Culture              = CultureInfo.CurrentCulture;
            trackerConfiguration.Template             = Properties.Settings.Default.Template;
            trackerConfiguration.InteractionsDisabled = Properties.Settings.Default.InteractionsDisabled;

            RankingTrackerContext.CurrentContext.ShowTrackerForm(trackerConfiguration);
        }
Example #11
        /// <summary>Creates a CPU- or GPU-backed tracker and starts the tracking loop on a background thread.</summary>
        /// <param name="calibration">Sensor calibration used to create the tracker.</param>
        /// <param name="cpuOnlyMode">True to force CPU processing; false to use the GPU.</param>
        /// <param name="sensorOrientation">Physical orientation of the sensor.</param>
        /// <param name="smoothingFactor">Temporal smoothing factor applied to tracker output.</param>
        public BackgroundTrackingLoop(ref Calibration calibration, bool cpuOnlyMode, SensorOrientation sensorOrientation, float smoothingFactor)
        {
            var trackerConfig = new TrackerConfiguration();
            trackerConfig.SensorOrientation = sensorOrientation;
            if (cpuOnlyMode)
            {
                trackerConfig.ProcessingMode = TrackerProcessingMode.Cpu;
            }
            else
            {
                trackerConfig.ProcessingMode = TrackerProcessingMode.Gpu;
            }

            tracker = new Tracker(ref calibration, trackerConfig);
            tracker.TemporalSmoothingFactor = smoothingFactor;

            isRunning = true;

            // Background thread so the tracking loop never blocks process shutdown.
            backgroundThread = new Thread(BackgroundLoop) { IsBackground = true };
            backgroundThread.Start();
        }
 /// <summary>Native entry point that creates a k4abt body tracker from a sensor calibration.</summary>
 /// <param name="sensorCalibration">Calibration of the depth/color sensor pair.</param>
 /// <param name="trackerCalibration">Tracker configuration values (parameter name kept as originally declared).</param>
 /// <param name="tracker_handle">Receives the native tracker handle.</param>
 /// <returns>The result code reported by the native call.</returns>
 public static extern NativeMethods.k4a_result_t k4abt_tracker_create(Calibration sensorCalibration, TrackerConfiguration trackerCalibration, out k4abt_tracker_t tracker_handle);
 /// <summary>Switches from the configuration form to the tracker form using the given configuration.</summary>
 /// <param name="configuration">Configuration applied to the tracker form before it is shown.</param>
 public void ShowTrackerForm(TrackerConfiguration configuration)
 {
     // Configure first so the form is ready the moment it becomes visible.
     trackerForm.SetConfiguration(configuration);

     configurationForm.Hide();
     trackerForm.Show();
 }
        /// <summary>
        /// Console entry point: connects a Wii Balance Board, opens the first Azure
        /// Kinect, runs GPU body tracking, and appends one CSV row per tracked frame
        /// (32 joint positions plus balance-board readings) until ESC is pressed.
        /// </summary>
        /// <remarks>
        /// BUGFIX: pressing ESC previously executed <c>return</c> inside the loop,
        /// which skipped <c>device.StopCameras()</c>/<c>device.Dispose()</c> and
        /// leaked the device. The loop now exits via <c>wantExit</c> so cleanup runs.
        /// </remarks>
        static void Main(string[] args)
        {
            var sw = new System.Diagnostics.Stopwatch();

            wiimote.Connect();
            Console.WriteLine("ESC:STOP");
            // Timestamp used as the CSV file name (MMddHHmmss).
            String now = DateTime.Now.ToString("MMddHHmmss");

            Console.WriteLine("Start Body Tracking App!");

            // Check how many devices are attached.
            if (Device.GetInstalledCount() == 0)
            {
                Console.WriteLine("No k4a devices attached!");
                Console.ReadKey();
                return;
            }

            // Open the device and start the cameras.
            Device device = null;

            // Open the first plugged in Kinect device
            try
            {
                // Connect to the first device.
                device = Device.Open(0);
            }
            catch (AzureKinectOpenDeviceException ex)
            {
                Console.WriteLine("Failed to open k4a device!!");
                Console.WriteLine(ex.Message);
                Console.WriteLine(ex.StackTrace.ToString());
                Console.ReadKey();
                return;
            }
            // Start camera. Make sure depth camera is enabled.
            var deviceConfig = new DeviceConfiguration();

            deviceConfig.DepthMode       = DepthMode.NFOV_Unbinned;
            deviceConfig.ColorResolution = ColorResolution.Off;
            try
            {
                device.StartCameras(deviceConfig);
            }
            catch (AzureKinectStartCamerasException ex)
            {
                Console.WriteLine("Failed to open k4a device!!");
                Console.WriteLine(ex.Message);
                Console.WriteLine(ex.StackTrace.ToString());
                device.Dispose();
                Console.ReadKey();
                return;
            }
            // Write the CSV header once; per-frame writers below append to this file.
            using (StreamWriter file = new StreamWriter(@"C:/Users/hackathon/Desktop/" + now + ".csv", true))
            {
                file.Write("Time[ms],PELVIS_X,PELVIS_Y,PELVIS_Z," +
                           "SPINE_NAVAL_X,SPINE_NAVAL_Y,SPINE_NAVAL_Z," +
                           "SPINE_CHEST_X,SPINE_CHEST_Y,SPINE_CHEST_Z," +
                           "NECK_X,NECK_Y,NECK_Z," +
                           "CLAVICLE_LEFT_X,CLAVICLE_LEFT_Y,CLAVICLE_LEFT_Z," +
                           "SHOULDER_LEFT_X,SHOULDER_LEFT_Y,SHOULDER_LEFT_Z," +
                           "ELBOW_LEFT_X,ELBOW_LEFT_Y,ELBOW_LEFT_Z," +
                           "WRIST_LEFT_X,WRIST_LEFT_Y,WRIST_LEFT_Z," +
                           "HAND_LEFT_X,HAND_LEFT_Y,HAND_LEFT_Z," +
                           "HANDTIP_LEFT_X,HANDTIP_LEFT_Y,HANDTIP_LEFT_Z," +
                           "THUMB_LEFT_X,THUMB_LEFT_Y,THUMB_LEFT_Z," +
                           "CLAVICLE_RIGHT_X,CLAVICLE_RIGHT_Y,CLAVICLE_RIGHT_Z," +
                           "SHOULDER_RIGHT_X,SHOULDER_RIGHT_Y,SHOULDER_RIGHT_Z," +
                           "ELBOW_RIGHT_X,ELBOW_RIGHT_Y,ELBOW_RIGHT_Z," +
                           "WRIST_RIGHT_X,WRIST_RIGHT_Y,WRIST_RIGHT_Z," +
                           "HAND_RIGHT_X,HAND_RIGHT_Y,HAND_RIGHT_Z," +
                           "HANDTIP_RIGHT_X,HANDTIP_RIGHT_Y,HANDTIP_RIGHT_Z," +
                           "THUMB_RIGHT_X,THUMB_RIGHT_Y,THUMB_RIGHT_Z," +
                           "HIP_LEFT_X,HIP_LEFT_Y,HIP_LEFT_Z," +
                           "KNEE_LEFT_X,KNEE_LEFT_Y,KNEE_LEFT_Z," +
                           "ANKLE_LEFT_X,ANKLE_LEFT_Y,ANKLE_LEFT_Z," +
                           "FOOT_LEFT_X,FOOT_LEFT_Y,FOOT_LEFT_Z," +
                           "HIP_RIGHT_X,HIP_RIGHT_Y,HIP_RIGHT_Z," +
                           "KNEE_RIGHT_X,KNEE_RIGHT_Y,KNEE_RIGHT_Z," +
                           "ANKLE_RIGHT_X,ANKLE_RIGHT_Y,ANKLE_RIGHT_Z," +
                           "FOOT_RIGHT_X,FOOT_RIGHT_Y,FOOT_RIGHT_Z," +
                           "HEAD_X,HEAD_Y,HEAD_Z," +
                           "NOSE_X,NOSE_Y,NOSE_Z," +
                           "EYE_LEFT_X,EYE_LEFT_Y,EYE_LEFT_Z," +
                           "EAR_LEFT_X,EAR_LEFT_Y,EAR_LEFT_Z," +
                           "EYE_RIGHT_X,EYE_RIGHT_Y,EYE_RIGHT_Z," +
                           "EAR_RIGHT_X,EAR_RIGHT_Y,EAR_RIGHT_Z," +
                           "Weight(kg),B_TopLeft(kg),B_TopRight(kg),B_UnderLeft(kg),B_UnderRight(kg),B_Center_X(cm),B_Center_Y(cm)" +
                           "\n");
            }


            // Create the body tracker (specify Cpu instead of Gpu if no GPU is available).
            var calibration   = device.GetCalibration(deviceConfig.DepthMode, deviceConfig.ColorResolution);
            var trackerConfig = new TrackerConfiguration();

            trackerConfig.ProcessingMode    = TrackerProcessingMode.Gpu;
            trackerConfig.SensorOrientation = SensorOrientation.Default;
            using (var tracker = Tracker.Create(calibration, trackerConfig))
            {
                var wantExit = false;

                while (!wantExit)
                {
                    // Capture a depth frame from the Azure Kinect device.
                    using (Capture sensorCapture = device.GetCapture())
                    {
                        // Queue latest frame from the sensor.
                        tracker.EnqueueCapture(sensorCapture);
                    }

                    // Try getting latest tracker frame.
                    using (Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                    {
                        if (frame != null)
                        {
                            if (frame.NumberOfBodies > 0)
                            {
                                var skeleton          = frame.GetBodySkeleton(0);
                                BalanceBoardState bbs = wiimote.WiimoteState.BalanceBoardState;
                                // Re-open the file per frame so every row is flushed immediately.
                                using (StreamWriter file = new StreamWriter(@"C:/Users/hackathon/Desktop/" + now + ".csv", true))
                                {
                                    // Start is a no-op once running; elapsed time anchors the row.
                                    sw.Start();
                                    file.Write(sw.ElapsedMilliseconds + ",");
                                    // 32 joints, X/Y/Z each.
                                    for (int i = 0; i < 32; i += 1)
                                    {
                                        file.Write(skeleton.GetJoint(i).Position.X + "," + skeleton.GetJoint(i).Position.Y + "," + skeleton.GetJoint(i).Position.Z + ",");
                                    }

                                    file.Write(bbs.WeightKg + "," + bbs.SensorValuesKg.TopLeft / 4 + "," + bbs.SensorValuesKg.TopRight / 4 + "," + bbs.SensorValuesKg.BottomLeft / 4 + "," + bbs.SensorValuesKg.BottomRight / 4 + "," + bbs.CenterOfGravity.X + "," + bbs.CenterOfGravity.Y);
                                    file.Write("\n");
                                    Console.WriteLine(bbs.WeightKg + "[kg]");
                                }
                            }
                        }
                    }
                    // Exit on the Esc key.
                    if (Console.KeyAvailable)
                    {
                        var outChar = Console.ReadKey().Key.ToString();
                        if (outChar == "Escape")
                        {
                            // BUGFIX: leave the loop so StopCameras/Dispose below run
                            // (a `return` here leaked the device).
                            wantExit = true;
                        }
                    }
                }
            }
            device.StopCameras();
            device.Dispose();
        }
    /// <summary>
    /// Opens the default Azure Kinect, starts the depth camera (color off), and runs
    /// GPU body tracking in a loop, printing the body count and first body id for
    /// each frame. Returns — disposing tracker and device via <c>using</c> — as soon
    /// as a result frame is null or contains no bodies.
    /// </summary>
    public static void azureKinect()
    {
        _print(true, "start main");
        using (Device device = Device.Open())
        {
            _print(true, "opened device");

            // Depth-only configuration: body tracking does not need the color stream.
            device.StartCameras(new DeviceConfiguration()
            {
                CameraFPS       = FPS.FPS30,
                ColorResolution = ColorResolution.Off,
                DepthMode       = DepthMode.NFOV_Unbinned,
                WiredSyncMode   = WiredSyncMode.Standalone,
            });

            _print(true, "started camera");
            var deviceCalibration = device.GetCalibration();

            //small difference with PointCloud enabled
            //pos: head -0.2916188 -178.0469 853.1077
            //pos: head -5.753897 -183.444 856.1947
            // PointCloud.ComputePointCloudCache(deviceCalibration);

            TrackerConfiguration trackerConfiguration = new TrackerConfiguration()
            {
                ProcessingMode    = TrackerProcessingMode.Gpu,
                SensorOrientation = SensorOrientation.Default
            };
            using (Tracker tracker = Tracker.Create(deviceCalibration, trackerConfiguration))
            {
                _print(true, "tracker created");
                while (true)
                {
                    using (Capture sensorCapture = device.GetCapture())
                    {
                        // Queue latest frame from the sensor (original author noted this
                        // throws System.FieldAccessException in some environments).
                        tracker.EnqueueCapture(sensorCapture);
                    }
                    _print(true, "init'd Capture sensorCapture");

                    using (Frame frame = tracker.PopResult())
                    {
                        if (frame == null)
                        {
                            _print(true, "frame was null");
                            return;
                        }

                        if (frame.NumberOfBodies < 1)
                        {
                            _print(true, "no bodies");
                            return;
                        }

                        // BUGFIX: the original concatenated the count after a literal
                        // "{0}" placeholder, printing e.g. "{0} bodies found2".
                        _print(true, $"{frame.NumberOfBodies} bodies found");
                        _print(true, "body id: " + frame.GetBodyId(0));
                    }
                }
            }
        }
    }
Example #16
 /// <summary>Configures common instrument settings for the envelope generator.</summary>
 /// <param name="envVsg">The open RFSG session to configure.</param>
 /// <param name="envVsgConfig">The common settings to apply to the envelope generator.</param>
 /// <param name="trackerConfig">The common settings pertaining to the tracker that is used to modulate the power supply voltage.</param>
 public static void ConfigureEnvelopeGenerator(NIRfsg envVsg, EnvelopeGeneratorConfiguration envVsgConfig, TrackerConfiguration trackerConfig)
 {
     // All function calls assume a differential terminal configuration since that
     // is the only option supported by the PXIe-5820.
     envVsg.FrequencyReference.Source = RfsgFrequencyReferenceSource.FromString(envVsgConfig.ReferenceClockSource);

     // A 50 ohm tracker input maps to a 100 ohm differential load impedance;
     // any other value is passed through unchanged.
     double loadImpedance = trackerConfig.InputImpedance_Ohms == 50.0 ? 100.0 : trackerConfig.InputImpedance_Ohms;

     envVsg.IQOutPort[""].LoadImpedance         = loadImpedance;
     envVsg.IQOutPort[""].TerminalConfiguration = RfsgTerminalConfiguration.Differential;
     envVsg.IQOutPort[""].CommonModeOffset      = trackerConfig.CommonModeOffset_V;
 }
        /// <summary>
        /// Window Loaded handler: opens the Azure Kinect, streams color frames to the
        /// UI, runs body tracking, and — whenever a body is present — calls the
        /// darkness/object detection services and speaks an alert. Loops until the
        /// capture throws; any exception is logged and ends the loop.
        /// </summary>
        /// <remarks>
        /// NOTE(review): `async void` is tolerable only because this is a top-level
        /// event handler and the body is wrapped in a catch-all.
        /// </remarks>
        async private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            Device device;

            try
            {
                using (device = Device.Open(0))
                {
                    device.StartCameras(new DeviceConfiguration
                    {
                        ColorFormat            = ImageFormat.ColorBGRA32,
                        ColorResolution        = ColorResolution.R720p,
                        DepthMode              = DepthMode.NFOV_Unbinned,
                        SynchronizedImagesOnly = true,
                        CameraFPS              = FPS.FPS30,
                    });
                    // Color dimensions are queried but the bitmap below uses the capture's own size.
                    int colorWidth    = device.GetCalibration().ColorCameraCalibration.ResolutionWidth;
                    int colorHeight   = device.GetCalibration().ColorCameraCalibration.ResolutionHeight;
                    var callibration  = device.GetCalibration(DepthMode.NFOV_Unbinned, ColorResolution.R720p);
                    var trackerConfig = new TrackerConfiguration();
                    trackerConfig.ProcessingMode    = TrackerProcessingMode.Gpu;
                    trackerConfig.SensorOrientation = SensorOrientation.Default;
                    using (var tracker = Tracker.Create(callibration, trackerConfig))
                    {
                        using (Transformation transform = device.GetCalibration().CreateTransformation())
                        {
                            while (true)
                            {
                                // Capture on a worker thread so the UI thread stays responsive;
                                // ConfigureAwait(true) resumes on the UI context for the view updates below.
                                using (Capture capture = await Task.Run(() => { return(device.GetCapture()); }).ConfigureAwait(true))
                                {
                                    // Convert the BGRA color image into a frozen (cross-thread-safe) bitmap.
                                    Task <BitmapSource> createInputColorBitmapTask = Task.Run(() =>
                                    {
                                        Image color         = capture.Color;
                                        BitmapSource source = BitmapSource.Create(color.WidthPixels, color.HeightPixels, 96, 96, PixelFormats.Bgra32, null, color.Memory.ToArray(), color.StrideBytes);
                                        source.Freeze();
                                        return(source);
                                    });
                                    this.inputColorBitmap = await createInputColorBitmapTask.ConfigureAwait(true);

                                    this.InputColorImageViewPane.Source = inputColorBitmap;
                                    tracker.EnqueueCapture(capture);
                                    // Non-blocking pop: frame is null when no tracking result is ready yet.
                                    using (Microsoft.Azure.Kinect.BodyTracking.Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                                    {
                                        if (frame != null)
                                        {
                                            Console.WriteLine("Number Body: " + frame.NumberOfBodies);
                                            if (frame.NumberOfBodies > 0)
                                            {
                                                // Somebody is in view: save the frame, then run the detection services.
                                                await SaveFile(this.inputColorBitmap);

                                                if (await callDetectDarknessServices().ConfigureAwait(true))
                                                {
                                                    speech("The room is too dark please turn on the light.");
                                                    this.DarknessAlertCount = this.DarknessAlertCount + 1;;
                                                }
                                                else if (await callDetectionObjectServices().ConfigureAwait(true))
                                                {
                                                    speech("Please Beware of Object On the Floor. Please Beware of Object On the Floor.");
                                                    this.ObjectAlertCount = this.ObjectAlertCount + 1;;
                                                }
                                            }
                                        }
                                    }
                                }
                                // Reset the daily alert counters during the first minutes after midnight.
                                switch ((DateTime.Now.ToString("HH:mm", System.Globalization.DateTimeFormatInfo.InvariantInfo)))
                                {
                                case "00:00":
                                case "00:01":
                                case "00:02": this.ObjectAlertCount = 0; this.DarknessAlertCount = 0; break;

                                default: break;
                                }
                                await CallLineApiService().ConfigureAwait(true);
                            }
                        }
                    }
                }
            }
            catch (Exception err)
            {
                Console.WriteLine(err);
            }
        }
Example #18
        /// <summary>Scales the envelope waveform data based on the settings in <paramref name="trackerConfig"/>, and downloads the waveform to the envelope generator.</summary>
        /// <param name="envVsg">The open RFSG session to configure.</param>
        /// <param name="envelopeWaveform">The envelope waveform created by <see cref="CreateDetroughEnvelopeWaveform(Waveform, DetroughConfiguration)"/> or
        /// <see cref="CreateLookUpTableEnvelopeWaveform(Waveform, LookUpTableConfiguration)"/> that is to be generated.</param>
        /// <param name="trackerConfig">The common settings pertaining to the tracker that is used to modulate the power supply voltage.</param>
        /// <returns>The envelope waveform with data scaled according to the tracker configuration.</returns>
        public static Waveform ScaleAndDownloadEnvelopeWaveform(NIRfsg envVsg, Waveform envelopeWaveform, TrackerConfiguration trackerConfig)
        {
            // grab the raw envelope so we can use linq to get statistics on it
            ComplexSingle.DecomposeArray(envelopeWaveform.Data.GetRawData(), out float[] envelope, out _);

            // scale envelope to adjust for tracker gain and offset
            for (int i = 0; i < envelope.Length; i++)
            {
                envelope[i] = (float)((envelope[i] - trackerConfig.OutputOffset_V) / trackerConfig.Gain_VperV);
            }

            // clone an envelope waveform to return to the user - want unique waveforms per tracker configuration
            // NOTE(review): relies on Waveform copy semantics — Data is re-assigned to a
            // clone just below so the caller's waveform is not mutated; confirm Waveform
            // is a value type (or that this aliasing is intended).
            Waveform scaledEnvelopeWaveform = envelopeWaveform;

            scaledEnvelopeWaveform.Data = envelopeWaveform.Data.Clone();
            WritableBuffer <ComplexSingle> scaledEnvelopeWaveformBuffer = scaledEnvelopeWaveform.Data.GetWritableBuffer();

            // populate cloned waveform with scaled waveform data
            for (int i = 0; i < envelope.Length; i++)
            {
                scaledEnvelopeWaveformBuffer[i] = ComplexSingle.FromSingle(envelope[i]);
            }

            // get peak of the waveform
            float absolutePeak = envelope.Max(i => Math.Abs(i)); // applies the absolute value function to each element and returns the max

            // scale waveform to peak voltage
            for (int i = 0; i < envelope.Length; i++)
            {
                envelope[i] = envelope[i] / (absolutePeak); // brings waveform down to +/- 1 magnitude
            }
            // set instrument properties
            envVsg.IQOutPort[""].Level  = 2.0 * absolutePeak; // gain is interpreted as peak-to-peak
            envVsg.IQOutPort[""].Offset = 0.0;                // set offset to 0 since this is done in DSP not in HW on the 5820 and only clips the waveform further

            // create another waveform that we can use to download the scaled envelope to the instrument
            Waveform instrEnvelopeWaveform = envelopeWaveform;

            instrEnvelopeWaveform.Data = envelopeWaveform.Data.Clone();
            WritableBuffer <ComplexSingle> instrEnvelopeWaveformBuffer = instrEnvelopeWaveform.Data.GetWritableBuffer();

            // populate cloned waveform with scaled waveform data (normalized to +/- 1)
            for (int i = 0; i < envelope.Length; i++)
            {
                instrEnvelopeWaveformBuffer[i] = ComplexSingle.FromSingle(envelope[i]);
            }

            SG.DownloadWaveform(envVsg, instrEnvelopeWaveform); // download optimized waveform

            return(scaledEnvelopeWaveform);                     // return the waveform as it will appear coming out of the front end of the envelope generator
        }
        /// <summary>
        /// This example illustrates how to use RFSG drivers and envelope tracking APIs
        /// to configure envelope tracking: an RF generator plays the modulated signal
        /// while an envelope generator modulates the supply voltage in sync with it.
        /// </summary>
        static void Main(string[] args)
        {
            #region Example Settings
            // Select mode for use in the example
            EnvelopeMode mode = EnvelopeMode.Detrough;
            string waveformPath = @"C:\Users\Public\Documents\National Instruments\RFIC Test Software\Waveforms\LTE_FDD_DL_1x20MHz_TM11_OS4.tdms";
            #endregion

            #region Configure RF Generator
            // Open the RF generator session and load the reference waveform.
            NIRfsg rfVsg = new NIRfsg("5840", true, false);
            Waveform rfWfm = LoadWaveformFromTDMS(waveformPath);

            // Apply default instrument settings, download the waveform, and arm
            // continuous generation.
            InstrumentConfiguration rfInstrConfig = InstrumentConfiguration.GetDefault();
            ConfigureInstrument(rfVsg, rfInstrConfig);
            DownloadWaveform(rfVsg, rfWfm);
            ConfigureContinuousGeneration(rfVsg, rfWfm);
            #endregion

            #region Configure Tracker Generator
            NIRfsg envVsg = new NIRfsg("5820", true, false);

            // Configure the envelope generator with default settings.
            EnvelopeGeneratorConfiguration envInstrConfig = EnvelopeGeneratorConfiguration.GetDefault();
            TrackerConfiguration trackerConfig = TrackerConfiguration.GetDefault();
            ConfigureEnvelopeGenerator(envVsg, envInstrConfig, trackerConfig);

            // Build the envelope waveform for the selected mode.
            Waveform envWfm = new Waveform();
            switch (mode)
            {
            case EnvelopeMode.Detrough:
                // Exponential detrough between 1.5 V and 3.5 V.
                DetroughConfiguration detroughConfig = DetroughConfiguration.GetDefault();
                detroughConfig.MinimumVoltage_V = 1.5;
                detroughConfig.MaximumVoltage_V = 3.5;
                detroughConfig.Exponent         = 1.2;
                detroughConfig.Type             = DetroughType.Exponential;
                envWfm = CreateDetroughEnvelopeWaveform(rfWfm, detroughConfig);
                break;

            case EnvelopeMode.LUT:
                LookUpTableConfiguration lutConfig = new LookUpTableConfiguration
                {
                    DutAverageInputPower_dBm = rfInstrConfig.DutAverageInputPower_dBm
                };
                // Todo - initialize lookup table
                envWfm = CreateLookUpTableEnvelopeWaveform(rfWfm, lutConfig);
                break;
            }

            ScaleAndDownloadEnvelopeWaveform(envVsg, envWfm, trackerConfig);
            ConfigureContinuousGeneration(envVsg, envWfm, "PFI0");
            #endregion

            // Start both generators synchronously to begin envelope tracking.
            SynchronizationConfiguration syncConfig = SynchronizationConfiguration.GetDefault();
            InitiateSynchronousGeneration(rfVsg, envVsg, syncConfig);

            // Wait until user presses a button to stop
            Console.WriteLine("Press any key to abort envelope tracking..");
            Console.ReadKey();

            AbortGeneration(envVsg);
            AbortGeneration(rfVsg);

            // Close instruments
            rfVsg.Close();
            envVsg.Close();
        }