/// <summary>
/// Persists the current UI calibration state to disk as JSON: one file with the
/// per-Kinect viewport offsets/angles, and one with the global OSC/meta settings.
/// All values are stored as the raw text-box strings.
/// </summary>
private void updateConfigurationFiles()
{
    // Snapshot each viewport's calibration text boxes into a serializable record.
    List<KinectCoordinates> kCoords = new List<KinectCoordinates>();
    foreach (KinectViewport viewport in viewports)
    {
        kCoords.Add(new KinectCoordinates
        {
            xOffset = viewport.xOffset.Text,
            yOffset = viewport.yOffset.Text,
            zOffset = viewport.zOffset.Text,
            pitch = viewport.pitchAngle.Text,
            roll = viewport.rollAngle.Text,
            yaw = viewport.yawAngle.Text
        });
    }
    File.WriteAllText(KinectCalibrationFilename, JsonConvert.SerializeObject(kCoords));

    // Global (non-per-sensor) settings: OSC endpoint plus output scaling/offset.
    MetaConfiguration meta = new MetaConfiguration
    {
        OSCAddress = OscAddress.Text,
        port = OscPort.Text,
        XOffset = XOffsetTextBox.Text,
        XScaling = XScaleTextBox.Text,
        YOffset = YOffsetTextBox.Text,
        YScaling = YScaleTextBox.Text
    };
    File.WriteAllText(MetaCalibrationFilename, JsonConvert.SerializeObject(meta));
}
/// <summary>
/// Execute startup tasks: set up the OSC writers, start every connected Kinect
/// sensor (up to the number of available viewports), and restore any saved
/// per-Kinect and global calibration settings from disk.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void WindowLoaded(object sender, RoutedEventArgs e)
{
    // Setup osc sender
    oscArgs[0] = "127.0.0.1";
    oscArgs[1] = OscPort.Text;
    oscWriter = new UdpWriter(oscArgs[0], Convert.ToInt32(oscArgs[1]));
    deltaToscWriter = new UdpWriter(oscArgs[0], 7114);

    // Initialize Data viewer
    oscViewer.Text = "\nData will be shown here\nwhen there is a skeleton\nbeing tracked.";

    kinectGroup = new VisualKinectGroup();
    viewports = new List<KinectViewport>();
    viewports.Add(this.TestViewport);
    viewports.Add(this.TestViewport2);

    // Look through all sensors and start the first connected one.
    // This requires that a Kinect is connected at the time of app startup.
    // To make your app robust against plug/unplug,
    // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit
    int numberOfKinects = 0;
    foreach (var potentialSensor in KinectSensor.KinectSensors)
    {
        if (potentialSensor.Status != KinectStatus.Connected)
        {
            continue;
        }
        // FIX: each started sensor is bound to viewports[numberOfKinects];
        // without this guard a third connected Kinect would index past the
        // two viewports added above and throw ArgumentOutOfRangeException.
        if (numberOfKinects >= viewports.Count)
        {
            Console.WriteLine("More Kinects connected than viewports available; ignoring the rest.");
            break;
        }
        // Start the sensor!
        try
        {
            potentialSensor.Start();
            // Good to go, so count this one as connected!
            // Position/orientation start zeroed; the calibration file loaded
            // below (if present) supplies the real values via the viewports.
            LocatedSensor sensor = new LocatedSensor(potentialSensor, 0, 0, 0, 0, 0, 0);
            VisualKinectUnit newSensor = new VisualKinectUnit(sensor,
                                                              viewports[numberOfKinects].skeletonDrawingImage,
                                                              viewports[numberOfKinects].colorImage,
                                                              viewports[numberOfKinects]);
            kinectGroup.AddVisualKinectUnit(newSensor);
            // This function sends out skeleton data as OSC
            newSensor.locatedSensor.sensor.SkeletonFrameReady += sendOSCSkeletonPositions;
            numberOfKinects++;
            Console.WriteLine("Number of Kinects : " + numberOfKinects);
        }
        catch (IOException)
        {
            Console.WriteLine("Couldn't start one of the Kinect sensors...");
        }
    }

    // Now that we have all of our sensors loaded, let's see if we have any data
    // Try and load in data from a file..
    string configSerialization = "";
    try
    {
        configSerialization = File.ReadAllText(KinectCalibrationFilename);
    }
    catch (Exception exception)
    {
        Console.WriteLine("The file could not be read:");
        Console.WriteLine(exception.Message);
        Console.WriteLine("Using default parameters...");
    }
    // If we got data, parse it!
    if (!string.IsNullOrEmpty(configSerialization))
    {
        // FIX: deserialize the text we already read instead of calling
        // File.ReadAllText a second time — the second read happened outside
        // the try/catch above and could throw unhandled if the file vanished.
        List<KinectCoordinates> coordinates =
            JsonConvert.DeserializeObject<List<KinectCoordinates>>(configSerialization);
        // FIX: guard against a file whose JSON deserializes to null.
        for (int i = 0; coordinates != null && i < viewports.Count && i < coordinates.Count; i++)
        {
            viewports[i].xOffset.Text = coordinates[i].xOffset;
            viewports[i].yOffset.Text = coordinates[i].yOffset;
            viewports[i].zOffset.Text = coordinates[i].zOffset;
            viewports[i].pitchAngle.Text = coordinates[i].pitch;
            viewports[i].rollAngle.Text = coordinates[i].roll;
            viewports[i].yawAngle.Text = coordinates[i].yaw;
        }
        // Update the info from the kinect windows
        // TODO: Maybe this happens automagically, but maybe not... Test this.
    }

    // Now get the overall calibration stuff, like OSC port and such
    configSerialization = "";
    try
    {
        configSerialization = File.ReadAllText(MetaCalibrationFilename);
    }
    catch (Exception exception)
    {
        Console.WriteLine("The file could not be read:");
        Console.WriteLine(exception.Message);
        Console.WriteLine("Using default parameters...");
    }
    // If we got data, parse it!
    if (!string.IsNullOrEmpty(configSerialization))
    {
        MetaConfiguration config = JsonConvert.DeserializeObject<MetaConfiguration>(configSerialization);
        XOffsetTextBox.Text = config.XOffset;
        YOffsetTextBox.Text = config.YOffset;
        XScaleTextBox.Text = config.XScaling;
        YScaleTextBox.Text = config.YScaling;
        OscAddress.Text = config.OSCAddress;
        OscPort.Text = config.port;
        // Update the actual OSC port stuff
        // We don't change the scaling and offset stuff since that's
        // directly read from the text boxen
        oscArgs[1] = OscPort.Text;
        // NOTE(review): this replaces the oscWriter created at the top of this
        // handler without disposing it — confirm whether UdpWriter owns a
        // socket that should be closed before being dropped.
        oscWriter = new UdpWriter(oscArgs[0], Convert.ToInt32(oscArgs[1]));
        UpdateOscAddress();
    }
}
// NOTE(review): this is a byte-near duplicate of the updateConfigurationFiles
// method defined at the top of this file — two members with the same signature
// in one class will not compile, so this looks like a merge/paste artifact.
// Remove one of the two copies.
private void updateConfigurationFiles() { List<KinectCoordinates> kCoords = new List<KinectCoordinates>(); foreach (KinectViewport v in viewports){ KinectCoordinates k = new KinectCoordinates(); k.xOffset = v.xOffset.Text; k.yOffset = v.yOffset.Text; k.zOffset = v.zOffset.Text; k.pitch = v.pitchAngle.Text; k.roll = v.rollAngle.Text; k.yaw = v.yawAngle.Text; kCoords.Add(k); } File.WriteAllText(KinectCalibrationFilename, JsonConvert.SerializeObject(kCoords) ); MetaConfiguration meta = new MetaConfiguration(); meta.OSCAddress = OscAddress.Text; meta.port = OscPort.Text; meta.XOffset = XOffsetTextBox.Text; meta.XScaling = XScaleTextBox.Text; meta.YOffset = YOffsetTextBox.Text; meta.YScaling = YScaleTextBox.Text; File.WriteAllText(MetaCalibrationFilename, JsonConvert.SerializeObject(meta)); }