// One neural network per camera pair (main camera paired with camera i).
private UnifiedCameraNeuralNetwork[] _neuralNets;

/// <summary>
/// Creates a new calibration object.
/// Calibration will occur for the session that was recorded at
/// sessionTimestamp using the numOfCameras defined.
/// </summary>
/// <param name="sessionTimestamp"> Timestamp of the session recorded </param>
/// <param name="numOfCameras"> Number of cameras in the recording </param>
/// <param name="display"> Display used to render the skeletons </param>
public Calibration(DateTime sessionTimestamp, int numOfCameras, SkelDisplay display)
{
    // Non-configurable ANN topology: each network maps the joint coordinates
    // seen by one camera onto the frame of the absolute (main) camera, so the
    // input and output layers are both numOfJoints * 3 (x, y, z per joint).
    _neuralNets = new UnifiedCameraNeuralNetwork[numOfCameras];
    int inputLayerSize = SkelJointsData.numOfJoints * 3;
    int outputLayerSize = inputLayerSize;

    // Configurable ANN hyper-parameters.
    int hiddenLayerSize = inputLayerSize * inputLayerSize;
    float learningRate = 0.15f;
    bool isStochastic = true;
    float momentum = 0.1f;

    // Create one ANN per (main camera, camera i) pair.
    // NOTE(review): the array is sized numOfCameras but only numOfCameras - 1
    // pairs exist, so the last slot stays null — confirm consumers expect that.
    for (int pair = 0; pair < numOfCameras - 1; pair++)
    {
        _neuralNets[pair] = new UnifiedCameraNeuralNetwork(
            inputLayerSize,
            hiddenLayerSize,
            outputLayerSize,
            learningRate,
            isStochastic,
            momentum);
    }

    _numOfCameras = numOfCameras;
    _display = display;
    _replay = new SkelReplay(sessionTimestamp, numOfCameras);
}
/// <summary>
/// Initializes a new instance of the MainWindow class.
/// </summary>
public MainWindow()
{
    this.skelRec = new SkelRecorder();
    // Start with a display that has no sensor; WindowLoaded rebinds it once a
    // Kinect is found.
    this.skelDisp = new SkelDisplay(null);

    InitializeComponent();

    // TODO: Delete this
    if (isTesting)
    {
        AnnTester.initNeuralNetworkTest();
    }

    try
    {
        if (!isTesting)
        {
            if (isRecord)
            {
                // Recording mode: receive skeleton data asynchronously.
                Client.BeginReceive(new AsyncCallback(recv), null);
            }
            else
            {
                // Playback mode: calibrate from the recorded session.
                this.calibration = new Calibration(SESSION_TIMESTAMP, numOfCameras, this.skelDisp);
            }
        }
    }
    catch (Exception ex)
    {
        // NOTE(review): broad catch that only logs to the debugger — start-up
        // failures are silently swallowed here; consider surfacing them in the UI.
        Debug.WriteLine(ex.ToString());
    }
}
/// <summary>
/// Execute startup tasks: find a connected Kinect, start its skeleton
/// stream, and bind the display to it. If no sensor can be started,
/// report that in the status bar.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void WindowLoaded(object sender, RoutedEventArgs e)
{
    // Look through all sensors and start the first connected one.
    // This requires that a Kinect is connected at the time of app startup.
    // To make your app robust against plug/unplug, it is recommended to use
    // KinectSensorChooser provided in Microsoft.Kinect.Toolkit
    // (See components in Toolkit Browser).
    foreach (var potentialSensor in KinectSensor.KinectSensors)
    {
        if (potentialSensor.Status == KinectStatus.Connected)
        {
            this.sensor = potentialSensor;
            break;
        }
    }

    if (null != this.sensor)
    {
        // Turn on the skeleton stream to receive skeleton frames
        this.sensor.SkeletonStream.Enable();

        // Add an event handler to be called whenever there is new skeleton frame data
        this.sensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;

        // Start the sensor!
        try
        {
            this.sensor.Start();
            //byte[] coordMapperParams = this.sensor.CoordinateMapper.ColorToDepthRelationalParameters.ToArray<byte>();
            //File.WriteAllBytes("Coord_Mapper_Params.txt", coordMapperParams);
        }
        catch (IOException)
        {
            // Another application may already be using the sensor.
            this.sensor = null;
        }
    }

    if (null != this.sensor)
    {
        // Sensor is running — bind the skeleton display to it.
        this.skelDisp = new SkelDisplay(this.sensor);
        Image.Source = this.skelDisp.imageSource;
    }
    else
    {
        // FIX: previously a failed Start() (sensor reset to null above) still
        // created a SkelDisplay(null) and never informed the user; now every
        // path that ends without a running sensor shows the NoKinectReady message.
        this.statusBarText.Text = Microsoft.Samples.Kinect.SkeletonClient.Properties.Resources.NoKinectReady;
    }
}