/// <summary>
/// Sets up the sensor-comparison pipeline: a multiplexer fed either from a stored
/// binary recording (playback) or from two live phone sensor TCP streams, which
/// are then recorded. Multiplexed items drive the plotters and both 3D phone models.
/// </summary>
/// <param name="play">
/// true = replay the stored recording in realtime; false = listen for live data
/// on ports 4001/4002 and record the multiplexed frames.
/// </param>
private void StartSources(bool play)
{
    var multiplexer = new SensorComparisonMultiplexer();

    // TODO: path is hardcoded -> use some file selection dialog
    var recorder = new BinaryRecorder<SensorComparisonFeatureSet>(
        @"C:\sensorcomparisonsavedata.scd",
        play ? RecorderMode.PlayRealtime : RecorderMode.Record);

    // Dispose the recorder stream when the application is closed. Registered exactly
    // once here; the playback branch previously added a redundant second subscription,
    // which caused Dispose to run twice on shutdown.
    this.Closed += (s, e) => { recorder.Dispose(); };

    if (play)
    {
        // Playing mode: Just read the combined recording, extract the raw-value-set
        // objects and put them back into the multiplexer (to reprocess the sequence)
        recorderControls.BindToRecorder<SensorComparisonFeatureSet>(recorder, plotterGroup);
        recorder.FrameRead += (s, e) =>
        {
            multiplexer.PushRawSensor2Values(e.Frame.RawSensors2);
            // Push first multiplex stream item at the end (this triggers the multiplexing)
            multiplexer.PushRawSensor1Values(e.Frame.RawSensors1);
        };
    }
    else
    {
        // Live mode: Set up the two phone sensor sources (the individual sources, in turn,
        // can be either recordings or live streams) and forward them into the multiplexer
        var sensorSource1 = new SensorRawFeatureSource(
            AbstractRawFeatureSource<SensorRawFeatureSet>.Mode.UseLiveStream, 4001);
        sensorSource1.NewItem += (s, e) => { multiplexer.PushRawSensor1Values(e.Item); };
        listBoxStatusUpdates.Items.Add("Listening on tcp port 4001 for phone #1.");
        sensorSource1.ExceptionOccured += (s, e) =>
        {
            listBoxStatusUpdates.Items.Add(string.Format("{0}: \"{1}\" {2}",
                DateTime.Now.ToShortTimeString(), e.Exception.Message, e.Context.ToString()));
        };

        var sensorSource2 = new SensorRawFeatureSource(
            AbstractRawFeatureSource<SensorRawFeatureSet>.Mode.UseLiveStream, 4002);
        sensorSource2.NewItem += (s, e) => { multiplexer.PushRawSensor2Values(e.Item); };
        listBoxStatusUpdates.Items.Add("Listening on tcp port 4002 for phone #2.");
        sensorSource2.ExceptionOccured += (s, e) =>
        {
            listBoxStatusUpdates.Items.Add(string.Format("{0}: \"{1}\" {2}",
                DateTime.Now.ToShortTimeString(), e.Exception.Message, e.Context.ToString()));
        };

        // Dispose all input streams when application is closed
        this.Closed += (s, e) => { sensorSource1.Dispose(); sensorSource2.Dispose(); };
    }

    // A new object was multiplexed from all input values, update all GUI components accordingly
    multiplexer.ItemMultiplexed += (s, e) =>
    {
        plotterGroup.Plot(e.MultiplexedItem);
        phoneModel1.Update3dPhoneModel(e.MultiplexedItem.RawSensors1);
        phoneModel2.Update3dPhoneModel(e.MultiplexedItem.RawSensors2);
        // Record the frame, if we are not playing a recording
        if (!play)
            recorder.RecordFrame(e.MultiplexedItem);
    };
}
/// <summary>
/// Sets up the combined (Kinect + phone sensor + manual annotation) pipeline: a
/// multiplexer fed either from a stored binary recording (playback) or from the
/// three live sources. Also wires up the RDA exporter and phone calibration handling.
/// </summary>
/// <param name="play">
/// true = replay the recording at <paramref name="path"/> in realtime;
/// false = capture live data and record it to <paramref name="path"/>.
/// </param>
/// <param name="path">File path of the binary recording to play or to create.</param>
private void StartSources(bool play, string path)
{
    // Temporary quick and dirty solution: Only let the user chose once to play or
    // record a file, then the app has to get restarted
    // TODO: make this more robust, and let the user load and record more than once
    MenuItemOpenRecording.IsEnabled = false;
    MenuItemStartRecording.IsEnabled = false;
    this.play = play;

    var multiplexer = new CombinedMultiplexer();

    // Obsolete: ARFF (Attribute-Relation File Format) exporter for the Weka data mining software
    //var arffExporter = new ArffExporter<SensorFeatureSet>(@"C:\arffexport.arff", new Dictionary<string, string[]>() { { "Class", Posture.EnumerateStateNames().ToArray() } });
    //this.Closed += (s, e) => { arffExporter.Dispose(); };

    rdaExporter = new RdaExporter<CombinedFeatureSet>(System.IO.Path.Combine(
        Environment.GetFolderPath(System.Environment.SpecialFolder.MyDocuments), @"bo.txt"));
    this.Closed += (s, e) => { rdaExporter.Dispose(); };

    recorder = new BinaryRecorder<CombinedFeatureSet>(path,
        play ? RecorderMode.PlayRealtime : RecorderMode.Record);

    // Dispose the recorder stream when the application is closed. Registered exactly
    // once here; the playback branch previously added a redundant second subscription,
    // which caused Dispose to run twice on shutdown.
    this.Closed += (s, e) => { recorder.Dispose(); };

    if (play)
    {
        Quaternion calibQuat = new Quaternion();
        // TODO: these lines are just for testing, remove at some point
        double calibAngle = 0d;
        this.phoneModel.CalibrationEnabled = true;
        this.phoneModel.Calibrated += (s, e) => { calibQuat = e.CalibrationQuaternion; };
        multiplexer.CalibrationAngleCalculated += (s, e) => { calibAngle = e.CalibrationAngle; };

        // Playing mode: Just read the combined recording, extract all three raw-value-set
        // objects and put them back into the multiplexer (to reprocess the sequence)
        recorderControls.BindToRecorder<CombinedFeatureSet>(recorder, plotterGroup);
        recorder.FrameRead += (s, e) =>
        {
            // TODO: uncomment the following lines in final version, this is just commented out for testing
            // Read the stored calibration data and apply to the phone model view
            //if (e.Frame.RawManual.CalibrationQuaternion != calibQuat)
            //{
            //    calibQuat = e.Frame.RawManual.CalibrationQuaternion;
            //    this.phoneModel.CalibrateManually(calibQuat);
            //}

            // TODO: just for testing, delete next 2 lines afterwards
            e.Frame.RawManual.CalibrationQuaternion = calibQuat;
            e.Frame.RawManual.CalibrationAngle = calibAngle;

            multiplexer.PushRawKinectValues(e.Frame.RawKinect);
            multiplexer.PushRawManualValues(e.Frame.RawManual);
            // Push first multiplex stream item at the end (this triggers the multiplexing)
            multiplexer.PushRawSensorValues(e.Frame.RawSensors);
        };
    }
    else
    {
        // Live mode: Set up the three sources (the individual sources, in turn, can be
        // either recordings or live streams) and forward them into the multiplexer
        KinectRawFeatureSource kinectSource = null;
        try
        {
            kinectSource = new KinectRawFeatureSource(
                AbstractRawFeatureSource<KinectRawFeatureSet>.Mode.UseLiveStream);
            kinectSource.NewItem += (s, e) => { multiplexer.PushRawKinectValues(e.Item); };
        }
        catch (Exception ex)
        {
            // The Kinect source is optional: keep running without it, but surface the
            // failure in the status list instead of swallowing it silently.
            listBoxStatusUpdates.Items.Add(string.Format("{0}: Kinect source unavailable: \"{1}\"",
                DateTime.Now.ToShortTimeString(), ex.Message));
        }

        var sensorSource = new SensorRawFeatureSource(
            AbstractRawFeatureSource<SensorRawFeatureSet>.Mode.UseLiveStream, 3547);
        sensorSource.NewItem += (s, e) => { multiplexer.PushRawSensorValues(e.Item); };
        sensorSource.ExceptionOccured += (s, e) =>
        {
            listBoxStatusUpdates.Items.Add(string.Format("{0}: \"{1}\" {2}",
                DateTime.Now.ToShortTimeString(), e.Exception.Message, e.Context.ToString()));
        };

        var manualSource = new ManualRawFeatureSource(
            AbstractRawFeatureSource<ManualRawFeatureSet>.Mode.UseLiveStream);
        manualSource.NewItem += (s, e) => { multiplexer.PushRawManualValues(e.Item); };
        buttonNextStep.Click += (s, e) => { manualSource.NextPhase(); };

        // React to changing calibration of the phone
        this.phoneModel.CalibrationEnabled = true;
        this.phoneModel.Calibrated += (s, e) => { manualSource.SetNewCalibration(e.CalibrationQuaternion); };
        multiplexer.CalibrationAngleCalculated += (s, e) => { manualSource.SetNewCalibrationAngle(e.CalibrationAngle); };

        // Dispose all input streams when application is closed. kinectSource can be null
        // when its construction failed above, so guard against a NullReferenceException.
        this.Closed += (s, e) =>
        {
            if (kinectSource != null)
                kinectSource.Dispose();
            sensorSource.Dispose();
            manualSource.Dispose();
        };
    }

    // A new object was multiplexed from all input values, update all GUI components accordingly
    multiplexer.ItemMultiplexed += new EventHandler<ItemMultiplexedEventArgs<CombinedFeatureSet>>(multiplexer_ItemMultiplexed);
}