/// <summary>
/// Emits this frame's face animation units (AUs) as a single OSC message
/// and/or one CSV line.
/// </summary>
/// <param name="osc">OSC writer, or null to skip OSC output</param>
/// <param name="fileWriter">CSV writer, or null to skip file output</param>
/// <param name="pointScale">unused by this overload (kept for the common Send signature)</param>
public override void Send(UdpWriter osc, StreamWriter fileWriter, int pointScale)
{
    if (osc != null)
    {
        osc.Send(new OscElement(
                     "/face_animation",
                     sensorId, user,
                     c[AnimationUnit.LipRaiser],
                     c[AnimationUnit.LipStretcher],
                     c[AnimationUnit.LipCornerDepressor],
                     c[AnimationUnit.JawLower],
                     c[AnimationUnit.BrowLower],
                     c[AnimationUnit.BrowRaiser],
                     time));
    }
    if (fileWriter != null)
    {
        // Format with the invariant culture so the decimal separator is always
        // '.'; the previous ToString().Replace(",", ".") hack depended on the
        // thread culture and could not cope with group separators.
        fileWriter.WriteLine(string.Format(
                                 System.Globalization.CultureInfo.InvariantCulture,
                                 "FaceAnimation,{0},{1},{2},{3},{4},{5},{6},{7},{8}",
                                 sensorId, user,
                                 c[AnimationUnit.LipRaiser],
                                 c[AnimationUnit.LipStretcher],
                                 c[AnimationUnit.LipCornerDepressor],
                                 c[AnimationUnit.JawLower],
                                 c[AnimationUnit.BrowLower],
                                 c[AnimationUnit.BrowRaiser],
                                 time));
    }
}
Exemplo n.º 2
0
 /// <summary>
 /// Creates an instrument that sends OSC over UDP to the given host/port.
 /// </summary>
 /// <param name="name">display/identifier name of the instrument</param>
 /// <param name="oscHost">destination host for OSC output (default localhost)</param>
 /// <param name="oscPort">destination port for OSC output (default 22345)</param>
 public Instrument(string name, string oscHost = "127.0.0.1", int oscPort = 22345)
 {
     this.name = name;
     this.osc = new UdpWriter(oscHost, oscPort);
     // NOTE(review): filter port 9001 is hard-coded while the main port is
     // configurable — presumably intentional, but worth confirming.
     this.filter = new UdpWriter(oscHost, 9001);
     isPlaying = false;
 }
Exemplo n.º 3
0
 /// <summary>
 /// Creates a sender that writes OSC bundles to the given UDP endpoint.
 /// </summary>
 /// <param name="address">destination host or IP</param>
 /// <param name="port">destination UDP port</param>
 public MessageSender(string address, int port)
 {
     this.address = address;
     this.port = port;
     this.writer = new UdpWriter(address, port);
     this.bundle = new OscBundle();
 }
Exemplo n.º 4
0
        /// <summary>
        /// Test helper that simulates one ONC-RPC round trip: builds a CALL
        /// request from <paramref name="callBody"/> and <paramref name="reqArgs"/>,
        /// checks the serialized bytes against the expected dump, then validates
        /// and decodes the canned reply in _receivedArray.
        /// </summary>
        /// <typeparam name="TReq">request arguments type</typeparam>
        /// <typeparam name="TResp">response arguments type</typeparam>
        /// <returns>the decoded response arguments</returns>
        public TResp Request <TReq, TResp>(call_body callBody, TReq reqArgs)
        {
            // Decode the canned reply first — its xid seeds the request header.
            UdpReader mr      = new UdpReader(_receivedArray);
            Reader    r       = Toolkit.CreateReader(mr);
            rpc_msg   respMsg = r.Read <rpc_msg>();


            rpc_msg reqHeader = new rpc_msg()
            {
                xid  = respMsg.xid,                // reuse the xid found in _receivedArray
                body = new body()
                {
                    mtype = msg_type.CALL,
                    cbody = callBody
                }
            };

            // Serialize header + arguments into a fresh datagram.
            UdpWriter dtg = new UdpWriter();
            Writer    w   = Toolkit.CreateWriter(dtg);

            w.Write(reqHeader);
            w.Write(reqArgs);

            byte[] outBuff = dtg.Build();
            Assert.AreEqual(_expectedSendArray, outBuff, "send dump is difference");

            // Validate the reply header, then decode the response payload.
            Toolkit.ReplyMessageValidate(respMsg);
            TResp respArgs = r.Read <TResp>();

            // Ensure the canned reply was consumed completely.
            mr.CheckEmpty();

            return(respArgs);
        }
Exemplo n.º 5
0
 /// <summary>
 /// Emits this frame's face properties (happy, engaged, glasses, eyes,
 /// mouth, looking-away) as one OSC message and/or one CSV line.
 /// </summary>
 /// <param name="pointScale">unused by this overload (kept for the common Send signature)</param>
 /// <param name="osc">OSC writer, or null to skip OSC output</param>
 /// <param name="fileWriter">CSV writer, or null to skip file output</param>
 public override void Send(int pointScale, UdpWriter osc, StreamWriter fileWriter)
 {
     if (osc != null)
     {
         osc.Send(new OscElement(
                      "/osceleton2/face_property",
                      sensorId, user,
                      happy, engaged, wearingGlasses, leftEyeClosed, rightEyeClosed, mouthOpen, mouthMoved, lookingAway,
                      time));
     }
     if (fileWriter != null)
     {
         // Invariant culture keeps '.' as the decimal separator regardless of
         // the thread culture (replaces the ToString().Replace(",", ".") hack).
         fileWriter.WriteLine(string.Format(
                                  System.Globalization.CultureInfo.InvariantCulture,
                                  "FaceProperty,{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10}",
                                  sensorId, user,
                                  happy, engaged, wearingGlasses, leftEyeClosed, rightEyeClosed,
                                  mouthOpen, mouthMoved, lookingAway, time));
     }
 }
Exemplo n.º 6
0
 /// <summary>
 /// Default constructor: sends OSC bundles to 127.0.0.1:4444.
 /// </summary>
 public MessageSender()
 {
     address = "127.0.0.1";
     port    = 4444;
     bundle  = new OscBundle();
     writer  = new UdpWriter(address, port);
 }
Exemplo n.º 7
0
 /// <summary>
 /// On Enter in the port textbox, re-creates the OSC writer with the new port.
 /// Invalid (non-numeric) input is ignored instead of crashing the handler.
 /// </summary>
 /// <param name="sender">object sending the event</param>
 /// <param name="e">key event arguments</param>
 private void OscPortKeyUp(object sender, KeyEventArgs e)
 {
     if (e.Key == Key.Enter)
     {
         // TryParse instead of Convert.ToInt32: the old code threw a
         // FormatException straight out of the key handler on bad input.
         int port;
         if (int.TryParse(OscPort.Text, out port))
         {
             oscArgs[1] = OscPort.Text;
             // NOTE(review): the previous oscWriter is not disposed here —
             // matches the original behavior, but may leak a socket; confirm.
             oscWriter  = new UdpWriter(oscArgs[0], port);
         }
     }
 }
Exemplo n.º 8
0
        /// <summary>
        /// Window startup: creates the drawing surface, opens two hard-coded
        /// OSC UDP destinations, and starts the first connected Kinect with
        /// skeleton tracking in seated/near mode.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // NOTE(review): destination IPs/ports are hard-coded — consider
            // moving them to configuration.
            this.udpwriter = new UdpWriter("192.168.187.56", 12345);
            this.uw2       = new UdpWriter("192.168.187.152", 12345);

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // Display the drawing using our image control
            Image.Source = this.imageSource;

            // Look through all sensors and start the first connected one.
            // This requires that a Kinect is connected at the time of app startup.
            // To make your app robust against plug/unplug,
            // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit
            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    this.sensor = potentialSensor;
                    break;
                }
            }

            if (null != this.sensor)
            {
                // Turn on the skeleton stream to receive skeleton frames
                this.sensor.SkeletonStream.Enable();

                // Add an event handler to be called whenever there is new skeleton frame data
                this.sensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;

                // NOTE(review): near range presumably requires a Kinect that
                // supports near mode — confirm on target hardware.
                this.sensor.DepthStream.Range = DepthRange.Near;

                // Start the sensor!
                try
                {
                    this.sensor.Start();
                }
                catch (IOException)
                {
                    // Another process may already own the sensor; treat as absent.
                    this.sensor = null;
                }
            }

            if (null == this.sensor)
            {
                this.statusBarText.Text = Properties.Resources.NoKinectReady;
            }
            else
            {
                this.sensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;
                this.checkBoxSeatedMode.IsChecked       = true;
            }
        }
 /// <summary>
 /// Disposes the OSC UDP writer (if any), clears the reference and marks
 /// the connection status as offline. Safe to call when already reset.
 /// </summary>
 private void ResetOscUdpWriter()
 {
     if (this.OscUdpWriter == null)
     {
         return;
     }
     this.OscUdpWriter.Dispose();
     this.OscUdpWriter = null;
     this.Status = "Offline";
 }
        /// <summary>
        /// Creates a new Ableton slider controller that writes values in the
        /// range [min, max] to the OSC address "/&lt;name&gt;".
        /// </summary>
        /// <param name="osc">The OSC UDP writer</param>
        /// <param name="name">The name of the controller</param>
        /// <param name="min">The minimum value to send</param>
        /// <param name="max">The maximum value to send</param>
        /// <param name="contiguous">True if values should be contiguous. False if values should be discrete.</param>
        public AbletonSliderController(UdpWriter osc, string name, float min, float max, bool contiguous)
        {
            // Every controller must provide an OSC address and a writer.
            this.osc = osc;
            this.controlName = "/" + name;

            // Value range and quantization behaviour.
            this.min = min;
            this.max = max;
            this.contiguous = contiguous;
        }
Exemplo n.º 11
0
        /// <summary>
        /// Updates the OSC destination host from the UI and re-creates the
        /// OSC writer, disposing the previous one first.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void SetRecipient(object sender, RoutedEventArgs e)
        {
            oscHost = RecipientIpAddress.Text;
            Console.WriteLine(oscHost);

            // Dispose the old writer before replacing it so its socket is
            // released (the original leaked one UdpWriter per click).
            if (osc != null)
            {
                osc.Dispose();
            }
            osc = new UdpWriter(oscHost, oscPort);

            // Save in settings
            //UserSettings.Default.RecipientIpAddress = RecipientIpAddress.Text;
            //UserSettings.Default.Save();
        }
Exemplo n.º 12
0
        /// <summary>
        /// Window startup: creates the drawing surface, starts the first
        /// connected Kinect with smoothed skeleton tracking, and opens the
        /// OSC output to localhost:7000.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // Display the drawing using our image control
            Image.Source = this.imageSource;

            // Look through all sensors and start the first connected one.
            // This requires that a Kinect is connected at the time of app startup.
            // To make your app robust against plug/unplug,
            // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit (See components in Toolkit Browser).
            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    this.sensor = potentialSensor;
                    break;
                }
            }

            if (null != this.sensor)
            {
                // http://msdn.microsoft.com/en-us/library/jj131024.aspx
                // Skeletal smoothing
                TransformSmoothParameters smoothingParam = new TransformSmoothParameters();
                smoothingParam.Smoothing          = 0.7f;
                smoothingParam.Correction         = 0.2f;
                smoothingParam.Prediction         = 1.0f;
                smoothingParam.JitterRadius       = 1.0f;
                smoothingParam.MaxDeviationRadius = 1.0f;

                // Turn on the skeleton stream to receive skeleton frames
                this.sensor.SkeletonStream.Enable(smoothingParam);

                // Add an event handler to be called whenever there is new skeleton frame data
                this.sensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;

                // Start the sensor!
                try
                {
                    this.sensor.Start();
                    OSCsend = new UdpWriter("127.0.0.1", 7000);
                }
                catch (IOException)
                {
                    // Another process may already own the sensor; treat as absent.
                    // NOTE(review): unlike the sibling handler, no status text is
                    // shown when no sensor is available.
                    this.sensor = null;
                }
            }
        }
Exemplo n.º 13
0
 /// <summary>
 /// (Re-)initializes the OSC UdpWriter from the configured IP and Port,
 /// tearing down any existing writer first. Does nothing when no valid
 /// host/port is configured.
 /// </summary>
 private void InitOscUdpWriter()
 {
     if (this.OscUdpWriter != null)
     {
         this.ResetOscUdpWriter();
     }
     // string.IsNullOrEmpty replaces the manual null/"" comparison; only
     // connect when a host and a positive port are configured.
     if (!string.IsNullOrEmpty(this.IP) && this.Port > 0)
     {
         this.OscUdpWriter = new UdpWriter(this.IP, this.Port);
         this.Status  = "Connected to " + this.IP + ":" + this.Port.ToString();
     }
 }
 /// <summary>
 /// Re-creates the OSC writer from the IP/port text boxes. Any failure
 /// (e.g. a non-numeric port) is surfaced to the user via txtMessage.
 /// </summary>
 /// <param name="sender">object sending the event</param>
 /// <param name="e">event arguments</param>
 private void Reconnect_Click(object sender, RoutedEventArgs e)
 {
     try
     {
         oscArgs[0] = TbIpAddress.Text;
         oscArgs[1] = TbPortNumber.Text;
         // NOTE(review): the previous oscWriter is not disposed before being
         // replaced — possible socket leak; confirm UdpWriter ownership.
         oscWriter  = new UdpWriter(oscArgs[0], Convert.ToInt32(oscArgs[1]));
     }
     catch (Exception ex)
     {
         txtMessage.Text = ex.Message;
     }
 }
Exemplo n.º 15
0
        /// <summary>
        /// Test fixture setup: binds a UDP "server" socket on an OS-assigned
        /// free port and wires a UdpReader/UdpWriter pair to it over loopback.
        /// </summary>
        public void SetUp()
        {
            // Port 0 lets the OS pick a free port.
            this.server = new UdpClient(0);

            int port = ((IPEndPoint)this.server.Client.LocalEndPoint).Port;

            this.remoteIpEndPoint = new IPEndPoint(IPAddress.Loopback, port);

            this.client = new UdpClient();

            // 100 is presumably a buffer/datagram size limit — TODO confirm
            // against the UdpReader/UdpWriter constructors.
            this.reader = new UdpReader(this.server.Client, 100);
            this.writer = new UdpWriter(this.client.Client, 100);
        }
Exemplo n.º 16
0
        /// <summary>
        /// Window startup: builds the OSC UDP writer from App.config
        /// (DestinationAddress / DestinationPort) and starts the Kinect
        /// wrapper. On any failure the error is shown and the window closes.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        void MainWindow_Loaded(object sender, RoutedEventArgs e)
        {
            try {
                udpWriter = new UdpWriter(ConfigurationManager.AppSettings["DestinationAddress"],
                                          int.Parse(ConfigurationManager.AppSettings["DestinationPort"]));

                kinect = new KinectSensorWrapper();
                kinect.AllFrameReady += kinect_AllFrameReady;
                kinect.Start();
            }
            catch (Exception ex) {
                MessageBox.Show(ex.Message);
                Close();
            }
        }
 /// <summary>
 /// Emits joint/orientation data for the tracked skeleton: only the right
 /// hand when fullBody is false, otherwise the full set of tracked joints.
 /// </summary>
 /// <param name="osc">OSC writer passed through to ProcessJointInformation</param>
 /// <param name="fileWriter">CSV writer passed through to ProcessJointInformation</param>
 /// <param name="pointScale">coordinate scale passed through to ProcessJointInformation</param>
 public override void Send(UdpWriter osc, StreamWriter fileWriter, int pointScale)
 {
     // Require a complete skeleton: 20 joints and 20 bone orientations.
     if (skeleton == null || skeleton.Joints == null || skeleton.Joints.Count < 20)
     {
         return;
     }
     if (skeleton.BoneOrientations == null || skeleton.BoneOrientations.Count < 20)
     {
         return;
     }
     if (!fullBody)
     {
         // Hands-only mode: emit just the right hand (message id 15).
         ProcessJointInformation(15, skeleton.Joints[JointType.HandRight], skeleton.BoneOrientations[JointType.HandRight], time, osc, fileWriter, pointScale);
         return;
     }
     // Message id -> Kinect joint type, in the original emission order.
     // Ids 5, 10, 11 and 16 are intentionally unassigned.
     var jointTable = new[]
     {
         Tuple.Create(1, JointType.Head),
         Tuple.Create(2, JointType.ShoulderCenter),
         Tuple.Create(3, JointType.Spine),
         Tuple.Create(4, JointType.HipCenter),
         Tuple.Create(6, JointType.ShoulderLeft),
         Tuple.Create(7, JointType.ElbowLeft),
         Tuple.Create(8, JointType.WristLeft),
         Tuple.Create(9, JointType.HandLeft),
         Tuple.Create(12, JointType.ShoulderRight),
         Tuple.Create(13, JointType.ElbowRight),
         Tuple.Create(14, JointType.WristRight),
         Tuple.Create(15, JointType.HandRight),
         Tuple.Create(17, JointType.HipLeft),
         Tuple.Create(18, JointType.KneeLeft),
         Tuple.Create(19, JointType.AnkleLeft),
         Tuple.Create(20, JointType.FootLeft),
         Tuple.Create(21, JointType.HipRight),
         Tuple.Create(22, JointType.KneeRight),
         Tuple.Create(23, JointType.AnkleRight),
         Tuple.Create(24, JointType.FootRight)
     };
     foreach (var entry in jointTable)
     {
         ProcessJointInformation(entry.Item1, skeleton.Joints[entry.Item2], skeleton.BoneOrientations[entry.Item2], time, osc, fileWriter, pointScale);
     }
 }
Exemplo n.º 18
0
 /// <summary>
 /// Emits the face rotation (pitch/yaw/roll) as one OSC message and/or one
 /// CSV line.
 /// </summary>
 /// <param name="pointScale">unused by this overload (kept for the common Send signature)</param>
 /// <param name="osc">OSC writer, or null to skip OSC output</param>
 /// <param name="fileWriter">CSV writer, or null to skip file output</param>
 public override void Send(int pointScale, UdpWriter osc, StreamWriter fileWriter)
 {
     if (osc != null)
     {
         osc.Send(new OscElement(
                      "/osceleton2/face_rotation",
                      sensorId, user,
                      pitch, yaw, roll,
                      time));
     }
     if (fileWriter != null)
     {
         // Bug fix: the old code only sanitized 'time' with Replace(",", "."),
         // so pitch/yaw/roll produced extra CSV columns in comma-decimal
         // locales. Invariant-culture formatting fixes all fields uniformly.
         fileWriter.WriteLine(string.Format(
                                  System.Globalization.CultureInfo.InvariantCulture,
                                  "FaceRotation,{0},{1},{2},{3},{4},{5}",
                                  sensorId, user, pitch, yaw, roll, time));
     }
 }
Exemplo n.º 19
0
        /// <summary>
        /// Forwards one recorded line ("label timestamp x y z x y z ...",
        /// space-separated) as a single OSC bundle on the given channel.
        /// </summary>
        /// <param name="line">one recording line to forward</param>
        /// <param name="channel">channel name used in the timestamp address</param>
        /// <param name="oscWriter">destination OSC writer</param>
        void sendToOsc(string line, String channel, UdpWriter oscWriter)
        {
            String[] fields = line.Split(' ');
            OscBundle bundle = new OscBundle(0);

            // fields[0] is the label (skipped); fields[1] is the timestamp.
            bundle.AddElement(new OscElement("/" + channel + "/timestamp", fields[1]));

            // Remaining fields come in x/y/z triples; Length - 1 because the
            // trailing linebreak yields one extra empty field when splitting.
            for (int i = 2; i < fields.Length - 1; i += 3)
            {
                bundle.AddElement(new OscElement("/kinect" + ((i + 1) / 3).ToString(),
                                                 fields[i],
                                                 fields[i + 1],
                                                 fields[i + 2]));
            }
            oscWriter.Send(bundle);
        }
Exemplo n.º 20
0
        /// <summary>
        /// Sends every 3D shape point of the tracked face as an OSC bundle of
        /// /kinect&lt;i&gt; (x, y, z) elements.
        /// </summary>
        /// <param name="channel">NOTE(review): currently unused — kept for interface compatibility</param>
        /// <param name="faceFrame">face tracking frame providing the 3D shape</param>
        /// <param name="oscWriter">destination OSC writer</param>
        public void sendOsc(string channel, FaceTrackFrame faceFrame, UdpWriter oscWriter)
        {
            // The original also built a timestamp string and a float[][] copy
            // of the shape on every frame without using either; both removed.
            var shape = faceFrame.Get3DShape();

            OscBundle b = new OscBundle(0);
            int       i = 0;

            foreach (var v in shape)
            {
                var el = new OscElement("/kinect" + i++, v.X, v.Y, v.Z);
                b.AddElement(el);
            }
            oscWriter.Send(b);
        }
Exemplo n.º 21
0
        /// <summary>
        /// Serializes the pending RPC ticket into a UDP datagram and starts the
        /// asynchronous write. On serialization failure the ticket's handler is
        /// unregistered, the ticket is faulted with the exception, and the send
        /// pipeline is advanced via OnSend().
        /// </summary>
        /// <param name="state">unused callback state (timer/threadpool signature)</param>
        private void BuildMessage(object state)
        {
            byte[] datagram;
            try
            {
                UdpWriter uw = new UdpWriter();
                _sendingTicket.BuildRpcMessage(uw);
                datagram = uw.Build();
            }
            catch (Exception ex)
            {
                Log.Debug("UDP datagram not builded (xid:{0}) reason: {1}", _sendingTicket.Xid, ex);
                // Drop the reply handler under the lock so a late response
                // cannot find it.
                lock (_sync)
                    _handlers.Remove(_sendingTicket.Xid);
                _sendingTicket.Except(ex);
                OnSend();
                return;
            }


            Log.Debug("Begin sending UDP datagram (xid:{0})", _sendingTicket.Xid);
            _client.AsyncWrite(datagram, OnDatagramWrited);
        }
 /// <summary>
 /// Emits the face position (scaled, with Y inverted) and rotation as one
 /// OSC message and/or one CSV line.
 /// </summary>
 /// <param name="osc">OSC writer, or null to skip OSC output</param>
 /// <param name="fileWriter">CSV writer, or null to skip file output</param>
 /// <param name="pointScale">scale factor applied to the x/y/z position</param>
 public override void Send(UdpWriter osc, StreamWriter fileWriter, int pointScale)
 {
     if (osc != null)
     {
         osc.Send(new OscElement(
                      "/face",
                      sensorId, user,
                      (float)(x * pointScale), (float)(-y * pointScale), (float)(z * pointScale),
                      rotationX, rotationY, rotationZ,
                      time));
     }
     if (fileWriter != null)
     {
         // Invariant culture keeps '.' as the decimal separator regardless of
         // the thread culture (replaces the ToString().Replace(",", ".") hack).
         fileWriter.WriteLine(string.Format(
                                  System.Globalization.CultureInfo.InvariantCulture,
                                  "Face,{0},{1},{2},{3},{4},{5},{6},{7},{8}",
                                  sensorId, user,
                                  x * pointScale, -y * pointScale, z * pointScale,
                                  rotationX, rotationY, rotationZ, time));
     }
 }
Exemplo n.º 23
0
 /// <summary>
 /// Serializes this frame to OSC and/or CSV output. Implementations must
 /// accept null for either writer and skip that output channel.
 /// </summary>
 /// <param name="pointScale">scale factor for position coordinates (unused by some frame types)</param>
 /// <param name="osc">OSC writer, or null to skip OSC output</param>
 /// <param name="fileWriter">CSV writer, or null to skip file output</param>
 public abstract void Send(int pointScale, UdpWriter osc, StreamWriter fileWriter);
Exemplo n.º 24
0
 /// <summary>
 /// Emits one joint's position message unless the joint or its orientation
 /// is missing. The orientation is only used as a presence check here.
 /// </summary>
 void ProcessJointInformation(int joint, Joint j, JointOrientation jo, double time, int pointScale, UdpWriter osc, StreamWriter fileWriter)
 {
     if (j == null || jo == null)
     {
         return;
     }
     SendJointMessage(joint,
                      j.Position.X, j.Position.Y, j.Position.Z,
                      JointToConfidenceValue(j), time,
                      pointScale, osc, fileWriter);
 }
Exemplo n.º 25
0
        /// <summary>
        /// Window startup: builds the OSC UDP writer from App.config
        /// (DestinationAddress / DestinationPort) and starts the Kinect
        /// wrapper. On any failure the error is shown and the window closes.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        void MainWindow_Loaded( object sender, RoutedEventArgs e )
        {
            try {
                udpWriter = new UdpWriter( ConfigurationManager.AppSettings["DestinationAddress"],
                                           int.Parse( ConfigurationManager.AppSettings["DestinationPort"] ) );

                kinect = new KinectSensorWrapper();
                kinect.AllFrameReady += kinect_AllFrameReady;
                kinect.Start();
            }
            catch ( Exception ex ) {
                MessageBox.Show( ex.Message );
                Close();
            }
        }
Exemplo n.º 26
0
        /// <summary>
        /// Application startup: installs the shortcut, parses command-line
        /// options (key/value pairs), opens OSC/CSV outputs as configured,
        /// sets up the drawing surface and starts every connected Kinect.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void WindowLoaded(object sender, System.Windows.RoutedEventArgs e)
        {
            // Install Shortcut
            CheckForShortcut();

            // Parse commandline arguments as "name value" pairs.
            // NOTE(review): an option name without a following value would make
            // args[index + 1] throw IndexOutOfRangeException — confirm callers
            // always pass pairs. Comparison is case-insensitive because
            // args[index] is lowercased and each literal is ToLower()'d.
            string[] args = Environment.GetCommandLineArgs();
            for (int index = 1; index < args.Length; index += 2)
            {
                args[index] = args[index].ToLower();
                if ("allUsers".ToLower().Equals(args[index])) allUsers = StringToBool(args[index+1]);
                if ("fullBody".ToLower().Equals(args[index])) fullBody = StringToBool(args[index + 1]);
                if ("faceTracking".ToLower().Equals(args[index])) faceTracking = StringToBool(args[index + 1]);
                if ("faceTracking2DMesh".ToLower().Equals(args[index])) faceTracking2DMesh = StringToBool(args[index + 1]);
                if ("faceTrackingHeadPose".ToLower().Equals(args[index])) faceTrackingHeadPose = StringToBool(args[index + 1]);
                if ("faceTrackingAnimationUnits".ToLower().Equals(args[index])) faceTrackingAnimationUnits = StringToBool(args[index + 1]);
                if ("writeOSC".ToLower().Equals(args[index])) writeOSC = StringToBool(args[index + 1]);
                if ("writeCSV".ToLower().Equals(args[index])) writeCSV = StringToBool(args[index + 1]);
                if ("useUnixEpochTime".ToLower().Equals(args[index])) useUnixEpochTime = StringToBool(args[index + 1]);
                if ("oscHost".ToLower().Equals(args[index])) oscHost = args[index + 1];
                if ("oscPort".ToLower().Equals(args[index]))
                {
                    if (!int.TryParse(args[index+1], out oscPort)) {
                        System.Windows.MessageBox.Show("Failed to parse the oscPort argument: " + args[index + 1]);
                    }
                }
                if ("showSkeleton".ToLower().Equals(args[index])) showSkeleton = StringToBool(args[index + 1]);
            }

            // Initialisation
            shuttingDown = false;
            stopwatch = new Stopwatch();
            stopwatch.Reset();
            stopwatch.Start();
            if (writeOSC)
            {
                osc = new UdpWriter(oscHost, oscPort);
            }
            if (writeCSV)
            {
                OpenNewCSVFile();
            }

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // Display the drawing using our image control
            Image.Source = this.imageSource;

            this.checkBoxSeatedMode.IsEnabled = false;
            this.checkBoxShowSkeleton.IsChecked = showSkeleton;

            // Start every Kinect that is already connected...
            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    StartKinect(potentialSensor);
                }
            }

            if (this.sensors.Count == 0)
            {
                this.statusBarText.Text = Properties.Resources.NoKinectReady;
            }

            // ...and react to sensors plugged in or removed later.
            KinectSensor.KinectSensors.StatusChanged += KinectSensorsStatusChanged;
        }
Exemplo n.º 27
0
 /// <summary>
 /// Creates an invoker that sends LED-strip commands over OSC/UDP to the
 /// given endpoint.
 /// </summary>
 /// <param name="ipAddress">destination host or IP</param>
 /// <param name="port">destination UDP port</param>
 internal LedStripesInvoker(string ipAddress, int port)
 {
     udpWriter = new Ventuz.OSC.UdpWriter(ipAddress, port);
 }
Exemplo n.º 28
0
		/// <summary>
		/// Closes the OSC connection: disposes the writer, drops the reference
		/// and raises OnConnectionChanged. Safe to call when not connected.
		/// </summary>
		public void Disconnect()
		{
			if (!m_Connected)
			{
				return;
			}

			m_Message = "Disconnecting";

			try
			{
				OSCWriter.Dispose();

				m_Message = "Disconnected";
			}
			catch (Exception ex)
			{
				m_Message = "Error: " + ex.Message;
			}

			// Drop the writer reference even if Dispose failed.
			OSCWriter = null;

			m_Connected = false;

			OnConnectionChanged();
		}
Exemplo n.º 29
0
 /// <summary>
 /// Emits one hand message (scaled position with Y inverted, confidence,
 /// hand state) as OSC and/or one CSV line.
 /// </summary>
 void SendHandStateMessage(int hand, double x, double y, double z, double confidence, int state, double stateConfidence, double time, int pointScale, UdpWriter osc, StreamWriter fileWriter)
 {
     if (osc != null)
     {
         osc.Send(new OscElement("/osceleton2/hand", sensorId, user, hand,
                                 (float)(x * pointScale), (float)(-y * pointScale), (float)(z * pointScale), (float)confidence,
                                 state, (float)stateConfidence, time));
     }
     if (fileWriter != null)
     {
         // Hand, user, joint, x, y, z, confidence, state, stateConfidence, time
         // Invariant culture keeps '.' as the decimal separator regardless of
         // the thread culture (replaces the ToString().Replace(",", ".") hack).
         fileWriter.WriteLine(string.Format(
                                  System.Globalization.CultureInfo.InvariantCulture,
                                  "Hand,{0},{1},{2},{3},{4},{5},{6},{7},{8},{9}",
                                  sensorId, user, hand,
                                  x * pointScale, -y * pointScale, z * pointScale,
                                  confidence, state, stateConfidence, time));
     }
 }
        /// <summary>
        /// Window startup: sets up the OSC sender (hard-coded to
        /// 127.0.0.1:3333), initializes the Kinect runtime with depth, color
        /// and smoothed skeleton tracking, creates the DTW gesture recognizer
        /// and wires up the frame and speech-recognition handlers.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        void MainWindow_Loaded(object sender, RoutedEventArgs e)
        {
            InitializeButtons();

            // Setup osc sender
            oscArgs[0] = "127.0.0.1";
            oscArgs[1] = "3333";
            oscWriter = new UdpWriter(oscArgs[0], Convert.ToInt32(oscArgs[1]));

            // Initializes depth+player-index, color and skeletal tracking.
            _runtime.Initialize(RuntimeOptions.UseDepthAndPlayerIndex | Microsoft.Research.Kinect.Nui.RuntimeOptions.UseColor | RuntimeOptions.UseSkeletalTracking);
            _runtime.SkeletonEngine.TransformSmooth = true;

            //Use to transform and reduce jitter
            _runtime.SkeletonEngine.SmoothParameters = new TransformSmoothParameters
            {
                Smoothing = 0.5f,
                Correction = 0.3f,
                Prediction = 0.4f,
                JitterRadius = 0.05f,
                MaxDeviationRadius = 0.04f
            };

            try
            {
                _runtime.VideoStream.Open(ImageStreamType.Video, 2, ImageResolution.Resolution640x480, ImageType.Color);
                _runtime.DepthStream.Open(ImageStreamType.Depth, 2, ImageResolution.Resolution320x240, ImageType.DepthAndPlayerIndex);
            }
            catch (InvalidOperationException)
            {
                System.Windows.MessageBox.Show(
                    "Failed to open stream. Please make sure to specify a supported image type and resolution.");
                return;
            }
            _lastTime = DateTime.Now;

            _dtw = new DtwGestureRecognizer(18, 0.6, 2, 2, 10);
            _video = new ArrayList();

            //// If you want to see the depth image and frames per second then include this
            //// I'mma turn this off 'cos my 'puter is proper slow
            _runtime.DepthFrameReady += NuiDepthFrameReady;

            _runtime.SkeletonFrameReady += NuiSkeletonFrameReady;
            _runtime.SkeletonFrameReady += SkeletonExtractSkeletonFrameReady;

            //// If you want to see the RGB stream then include this
            //_runtime.VideoFrameReady += NuiColorFrameReady;

            Skeleton3DDataExtract.Skeleton3DdataCoordReady += NuiSkeleton3DdataCoordReady;

            speechRecognizer = SpeechRecognizer.Create();         //returns null if problem with speech prereqs or instantiation.
            if (speechRecognizer != null)
            {
                speechRecognizer.Start(new KinectAudioSource());  //KinectSDK TODO: expose Runtime.AudioSource to return correct audiosource.
                speechRecognizer.SaidSomething += new EventHandler<SpeechRecognizer.SaidSomethingEventArgs>(recognizer_SaidSomething);
            }
            else
            {
                dtwTextOutput.Text = "No Speech";
                speechRecognizer = null;
            }
        }
Exemplo n.º 31
0
 /// <summary>
 /// Reads the OSC port from OSC_PORT.txt (next to the executable) and opens
 /// the OSC sender on localhost. If the file is missing or unparsable, the
 /// current OSC_PORT value is kept (best effort).
 /// </summary>
 static void SetUpOSCPort()
 {
     //get OSC PORT
     string OSC_PORT_FILE = "OSC_PORT.txt";
     try
     {
         // using: the original never disposed the reader, leaking the
         // file handle.
         using (TextReader tr = new StreamReader(OSC_PORT_FILE))
         {
             OSC_PORT = Convert.ToInt32(tr.ReadLine());
         }
     }
     catch
     {
         // Best effort: fall back to the existing OSC_PORT value.
     }

     OSCSender = new UdpWriter("127.0.0.1", OSC_PORT);
 }
        /// <summary>
        /// Execute startup tasks: create the OSC sender, prepare the UI image
        /// lists, then start every connected Kinect and wire up its frame
        /// callbacks.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            // Setup osc sender
            // NOTE(review): Convert.ToInt32 throws if OscPort.Text is not a
            // valid integer at startup — confirm the XAML default is numeric.
            oscArgs[0] = "127.0.0.1";
            oscArgs[1] = OscPort.Text;
            oscWriter = new UdpWriter(oscArgs[0], Convert.ToInt32(oscArgs[1]));
            // Initialize Data viewer
            oscViewer.Text = "\nData will be shown here\nwhen there is a skeleton\nbeing tracked.";

            // Set up our lists
            visualKinectUnitList = new List<VisualKinectUnit>();

            // Up to four sensors are supported, one skeleton/color image pair each.
            skeletonImageList = new List<System.Windows.Controls.Image>();
            skeletonImageList.Add(Image0);
            skeletonImageList.Add(Image1);
            skeletonImageList.Add(Image2);
            skeletonImageList.Add(Image3);

            colorImageList = new List<System.Windows.Controls.Image>();
            colorImageList.Add(ColorImage0);
            colorImageList.Add(ColorImage1);
            colorImageList.Add(ColorImage2);
            colorImageList.Add(ColorImage3);

            masterSkeletonList = new List<Skeleton>();
            leadSkeletonIDs = new List<int>();

            // Look through all sensors and start the first connected one.
            // This requires that a Kinect is connected at the time of app startup.
            // To make your app robust against plug/unplug,
            // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit
            int numberOfKinects = 0;
            foreach (var potentialSensor in KinectSensor.KinectSensors) {
                if (potentialSensor.Status == KinectStatus.Connected) {
                    // Start the sensor!
                    try {
                        potentialSensor.Start();
                        // Good to go, so count this one as connected!
                        // So let's set up some environment for this...

                        // Position/orientation for this sensor comes from the
                        // kinect*Positions/kinectAngles arrays, indexed by
                        // discovery order — presumably sized >= sensor count;
                        // TODO confirm to avoid IndexOutOfRangeException.
                        LocatedSensor sensor = new LocatedSensor(potentialSensor, kinectXPositions[numberOfKinects],
                                                                                  kinectYPositions[numberOfKinects],
                                                                                  kinectZPositions[numberOfKinects],
                                                                                  kinectAngles[numberOfKinects]);
                        if ((numberOfKinects < colorImageList.Count) && (numberOfKinects < skeletonImageList.Count)) {
                            System.Windows.Controls.Image colorImage = colorImageList[numberOfKinects];
                            System.Windows.Controls.Image skeletonImage = skeletonImageList[numberOfKinects];
                            VisualKinectUnit newSensor = new VisualKinectUnit(sensor, skeletonImage, colorImage);
                            // Add a callback to our updateSkeletons function, so every frameReady event,
                            //  we update our global list of skeletons
                            newSensor.locatedSensor.sensor.SkeletonFrameReady += updateSkeletons;

                            newSensor.locatedSensor.sensor.SkeletonFrameReady += sendOSCHeadOnly;
                            visualKinectUnitList.Add(newSensor);
                        }
                        else {
                            // More sensors than UI slots: track it without visuals.
                            visualKinectUnitList.Add(new VisualKinectUnit(sensor));
                        }
                        numberOfKinects++;
                        Console.WriteLine("Number of Kinects : " + numberOfKinects);
                    }
                    catch (IOException) {
                        // Sensor was grabbed by another process or failed to start.
                        Console.WriteLine("Couldn't start one of the Kinect sensors...");
                    }
                }
            }
        }
Exemplo n.º 33
0
        /// <summary>
        /// Execute startup tasks: create the OSC senders, start every connected
        /// Kinect inside its viewport, then restore per-Kinect calibration and
        /// global (meta) configuration from their JSON files if present.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            // Setup osc sender (defaults; the saved meta configuration below
            // may re-create the writer with a different port).
            oscArgs[0] = "127.0.0.1";
            oscArgs[1] = OscPort.Text;
            oscWriter = new UdpWriter(oscArgs[0], Convert.ToInt32(oscArgs[1]));

            // Secondary sender on a fixed port (7114).
            deltaToscWriter = new UdpWriter(oscArgs[0], 7114);
            // Initialize Data viewer
            oscViewer.Text = "\nData will be shown here\nwhen there is a skeleton\nbeing tracked.";

            kinectGroup = new VisualKinectGroup();

            viewports = new List<KinectViewport>();
            viewports.Add(this.TestViewport);
            viewports.Add(this.TestViewport2);

            // Look through all sensors and start the first connected one.
            // This requires that a Kinect is connected at the time of app startup.
            // To make your app robust against plug/unplug,
            // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit
            int numberOfKinects = 0;
            foreach (var potentialSensor in KinectSensor.KinectSensors) {
                if (potentialSensor.Status == KinectStatus.Connected) {
                    // Start the sensor!
                    try {
                        potentialSensor.Start();
                        // Placed at the origin with no rotation; real offsets are
                        // applied later from the calibration file via the viewports.
                        LocatedSensor sensor = new LocatedSensor(potentialSensor, 0, 0, 0, 0, 0, 0);

                        VisualKinectUnit newSensor = new VisualKinectUnit(sensor, viewports[numberOfKinects].skeletonDrawingImage, viewports[numberOfKinects].colorImage, viewports[numberOfKinects]);
                        kinectGroup.AddVisualKinectUnit(newSensor);

                        // This function sends out skeleton data as OSC
                        newSensor.locatedSensor.sensor.SkeletonFrameReady += sendOSCSkeletonPositions;

                        numberOfKinects++;
                        Console.WriteLine("Number of Kinects : " + numberOfKinects);
                    }
                    catch (IOException) {
                        Console.WriteLine("Couldn't start one of the Kinect sensors...");
                    }
                }
            }

            // Now that we have all of our sensors loaded, let's see if we have any data
            // Try and load in data from a file..
            string configSerialization = "";
            try
            {
                configSerialization = File.ReadAllText(KinectCalibrationFilename);
            }
            catch (Exception exception)
            {
                Console.WriteLine("The file could not be read:");
                Console.WriteLine(exception.Message);
                Console.WriteLine("Using default parameters...");
            }
            // If we got data, parse it!
            if (configSerialization != ""){
                // Deserialize the text we already read instead of hitting the
                // disk a second time (the second read was unguarded and could
                // throw if the file disappeared between reads).
                List<KinectCoordinates> coordinates = JsonConvert.DeserializeObject<List<KinectCoordinates>>(configSerialization);
                for (int i = 0; i < viewports.Count && i < coordinates.Count; i++)
                {
                    viewports[i].xOffset.Text = coordinates[i].xOffset;
                    viewports[i].yOffset.Text = coordinates[i].yOffset;
                    viewports[i].zOffset.Text = coordinates[i].zOffset;
                    viewports[i].pitchAngle.Text = coordinates[i].pitch;
                    viewports[i].rollAngle.Text = coordinates[i].roll;
                    viewports[i].yawAngle.Text = coordinates[i].yaw;
                }
                // Update the info from the kinect windows
                // TODO: Maybe this happens automagically, but maybe not... Test this.
            }

            // Now get the overall calibration stuff, like OSC port and such
            configSerialization = "";
            try
            {
                configSerialization = File.ReadAllText(MetaCalibrationFilename);
            }
            catch (Exception exception)
            {
                Console.WriteLine("The file could not be read:");
                Console.WriteLine(exception.Message);
                Console.WriteLine("Using default parameters...");
            }
            // If we got data, parse it!
            if (configSerialization != "")
            {
                MetaConfiguration config = JsonConvert.DeserializeObject<MetaConfiguration>(configSerialization);
                XOffsetTextBox.Text = config.XOffset;
                YOffsetTextBox.Text = config.YOffset;
                XScaleTextBox.Text = config.XScaling;
                YScaleTextBox.Text = config.YScaling;
                OscAddress.Text = config.OSCAddress;
                OscPort.Text = config.port;

                // Update the actual OSC port stuff
                //  We don't change the scaling and offset stuff since that's
                //  directly read from the text boxen
                oscArgs[1] = OscPort.Text;
                oscWriter = new UdpWriter(oscArgs[0], Convert.ToInt32(oscArgs[1]));
                UpdateOscAddress();
            }
        }
Exemplo n.º 34
0
 /// <summary>
 /// Re-targets the OSC sender when the user commits a new port number in
 /// the port text box by pressing Enter.
 /// </summary>
 /// <param name="sender">object sending the event</param>
 /// <param name="e">key event arguments</param>
 private void OscPortKeyUp(object sender, KeyEventArgs e)
 {
     if (e.Key != Key.Enter)
     {
         return;
     }

     // Guard against non-numeric input: keep the current writer instead of
     // crashing with a FormatException from Convert.ToInt32.
     int port;
     if (int.TryParse(OscPort.Text, out port))
     {
         oscArgs[1] = OscPort.Text;
         oscWriter = new UdpWriter(oscArgs[0], port);
     }
 }
Exemplo n.º 35
0
 /// <summary>
 /// Window startup: starts the session stopwatch, opens the optional OSC
 /// sender and CSV log, and subscribes to Kinect sensor changes.
 /// </summary>
 /// <param name="sender">object sending the event</param>
 /// <param name="e">event arguments</param>
 private void Window_Loaded(object sender, RoutedEventArgs e)
 {
     // Start timing immediately so message timestamps are relative to load.
     stopwatch = new Stopwatch();
     stopwatch.Reset();
     stopwatch.Start();

     if (writeOSC)
     {
         osc = new UdpWriter("127.0.0.1", 7110);
     }

     if (writeFile)
     {
         // Timestamped CSV in the user's personal (Documents) folder.
         string logName = string.Format("points-{0:yyyy-MM-dd_hh-mm-ss-tt}.csv", DateTime.Now);
         string logFolder = Environment.GetFolderPath(Environment.SpecialFolder.Personal);
         fileWriter = new StreamWriter(logFolder + "/" + logName, false);
         fileWriter.WriteLine("Joint, user, joint, x, y, z, on");
     }

     kinectSensorChooser1.KinectSensorChanged += new DependencyPropertyChangedEventHandler(kinectSensorChooser1_KinectSensorChanged);
 }
Exemplo n.º 36
0
 /// <summary>
 /// Converts a hand joint's state and confidences to numeric values and
 /// forwards them (with the joint position) to SendHandStateMessage.
 /// </summary>
 void ProcessHandStateInformation(int joint, Joint j, JointOrientation jo, HandState state, TrackingConfidence confidence, double time, int pointScale, UdpWriter osc, StreamWriter fileWriter)
 {
     // Translate SDK enums into OSC/CSV-friendly numbers before forwarding.
     var jointConfidence = JointToConfidenceValue(j);
     var handStateValue = HandStateToValue(state);
     var handConfidence = TrackingConfidenceToValue(confidence);

     SendHandStateMessage(joint,
                          j.Position.X, j.Position.Y, j.Position.Z,
                          jointConfidence,
                          handStateValue,
                          handConfidence,
                          time, pointScale, osc, fileWriter);
 }
Exemplo n.º 37
0
 /// <summary>
 /// Emits one joint sample as an OSC "/osceleton2/joint" message and/or a
 /// CSV line. Y is negated and all coordinates scaled by pointScale. Either
 /// destination may be null and is then skipped.
 /// </summary>
 void SendJointMessage(int joint, double x, double y, double z, double confidence, double time, int pointScale, UdpWriter osc, StreamWriter fileWriter)
 {
     if (osc != null)
     {
         osc.Send(new OscElement("/osceleton2/joint", oscMapping[joint], sensorId, user, (float)(x * pointScale), (float)(-y * pointScale), (float)(z * pointScale), (float)confidence, time));
     }
     if (fileWriter != null)
     {
         // Joint, user, joint, x, y, z, confidence, time
         // Invariant culture guarantees '.' as the decimal separator on any
         // machine locale (replaces the fragile Replace(",", ".") hack).
         System.Globalization.CultureInfo inv = System.Globalization.CultureInfo.InvariantCulture;
         fileWriter.WriteLine("Joint," + sensorId + "," + user + "," + joint + "," +
                              (x * pointScale).ToString(inv) + "," +
                              (-y * pointScale).ToString(inv) + "," +
                              (z * pointScale).ToString(inv) + "," +
                              confidence.ToString(inv) + "," +
                              time.ToString(inv));
     }
 }
Exemplo n.º 38
0
        /// <summary>
        /// Execute startup tasks: set up the drawing surface, find and start the
        /// first connected Kinect with skeleton + color streams, then create the
        /// OSC sender.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // Display the drawing using our image control
            Image.Source = this.imageSource;

            // Look through all sensors and start the first connected one.
            // This requires that a Kinect is connected at the time of app startup.
            // To make your app robust against plug/unplug,
            // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit
            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    this.sensor = potentialSensor;
                    break;
                }
            }

            if (null != this.sensor)
            {
                // Turn on the skeleton stream to receive skeleton frames
                this.sensor.SkeletonStream.Enable();

                // Add an event handler to be called whenever there is new color frame data
                this.sensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;

                // Turn on the color stream to receive color frames
                this.sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);

                // Allocate space to put the pixels we'll receive
                this.colorPixels = new byte[this.sensor.ColorStream.FramePixelDataLength];

                // This is the bitmap we'll display on-screen
                this.colorBitmap = new WriteableBitmap(this.sensor.ColorStream.FrameWidth, this.sensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);

                // Set the image we display to point to the bitmap where we'll put the image data
                this.ColorImage.Source = this.colorBitmap;

                // Add an event handler to be called whenever there is new color frame data
                this.sensor.ColorFrameReady += this.SensorColorFrameReady;

                // Start the sensor!
                try
                {
                    this.sensor.Start();
                }
                catch (IOException)
                {
                    // Sensor in use by another process: treat as "no Kinect".
                    this.sensor = null;
                }
            }

            if (null == this.sensor)
            {
                this.statusBarText.Text = Properties.Resources.NoKinectReady;
            }

            // Setup osc sender
            // NOTE(review): Convert.ToInt32 throws if OscPort.Text is not a
            // valid integer at startup — confirm the XAML default is numeric.
            oscArgs[0] = "127.0.0.1";
            oscArgs[1] = OscPort.Text;
            oscWriter = new UdpWriter(oscArgs[0], Convert.ToInt32(oscArgs[1]));

            // Initialize Data viewer
            oscViewer.Text = "\nData will be shown here\nwhen there is a skeleton\nbeing tracked.";
        }
Exemplo n.º 39
0
		/// <summary>
		/// Opens the OSC connection if not already connected, validating the
		/// destination Address and Port first. Progress and errors are reported
		/// through m_Message; OnConnectionChanged fires on success.
		/// </summary>
		public void Connect()
		{
			// Already connected: nothing to do.
			if (m_Connected)
			{
				return;
			}

			m_Message = "Connecting";

			// Validate destination before attempting to open the socket.
			if (String.IsNullOrWhiteSpace(Address))
			{
				m_Message = "Destination IP address has not been supplied";
				return;
			}

			if (Port <= 0)
			{
				m_Message = "Destination Port is invalid";
				return;
			}

			try
			{
				OSCWriter = new UdpWriter(Address, Port);

				m_Connected = true;
				m_Message = "Connected to '" + Address + ":" + Port + "'";

				OnConnectionChanged();
			}
			catch (Exception ex)
			{
				// Surface the failure through the status message; stay disconnected.
				m_Message = "Error: " + ex.Message;
			}
		}
Exemplo n.º 40
0
        /// <summary>
        /// Execute startup tasks: create the OSC senders, start every connected
        /// Kinect inside its viewport, then restore per-Kinect calibration and
        /// global (meta) configuration from their JSON files if present.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            // Setup osc sender (defaults; the saved meta configuration below
            // may re-create the writer with a different port).
            oscArgs[0] = "127.0.0.1";
            oscArgs[1] = OscPort.Text;
            oscWriter  = new UdpWriter(oscArgs[0], Convert.ToInt32(oscArgs[1]));

            // Secondary sender on a fixed port (7114).
            deltaToscWriter = new UdpWriter(oscArgs[0], 7114);
            // Initialize Data viewer
            oscViewer.Text = "\nData will be shown here\nwhen there is a skeleton\nbeing tracked.";

            kinectGroup = new VisualKinectGroup();

            viewports = new List <KinectViewport>();
            viewports.Add(this.TestViewport);
            viewports.Add(this.TestViewport2);

            // Look through all sensors and start the first connected one.
            // This requires that a Kinect is connected at the time of app startup.
            // To make your app robust against plug/unplug,
            // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit
            int numberOfKinects = 0;

            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    // Start the sensor!
                    try {
                        potentialSensor.Start();
                        // Placed at the origin with no rotation; real offsets are
                        // applied later from the calibration file via the viewports.
                        LocatedSensor sensor = new LocatedSensor(potentialSensor, 0, 0, 0, 0, 0, 0);

                        VisualKinectUnit newSensor = new VisualKinectUnit(sensor, viewports[numberOfKinects].skeletonDrawingImage, viewports[numberOfKinects].colorImage, viewports[numberOfKinects]);
                        kinectGroup.AddVisualKinectUnit(newSensor);

                        // This function sends out skeleton data as OSC
                        newSensor.locatedSensor.sensor.SkeletonFrameReady += sendOSCSkeletonPositions;

                        numberOfKinects++;
                        Console.WriteLine("Number of Kinects : " + numberOfKinects);
                    }
                    catch (IOException) {
                        Console.WriteLine("Couldn't start one of the Kinect sensors...");
                    }
                }
            }

            // Now that we have all of our sensors loaded, let's see if we have any data
            // Try and load in data from a file..
            string configSerialization = "";

            try
            {
                configSerialization = File.ReadAllText(KinectCalibrationFilename);
            }
            catch (Exception exception)
            {
                Console.WriteLine("The file could not be read:");
                Console.WriteLine(exception.Message);
                Console.WriteLine("Using default parameters...");
            }
            // If we got data, parse it!
            if (configSerialization != "")
            {
                // Deserialize the text we already read instead of hitting the
                // disk a second time (the second read was unguarded and could
                // throw if the file disappeared between reads).
                List <KinectCoordinates> coordinates = JsonConvert.DeserializeObject <List <KinectCoordinates> >(configSerialization);
                for (int i = 0; i < viewports.Count && i < coordinates.Count; i++)
                {
                    viewports[i].xOffset.Text    = coordinates[i].xOffset;
                    viewports[i].yOffset.Text    = coordinates[i].yOffset;
                    viewports[i].zOffset.Text    = coordinates[i].zOffset;
                    viewports[i].pitchAngle.Text = coordinates[i].pitch;
                    viewports[i].rollAngle.Text  = coordinates[i].roll;
                    viewports[i].yawAngle.Text   = coordinates[i].yaw;
                }
                // Update the info from the kinect windows
                // TODO: Maybe this happens automagically, but maybe not... Test this.
            }

            // Now get the overall calibration stuff, like OSC port and such
            configSerialization = "";
            try
            {
                configSerialization = File.ReadAllText(MetaCalibrationFilename);
            }
            catch (Exception exception)
            {
                Console.WriteLine("The file could not be read:");
                Console.WriteLine(exception.Message);
                Console.WriteLine("Using default parameters...");
            }
            // If we got data, parse it!
            if (configSerialization != "")
            {
                MetaConfiguration config = JsonConvert.DeserializeObject <MetaConfiguration>(configSerialization);
                XOffsetTextBox.Text = config.XOffset;
                YOffsetTextBox.Text = config.YOffset;
                XScaleTextBox.Text  = config.XScaling;
                YScaleTextBox.Text  = config.YScaling;
                OscAddress.Text     = config.OSCAddress;
                OscPort.Text        = config.port;

                // Update the actual OSC port stuff
                //  We don't change the scaling and offset stuff since that's
                //  directly read from the text boxen
                oscArgs[1] = OscPort.Text;
                oscWriter  = new UdpWriter(oscArgs[0], Convert.ToInt32(oscArgs[1]));
                UpdateOscAddress();
            }
        }
Exemplo n.º 41
0
        /// <summary>
        /// Sends the tracked body's joints — or just its hands when handsOnly
        /// is set — over OSC and/or to the CSV log. Silently returns when the
        /// body is absent, untracked, or its joint data is incomplete.
        /// </summary>
        /// <param name="pointScale">scale factor applied to joint coordinates</param>
        /// <param name="osc">OSC destination; may be null</param>
        /// <param name="fileWriter">CSV destination; may be null</param>
        public override void Send(int pointScale, UdpWriter osc, StreamWriter fileWriter)
        {
            // Single guard replacing the original chain of early returns:
            // require a tracked body with at least 20 joints and orientations.
            if (body == null ||
                body.Joints == null || body.Joints.Count < 20 ||
                body.JointOrientations == null || body.JointOrientations.Count < 20 ||
                !body.IsTracked)
            {
                return;
            }
            try
            {
                if (handsOnly)
                {
                    ProcessHandStateInformation(1, body.Joints[JointType.HandLeft], body.JointOrientations[JointType.HandLeft], body.HandLeftState, body.HandLeftConfidence, time, pointScale, osc, fileWriter);
                    ProcessHandStateInformation(2, body.Joints[JointType.HandRight], body.JointOrientations[JointType.HandRight], body.HandRightState, body.HandRightConfidence, time, pointScale, osc, fileWriter);
                }
                else
                {
                    // Joint indices follow the OSCeleton numbering; indices
                    // 5, 10, 11 and 16 have no Kinect v2 equivalent and are skipped.
                    ProcessJointInformation(1, body.Joints[JointType.Head], body.JointOrientations[JointType.Head], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(2, body.Joints[JointType.SpineShoulder], body.JointOrientations[JointType.SpineShoulder], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(3, body.Joints[JointType.SpineMid], body.JointOrientations[JointType.SpineMid], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(4, body.Joints[JointType.SpineBase], body.JointOrientations[JointType.SpineBase], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(6, body.Joints[JointType.ShoulderLeft], body.JointOrientations[JointType.ShoulderLeft], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(7, body.Joints[JointType.ElbowLeft], body.JointOrientations[JointType.ElbowLeft], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(8, body.Joints[JointType.WristLeft], body.JointOrientations[JointType.WristLeft], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(9, body.Joints[JointType.HandLeft], body.JointOrientations[JointType.HandLeft], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(12, body.Joints[JointType.ShoulderRight], body.JointOrientations[JointType.ShoulderRight], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(13, body.Joints[JointType.ElbowRight], body.JointOrientations[JointType.ElbowRight], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(14, body.Joints[JointType.WristRight], body.JointOrientations[JointType.WristRight], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(15, body.Joints[JointType.HandRight], body.JointOrientations[JointType.HandRight], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(17, body.Joints[JointType.HipLeft], body.JointOrientations[JointType.HipLeft], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(18, body.Joints[JointType.KneeLeft], body.JointOrientations[JointType.KneeLeft], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(19, body.Joints[JointType.AnkleLeft], body.JointOrientations[JointType.AnkleLeft], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(20, body.Joints[JointType.FootLeft], body.JointOrientations[JointType.FootLeft], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(21, body.Joints[JointType.HipRight], body.JointOrientations[JointType.HipRight], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(22, body.Joints[JointType.KneeRight], body.JointOrientations[JointType.KneeRight], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(23, body.Joints[JointType.AnkleRight], body.JointOrientations[JointType.AnkleRight], time, pointScale, osc, fileWriter);
                    ProcessJointInformation(24, body.Joints[JointType.FootRight], body.JointOrientations[JointType.FootRight], time, pointScale, osc, fileWriter);

                    ProcessHandStateInformation(1, body.Joints[JointType.HandLeft], body.JointOrientations[JointType.HandLeft], body.HandLeftState, body.HandLeftConfidence, time, pointScale, osc, fileWriter);
                    ProcessHandStateInformation(2, body.Joints[JointType.HandRight], body.JointOrientations[JointType.HandRight], body.HandRightState, body.HandRightConfidence, time, pointScale, osc, fileWriter);
                }
            } catch (NullReferenceException ex) {
                // Happens sometimes. Probably because we should copy body before processing it in another thread.
                Console.WriteLine(ex.Message);
            }
        }
Exemplo n.º 42
0
 /// <summary>
 /// Creates an OSC sender targeting the given address and port
 /// (defaults: localhost, 5001).
 /// </summary>
 /// <param name="ipAdress">destination IP address</param>
 /// <param name="port">destination UDP port</param>
 public OSC(string ipAdress = "127.0.0.1", ushort port = 5001)
 {
     disposed = false;
     // ushort widens implicitly to int; the explicit (int) cast was redundant.
     writer = new UdpWriter(ipAdress, port);
 }
        /// <summary>
        /// Window startup: finds the first connected Kinect, enables the
        /// infrared-format color stream, prepares display bitmaps, creates the
        /// OSC sender and a 4-second timer, then loads settings and starts the
        /// sensor. Shows a message if no Kinect is found.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void MainWindow_Loaded(object sender, RoutedEventArgs e)
        {
            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    sensor = potentialSensor;
                    break;
                }
            }

            if (sensor != null)
            {
                // don't want to turn it off anymore
                // using it to create reflection
                //try
                //{
                //    sensor.ForceInfraredEmitterOff = true;
                //}
                //catch (Exception)
                //{
                //    Console.WriteLine("You can't turn off the infrared emitter on XBOX Kinect");
                //}
                // IR frames arrive on the color stream at 640x480.
                sensor.ColorStream.Enable(ColorImageFormat.InfraredResolution640x480Fps30);
                irPixels = new byte[sensor.ColorStream.FramePixelDataLength];
                // Bgr32 display buffer needs twice the IR (Gray16) byte count.
                colorPixels = new byte[sensor.ColorStream.FramePixelDataLength*2];
                colorBitmap = new WriteableBitmap(sensor.ColorStream.FrameWidth, sensor.ColorStream.FrameHeight, 96.0,
                                                  96.0, PixelFormats.Bgr32, null);
                irBitmap = new WriteableBitmap(sensor.ColorStream.FrameWidth, sensor.ColorStream.FrameHeight, 96.0, 96.0,
                                               PixelFormats.Gray16, null);

                convertedImage = new FormatConvertedBitmap();

                sensor.AllFramesReady += sensor_AllFramesReady;

                //_detector = new FGDetector<Bgr>(FORGROUND_DETECTOR_TYPE.FGD);

                //_tracker = new BlobTrackerAuto<Bgr>();

                // Setup osc sender
                oscArgs[0] = oscAddress;
                oscArgs[1] = oscPort;
                oscWriter = new UdpWriter(oscArgs[0], Convert.ToInt32(oscArgs[1]));
                //oscWriter.Dispose();
                //oscWriter = new UdpWriter(oscArgs[0], Convert.ToInt32(oscArgs[1]));
                // 4-second periodic timer; handler is TimerOnElapsed.
                timer = new Timer();
                timer.Interval = 4000;
                timer.Elapsed += TimerOnElapsed;

                LoadSettingsFromFile();

                try
                {
                    sensor.Start();
                }
                catch (IOException)
                {
                    // Sensor in use by another process: treat as "no Kinect".
                    sensor = null;
                }

            }

            if (sensor == null)
            {
                //outputViewbox.Visibility = System.Windows.Visibility.Collapsed;
                txtMessage.Text = "No Kinect Found.\nPlease plug in Kinect\nand restart this application.";
            }
        }
Exemplo n.º 44
0
        /// <summary>
        /// Execute startup tasks: set up the drawing surface, find the first
        /// connected Kinect, enable a smoothed skeleton stream, and start the
        /// sensor together with an OSC sender on port 7000.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // Display the drawing using our image control
            Image.Source = this.imageSource;

            // Look through all sensors and start the first connected one.
            // This requires that a Kinect is connected at the time of app startup.
            // To make your app robust against plug/unplug,
            // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit (See components in Toolkit Browser).
            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    this.sensor = potentialSensor;
                    break;
                }
            }

            if (null != this.sensor)
            {
                // http://msdn.microsoft.com/en-us/library/jj131024.aspx
                // Skeletal smoothing
                TransformSmoothParameters smoothingParam = new TransformSmoothParameters();
                smoothingParam.Smoothing = 0.7f;
                smoothingParam.Correction = 0.2f;
                smoothingParam.Prediction = 1.0f;
                smoothingParam.JitterRadius = 1.0f;
                smoothingParam.MaxDeviationRadius = 1.0f;

                // Turn on the skeleton stream to receive skeleton frames
                this.sensor.SkeletonStream.Enable(smoothingParam);

                // Add an event handler to be called whenever there is new color frame data
                this.sensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;

                // Start the sensor!
                try
                {
                    this.sensor.Start();
                    // OSC sender created only after the sensor starts successfully.
                    OSCsend = new UdpWriter("127.0.0.1", 7000);
                }
                catch (IOException)
                {
                    // Sensor in use by another process: treat as "no Kinect".
                    this.sensor = null;
                }
            }
        }
Exemplo n.º 45
0
        /// <summary>
        /// Execute startup tasks: parse command-line options, initialise the
        /// OSC/CSV writers, create the drawing surface, and start every Kinect
        /// sensor that is already connected.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void WindowLoaded(object sender, System.Windows.RoutedEventArgs e)
        {
            // Install Shortcut
            CheckForShortcut();

            ParseCommandLineArguments();

            // Initialisation
            shuttingDown = false;
            stopwatch    = new Stopwatch();
            stopwatch.Reset();
            stopwatch.Start();
            if (writeOSC)
            {
                osc = new UdpWriter(oscHost, oscPort);
            }
            if (writeCSV)
            {
                OpenNewCSVFile();
            }

            // Drawing surface: a DrawingGroup rendered through a DrawingImage
            // and shown in the Image control.
            this.drawingGroup = new DrawingGroup();
            this.imageSource = new DrawingImage(this.drawingGroup);
            Image.Source = this.imageSource;

            this.checkBoxSeatedMode.IsEnabled   = false;
            this.checkBoxShowSkeleton.IsChecked = showSkeleton;

            // Start every Kinect that is connected at application startup.
            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    StartKinect(potentialSensor);
                }
            }

            if (this.sensors.Count == 0)
            {
                this.statusBarText.Text = Properties.Resources.NoKinectReady;
            }

            KinectSensor.KinectSensors.StatusChanged += KinectSensorsStatusChanged;
        }

        /// <summary>
        /// Parse command-line arguments as case-insensitive "name value" pairs
        /// and assign the matching configuration fields.
        /// </summary>
        private void ParseCommandLineArguments()
        {
            string[] args = Environment.GetCommandLineArgs();

            // BUGFIX: loop only while a value token exists (index < args.Length - 1).
            // The original read args[index + 1] unconditionally, which threw
            // IndexOutOfRangeException when an option name was the last token.
            for (int index = 1; index < args.Length - 1; index += 2)
            {
                // Lowercase once per option instead of lowercasing every
                // candidate literal on every comparison.
                string option = args[index].ToLower();
                string value  = args[index + 1];

                switch (option)
                {
                    case "allusers":
                        allUsers = StringToBool(value);
                        break;
                    case "fullbody":
                        fullBody = StringToBool(value);
                        break;
                    case "facetracking":
                        faceTracking = StringToBool(value);
                        break;
                    case "facetracking2dmesh":
                        faceTracking2DMesh = StringToBool(value);
                        break;
                    case "facetrackingheadpose":
                        faceTrackingHeadPose = StringToBool(value);
                        break;
                    case "facetrackinganimationunits":
                        faceTrackingAnimationUnits = StringToBool(value);
                        break;
                    case "writeosc":
                        writeOSC = StringToBool(value);
                        break;
                    case "writecsv":
                        writeCSV = StringToBool(value);
                        break;
                    case "useunixepochtime":
                        useUnixEpochTime = StringToBool(value);
                        break;
                    case "oschost":
                        oscHost = value;
                        break;
                    case "oscport":
                        // Report (rather than crash on) a malformed port number.
                        if (!int.TryParse(value, out oscPort))
                        {
                            System.Windows.MessageBox.Show("Failed to parse the oscPort argument: " + value);
                        }
                        break;
                    case "showskeleton":
                        showSkeleton = StringToBool(value);
                        break;
                }
            }
        }
Exemplo n.º 46
0
 /// <summary>
 /// Re-create the OSC writer on the port entered in the OscPort text box.
 /// </summary>
 /// <param name="sender">object sending the event</param>
 /// <param name="e">event arguments</param>
 private void ChangePortClicked(object sender, RoutedEventArgs e)
 {
     // BUGFIX: Convert.ToInt32 threw FormatException/OverflowException on
     // non-numeric input, after oscArgs[1] had already been overwritten with
     // the bad value. Validate first and keep the current writer on failure.
     int port;
     if (!int.TryParse(OscPort.Text, out port))
     {
         return;
     }
     oscArgs[1] = OscPort.Text;
     oscWriter  = new UdpWriter(oscArgs[0], port);
 }