/// <summary>
/// Initializes the form, allocates reusable Kinect data/bitmap buffers,
/// and connects to a local Kinect, subscribing to its data-ready event.
/// </summary>
public Form1()
{
    InitializeComponent();

    // Turn on key preview so the form sees key events before focused controls.
    this.KeyPreview = true;

    // Allocate Kinect data structure once up front for speed.
    data = new KinectData();

    // Allocate bitmaps once for speed.
    //!! Should use System.Windows.Media.Imaging.WriteableBitmap
    colorBitmap = new Bitmap(640, 480, PixelFormat.Format24bppRgb);
    depthBitmap = new Bitmap(640, 480, PixelFormat.Format24bppRgb);
    testBitmap = new Bitmap(640, 480, PixelFormat.Format24bppRgb);
    demoBitmap = new Bitmap(640, 480, PixelFormat.Format24bppRgb);

    // Set up session parameters; validity/test images are not needed here.
    SessionParameters sessionParams = new SessionParameters(KinectDataParams.EnableType.All);
    sessionParams.DataParams.validityImageEnable = false;
    sessionParams.DataParams.testImageEnable = false;

    // Connect to a local Kinect and hook up to the data event.
    // (Removed a redundant trailing `return;` — constructors fall through.)
    client = KinectTableNet.KinectTable.ConnectLocal(sessionParams);
    client.DataReady += new Client.DataReadyHandler(client_DataReady);
}
// Frame hook intended to forward depth frames to the server.
// Sending is currently disabled; the guard keeps the hook safe to call
// before the server instance exists.
public void NewFrameProcessor(KinectData depth, EventArgs e)
{
    if (Server == null)
    {
        return;
    }
    //Server.Send(depth);
}
// Listener invoked each time a new camera frame has been captured.
// Tracks FPS on depth frames and, when previewing is enabled, paints the
// depth/color canvases and the detected-objects overlay.
public void NewFrameListener(KinectData data, EventArgs e)
{
    bool hasDepth = data.DepthArray != null;

    if (hasDepth)
    {
        PrintFPS();
    }

    if (!EnablePreview)
    {
        return;
    }

    if (hasDepth)
    {
        if (fixedCanvas != null)
        {
            PrintDepthOnCanvas(data.DepthArray, fixedCanvas, data.Width, data.Height, data.MaxDepth);
        }
        if (rawCanvas != null)
        {
            PrintDepthOnCanvas(data.RawDepthArray, rawCanvas, data.Width, data.Height, data.MaxDepth);
        }
    }

    if (data.ColorImage != null)
    {
        currentData = data;
        if (rawColorCanvas != null)
        {
            PrintColorOnCanvas(data.ColorImage, rawColorCanvas, data.Width, data.Height);
        }
    }

    PrintOutputCanvasLayer(outputCanvasLayer, data.DetectedObjects, data.Width, data.Height);
}
// Computes the registered-color-stream color for the depth pixel at `index`
// of `sourceKinectData`, mapping depth coordinates into color coordinates via
// the Kinect SDK. Defaults to opaque black when no mapping is available.
// (Method name typo "Regsitered" is kept — callers depend on it.)
void GetColorForRegsiteredColorStreamPixel(ref int index, ref KinectData sourceKinectData, ref Color32 result)
{
    result = new Color32(0, 0, 0, 255);

    // BUGFIX: the guard previously checked fullResolutionKinectData's color
    // buffer while the lookup below reads sourceKinectData's buffer; guard
    // the buffer that is actually indexed.
    if (sourceKinectData.RawColorStreamColors != null)
    {
        int x = index % sourceKinectData.Width;
        int y = index / sourceKinectData.Width;
        int cx, cy;
        int hr = KinectWrapper.NuiImageGetColorPixelCoordinatesFromDepthPixelAtResolution(
            KinectWrapper.Constants.ColorImageResolution,
            KinectWrapper.Constants.DepthImageResolution,
            ref KinectCoordinatesAdjustment,
            x, y,
            sourceKinectData.RawDepths[index],
            out cx, out cy);
        if (hr == 0)
        {
            // NOTE(review): cx/cy are color-space coordinates but are indexed
            // with sourceKinectData.Width — this is only valid while color and
            // depth frames share a width; confirm against the wrapper setup.
            int colorIndex = cy * sourceKinectData.Width + cx;
            if (colorIndex >= 0 && colorIndex < sourceKinectData.Size)
            {
                result = sourceKinectData.RawColorStreamColors[colorIndex];
            }
        }
    }
}
// Allocates the KinectData block that processed Kinect frames are written
// into, sized from the Kinect v1 wrapper's stream constants.
protected override void InitializeStorage()
{
    int colorWidth = KinectV1Wrapper.Constants.ColorImageWidth;
    int colorHeight = KinectV1Wrapper.Constants.ColorImageHeight;
    int depthWidth = KinectV1Wrapper.Constants.DepthImageWidth;
    int depthHeight = KinectV1Wrapper.Constants.DepthImageHeight;
    int bytesPerPixel = KinectV1Wrapper.Constants.BytesPerPixel;

    processedKinectData = new KinectData(
        colorWidth, colorHeight,
        depthWidth, depthHeight,
        bytesPerPixel, downsampleSize, gameObject.transform);
}
// Succeeds when the left hand is held at the head (within a small tolerance).
public GestureSecResult Update(KinectData client)
{
    const float allowedErr = 0.05f;
    float handToHead = (client.lefthand - client.head).magnitude;
    return handToHead <= allowedErr ? GestureSecResult.Succeeded : GestureSecResult.Failed;
}
// Sets up the low/full resolution data buffers, the previous-depth scratch
// array, and the scale factors that map full resolution down to low.
void InitializeFeeds()
{
    lowResolutionKinectData = new KinectData(
        KinectWrapper.Constants.TargetWidth,
        KinectWrapper.Constants.TargetHeight);
    fullResolutionKinectData = new KinectData(
        KinectWrapper.Constants.ColorImageWidth,
        KinectWrapper.Constants.ColorImageHeight);

    previousCorrectedDepthData = new ushort[fullResolutionKinectData.Size];

    scaleX = lowResolutionKinectData.Width / (float)fullResolutionKinectData.Width;
    scaleY = lowResolutionKinectData.Height / (float)fullResolutionKinectData.Height;
}
// Executes an upload command received from a connected client.
// action: numeric UploadCommands value; data: command payload (Base64 for
// the kinect-data commands). Returns true only when the command was
// recognized AND this client's type is allowed to issue it.
bool execute_upload_command(int action, string data)
{
    UploadCommands c = (UploadCommands)action;
    // server.updateServerMsg("Received data from " + getTypeString(clientType) + clientID + " : " + c.ToString());
    try
    {
        switch (c)
        {
        case UploadCommands.Update_knect_data_in_Base64_format:
            // Update knect data in Base64 String format Used by Kinect Client
            if (clientType == (int)clientTypes.KINECT)
            {
                // Decode the payload, transform it into this client's shared
                // coordinate frame, then publish it as the current parameters.
                KinectData k = new KinectData();
                k.AssignByAllParameterStringInBase64(data);
                k.transformTo(GUIComponents.fc.fp.getTranfMatrix()[clientID]);
                kinectParameter = k;
                updateKinectInfo(kinectParameter.printKinectParameters());
                return(true);
            }
            else
            {
                return(false);
            }
        case UploadCommands.Update_fused_knect_data_in_Base64_format:
            // Update fused knect data in Base64 String format Used by Data Processor Client
            if (clientType == (int)clientTypes.DATA_PROCESSOR)
            {
                // server.fusedKinectParametersString = data;
                server.fusedKinectParameter.AssignByAllParameterStringInBase64(data);
                return(true);
            }
            else
            {
                return(false);
            }
        case UploadCommands.Update_raw_data:
            // Raw image payload: hand it to the GUI component for this client.
            if (clientType == (int)clientTypes.KINECT)
            {
                parentGUI.kinectCompList[clientID].updateKinectImages(data); //colorImageWritableBitmap,depthImageWritableBitmap);
                return(true);
            }
            return(false);
        default:
            return(false);
        }
    }
    catch
    {
        // NOTE(review): failures are reported only as `false`; the exception
        // detail is swallowed — consider logging before returning.
        return(false);
    }
}
// Succeeds when both hands are below their elbows and horizontally aligned
// with them (hand.x no more than allowedErr beyond elbow.x on each side).
// Guard clauses preserve the original left-to-right evaluation order.
public GestureSecResult Update(KinectData client)
{
    const float allowedErr = 0.05f;

    if (client.righthand.y >= client.rightelbow.y)
    {
        return GestureSecResult.Failed;
    }
    if (client.lefthand.y >= client.leftelbow.y)
    {
        return GestureSecResult.Failed;
    }
    if (client.righthand.x - client.rightelbow.x > allowedErr)
    {
        return GestureSecResult.Failed;
    }
    if (client.lefthand.x - client.leftelbow.x > allowedErr)
    {
        return GestureSecResult.Failed;
    }
    return GestureSecResult.Succeeded;
}
// Unity initialization: cache physics/animation components, create and start
// the Kinect input source, and wire up the flap sound clip.
void Start()
{
    rb2d = GetComponent<Rigidbody2D>();
    anim = GetComponent<Animator>();

    player = new KinectData();
    player.Start();
    playerFound = false;

    birdFlap.clip = flap;
    played = false;
}
// Queues the monitored joint's orientation for this frame, or null when the
// joint is not currently tracked (orientation is only read when tracked).
public void OnKDNewTData(object sender, KinectData e)
{
    bool isTracked = e.Body.Joints[this.JT].TrackingState == TrackingState.Tracked;
    this.KDQuatQueue.Add(isTracked
        ? e.Body.JointOrientations[this.JT].Orientation.ToQuaternion()
        : null);
}
// Renders the demo visualization onto `bitmap`: the color image, a lightened
// table region, and per-hand overlays (boundaries plus finger-tip, finger-base,
// arm-base and palm markers). Returns whether a color image was available.
public static bool CreateDemoImage(Bitmap bitmap, KinectData data)
{
    // Base layer: the raw color image.
    if (data.Available.colorImageEnable)
    {
        ImageFrameConverter.SetColorImage(bitmap, data.ColorImage);
    }

    // Lighten the detected table region.
    if (data.Available.tableEnable)
    {
        ImageFrame tableBlob = data.TableInfo.CreateTableBlob(true);
        DarkenRegion(bitmap, tableBlob, 0.25f);
    }

    // Overlay hand/arm annotations.
    if (data.Available.handsEnable)
    {
        // Draw arm shadows
        //DrawArmShadows(bitmap, data.TableInfo, data.Hands);

        // Boundary palette indexed by hand id + 1.
        Color[] armColors = { Color.Black, Color.Red, Color.Green, Color.Blue };
        foreach (Hand hand in data.Hands)
        {
            int paletteIndex = hand.Id + 1;
            if (paletteIndex < armColors.Length)
            {
                HighlightPoints(bitmap, hand.Boundary, armColors[paletteIndex]);
            }
        }

        // Marker dots: tips (green), bases (red), arm bases (blue), palms (purple).
        DrawPoints(bitmap, data.Hands.SelectMany(h => h.FingerTips), Color.Green, 3);
        DrawPoints(bitmap, data.Hands.SelectMany(h => h.FingerBases), Color.Red, 3);
        DrawPoints(bitmap, data.Hands.Select(h => h.ArmBase), Color.Blue, 3);
        DrawPoints(bitmap, data.Hands.Select(h => h.PalmCenter), Color.Purple, 3);
    }

    return data.Available.colorImageEnable;
}
// Builds the demo image in-place on `bitmap`: base color frame, lightened
// table blob, then hand overlays. Returns true when a color image was drawn.
public static bool CreateDemoImage(Bitmap bitmap, KinectData data)
{
    bool hasColor = data.Available.colorImageEnable;

    // Base layer: the raw color image, when available.
    if (hasColor)
    {
        ImageFrameConverter.SetColorImage(bitmap, data.ColorImage);
    }

    // Lighten the detected table region.
    if (data.Available.tableEnable)
    {
        ImageFrame tableBlob = data.TableInfo.CreateTableBlob(true);
        DarkenRegion(bitmap, tableBlob, 0.25f);
    }

    // Overlay hand/arm annotations.
    if (data.Available.handsEnable)
    {
        // Draw arm shadows
        //DrawArmShadows(bitmap, data.TableInfo, data.Hands);

        // Per-hand boundary colors, indexed by hand id + 1.
        Color[] armColors = { Color.Black, Color.Red, Color.Green, Color.Blue };
        foreach (Hand hand in data.Hands)
        {
            if (hand.Id + 1 < armColors.Length)
            {
                HighlightPoints(bitmap, hand.Boundary, armColors[hand.Id + 1]);
            }
        }

        // Marker dots: finger tips, finger bases, arm bases, palm centers.
        DrawPoints(bitmap, data.Hands.SelectMany(h => h.FingerTips), Color.Green, 3);
        DrawPoints(bitmap, data.Hands.SelectMany(h => h.FingerBases), Color.Red, 3);
        DrawPoints(bitmap, data.Hands.Select(h => h.ArmBase), Color.Blue, 3);
        DrawPoints(bitmap, data.Hands.Select(h => h.PalmCenter), Color.Purple, 3);
    }

    return hasColor;
}
// Fills `resultNeighborPixels` (a 2x2 KinectData scratch buffer) with the
// depth/color data of the four source pixels named by
// `inputIndexesOfNeighborPixels`, deriving the normalized, depth-stream,
// graded and registered values per pixel. Used by bilinear interpolation.
void GetFourNeighborPixels(ref int[] inputIndexesOfNeighborPixels, ref KinectData sourceKinectData, ref KinectData resultNeighborPixels)
{
    for (int i = 0; i < 4; i++)
    {
        // Copy raw and corrected depth straight from the source pixel.
        resultNeighborPixels.RawDepths[i] = sourceKinectData.RawDepths[inputIndexesOfNeighborPixels[i]];
        resultNeighborPixels.CorrectedDepths[i] = sourceKinectData.CorrectedDepths[inputIndexesOfNeighborPixels[i]];
        // Normalized depth is derived from the corrected depth just stored,
        // so order matters here.
        resultNeighborPixels.NormalizedDepths[i] = GetNormalizedDepthForPixel(resultNeighborPixels.CorrectedDepths[i]);
        resultNeighborPixels.RawColorStreamColors[i] = sourceKinectData.RawColorStreamColors[inputIndexesOfNeighborPixels[i]];
        // Derived colors: depth-stream color from the normalized depth,
        // graded color from the source corrected depth, registered color
        // from the source pixel index.
        GetColorForDepthStream(ref resultNeighborPixels.NormalizedDepths[i], ref resultNeighborPixels.DepthStreamColors[i]);
        GetColorForGradedDepthStreamPixel(ref sourceKinectData.CorrectedDepths[inputIndexesOfNeighborPixels[i]], ref resultNeighborPixels.GradedDepthStreamColors[i]);
        GetColorForRegsiteredColorStreamPixel(ref inputIndexesOfNeighborPixels[i], ref sourceKinectData, ref resultNeighborPixels.RegisteredColorStreamColors[i]);
    }
    return;
}
// Locates figures on the chessboard: gathers candidate pixel colors per
// field, classifies figure presence/color per field, renders debug labels
// into the visualisation bitmap, and returns the new tracking state together
// with the per-field point counts.
public (TrackingState, int[, ]) LocateFigures(KinectData kinectData, double fieldSize, byte[] canniedBytes, UserDefinedParameters userParameters, TrackingResultData resultData, Bitmap colorBitmap, TrackingState gameTrackingState)
{
    // Per-field collections of candidate pixel colors.
    var fieldColors = InitializeColorCollection();
    fieldColors = FillColorsOverFiledsArrayWithData(fieldColors, kinectData, fieldSize, canniedBytes, userParameters);

    var trackingState = DetectPresenceAndColorOfFiguresOnFields(fieldColors, gameTrackingState, userParameters);

    resultData.VisualisationBitmap = RenderLabelsToFigures(fieldColors, trackingState, resultData.VisualisationBitmap);

    return (trackingState, GetPointsCountsOverIndividualFields(fieldColors));
}
/// <summary>
/// Sends processed Kinect data to the TCP client and waits for it to be
/// processed. Blocks until a client is connected.
/// </summary>
private void Send(KinectData kinectData)
{
    // If the client is not connected, block until one connects.
    if (client == null || !client.Connected)
    {
        Console.WriteLine("Awaiting Client ");
        client = listener.AcceptTcpClient();
        ns = client.GetStream();
        nsWriter = new StreamWriter(ns);
        nsReader = new StreamReader(ns);
        nsWriter.AutoFlush = true;
        Console.WriteLine("Client connected ");
    }
    try
    {
        // Build the message that will be sent.
        TCPData data = DataProcessor.GetProcessedData(kinectData);
        data.Timestamp = kinectData.Timestamp;

        // Send the data as a single JSON line.
        Console.WriteLine("Sending data...");
        String dataStr = JsonConvert.SerializeObject(data);
        nsWriter.WriteLine(dataStr);
        nsWriter.Flush(); // redundant with AutoFlush = true, kept as harmless

        // Wait for the client's ACK (no further data is sent meanwhile); the
        // reply is the timestamp of the processed frame. The value itself is
        // unused (discarded), but parsing it still validates the reply format.
        Console.WriteLine("Wait client response...");
        _ = Convert.ToInt64(nsReader.ReadLine());
    }
    catch (Exception ex)
    {
        Console.WriteLine("Exception: " + ex.Message + " (Client Connected? " + client.Connected + ")");
    }
    finally
    {
        // Approximate FPS at which messages are being emitted.
        Console.WriteLine("FPS (aprox): " + 1 / (DateTime.UtcNow - fpsTime).TotalSeconds);
        // Next time mark used for the FPS computation.
        fpsTime = DateTime.UtcNow;
    }
}
// Unity initialization: fetch the shared low/full resolution Kinect data
// instances and allocate one BGRA32 texture per stream at the matching
// resolution, then bind the textures to their targets.
void Start()
{
    lowResolutionKinectDataInstance = KinectManager.Instance.GetLowResolutionKinectDataInstance();
    fullResolutionKinectDataInstance = KinectManager.Instance.GetFullResolutionKinectDataInstance();

    // One BGRA32 texture, no mipmaps, sized to the given data instance.
    Texture2D MakeStreamTexture(KinectData source)
    {
        return new Texture2D(source.Width, source.Height, TextureFormat.BGRA32, false);
    }

    depthStreamTexture = MakeStreamTexture(fullResolutionKinectDataInstance);
    rawColorStreamTexture = MakeStreamTexture(fullResolutionKinectDataInstance);
    gradedDepthStreamTexture = MakeStreamTexture(fullResolutionKinectDataInstance);
    registeredColorStreamTexture = MakeStreamTexture(fullResolutionKinectDataInstance);

    lowResolutionDepthStreamTexture = MakeStreamTexture(lowResolutionKinectDataInstance);
    lowResolutionRawColorStreamTexture = MakeStreamTexture(lowResolutionKinectDataInstance);
    lowResolutionGradedDepthStreamTexture = MakeStreamTexture(lowResolutionKinectDataInstance);
    lowResolutionRegisteredColorStreamTexture = MakeStreamTexture(lowResolutionKinectDataInstance);

    SetStreamTextures();
}
// Hands the latest frame to the background worker. Frames arriving while the
// worker is still busy are dropped, which may add some update delay depending
// on system performance.
public void Update(KinectData data)
{
    if (!this.worker.IsBusy)
    {
        this.worker.RunWorkerAsync(new CloudWorkerArgs { Data = data });
    }
}
// Builds a KinectData snapshot of the body's gesture-relevant state:
// clipped edges, tracked flag, and per-joint position/tracking state.
// NOTE(review): trackingId is set to the body *index*, not body.TrackingId —
// confirm that downstream consumers expect the index here.
private KinectData get_gesture_data(Body body, int bodyIndex)
{
    KinectData data = new KinectData();
    data.clippedEdges = (int)body.ClippedEdges;
    data.isTracked = body.IsTracked;
    data.trackingId = (ulong)bodyIndex;

    // Hoisted: Count() was previously evaluated on every loop iteration.
    int jointCount = body.Joints.Count();
    for (int i = 0; i < jointCount; i++)
    {
        // Cached: one dictionary lookup per joint instead of four.
        var joint = body.Joints[(JointType)i];
        data.joints[i, 0] = joint.Position.X;
        data.joints[i, 1] = joint.Position.Y;
        data.joints[i, 2] = joint.Position.Z;
        data.trackingState[i] = (int)joint.TrackingState;
    }
    return data;
}
// Parses an incoming JSON message into kinectData. Joint messages route the
// head joint either to the tracking manager's sensor (when present) or into
// the local headPosition; floor-information messages are currently ignored.
void getData(string Message)
{
    kinectData = JsonUtility.FromJson<KinectData>(Message);

    if (kinectData.isFloorInformation)
    {
        //Floor Information
        return;
    }

    var head = kinectData.joints[(int)JointType.Head];
    if (TrackingManager.instance != null)
    {
        GetComponent<KinectSensor>().GetUpdatedData(head);
    }
    else
    {
        headPosition = head;
    }
}
// Application entry hook: parses command-line args and starts either the GUI
// pipeline (Kinect images shown in a window) or the headless pipeline that
// only sends tracked data via UDP.
void App_Startup(object sender, StartupEventArgs e)
{
    Console.WriteLine("Starting app");

    // -s => show a GUI with Kinect images (true); otherwise only send
    // tracked data via UDP (false).
    // BUGFIX: an unconditional `showWindow = true;` after this loop made the
    // -s parsing dead code — that debug override has been removed.
    bool showWindow = false;
    for (int i = 0; i != e.Args.Length; ++i)
    {
        Console.WriteLine(e.Args[i]);
        if (e.Args[i] == "-s")
        {
            showWindow = true;
        }
    }

    // LoadAndEstimate erer = new LoadAndEstimate();

    // Camera source that processes frames from the Kinect camera.
    ICamera cameraData = new KinectData();

    if (showWindow)
    {
        // Create main application window and wire it to the processor.
        MainWindow mainWindow = new MainWindow();
        ImageProcessing imgProc = new ImageProcessing(cameraData, mainWindow);
        mainWindow.SetProcessor(imgProc);
        mainWindow.Show();
    }
    else
    {
        // Headless: the processor runs without a window.
        ImageProcessing imgProc = new ImageProcessing(cameraData, null);
    }
}
/// <summary>
/// Update fused values using kinect datas
/// </summary>
public void updateKinectList()
{
    // Rebuild the list of parameters from working Kinect clients.
    kinectList.Clear();
    for (int i = 0; i < server.clientList.Count; ++i)
    {
        if (server.clientList[i].clientType == (int)clientTypes.KINECT && server.clientList[i].isWorking)
        {
            kinectList.Add(server.clientList[i].kinectParameter);
        }
    }
    if (kinectList.Count == 0)
    {
        return;
    }
    // Seed the fused result from the first Kinect, then fuse in the rest.
    fusedKinectParameters = kinectList[0].Clone();
    for (int i = 1; i < kinectList.Count; ++i)
    {
        //fusedKinectParameters = getAverageResult(1 / ((double)i + 1), i);
        // NOTE(review): computeFusedSkeleton() takes no loop-dependent input,
        // so this assignment appears to repeat identical work on every
        // iteration — and never runs at all with a single Kinect. Confirm
        // whether it should be hoisted out of (or run regardless of) this loop.
        fusedKinectParameters.skeletonArray = computeFusedSkeleton();
        //fusedKinectParameters.faceArray = computeFusedFace();
    }
}
// Packs the relevant parts of a Kinect frame into a TCPData message:
// the depth array (Base64-encoded) with its dimensions and min/max bounds,
// plus one metadata entry per detected object.
public TCPData GetProcessedData(KinectData kinectData)
{
    var pd = new TCPData() { Metadata = new Dictionary<string, string>() };

    int max = kinectData.MaxDepth;
    int min = kinectData.MinDepth;

    if (kinectData.DepthArray != null)
    {
        // Depth values travel as a Base64-encoded string of chars.
        char[] depthsChar = kinectData.DepthArray.Select(p => Convert.ToChar(p)).ToArray();
        string b64 = Base64Encode(new string(depthsChar));

        pd.Metadata.Add(KEY_DEPTH_WIDTH, kinectData.Width.ToString());
        pd.Metadata.Add(KEY_DEPTH_HEIGHT, kinectData.Height.ToString());
        pd.Metadata.Add(KEY_DEPTH_MIN, min.ToString());
        pd.Metadata.Add(KEY_DEPTH_MAX, max.ToString());
        pd.Metadata.Add(KEY_DEPTH_ARRAY, b64);
    }

    if (kinectData.DetectedObjects != null && kinectData.DetectedObjects.Count > 0)
    {
        // One entry per detected object: key is its label, value its
        // relative center as "x,y".
        foreach (Kinect.ObjectsDetection.DetectedObject o in kinectData.DetectedObjects)
        {
            pd.Metadata.Add(KEY_OBJ_PREFIX + o.Data, o.RelCenter.X + "," + o.RelCenter.Y);
        }
    }

    return pd;
}
// Creates the low resolution image and related per-pixel data by bilinear
// interpolation over the full resolution buffers.
void ProduceLowResolutionData()
{
    float deltaY = 0, deltaX = 0;
    int[] indexesOfNeighborPixels = new int[4];
    // Scratch 2x2 buffer holding the four neighbors of the current sample.
    KinectData fourNeighborPixels = new KinectData(2, 2);

    // Note: the original code re-checked lowResolutionKinectData for null
    // inside this loop, but the loop condition already dereferences it, so
    // that check was dead and has been removed (unused locals i/j likewise).
    for (int index = 0; index < lowResolutionKinectData.Size; index++)
    {
        // Locate the four full-resolution neighbors and the fractional
        // offsets (deltaX/deltaY) of this low-resolution sample.
        BilinearInterpolation.GetBilinearInterpolationParameters(ref fullResolutionKinectData.Width, ref fullResolutionKinectData.Height, ref lowResolutionKinectData.Width, ref lowResolutionKinectData.Height, ref scaleX, ref scaleY, ref index, ref deltaX, ref deltaY, ref indexesOfNeighborPixels);

        GetFourNeighborPixels(ref indexesOfNeighborPixels, ref fullResolutionKinectData, ref fourNeighborPixels);

        // Interpolate every per-pixel quantity of the low resolution frame.
        BilinearInterpolation.GetBilinearInterpolatedValue(ref deltaX, ref deltaY, ref fourNeighborPixels.CorrectedDepths, ref lowResolutionKinectData.CorrectedDepths[index]);
        BilinearInterpolation.GetBilinearInterpolatedValue(ref deltaX, ref deltaY, ref fourNeighborPixels.NormalizedDepths, ref lowResolutionKinectData.NormalizedDepths[index]);
        BilinearInterpolation.GetBilinearInterpolatedValue(ref deltaX, ref deltaY, ref fourNeighborPixels.DepthStreamColors, ref lowResolutionKinectData.DepthStreamColors[index]);
        BilinearInterpolation.GetBilinearInterpolatedValue(ref deltaX, ref deltaY, ref fourNeighborPixels.RawColorStreamColors, ref lowResolutionKinectData.RawColorStreamColors[index]);
        BilinearInterpolation.GetBilinearInterpolatedValue(ref deltaX, ref deltaY, ref fourNeighborPixels.GradedDepthStreamColors, ref lowResolutionKinectData.GradedDepthStreamColors[index]);
        BilinearInterpolation.GetBilinearInterpolatedValue(ref deltaX, ref deltaY, ref fourNeighborPixels.RegisteredColorStreamColors, ref lowResolutionKinectData.RegisteredColorStreamColors[index]);
    }
}
/// <summary>
/// Detects points over individual fields of chessboard satisfying required conditions
/// </summary>
private List <Point2DWithColor>[,] FillColorsOverFiledsArrayWithData(List <Point2DWithColor>[,] array, KinectData kinectData, double fieldSize, byte[] canniedBytes, UserDefinedParameters userParameters)
{
    // Named constants for the previously-inline magic numbers.
    const int ColorWidth = 1920;        // color frame width in pixels
    const int ColorHeight = 1080;       // color frame height in pixels
    const int BytesPerColorPixel = 4;   // channels per pixel in ColorFrameData

    var csp = kinectData.CameraSpacePointsFromDepthData;
    var infraredData = kinectData.InfraredData;
    var colorFrameData = kinectData.ColorFrameData;
    var pointsFromDepthToColor = kinectData.PointsFromDepthToColor;
    var chessboardSize = fieldSize * 8;

    for (int i = 0; i < csp.Length; i++)
    {
        // Filter: valid depth sample, inside the board area, bright enough in
        // IR, not a Canny-flagged flying pixel, and within the height band.
        var isValid = !(float.IsInfinity(csp[i].Z) || float.IsNaN(csp[i].Z));
        var lowerBoundOfXIsOk = csp[i].X > 0;
        var lowerBoundOfYIsOk = csp[i].Y > 0;
        var upperBoundOfXIsOk = csp[i].X < chessboardSize;
        var upperBoundOfYIsOk = csp[i].Y < chessboardSize;
        var infraredThresholdIsOk = infraredData[i] > 1500;
        var aintFlyingPixelAccordingToCannyDetectorOnDepth = canniedBytes[i] != 255;
        var isHighEnought = csp[i].Z < -(userParameters.MilimetersClippedFromFigure / 1000d);
        var isBelowThreshold = csp[i].Z > -0.5f;

        if (isValid && lowerBoundOfXIsOk && lowerBoundOfYIsOk && upperBoundOfXIsOk && upperBoundOfYIsOk &&
            infraredThresholdIsOk && aintFlyingPixelAccordingToCannyDetectorOnDepth && isHighEnought && isBelowThreshold)
        {
            var reference = pointsFromDepthToColor[i];
            if (reference.X > 0 && reference.X < ColorWidth && reference.Y > 0 && reference.Y < ColorHeight)
            {
                // Hoisted: base byte offset of this pixel in the 4-byte-per-
                // pixel color buffer (was computed once per channel).
                int colorBase = ((int)reference.X + (int)reference.Y * ColorWidth) * BytesPerColorPixel;
                var r = colorFrameData[colorBase + 0];
                var g = colorFrameData[colorBase + 1];
                var b = colorFrameData[colorBase + 2];

                // Map the camera-space point onto its 8x8 board field.
                int x = (int)Math.Floor(csp[i].X / fieldSize);
                int y = (int)Math.Floor(csp[i].Y / fieldSize);
                if (x >= 0 && y >= 0 && x < 8 && y < 8)
                {
                    array[x, y].Add(new Point2DWithColor(Color.FromArgb(r, g, b), (int)reference.X, (int)reference.Y));
                }
            }
        }
    }
    return array;
}
// Draws the coordinate-frame visualization for Kinect `kinectIndex` into
// show_viewport3D. With "show 3D" checked: a small black box oriented by a
// z-N-Z Euler-angle decomposition plus a red z-axis line. Otherwise: three
// colored axis line segments (x=black, y=blue, z=red).
private void drawFrameAxis(int kinectIndex)
{
    if ((bool)parentGUI.show3DCheck.IsChecked)
    {
        // Box representing the Kinect body, scaled and centered on its origin.
        Box3D kinectBox = new Box3D();
        DiffuseMaterial material = new DiffuseMaterial(Brushes.Black);
        kinectBox.Material = material;
        Transform3DGroup t = new Transform3DGroup();
        double l = 0.2, w = 0.03, h = 0.03;
        t.Children.Add(new ScaleTransform3D(l, w, h));
        t.Children.Add(new TranslateTransform3D(new Vector3D(-l / 2, -w / 2, -h / 2)));
        //new Vector3D(height*GUIComponents.synchyList[i].zPar.X, GUIComponents.synchyList[i].zPar.Y, GUIComponents.synchyList[i].zPar.Z)));

        // Transform the canonical frame (origin + unit axes) into this
        // Kinect's coordinate system.
        Point3D origin = new Point3D(0, 0, 0);
        Point3D x_unit = new Point3D(1, 0, 0);
        Point3D y_unit = new Point3D(0, 1, 0);
        Point3D z_unit = new Point3D(0, 0, 1);
        KinectData kp = new KinectData();
        origin = kp.transformPointto(origin, transfMatrix[kinectIndex]);
        x_unit = kp.transformPointto(x_unit, transfMatrix[kinectIndex]);
        y_unit = kp.transformPointto(y_unit, transfMatrix[kinectIndex]);
        z_unit = kp.transformPointto(z_unit, transfMatrix[kinectIndex]);

        // Unit direction vectors of the transformed frame.
        Vector3D x = new Vector3D(x_unit.X - origin.X, x_unit.Y - origin.Y, x_unit.Z - origin.Z);
        Vector3D y = new Vector3D(y_unit.X - origin.X, y_unit.Y - origin.Y, y_unit.Z - origin.Z);
        Vector3D z = new Vector3D(z_unit.X - origin.X, z_unit.Y - origin.Y, z_unit.Z - origin.Z);
        x.Normalize();
        y.Normalize();
        z.Normalize();
        Vector3D X = new Vector3D(1, 0, 0);
        Vector3D Y = new Vector3D(0, 1, 0);
        Vector3D Z = new Vector3D(0, 0, 1);

        // Kinect 0 stays axis-aligned; others are rotated into their frame.
        if (kinectIndex != 0)
        {
            double rad2Deg = 180 / Math.PI;
            //use z-N-Z Euler Angle rotation
            Vector3D N = Vector3D.CrossProduct(new Vector3D(0, 0, 1), z);
            N.Normalize();
            double Z1 = z.X; // Vector3D.DotProduct(Z, x);
            double Z2 = z.Y; // Vector3D.DotProduct(Z, y);
            double Z3 = z.Z; // Vector3D.DotProduct(Z, z);
            double X3 = x.Z; // Vector3D.DotProduct(X, z);
            double Y3 = y.Z; // Vector3D.DotProduct(Y, z);
            t.Children.Add(new RotateTransform3D(
                new AxisAngleRotation3D(
                    Z,
                    rad2Deg * Math.Atan2(Z1, -Z2) //Math.Acos(-z.Y / Math.Sqrt(1 - Math.Pow(z.Z, 2)))
                ))); //rotate around y
            t.Children.Add(new RotateTransform3D(
                new AxisAngleRotation3D(N, rad2Deg * Math.Acos(Z3)))); //rotate around z
            t.Children.Add(new RotateTransform3D(
                new AxisAngleRotation3D(z,
                    rad2Deg * Math.Atan2(X3, Y3) //Math.Acos(y.Z / Math.Sqrt(1 - Math.Pow(z.Z, 2))))));
                )));
            t.Children.Add(new TranslateTransform3D(new Vector3D(origin.X, origin.Y, origin.Z)));
        }
        kinectBox.Transform = t;
        show_viewport3D.Children.Add(kinectBox);

        // Red line marking the Kinect's z (view) direction.
        ScreenSpaceLines3D Vz = new ScreenSpaceLines3D();
        int width = 2;
        double factor = 0.15;
        Vz.Points.Add(origin);
        Vz.Points.Add(new Point3D(z.X * factor + origin.X, z.Y * factor + origin.Y, z.Z * factor + origin.Z));
        Vz.Thickness = width;
        Vz.Color = Colors.Red;
        show_viewport3D.Children.Add(Vz);
    }
    else
    {
        // Simple mode: draw the frame's three axes as colored line segments.
        ScreenSpaceLines3D x = new ScreenSpaceLines3D();
        ScreenSpaceLines3D y = new ScreenSpaceLines3D();
        ScreenSpaceLines3D z = new ScreenSpaceLines3D();
        int width = 3;
        double factor = 0.2;
        // Color c = Colors.Violet;
        x.Thickness = width;
        x.Color = Colors.Black;
        y.Thickness = width;
        y.Color = Colors.Blue;
        z.Thickness = width;
        z.Color = Colors.Red;

        // Transform the origin and the scaled unit axes into this Kinect's frame.
        Point3D origin = new Point3D(0, 0, 0);
        Point3D x_unit = new Point3D(factor, 0, 0);
        Point3D y_unit = new Point3D(0, factor, 0);
        Point3D z_unit = new Point3D(0, 0, factor);
        KinectData kp = new KinectData();
        origin = kp.transformPointto(origin, transfMatrix[kinectIndex]);
        x_unit = kp.transformPointto(x_unit, transfMatrix[kinectIndex]);
        y_unit = kp.transformPointto(y_unit, transfMatrix[kinectIndex]);
        z_unit = kp.transformPointto(z_unit, transfMatrix[kinectIndex]);
        /*
         * x_unit = new Point3D(x_unit.X - origin.X, x_unit.Y - origin.Y, x_unit.Z - origin.Z);
         * y_unit = new Point3D(y_unit.X - origin.X, y_unit.Y - origin.Y, y_unit.Z - origin.Z);
         * z_unit = new Point3D(z_unit.X - origin.X, z_unit.Y - origin.Y, z_unit.Z - origin.Z);
         * origin = new Point3D(0, 0, 0);
         */
        x.Points.Add(origin);
        x.Points.Add(x_unit);
        y.Points.Add(origin);
        y.Points.Add(y_unit);
        z.Points.Add(origin);
        z.Points.Add(z_unit);
        this.show_viewport3D.Children.Add(x);
        this.show_viewport3D.Children.Add(y);
        this.show_viewport3D.Children.Add(z);
    }
}
// Unity initialization: create the Kinect data source and start it.
void Start()
{
    KinectData source = new KinectData();
    kinectData = source;
    source.Start();
}
// Returns the whole block of processed kinect data by overwriting the
// caller-supplied reference.
public override void GetProcessedKinectData(ref KinectData processedKinectDataReference)
    => processedKinectDataReference = processedKinectData;
// Handles new tracked Kinect data: filters the selected joint's position into
// a virtual accelerometer/position sample, corrects quaternion sign flips on
// the right-wrist orientation, tags the sample's validity, and raises the
// NewIData/NewTData events with the resulting VirtualSensorData.
private void OnNewTData(object sender, KinectData e)
{
    double[] virtualAcc = new double[] { 0, 0, 0 };
    double[] virtualVel = new double[] { 0, 0, 0 };
    double[] pos = new double[] { e.Joints[FurtherJoint].Position.X, e.Joints[FurtherJoint].Position.Y, e.Joints[FurtherJoint].Position.Z };
    TrackingState trackingstate = e.Body.Joints[this.FurtherJoint].TrackingState;
    //= MathFunctions.midpoint(e.Joints[FurtherJoint], e.Joints[CloserJoint], 100);

    // Feed the joint position into the per-axis derivative filters.
    X_Filter.UpdateVal(pos[0], e.NowInTicks);
    Y_Filter.UpdateVal(pos[1], e.NowInTicks);
    Z_Filter.UpdateVal(pos[2], e.NowInTicks);

    //acceleration
    var temp = X_Filter.GetNDerivative(nd);
    virtualAcc[0] = temp.Item1;
    // +9.81 on Y — presumably a gravity offset to mimic a real
    // accelerometer; TODO confirm.
    virtualAcc[1] = Y_Filter.GetNDerivative(nd).Item1 + 9.81;
    //hack make z -
    virtualAcc[2] = -Z_Filter.GetNDerivative(nd).Item1;

    //position
    double[] virtualPos = new double[] { pos[0], pos[1], pos[2] };

    vsd = new VirtualSensorData();
    // Timestamp comes from the X filter's derivative sample.
    vsd.NowInTicks = temp.Item2;
    //vsd.NowInTicks = e.NowInTicks;
    for (int i = 0; i < 3; i++)
    {
        vsd.acceleration[i] = virtualAcc[i];
        vsd.velocity[i] = virtualVel[i]; // velocity is always zero here
        vsd.position[i] = virtualPos[i];
    }

    //Rotation
    vsd.rot = e.Body.JointOrientations[JointType.WristRight].Orientation.ToQuaternion();

    //Apply filter under this note: X,Y,Z,W
    //Begin Quat Filter
    #region QuatFilter
    double[] quat = new double[] { vsd.rot.W, vsd.rot.X, vsd.rot.Y, vsd.rot.Z };
    //If this is the first data point, record that. The filter will then not be triggered on the first data point
    //Assuming first data set is correct otherwise all values will be incorrectly flipped. Can improve conditionals later (delay for high confidence data, etc).
    if (DataTracker.vsdFirstQuat[0] == 0 && DataTracker.vsdFirstQuat[1] == 0 && DataTracker.vsdFirstQuat[2] == 0 && DataTracker.vsdFirstQuat[3] == 0)
    {
        DataTracker.vsdFirstQuat = quat;
        DataTracker.mvsdPrevQuat = quat;
    }
    // When every component changed sign vs. the previous sample, treat it as
    // a q/-q flip (same rotation, opposite signs) and undo it.
    int counter = 0;
    for (int i = 0; i < 4; i++)
    {
        if (System.Math.Sign(DataTracker.mvsdPrevQuat[i]) != System.Math.Sign(quat[i]))
        {
            counter++;
        }
    }
    if (counter == 4)
    {
        DataTracker.FlipCounter++;
        for (int j = 0; j < 4; j++)
        {
            quat[j] = -quat[j];
        }
    }
    //NOTE: just for right wrist, this can be generalized later. Only keep high confidence reference quaternions (except for first data point)
    if (e.Body.Joints[JointType.ElbowRight].TrackingState == TrackingState.Tracked && e.Body.Joints[JointType.ShoulderRight].TrackingState == TrackingState.Tracked && e.Body.Joints[JointType.WristRight].TrackingState == TrackingState.Tracked)
    {
        for (int i = 0; i < 4; i++)
        {
            DataTracker.mvsdPrevQuat[i] = quat[i];
        }
    }
    // Write the (possibly sign-corrected) quaternion back in W,X,Y,Z order.
    vsd.rot.W = quat[0];
    vsd.rot.X = quat[1];
    vsd.rot.Y = quat[2];
    vsd.rot.Z = quat[3];
    #endregion
    //End Rotation Filter

    if (this.FurtherJoint == JointType.WristRight)
    {
        //Only do strict tracking when right wrist is selected
        TrackingState wristState = e.Body.Joints[JointType.WristRight].TrackingState;
        HandState handState = e.Body.HandRightState;
        TrackingState thumbState = e.Body.Joints[JointType.ThumbRight].TrackingState;
        vsd.isinferredornottracked = (wristState != TrackingState.Tracked) || (thumbState != TrackingState.Tracked) || (handState != HandState.Open);
    }
    else
    {
        vsd.isinferredornottracked = (trackingstate == TrackingState.Inferred) || (trackingstate == TrackingState.NotTracked);
    }

    //If in Calibrator Setup (note: change calibrator setup to bool later? - 0 means not in setup) and the data is good, tag it.
    if (ICherryPicker.isDataGood(vsd.acceleration, !vsd.isinferredornottracked)) //don't need to repeat this in mappedVirtual
    {
        vsd.section = DataTracker.CurrentSection;
        DataTracker.ValidVSD = true;
    }
    else
    {
        vsd.section = 0;
        DataTracker.ValidVSD = false;
    }

    if (this.NewIData != null)
    {
        this.NewIData(this, vsd); //[not edited] This triggers data collection? (So changes must be before this)
    }
    if (this.NewTData != null)
    {
        this.NewTData(this, vsd);
    }
}
/*
 * CROUCH: Imitates the speeding up from a player crouching while skiing.
 * Returns 0 for straight legs (knee angle 180°) up to 1 for fully folded
 * legs, averaged over both knees.
 */
float Crouch(KinectData client)
{
    float leftKneeAngle = Vector3.Angle(client.lefthip - client.leftknee, client.leftfoot - client.leftknee);
    // BUGFIX: was `client.righthip - client.righthip` (a zero vector), which
    // made the right-knee angle meaningless; mirror the left-leg computation.
    float rightKneeAngle = Vector3.Angle(client.righthip - client.rightknee, client.rightfoot - client.rightknee);
    float angle = (leftKneeAngle + rightKneeAngle) / 2.0f;
    return (180.0f - angle) / 180.0f;
}
// Handles a "download"-type command received from a connected client and replies
// through sendData(). Replies are '#'-separated strings prefixed with the command id.
//
// kinect_index: -1 means "all kinects"; -2 (the default) means "not specified".
// Returns true when the command was recognized and handled; false on an unknown
// command, a missing kinect index, or an execution error.
//
// NOTE(review): long-dead commented-out cases (face/sound parameter commands) were
// removed; recover them from version control if they are ever revived.
bool execute_download_command(int action, int kinect_index = -2)
{
    DownloadCommands c = (DownloadCommands)action;

    // Every command past this marker requires a kinect index; reject when unspecified.
    if (action > (int)DownloadCommands.Get_head_joint_information_of_skeleton_array_from_fused_kinect_data &&
        kinect_index == -2)
    {
        return (false);
    }

    try
    {
        // Shared by several cases below (C# switch cases share a single scope).
        string dataToSend;
        List<DenseMatrix> list;

        switch (c)
        {
            case DownloadCommands.Get_total_number_of_alive_Kinect_Client:
                // Count connected, working Kinect clients.
                int count = 0;
                foreach (myClient mc in server.clientList.Where(cc => cc.isWorking && cc.clientType == (int)clientTypes.KINECT))
                {
                    count++;
                }
                sendData((int)DownloadCommands.Get_total_number_of_alive_Kinect_Client + "#" + count.ToString());
                break;

            case DownloadCommands.Get_kinect_matrices:
                // Serialize each 4x4 transform matrix as space-separated values,
                // terminating each matrix with '#', prefixed by the matrix count.
                list = GUIComponents.fc.fp.getTranfMatrix();
                dataToSend = "";
                for (int i = 0; i < list.Count; ++i)
                {
                    for (int row = 0; row < 4; ++row)
                    {
                        for (int col = 0; col < 4; ++col)
                        {
                            dataToSend += list[i][row, col].ToString();
                            dataToSend += (row == 3 && col == 3) ? "#" : " ";
                        }
                    }
                }
                dataToSend = list.Count.ToString() + "#" + dataToSend;
                sendData((int)DownloadCommands.Get_kinect_matrices + "#" + dataToSend);
                break;

            case DownloadCommands.Download_all_kinect_data_in_Base64_string_format:
                // Download all kinect data in Base64 string format, prefixed by the client count.
                dataToSend = "";
                int clientcount = 0;
                foreach (myClient mc in server.clientList.Where(cc => cc.isWorking && cc.clientType == (int)clientTypes.KINECT))
                {
                    dataToSend += mc.kinectParameter.GetAllParameterStringInBase64() + "#";
                    clientcount++;
                }
                dataToSend = clientcount.ToString() + "#" + dataToSend;
                sendData((int)DownloadCommands.Download_all_kinect_data_in_Base64_string_format + "#" + dataToSend);
                break;

            case DownloadCommands.Download_fused_kinect_data_in_Base64_string_format:
                if (clientType == (int)clientTypes.KINECT && MainWindow.getFusedDataToKinect())
                {
                    // Kinect clients receive the fused data transformed back into
                    // their own coordinate frame via the inverse of their matrix.
                    KinectData k = server.fusedKinectParameter.Clone();
                    list = GUIComponents.fc.fp.getTranfMatrix();
                    k.transformTo(getInverseTranfMatrix(list[clientID]));
                    sendData((int)DownloadCommands.Download_fused_kinect_data_in_Base64_string_format + "#" + k.GetAllParameterStringInBase64());
                }
                else if (clientType != (int)clientTypes.KINECT)
                {
                    // Non-Kinect clients get the fused data as-is.
                    sendData((int)DownloadCommands.Download_fused_kinect_data_in_Base64_string_format + "#" + server.fusedKinectParameter.GetAllParameterStringInBase64());
                }
                break;

            case DownloadCommands.Get_total_number_of_skeletons_from_fused_kinect_data:
                if (server.fusedKinectParameter.skeletonArray != null)
                {
                    sendData((int)DownloadCommands.Get_total_number_of_skeletons_from_fused_kinect_data + "#" + server.fusedKinectParameter.skeletonArray.Length.ToString());
                }
                else
                {
                    sendData((int)DownloadCommands.Get_total_number_of_skeletons_from_fused_kinect_data + "#" + "0");
                }
                break;

            case DownloadCommands.Get_skeleton_position_array_from_fused_kinect_data:
                sendData((int)DownloadCommands.Get_skeleton_position_array_from_fused_kinect_data + "#" + server.fusedKinectParameter.GetSkeletonArrayString(KinectData.SkeletonArrayType.POSITION_ONLY));
                break;

            case DownloadCommands.Get_full_skeleton_array_from_fused_kinect_data:
                sendData((int)DownloadCommands.Get_full_skeleton_array_from_fused_kinect_data + "#" + server.fusedKinectParameter.GetSkeletonArrayString(KinectData.SkeletonArrayType.FULL_ARRAY));
                break;

            case DownloadCommands.Get_upper_body_joint_information_of_skeleton_array_from_fused_kinect_data:
                sendData((int)DownloadCommands.Get_upper_body_joint_information_of_skeleton_array_from_fused_kinect_data + "#" + server.fusedKinectParameter.GetSkeletonArrayString(KinectData.SkeletonArrayType.UPPER_BODY));
                break;

            case DownloadCommands.Get_head_joint_information_of_skeleton_array_from_fused_kinect_data:
                sendData((int)DownloadCommands.Get_head_joint_information_of_skeleton_array_from_fused_kinect_data + "#" + server.fusedKinectParameter.GetSkeletonArrayString(KinectData.SkeletonArrayType.HEAD_ONLY));
                break;

            case DownloadCommands.Get_skeleton_position_array_from_a_specified_kinect:
                sendData((int)DownloadCommands.Get_skeleton_position_array_from_a_specified_kinect + "#" + sendSkeletonString(kinect_index, KinectData.SkeletonArrayType.POSITION_ONLY));
                break;

            case DownloadCommands.Get_full_skeleton_array_from_a_specified_kinect:
                sendData((int)DownloadCommands.Get_full_skeleton_array_from_a_specified_kinect + "#" + sendSkeletonString(kinect_index, KinectData.SkeletonArrayType.FULL_ARRAY));
                break;

            case DownloadCommands.Get_upper_body_joint_information_of_skeleton_array_from_a_specified_kinect:
                sendData((int)DownloadCommands.Get_upper_body_joint_information_of_skeleton_array_from_a_specified_kinect + "#" + sendSkeletonString(kinect_index, KinectData.SkeletonArrayType.UPPER_BODY));
                break;

            case DownloadCommands.Get_head_joint_information_of_skeleton_array_from_a_specified_kinect:
                sendData((int)DownloadCommands.Get_head_joint_information_of_skeleton_array_from_a_specified_kinect + "#" + sendSkeletonString(kinect_index, KinectData.SkeletonArrayType.HEAD_ONLY));
                break;

            case DownloadCommands.Get_object_frame:
                dataToSend = "";
                int synchyCount = 0;
                for (int i = 0; i < GUIComponents.synchyList.Count; ++i)
                {
                    // BUG FIX: the original tested `xUnit.Z != Double.NaN`, which is
                    // ALWAYS true (NaN compares unequal to everything, itself included),
                    // so invalid synchies were never filtered. Double.IsNaN is the
                    // correct check.
                    if (GUIComponents.synchyList[i].Center.Z != 0 && !Double.IsNaN(GUIComponents.synchyList[i].xUnit.Z))
                    {
                        ++synchyCount;
                        dataToSend += GUIComponents.synchyList[i].getSynchyData() + "#";
                    }
                }
                dataToSend = synchyCount.ToString() + "#" + dataToSend;
                sendData((int)DownloadCommands.Get_object_frame + "#" + dataToSend);
                break;

            default: // unknown command
                return (false);
        }
        return (true);
    }
    catch
    {
        // Command execution error: report failure to the caller rather than crash
        // the server loop (deliberate best-effort behavior, preserved from original).
        return (false);
    }
}
// Sets up all perception state for the given character: Kinect data holders for
// two users, gaze/eyebrow trackers, serial ports, the Thalamus client, two pipe
// servers, and the logging/publishing/video timers. Auto-starts the pipe servers.
public Form1(string characterName)
{
    InitializeComponent();

    // Kinect data for the primary and secondary tracked user.
    mKinectData = new KinectData();
    mKinectData2 = new KinectData(); //second user data

    // Gaze state starts as "none" for both users.
    // (The original allocated throw-away GazeDef instances that were immediately
    // overwritten with GazeDef.none; those redundant allocations were removed.)
    GazeOut1 = GazeDef.none;
    GazeOut2 = GazeDef.none;
    LastGazeOut1 = GazeDef.none;
    LastGazeOut2 = GazeDef.none;

    simpleSound = new SoundPlayer(@"countA.wav");

    eyebrowControllerUsr1 = new EyebrowsController();
    eyebrowControllerUsr2 = new EyebrowsController();

    // Serial ports deliver sensor data asynchronously through DataReceived.
    serialPort1 = new System.IO.Ports.SerialPort();
    serialPort2 = new System.IO.Ports.SerialPort();
    sentonce = false;
    serialPort1.DataReceived += new System.IO.Ports.SerialDataReceivedEventHandler(serialPort1_DataReceived);
    serialPort2.DataReceived += new System.IO.Ports.SerialDataReceivedEventHandler(serialPort2_DataReceived);

    thalamusClient = new PerceptionClient(characterName);
    pipeServer = new PipeServer.Server();
    engagementpipe = new PipeServer.Server();

    // User 1 position defaults to the origin; user 2 to (-1,-1,-1).
    Xp = 0; Yp = 0; Zp = 0;
    Xp2 = -1; Yp2 = -1; Zp2 = -1;
    Xpold = 0; Ypold = 0; Zpold = 0;
    Xp2old = -1; Yp2old = -1; Zp2old = -1;
    RotH = 0; RotV = 0; RotH2 = 0; RotV2 = 0;
    depth = 0;

    // Accumulated log strings for each data source, per user.
    allOKAOdata = ""; allOKAOdata2 = "";
    allQdata = ""; allQdata2 = "";
    allkinectdata = ""; allkinectdata2 = "";
    startstop = false;

    FparticipanID = 0;
    FparticipanID2 = 1;
    Fparticipantname = "None";
    Fparticipantname2 = "None2";
    isEmpathic = true; //default value

    OKAOdata = ""; OKAOdata2 = "";
    Kinectdata = ""; Kinectdata2 = "";
    Qdata1 = ""; Qdata2 = "";

    // Data saver: fires every 250 ms (i.e. 4 Hz); disabled until recording starts.
    // (The original "//Herz" comment was misleading — Interval is in milliseconds.)
    saver = new System.Timers.Timer();
    saver.Elapsed += new ElapsedEventHandler(OnsaverEvent);
    saver.Enabled = false;
    saver.Interval = 250;

    DetectedPerson = false;

    // Publisher timer: 350 ms interval, increased to reduce thalamus messages.
    aTimer = new System.Timers.Timer();
    aTimer.Stop();
    aTimer.Elapsed += new ElapsedEventHandler(OnTimedEvent);
    aTimer.Interval = 350;

    // Video frame grabber: 33333 microseconds per tick = ~30 fps.
    videoTimer = new MicroLibrary.MicroTimer();
    videoTimer.Stop();
    videoTimer.MicroTimerElapsed += new MicroLibrary.MicroTimer.MicroTimerElapsedEventHandler(saveframe);
    videoTimer.Interval = 33333; //30fps

    //thalamusClient.ClientConnected += connected;
    this.pipeServer.MessageReceived += new PipeServer.Server.MessageReceivedHandler(pipeServer_MessageReceived);
    this.engagementpipe.MessageReceived += new PipeServer.Server.MessageReceivedHandler(engagementpipe_MessageReceived);

    button1_Click(this, null); //auto start the pipe servers

    // Fall back to scenario 1 when none was chosen.
    if (scenarioselected == 0)
    {
        scenarioselected = 1;
    }
}
/// <summary>
/// Updates the avatar joints values (position and rotation) according to its
/// <see cref="GenericDeviceUnity.GenericDeviceData"/>.
/// For every assigned joint, the world rotation is composed as:
/// body reference rotation * Kinect-reported bone rotation * the joint's
/// initial rotation captured inside Unity. Unassigned (null) joints are skipped.
/// </summary>
public void UpdateAvatar()
{
    // Lazily cache the device data downcast to its Kinect-specific type.
    // NOTE(review): _kinectData is only needed by the commented-out position
    // update below — the rotation code reads GenericDeviceData directly.
    if (_kinectData == null)
    {
        _kinectData = (KinectData)GenericDeviceData;
    }

    //Body.transform.localPosition = _kinectData.GetPosition(KinectBones.waist.ToString());

    // --- Head & spine ---
    if (Head != null)
    {
        Head.transform.rotation = Body.transform.rotation *                     //reference object
            GenericDeviceData.GetRotation(KinectBones.head.ToString()) *        //rotation that comes from kinect (world rotation)
            _initialRotations[(int)KinectBones.head];                           //initial rotation of the head inside unity
    }
    if (Neck != null)
    {
        Neck.transform.rotation = Body.transform.rotation *
            GenericDeviceData.GetRotation(KinectBones.neck.ToString()) *
            _initialRotations[(int)KinectBones.neck];
    }
    if (Torso != null)
    {
        Torso.transform.rotation = Body.transform.rotation *
            GenericDeviceData.GetRotation(KinectBones.torso.ToString()) *
            _initialRotations[(int)KinectBones.torso];
    }
    if (Waist != null)
    {
        Waist.transform.rotation = Body.transform.rotation *
            GenericDeviceData.GetRotation(KinectBones.waist.ToString()) *
            _initialRotations[(int)KinectBones.waist];
    }

    // --- Left arm ---
    if (LeftShoulder != null)
    {
        LeftShoulder.transform.rotation = Body.transform.rotation *
            GenericDeviceData.GetRotation(KinectBones.leftshoulder.ToString()) *
            _initialRotations[(int)KinectBones.leftshoulder];
    }
    if (LeftElbow != null)
    {
        LeftElbow.transform.rotation = Body.transform.rotation *
            GenericDeviceData.GetRotation(KinectBones.leftelbow.ToString()) *
            _initialRotations[(int)KinectBones.leftelbow];
    }
    if (LeftWrist != null)
    {
        LeftWrist.transform.rotation = Body.transform.rotation *
            GenericDeviceData.GetRotation(KinectBones.leftwrist.ToString()) *
            _initialRotations[(int)KinectBones.leftwrist];
    }

    // --- Right arm ---
    if (RightShoulder != null)
    {
        RightShoulder.transform.rotation = Body.transform.rotation *
            GenericDeviceData.GetRotation(KinectBones.rightshoulder.ToString()) *
            _initialRotations[(int)KinectBones.rightshoulder];
    }
    if (RightElbow != null)
    {
        RightElbow.transform.rotation = Body.transform.rotation *
            GenericDeviceData.GetRotation(KinectBones.rightelbow.ToString()) *
            _initialRotations[(int)KinectBones.rightelbow];
    }
    if (RightWrist != null)
    {
        RightWrist.transform.rotation = Body.transform.rotation *
            GenericDeviceData.GetRotation(KinectBones.rightwrist.ToString()) *
            _initialRotations[(int)KinectBones.rightwrist];
    }

    // --- Left leg ---
    if (LeftHip != null)
    {
        LeftHip.transform.rotation = Body.transform.rotation *
            GenericDeviceData.GetRotation(KinectBones.lefthip.ToString()) *
            _initialRotations[(int)KinectBones.lefthip];
    }
    if (LeftKnee != null)
    {
        LeftKnee.transform.rotation = Body.transform.rotation *
            GenericDeviceData.GetRotation(KinectBones.leftknee.ToString()) *
            _initialRotations[(int)KinectBones.leftknee];
    }
    if (LeftAnkle != null)
    {
        LeftAnkle.transform.rotation = Body.transform.rotation *
            GenericDeviceData.GetRotation(KinectBones.leftankle.ToString()) *
            _initialRotations[(int)KinectBones.leftankle];
    }

    // --- Right leg ---
    if (RightHip != null)
    {
        RightHip.transform.rotation = Body.transform.rotation *
            GenericDeviceData.GetRotation(KinectBones.righthip.ToString()) *
            _initialRotations[(int)KinectBones.righthip];
    }
    if (RightKnee != null)
    {
        RightKnee.transform.rotation = Body.transform.rotation *
            GenericDeviceData.GetRotation(KinectBones.rightknee.ToString()) *
            _initialRotations[(int)KinectBones.rightknee];
    }
    if (RightAnkle != null)
    {
        RightAnkle.transform.rotation = Body.transform.rotation *
            GenericDeviceData.GetRotation(KinectBones.rightankle.ToString()) *
            _initialRotations[(int)KinectBones.rightankle];
    }
}
/// <summary>
/// Constructs the game controller, allocating the Kinect handler and the
/// container object that holds Kinect frame data.
/// </summary>
public GameController()
{
    // The two allocations are independent; order does not matter.
    data = new KinectData();
    kinect = new KinectHandler();
}
// Builds the main tabletop form: wires the Polhemus and Phidget controllers,
// configures a double-buffered self-painted surface, creates the haiku study and
// word-box controllers, optionally connects to a local Kinect via KinectTable,
// and starts the 25 ms update timer.
public Form1(PolhemusController newPolhemusController, PhidgetController newPhidgetController, string host, string port)
{
    this.host = host;
    this.port = port;
    InitializeComponent();

    // Double-buffered, fully self-painted control to avoid flicker on the table display.
    this.SetStyle(
        ControlStyles.AllPaintingInWmPaint |
        ControlStyles.UserPaint |
        ControlStyles.OptimizedDoubleBuffer,
        true);

    polhemusController = newPolhemusController;
    phidgetController = newPhidgetController;

    this.BackColor = Color.Black;
    this.Size = new Size(Program.tableWidth, Program.tableHeight);

    studyController = new HaikuStudyController(HaikuStudyPosition.SideBySide); //, HaikuStudyType.RealArmsPictureArms);
    wordBoxController = new WordBoxController(studyController);

    // Idiom: "== true" was redundant. The servo setup itself remains disabled.
    if (studyController.isActuatePenalty)
    {
        //phidgetController.setUpServos();
    }

    // To snap words back to their original locations when dropped outside of a
    // user's paper, set this to true.
    wordBoxController.boxesShouldSnapBack = true;

    studyController.wordBoxController = wordBoxController;
    studyController.currentCondition = HaikuStudyCondition.Cursors;
    studyController.isInSetUpMode = false;

    setMouseProperties();
    setUpEmbodiments();

    if (Program.kinectEnabled)
    {
        // Set up KinectTable
        kinectData = new KinectData();

        // Set up session parameters
        SessionParameters sessionParams = new SessionParameters(KinectDataParams.EnableType.All);
        sessionParams.DataParams.validityImageEnable = false;
        sessionParams.DataParams.testImageEnable = false;

        // Connect to a local Kinect and hook up to the data event
        kinectClient = KinectTableNet.KinectTable.ConnectLocal(sessionParams);
        kinectClient.DataReady += kinectClient_DataReady;

        // Set up Kinect calibration
        kinectCalibration = new KinectCalibrationController();

        // Read the saved table depth tweak value
        ReadTableDepthTweak();
    }

    Cursor.Hide();

    Load += Form1_Load;
    FormClosed += Form1_FormClosed;

    playerID = 0;

    // Drive the UI/game update loop every 25 ms (~40 fps).
    updateTimer = new Timer();
    updateTimer.Interval = 25;
    updateTimer.Tick += new EventHandler(updateTimer_Tick);
    updateTimer.Start();
}
// Listener invoked for each newly captured Kinect frame; forwards the frame on.
// NOTE(review): unlike NewFrameProcessor earlier in this file, this forwards
// unconditionally with no null/connection guard — confirm Send() tolerates the
// no-server case.
public void NewFrameListener(KinectData depth, EventArgs e)
{
    Send(depth);
}