/// <summary>
/// Service start-up: connects to the default Kinect sensor, initializes the
/// joint-smoothing filter, wires up the multi-source and audio frame readers,
/// allocates the per-stream receive buffers, and opens one network listener
/// per data stream (color / depth / IR / body / audio).
/// </summary>
/// <param name="args">Service start arguments (unused).</param>
/// <exception cref="KinectException">
/// Thrown when no sensor is detected or a data stream cannot be opened.
/// ExitCode is set (-1, -2, -3 respectively) before throwing so the service
/// control manager records the failure.
/// </exception>
protected override void OnStart(string[] args)
{
    // Try to open the first available Kinect sensor.
    this.kinect = KinectSensor.GetDefault();
    if (this.kinect == null)
    {
        EventLog.WriteEntry("No Kinect device was detected.");
        ExitCode = -1;
        throw new KinectException("No kinect device was detected.");
    }

    // Joint smoothing filter for body-tracking data.
    this.filter = new KinectJointFilter();
    this.filter.Init(0.5f, 0.5f, 0.5f, 0.05f, 0.04f); // change params if you want
    this.kinect.Open();
    this.kinect.IsAvailableChanged += this.OnAvailableChanged;

    // Register as a handler for the image data being returned by the Kinect.
    this.reader = this.kinect.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
    this.audioSource = this.kinect.AudioSource;

    if (this.reader == null)
    {
        EventLog.WriteEntry("Unable to connect to Kinect data stream.");
        ExitCode = -2;
        throw new KinectException("Unable to connect to Kinect data stream.");
    }

    this.reader.MultiSourceFrameArrived += this.OnFrameArrived;

    if (this.audioSource == null)
    {
        EventLog.WriteEntry("Unable to open audio source on kinect");
        ExitCode = -3;
        throw new KinectException("Unable to connect to kinect audio source");
    }

    this.audioReader = this.audioSource.OpenReader();
    if (this.audioReader == null)
    {
        // BUGFIX: this runs as a Windows Service, so Console output is never
        // seen. Log to the event log like every other failure path here.
        EventLog.WriteEntry("Issues with audio reader");
    }
    else
    {
        this.audioReader.FrameArrived += this.onAudioFrameArrived;
    }

    // Allocate storage for the data from the Kinect. The "byte" variants add
    // sizeof(double) to leave room for a trailing timestamp in the payload.
    this.colorArray = new byte[this.kinect.ColorFrameSource.FrameDescription.Height * this.kinect.ColorFrameSource.FrameDescription.Width * BYTES_PER_COLOR_PIXEL];
    this.depthArray = new ushort[this.kinect.DepthFrameSource.FrameDescription.Height * this.kinect.DepthFrameSource.FrameDescription.Width];
    this.irArray = new ushort[this.kinect.InfraredFrameSource.FrameDescription.Height * this.kinect.InfraredFrameSource.FrameDescription.Width];
    this.byteColorArray = new byte[(this.kinect.ColorFrameSource.FrameDescription.Height * this.kinect.ColorFrameSource.FrameDescription.Width * BYTES_PER_COLOR_PIXEL) + sizeof(double)];
    this.byteDepthArray = new byte[this.kinect.DepthFrameSource.FrameDescription.Height * this.kinect.DepthFrameSource.FrameDescription.Width * BYTES_PER_DEPTH_PIXEL + sizeof(double)];
    this.byteIRArray = new byte[this.kinect.InfraredFrameSource.FrameDescription.Height * this.kinect.InfraredFrameSource.FrameDescription.Width * BYTES_PER_IR_PIXEL + sizeof(double)];
    this.bodyArray = new Body[this.kinect.BodyFrameSource.BodyCount];

    // Audio template: 16 kHz sampling, 16 ms frames => 256 samples per frame.
    this.audioContainer = new AudioContainer();
    this.audioContainer.samplingFrequency = 16000;
    this.audioContainer.frameLifeTime = 0.016;
    this.audioContainer.numSamplesPerFrame = (int)(this.audioContainer.samplingFrequency * this.audioContainer.frameLifeTime);
    this.audioContainer.numBytesPerSample = sizeof(float);
    this.audioContainer.audioStream = new float[256];

    // Create network connectors that will send out the data when it is received.
    this.colorConnector = new AsyncNetworkConnector(Properties.Settings.Default.RgbImagePort);
    this.depthConnector = new AsyncNetworkConnector(Properties.Settings.Default.DepthImagePort);
    this.irConnector = new AsyncNetworkConnector(Properties.Settings.Default.IrImagePort);
    this.bodyConnector = new AsyncNetworkConnector(Properties.Settings.Default.BodyPort);
    this.audioConnector = new AsyncNetworkConnector(Properties.Settings.Default.AudioPort);

    // Open the server connections.
    this.colorConnector.Listen();
    this.depthConnector.Listen();
    this.irConnector.Listen();
    this.bodyConnector.Listen();
    this.audioConnector.Listen();
}
/// <summary>
/// Service start-up (2D-scan variant): connects to the default Kinect sensor,
/// wires up the multi-source and audio frame readers, allocates receive
/// buffers (including the 2D laser-scan buffers), opens one network listener
/// per data stream, creates the color display bitmap, and configures the
/// OpenCV HSV threshold used for orange-blob detection.
/// </summary>
/// <param name="args">Service start arguments (unused).</param>
/// <exception cref="KinectException">
/// Thrown when no sensor is detected or a data stream cannot be opened.
/// ExitCode is set (-1, -2, -3 respectively) before throwing so the service
/// control manager records the failure.
/// </exception>
protected override void OnStart(string[] args)
{
    // Try to open the first available Kinect sensor.
    this.kinect = KinectSensor.GetDefault();
    if (this.kinect == null)
    {
        EventLog.WriteEntry("No Kinect device was detected.");
        ExitCode = -1;
        throw new KinectException("No kinect device was detected.");
    }

    this.kinect.Open();
    this.kinect.IsAvailableChanged += this.OnAvailableChanged;

    // Register as a handler for the image data being returned by the Kinect.
    this.reader = this.kinect.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
    this.audioSource = this.kinect.AudioSource;

    if (this.reader == null)
    {
        EventLog.WriteEntry("Unable to connect to Kinect data stream.");
        ExitCode = -2;
        throw new KinectException("Unable to connect to Kinect data stream.");
    }

    this.reader.MultiSourceFrameArrived += this.OnFrameArrived;

    if (this.audioSource == null)
    {
        EventLog.WriteEntry("Unable to open audio source on kinect");
        ExitCode = -3;
        throw new KinectException("Unable to connect to kinect audio source");
    }

    this.audioReader = this.audioSource.OpenReader();
    if (this.audioReader == null)
    {
        // BUGFIX: this runs as a Windows Service, so Console output is never
        // seen. Log to the event log like every other failure path here.
        EventLog.WriteEntry("Issues with audio reader");
    }
    else
    {
        this.audioReader.FrameArrived += this.onAudioFrameArrived;
    }

    // Allocate storage for the data from the Kinect. The "byte" variants add
    // sizeof(double) to leave room for a trailing timestamp in the payload.
    this.colorArray = new byte[this.kinect.ColorFrameSource.FrameDescription.Height * this.kinect.ColorFrameSource.FrameDescription.Width * BYTES_PER_COLOR_PIXEL];
    this.depthArray = new ushort[this.kinect.DepthFrameSource.FrameDescription.Height * this.kinect.DepthFrameSource.FrameDescription.Width];
    this.irArray = new ushort[this.kinect.InfraredFrameSource.FrameDescription.Height * this.kinect.InfraredFrameSource.FrameDescription.Width];
    this.byteColorArray = new byte[(this.kinect.ColorFrameSource.FrameDescription.Height * this.kinect.ColorFrameSource.FrameDescription.Width * BYTES_PER_COLOR_PIXEL) + sizeof(double)];
    this.byteDepthArray = new byte[this.kinect.DepthFrameSource.FrameDescription.Height * this.kinect.DepthFrameSource.FrameDescription.Width * BYTES_PER_DEPTH_PIXEL + sizeof(double)];
    this.byteIRArray = new byte[this.kinect.InfraredFrameSource.FrameDescription.Height * this.kinect.InfraredFrameSource.FrameDescription.Width * BYTES_PER_IR_PIXEL + sizeof(double)];
    this.bodyArray = new Body[this.kinect.BodyFrameSource.BodyCount];

    // 2D scan: 6 floats per depth column plus 3 header floats; the byte
    // buffer is the same count scaled by sizeof(float) (was hard-coded 4/12).
    this.scan2DArray = new float[this.kinect.DepthFrameSource.FrameDescription.Width * 6 + 3];
    this.byteScan2DArray = new byte[this.kinect.DepthFrameSource.FrameDescription.Width * 6 * sizeof(float) + 3 * sizeof(float)];

    // Audio template: 16 kHz sampling, 16 ms frames => 256 samples per frame.
    this.audioContainer = new AudioContainer();
    this.audioContainer.samplingFrequency = 16000;
    this.audioContainer.frameLifeTime = 0.016;
    this.audioContainer.numSamplesPerFrame = (int)(this.audioContainer.samplingFrequency * this.audioContainer.frameLifeTime);
    this.audioContainer.numBytesPerSample = sizeof(float);
    this.audioContainer.audioStream = new float[256];

    // Create network connectors that will send out the data when it is received.
    this.colorConnector = new AsyncNetworkConnector(Properties.Settings.Default.RgbImagePort);
    this.depthConnector = new AsyncNetworkConnector(Properties.Settings.Default.DepthImagePort);
    this.irConnector = new AsyncNetworkConnector(Properties.Settings.Default.IrImagePort);
    this.bodyConnector = new AsyncNetworkConnector(Properties.Settings.Default.BodyPort);
    this.audioConnector = new AsyncNetworkConnector(Properties.Settings.Default.AudioPort);
    this.scan2DConnector = new AsyncNetworkConnector(Properties.Settings.Default.Scan2DPort);

    // Open the server connections.
    this.colorConnector.Listen();
    this.depthConnector.Listen();
    this.irConnector.Listen();
    this.bodyConnector.Listen();
    this.audioConnector.Listen();
    this.scan2DConnector.Listen();

    // get the depth (display) extents — joint-space size comes from the
    // color frame description.
    FrameDescription colorFrameDescription = this.kinect.ColorFrameSource.FrameDescription;
    this.displayWidth = colorFrameDescription.Width;
    this.displayHeight = colorFrameDescription.Height;

    // Create the bitmap used to display/process the color stream.
    this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
    mapper = this.kinect.CoordinateMapper;

    // Set specific color threshold for orange (HSV lower/upper bounds).
    lower = new CvMat(3, 1, MatrixType.U8C1);
    upper = new CvMat(3, 1, MatrixType.U8C1);
    lower[0] = 5;
    lower[1] = 170;
    lower[2] = 170;
    upper[0] = 10;
    upper[1] = 255;
    upper[2] = 255;
}
/// <summary>
/// Handles a Kinect audio frame: copies the beam subframe samples into a
/// fresh <see cref="AudioContainer"/> and broadcasts it to connected audio
/// clients as one newline-terminated JSON message per subframe.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Frame reference used to acquire the beam frame list.</param>
private void onAudioFrameArrived(object sender, AudioBeamFrameArrivedEventArgs e)
{
    // Return if there are no audio clients.
    if (!this.audioConnector.HasClients)
    {
        return;
    }

    // Create an audio container representing Kinect audio buffer data.
    // 16 kHz sampling and 16 ms frames => 256 samples per frame.
    var audioContainer = new AudioContainer();
    audioContainer.samplingFrequency = 16000;
    audioContainer.frameLifeTime = 0.016;
    audioContainer.numSamplesPerFrame = (int)(audioContainer.samplingFrequency * audioContainer.frameLifeTime);
    audioContainer.numBytesPerSample = sizeof(float);
    audioContainer.audioStream = new float[256];

    // Record the current Unix epoch timestamp.
    audioContainer.timestamp = DateTimeOffset.Now.ToUnixTimeMilliseconds();
    // TODO: add relative timestamp to audio?
    // this.audioContainer.relativeTime = e.FrameReference.RelativeTime.TotalMilliseconds;

    // Retrieve audio beams for current frame.
    AudioBeamFrameList frameList = e.FrameReference.AcquireBeamFrames();
    if (frameList == null)
    {
        return;
    }

    // Serialize all of the subframes and send as a JSON message.
    using (frameList)
    {
        // Only one audio beam is supported. Get the subframe list for the one beam.
        IReadOnlyList<AudioBeamSubFrame> subFrameList = frameList[0].SubFrames;

        // Consolidate the beam subframes into a single JSON message.
        foreach (AudioBeamSubFrame subFrame in subFrameList)
        {
            using (subFrame)
            {
                audioContainer.beamAngle = subFrame.BeamAngle;
                audioContainer.beamAngleConfidence = subFrame.BeamAngleConfidence;

                byte[] array = new byte[subFrame.FrameLengthInBytes];
                subFrame.CopyFrameDataToArray(array);

                // BUGFIX: clamp the copy to the fixed 256-float buffer. A
                // subframe longer than 1024 bytes previously threw
                // IndexOutOfRangeException and aborted the whole broadcast.
                int byteCount = Math.Min(array.Length, audioContainer.audioStream.Length * sizeof(float));
                for (int i = 0; i < byteCount; i += sizeof(float))
                {
                    audioContainer.audioStream[i / sizeof(float)] = BitConverter.ToSingle(array, i);
                }

                // Send audio data to clients.
                string json = JsonConvert.SerializeObject(
                    audioContainer,
                    new JsonSerializerSettings { ContractResolver = new AudioContractResolver() }) + "\n";
                byte[] bytes = System.Text.Encoding.ASCII.GetBytes(json);
                this.audioConnector.Broadcast(bytes);
            }
        }
    }
}