Esempio n. 1
0
        private void Track(AsyncStateData asyncData)
        {
            // Background tracking loop; marshals per-frame updates to the UI thread.
            asyncData.Running = true;

            if (InitOpenNi(asyncData))
            {
                // Pump frames until cancellation is requested.
                while (!asyncData.Canceled)
                {
                    _niContext.WaitAndUpdateAll();

                    // Refresh the cached metadata for every generator node.
                    _imageNode.GetMetaData(_imageMeta);
                    _depthNode.GetMetaData(_depthMeta);
                    _sceneNode.GetMetaData(_sceneMeta);

                    // Frame/UI state must be updated on the synchronization thread.
                    asyncData.AsyncOperation.SynchronizationContext.Send(
                        state =>
                        {
                            //UpdateCameraInfo();
                            UpdateFrameData();
                            InvokeTrackingUpdated(EventArgs.Empty);
                        },
                        null);
                }
            }

            asyncData.Running = false;
            asyncData.AsyncOperation.PostOperationCompleted(
                evt => InvokeTrackinkgCompleted(EventArgs.Empty), null);
        }
Esempio n. 2
0
        private void InitOpenNi(AsyncStateData asyncData)
        {
            // Build the OpenNI context from the XML configuration file.
            _niContext = new XnMOpenNIContextEx();
            _niContext.InitFromXmlFile("openni.xml");

            var syncContext = asyncData.AsyncOperation.SynchronizationContext;

            // Image node: grab the generator, cache its metadata, and create
            // the RGB bitmap source on the synchronization thread.
            _imageNode = (XnMImageGenerator)_niContext.FindExistingNode(XnMProductionNodeType.Image);
            _imageMeta = new XnMImageMetaData();
            _imageNode.GetMetaData(_imageMeta);
            syncContext.Send(state => CreateImageBitmap(_imageMeta, out _rgbImageSource), null);

            // Depth node and its bitmap source.
            _depthNode = (XnMDepthGenerator)_niContext.FindExistingNode(XnMProductionNodeType.Depth);
            _depthMeta = new XnMDepthMetaData();
            _depthNode.GetMetaData(_depthMeta);
            syncContext.Send(state => CreateImageBitmap(_depthMeta, out _depthImageSource, PixelFormats.Pbgra32), null);

            // Scene node and its bitmap source.
            _sceneNode = (XnMSceneAnalyzer)_niContext.FindExistingNode(XnMProductionNodeType.Scene);
            _sceneMeta = new XnMSceneMetaData();
            _sceneNode.GetMetaData(_sceneMeta);
            syncContext.Send(state => CreateImageBitmap(_sceneMeta, out _sceneImageSource, PixelFormats.Pbgra32), null);
        }
Esempio n. 3
0
    /// <summary>
    /// Completes a pending receive and echoes the received text back to the
    /// client prefixed with "Hello: ".
    /// </summary>
    /// <param name="asyncResult">Completion handle whose state is the
    /// <c>AsyncStateData</c> attached in <c>BeginReceive</c>.</param>
    private static void asyncReceiveCallback(IAsyncResult asyncResult)
    {
        // Direct cast instead of an unchecked 'as': a wrong state object is a
        // programming error and should fail loudly here, not as a
        // NullReferenceException further down.
        AsyncStateData rcvData = (AsyncStateData)asyncResult.AsyncState;
        int nRecv = rcvData.Socket.EndReceive(asyncResult);

        // Zero bytes means the peer closed the connection gracefully; there is
        // nothing to echo back.
        if (nRecv == 0)
        {
            return;
        }

        string txt = Encoding.UTF8.GetString(rcvData.Buffer, 0, nRecv);

        byte[] sendBytes = Encoding.UTF8.GetBytes("Hello: " + txt);
        rcvData.Socket.BeginSend(sendBytes, 0, sendBytes.Length,
                                 SocketFlags.None, asyncSendCallback, rcvData.Socket);
    }
Esempio n. 4
0
        public void StartTracking()
        {
            // Cancel any tracking session already in progress before starting anew.
            StopTracking();

            var asyncData = new AsyncStateData(new object());

            // Fire-and-forget: run Track on a thread-pool thread via the
            // delegate's async pattern; EndInvoke as callback releases the
            // async resources when Track finishes.
            TrackDelegate trackDelegate = Track;
            trackDelegate.BeginInvoke(asyncData, trackDelegate.EndInvoke, null);

            _currentState = asyncData;
        }
Esempio n. 5
0
        public void StartTracking()
        {
            // A new session always replaces the previous one.
            StopTracking();

            var stateData = new AsyncStateData(new object());

            // Kick off Track asynchronously through the delegate's APM support;
            // passing EndInvoke as the completion callback cleans up the call.
            TrackDelegate del = Track;
            del.BeginInvoke(stateData, del.EndInvoke, null);

            _currentState = stateData;
        }
Esempio n. 6
0
        /// <summary>
        /// Initializes the OpenNI context and the image/depth/scene production
        /// nodes, then notifies listeners on the synchronization thread that
        /// tracking has started.
        /// </summary>
        /// <param name="asyncData">Carries the AsyncOperation whose
        /// synchronization context is used to raise the started event.</param>
        /// <returns><c>true</c> on success; <c>false</c> if any step failed.</returns>
        private bool InitOpenNi(AsyncStateData asyncData)
        {
            try
            {
                _niContext = new XnMOpenNIContextEx();
                _niContext.InitFromXmlFile("openni.xml");

                _imageNode = (XnMImageGenerator)_niContext.FindExistingNode(XnMProductionNodeType.Image);
                _imageMeta = new XnMImageMetaData();
                _imageNode.GetMetaData(_imageMeta);

                if (_imageMeta.PixelFormat != XnMPixelFormat.Rgb24)
                {
                    throw new InvalidOperationException("Only RGB24 pixel format is supported");
                }

                // add depth node
                _depthNode = (XnMDepthGenerator)_niContext.FindExistingNode(XnMProductionNodeType.Depth);
                _depthMeta = new XnMDepthMetaData();
                _depthNode.GetMetaData(_depthMeta);

                if (_depthMeta.PixelFormat != XnMPixelFormat.Grayscale16Bit)
                {
                    throw new InvalidOperationException("Only 16-bit depth precision is supported");
                }

                // Image and depth pixels are consumed together downstream, so
                // the two maps must agree on resolution.
                if (_depthMeta.XRes != _imageMeta.XRes || _depthMeta.YRes != _imageMeta.YRes)
                {
                    throw new InvalidOperationException("Image and depth map must have the same resolution");
                }

                // add scene node
                _sceneNode = (XnMSceneAnalyzer)_niContext.FindExistingNode(XnMProductionNodeType.Scene);
                _sceneMeta = new XnMSceneMetaData();
                _sceneNode.GetMetaData(_sceneMeta);

                // Raise the "tracking started" notification on the UI thread.
                asyncData.AsyncOperation.SynchronizationContext.Send(
                    delegate
                    {
                        UpdateCameraInfo();
                        UpdateFrameData();
                        InvokeTrackinkgStarted(EventArgs.Empty);
                    }, null);

                return true;
            }
            catch (Exception ex)
            {
                // Initialization is best-effort from the caller's perspective:
                // report failure via the return value, but do not discard the
                // diagnostic silently (the old code never used 'ex' at all).
                System.Diagnostics.Debug.WriteLine("InitOpenNi failed: " + ex);
                return false;
            }
        }
Esempio n. 7
0
    private static void serverFunc(object obj)
    {
        // Blocking accept loop: each client connection immediately gets its
        // own asynchronous receive started.
        using (Socket srvSocket =
                   new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp))
        {
            srvSocket.Bind(new IPEndPoint(IPAddress.Any, 11200));
            srvSocket.Listen(10);

            while (true)
            {
                Socket clntSocket = srvSocket.Accept();

                var data = new AsyncStateData();
                data.Buffer = new byte[1024];
                data.Socket = clntSocket;

                clntSocket.BeginReceive(data.Buffer, 0, data.Buffer.Length,
                                        SocketFlags.None, asyncReceiveCallback, data);
            }
        }
    }
Esempio n. 8
0
        private void Track(AsyncStateData asyncData)
        {
            // Frame pump: acquire sensor data on this worker thread, then hand
            // the painting work to the synchronization (UI) thread.
            asyncData.Running = true;

            InitOpenNi(asyncData);

            _frameCounter.Reset();

            while (!asyncData.Canceled)
            {
                // Block until every node has produced a fresh frame.
                _niContext.WaitAndUpdateAll();

                // Refresh cached metadata for all generators.
                _imageNode.GetMetaData(_imageMeta);
                _depthNode.GetMetaData(_depthMeta);
                _sceneNode.GetMetaData(_sceneMeta);

                _depthHist.Update(_depthMeta);
                _sceneMap.Update(_sceneMeta);

                _frameCounter.AddFrame();

                // Painting touches the writable bitmaps, so it must happen on
                // the synchronization thread.
                asyncData.AsyncOperation.SynchronizationContext.Send(
                    state =>
                    {
                        CopyWritableBitmap(_imageMeta, _rgbImageSource);

                        // CopyWritableBitmap(_depthMeta, _depthImageSource);
                        _depthHist.Paint(_depthMeta, _depthImageSource);

                        //CopyWritableBitmap(_sceneMeta, _sceneImageSource);
                        _sceneMap.Paint(_sceneMeta, _sceneImageSource);

                        InvokeUpdateViewPort(EventArgs.Empty);
                    },
                    null);
            }

            asyncData.Running = false;
            asyncData.AsyncOperation.PostOperationCompleted(
                evt => InvokeTrackinkgCompleted(EventArgs.Empty), null);
        }
Esempio n. 9
0
        /// <summary>
        /// Sets up the per-client receive state and starts the first
        /// asynchronous read on the accepted socket.
        /// </summary>
        /// <param name="clntSocket">The accepted client <see cref="Socket"/>
        /// (boxed as <see cref="Object"/> by the thread-start signature).</param>
        private static void accept(Object clntSocket)
        {
            try
            {
                // Direct cast instead of an unchecked 'as': anything other
                // than a Socket here is a programming error and should surface
                // immediately rather than as a later NullReferenceException.
                Socket socket = (Socket)clntSocket;

                AsyncStateData data = new AsyncStateData();
                data.Buffer = new byte[128];
                data.Socket = socket;
                data.Ip     = socket.RemoteEndPoint.ToString();

                socket.BeginReceive(data.Buffer, 0, data.Buffer.Length,
                                    SocketFlags.None, asyncReceiveCallback, data);
            }
            catch (Exception ex)
            {
                // Keep the server alive on a bad client handshake, but record
                // the reason instead of discarding it (the empty finally block
                // was removed).
                Console.WriteLine("ERROR! : Unable to Receive Data");
                Console.WriteLine(ex.Message);
            }
        }
Esempio n. 10
0
        private void InitOpenNi(AsyncStateData asyncData)
        {
            // Build the OpenNI context from the XML configuration.
            _niContext = new Context("openni.xml");

            var syncContext = asyncData.AsyncOperation.SynchronizationContext;

            // Image node: cache its metadata and create the RGB bitmap source
            // on the synchronization thread.
            _imageNode = (ImageGenerator)_niContext.FindExistingNode(NodeType.Image);
            _imageMeta = new ImageMetaData();
            _imageNode.GetMetaData(_imageMeta);
            syncContext.Send(state => CreateImageBitmap(_imageMeta, out _rgbImageSource), null);

            // Depth node and its bitmap source.
            _depthNode = (DepthGenerator)_niContext.FindExistingNode(NodeType.Depth);
            _depthMeta = new DepthMetaData();
            _depthNode.GetMetaData(_depthMeta);
            syncContext.Send(state => CreateImageBitmap(_depthMeta, out _depthImageSource, PixelFormats.Pbgra32), null);

            // NOTE: scene-node initialization is intentionally disabled in this
            // variant; no scene analyzer or scene bitmap source is created here.
            //_sceneNode = (SceneAnalyzer) _niContext.FindExistingNode(NodeType.Scene);
            //_sceneMeta = new SceneMetaData();
            //_sceneNode.GetMetaData(_sceneMeta);
            //asyncData.AsyncOperation.SynchronizationContext.Send(
            //    state => CreateImageBitmap(_sceneMeta, out _sceneImageSource, PixelFormats.Pbgra32),
            //    null);
        }
Esempio n. 11
0
        private void InitOpenNi(AsyncStateData asyncData)
        {
            // Create and configure the OpenNI context from its XML file.
            _niContext = new XnMOpenNIContextEx();
            _niContext.InitFromXmlFile("openni.xml");

            var uiContext = asyncData.AsyncOperation.SynchronizationContext;

            // Image generator; the RGB bitmap source is created on the UI thread.
            _imageNode = (XnMImageGenerator)_niContext.FindExistingNode(XnMProductionNodeType.Image);
            _imageMeta = new XnMImageMetaData();
            _imageNode.GetMetaData(_imageMeta);
            uiContext.Send(_ => CreateImageBitmap(_imageMeta, out _rgbImageSource), null);

            // Depth generator and its bitmap source.
            _depthNode = (XnMDepthGenerator)_niContext.FindExistingNode(XnMProductionNodeType.Depth);
            _depthMeta = new XnMDepthMetaData();
            _depthNode.GetMetaData(_depthMeta);
            uiContext.Send(_ => CreateImageBitmap(_depthMeta, out _depthImageSource, PixelFormats.Pbgra32), null);

            // Scene analyzer and its bitmap source.
            _sceneNode = (XnMSceneAnalyzer)_niContext.FindExistingNode(XnMProductionNodeType.Scene);
            _sceneMeta = new XnMSceneMetaData();
            _sceneNode.GetMetaData(_sceneMeta);
            uiContext.Send(_ => CreateImageBitmap(_sceneMeta, out _sceneImageSource, PixelFormats.Pbgra32), null);
        }
Esempio n. 12
0
        private void Track(AsyncStateData asyncData)
        {
            // Acquisition loop: sensor reads happen on this worker thread,
            // while all bitmap painting is marshalled to the UI thread.
            asyncData.Running = true;

            InitOpenNi(asyncData);

            _frameCounter.Reset();

            while (!asyncData.Canceled)
            {
                // Wait for a complete, fresh frame from every node.
                _niContext.WaitAndUpdateAll();

                // Pull current metadata for image, depth and scene.
                _imageNode.GetMetaData(_imageMeta);
                _depthNode.GetMetaData(_depthMeta);
                _sceneNode.GetMetaData(_sceneMeta);

                _depthHist.Update(_depthMeta);
                _sceneMap.Update(_sceneMeta);

                _frameCounter.AddFrame();

                // Continue the update on the UI thread: the writable bitmaps
                // may only be touched on the synchronization context.
                asyncData.AsyncOperation.SynchronizationContext.Send(
                    _ =>
                    {
                        CopyWritableBitmap(_imageMeta, _rgbImageSource);

                        // CopyWritableBitmap(_depthMeta, _depthImageSource);
                        _depthHist.Paint(_depthMeta, _depthImageSource);

                        //CopyWritableBitmap(_sceneMeta, _sceneImageSource);
                        _sceneMap.Paint(_sceneMeta, _sceneImageSource);

                        InvokeUpdateViewPort(EventArgs.Empty);
                    },
                    null);
            }

            asyncData.Running = false;
            asyncData.AsyncOperation.PostOperationCompleted(
                e => InvokeTrackinkgCompleted(EventArgs.Empty), null);
        }
Esempio n. 13
0
        /// <summary>
        /// Starts an asynchronous download of a picture and returns the local
        /// file path the picture will be saved to.
        /// </summary>
        /// <param name="pictureURL">URL of the picture; used to derive the local file name.</param>
        /// <returns>The target file path, or <see cref="string.Empty"/> on failure.
        /// NOTE: the download completes asynchronously in <c>DownloadPartFinished</c>;
        /// the file is not guaranteed to exist yet when this method returns.</returns>
        private string RetrievePictureAsync(string pictureURL)
        {
            try
            {
                HttpWebRequest myRequest = WebRequestFactory.CreateHttpRequest(API_GET_THUMB);
                myRequest.Method = "GET";

                HttpWebResponse response = (HttpWebResponse)myRequest.GetResponse();

                // BUG FIX: the response stream must NOT be disposed here. The
                // previous version wrapped it in a using-block (and closed the
                // response) while BeginRead was still pending, so the async
                // callback raced against a disposed stream. Ownership of the
                // stream transfers to the DownloadPartFinished callback, which
                // is responsible for closing it when the read chain finishes.
                dataStream = response.GetResponseStream();
                readBuffer = new byte[PT_READ_BUFFER_SIZE];
                string pictureFileName = GetPicturePath(pictureURL);

                AsyncStateData state = new AsyncStateData();
                state.dataHolder = readBuffer;
                state.dataStream = dataStream;
                state.fileName = pictureFileName;
                state.totalBytesToDownload = (int)response.ContentLength;

                dataStream.BeginRead(readBuffer, 0, PT_READ_BUFFER_SIZE, new System.AsyncCallback(DownloadPartFinished), state);

                return pictureFileName;
            }
            catch (Exception)
            {
                // Surface the failure to listeners; empty string signals "no file".
                // NOTE(review): API_ERROR_UPLOAD looks like the wrong constant
                // for a download failure — confirm against the error-code table.
                OnErrorOccured(new PictureServiceEventArgs(PictureServiceErrorLevel.Failed, string.Empty, API_ERROR_UPLOAD));
                return string.Empty;
            }
        }
Esempio n. 14
0
        /// <summary>
        /// Initializes the OpenNI context and the image/depth/scene nodes,
        /// validating pixel formats and resolutions, and raises the
        /// tracking-started event on the synchronization thread.
        /// </summary>
        /// <param name="asyncData">Supplies the AsyncOperation whose
        /// synchronization context is used for the started notification.</param>
        /// <returns><c>true</c> on success; <c>false</c> if any step failed.</returns>
        private bool InitOpenNi(AsyncStateData asyncData)
        {
            try
            {
                _niContext = new XnMOpenNIContextEx();
                _niContext.InitFromXmlFile("openni.xml");

                _imageNode = (XnMImageGenerator)_niContext.FindExistingNode(XnMProductionNodeType.Image);
                _imageMeta = new XnMImageMetaData();
                _imageNode.GetMetaData(_imageMeta);

                if (_imageMeta.PixelFormat != XnMPixelFormat.Rgb24)
                {
                    throw new InvalidOperationException("Only RGB24 pixel format is supported");
                }

                // add depth node
                _depthNode = (XnMDepthGenerator)_niContext.FindExistingNode(XnMProductionNodeType.Depth);
                _depthMeta = new XnMDepthMetaData();
                _depthNode.GetMetaData(_depthMeta);

                if (_depthMeta.PixelFormat != XnMPixelFormat.Grayscale16Bit)
                {
                    throw new InvalidOperationException("Only 16-bit depth precision is supported");
                }

                // Image and depth pixels are consumed together downstream, so
                // the two maps must have matching resolution.
                if (_depthMeta.XRes != _imageMeta.XRes || _depthMeta.YRes != _imageMeta.YRes)
                {
                    throw new InvalidOperationException("Image and depth map must have the same resolution");
                }

                // add scene node
                _sceneNode = (XnMSceneAnalyzer)_niContext.FindExistingNode(XnMProductionNodeType.Scene);
                _sceneMeta = new XnMSceneMetaData();
                _sceneNode.GetMetaData(_sceneMeta);

                // Notify listeners on the UI thread that tracking has started.
                asyncData.AsyncOperation.SynchronizationContext.Send(
                    delegate
                    {
                        UpdateCameraInfo();
                        UpdateFrameData();
                        InvokeTrackinkgStarted(EventArgs.Empty);
                    }, null);

                return true;
            }
            catch (Exception ex)
            {
                // Report failure via the return value, but keep the diagnostic
                // instead of silently dropping the unused 'ex'.
                System.Diagnostics.Debug.WriteLine("InitOpenNi failed: " + ex);
                return false;
            }
        }
Esempio n. 15
0
        private void Track(AsyncStateData asyncData)
        {
            // Worker-thread loop: the pump only runs when OpenNI initialized.
            asyncData.Running = true;

            if (InitOpenNi(asyncData))
            {
                while (!asyncData.Canceled)
                {
                    // Wait for a complete set of fresh frames.
                    _niContext.WaitAndUpdateAll();

                    _imageNode.GetMetaData(_imageMeta);
                    _depthNode.GetMetaData(_depthMeta);
                    _sceneNode.GetMetaData(_sceneMeta);

                    // Frame/UI updates happen on the synchronization thread.
                    asyncData.AsyncOperation.SynchronizationContext.Send(
                        _ =>
                        {
                            //UpdateCameraInfo();
                            UpdateFrameData();
                            InvokeTrackingUpdated(EventArgs.Empty);
                        },
                        null);
                }
            }

            asyncData.Running = false;
            asyncData.AsyncOperation.PostOperationCompleted(
                e => InvokeTrackinkgCompleted(EventArgs.Empty), null);
        }
Esempio n. 16
0
        /// <summary>
        /// Handles a completed client receive: decodes the request (byte 0 is
        /// the form/function code, the rest is a JSON payload), dispatches it
        /// to the matching form handler, and sends the response bytes back.
        /// </summary>
        /// <param name="asyncResult">Completion handle whose state is the
        /// <c>AsyncStateData</c> attached in <c>BeginReceive</c>.</param>
        private static void asyncReceiveCallback(IAsyncResult asyncResult)
        {
            try
            {
                // State object attached when the receive was issued.
                AsyncStateData rcvData = (AsyncStateData)asyncResult.AsyncState;

                // Number of bytes actually received.
                int nRecv = rcvData.Socket.EndReceive(asyncResult);

                // Zero bytes: the peer closed the connection; nothing to dispatch.
                if (nRecv <= 0)
                {
                    return;
                }

                // A fresh identity object per callback so concurrent callbacks
                // do not share state.
                User_Identity uId = new User_Identity();

                // Byte 0 carries the form/function code.
                uId.Fn = rcvData.Buffer[0];

                uId.set_Ip_Addr = rcvData.Ip;
                Console.WriteLine("\n<New Connection Established>\n" +
                                  "IP : " + uId.get_Ip_Addr());

                // BUG FIX: the JSON payload starts at offset 1, so its length
                // is nRecv - 1. The old code passed nRecv, which read one byte
                // past the received data (and threw when the buffer was full).
                uId.Json = Encoding.UTF8.GetString(rcvData.Buffer, 1, nRecv - 1);

                Console.WriteLine(uId.Json);

                // Response buffer handed to the handler; 100 bytes covers the
                // largest per-request reply (see the per-request size table in
                // the project docs).
                byte[] return_to_client = new byte[100];

                unsafe
                {
                    // Pin the response buffer so the handler can write through
                    // a raw pointer while the GC cannot move the array.
                    fixed(byte *_return_to_client = return_to_client)
                    {
                        Form.fm[uId.type()].excute(uId, _return_to_client);
                    }
                }

                rcvData.Socket.BeginSend(return_to_client, 0, return_to_client.Length,
                                         SocketFlags.None, asyncSendCallback, rcvData.Socket);
            }
            catch (Exception e)
            {
                // Keep the server alive on a malformed request, logging why.
                Console.WriteLine("!!!Connection Error Occur!!!\n" +
                                  "<Connection Error>\n" +
                                  e.Message + "\n" +
                                  "<Connection Error>");
            }
        }
Esempio n. 17
0
        /// <summary>
        /// Tracking loop with gesture recognition. Sensor data is read on this
        /// worker thread; bitmap painting is marshalled to the UI thread.
        /// Per-frame errors are logged and the loop keeps running.
        /// </summary>
        /// <param name="asyncData">Carries the cancellation flag and the
        /// AsyncOperation used to reach the synchronization thread.</param>
        private void Track(AsyncStateData asyncData)
        {
            asyncData.Running = true;

            InitOpenNi(asyncData);

            // BUG FIX: the gesture generator used to be created, subscribed and
            // started INSIDE the frame loop, registering a brand-new generator
            // and a duplicate GestureRecognized handler on every iteration.
            // Set it up once, before the loop.
            try
            {
                GestureGenerator gg = new GestureGenerator(_niContext);
                gg.AddGesture("Wave");
                gg.GestureRecognized += new GestureGenerator.GestureRecognizedHandler(gg_GestureRecognized);
                gg.StartGenerating();
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine("An error has occured in Track: " + ex.Message);
            }

            while (!asyncData.Canceled)
            {
                try
                {
                    _niContext.WaitAndUpdateAll();

                    // update image metadata
                    _imageNode.GetMetaData(_imageMeta);
                    _depthNode.GetMetaData(_depthMeta);
                    //_sceneNode..GetMetaData(_sceneMeta);

                    _depthHist.Update(_depthMeta);
                    // _sceneMap.Update(_sceneMeta);

                    // continue update on UI thread
                    asyncData.AsyncOperation.SynchronizationContext.Send(
                        delegate
                        {
                            // Must be called on the synchronization thread.
                            CopyWritableBitmap(_imageMeta, _rgbImageSource);

                            // CopyWritableBitmap(_depthMeta, _depthImageSource);
                            _depthHist.Paint(_depthMeta, _depthImageSource);

                            //CopyWritableBitmap(_sceneMeta, _sceneImageSource);
                            //_sceneMap.Paint(_sceneMeta, _sceneImageSource);

                            InvokeUpdateViewPort(EventArgs.Empty);
                        }, null);
                }
                catch (Exception ex)
                {
                    // A single bad frame should not terminate tracking.
                    System.Diagnostics.Debug.WriteLine("An error has occured in Track: " + ex.Message);
                }
            }
            asyncData.Running = false;
            asyncData.AsyncOperation.PostOperationCompleted(evt => InvokeTrackinkgCompleted(EventArgs.Empty), null);
        }