/// <summary>
        /// Creates a shallow copy of this state instance.
        /// </summary>
        /// <returns>A new instance populated via <c>CopyTo</c></returns>
        public object Clone()
        {
            var clone = new DepthCamSensorState();
            this.CopyTo(clone);

            return clone;
        }
        /// <summary>
        /// Initialize depthcam state.
        /// </summary>
        private void DoInitializeDepthCamAlternate()
        {
            int depthPixelCount =
                this.kinectSensor.DepthStream.FrameWidth * this.kinectSensor.DepthStream.FrameHeight;

            // Pre-allocate the cached frame buffer sized to the sensor's depth resolution.
            this.cachedProcessedFrames.DepthImage = new short[depthPixelCount];

            this.utilitiesPort = DsspHttpUtilitiesService.Create(Environment);

            // Build the initial depth camera state: RGB image mode, identity pose,
            // zero timestamp, and a depth buffer distinct from the cached one above.
            var initialState = new depth.DepthCamSensorState();
            initialState.FieldOfView = KinectCameraConstants.HorizontalFieldOfViewRadians;
            initialState.ImageMode = depth.DepthCamSensorImageMode.Rgb;
            initialState.DepthImageSize = new Size(
                this.kinectSensor.DepthStream.FrameWidth,
                this.kinectSensor.DepthStream.FrameHeight);
            initialState.MaximumRange = KinectCameraConstants.MaximumRangeMeters;
            initialState.TimeStamp = DateTime.MinValue;
            initialState.Pose = new Microsoft.Robotics.PhysicalModel.Pose(
                new Microsoft.Robotics.PhysicalModel.Vector3(0, 0, 0),
                new Microsoft.Robotics.PhysicalModel.Quaternion());
            initialState.DepthImage = new short[depthPixelCount];

            this.depthCamState = initialState;
        }
        /// <summary>
        /// Handles an http query request for raw binary format.
        /// </summary>
        /// <param name="query">The http query</param>
        /// <param name="state">Depth camera state</param>
        /// <param name="transform">XSLT transform to be applied</param>
        /// <param name="utilitiesPort">Utilities port to post the response</param>
        /// <returns>CCR Task Chunk</returns>
        private static IEnumerator<ITask> HttpQueryHelperRawFormat(
            HttpQuery query,
            DepthCamSensorState state,
            string transform,
            DsspHttpUtilitiesPort utilitiesPort)
        {
            var requestedType = query.Body.Query[QueryType];

            byte[] payload = null;
            switch (requestedType.ToLowerInvariant())
            {
            case DepthPlusRgb:
                payload = ConvertVisibleAndDepthToByteArray(
                    state.VisibleImage,
                    state.DepthImage);
                break;

            case Depth:
                // Widen the 16-bit depth samples into a raw byte buffer.
                payload = new byte[state.DepthImage.Length * sizeof(short)];
                Buffer.BlockCopy(state.DepthImage, 0, payload, 0, payload.Length);
                break;

            case Rgb:
            case Ir:
                // Both query types serve the visible buffer; when the sensor is in
                // infrared mode that buffer carries the IR frame (see the bitmap helper).
                payload = state.VisibleImage;
                break;
            }

            if (payload == null)
            {
                // Unrecognized query type: fall back to the XML state representation.
                query.ResponsePort.Post(
                    new HttpResponseType(
                        HttpStatusCode.OK,
                        state,
                        transform));
            }
            else
            {
                using (var stream = new MemoryStream(payload))
                {
                    var writeResponse = new WriteResponseFromStream(
                        query.Body.Context,
                        stream,
                        OctetStream);
                    utilitiesPort.Post(writeResponse);

                    yield return writeResponse.ResultPort.Choice();
                }
            }
        }
        // ---- Example separator preserved from source extraction ("Ejemplo n.º 4", score 0) ----
        /// <summary>
        /// Replaces the service state and notifies subscribers.
        /// </summary>
        /// <param name="replace">Replace request carrying the new state</param>
        /// <returns>CCR task enumerator</returns>
        public IEnumerator <ITask> ReplaceHandler(depthcam.Replace replace)
        {
            _state = replace.Body;

            // A null response port means the sender does not expect an ack
            // (internal senders omit it for performance).
            var responsePort = replace.ResponsePort;
            if (responsePort != null)
            {
                responsePort.Post(dssp.DefaultReplaceResponseType.Instance);
            }

            // Notify subscribers of the state change.
            _subMgrPort.Post(new submgr.Submit(_state, dssp.DsspActions.ReplaceRequest));
            yield break;
        }
        /// <summary>
        /// Provides a standard implementation of HttpGet
        /// </summary>
        /// <param name="get">The HttpGet message to process</param>
        /// <param name="state">The depthcam state</param>
        /// <param name="transform">The embedded resource to use as the XSLT transform</param>
        public static void HttpGetHelper(HttpGet get, DepthCamSensorState state, string transform)
        {
            // Direct cast instead of 'as': a failed clone now surfaces as an
            // InvalidCastException here, rather than a NullReferenceException
            // on the property assignments below.
            var copy = (DepthCamSensorState)state.Clone();

            // Null out the image buffers so the large binary payloads are not
            // serialized into the XML state response.
            copy.DepthImage   = null;
            copy.VisibleImage = null;

            get.ResponsePort.Post(
                new HttpResponseType(
                    HttpStatusCode.OK,
                    copy,
                    transform));
        }
        /// <summary>
        /// Handles http query request.
        /// </summary>
        /// <param name="query">The http query</param>
        /// <param name="state">Depth camera state</param>
        /// <param name="transform">XSLT transform to be applied</param>
        /// <param name="utilitiesPort">Utilities port to post the response</param>
        /// <param name="visibleWidth">Width of a visible image - needed to blend depth and rgb pictures for a visual representation</param>
        /// <param name="visibleHeight">Height of a visible image - needed to blend depth and rgb pictures for a visual representation</param>
        /// <returns>CCR Task Chunk</returns>
        public static IEnumerator <ITask> HttpQueryHelper(
            HttpQuery query,
            DepthCamSensorState state,
            string transform,
            DsspHttpUtilitiesPort utilitiesPort,
            int visibleWidth,
            int visibleHeight)
        {
            var queryType = query.Body.Query[QueryType];

            // Png is the default output format when none is specified.
            var outputFormat = query.Body.Query[QueryFormat] ?? Png;

            // Flip the sensor between RGB and infrared capture when the requested
            // stream does not match the current image mode.
            switch (queryType.ToLowerInvariant())
            {
            case DepthPlusRgb:
            // Intentional fall through.
            case Rgb:
                if (state.ImageMode == DepthCamSensor.DepthCamSensorImageMode.Infrared)
                {
                    state.ImageMode = DepthCamSensor.DepthCamSensorImageMode.Rgb;
                }

                break;

            case Ir:
                if (state.ImageMode == DepthCamSensor.DepthCamSensorImageMode.Rgb)
                {
                    state.ImageMode = DepthCamSensor.DepthCamSensorImageMode.Infrared;
                }

                break;

            case Depth:
                // Depth queries never require a mode change.
                break;
            }

            if (outputFormat.ToLowerInvariant().Equals(Raw))
            {
                return HttpQueryHelperRawFormat(query, state, transform, utilitiesPort);
            }

            return HttpQueryHelperBitmapSource(query, state, transform, utilitiesPort, visibleWidth, visibleHeight);
        }
        // ---- Example separator preserved from source extraction ("Ejemplo n.º 7", score 0) ----
        /// <summary>
        /// Start initializes SimulatedDepthcamService and listens for drop messages
        /// </summary>
        protected override void Start()
        {
            const double KinectMaxValidDepthMm = 4000.0;
            const double KinectMinValidDepthMm = 800.0;

            if (this._state == null)
            {
                // No config file found: build a default state using the Kinect's
                // valid depth window (per KinectReservedSampleValues).
                this._state = new DepthCamSensorState
                {
                    MaximumRange = KinectMaxValidDepthMm / 1000.0,
                    MinimumRange = KinectMinValidDepthMm / 1000.0,
                    FurtherThanMaxDepthValue = (short)4095.0,
                    NearerThanMinDepthValue = (short)0.0
                };

                this.SaveState(this._state);
            }

            this._physicsEngine = physics.PhysicsEngine.GlobalInstance;
            _notificationTarget = new simengine.SimulationEnginePort();

            utilitiesPort = DsspHttpUtilitiesService.Create(Environment);

            // PartnerType.Service is the entity instance name.
            simengine.SimulationEngine.GlobalInstancePort.Subscribe(ServiceInfo.PartnerList, _notificationTarget);

            // Defer all DSSP operations, other than drop, until the simulation
            // entity insertion notification arrives.
            Activate(new Interleave(
                new TeardownReceiverGroup(
                    Arbiter.Receive <simengine.InsertSimulationEntity>(false, _notificationTarget, InsertEntityNotificationHandlerFirstTime),
                    Arbiter.Receive <dssp.DsspDefaultDrop>(false, _mainPort, DefaultDropHandler)),
                new ExclusiveReceiverGroup(),
                new ConcurrentReceiverGroup()));
        }
        // ---- Example separator preserved from source extraction ("Ejemplo n.º 8", score 0) ----
        /// <summary>
        /// Consumes a raycast result from the simulated depth camera entity, converts it
        /// into a depthcam state and posts a Replace message back to this service's main port.
        /// </summary>
        /// <param name="result">Depth camera raycast frame produced by the simulation engine</param>
        /// <returns>CCR task enumerator</returns>
        private IEnumerator <ITask> RaycastResultsHandler(simengine.DepthCameraEntity.DepthCameraResult result)
        {
            try
            {
                var now = Microsoft.Robotics.Common.Utilities.ElapsedSecondsSinceStart;
                // Throttle processing to the entity's configured update interval (ms).
                if (now - _lastRaycastUpdate < ((double)_entity.UpdateInterval / 1000.0))
                {
                    // dont update more than 20 times a second
                    yield break;
                }
                _lastRaycastUpdate = now;

                if (result.Data == null)
                {
                    yield break;
                }

                // Seed the new state with the configured range limits and reserved values.
                var latestResults = new depthcam.DepthCamSensorState();
                latestResults.MaximumRange             = this._state.MaximumRange;
                latestResults.MinimumRange             = this._state.MinimumRange;
                latestResults.FurtherThanMaxDepthValue = this._state.FurtherThanMaxDepthValue;
                latestResults.NearerThanMinDepthValue  = this._state.NearerThanMinDepthValue;
                var depthImage = new short[result.Data.Length];
                latestResults.DepthImage     = depthImage;
                latestResults.DepthImageSize = new Size(result.Width, result.Height);

                if (_entity != null)
                {
                    // Entity stores the field of view in degrees; the state wants radians.
                    latestResults.FieldOfView = (float)(_entity.FieldOfView * Math.PI / 180.0f);
                }

                // Range limits in millimeters, matching the scaled sample units below.
                short maxDepthMM = (short)(this._state.MaximumRange * 1000);
                short minDepthMM = (short)(this._state.MinimumRange * 1000);

                // Scale each 8-bit raycast sample (low byte of the pixel) to millimeters,
                // then clamp to the valid window using the reserved too-near/too-far values.
                for (int i = 0; i < result.Data.Length; i++)
                {
                    var s     = (short)(result.Data[i] & 0xFF);
                    var depth = (short)((s * (short)maxDepthMM) / byte.MaxValue);
                    // The camera's far plane is already set at MaxDepth so no pixels will be further than
                    // that. To compensate for that, we relax the test from '>' to '>=' so that the
                    // 'further-than' pixel value can be set. This enables similar behavior to Kinect where
                    // too-near and too-far pixels are both set to zero.
                    if (depth >= maxDepthMM)
                    {
                        // this if branch is redundant if the shader sets the depth limit but its defense in depth.
                        depthImage[i] = this._state.FurtherThanMaxDepthValue;
                    }
                    else if (depth < minDepthMM)
                    {
                        depthImage[i] = this._state.NearerThanMinDepthValue;
                    }
                    else
                    {
                        depthImage[i] = depth;
                    }
                }

                byte[] rgbImage = null;

                // Grab a matching visible frame from the simulated webcam, if one is partnered.
                if (webcamPort != null)
                {
                    var stateOrFault = webcamPort.QueryFrame();
                    yield return(stateOrFault.Choice(
                                     response =>
                    {
                        rgbImage = response.Frame;
                    },
                                     fault => LogError(fault)));
                }

                if (rgbImage != null)
                {
                    latestResults.VisibleImage = rgbImage;
                }

                latestResults.Pose      = _state.Pose;
                latestResults.TimeStamp = DateTime.Now;
                // send replace message to self
                var replace = new depthcam.Replace();
                // for perf reasons dont set response port, we are just talking to ourself anyway
                replace.ResponsePort = null;
                replace.Body         = latestResults;
                _mainPort.Post(replace);
            }
            finally
            {
                // Always re-arm the receiver so the next raycast result is processed,
                // even if this handler exited early or faulted.
                _raycastResults.Clear();
                _rayCastQueue.Enqueue(Arbiter.ReceiveWithIterator(false, _raycastResults, RaycastResultsHandler));
            }
        }
        /// <summary>
        /// Handles http query requests by rendering the sensor data as a PNG image.
        /// </summary>
        /// <param name="query">The http query</param>
        /// <param name="state">Depth camera state</param>
        /// <param name="transform">XSLT transform to be applied</param>
        /// <param name="utilitiesPort">Utilities port to post the response</param>
        /// <param name="visibleWidth">Width of a visible image - needed to blend depth and rgb pictures for a visual representation</param>
        /// <param name="visibleHeight">Height of a visible image - needed to blend depth and rgb pictures for a visual representation</param>
        /// <returns>CCR Task Chunk</returns>
        private static IEnumerator <ITask> HttpQueryHelperBitmapSource(
            HttpQuery query,
            DepthCamSensorState state,
            string transform,
            DsspHttpUtilitiesPort utilitiesPort,
            int visibleWidth,
            int visibleHeight)
        {
            var requestedType = query.Body.Query[QueryType];
            BitmapSource bitmap = null;

            try
            {
                switch (requestedType.ToLowerInvariant())
                {
                case DepthPlusRgb:
                    // Downscale the visible image to the depth resolution so the two
                    // frames can be blended. Expensive, so only done when explicitly requested.
                    var scaledVisible = ResizeVisibleImageToMatchDepthImageDimentions(
                        state.VisibleImage,
                        visibleWidth,
                        visibleHeight,
                        state.DepthImageSize.Width,
                        state.DepthImageSize.Height);

                    bitmap = CreateBitmapSourceFromByteArray(
                        ConvertVisibleAndDepthTo32bppByteArray(scaledVisible, state.DepthImage),
                        state.DepthImageSize.Width,
                        state.DepthImageSize.Height,
                        swm.PixelFormats.Bgra32);
                    break;

                case Rgb:
                    bitmap = CreateBitmapSourceFromByteArray(
                        state.VisibleImage,
                        visibleWidth,
                        visibleHeight,
                        swm.PixelFormats.Bgr24);
                    break;

                case Depth:
                    bitmap = CreateBitmapSourceFromShortPixelArray(
                        state.DepthImage,
                        state.DepthImageSize.Width,
                        state.DepthImageSize.Height,
                        (int)(state.MaximumRange * 1000),
                        state.DepthImage.Clone() as short[]);
                    break;

                case Ir:
                    bitmap = CreateBitmapSourceFromInfraredArray(
                        state.VisibleImage,
                        state.DepthImageSize.Width,
                        state.DepthImageSize.Height,
                        swm.PixelFormats.Gray16);
                    break;
                }
            }
            catch
            {
                // We do not attempt to synchronize the HTTP view with state
                // switching, so we might switch to RGB but still have data from
                // IR mode. Serve a black Bgr24 frame at depth resolution instead.
                var width  = state.DepthImageSize.Width;
                var height = state.DepthImageSize.Height;
                bitmap     = BitmapSource.Create(
                    width,
                    height,
                    96,
                    96,
                    swm.PixelFormats.Bgr24,
                    null,
                    new byte[3 * width * height],
                    width * 3);
            }

            if (bitmap == null)
            {
                // Unrecognized query type: fall back to the XML state representation.
                query.ResponsePort.Post(
                    new HttpResponseType(
                        HttpStatusCode.OK,
                        state,
                        transform));
            }
            else
            {
                using (var stream = new MemoryStream())
                {
                    // Encode as non-interlaced PNG.
                    var encoder = new PngBitmapEncoder();
                    encoder.Interlace = PngInterlaceOption.Off;

                    encoder.Frames.Add(BitmapFrame.Create(bitmap));
                    encoder.Save(stream);

                    stream.Position = 0;

                    var writeResponse = new WriteResponseFromStream(
                        query.Body.Context,
                        stream,
                        MediaPng);
                    utilitiesPort.Post(writeResponse);

                    yield return writeResponse.ResultPort.Choice();
                }
            }
        }