Example #1
    // Subscription callback: cache the raw image bytes and set a flag so the texture
    // can be rebuilt later on Unity's main thread (Texture2D is main-thread only).
    private void ImageSubscriptionHandler(bm_imsgs.Image image_message)
    {
        Debug.Log("Image Arrived");
        _displayPixels = image_message.data;

        // Deferred texture reconstruction (done on the main thread instead):
        //bmp = new Texture2D(512, 424, TextureFormat.RGBA32, false);
        //bmp.LoadRawTextureData(_displayPixels);
        //bmp.Apply();
        //uiM.rawPersonImage.texture = bmp;

        check_flag = 1;
    }
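The commented-out block above suggests the texture is rebuilt elsewhere once check_flag is set. A minimal sketch of such a main-thread consumer, assuming bmp is a Texture2D field and uiM.rawPersonImage is a UnityEngine.UI.RawImage (both names come from the comments and are not defined in the excerpt):

    // Sketch only: poll the flag in Update() and rebuild the texture on the main thread.
    void Update()
    {
        if (check_flag == 1)
        {
            if (bmp == null)
                bmp = new Texture2D(512, 424, TextureFormat.RGBA32, false);

            bmp.LoadRawTextureData(_displayPixels); // expects width * height * 4 bytes
            bmp.Apply();                            // upload the pixel data to the GPU
            uiM.rawPersonImage.texture = bmp;       // show it on the RawImage
            check_flag = 0;                         // wait for the next frame
        }
    }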
    // Publishes the latest processed frame (512 x 424, 4 bytes per pixel) over the ROS bridge.
    public void rosImagesMessager()
    {
        bm_imsgs.Image image_message = new bm_imsgs.Image
        {
            height = 424,
            width  = 512,
            //encoding = "bgra8",        // standard sensor_msgs encoding for 4-byte BGRA pixels
            is_bigendian = 0,
            step         = 2048,         // row length in bytes: width (512) * 4
            data         = _finaldisplayPixels
        };
        rosSocket.Publish(rosBMImage_Id, image_message);
    }
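The rosSocket and rosBMImage_Id fields are not set up in the excerpt. A sketch of the wiring, assuming the generic Advertise<T>/Subscribe<T> API of recent ROS# releases (exact RosSocket signatures vary between versions), that bm_imsgs.Image derives from ROS#'s Message base class, and hypothetical topic names and bridge address:

    private RosSocket rosSocket;
    private string rosBMImage_Id;

    void Start()
    {
        // Connect to the rosbridge server (address is a placeholder).
        rosSocket = new RosSocket(
            new RosSharp.RosBridgeClient.Protocols.WebSocketNetProtocol("ws://192.168.0.10:9090"));

        // Advertise the outgoing image topic; Publish() uses the returned id.
        rosBMImage_Id = rosSocket.Advertise<bm_imsgs.Image>("/bm/image_out");

        // Route incoming images to the handler shown above.
        rosSocket.Subscribe<bm_imsgs.Image>("/bm/image_in", ImageSubscriptionHandler);
    }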
Example #3
    // Camera frame handler: converts the frame timestamp to ROS secs/nsecs, fills a
    // sensor_msgs/Image message, and publishes it over the ROS bridge.
    private void PublishFrame(object sender, HololensRobotController.WindowsSensors.FrameEventArgs e)
    {
        int[] structuredTime = HololensRobotController.Utilities.Timer.GetSecondsNanosecondsStructure(e.TimeSpan);
        RosSharp.RosBridgeClient.Messages.Sensor.Image image = new RosSharp.RosBridgeClient.Messages.Sensor.Image();
        image.header.frame_id    = Config.Mono;
        image.header.stamp.secs  = structuredTime[0];
        image.header.stamp.nsecs = structuredTime[1];
        image.header.seq         = frameNo++;
        image.data         = e.Buffer;
        image.encoding     = e.Encoding;
        image.is_bigendian = 0;
        image.height       = (int)e.Height;
        image.width        = (int)e.Width;
        image.step         = (int)e.Step;

        publisher.Publish(image);
        System.Diagnostics.Debug.WriteLine("Mono camera sent frame-" + image.header.seq + " at time " + ((double)structuredTime[0] + ((double)structuredTime[1]) / 1e9).ToString());
    }
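HololensRobotController.Utilities.Timer.GetSecondsNanosecondsStructure is not shown in the excerpt. A plausible implementation, relying only on the fact that one .NET TimeSpan tick equals 100 ns (the real helper may differ):

    // Hypothetical helper: split a TimeSpan into ROS-style [seconds, nanoseconds].
    public static int[] GetSecondsNanosecondsStructure(System.TimeSpan timeSpan)
    {
        long ticks = timeSpan.Ticks;                                      // 1 tick = 100 ns
        int seconds = (int)(ticks / System.TimeSpan.TicksPerSecond);
        int nanoseconds = (int)((ticks % System.TimeSpan.TicksPerSecond) * 100);
        return new[] { seconds, nanoseconds };
    }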
        // Subscription callback: copies an incoming 3-byte-per-pixel image (e.g. bgr8)
        // into a BGRA8 SoftwareBitmap on the UI thread and hands it to the bound view.
        private void SubscriptionHandler(sensor_msgs.Image message)
        {
            var ignore = CoreApplication.MainView.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
            {
                var softwareBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8, message.width, message.height, BitmapAlphaMode.Ignore);

                unsafe
                {
                    using (BitmapBuffer buffer = softwareBitmap.LockBuffer(BitmapBufferAccessMode.Write))
                    {
                        using (var reference = buffer.CreateReference())
                        {
                            byte *dataInBytes;
                            uint capacity;
                            ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacity);

                            // Fill in the BGRA plane: each 3-byte source pixel becomes one
                            // 4-byte BGRA destination pixel, with alpha forced to opaque (255).
                            // Assumes the destination stride is width * 4 and message.step is width * 3.
                            BitmapPlaneDescription bufferLayout = buffer.GetPlaneDescription(0);
                            for (int y = 0; y < bufferLayout.Height; y++)
                            {
                                uint xoffset = 0;
                                for (int x = 0; x < bufferLayout.Stride; x += 4)
                                {
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * y + x + 0] = message.data[message.step * y + xoffset + 0];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * y + x + 1] = message.data[message.step * y + xoffset + 1];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * y + x + 2] = message.data[message.step * y + xoffset + 2];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * y + x + 3] = (byte)255;

                                    xoffset += 3;
                                }
                            }
                        }
                    }
                }

                await MLView.SetBitmapAsync(softwareBitmap);

                NotifyPropertyChanged("MLView");
            });
        }
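The handler above casts the buffer reference to IMemoryBufferByteAccess, which is not declared in the excerpt. The standard COM interop declaration used with SoftwareBitmap (as documented by Microsoft) is:

    using System.Runtime.InteropServices;

    [ComImport]
    [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
    unsafe interface IMemoryBufferByteAccess
    {
        // Returns a raw pointer to the underlying buffer and its size in bytes.
        void GetBuffer(out byte* buffer, out uint capacity);
    }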