Example #1
    // Start is called before the first frame update
    async void Start()
    {
        Timer.Init();

        // Set the fixed time step (~30 Hz) for FixedUpdate(), where the point cloud and pose can be shared
        Time.fixedDeltaTime = 0.033333f;

        rosConnector = (RosSharp.RosBridgeClient.RosConnector)GetComponent(typeof(RosSharp.RosBridgeClient.RosConnector));

        hololensPosePublisher = new HololensPosePublisher();
        hololensPosePublisher.Init(ref rosConnector);

        hololensPointCloudPublisher = new HololensPointCloudPublisher();
        hololensPointCloudPublisher.Init(ref rosConnector);

        markerPosePublisher = new MarkerPosePublisher();
        markerPosePublisher.Init(ref rosConnector);

        internalPointCloudQueue         = new ConcurrentQueue <Vector3[]>();
        robotPointCloudSubscriber       = new RobotPointCloudSubscriber("/trimbot/alignedmap", 0, ref rosConnector, ref internalPointCloudQueue);
        robotPointCloudGameObject       = GameObject.Find("RobotPointCloud");
        lastTimeRobotPointCloudReceived = Timer.SampleCurrentStopwatch();

        robotTargetPosePublisher = new Publisher <PoseStamped>(ref rosConnector, "/trimbot/goal");

        InputManager.Instance.PushFallbackInputHandler(this.gameObject);

#if NETFX_CORE
        // initialize the camera data sources via Windows libraries
        dataSourceGroup = new HololensRobotController.WindowsSensors.DataSourceGroup(ref rosConnector);
        await dataSourceGroup.GetDataSources();
#endif
    }
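The fixed time step set above exists so that a FixedUpdate() on the same component can publish at roughly 30 Hz. A minimal sketch of such a loop is shown below; PublishPose and the mesh-update step are hypothetical names, since the real publisher APIs are not part of this example.

    // Sketch only: PublishPose() is a hypothetical helper, not shown in this example.
    void FixedUpdate()
    {
        // Publish the current HoloLens (camera) pose at the ~30 Hz fixed-update rate configured in Start().
        hololensPosePublisher.PublishPose(Camera.main.transform);

        // Hand any point cloud queued by the ROS subscriber thread over to the main thread.
        Vector3[] points;
        if (internalPointCloudQueue.TryDequeue(out points))
        {
            lastTimeRobotPointCloudReceived = Timer.SampleCurrentStopwatch();
            // ... update robotPointCloudGameObject's mesh from 'points' here ...
        }
    }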
Example #2
    /// <summary>
    /// Unity MonoBehaviour function. The ARUWPVideo component is located here, the target render
    /// frame rate is set to 60, and the unaddedMarkers array is populated from the attached
    /// ARUWPMarker components in preparation for initialization. [internal use]
    /// </summary>
    private void Start()
    {
        rosConnector = (RosSharp.RosBridgeClient.RosConnector)GetComponentInParent(typeof(RosSharp.RosBridgeClient.RosConnector));

        videoManager = GetComponent <ARUWPVideo>();
        if (videoManager == null)
        {
            Debug.Log(TAG + ": not able to find ARUWPVideo");
            Application.Quit();
        }
        Application.targetFrameRate = 60;
        unaddedMarkers = GetComponents <ARUWPMarker>();
    }
Example #3
    public void Init(ref RosSharp.RosBridgeClient.RosConnector rosConnector)
    {
        this.rosConnector = rosConnector;
        publisher         = new RosSharp.RosBridgeClient.NonMono.Publisher <RosSharp.RosBridgeClient.Messages.Sensor.PointCloud2>(ref this.rosConnector, "/hololens/" + Config.PointCloud);

        spatialMappingManager = SpatialMappingManager.Instance;
        spatialMappingManager.SurfaceObserver.TimeBetweenUpdates     = Config.UnitySpatialMappingObserverTimeBetweenUpdates;
        spatialMappingManager.SurfaceObserver.TrianglesPerCubicMeter = Config.UnitySpatialMappingObserverTrianglesPerCubicMeter;
        spatialMappingManager.StartObserver(); // TODO: Check if offset is necessary, i.e. float startTime = SpatialMappingManager.Instance.StartTime;
        spatialMappingManager.DrawVisualMeshes = Config.UnitySpatialMappingObserverDrawVisualMeshes;

        CreatePointFieldArray();
    }
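CreatePointFieldArray() is not shown above. A minimal sketch of what it could build for an x/y/z float32 cloud is given below, assuming RosSharp's Messages.Sensor.PointField mirrors the sensor_msgs/PointField fields (name, offset, datatype, count); the exact member names and types are an assumption.

    // Sketch only: assumes RosSharp's PointField exposes the sensor_msgs field names.
    private RosSharp.RosBridgeClient.Messages.Sensor.PointField[] pointFields;

    private void CreatePointFieldArray()
    {
        // sensor_msgs/PointField datatype 7 == FLOAT32; x, y, z packed at consecutive 4-byte offsets.
        string[] names = { "x", "y", "z" };
        pointFields = new RosSharp.RosBridgeClient.Messages.Sensor.PointField[names.Length];
        for (int i = 0; i < names.Length; i++)
        {
            pointFields[i] = new RosSharp.RosBridgeClient.Messages.Sensor.PointField
            {
                name     = names[i],
                offset   = (uint)(4 * i),
                datatype = 7,
                count    = 1
            };
        }
    }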
Example #4
    public void Init(ref RosSharp.RosBridgeClient.RosConnector rosConnector)
    {
        this.rosConnector = rosConnector;
        publisher         = new RosSharp.RosBridgeClient.NonMono.Publisher <RosSharp.RosBridgeClient.Messages.Geometry.PoseStamped>(ref this.rosConnector, "/hololens/" + Config.MarkerPose);

        GameObject aruwpController = GameObject.Find("ARUWPController");

        if (aruwpController == null)
        {
            Debug.Log("MarkerPosePublisher: GameObject ARUWPController could not be found!");
            Application.Quit();
        }
        else
        {
            marker = aruwpController.GetComponent <ARUWPMarker>();
        }

        if (marker == null)
        {
            Debug.Log("MarkerPosePublisher: GameObject ARUWPController has no ARUWPMarker component!");
            Application.Quit();
        }
    }
Example #5
 public void Init(ref RosSharp.RosBridgeClient.RosConnector rosConnector)
 {
     this.rosConnector = rosConnector;
     publisher         = new RosSharp.RosBridgeClient.NonMono.Publisher <RosSharp.RosBridgeClient.Messages.Geometry.PoseStamped>(ref this.rosConnector, "/hololens/" + Config.HololensPose);
 }
Example #6
 protected virtual void Start()
 {
     rosConnector = GetComponent <RosConnector>();
     new Thread(Subscribe).Start();
 }
Example #7
    /// <summary>
    /// The Task to asynchronously initialize MediaCapture in UWP. The HoloLens camera will be
    /// configured to preview video at the resolution and frame rate selected by videoParameter
    /// (896x504 at 30 fps by default), with pixel format NV12. A MediaFrameReader will be
    /// initialized and its OnFrameArrived callback registered for each video frame. Note that
    /// this task does not start the video preview; it only configures the running behavior.
    /// This task should be executed when the ARUWPController status is ARUWP_STATUS_CLEAN,
    /// and will change it to ARUWP_STATUS_VIDEO_INITIALIZED if no error occurs. [internal use]
    /// </summary>
    /// <returns>Whether the video pipeline was successfully initialized</returns>
    public async Task <bool> InitializeMediaCaptureAsyncTask()
    {
        if (controller.status != ARUWP.ARUWP_STATUS_CLEAN)
        {
            Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() unsupported status");
            return(false);
        }

        if (mediaCapture != null)
        {
            Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because mediaCapture is not null");
            return(false);
        }

        var allGroups = await MediaFrameSourceGroup.FindAllAsync();

        foreach (var group in allGroups)
        {
            Debug.Log(group.DisplayName + ", " + group.Id);
        }

        if (allGroups.Count <= 0)
        {
            Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because there is no MediaFrameSourceGroup");
            return(false);
        }

        Config.SourceSelectionDictionary.TryGetValue(Config.Mono, out isPublishing);
        if (isPublishing)
        {
            rosConnector         = controller.rosConnector;
            publisher            = new RosSharp.RosBridgeClient.NonMono.Publisher <RosSharp.RosBridgeClient.Messages.Sensor.Image>(ref rosConnector, "/hololens/" + Config.Mono);
            FrameReadyToPublish += PublishFrame;
        }

        // Initialize MediaCapture with the source group.
        mediaCapture = new MediaCapture();
        var settings = new MediaCaptureInitializationSettings
        {
            SourceGroup = allGroups[0],
            // This media capture can share streaming with other apps.
            SharingMode = MediaCaptureSharingMode.SharedReadOnly,
            // Only stream video and don't initialize audio capture devices.
            StreamingCaptureMode = StreamingCaptureMode.Video,
            // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
            // instead of preferring GPU D3DSurface images.
            MemoryPreference = MediaCaptureMemoryPreference.Cpu
        };

        await mediaCapture.InitializeAsync(settings);

        Debug.Log(TAG + ": MediaCapture is successfully initialized in shared mode.");

        try
        {
            int   targetVideoWidth, targetVideoHeight;
            float targetVideoFrameRate;
            switch (videoParameter)
            {
            case VideoParameter.Param1280x720x15:
                targetVideoWidth     = 1280;
                targetVideoHeight    = 720;
                targetVideoFrameRate = 15.0f;
                break;

            case VideoParameter.Param1280x720x30:
                targetVideoWidth     = 1280;
                targetVideoHeight    = 720;
                targetVideoFrameRate = 30.0f;
                break;

            case VideoParameter.Param1344x756x15:
                targetVideoWidth     = 1344;
                targetVideoHeight    = 756;
                targetVideoFrameRate = 15.0f;
                break;

            case VideoParameter.Param1344x756x30:
                targetVideoWidth     = 1344;
                targetVideoHeight    = 756;
                targetVideoFrameRate = 30.0f;
                break;

            case VideoParameter.Param896x504x15:
                targetVideoWidth     = 896;
                targetVideoHeight    = 504;
                targetVideoFrameRate = 15.0f;
                break;

            case VideoParameter.Param896x504x30:
                targetVideoWidth     = 896;
                targetVideoHeight    = 504;
                targetVideoFrameRate = 30.0f;
                break;

            default:
                targetVideoWidth     = 896;
                targetVideoHeight    = 504;
                targetVideoFrameRate = 30.0f;
                break;
            }
            var mediaFrameSourceVideoPreview  = mediaCapture.FrameSources.Values.Single(x => x.Info.MediaStreamType == MediaStreamType.VideoPreview);
            MediaFrameFormat targetResFormat  = null;
            float            framerateDiffMin = 60f;
            foreach (var f in mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height))
            {
                if (f.VideoFormat.Width == targetVideoWidth && f.VideoFormat.Height == targetVideoHeight)
                {
                    // Use floating-point division here; integer division would truncate fractional frame rates (e.g. 30000/1001).
                    float framerateDiff = Mathf.Abs((float)f.FrameRate.Numerator / f.FrameRate.Denominator - targetVideoFrameRate);
                    if (targetResFormat == null || framerateDiff < framerateDiffMin)
                    {
                        targetResFormat  = f;
                        framerateDiffMin = framerateDiff;
                    }
                }
            }
            if (targetResFormat == null)
            {
                // Fall back to the smallest supported format if the requested resolution is unavailable.
                Debug.Log(TAG + ": Unable to choose the selected format, falling back");
                targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
            }

            await mediaFrameSourceVideoPreview.SetFormatAsync(targetResFormat);

            frameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSourceVideoPreview, targetResFormat.Subtype);

            frameReader.FrameArrived += OnFrameArrived;
            controller.frameWidth     = Convert.ToInt32(targetResFormat.VideoFormat.Width);
            controller.frameHeight    = Convert.ToInt32(targetResFormat.VideoFormat.Height);
            videoBufferSize           = controller.frameWidth * controller.frameHeight * 4;
            Debug.Log(TAG + ": FrameReader is successfully initialized, " + controller.frameWidth + "x" + controller.frameHeight +
                      ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator);
        }
        catch (Exception e)
        {
            Debug.Log(TAG + ": FrameReader is not initialized");
            Debug.Log(TAG + ": Exception: " + e);
            return(false);
        }

        if (isPublishing)
        {
            double fps;
            if (Config.FrameRateDictionary.TryGetValue(Config.Mono, out fps))
            {
                publishPeriod = 1 / fps;
                var bytesPerPixel   = (int)HololensRobotController.WindowsSensors.CameraHandler.GetBytesPerPixel(MediaFrameSourceKind.Color);
                int pixelBufferSize = (int)(controller.frameHeight * controller.frameWidth * bytesPerPixel);
                publishRowSize = controller.frameWidth * bytesPerPixel;
                publishBuffer  = new byte[pixelBufferSize];
            }
            else
            {
                isPublishing = false;
            }
        }


        controller.status = ARUWP.ARUWP_STATUS_VIDEO_INITIALIZED;
        signalInitDone    = true;
        Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() is successful");
        return(true);
    }
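As the summary notes, this task only configures the pipeline; frames start flowing only once the frame reader is started. A minimal sketch of a matching start task is given below (StartFrameReaderAsyncTask is a hypothetical name; MediaFrameReader.StartAsync() is the UWP call that begins delivering frames to OnFrameArrived).

    // Sketch only: starts the preview configured by InitializeMediaCaptureAsyncTask().
    public async Task<bool> StartFrameReaderAsyncTask()
    {
        MediaFrameReaderStartStatus status = await frameReader.StartAsync();
        if (status == MediaFrameReaderStartStatus.Success)
        {
            Debug.Log(TAG + ": StartFrameReaderAsyncTask() is successful");
            return true;
        }
        Debug.Log(TAG + ": StartFrameReaderAsyncTask() failed, status = " + status);
        return false;
    }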
Example #8
 public void Start()
 {
     rosConnector = GetComponent <RosConnector>();
 }
Example #9
 protected override void Start()
 {
     RosConnector = GetComponent(typeof(RosConnector)) as RosConnector;
     base.Start();
     message = new MessageTypes.Std.Bool();
 }
Example #10
 public PosePublisher(string topic, ref RosConnector connector)
 {
     base.Start(topic, ref connector);
     InitializeMessage();
     new Thread(FixedUpdate).Start();
 }
Example #11
 public JoySubscriber(string topic, float timeStep, ref RosConnector connector)
 {
     base.Start(topic, timeStep, ref connector);
 }
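For completeness, a usage sketch for these constructor-based classes, mirroring Example #1; the topic names and the attaching MonoBehaviour are placeholders.

    // Usage sketch (topic names are placeholders): wire the classes up from a MonoBehaviour
    // that already has a RosConnector component, as in Example #1.
    private RosConnector rosConnector;
    private PosePublisher posePublisher;
    private JoySubscriber joySubscriber;

    void Start()
    {
        rosConnector  = GetComponent<RosConnector>();
        posePublisher = new PosePublisher("/example/pose", ref rosConnector);
        joySubscriber = new JoySubscriber("/example/joy", 0.1f, ref rosConnector);
    }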