Example #1
        private void trackTest(Touch touch)
        {
            List <ARHitResult> hitResults = ARFrame.HitTest(touch);

            foreach (ARHitResult singleHit in hitResults)
            {
                ARTrackable trackable = singleHit.GetTrackable();
                if ((trackable is ARPlane && ((ARPlane)trackable).IsPoseInPolygon(singleHit.HitPose)) ||
                    (trackable is ARPoint))
                {
                    ARAnchor anchor = singleHit.CreateAnchor();
                    ARDebug.LogInfo("GridARScript:trackTest anchor world position {0}", anchor.GetPose().position);
                    Vector3 screenPos = Camera.main.WorldToScreenPoint(anchor.GetPose().position);
                    ARDebug.LogInfo("GridARScript:trackTest anchor screen position {0}", screenPos);


                    if (m_touchIndex % 2 == 0)
                    {
                        m_touchBeginModel.GetComponent <disToolLogoVisualizer>().setAnchor(anchor);

                        var script = m_grid.GetComponent <GridARScript>();
                        if (script)
                        {
                            script.setBeginAnchor(anchor);
                        }
                    }
                    else
                    {
                        m_touchEndModel.GetComponent <disToolLogoVisualizer>().setAnchor(anchor);
                    }
                    ++m_touchIndex;
                    break;
                }
            }
        }
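The method above relies on several fields that are not shown. A minimal sketch of how they might be declared, with names taken from the code and types inferred from usage (assumptions, not the original source):

        // Assumed companion fields for trackTest(); types are inferred from the calls.
        private int m_touchIndex = 0;          // even touches set the begin anchor, odd touches the end anchor
        public GameObject m_touchBeginModel;   // carries the disToolLogoVisualizer for the start point
        public GameObject m_touchEndModel;     // carries the disToolLogoVisualizer for the end point
        public GameObject m_grid;              // carries the GridARScript component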
Example #2
        public void Update()
        {
            // Do not update if HUAWEI AR is not tracking.
            if (ARFrame.GetTrackingState() != ARTrackable.TrackingState.TRACKING)
            {
                m_mesh.Clear();
                return;
            }

            // Fill in the data to draw the point cloud.
            ARPointCloud pointCloud = ARFrame.AcquirePointCloud();

            pointCloud.GetPoints(ref m_points);
            pointCloud.Release();
            if (m_points.Count > 0)
            {
                // Update the mesh indices array.
                m_pointIndex.Clear();
                for (int i = 0; i < Mathf.Min(m_points.Count, k_maxPointCount); i++)
                {
                    m_pointIndex.Add(i);
                }

                m_mesh.Clear();
                m_mesh.vertices = m_points.ToArray();
                m_mesh.SetIndices(m_pointIndex.ToArray(), MeshTopology.Points, 0);
            }
        }
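Update() assumes a mesh and two reusable buffers initialized elsewhere. A minimal sketch of that setup, assuming the component sits on a GameObject with a MeshFilter (only the field names come from the method; the rest is inferred):

        private Mesh m_mesh;
        private List<Vector3> m_points = new List<Vector3>();
        private List<int> m_pointIndex = new List<int>();
        private const int k_maxPointCount = 5000;  // cap on rendered points; the real value is not shown

        public void Start()
        {
            m_mesh = GetComponent<MeshFilter>().mesh;
            m_mesh.Clear();
        }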
Example #3
 public override void DidUpdateFrame(ARSession session, ARFrame frame)
 {
     if (arkitApp.TryGetTarget(out var ap))
     {
         Urho.Application.InvokeOnMain(() => ap.ProcessARFrame(session, frame));
     }
 }
Example #4
        public override void OnFrameUpdate(ARSession session, ARFrame frame)
        {
            base.OnFrameUpdate(session, frame);

            if (PlaneTrackingEnabled)
            {
                return;
            }

            var hits = frame.HitTest(new CoreGraphics.CGPoint(0.5, 0.5), ARHitTestResultType.ExistingPlaneUsingExtent);

            if (!hits?.Any() ?? true) // True when the hit test returned null or no results.
            {
                Crosshair.BackgroundColor = UIColor.Gray;
            }
            else
            {
                Crosshair.BackgroundColor = UIColor.Green;

                if (PlaySoundOnNodeDetection())
                {
                    SoundManager.PlaySound("text");
                }
            }
        }
Example #5
        public static bool TryGetCameraImageBytes(out ARCameraImageBytes image)
        {
            bool isHD = false;

            if (ImmersalSDK.Instance.androidResolution == ImmersalSDK.CameraResolution.Max)
            {
                try
                {
                    image = ARFrame.AcquirPreviewImageBytes();
                    isHD  = true;
                }
                catch (SystemException e)
                {
                    Debug.LogError("Cannot acquire FullHD image: " + e.Message);

                    image = ARFrame.AcquireCameraImageBytes();
                }
            }
            else
            {
                image = ARFrame.AcquireCameraImageBytes();
            }

            return(isHD);
        }
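A possible call site for the helper above (hypothetical; the real caller is not shown). The return value only reports whether the full-resolution path succeeded, so the image must be consumed and disposed in either case:

        bool isHD = TryGetCameraImageBytes(out ARCameraImageBytes image);
        if (image != null && image.IsAvailable)
        {
            Debug.LogFormat("Acquired {0} image: {1}x{2}", isHD ? "full-HD" : "standard", image.Width, image.Height);
            image.Dispose(); // ARCameraImageBytes wraps native memory and should be released.
        }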
Example #6
 void ArkitComponent_ARFrame(ARFrame frame)
 {
     if (ContinuesHitTestAtCenter)
     {
         LastHitTest = arkitComponent.HitTest(frame, 0.5f, 0.5f);
     }
 }
Example #7
        private void _CreateWorld(Touch touch)
        {
            List <ARHitResult> hitResults = ARFrame.HitTest(touch);

            ARDebug.LogInfo("_DrawARLogo hitResults count {0}", hitResults.Count);
            foreach (ARHitResult singleHit in hitResults)
            {
                ARTrackable trackable = singleHit.GetTrackable();
                ARDebug.LogInfo("_DrawARLogo GetTrackable {0}", singleHit.GetTrackable());
                if (trackable is ARPlane && ((ARPlane)trackable).IsPoseInPolygon(singleHit.HitPose) ||
                    trackable is ARPoint)
                {
                    ARAnchor anchor = singleHit.CreateAnchor();

                    Vector3 anchorPosition = anchor.GetPose().position;

                    if (world)
                    {
                        world.transform.position = anchorPosition;
                    }
                    else
                    {
                        world = Instantiate(worldPrefab, anchorPosition, Quaternion.identity);
                    }
                    break;
                }
            }
        }
Example #8
        public void DidUpdateFrame(ARSession session, ARFrame frame)
        {
            // Capture the camera reference once: the frame.Camera accessor can return
            // null at the time of execution, so storing it prevents the value from
            // changing mid-method (the null-conditional also guards against a null frame).
            var camera = frame?.Camera;

            if (frame == null || camera == null)
            {
                return;
            }

            switch (frame.WorldMappingStatus)
            {
            case ARWorldMappingStatus.Extending:
            case ARWorldMappingStatus.Mapped:
                _saveButton.Enabled = objAnchor != null && frame.Anchors.Contains(objAnchor);
                break;

            default:
                _saveButton.Enabled = false;
                break;
            }

            _statusLabel.Text =
                $"Mapping: {frame.WorldMappingStatus}" +
                Environment.NewLine +
                $"Tracking: {camera.TrackingState}";

            UpdateSessionInfoLabel(frame, camera.TrackingState, camera.TrackingStateReason);
        }
Example #9
        public static bool TryGetTrackingQuality(out int quality)
        {
            quality = default;

            if (!ARFrame.TextureIsAvailable())
            {
                return(false);
            }

            ARTrackable.TrackingState trackingState = ARFrame.GetTrackingState();

            switch (trackingState)
            {
            case ARTrackable.TrackingState.TRACKING:
                quality = 4;
                break;

            case ARTrackable.TrackingState.PAUSED:
                quality = 1;
                break;

            default:
                quality = 0;
                break;
            }

            return(true);
        }
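A hypothetical caller, turning the coarse 0-4 scale into a go/no-go decision (the threshold is an assumption):

        if (TryGetTrackingQuality(out int quality) && quality >= 4)
        {
            // Full tracking: safe to capture or localize on this frame.
        }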
Example #10
        private void UpdateTextures(ARFrame frame)
        {
            try
            {
                using (var img = frame.CapturedImage)
                {
                    if (!this.texturesInitialized)
                    {
                        this.CreateVideoMesh(img);
                        this.CreateTextures(img);
                        this.texturesInitialized = true;
                    }

                    var yPtr  = img.GetBaseAddress(0);
                    var uvPtr = img.GetBaseAddress(1);

                    if (yPtr != IntPtr.Zero &&
                        uvPtr != IntPtr.Zero)
                    {
                        this.graphicsDevice.Textures.SetData(this.cameraTextureY, yPtr, this.sizeY);
                        this.graphicsDevice.Textures.SetData(this.cameraTextureUV, uvPtr, this.sizeUV);
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
            }
        }
Example #11
 public override void DidUpdateFrame(ARSession session, ARFrame frame)
 {
     using (frame)
     {
         OnFrameUpdate?.Invoke(session, frame);
     }
 }
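Because the frame is wrapped in a using block, it is disposed as soon as every OnFrameUpdate handler returns; the camera stream only has a finite pool of buffers, so retained frames would starve it. A sketch of a safe subscriber (the lastCameraTransform field is hypothetical):

 // Copy what you need out of the frame inside the handler; never store the frame itself.
 OnFrameUpdate += (session, frame) => lastCameraTransform = frame.Camera.Transform;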
Example #12
        public void OnDrawFrame(IGL10 gl)
        {
            GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

            if (mSession == null)
            {
                return;
            }
            if (mDisplayRotationManager.GetDeviceRotation())
            {
                mDisplayRotationManager.UpdateArSessionDisplayGeometry(mSession);
            }

            try {
                mSession.SetCameraTextureName(mTextureDisplay.GetExternalTextureId());
                ARFrame  arFrame  = mSession.Update();
                ARCamera arCamera = arFrame.Camera;

                // The size of the projection matrix is 4 * 4.
                float[] projectionMatrix = new float[16];

                arCamera.GetProjectionMatrix(projectionMatrix, PROJ_MATRIX_OFFSET, PROJ_MATRIX_NEAR, PROJ_MATRIX_FAR);
                mTextureDisplay.OnDrawFrame(arFrame);
                StringBuilder sb = new StringBuilder();
                UpdateMessageData(sb);
                mTextDisplay.OnDrawFrame(sb);

                // The size of ViewMatrix is 4 * 4.
                float[] viewMatrix = new float[16];
                arCamera.GetViewMatrix(viewMatrix, 0);
                var allTrackables = mSession.GetAllTrackables(Java.Lang.Class.FromType(typeof(ARPlane)));

                foreach (ARPlane plane in allTrackables)
                {
                    if (plane.Type != ARPlane.PlaneType.UnknownFacing &&
                        plane.TrackingState == ARTrackableTrackingState.Tracking)
                    {
                        HideLoadingMessage();
                        break;
                    }
                }
                mLabelDisplay.OnDrawFrame(allTrackables, arCamera.DisplayOrientedPose,
                                          projectionMatrix);
                HandleGestureEvent(arFrame, arCamera, projectionMatrix, viewMatrix);
                ARLightEstimate lightEstimate       = arFrame.LightEstimate;
                float           lightPixelIntensity = 1;
                if (lightEstimate.GetState() != ARLightEstimate.State.NotValid)
                {
                    lightPixelIntensity = lightEstimate.PixelIntensity;
                }
                DrawAllObjects(projectionMatrix, viewMatrix, lightPixelIntensity);
            } catch (ArDemoRuntimeException e) {
                Log.Info(TAG, "Exception on the ArDemoRuntimeException!");
            } catch (Exception t) {
                // This prevents the app from crashing due to unhandled exceptions.
                Log.Info(TAG, "Exception on the OpenGL thread: " + t.Message);
            }
        }
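OnDrawFrame() implements GLSurfaceView.IRenderer, so it is driven by the GL surface's render loop. A minimal sketch of the wiring this assumes (the mSurfaceView field is hypothetical):

        // Assumed setup in the hosting activity: render continuously so that
        // OnDrawFrame() runs once per display refresh.
        mSurfaceView.SetEGLContextClientVersion(2);
        mSurfaceView.SetRenderer(this);
        mSurfaceView.RenderMode = Rendermode.Continuously;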
Example #13
        public void OnDrawFrame(IGL10 gl)
        {
            // Clear the color buffer and notify the driver not to load the data of the previous frame.
            GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

            if (mSession == null)
            {
                return;
            }
            if (mDisplayRotationManager.GetDeviceRotation())
            {
                mDisplayRotationManager.UpdateArSessionDisplayGeometry(mSession);
            }

            try
            {
                mSession.SetCameraTextureName(mTextureDisplay.GetExternalTextureId());
                ARFrame  arFrame  = mSession.Update();
                ARCamera arCamera = arFrame.Camera;

                // The size of the projection matrix is 4 * 4.
                float[] projectionMatrix = new float[16];

                // Obtain the projection matrix through ARCamera.
                arCamera.GetProjectionMatrix(projectionMatrix, PROJECTION_MATRIX_OFFSET, PROJECTION_MATRIX_NEAR,
                                             PROJECTION_MATRIX_FAR);
                mTextureDisplay.OnDrawFrame(arFrame);
                ICollection hands = mSession.GetAllTrackables(Java.Lang.Class.FromType(typeof(ARHand)));
                if (hands.Count == 0)
                {
                    mTextDisplay.OnDrawFrame(null);
                    return;
                }
                foreach (ARHand hand in hands)
                {
                    // Update the hand recognition information to be displayed on the screen.
                    StringBuilder sb = new StringBuilder();
                    UpdateMessageData(sb, hand);

                    // Display hand recognition information on the screen.
                    mTextDisplay.OnDrawFrame(sb);
                }
                foreach (HandRelatedDisplay handRelatedDisplay in mHandRelatedDisplays)
                {
                    handRelatedDisplay.OnDrawFrame(hands, projectionMatrix);
                }
            }
            catch (ArDemoRuntimeException e)
            {
                Log.Info(TAG, "Exception on the ArDemoRuntimeException!");
            }
            catch (Exception t)
            {
                // This prevents the app from crashing due to unhandled exceptions.
                Log.Info(TAG, "Exception on the OpenGL thread " + t.Message);
            }
        }
Example #14
 public override void DidUpdateFrame(ARSession session, ARFrame frame)
 {
     if (arkitApp.TryGetTarget(out var ap))
     {
         ap.ProcessARFrame(session, frame);
         //we use ApplicationOptions.DelayedStart=true:
         ap.Engine.RunFrame();
     }
 }
Example #15
        private void _DrawARLogo(Touch touch)
        {
            List <ARHitResult> hitResults = ARFrame.HitTest(touch);
            ARHitResult        hitResult  = null;
            ARTrackable        trackable  = null;
            Boolean            hasHitFlag = false;

            ARDebug.LogInfo("_DrawARLogo hitResults count {0}", hitResults.Count);
            foreach (ARHitResult singleHit in hitResults)
            {
                trackable = singleHit.GetTrackable();
                ARDebug.LogInfo("_DrawARLogo GetTrackable {0}", singleHit.GetTrackable());
                if ((trackable is ARPlane && ((ARPlane)trackable).IsPoseInPolygon(singleHit.HitPose)) ||
                    (trackable is ARPoint))
                {
                    hitResult  = singleHit;
                    hasHitFlag = true;
                    if (trackable is ARPlane)
                    {
                        break;
                    }
                }
            }

            if (!hasHitFlag)
            {
                ARDebug.LogInfo("_DrawARLogo can't hit!");
                return;
            }

            if (addedAnchors.Count > 16)
            {
                ARAnchor toRemove = addedAnchors[0];
                toRemove.Detach();
                addedAnchors.RemoveAt(0);
            }

            GameObject prefab;

            trackable = hitResult.GetTrackable();
            if (trackable is ARPlane)
            {
                prefab = arDiscoveryLogoPlanePrefabs;
            }
            else
            {
                prefab = arDiscoveryLogoPointPrefabs;
            }

/*
 *          ARAnchor anchor = hitResult.CreateAnchor();
 *          var logoObject = Instantiate(prefab, anchor.GetPose().position, anchor.GetPose().rotation);
 *          logoObject.GetComponent<ARDiscoveryLogoVisualizer>().Initialize(anchor);
 *          addedAnchors.Add(anchor);
 */
        }
Example #16
        public void Update()
        {
            if (!world)
            {
                _DrawPlane();
            }

            Touch touch;

            if (ARFrame.GetTrackingState() == ARTrackable.TrackingState.TRACKING && Input.touchCount > 0) // Only check for touch if we are tracking our environment.
            {
                touch = Input.GetTouch(0); // Safe: touchCount > 0 guarantees a touch exists at index 0.
                switch (Input.touchCount)
                {
                case 1:
                    switch (touch.phase)
                    {
                    case TouchPhase.Began:
                        if (slingshot)         // Cleanup in case we missed the touch phase end or release
                        {
                            slingshot.Release();
                        }

                        slingshot = world.AddComponent <Slingshot>();
                        slingshot.Create(slingshotPrefab, projectilePrefab, projectileSource, indicator);

                        if (Camera.main != null)
                        {
                            slingshot.ProjectileToScreenDirection = Camera.main.ScreenPointToRay(touch.position).direction * 0.5f;
                        }
                        break;

                    case TouchPhase.Ended:
                    case TouchPhase.Canceled:
                        slingshot.Release();
                        slingshot = null;
                        break;

                    default:
                        if (Camera.main != null)
                        {
                            slingshot.ProjectileToScreenDirection = Camera.main.ScreenPointToRay(touch.position).direction * 0.5f;
                        }
                        break;
                    }
                    break;

                default:
                    if (touch.phase == TouchPhase.Began)
                    {
                        _CreateWorld(touch);
                    }
                    break;
                }
            }
        }
Example #17
 private void _DrawFace()
 {
     m_newFaces.Clear();
     ARFrame.GetTrackables <ARFace>(m_newFaces, ARTrackableQueryFilter.NEW);
     for (int i = 0; i < m_newFaces.Count; i++)
     {
         GameObject faceObject = Instantiate(facePrefabs, Vector3.zero, Quaternion.identity, transform);
         faceObject.GetComponent <FaceVisualizer>().Initialize(m_newFaces[i]);
     }
 }
Example #18
 private void _DrawHand()
 {
     newHands.Clear();
     ARFrame.GetTrackables <ARHand>(newHands, ARTrackableQueryFilter.NEW);
     for (int i = 0; i < newHands.Count; i++)
     {
         GameObject handObject = Instantiate(handPrefabs, Vector3.zero, Quaternion.identity, transform);
         handObject.GetComponent <HandVisualizer>().Initialize(newHands[i]);
     }
 }
Example #19
 private void _DrawPlane()
 {
     newPlanes.Clear();
     ARFrame.GetTrackables <ARPlane>(newPlanes, ARTrackableQueryFilter.NEW);
     for (int i = 0; i < newPlanes.Count; i++)
     {
         GameObject planeObject = Instantiate(planePrefabs, Vector3.zero, Quaternion.identity, transform);
         planeObject.GetComponent <TrackedPlaneVisualizer>().Initialize(newPlanes[i]);
     }
 }
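_DrawFace, _DrawHand and _DrawPlane differ only in trackable type, prefab and visualizer, so they could share one generic helper. A possible refactoring, assuming ARFrame.GetTrackables<T> is generic over ARTrackable as the three calls suggest (requires using System; for Action):

 // Hypothetical generic form of the three _Draw* methods above.
 private void _DrawTrackables<T>(List<T> buffer, GameObject prefab, Action<GameObject, T> initialize)
     where T : ARTrackable
 {
     buffer.Clear();
     ARFrame.GetTrackables<T>(buffer, ARTrackableQueryFilter.NEW);
     for (int i = 0; i < buffer.Count; i++)
     {
         GameObject obj = Instantiate(prefab, Vector3.zero, Quaternion.identity, transform);
         initialize(obj, buffer[i]);
     }
 }
 // e.g. _DrawTrackables(m_newFaces, facePrefabs, (go, face) => go.GetComponent<FaceVisualizer>().Initialize(face));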
Example #20
 private void UpdateSceneEnvironmentProbe(ARFrame frame)
 {
     if (this.sceneEnvironmentProbeAnchor == null && this.currentTexturingMode == AREnvironmentTexturing.Manual)
     {
         // Create an environment probe anchor with room-sized extent to act as fallback when the probe anchor of
         // an object is removed and added during translation and scaling
         this.sceneEnvironmentProbeAnchor = new AREnvironmentProbeAnchor("sceneProbe", OpenTK.NMatrix4.Identity, new OpenTK.NVector3(5f, 5f, 5f));
         this.sceneView.Session.AddAnchor(this.sceneEnvironmentProbeAnchor);
     }
 }
Example #21
        private void UpdatePositionDisplay(ARFrame frame)
        {
            if (frame?.Camera == null)
            {
                return;
            }

            var t = frame.Camera.Transform;

            PositionLabel.Text = $"{t.Column3.X:N2}, {t.Column3.Y:N2}, {t.Column3.Z:N2}";
        }
Example #22
        private void _DrawBody()
        {
            newBodys.Clear();
            ARFrame.GetTrackables <ARBody>(newBodys, ARTrackableQueryFilter.NEW);

            for (int i = 0; i < newBodys.Count; i++)
            {
                GameObject bodyObject = Instantiate(bodyPrefabs, Vector3.zero, Quaternion.identity, transform);
                bodyObject.GetComponent <BodySkeletonVisualizer>().Initialize(newBodys[i]);
            }
        }
Example #23
        public Vector3? HitTest(ARFrame frame, float screenX = 0.5f, float screenY = 0.5f)
        {
            var result = frame?.HitTest(new CoreGraphics.CGPoint(screenX, screenY),
                                        ARHitTestResultType.ExistingPlaneUsingExtent)?.FirstOrDefault();

            if (result != null && result.Distance > 0.2f)
            {
                var row = result.WorldTransform.Column3;
                return(new Vector3(row.X, row.Y, -row.Z));
            }
            return(null);
        }
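The Z component is negated when building the result because ARKit reports right-handed world coordinates while the Urho scene consuming this vector is left-handed. A hypothetical caller placing a node at the first plane hit under the screen centre:

        Vector3? hit = HitTest(frame);
        if (hit.HasValue)
        {
            node.Position = hit.Value; // 'node' is an assumed UrhoSharp scene node.
        }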
Example #24
        private void UpdateSessionInfoLabel(ARFrame currentFrame, ARTrackingState trackingState, ARTrackingStateReason trackingStateReason)
        {
            string message = "";

            _snapShotThumbnail.Hidden = true;

            switch (trackingState)
            {
            case ARTrackingState.Normal:
                switch (currentFrame.WorldMappingStatus)
                {
                case ARWorldMappingStatus.Mapped:
                case ARWorldMappingStatus.Extending:
                    if (currentFrame.Anchors.Any(anchor => anchor.Name == virtualObjectAnchorName))
                    {
                        message = "Tap 'Save Experience' to save the current map.";
                    }
                    else
                    {
                        message = "Tap on the screen to place an object.";
                    }
                    break;

                default:
                    if (DataFromFile == null)
                    {
                        message = "Move around to map the environment.";
                    }
                    else if (isRelocalizingMap == false)
                    {
                        message = "Move around to map the environment or tap 'Load Experience' to load a saved experience.";
                    }
                    break;
                }
                break;

            case ARTrackingState.Limited:
                if (trackingStateReason == ARTrackingStateReason.Relocalizing && isRelocalizingMap)
                {
                    message = "Move your device to the location shown in the image.";
                    _snapShotThumbnail.Hidden = false;
                }
                break;

            default:
                message = currentFrame.Camera.GetLocalisedFeedback();
                break;
            }

            _sessionInfoLabel.Text  = message;
            _sessionInfoView.Hidden = message == string.Empty;
        }
Example #25
        public void Update()
        {
            _DrawPlane();
            Touch touch;

            if (
                ARFrame.GetTrackingState() == ARTrackable.TrackingState.TRACKING && // Only check for touch if we are tracking our environment.
                Input.touchCount >= 1 &&  // Check if we touched.
                (touch = Input.GetTouch(0)).phase == TouchPhase.Began)    // Check if this is the start of the touch action
            {
                _DrawARLogo(touch);
            }
        }
Example #26
        public void DidUpdateFrame(ARSession session, ARFrame frame)
        {
            // Do not enqueue other buffers for processing while another Vision task is still running.
            // The camera stream has only a finite amount of buffers available; holding too many buffers for analysis would starve the camera.
            if (this.currentBuffer == null && frame.Camera.TrackingState == ARTrackingState.Normal)
            {
                // Retain the image buffer for Vision processing.
                this.currentBuffer = frame.CapturedImage;
                this.ClassifyCurrentImage();
            }

            frame.Dispose();
        }
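The retained buffer must be released once Vision finishes, otherwise DidUpdateFrame never accepts another frame. A sketch of the matching cleanup (hypothetical; ClassifyCurrentImage itself is not shown):

        // Assumed completion path: dispose the pixel buffer and clear the field
        // so the next camera frame can be processed.
        private void ReleaseCurrentBuffer()
        {
            this.currentBuffer?.Dispose();
            this.currentBuffer = null;
        }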
Example #27
        public void Update()
        {
            Touch touch;

            // Only react when tracking is active and the first touch just began.
            if (ARFrame.GetTrackingState() == ARTrackable.TrackingState.TRACKING &&
                Input.touchCount >= 1 && (touch = Input.GetTouch(0)).phase == TouchPhase.Began)
            {
                trackTest(touch);
            }
        }
Example #28
        public void OnDrawFrame(IGL10 gl)
        {
            GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

            if (mArSession == null)
            {
                return;
            }
            if (mDisplayRotationManager.GetDeviceRotation())
            {
                mDisplayRotationManager.UpdateArSessionDisplayGeometry(mArSession);
            }

            try
            {
                mArSession.SetCameraTextureName(mTextureDisplay.GetExternalTextureId());
                ARFrame frame = mArSession.Update();
                mTextureDisplay.OnDrawFrame(frame);
                float fpsResult = DoFpsCalculate();

                System.Collections.ICollection faces = (System.Collections.ICollection)mArSession.GetAllTrackables(Java.Lang.Class.FromType(typeof(ARFace)));

                if (faces.Count == 0)
                {
                    mTextDisplay.OnDrawFrame(null);
                    return;
                }
                Log.Debug(TAG, "Face number: " + faces.Count);
                ARCamera camera = frame.Camera;
                foreach (ARFace face in faces)
                {
                    if (face.TrackingState == ARTrackableTrackingState.Tracking)
                    {
                        mFaceGeometryDisplay.OnDrawFrame(camera, face);
                        StringBuilder sb = new StringBuilder();
                        UpdateMessageData(sb, fpsResult, face);
                        mTextDisplay.OnDrawFrame(sb);
                    }
                }
            }
            catch (ArDemoRuntimeException e)
            {
                Log.Debug(TAG, "Exception on the ArDemoRuntimeException!");
            }
            catch (Throwable t)
            {
                // This prevents the app from crashing due to unhandled exceptions.
                Log.Debug(TAG, "Exception on the OpenGL thread", t);
            }
        }
Example #29
        public void Update()
        {
            _DrawPlane();
            Touch touch;

            // Only react when tracking is active and the first touch just began.
            if (ARFrame.GetTrackingState() == ARTrackable.TrackingState.TRACKING &&
                Input.touchCount >= 1 && (touch = Input.GetTouch(0)).phase == TouchPhase.Began)
            {
                _DrawARLogo(touch);
            }
        }
Example #30
        public override void LocalizeServer()
        {
            ARCameraImageBytes image = null;

            if (m_Sdk.androidResolution == ImmersalSDK.CameraResolution.Max)
            {
                try
                {
                    image = ARFrame.AcquirPreviewImageBytes();
                }
                catch (NullReferenceException e)
                {
                    Debug.LogError("Cannot acquire FullHD image: " + e.Message);

                    image = ARFrame.AcquireCameraImageBytes();
                }
            }
            else
            {
                image = ARFrame.AcquireCameraImageBytes();
            }

            if (image != null && image.IsAvailable)
            {
                CoroutineJobLocalizeServer j = new CoroutineJobLocalizeServer();
                j.host = this;

                if (this.useGPS)
                {
                    j.useGPS    = true;
                    j.latitude  = m_Latitude;
                    j.longitude = m_Longitude;
                    j.radius    = DefaultRadius;
                }

                Camera cam = this.mainCamera;
                j.rotation   = cam.transform.rotation;
                j.position   = cam.transform.position;
                j.intrinsics = HWARHelper.GetIntrinsics();
                j.width      = image.Width;
                j.height     = image.Height;

                HWARHelper.GetPlaneData(out j.pixels, image);
                j.channels = 1;

                m_Jobs.Add(j);
                image.Dispose();
            }
        }