    public FbParameterCollection AddBatchParameters()
    {
        var result = new FbParameterCollection();

        BatchParameters.Add(result);
        return result;
    }
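A minimal usage sketch for the method above, assuming it is exposed by a Firebird batch command object (here called batchCommand, which is hypothetical, as is the parameter naming): each call yields one parameter set for one batched row.

    // Hypothetical usage: one FbParameterCollection per batched row.
    // "batchCommand" stands in for whatever object exposes AddBatchParameters().
    var row1 = batchCommand.AddBatchParameters();
    row1.Add(new FbParameter("@id", 1));
    row1.Add(new FbParameter("@name", "first"));

    var row2 = batchCommand.AddBatchParameters();
    row2.Add(new FbParameter("@id", 2));
    row2.Add(new FbParameter("@name", "second"));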
    private void FinalizeJobs()
    {
        int[] batchInstanceCnts = GetBatchInstanceCount(job.bounds.Length);
        int   batchCnt          = job.bounds.Length;

        SplitedBatches = new BatchParameters[batchCnt];
        for (int i = 0; i < batchCnt; ++i)
        {
            Vector3 size   = job.bounds[i].max - job.bounds[i].min;
            Vector3 center = job.bounds[i].min;
            center           += 0.5f * size;
            SplitedBatches[i] = new BatchParameters()
            {
                InstanceCount = batchInstanceCnts[i], Bnd = new Bounds(center, size)
            };
        }
        for (int i = 0; i < batchIds.Length; ++i)
        {
            // batch ids written by the separate job are 1-based; 0 marks an unassigned instance
            int batchId = batchIds[i] - 1;
            if (batchId < 0)
            {
                continue;
            }
            BatchParameters param = SplitedBatches[batchId];
            param.Matrices.Add(matrices[i]);
            param.Colors.Add(colors[i]);
        }
        LogResults();
        // the results are now copied into managed lists, so the native buffers can be released
        job.bounds.Dispose();
        batchIds.Dispose();
        matrices.Dispose();
    }
Example #3
    private void AddBatch(Mesh mesh, Material mat, BatchParameters param, ref int cullOffset)
    {
        MaterialPropertyBlock matBlock = new MaterialPropertyBlock();

        matBlock.SetVectorArray("_PerInstanceColor", param.Colors);
        var batchIndex = this.batchRendererGroup.AddBatch(mesh, 0, mat, 0,
                                                          ShadowCastingMode.On, true, false,
                                                          param.Bnd, param.InstanceCount, matBlock, null);
        var batchMatrices = this.batchRendererGroup.GetBatchMatrices(batchIndex);
        var pos           = new float4(0, 0, 0, 1);

        for (int i = 0; i < param.InstanceCount; i++)
        {
            batchMatrices[i]         = param.Matrices[i];
            // per-instance culling info consumed later by the BatchRendererGroup culling callback
            cullData[cullOffset + i] = new CullData()
            {
                extents     = 0.5f * param.Bnd.size,
                position    = math.mul(param.Matrices[i], pos).xyz,
                minDistance = 0,
                maxDistance = detailDistance,
            };
        }
        cullOffset += param.InstanceCount;
    }
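One way the method above might be driven once the splitter has finished: register every non-empty batch in turn, letting cullOffset advance through the shared cullData array. Only names already present in these snippets are reused; the wrapper method itself is hypothetical.

    // Hypothetical driver: register every split batch with the BatchRendererGroup.
    // Assumes cullData was sized to the sum of all InstanceCount values beforehand.
    private void AddAllBatches(Mesh mesh, Material mat, BatchParameters[] splitBatches)
    {
        int cullOffset = 0;
        for (int i = 0; i < splitBatches.Length; ++i)
        {
            if (splitBatches[i].InstanceCount > 0)
            {
                AddBatch(mesh, mat, splitBatches[i], ref cullOffset);
            }
        }
    }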
    public BatchSplitor(byte[] data, Bounds bnd, Mesh mesh)
    {
        MemoryStream ms = new MemoryStream(data);

        TotalCount = MTFileUtils.ReadInt(ms);
        matrices   = new NativeArray <float4x4>(TotalCount, Allocator.Persistent);
        batchIds   = new NativeArray <int>(TotalCount, Allocator.Persistent);
        colors     = new Vector4[TotalCount];
        int spawned = 0;

        while (ms.Position < ms.Length && spawned < matrices.Length)
        {
            ushort spawnedCount = MTFileUtils.ReadUShort(ms);
            for (int i = 0; i < spawnedCount; ++i)
            {
                var pos   = MTFileUtils.ReadVector3(ms);
                var scale = MTFileUtils.ReadVector3(ms);
                var color = MTFileUtils.ReadColor(ms);
                matrices[spawned] = float4x4.TRS(pos, Quaternion.identity, scale);
                colors[spawned]   = color;
                batchIds[spawned] = 0;
                ++spawned;
            }
        }
        ms.Close();
        if (spawned != TotalCount)
        {
            Debug.LogError("terrain detail layer total count is different with spawned count");
        }
        // split into batches using a spatial quadtree
        if (TotalCount >= count_per_batch)
        {
            job             = new MTDetailBatchSeparateJob();
            job.batchIds    = batchIds;
            job.spawnMatrix = matrices;
            NativeArray <SeparateBound> bounds = new NativeArray <SeparateBound>(4, Allocator.TempJob);
            int offset = 0;
            SplitBounds(TotalCount, bnd.min, bnd.max, bounds, ref offset);
            job.bounds   = bounds;
            separateJobs = job.Schedule(TotalCount, 16);
            IsComplete   = false;
        }
        else
        {
            // small enough for a single batch: copy everything directly, no split job needed
            SplitedBatches = new BatchParameters[1];
            var param = new BatchParameters()
            {
                InstanceCount = TotalCount, Bnd = new Bounds(bnd.center, bnd.size)
            };
            for (int i = 0; i < matrices.Length; ++i)
            {
                param.Matrices.Add(matrices[i]);
                param.Colors.Add(colors[i]);
            }
            SplitedBatches[0] = param;
            LogResults();
            batchIds.Dispose();
            matrices.Dispose();
            IsComplete = true;
        }
    }
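A sketch of how the job scheduled in the constructor could be polled from a per-frame update, assuming a hypothetical OnUpdate method on the same class: it waits for the JobHandle, completes it, then hands over to FinalizeJobs shown earlier.

    // Hypothetical polling method on BatchSplitor: call once per frame until IsComplete.
    public void OnUpdate()
    {
        if (IsComplete)
        {
            return;
        }
        // JobHandle.IsCompleted turns true once the scheduled job has run; Complete() syncs it
        if (separateJobs.IsCompleted)
        {
            separateJobs.Complete();
            FinalizeJobs();
            IsComplete = true;
        }
    }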
Example #5
    public MainWindow(string[] args)
    {
        // Set configuration parameters
        InitParameters init_params = new InitParameters();

        init_params.resolution             = RESOLUTION.HD1080;
        init_params.depthMode              = DEPTH_MODE.ULTRA;
        init_params.coordinateUnits        = UNIT.METER;
        init_params.coordinateSystem       = COORDINATE_SYSTEM.RIGHT_HANDED_Y_UP;
        init_params.depthMaximumDistance   = 10f;
        init_params.cameraDisableSelfCalib = true;

        maxDepthDistance = init_params.depthMaximumDistance;
        parseArgs(args, ref init_params);
        // Open the camera
        zedCamera = new Camera(0);
        ERROR_CODE err = zedCamera.Open(ref init_params);

        if (err != ERROR_CODE.SUCCESS)
        {
            Environment.Exit(-1);
        }

        if (zedCamera.CameraModel != sl.MODEL.ZED2)
        {
            Console.WriteLine(" ERROR : Use ZED2 Camera only");
            return;
        }

        // Enable tracking (mandatory for object detection)
        Quaternion quat = Quaternion.Identity;
        Vector3    vec  = Vector3.Zero;

        zedCamera.EnablePositionalTracking(ref quat, ref vec);

        runtimeParameters = new RuntimeParameters();

        // Enable the Objects detection module
        ObjectDetectionParameters obj_det_params = new ObjectDetectionParameters();

        obj_det_params.enableObjectTracking = true; // the object detection will track objects across multiple images, instead of on an image-by-image basis
        isTrackingON = obj_det_params.enableObjectTracking;
        obj_det_params.enable2DMask   = false;
        obj_det_params.imageSync      = true; // the object detection is synchronized to the image
        obj_det_params.detectionModel = sl.DETECTION_MODEL.MULTI_CLASS_BOX_ACCURATE;

        if (USE_BATCHING)
        {
            batchParameters                = new BatchParameters();
            batchParameters.latency        = 2.0f;
            batchParameters.enable         = true;
            batchHandler                   = new BatchSystemHandler((int)batchParameters.latency * 2);
            obj_det_params.batchParameters = batchParameters;
        }

        zedCamera.EnableObjectDetection(ref obj_det_params);

        // Configure object detection runtime parameters
        obj_runtime_parameters = new ObjectDetectionRuntimeParameters();
        detection_confidence   = 60;
        obj_runtime_parameters.detectionConfidenceThreshold = detection_confidence;
        obj_runtime_parameters.objectClassFilter            = new int[(int)OBJECT_CLASS.LAST];
        obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.PERSON] = Convert.ToInt32(true);
        //obj_runtime_parameters.objectClassFilter[(int)sl.OBJECT_CLASS.VEHICLE] = Convert.ToInt32(true);
        // To set a specific threshold
        obj_runtime_parameters.objectConfidenceThreshold = new int[(int)OBJECT_CLASS.LAST];
        obj_runtime_parameters.objectConfidenceThreshold[(int)sl.OBJECT_CLASS.PERSON] = detection_confidence;
        //obj_runtime_parameters.objectConfidenceThreshold[(int)sl.OBJECT_CLASS.VEHICLE] = detection_confidence;

        // Create ZED Objects filled in the main loop
        objects   = new Objects();
        imageLeft = new sl.Mat();
        int Height = zedCamera.ImageHeight;
        int Width  = zedCamera.ImageWidth;

        displayRes = new Resolution(Math.Min((uint)Width, 1280), Math.Min((uint)Height, 720));
        Resolution tracksRes = new Resolution(400, (uint)displayRes.height);

        // create a global image to store both image and tracks view
        globalImage = new OpenCvSharp.Mat((int)displayRes.height, (int)displayRes.width + (int)tracksRes.width, OpenCvSharp.MatType.CV_8UC4);
        // retrieve ref on image part
        imageLeftOcv = new OpenCvSharp.Mat(globalImage, new OpenCvSharp.Rect(0, 0, (int)displayRes.width, (int)displayRes.height));
        // retrieve ref on tracks part
        imageTrackOcv = new OpenCvSharp.Mat(globalImage, new OpenCvSharp.Rect((int)displayRes.width, 0, (int)tracksRes.width, (int)tracksRes.height));
        // init the sl.Mat for the left image; imageRenderLeft below wraps its CPU pointer, so both share the same buffer
        imageLeft.Create(displayRes, MAT_TYPE.MAT_8U_C4, MEM.CPU);
        imageRenderLeft = new OpenCvSharp.Mat((int)displayRes.height, (int)displayRes.width, OpenCvSharp.MatType.CV_8UC4, imageLeft.GetPtr());
        imgScale        = new sl.float2((int)displayRes.width / (float)Width, (int)displayRes.height / (float)Height);

        // Create OpenGL Viewer
        viewer = new GLViewer();

        camWorldPose  = new Pose();
        camCameraPose = new Pose();
        pointCloud    = new sl.Mat();
        pcRes         = new Resolution(Math.Min((uint)Width, 720), Math.Min((uint)Height, 404));
        pointCloud.Create(pcRes, MAT_TYPE.MAT_32F_C4, MEM.CPU);

        // 2D tracks
        trackViewGenerator = new TrackingViewer(tracksRes, (int)zedCamera.GetCameraFPS(), maxDepthDistance, 3);
        trackViewGenerator.setCameraCalibration(zedCamera.GetCalibrationParameters());

        window_name = "ZED| 2D View and Birds view";
        Cv2.NamedWindow(window_name, WindowMode.Normal);// Create Window
        Cv2.CreateTrackbar("Confidence", window_name, ref detection_confidence, 100);

        // Create OpenGL window
        CreateWindow();
    }
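The constructor above only configures the camera, the detection module and the viewers; a grab loop roughly like the following sketch would normally drive it. The retrieve calls and their overloads are assumptions about the ZED C# API rather than something taken from this snippet.

    // Hedged sketch of the per-frame loop that would follow the constructor above.
    // The RetrieveImage/RetrieveObjects overloads are assumed, not shown in this snippet.
    private void RunLoop()
    {
        while (true)
        {
            if (zedCamera.Grab(ref runtimeParameters) == ERROR_CODE.SUCCESS)
            {
                // keep the runtime confidence in sync with the OpenCV trackbar value
                obj_runtime_parameters.detectionConfidenceThreshold = detection_confidence;
                zedCamera.RetrieveImage(imageLeft, VIEW.LEFT, MEM.CPU, displayRes);
                zedCamera.RetrieveObjects(ref objects, ref obj_runtime_parameters);
                // ... render imageLeftOcv / imageTrackOcv and update the GLViewer here
            }
            if (Cv2.WaitKey(10) == 'q')
            {
                break;
            }
        }
    }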