public static CreateTrajectoryFragmentsJob Prepare(ref Builder.FragmentArray fragmentArray, ref Binary binary)
{
    return new CreateTrajectoryFragmentsJob()
    {
        binary = new MemoryRef<Binary>(ref binary),
        metricIndex = fragmentArray.metricIndex,
        numFragments = fragmentArray.numFragments,
        numFeatures = fragmentArray.numFeatures,
        samplingTimes = fragmentArray.samplingTimes,
        features = new MemoryArray<float3>(fragmentArray.features)
    };
}
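
Prepare only packages the builder state into job-safe wrappers (MemoryRef, MemoryArray); scheduling happens at the call site. A hypothetical sketch of that call site, assuming the job is an IJobParallelFor and mirroring the Schedule(numFragments, 1) pattern that Example #2 uses; none of this is from the source:

// Hypothetical call site; the batch size of 1 mirrors ComputeCodes in Example #2.
var job = CreateTrajectoryFragmentsJob.Prepare(ref fragmentArray, ref binary);

JobHandle handle = job.Schedule(fragmentArray.numFragments, 1);

handle.Complete();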
Example #2
        public unsafe void ComputeCodes(ref Builder.FragmentArray fragmentArray, NativeSlice<byte> codes)
        {
            ComputeCodesJob job = new ComputeCodesJob()
            {
                ksub           = ksub,
                dsub           = dsub,
                M              = M,
                centroids      = centroids,
                features       = fragmentArray.Features.Reinterpret<float>(),
                strideFeatures = fragmentArray.numFeatures * 3,
                codes          = new MemoryArray<byte>(codes),
                strideCodes    = codeSize,
                startIndex     = 0
            };

            // One parallel iteration per fragment; Complete() blocks until all codes are written.
            JobHandle handle = job.Schedule(fragmentArray.numFragments, 1);

            handle.Complete();
        }
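
For context: in product quantization, each fragment's feature vector of M * dsub floats is split into M subvectors, and each subvector is encoded as the index of its nearest centroid among ksub candidates, which is why the codes buffer is a byte array (ksub must fit in a byte). A minimal standalone sketch of that per-subvector assignment; this is an illustration, not the library's ComputeCodesJob implementation:

// Hypothetical helper illustrating per-subvector code assignment.
static byte NearestCentroid(float[] subvector, float[] centroids, int ksub, int dsub)
{
    byte best = 0;
    float bestDistance = float.MaxValue;

    for (int k = 0; k < ksub; ++k)
    {
        // Squared Euclidean distance to centroid k of this subquantizer.
        float distance = 0f;
        for (int j = 0; j < dsub; ++j)
        {
            float diff = subvector[j] - centroids[k * dsub + j];
            distance += diff * diff;
        }

        if (distance < bestDistance)
        {
            bestDistance = distance;
            best = (byte)k;
        }
    }

    return best;
}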
Example #3
        public TrainingData ScheduleTraining(ref Builder.FragmentArray fragmentArray)
        {
            //
            // TODO: Variable bitrate encoding.
            // TODO: Use Jobs to train slices in parallel,
            //       requires alternative random generator.
            //

            var numInputSamples = fragmentArray.numFragments;

            // Clamp the training set so each of the ksub centroids is trained on
            // between minimumNumberSamples and maximumNumberSamples samples on average.
            var numTrainingSamples = math.clamp(
                numInputSamples, settings.minimumNumberSamples * ksub,
                settings.maximumNumberSamples * ksub);

            TrainingData trainingData = new TrainingData()
            {
                permutation = new NativeArray<int>(numTrainingSamples, Allocator.Persistent),
                slice       = new NativeArray<float>(numTrainingSamples * dsub, Allocator.Persistent),
            };

            var random = new RandomGenerator(settings.seed);

            // If the clamped training count differs from the input count, draw
            // samples at random (with replacement); otherwise use each fragment once.
            if (numTrainingSamples != numInputSamples)
            {
                for (int i = 0; i < numTrainingSamples; i++)
                {
                    trainingData.permutation[i] = random.Integer(numInputSamples);
                }
            }
            else
            {
                for (int i = 0; i < numTrainingSamples; i++)
                {
                    trainingData.permutation[i] = i;
                }
            }

            // Fisher-Yates shuffle so the samples are visited in random order.
            for (int i = 0; i + 1 < numTrainingSamples; i++)
            {
                int i2 = i + random.Integer(numTrainingSamples - i);
                int t  = trainingData.permutation[i];

                trainingData.permutation[i]  = trainingData.permutation[i2];
                trainingData.permutation[i2] = t;
            }

            //
            // Loop over features (M)
            //

            for (int m = 0; m < M; m++)
            {
                //
                // Prepare feature slice for all samples (n)
                //

                int writeOffset = 0;

                unsafe
                {
                    for (int i = 0; i < numTrainingSamples; i++)
                    {
                        int sampleIndex = trainingData.permutation[i];

                        Assert.IsTrue(fragmentArray.FragmentFeatures(sampleIndex).Length == M);

                        float* x = (float*)fragmentArray.FragmentFeatures(sampleIndex).ptr;

                        int readOffset = m * dsub;

                        for (int j = 0; j < dsub; ++j)
                        {
                            trainingData.slice[writeOffset++] = x[readOffset++];
                        }
                    }
                }

                Assert.IsTrue(writeOffset == trainingData.slice.Length);
            }

            // One k-means instance and one job queue per feature subspace.
            trainingData.jobQueues     = new JobQueue[M];
            trainingData.featureKmeans = new KMeans[M];

            for (int m = 0; m < M; m++)
            {
                var kms = KMeans.Settings.Default;

                kms.numIterations = settings.numIterations;
                kms.numAttempts   = settings.numAttempts;
                kms.seed          = settings.seed;

                KMeans kmeans = new KMeans(dsub, ksub, numTrainingSamples, kms);

                Assert.IsTrue(numTrainingSamples >= settings.minimumNumberSamples * ksub);
                Assert.IsTrue(numTrainingSamples <= settings.maximumNumberSamples * ksub);

                trainingData.jobQueues[m] = kmeans.PrepareTrainingJobQueue(new MemoryArray<float>(trainingData.slice), numTrainingSamples, 5);
                trainingData.jobQueues[m].AddJob(new CopyKMeansCentroidsJob()
                {
                    index     = m * ksub * dsub,
                    numFloats = ksub * dsub,
                    kmeans    = kmeans,
                    centroids = new MemoryArray<float>(centroids)
                });

                trainingData.featureKmeans[m] = kmeans;
            }

            return trainingData;
        }
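
To make the bookkeeping concrete, a purely illustrative configuration (none of these numbers come from the source): with M = 4 subquantizers, dsub = 3 floats per slice (one float3 feature), and ksub = 256 centroids, each fragment's 4 * 3 = 12 floats (48 bytes) compress to 4 one-byte codes, a 12x reduction, and the shared centroid table holds M * ksub * dsub = 3072 floats, matching the m * ksub * dsub indexing used by CopyKMeansCentroidsJob above.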