Example #1
0
        /// <summary>
        /// Quantizes the magnitude of each feature of the given fragment and
        /// replaces the feature with its unit-length direction afterwards.
        /// </summary>
        /// <param name="fragmentIndex">Index of the fragment to process.</param>
        public void Execute(int fragmentIndex)
        {
            MemoryArray <float3> features = fragmentArray.FragmentFeatures(fragmentIndex);

            // Flat write offset for this fragment's quantized codes.
            int baseIndex = numQuantizedFeatures * fragmentIndex;

            for (int i = 0; i < numQuantizedFeatures; ++i)
            {
                float3 value = features[i];

                float magnitude = math.length(value);

                // Encode the magnitude with this feature's quantizer.
                quantizedValues[baseIndex + i] = quantizers[i].Encode(magnitude);

                float3 fallback = Missing.zero;

                // Near-zero vectors collapse to the fallback direction;
                // everything else keeps its direction at unit length.
                features[i] = magnitude < 0.02f
                    ? fallback
                    : math.normalizesafe(value, fallback);
            }
        }
Example #2
0
        /// <summary>
        /// Normalizes each transformed feature of the given fragment using
        /// that feature's bounding box.
        /// </summary>
        /// <param name="fragmentIndex">Index of the fragment to process.</param>
        public void Execute(int fragmentIndex)
        {
            MemoryArray <float3> features = fragmentArray.FragmentFeatures(fragmentIndex);

            for (int i = 0; i < numTransformedFeatures; ++i)
            {
                // Transformed features start at transformedIndex in the fragment.
                int index = transformedIndex + i;

                features[index] = boundingBoxes[i].normalize(features[index]);
            }
        }
Example #3
0
        /// <summary>
        /// Builds the training data (sample permutation plus a per-feature
        /// slice buffer) and schedules one k-means training job queue per
        /// feature (M) for product quantization.
        /// </summary>
        /// <param name="fragmentArray">Source fragments whose features are sliced into training samples.</param>
        /// <returns>
        /// The populated <see cref="TrainingData"/>; ownership of its
        /// persistent native allocations passes to the caller.
        /// </returns>
        public TrainingData ScheduleTraining(ref Builder.FragmentArray fragmentArray)
        {
            //
            // TODO: Variable bitrate encoding.
            // TODO: Use Jobs to train slices in parallel,
            //       requires alternative random generator.
            //

            var numInputSamples = fragmentArray.numFragments;

            // Clamp the training set size into the configured window
            // [min * ksub, max * ksub].
            var numTrainingSamples = math.clamp(
                numInputSamples, settings.minimumNumberSamples * ksub,
                settings.maximumNumberSamples * ksub);

            TrainingData trainingData = new TrainingData()
            {
                permutation = new NativeArray <int>(numTrainingSamples, Allocator.Persistent),
                slice       = new NativeArray <float>(numTrainingSamples * dsub, Allocator.Persistent),
            };

            var random = new RandomGenerator(settings.seed);

            // (a < b) || (a > b) on integers is simply a != b.
            if (numTrainingSamples != numInputSamples)
            {
                // Training size was clamped away from the input size: draw
                // sample indices uniformly (with replacement) from the input.
                for (int i = 0; i < numTrainingSamples; i++)
                {
                    trainingData.permutation[i] = random.Integer(numInputSamples);
                }
            }
            else
            {
                // One-to-one mapping; start from the identity permutation.
                for (int i = 0; i < numTrainingSamples; i++)
                {
                    trainingData.permutation[i] = i;
                }
            }

            // Fisher-Yates shuffle of the permutation.
            for (int i = 0; i + 1 < numTrainingSamples; i++)
            {
                int i2 = i + random.Integer(numTrainingSamples - i);
                int t  = trainingData.permutation[i];

                trainingData.permutation[i]  = trainingData.permutation[i2];
                trainingData.permutation[i2] = t;
            }

            //
            // Loop over features (M)
            //

            // NOTE(review): every iteration below overwrites trainingData.slice
            // in full, so after this loop the buffer only holds the slice of
            // the LAST feature (m == M - 1), while each job queue created
            // further down wraps that same buffer. Confirm the job queues
            // repopulate (or do not need) per-feature slices before they run.
            for (int m = 0; m < M; m++)
            {
                //
                // Prepare feature slice for all samples (n)
                //

                int writeOffset = 0;

                unsafe
                {
                    for (int i = 0; i < numTrainingSamples; i++)
                    {
                        int sampleIndex = trainingData.permutation[i];

                        Assert.IsTrue(fragmentArray.FragmentFeatures(sampleIndex).Length == M);

                        float *x = (float *)fragmentArray.FragmentFeatures(sampleIndex).ptr;

                        // Feature m occupies dsub consecutive floats.
                        int readOffset = m * dsub;

                        for (int j = 0; j < dsub; ++j)
                        {
                            trainingData.slice[writeOffset++] = x[readOffset++];
                        }
                    }
                }

                Assert.IsTrue(writeOffset == trainingData.slice.Length);
            }

            trainingData.jobQueues     = new JobQueue[M];
            trainingData.featureKmeans = new KMeans[M];

            for (int m = 0; m < M; m++)
            {
                var kms = KMeans.Settings.Default;

                kms.numIterations = settings.numIterations;
                kms.numAttempts   = settings.numAttempts;
                kms.seed          = settings.seed;

                KMeans kmeans = new KMeans(dsub, ksub, numTrainingSamples, kms);

                Assert.IsTrue(numTrainingSamples >= settings.minimumNumberSamples * ksub);
                Assert.IsTrue(numTrainingSamples <= settings.maximumNumberSamples * ksub);

                // Queue training on the shared slice buffer, then a job that
                // copies this feature's ksub*dsub trained centroids into the
                // global centroid table at its flat offset.
                trainingData.jobQueues[m] = kmeans.PrepareTrainingJobQueue(new MemoryArray <float>(trainingData.slice), numTrainingSamples, 5);
                trainingData.jobQueues[m].AddJob(new CopyKMeansCentroidsJob()
                {
                    index     = m * ksub * dsub,
                    numFloats = ksub * dsub,
                    kmeans    = kmeans,
                    centroids = new MemoryArray <float>(centroids)
                });

                trainingData.featureKmeans[m] = kmeans;
            }

            return trainingData;
        }