Example #1
        /// <summary>
        /// Dispose of our model <see cref="m_model"/> and interpreter <see cref="m_interpreter"/>.
        /// </summary>
        public void DisposeObjects()
        {
            if (m_model != null)
            {
                m_model.Dispose();
                m_model = null;
            }

            if (m_interpreter != null)
            {
                m_interpreter.Dispose();
                m_interpreter = null;
            }
        }
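
A minimal sketch of how this cleanup could be hooked into the standard IDisposable pattern, so callers can rely on a using block. The IDisposable wiring below is an assumption for illustration, not part of the original class:

        // Hypothetical addition to the PoseNetEstimator class body, assuming
        // its declaration is extended with ": IDisposable". DisposeObjects()
        // is the method shown above.
        public void Dispose()
        {
            DisposeObjects();          // release model and interpreter
            GC.SuppressFinalize(this); // no finalizer work remains
        }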
Example #2
        /// <summary>
        /// Constructor with arguments defining the values of <see cref="m_frozenModelPath"/>, <see cref="m_model"/>
        /// and <see cref="m_interpreter"/>. It also defines <see cref="m_outputTensors"/> from the interpreter's
        /// outputs and <see cref="m_inputTensor"/> from the interpreter's first input, assuming the input tensor
        /// holds a 3-channel BGR image.
        /// </summary>
        /// <param name="frozenModelPath">Path to a PoseNet model saved with tensorflow lite (.tflite file).</param>
        /// <param name="numberOfThreads">Number of threads the neural network will be able to use (default: 2, from base class)</param>
        public PoseNetEstimator(string frozenModelPath,
                                int numberOfThreads = 4)
        {
            // Check file
            if (!File.Exists(frozenModelPath))
            {
                Console.WriteLine("ERROR:");
                Console.WriteLine("FrozenModelPath specified in DeepNetworkLite " +
                                  "construtor with argument does not exist.");
                Console.WriteLine("Network not loaded.");
                return;
            }
            if (Path.GetExtension(frozenModelPath) != m_expectedModelExtension)
            {
                Console.WriteLine("ERROR:");
                Console.WriteLine("Extension of specified frozen model path in DeepNetworkLite " +
                                  "constructor with argument does not" +
                                  "match " + m_expectedModelExtension);
                Console.WriteLine("Network not loaded.");
                return;
            }

            if (m_frozenModelPath == "")
            {
                m_frozenModelPath = frozenModelPath;
            }

            try
            {
                if (m_frozenModelPath != "")
                {
                    m_model       = new Emgu.TF.Lite.FlatBufferModel(filename: m_frozenModelPath);
                    m_interpreter = new Emgu.TF.Lite.Interpreter(flatBufferModel: m_model);
                    // Set the thread count before allocating tensors so it is
                    // taken into account when the kernels are prepared.
                    m_interpreter.SetNumThreads(numThreads: numberOfThreads);
                    m_interpreter.AllocateTensors();
                }
            }
            catch
            {
                DisposeObjects();

                Console.WriteLine("ERROR:");
                Console.WriteLine("Unable to load frozen model in DeepNetworkLite constructor with arguments " +
                                  "despite files was found with correct extension. " +
                                  "Please, make sure you saved your model using tensorflow lite pipelines." +
                                  "Current path found is : " + m_frozenModelPath);
                return;
            }

            if (m_inputTensor == null)
            {
                // Grab the first input tensor (the BGR image input).
                int[] inputIndices = m_interpreter.InputIndices;
                m_inputTensor = m_interpreter.GetTensor(inputIndices[0]);
            }

            if (m_outputTensors == null)
            {
                m_outputTensors = m_interpreter.Outputs;
            }

            // Populate our array of keypoints
            for (int i = 0; i < m_keypoints.Length; i++)
            {
                m_keypoints[i] = new Keypoint();
            }

        }
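
A short usage sketch of the constructor and DisposeObjects() together, assuming a valid model at the illustrative path "models/posenet.tflite" (the path and thread count are placeholders):

        // Hypothetical usage: load a PoseNet .tflite model, run inference,
        // then release the native model and interpreter.
        var estimator = new PoseNetEstimator("models/posenet.tflite",
                                             numberOfThreads: 2);

        // ... feed a 3-channel BGR image to the input tensor and invoke here ...

        estimator.DisposeObjects();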