// Constructor: start tracking a newly detected rectangle.
public TrackedObject(OpenCVForUnity.Rect rect)
{
    lastPositions = new PositionsVector();
    numDetectedFrames = 1;
    numFramesNotDetected = 0;
    state = TrackedState.NEW;

    lastPositions.Add(rect.clone());

    _id = getNextId();
    id = _id;
}
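Each detected rectangle presumably gets wrapped in its own TrackedObject: `rect.clone()` stores an independent copy so the position history is unaffected if the caller later reuses the Rect, and `getNextId()` hands out a unique id per object. A minimal usage sketch (an assumption about the surrounding tracker, not the original code; needs System.Collections.Generic):

// Minimal sketch (assumption): wrap freshly detected rectangles so their
// position history and id persist across frames.
List<TrackedObject> trackedObjects = new List<TrackedObject>();

void OnNewDetections(OpenCVForUnity.Rect[] detections)   // hypothetical callback
{
    foreach (OpenCVForUnity.Rect r in detections)
        trackedObjects.Add(new TrackedObject(r));   // starts out in TrackedState.NEW
}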
// Saves a crop of the webcam image as a PNG under the Resources/ path.
void SaveImage(int middle_x, int middle_y)
{
    /*
    * //Create a Texture2D with the size of the rendered image on the screen.
    * Texture2D texture = new Texture2D(rawimage.texture.width, rawimage.texture.height, TextureFormat.ARGB32, false);
    *
    * //Save the image to the Texture2D
    * texture.SetPixels(webcamTexture.GetPixels());
    * texture.Apply();
    *
    * //Encode to PNG
    * byte[] bytes = texture.EncodeToPNG();
    *
    * //Save the captured photo as saved.png
    * File.WriteAllBytes(Application.dataPath + "/DlibFaceLandmarkDetector/Examples/Test/Resources/saved.png", bytes);
    */

    // Copy the current webcam frame into an OpenCV Mat.
    OpenCVForUnity.Mat srcMat = new OpenCVForUnity.Mat(webCamTexture.height, webCamTexture.width, OpenCVForUnity.CvType.CV_8UC4);
    OpenCVForUnity.Utils.webCamTextureToMat(webCamTexture, srcMat);
    //Debug.Log("srcMat:" + srcMat.ToString());

    // Crop a 10x10 region offset by (5, 5) from (middle_x, middle_y).
    // Note: no bounds check; submat() throws if the area falls outside the frame.
    OpenCVForUnity.Rect area = new OpenCVForUnity.Rect(middle_x + 5, middle_y + 5, 10, 10);
    OpenCVForUnity.Mat desMat = srcMat.submat(area);

    // Convert the cropped Mat back to a Texture2D and encode it as PNG.
    Texture2D desTex = new Texture2D(desMat.cols(), desMat.rows(), TextureFormat.RGBA32, false);
    OpenCVForUnity.Utils.matToTexture2D(desMat, desTex);
    //Debug.Log("desTex:" + desTex.ToString());

    byte[] img = desTex.EncodeToPNG();
    File.WriteAllBytes(Application.dataPath + "/DlibFaceLandmarkDetector/Examples/WebCamTextureExample/Resources/saved1.png", img);
    //Debug.Log("saved");
    //File.WriteAllBytes(Application.persistentDataPath + @"/saved1.png", img);

    // Modified 12.13
    Debug.Log("Application.persistentDataPath: " + Application.persistentDataPath);
    Debug.Log("Application.dataPath: " + Application.dataPath);
    //File.WriteAllBytes(Application.persistentDataPath + "/DlibFaceLandmarkDetector/Examples/WebCamTextureExample/Resources/saved1.png", img);
}
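A note on the output path: `Application.dataPath` points at the project's `Assets/` folder only while running in the Unity Editor; in a player build it is read-only (on Android it points inside the .apk), so the write fails there. The commented-out `persistentDataPath` line and the two `Debug.Log` calls added on 12.13 suggest that alternative was being explored. A minimal sketch of that approach (an assumption, not the original code):

// Minimal sketch (assumption): write the PNG to the per-app writable location
// so saving also works outside the Editor.
string path = System.IO.Path.Combine(Application.persistentDataPath, "saved1.png");
System.IO.File.WriteAllBytes(path, img);   // 'img' is the encoded PNG from SaveImage()
Debug.Log("saved to: " + path);

A file under `persistentDataPath` is not imported as a Unity asset, so it cannot be loaded with `Resources.Load`; it would have to be read back with `File.ReadAllBytes` and `Texture2D.LoadImage`.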