/// <summary>
/// Builds a match point from an OpenCV point and its associated match score.
/// </summary>
/// <param name="pt">Pixel location of the match.</param>
/// <param name="val">Match score, stored in <c>Punctuality</c>.</param>
public MatchPoint(OpenCV.Net.Point pt, double val)
{
    // Copy the score first, then the coordinates; no validation is performed.
    Punctuality = val;
    X = pt.X;
    Y = pt.Y;
}
/// <summary>
/// Convenience wrapper: returns the camera's current image flipped vertically.
/// </summary>
public static Pixbuf FlippedVertical(VirtualCamera camera, OpenCV cv)
{
    return Flip(camera, cv, FlipType.Vertical);
}
/// <summary>
/// Convenience wrapper: returns the camera's current image flipped on all axes.
/// </summary>
/// <remarks>
/// NOTE(review): despite the "All" in the name this passes <c>FlipType.None</c>.
/// Confirm that <c>FlipType.None</c> actually maps to a both-axes flip (OpenCV's
/// C flip code -1) rather than a no-op; if not, this is a latent bug.
/// </remarks>
public static Pixbuf FlippedAll(VirtualCamera camera, OpenCV cv)
{
    return Flip(camera, cv, FlipType.None);
}
/// <summary>
/// Flips the camera's current Pixbuf around the axis given by the flip code.
/// </summary>
/// <param name="camera">Source camera; may be null.</param>
/// <param name="cv">OpenCV wrapper that performs the flip.</param>
/// <param name="flipCode">Axis/axes to flip around.</param>
/// <returns>The flipped image, or null when no camera was supplied.</returns>
public static Pixbuf Flip(VirtualCamera camera, OpenCV cv, FlipType flipCode)
{
    if (camera == null) {
        return null;
    }
    return cv.Flip(camera.Pixbuf(), flipCode);
}
/// <summary>
/// Convenience wrapper: returns the camera's current image flipped horizontally.
/// </summary>
public static Pixbuf FlippedHorizontal(VirtualCamera camera, OpenCV cv)
{
    return Flip(camera, cv, FlipType.Horizontal);
}
/// <summary>
/// Round-trips the camera image through a Bitmap and back to a Pixbuf.
/// </summary>
/// <returns>The converted image, or null when no camera was supplied.</returns>
public static Pixbuf ProcessPixbufBMP(VirtualCamera camera, OpenCV cv)
{
    if (camera == null) {
        return null;
    }
    var bitmap = cv.ToBitmap(camera.Pixbuf());
    return cv.ToPixbuf(bitmap);
}
/// <summary>
/// Round-trips the camera image through an OpenCV Mat and back to a Pixbuf.
/// </summary>
/// <returns>The converted image, or null when no camera was supplied.</returns>
public static Pixbuf ProcessMatPixbuf(VirtualCamera camera, OpenCV cv)
{
    if (camera == null) {
        return null;
    }
    var mat = cv.ToMat(camera.Pixbuf());
    return cv.ToPixbuf(mat);
}
/// <summary>Returns the underlying OpenCV library's version string.</summary>
public string GetVersion()
{
    return OpenCV.GetVersion();
}
/// <summary>Returns the underlying OpenCV library's build-information string.</summary>
public string GetBuildInformation()
{
    return OpenCV.GetBuildInformation();
}
// Use this for initialization
// Coroutine: locates exactly two webcams whose device name contains "Live",
// captures a single frame from the first into a Texture2D shown on view1,
// then streams the second live onto view2. Exits silently unless exactly
// two matching cameras are present.
IEnumerator Start() {
    OpenCV.Init();
    yield return(null);
    // Gather the device indices of every camera whose name contains "Live".
    List <int> allCams = new List <int>();
    for (int i = 0; i < WebCamTexture.devices.Length; i++) {
        if (WebCamTexture.devices[i].name.Contains("Live")) {
            allCams.Add(i);
            Debug.Log(WebCamTexture.devices[i].name);
        }
    }
    // This rig needs exactly two matching cameras; bail out otherwise.
    if (allCams.Count != 2) { yield break; }
    Debug.Log("Found " + allCams.Count + " cameras, first cam is index = "); // + allCams[0]);
    WebCamTexture cam1 = new WebCamTexture(WebCamTexture.devices[allCams[0]].name);
    WebCamTexture cam2 = new WebCamTexture(WebCamTexture.devices[allCams[1]].name);
    float startTime = Time.realtimeSinceStartup;
    cam1.Play();
    // Spin until the expected 1280-wide frame arrives.
    // NOTE(review): this loop never terminates if the camera's actual frame
    // width is not 1280 — confirm the hardware always delivers 1280x720.
    while (cam1.width != 1280) { yield return(null); }
    Debug.Log("cam1 " + cam1.width + " " + cam1.height);
    Color[] imageOne = cam1.GetPixels();
    cam1.Stop();
    // Wait for cam1 to fully stop before starting cam2.
    while (cam1.isPlaying) { yield return(null); }
    cam2.Play();
    while (cam2.width != 1280) { yield return(null); }
    Debug.Log("cam2 " + cam2.width + " " + cam2.height);
    float totalTime = Time.realtimeSinceStartup - startTime;
    Debug.Log("Total time " + totalTime);
    // Copy cam1's captured pixels into a display texture; cam2 is assigned
    // directly so it keeps streaming live.
    Texture2D newTex = new Texture2D(1280, 720);
    newTex.SetPixels(imageOne);
    newTex.Apply();
    view1.renderer.material.mainTexture = newTex;
    view2.renderer.material.mainTexture = cam2;
    cam2.Stop();
    yield break;
    /* UNREACHABLE
     * while(!Input.GetKeyDown(KeyCode.Escape)) yield return null;
     *
     * IntPtr cvCapOne = OpenCV.cvCreateCameraCapture(allCams[0]);
     * //IntPtr cvCapTwo = OpenCV.cvCreateCameraCapture(allCams[1]);
     * OpenCV.cvSetCaptureProperty(cvCapOne, (int)CvCapture.CV_CAP_PROP_FRAME_WIDTH, 1280);
     * OpenCV.cvSetCaptureProperty(cvCapOne, (int)CvCapture.CV_CAP_PROP_FRAME_HEIGHT, 720);
     *
     * while(!Input.GetKeyDown(KeyCode.Space)) yield return null;
     *
     * //while(!Input.GetKeyDown(KeyCode.N)) {
     * yield return StartCoroutine(CreatePicture(cvCapOne, view1));
     * //yield return StartCoroutine(CreatePicture(cvCapTwo, view2));
     * yield return new WaitForSeconds(3);
     * //}
     *
     * OpenCV.cvReleaseCapture(ref cvCapOne);
     * //OpenCV.cvReleaseCapture(ref cvCapTwo);
     * Debug.Log("Cams turned off"); */
}
/// <summary>
/// Uses a silhouette scanning setup: photographs the target from every
/// calibrated camera while stepping the printer platform through a full
/// rotation, hands each frame to background worker threads that accumulate
/// scan data, then thresholds the result into a voxel blob and adds it to
/// the scene.
/// </summary>
/// <returns>The scan.</returns>
IEnumerator SilhouetteScan() {
    Text.Log("SilhouetteScan.");
    bool isCancelled = false;
    // Freeze collision updates while the blob is being rebuilt.
    manager.pauseUpdateCollision = true;
    m_blobSize = manager.m_blob.size;
    // Take one calibration picture per camera before the object is placed.
    foreach (ScanningCamera sc in calibratedCameras) {
        yield return(Scheduler.StartCoroutine(sc.StartCapturing()));
        yield return(Scheduler.StartCoroutine(sc.TakeCalibrationPicture()));
        sc.Stop();
    }
    m_scanDataBlob = new byte[m_blobSize.x, m_blobSize.y, m_blobSize.z];
    m_readyToScan = false;
    bool shouldBreak = false;
    // Prompt the user to place the object; either handler unblocks the wait
    // below (the second also cancels the scan).
    Dispatcher <Panel> .Broadcast(PanelController.kEventClosePanel, Panel.Message);
    Dispatcher <string, PanelController.Handler, PanelController.Handler> .Broadcast(
        PanelController.kEventConfirm, kPlaceObjectString,
        delegate { m_readyToScan = true; },
        delegate { m_readyToScan = shouldBreak = true; StopCameras(); }
    );
    yield return(new WaitUntil(() => m_readyToScan || shouldBreak));
    if (shouldBreak) { yield break; }
    m_readyToScan = false;
    // Show a cancellable progress panel for the scanning phase.
    Dispatcher <string, string, PanelController.Handler, PanelController.Handler> .Broadcast(
        PanelController.kEventShowProgress, Scanner.kOnScanningProgress, "Scanning {0:0%}",
        Nop,
        delegate { isCancelled = true; m_isScanning = false; scanningView.SetActive(false); StopCameras(); });
    float initialTime = Time.realtimeSinceStartup;
    scanningView.SetActive(true);
    m_isScanning = true;
    m_workQueue = new Queue <ScanWorkPacket>();
    // Use half the logical cores (at least one) for image processing.
    int numThreads = Math.Max(1, System.Environment.ProcessorCount / 2);
    Text.Log(@"Starting {0} thread{1}.", numThreads, Text.S(numThreads));
    m_rowsProcessed = 0;
    // Abort any workers left over from a previous scan before starting fresh.
    foreach (Thread t in m_scanningThreads) {
        if (t.IsAlive) { t.Abort(); }
    }
    m_scanningThreads.Clear();
    for (int i = 0; i < numThreads; ++i) {
        Thread worker = new Thread(ImageProcessor);
        worker.Start();
        m_scanningThreads.Add(worker);
    }
    int cameraIndex = 0;
    int rowsExpected = 0;  // total image rows the workers must consume
    foreach (ScanningCamera sc in calibratedCameras) {
        yield return(Scheduler.StartCoroutine(sc.StartCapturing()));
        scanningView.renderer.material.mainTexture = sc.webcamImage;
        for (int i = 0; i < numberOfSilhouettePicsToTake; i++) {
            yield return(Scheduler.StartCoroutine(sc.TakePicture()));
            // The second camera's pictures map onto the upper half of the bar.
            float progressAddition = (cameraIndex == 1) ? 0.5f : 0f;
            Dispatcher <float> .Broadcast(kOnScanningProgress,
                ((float)i / (float)numberOfSilhouettePicsToTake) / (float)calibratedCameras.Count
                + progressAddition);
            rowsExpected += sc.imageHeight;
            // Hand the captured frame to the worker threads.
            m_workQueue.Enqueue(new ScanWorkPacket(centerOfVoxelBlob, sc));
            if (Scheduler.ShouldYield()) { yield return(null); }
            // Advance this camera's extrinsics by one platform step: convert
            // the Euler vector to a rotation matrix via Rodrigues, fold it into
            // worldToCameraRotation, then free the temporary native mats.
            Matrix eulerRotate = Matrix.Zero(3, 1);
            eulerRotate[1, 0] = -m_rotationPerPicture;
            // The original.
            //eulerRotate[1,0] = m_rotationPerPicture;
            Matrix rotateAroundY = Matrix.Zero(3, 3);
            OpenCV.cvRodrigues2(eulerRotate.matPtr, rotateAroundY.matPtr, IntPtr.Zero);
            sc.worldToCameraRotation = sc.worldToCameraRotation * rotateAroundY;
            OpenCV.cvReleaseMat(ref rotateAroundY.matPtr);
            OpenCV.cvReleaseMat(ref eulerRotate.matPtr);
            // Physically rotate the platform one step and wait for it to settle.
            pc.BeginMotorChanges(m_workingPrinter);
            pc.RotateBySteps(0,
                m_workingPrinter.platform.stepsPerRotation / numberOfSilhouettePicsToTake,
                m_workingPrinter.horizTrack.stepDirection, Change.Execute);
            pc.EndMotorChanges();
            yield return(Scheduler.StartCoroutine(pc.WaitUntilDoneMoving()));
            if (isCancelled) {
                // User cancelled: shut everything down and discard pending work.
                pc.TurnBacklightOff();
                m_workQueue.Clear();
                //if (printer.serialController != null) {
                //	printer.serialController.ClearRxBuffer();
                //}
                scanningView.SetActive(false);
                StopCameras();
                yield break;
            }
        }
        sc.Stop();
        ++cameraIndex;
    }
    scanningView.SetActive(false);
    // Switch the progress panel to the (non-cancellable) processing phase.
    Dispatcher <string, string, PanelController.Handler, PanelController.Handler> .Broadcast(
        PanelController.kEventShowProgress, Scanner.kOnScanningProgress, "Processing {0:0%}",
        Nop,
        // TODO: Actually cancel things…to do this, we may need to make some changes
        // in the blob format and undo system.
        null);
    // Poll until the workers have processed every captured row.
    do {
        // NOTE: Changed 2.0 -> 5.0f
        yield return(new WaitSeconds(5.0f));
        Dispatcher <float> .Broadcast(kOnScanningProgress,
            ((float)m_rowsProcessed / (float)rowsExpected) * 0.75f);
    } while (m_rowsProcessed < rowsExpected && !isCancelled);
    m_isScanning = false;
    // Threshold the accumulated scan data into a voxel blob and merge it in.
    VoxelBlob blob = new VoxelBlob(m_blobSize.x, m_blobSize.y, m_blobSize.z, false);
    yield return(Scheduler.StartCoroutine(SetVoxelsInBlob((byte)cutThreshold, blob, m_scanDataBlob, null)));
    m_scanDataBlob = null;
    Dispatcher <float> .Broadcast(kOnScanningProgress, 0.75f);
    yield return(Scheduler.StartCoroutine(manager.AddVoxelBlob(blob, Vector3.zero)));
    manager.pauseUpdateCollision = false;
    // Wait for the mesh update triggered by the new blob to finish.
    while (manager.isUpdating) { yield return(new WaitSeconds(1.0f)); }
    Dispatcher <float> .Broadcast(kOnScanningProgress, 1f);
    Text.Log("Done scanning and updating meshes after {0} sec.",
        Time.realtimeSinceStartup - initialTime);
    pc.TurnBacklightOff();
    Contract.Assert(m_workQueue.Count == 0, @"Didn't process {0} image{1}.",
        m_workQueue.Count, Text.S(m_workQueue.Count));
}
/// <summary>
/// Caches the sibling OpenCV component and starts streaming from the first
/// available webcam at 640x480, 30 fps.
/// </summary>
public void Init()
{
    openCV = GetComponent <OpenCV>();
    // Use the first enumerated capture device.
    string deviceName = WebCamTexture.devices[0].name;
    webCamTex = new WebCamTexture(deviceName, 640, 480, 30);
    webCamTex.Play();
}