// Unity entry point for the lobby scene: restores normal time flow, applies the
// project's camera/resolution setup, starts the lobby BGM, hides every overlay
// panel, connects to the match server, and defers data setup by 0.25 s.
void Start()
{
    // Reset in case a previous scene paused the game via timeScale = 0.
    Time.timeScale = 1;
    CameraResolution.SetCamera();
    SoundPlayer.instance.PlayBGM("MainBgm", 0.5f);
    // All overlay panels start hidden; they are activated on demand later.
    matchlookingObject.SetActive(false);
    matchFoundObject.SetActive(false);
    matchReconnectObject.SetActive(false);
    loadingObject.SetActive(false);
    errorObject.SetActive(false);
    cardUpgrade.SetActive(false);
    cardPurchase.SetActive(false);
    rankObject.SetActive(false);
    BackEndMatchManager.instance.JoinMatchServer();
    BackEndMatchManager.instance.HandlerSetting();
    // Snapshot the pre-match point total so the post-match UI can show the delta.
    // NOTE(review): assumes myInfo is already populated by an earlier login — confirm.
    BackEndServerManager.instance.myInfo.oldPoint = BackEndServerManager.instance.myInfo.point;
    // String-based Invoke: the name must match the method exactly (no compile check).
    Invoke("StartDataSetting", 0.25f);
}
/// <summary>
/// Reads the optional "camera_resolution" entry from a sample's startup
/// configuration and maps it onto the CameraResolution enum.
/// A missing key and any unrecognized value both fall back to SD_640x480.
/// </summary>
private static CameraResolution ParseCameraResolution(JsonObject startupConfig)
{
    CameraResolution parsed = CameraResolution.SD_640x480;
    JsonValue configuredValue;
    if (startupConfig.TryGetValue("camera_resolution", out configuredValue))
    {
        string resolutionName = (string)configuredValue;
        if (resolutionName == "sd_640x480")
        {
            parsed = CameraResolution.SD_640x480;
        }
        else if (resolutionName == "hd_1280x720")
        {
            parsed = CameraResolution.HD_1280x720;
        }
        else if (resolutionName == "full_hd_1920x1080")
        {
            parsed = CameraResolution.Full_HD_1920x1080;
        }
        else if (resolutionName == "auto")
        {
            parsed = CameraResolution.Auto;
        }
    }
    return parsed;
}
/// <summary>
/// Selects the lowest camera resolution whose (landscape) aspect ratio matches
/// the display's portrait aspect ratio within a tolerance.
/// Returns null when no resolution is within tolerance.
/// </summary>
private static CameraResolution SelectLowestResolutionMatchingDisplayAspectRatio(List <CameraResolution> availableResolutions)
{
    CameraResolution result = null;
    // Tolerance between the camera and display aspect ratios.
    // FIX: the old comment claimed 0.1 but the code has always used 0.2 —
    // comment corrected to match the value actually in effect.
    double aspectTolerance = 0.2;
    // NOTE(review): assumes a portrait display (Height/Width) compared against
    // landscape camera resolutions (Width/Height) — confirm on landscape devices.
    var targetRatio = DeviceDisplay.MainDisplayInfo.Height / DeviceDisplay.MainDisplayInfo.Width;
    // The camera API lists resolutions from highest to lowest, so the LAST entry
    // within tolerance is the lowest matching one; lower resolutions make QR
    // detection faster. (The original also tracked a height diff in minDiff, but
    // that value never influenced the result — dead code, removed.)
    foreach (var r in availableResolutions)
    {
        if (Math.Abs((double)r.Width / r.Height - targetRatio) > aspectTolerance)
        {
            continue;
        }
        result = r;
    }
    return result;
}
// Unity entry point for the battle scene: applies the camera/resolution setup,
// plays the battle-start jingle, then hides the transition screen.
void Start()
{
    CameraResolution.SetCamera();
    SoundPlayer.instance.PlaySound("BattleStart");
    HideScreen();
}
/// <summary>
/// Selects the lowest camera resolution whose aspect ratio matches the display's
/// within a small tolerance. Returns null when nothing matches.
/// </summary>
public CameraResolution SelectLowestResolutionMatchingDisplayAspectRatio(List <CameraResolution> availableResolutions)
{
    // a tolerance of 0.1 should not be visible to the user
    double aspectTolerance = 0.1;
    // Normalize for orientation so we always compare "long side / short side".
    var displayOrientationHeight = DeviceDisplay.MainDisplayInfo.Orientation == DisplayOrientation.Portrait
        ? DeviceDisplay.MainDisplayInfo.Height
        : DeviceDisplay.MainDisplayInfo.Width;
    var displayOrientationWidth = DeviceDisplay.MainDisplayInfo.Orientation == DisplayOrientation.Portrait
        ? DeviceDisplay.MainDisplayInfo.Width
        : DeviceDisplay.MainDisplayInfo.Height;
    var targetRatio = displayOrientationHeight / displayOrientationWidth;
    // The camera API lists resolutions from highest to lowest, so the LAST
    // in-tolerance entry is the lowest matching one; lower resolutions make QR
    // detection faster. (The original loop also tracked a height diff in minDiff,
    // but that value never influenced which resolution was returned — dead code,
    // removed; behavior is unchanged.)
    return availableResolutions.LastOrDefault(
        r => Math.Abs(((double)r.Width / r.Height) - targetRatio) < aspectTolerance);
}
/// <summary>
/// Selects the lowest camera resolution (at least 600 px wide) whose aspect ratio
/// matches the display's within a small tolerance; when nothing matches, falls
/// back to the resolution whose aspect ratio is closest to the display's.
/// Also records the chosen resolution in lastResolutionSet.
/// </summary>
public CameraResolution SelectLowestResolutionMatchingDisplayAspectRatio(
    List <CameraResolution> availableResolutions)
{
    CameraResolution result = null;
    //a tolerance of 0.1 should not be visible to the user
    double aspectTolerance = 0.1;
    // Normalize for orientation so we always compare "long side / short side".
    var displayOrientationHeight = DeviceDisplay.MainDisplayInfo.Orientation == DisplayOrientation.Portrait
        ? DeviceDisplay.MainDisplayInfo.Height
        : DeviceDisplay.MainDisplayInfo.Width;
    var displayOrientationWidth = DeviceDisplay.MainDisplayInfo.Orientation == DisplayOrientation.Portrait
        ? DeviceDisplay.MainDisplayInfo.Width
        : DeviceDisplay.MainDisplayInfo.Height;
    var targetRatio = displayOrientationHeight / displayOrientationWidth;
    var targetHeight = displayOrientationHeight;
    var minDiff = double.MaxValue;
    // The camera API lists resolutions from highest to lowest; walk down to the
    // lowest in-tolerance entry (lowest is fastest for QR detection), requiring
    // at least 600 px of width so detection still has enough pixels to work with.
    foreach (var r in availableResolutions.Where(r =>
        (Math.Abs(((double)r.Width / r.Height) - targetRatio) < aspectTolerance) && r.Width > 600))
    {
        //slowly going down the list to the lowest matching solution with the correct aspect ratio
        if (Math.Abs(r.Height - targetHeight) < minDiff)
        {
            minDiff = Math.Abs(r.Height - targetHeight);
        }
        result = r;
    }
    if (result == null)
    {
        // Fallback: nothing within tolerance — pick the resolution whose aspect
        // ratio is CLOSEST to the display's.
        // BUG FIX: the original key expression was Math.Abs(ratio = (display ratio)),
        // an ASSIGNMENT that made every sort key identical, so OrderBy was a no-op
        // and FirstOrDefault always returned the first (highest) resolution.
        // It must subtract the display ratio from the candidate's ratio.
        var displayRatio = DeviceDisplay.MainDisplayInfo.Height / DeviceDisplay.MainDisplayInfo.Width;
        var smallestDiff = availableResolutions.OrderBy(s =>
        {
            var ratio = DeviceDisplay.MainDisplayInfo.Orientation == DisplayOrientation.Portrait
                ? (double)s.Width / s.Height
                : (double)s.Height / s.Width;
            return Math.Abs(ratio - displayRatio);
        }).FirstOrDefault();
        result = new CameraResolution() { Width = smallestDiff.Width, Height = smallestDiff.Height };
    }
    // Remember the chosen resolution for later camera reconfiguration.
    lastResolutionSet = new CameraResolution();
    lastResolutionSet.Width = result.Width;
    lastResolutionSet.Height = result.Height;
    return result;
}
/// <summary>
/// Enumerates the DirectShow media types exposed on the capture pin of the
/// video input device at <paramref name="deviceIndex"/> and returns the list of
/// resolutions offered at the highest bit depth seen. Returns null on any error.
/// </summary>
public static List <CameraResolution> GetAvailableResolutions(int deviceIndex) // DsDevice vidDev)
{
    try
    {
        DsDevice[] captureDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
        DsDevice vidDev = captureDevices[deviceIndex];
        int hr; // HRESULTs are captured but never checked — NOTE(review): consider DsError.ThrowExceptionForHR.
        int max = 0;
        int bitCount = 0;
        IBaseFilter sourceFilter = null;
        var mFilterGraph2 = new FilterGraph() as IFilterGraph2;
        hr = mFilterGraph2.AddSourceFilterForMoniker(vidDev.Mon, null, vidDev.Name, out sourceFilter);
        var pRaw2 = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0);
        var AvailableResolutions = new List <CameraResolution>();
        VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
        IEnumMediaTypes mediaTypeEnum;
        hr = pRaw2.EnumMediaTypes(out mediaTypeEnum);
        AMMediaType[] mediaTypes = new AMMediaType[1];
        IntPtr fetched = IntPtr.Zero;
        hr = mediaTypeEnum.Next(1, mediaTypes, fetched);
        // NOTE(review): `fetched != null` is always true (IntPtr is a struct), so the
        // loop actually terminates only when Next() stops filling mediaTypes[0].
        while (fetched != null && mediaTypes[0] != null)
        {
            Marshal.PtrToStructure(mediaTypes[0].formatPtr, videoInfoHeader);
            if (videoInfoHeader.BmiHeader.Size != 0 && videoInfoHeader.BmiHeader.BitCount != 0)
            {
                // When a higher bit depth appears, discard everything collected so far
                // and keep only resolutions at the new (deeper) format.
                if (videoInfoHeader.BmiHeader.BitCount > bitCount)
                {
                    AvailableResolutions.Clear();
                    max = 0;
                    bitCount = videoInfoHeader.BmiHeader.BitCount;
                }
                CameraResolution availableResolution = new CameraResolution();
                availableResolution.HorizontalResolution = videoInfoHeader.BmiHeader.Width;
                availableResolution.VerticalResolution = videoInfoHeader.BmiHeader.Height;
                AvailableResolutions.Add(availableResolution);
                // Track the largest dimension seen (max is not returned; kept as in original).
                if (videoInfoHeader.BmiHeader.Width > max || videoInfoHeader.BmiHeader.Height > max)
                {
                    max = (Math.Max(videoInfoHeader.BmiHeader.Width, videoInfoHeader.BmiHeader.Height));
                }
            }
            hr = mediaTypeEnum.Next(1, mediaTypes, fetched);
        }
        // NOTE(review): AMMediaType structures and the COM filter graph objects are
        // never freed/released here — confirm whether DsUtils.FreeAMMediaType and
        // Marshal.ReleaseComObject should be called.
        return(AvailableResolutions);
    }
    catch (Exception ex)
    {
        // Best-effort: any interop failure is reported as "no resolutions available".
        return(null);
    }
}
// Unity entry point: configures NatCam (camera selection, preview resolution,
// framerate), starts the preview, and wires up the FPS monitor UI.
public virtual void Start()
{
    fpsMonitor = GetComponent <FpsMonitor> ();
    // Warn (but continue) when camera permissions are missing.
    if (!NatCam.Implementation.HasPermissions)
    {
        Debug.LogError("NatCam.Implementation.HasPermissions == false");
        if (fpsMonitor != null)
        {
            fpsMonitor.consoleText = "NatCam.Implementation.HasPermissions == false";
        }
    }
    // Load global camera benchmark settings.
    int width, height, fps;
    NatCamWithOpenCVForUnityExample.GetCameraResolution(out width, out height);
    NatCamWithOpenCVForUnityExample.GetCameraFps(out fps);
    previewResolution = new NatCamU.Core.CameraResolution(width, height);
    requestedFPS = fps;
    // Set the active camera
    NatCam.Camera = useFrontCamera ? DeviceCamera.FrontCamera : DeviceCamera.RearCamera;
    // Null checking (DeviceCamera supports implicit bool conversion here)
    if (!NatCam.Camera)
    {
        Debug.LogError("Camera is null. Consider using " + (useFrontCamera ? "rear" : "front") + " camera");
        return;
    }
    if (!preview)
    {
        Debug.LogError("Preview RawImage has not been set");
        return;
    }
    // Set the camera's preview resolution
    NatCam.Camera.PreviewResolution = previewResolution;
    // Set the camera framerate
    NatCam.Camera.Framerate = requestedFPS;
    NatCam.Play();
    NatCam.OnStart += OnStart;
    NatCam.OnFrame += OnFrame;
    // Populate the FPS monitor with placeholder rows; values are filled per-frame.
    if (fpsMonitor != null)
    {
        fpsMonitor.Add("Name", "NatCamPreviewOnlyExample");
        fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
        fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));
        fpsMonitor.Add("width", "");
        fpsMonitor.Add("height", "");
        fpsMonitor.Add("orientation", "");
    }
    imageProcessingTypeDropdown.value = (int)imageProcessingType;
}
/// <summary>
/// Builds the ZXing scanning options: QR is always accepted, extra symbologies
/// (Code/EAN/UPC families) are added per user preference, and a custom camera
/// resolution selector works around a ZXing resolution bug.
/// </summary>
private void SetScanOptions()
{
    var options = new ZXing.Mobile.MobileBarcodeScanningOptions();
    //options.TryHarder = true;
    //options.TryInverted = true;
    //options.AutoRotate = true;
    options.PossibleFormats = new List <ZXing.BarcodeFormat> {
        ZXing.BarcodeFormat.QR_CODE
    };
    // BUG FIX: the original called Enumerable.Append(), which returns a NEW
    // sequence and leaves the underlying list untouched — none of the optional
    // formats were ever registered. List<T>.Add() mutates the list in place.
    if (Parameters.Options.AcceptBarcode_Code)
    {
        options.PossibleFormats.Add(ZXing.BarcodeFormat.CODE_39);
        options.PossibleFormats.Add(ZXing.BarcodeFormat.CODE_93);
        options.PossibleFormats.Add(ZXing.BarcodeFormat.CODE_128);
        options.PossibleFormats.Add(ZXing.BarcodeFormat.CODABAR);
    }
    if (Parameters.Options.AcceptBarcode_Ean)
    {
        options.PossibleFormats.Add(ZXing.BarcodeFormat.EAN_8);
        options.PossibleFormats.Add(ZXing.BarcodeFormat.EAN_13);
    }
    if (Parameters.Options.AcceptBarcode_Upc)
    {
        options.PossibleFormats.Add(ZXing.BarcodeFormat.UPC_A);
        options.PossibleFormats.Add(ZXing.BarcodeFormat.UPC_E);
        options.PossibleFormats.Add(ZXing.BarcodeFormat.UPC_EAN_EXTENSION);
    }
    // solve camera resolution bug (up to ZXing 3.1.0 beta2)
    options.CameraResolutionSelector = new CameraResolutionSelectorDelegate((List <CameraResolution> availableResolutions) =>
    {
        CameraResolution result = null;
        double aspectTolerance = 0.1;
        // Normalize for orientation so we always compare "long side / short side".
        var displayOrientationHeight = DeviceDisplay.MainDisplayInfo.Orientation == DisplayOrientation.Portrait
            ? DeviceDisplay.MainDisplayInfo.Height
            : DeviceDisplay.MainDisplayInfo.Width;
        var displayOrientationWidth = DeviceDisplay.MainDisplayInfo.Orientation == DisplayOrientation.Portrait
            ? DeviceDisplay.MainDisplayInfo.Width
            : DeviceDisplay.MainDisplayInfo.Height;
        var targetRatio = displayOrientationHeight / displayOrientationWidth;
        var targetHeight = displayOrientationHeight;
        var minDiff = double.MaxValue;
        // Resolutions are listed highest-first; keep walking so the last (lowest)
        // in-tolerance resolution wins.
        foreach (var r in availableResolutions.Where(r => Math.Abs(((double)r.Width / r.Height) - targetRatio) < aspectTolerance))
        {
            if (Math.Abs(r.Height - targetHeight) < minDiff)
            {
                minDiff = Math.Abs(r.Height - targetHeight);
            }
            result = r;
        }
        return result;
    });
    zxing.Options = options;
}
/// <summary>
/// Creates an AR experience description.
/// </summary>
/// <param name="name">Display name of the experience.</param>
/// <param name="path">Path/URL the experience is loaded from.</param>
/// <param name="features">Required feature flags.</param>
/// <param name="position">Camera position to use (default: Default).</param>
/// <param name="resolution">Camera resolution to use (default: SD 640x480).</param>
/// <param name="focusMode">Camera focus mode (default: continuous autofocus).</param>
/// <param name="extension">Optional required extension, or null.</param>
/// <param name="camera2Enabled">Whether the Camera2 API is enabled.</param>
public ArExperience(string name, string path, Features features, CameraPosition position = CameraPosition.Default, CameraResolution resolution = CameraResolution.SD_640x480, CameraFocusMode focusMode = CameraFocusMode.AutofocusContinuous, string extension = null, bool camera2Enabled = true)
{
    // Identity of the experience.
    Name = name;
    Path = path;
    Extension = extension;
    // Feature and camera configuration.
    FeaturesMask = features;
    CameraPosition = position;
    CameraResolution = resolution;
    CameraFocusMode = focusMode;
    Camera2Enabled = camera2Enabled;
}
/// <summary>
/// Initializes a new instance of the <see cref="KinectDepthCamera"/> class.
/// </summary>
///
/// <param name="deviceID">Kinect's device ID (index) to connect to.</param>
/// <param name="resolution">Resolution of depth sensor to set.</param>
/// <param name="provideOriginalDepthImage">Provide original depth image or colored depth map
/// (see <see cref="ProvideOriginalDepthImage"/> property).</param>
///
public KinectDepthCamera(int deviceID, CameraResolution resolution, bool provideOriginalDepthImage)
{
    this.deviceID = deviceID;
    this.resolution = resolution;
    this.provideOriginalDepthImage = provideOriginalDepthImage;

    // Precompute the gamma lookup table used when colouring depth values
    // (same cubic curve as the original Kinect samples).
    for (int index = 0; index < 2048; index++)
    {
        double normalized = index / 2048.0;
        gamma[index] = (ushort)(Math.Pow(normalized, 3.0) * 36.0 * 256.0);
    }
}
// Unity entry point for the title/login scene: shows the title and login panels,
// hides the rest, caches the nickname input field, stamps the version string,
// and kicks off the login flow.
void Start()
{
    CameraResolution.SetCamera();
    titleObject.SetActive(true);
    loginObject.SetActive(true);
    // Panels shown later in the flow start hidden.
    nicknameObject.SetActive(false);
    loadingObject.SetActive(false);
    errorObject.SetActive(false);
    nicknameField = nicknameObject.GetComponentInChildren <InputField>();
    // Display the app version in the title (VERSION_STR is a format template).
    titleObject.transform.GetChild(0).GetComponent <Text>().text = string.Format(VERSION_STR, Application.version);
    StartGame();
}
// Workaround (original comment, translated from Russian: "a crutch/hack for
// selecting the maximum camera resolution"): always pick the resolution with
// the largest width from the list the scanner offers. Returns an empty
// CameraResolution when the list is empty.
private CameraResolution HandleCameraResolutionSelectorDelegate(List <CameraResolution> availableResolutions)
{
    var best = new CameraResolution();
    var bestWidth = 0;
    for (int i = 0; i < availableResolutions.Count; i++)
    {
        var candidate = availableResolutions[i];
        if (candidate.Width > bestWidth)
        {
            bestWidth = candidate.Width;
            best = candidate;
        }
    }
    return best;
}
/// <summary>
/// Formats a CameraResolution value for display. The three fixed presets map to
/// their pixel dimensions; every other value (including Auto) reads "Auto".
/// </summary>
private string CameraResolutionToString(CameraResolution cameraResolution)
{
    if (cameraResolution == CameraResolution.SD_640x480)
    {
        return "640x480";
    }
    if (cameraResolution == CameraResolution.HD_1280x720)
    {
        return "1280x720";
    }
    if (cameraResolution == CameraResolution.Full_HD_1920x1080)
    {
        return "1920x1080";
    }
    return "Auto";
}
// Wires up NatCam as a frame source: prefers the rear camera, applies the
// requested preview resolution, enables autofocus outside the editor, starts
// the preview, and subscribes to start/frame callbacks.
public NatCamCameraSource(CameraResolution cameraResolution)
{
    // Prefer the rear camera; fall back to the front camera if no rear exists.
    NatCam.Camera = DeviceCamera.RearCamera ?? DeviceCamera.FrontCamera;
    NatCam.Camera.PreviewResolution = cameraResolution;
    //NatCam.Camera.Framerate = FrameratePreset.Default;
    // Skipped in the editor, which logs "NatCam Error: Focus mode is not supported on legacy"
    if (!Application.isEditor) // "NatCam Error: Focus mode is not supported on legacy"
    {
        NatCam.Camera.FocusMode = FocusMode.AutoFocus;
    }
    NatCam.Play();
    NatCam.OnStart += OnStart;
    NatCam.OnFrame += OnFrame;
    // Frame counting restarts with each new source instance.
    frameCounter = 0;
}
/// <summary>
/// Returns the first resolution the camera offers, or null when the list is
/// null or empty. Despite the method name, this implementation has always
/// returned the FIRST entry, which camera APIs list as the highest resolution —
/// that behavior is preserved here.
/// </summary>
public CameraResolution SelectLowestResolutionMatchingDisplayAspectRatio(List <CameraResolution> availableResolutions)
{
    // The original computed a display aspect ratio with INTEGER division and a
    // target height, then never used either, and relied on an empty catch-all to
    // survive a null/empty list. Dead code removed; explicit guard added instead.
    if (availableResolutions == null || availableResolutions.Count == 0)
    {
        return null;
    }
    return availableResolutions[0];
}
/// <summary>
/// Maps a CameraResolution preset onto its pixel dimensions; throws
/// <see cref="InvalidOperationException"/> for values outside the known presets.
/// </summary>
private static ImageSize GetSize(CameraResolution resolution)
{
    if (resolution == CameraResolution.QQQQVGA)
    {
        return new ImageSize { Width = 40, Height = 30 };
    }
    if (resolution == CameraResolution.QQQVGA)
    {
        return new ImageSize { Width = 80, Height = 60 };
    }
    if (resolution == CameraResolution.QQVGA)
    {
        return new ImageSize { Width = 160, Height = 120 };
    }
    if (resolution == CameraResolution.QVGA)
    {
        return new ImageSize { Width = 320, Height = 240 };
    }
    if (resolution == CameraResolution.VGA)
    {
        return new ImageSize { Width = 640, Height = 480 };
    }
    if (resolution == CameraResolution.VGA4)
    {
        return new ImageSize { Width = 1280, Height = 960 };
    }
    if (resolution == CameraResolution.VGA16)
    {
        return new ImageSize { Width = 2560, Height = 1920 };
    }
    throw new InvalidOperationException();
}
// Builds an ArExperience from the current form inputs, or returns null when
// the URL is not accepted.
private ArExperience ArExperienceFromCurrentInput()
{
    string title = titleTextField.Text;
    string URL = URLTextField.Text;
    Features features = FeaturesFromSwitchInputs();
    CameraResolution resolution = CameraResolutionFromPickerInput();
    CameraPosition position = CameraPositionFromPickerInput();
    CameraFocusMode focusMode = CameraFocusModeFromPickerInput();
    // NOTE(review): Contains("https") accepts "https" ANYWHERE in the string
    // (e.g. "http://x?https=1" passes). If the intent is an https-scheme check,
    // this should be StartsWith("https", StringComparison.Ordinal) — confirm
    // before changing, as it would reject inputs currently accepted.
    if (URL.Contains("https"))
    {
        ArExperience arExperience = new ArExperience(title, URL, features, position, resolution, focusMode);
        return(arExperience);
    }
    else
    {
        return(null);
    }
}
/// <summary>
/// Translates the shared CameraResolution value into the Wikitude
/// WTCaptureDeviceResolution equivalent. SD and any unrecognized value both
/// map to the SD 640x480 capture resolution.
/// </summary>
public static WTCaptureDeviceResolution ConvertCameraResolution(CameraResolution cameraResolution)
{
    if (cameraResolution == CameraResolution.HD_1280x720)
    {
        return WTCaptureDeviceResolution.WTCaptureDeviceResolution_HD_1280x720;
    }
    if (cameraResolution == CameraResolution.Full_HD_1920x1080)
    {
        return WTCaptureDeviceResolution.WTCaptureDeviceResolution_FULL_HD_1920x1080;
    }
    if (cameraResolution == CameraResolution.Auto)
    {
        return WTCaptureDeviceResolution.WTCaptureDeviceResolution_AUTO;
    }
    // SD_640x480 and unknown values.
    return WTCaptureDeviceResolution.WTCaptureDeviceResolution_SD_640x480;
}
/// <summary>
/// Parses the sample-definition JSON into grouped AR experiences. The JSON is an
/// array of groups, each with a "windowTitle" and a "samples" array; every sample
/// supplies a title, path, required features, and a startup configuration.
/// NOTE(review): "Defintion"/"defintions" are typos, but renaming the public
/// method/parameter would break callers — left as-is.
/// </summary>
public static List <ArExperienceGroup> ParseExampleDefintion(string defintions)
{
    var experienceGroups = new List <ArExperienceGroup>();
    JsonArray sampleGroups = (JsonArray)JsonValue.Parse(defintions);
    foreach (JsonObject sampleGroup in sampleGroups)
    {
        string groupName = sampleGroup["windowTitle"];
        JsonArray experiences = (JsonArray)sampleGroup["samples"];
        var arExperiences = new List <ArExperience>();
        foreach (JsonObject experience in experiences)
        {
            string experienceName = experience["title"];
            string experiencePath = experience["path"];
            // "requiredExtension" is optional; null means no extension required.
            string experienceExtension = null;
            if (experience.TryGetValue("requiredExtension", out JsonValue extension))
            {
                experienceExtension = extension;
            }
            JsonArray featuresArray = (JsonArray)experience["requiredFeatures"];
            JsonObject startupConfig = (JsonObject)experience["startupConfiguration"];
            Features experienceFeatures = ParseFeatures(featuresArray);
            CameraPosition cameraPosition = ParseCameraPosition(startupConfig);
            CameraResolution cameraResolution = ParseCameraResolution(startupConfig);
            var arExperience = new ArExperience(experienceName, experiencePath, experienceFeatures, cameraPosition, cameraResolution, extension: experienceExtension);
            arExperiences.Add(arExperience);
        }
        var arExperienceGroup = new ArExperienceGroup(groupName, arExperiences);
        experienceGroups.Add(arExperienceGroup);
    }
    return(experienceGroups);
}
/// <summary>
/// Initializes a new instance of the <see cref="KinectVideoCamera"/> class.
/// </summary>
///
/// <param name="deviceID">Kinect's device ID (index) to connect to.</param>
/// <param name="resolution">Resolution of video camera to set.</param>
///
// Stores the connection parameters; no device access happens until the camera is started.
public KinectVideoCamera( int deviceID, CameraResolution resolution )
{
    this.deviceID = deviceID;
    this.resolution = resolution;
}
/// <summary>
/// P/Invoke into the native libfreenect library: returns the bitmap mode header
/// for the given depth-sensor resolution/format pair.
/// NOTE(review): confirm the marshalled signature matches the native
/// freenect_find_depth_mode declaration.
/// </summary>
public static extern BitmapInfoHeader freenect_find_depth_mode( CameraResolution resolution, DepthCameraFormat depthFormat );
/// <summary>
/// P/Invoke into the native libfreenect library: returns the bitmap mode header
/// for the given video-camera resolution/format pair.
/// NOTE(review): confirm the marshalled signature matches the native
/// freenect_find_video_mode declaration.
/// </summary>
public static extern BitmapInfoHeader freenect_find_video_mode( CameraResolution resolution, VideoCameraFormat videoFormat );
/// <summary>
/// P/Invoke into the native libfreenect library: returns the bitmap mode header
/// for the given depth-sensor resolution/format pair.
/// </summary>
public static extern BitmapInfoHeader freenect_find_depth_mode(CameraResolution resolution, DepthCameraFormat depthFormat);
/// <summary>
/// P/Invoke into the native libfreenect library: returns the bitmap mode header
/// for the given video-camera resolution/format pair.
/// </summary>
public static extern BitmapInfoHeader freenect_find_video_mode(CameraResolution resolution, VideoCameraFormat videoFormat);
/// <summary>
/// Initializes a new instance of the <see cref="KinectVideoCamera"/> class.
/// </summary>
///
/// <param name="deviceID">Kinect's device ID (index) to connect to.</param>
/// <param name="resolution">Resolution of video camera to set.</param>
/// <param name="cameraMode">Sets video camera mode.</param>
///
// Stores the connection parameters; no device access happens until the camera is started.
public KinectVideoCamera(int deviceID, CameraResolution resolution, VideoCameraMode cameraMode)
{
    this.deviceID = deviceID;
    this.resolution = resolution;
    this.cameraMode = cameraMode;
}
/// <summary>
/// Initializes a new instance of the <see cref="KinectVideoCamera"/> class.
/// </summary>
///
/// <param name="deviceID">Kinect's device ID (index) to connect to.</param>
/// <param name="resolution">Resolution of video camera to set.</param>
///
// Stores the connection parameters; the camera mode keeps its field default.
public KinectVideoCamera(int deviceID, CameraResolution resolution)
{
    this.deviceID = deviceID;
    this.resolution = resolution;
}
// Builds the full AVFoundation capture pipeline for barcode scanning:
// chooses a camera (front/back per options), picks a session preset supported
// by the device, attaches a metadata-output delegate that converts detected
// codes to ZXing results, installs the preview layer, starts the session, and
// finally configures focus/exposure/white-balance. Returns false when no
// capture device or device input is available (e.g. on the simulator).
bool SetupCaptureSession()
{
    var availableResolutions = new List <CameraResolution> ();
    // Preset-to-resolution table; only presets the device supports are offered.
    var consideredResolutions = new Dictionary <NSString, CameraResolution> {
        { AVCaptureSession.Preset352x288, new CameraResolution { Width = 352, Height = 288 } },
        { AVCaptureSession.PresetMedium, new CameraResolution { Width = 480, Height = 360 } },   //480x360
        { AVCaptureSession.Preset640x480, new CameraResolution { Width = 640, Height = 480 } },
        { AVCaptureSession.Preset1280x720, new CameraResolution { Width = 1280, Height = 720 } },
        { AVCaptureSession.Preset1920x1080, new CameraResolution { Width = 1920, Height = 1080 } }
    };
    // configure the capture session for low resolution, change this if your code
    // can cope with more data or volume
    session = new AVCaptureSession()
    {
        SessionPreset = AVCaptureSession.Preset640x480
    };
    // create a device input and attach it to the session
    // var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
    AVCaptureDevice captureDevice = null;
    var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    // Walk the device list; the last assignment wins unless a preferred
    // front/back camera is found first.
    foreach (var device in devices)
    {
        captureDevice = device;
        if (ScanningOptions.UseFrontCameraIfAvailable.HasValue &&
            ScanningOptions.UseFrontCameraIfAvailable.Value &&
            device.Position == AVCaptureDevicePosition.Front)
        {
            break; //Front camera successfully set
        }
        else if (device.Position == AVCaptureDevicePosition.Back &&
                 (!ScanningOptions.UseFrontCameraIfAvailable.HasValue || !ScanningOptions.UseFrontCameraIfAvailable.Value))
        {
            break; //Back camera succesfully set
        }
    }
    if (captureDevice == null)
    {
        Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
        if (overlayView != null)
        {
            this.AddSubview(overlayView);
            this.BringSubviewToFront(overlayView);
        }
        return(false);
    }
    CameraResolution resolution = null;
    // Find resolution
    // Go through the resolutions we can even consider
    foreach (var cr in consideredResolutions)
    {
        // Now check to make sure our selected device supports the resolution
        // so we can add it to the list to pick from
        if (captureDevice.SupportsAVCaptureSessionPreset(cr.Key))
        {
            availableResolutions.Add(cr.Value);
        }
    }
    // Let the scanning options (possibly a user-supplied selector) pick one.
    resolution = ScanningOptions.GetResolution(availableResolutions);
    // See if the user selected a resolution
    if (resolution != null)
    {
        // Now get the preset string from the resolution chosen
        var preset = (from c in consideredResolutions
                      where c.Value.Width == resolution.Width &&
                            c.Value.Height == resolution.Height
                      select c.Key).FirstOrDefault();
        // If we found a matching preset, let's set it on the session
        if (!string.IsNullOrEmpty(preset))
        {
            session.SessionPreset = preset;
        }
    }
    var input = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (input == null)
    {
        Console.WriteLine("No input - this won't work on the simulator, try a physical device");
        if (overlayView != null)
        {
            this.AddSubview(overlayView);
            this.BringSubviewToFront(overlayView);
        }
        return(false);
    }
    else
    {
        session.AddInput(input);
    }
    foundResult = false;
    //Detect barcodes with built in avcapture stuff
    AVCaptureMetadataOutput metadataOutput = new AVCaptureMetadataOutput();
    // Delegate invoked by AVFoundation for every batch of detected metadata
    // objects; throttles analysis and forwards readable codes to resultCallback.
    captureDelegate = new CaptureDelegate(metaDataObjects =>
    {
        if (!analyzing)
        {
            return;
        }
        //Console.WriteLine("Found MetaData Objects");
        var msSinceLastPreview = (DateTime.UtcNow - lastAnalysis).TotalMilliseconds;
        // Skip this batch when we are inside the configured analysis/scan delays
        // or a previous batch is still being processed.
        if (msSinceLastPreview < ScanningOptions.DelayBetweenAnalyzingFrames ||
            (wasScanned && msSinceLastPreview < ScanningOptions.DelayBetweenContinuousScans) ||
            working)
        //|| CancelTokenSource.IsCancellationRequested)
        {
            return;
        }
        try
        {
            working = true;
            wasScanned = false;
            lastAnalysis = DateTime.UtcNow;
            var mdo = metaDataObjects.FirstOrDefault();
            var readableObj = mdo as AVMetadataMachineReadableCodeObject;
            if (readableObj == null)
            {
                return;
            }
            wasScanned = true;
            // Convert the AVFoundation barcode type/value into a ZXing result.
            var zxingFormat = ZXingBarcodeFormatFromAVCaptureBarcodeFormat(readableObj.Type.ToString());
            var rs = new ZXing.Result(readableObj.StringValue, null, null, zxingFormat);
            resultCallback(rs);
        }
        finally
        {
            working = false;
        }
    });
    metadataOutput.SetDelegate(captureDelegate, DispatchQueue.MainQueue);
    session.AddOutput(metadataOutput);
    //Setup barcode formats
    if (ScanningOptions.PossibleFormats != null && ScanningOptions.PossibleFormats.Count > 0)
    {
#if __UNIFIED__
        // Unified API: formats are a flags enum.
        var formats = AVMetadataObjectType.None;
        foreach (var f in ScanningOptions.PossibleFormats)
        {
            formats |= AVCaptureBarcodeFormatFromZXingBarcodeFormat(f);
        }
        formats &= ~AVMetadataObjectType.None;
        metadataOutput.MetadataObjectTypes = formats;
#else
        // Classic API: formats are NSString identifiers.
        var formats = new List <string> ();
        foreach (var f in ScanningOptions.PossibleFormats)
        {
            formats.AddRange(AVCaptureBarcodeFormatFromZXingBarcodeFormat(f));
        }
        metadataOutput.MetadataObjectTypes = (from f in formats.Distinct() select new NSString(f)).ToArray();
#endif
    }
    else
    {
        metadataOutput.MetadataObjectTypes = metadataOutput.AvailableMetadataObjectTypes;
    }
    previewLayer = new AVCaptureVideoPreviewLayer(session);
    // //Framerate set here (15 fps)
    // if (previewLayer.RespondsToSelector(new Selector("connection")))
    // {
    //     if (UIDevice.CurrentDevice.CheckSystemVersion (7, 0))
    //     {
    //         var perf1 = PerformanceCounter.Start ();
    //         NSError lockForConfigErr = null;
    //         captureDevice.LockForConfiguration (out lockForConfigErr);
    //         if (lockForConfigErr == null)
    //         {
    //             captureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 10);
    //             captureDevice.UnlockForConfiguration ();
    //         }
    //         PerformanceCounter.Stop (perf1, "PERF: ActiveVideoMinFrameDuration Took {0} ms");
    //     }
    //     else
    //         previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10);
    // }
#if __UNIFIED__
    previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#else
    previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#endif
    // Size the preview to fill this view and center it.
    previewLayer.Frame = new CGRect(0, 0, this.Frame.Width, this.Frame.Height);
    previewLayer.Position = new CGPoint(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));
    layerView = new UIView(new CGRect(0, 0, this.Frame.Width, this.Frame.Height));
    layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
    layerView.Layer.AddSublayer(previewLayer);
    this.AddSubview(layerView);
    ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);
    // Keep the overlay (viewfinder UI) above the preview.
    if (overlayView != null)
    {
        this.AddSubview(overlayView);
        this.BringSubviewToFront(overlayView);
        //overlayView.LayoutSubviews ();
    }
    session.StartRunning();
    Console.WriteLine("RUNNING!!!");
    //output.AlwaysDiscardsLateVideoFrames = true;
    Console.WriteLine("SetupCamera Finished");
    //session.AddOutput (output);
    //session.StartRunning ();
    // Best-effort camera tuning: continuous autofocus/exposure/white-balance
    // where supported, with near-range focus restriction and a centered point
    // of interest (barcodes are usually held close to the lens).
    if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
    {
        NSError err = null;
        if (captureDevice.LockForConfiguration(out err))
        {
            if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
            {
                captureDevice.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
            }
            else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.AutoFocus))
            {
                captureDevice.FocusMode = AVCaptureFocusMode.AutoFocus;
            }
            if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
            {
                captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
            }
            else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose))
            {
                captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose;
            }
            if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
            {
                captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
            }
            else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance))
            {
                captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance;
            }
            if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported)
            {
                captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
            }
            if (captureDevice.FocusPointOfInterestSupported)
            {
                captureDevice.FocusPointOfInterest = new CGPoint(0.5f, 0.5f);
            }
            if (captureDevice.ExposurePointOfInterestSupported)
            {
                captureDevice.ExposurePointOfInterest = new CGPoint(0.5f, 0.5f);
            }
            captureDevice.UnlockForConfiguration();
        }
        else
        {
            Console.WriteLine("Failed to Lock for Config: " + err.Description);
        }
    }
    return(true);
}
/// <summary>
/// Initializes a new instance of the <see cref="KinectDepthCamera"/> class.
/// </summary>
///
/// <param name="deviceID">Kinect's device ID (index) to connect to.</param>
/// <param name="resolution">Resolution of depth sensor to set.</param>
///
// Chains to the full constructor with the colored-depth-map default (false =
// do not provide the original depth image).
public KinectDepthCamera( int deviceID, CameraResolution resolution ) : this( deviceID, resolution, false ) { }
/// <summary>
/// Initializes a new instance of the <see cref="KinectVideoCamera"/> class.
/// </summary>
///
/// <param name="deviceID">Kinect's device ID (index) to connect to.</param>
/// <param name="resolution">Resolution of video camera to set.</param>
/// <param name="cameraMode">Sets video camera mode.</param>
///
// Stores the connection parameters; no device access happens until the camera is started.
public KinectVideoCamera( int deviceID, CameraResolution resolution, VideoCameraMode cameraMode )
{
    this.deviceID = deviceID;
    this.resolution = resolution;
    this.cameraMode = cameraMode;
}
/// <summary>
/// Initializes a new instance of the <see cref="KinectDepthCamera"/> class.
/// </summary>
///
/// <param name="deviceID">Kinect's device ID (index) to connect to.</param>
/// <param name="resolution">Resolution of depth sensor to set.</param>
///
// Chains to the full constructor with the colored-depth-map default (false =
// do not provide the original depth image).
public KinectDepthCamera(int deviceID, CameraResolution resolution) : this(deviceID, resolution, false) { }
// Unity scene bootstrap: apply the camera/resolution setup first, then notify
// the loader that this scene is ready.
void Awake()
{
    CameraResolution.SetCamera();
    Loader.LoaderCallback();
}
/// <summary>
/// Translates the shared CameraResolution value into the platform-specific
/// CameraSettings.CameraResolution equivalent. SD and any unrecognized value
/// both map to SD640x480.
/// </summary>
public static CameraSettings.CameraResolution ConvertSharedToPlatformResolution(CameraResolution resolution)
{
    if (resolution == CameraResolution.HD_1280x720)
    {
        return CameraSettings.CameraResolution.HD1280x720;
    }
    if (resolution == CameraResolution.Full_HD_1920x1080)
    {
        return CameraSettings.CameraResolution.FULLHD1920x1080;
    }
    if (resolution == CameraResolution.Auto)
    {
        return CameraSettings.CameraResolution.Auto;
    }
    // SD_640x480 and unknown values.
    return CameraSettings.CameraResolution.SD640x480;
}
/// <summary>
/// Initializes a new instance of the <see cref="KinectDepthCamera"/> class.
/// </summary>
///
/// <param name="deviceID">Kinect's device ID (index) to connect to.</param>
/// <param name="resolution">Resolution of depth sensor to set.</param>
/// <param name="provideOriginalDepthImage">Provide original depth image or colored depth map
/// (see <see cref="ProvideOriginalDepthImage"/> property).</param>
///
public KinectDepthCamera( int deviceID, CameraResolution resolution, bool provideOriginalDepthImage )
{
    this.deviceID = deviceID;
    this.resolution = resolution;
    this.provideOriginalDepthImage = provideOriginalDepthImage;

    // initialize gamma values (as shown in the original Kinect samples)
    // Cubic curve (i/2048)^3 scaled by 36*256, used when colouring depth values.
    for ( int i = 0; i < 2048; i++ )
    {
        double value = i / 2048.0;
        value = Math.Pow( value, 3.0 );
        gamma[i] = (ushort) ( value * 36.0 * 256.0 );
    }
}
// Builds and starts the whole AVFoundation capture pipeline: picks a capture
// device (front/back per options), selects a session preset matching the
// requested resolution, wires the preview layer into this view, attaches a
// video-data output that feeds frames into the barcode decoder, and finally
// configures focus/exposure/white balance. Returns false when no capture
// device or device input is available (e.g. on the simulator), true otherwise.
bool SetupCaptureSession()
{
    // NOTE(review): 'started' appears unused in this method — confirm before removing.
    var started = DateTime.UtcNow;

    var availableResolutions = new List <CameraResolution> ();

    // All presets this view knows how to map to concrete pixel dimensions.
    var consideredResolutions = new Dictionary <NSString, CameraResolution> {
        { AVCaptureSession.Preset352x288, new CameraResolution { Width = 352, Height = 288 } },
        { AVCaptureSession.PresetMedium, new CameraResolution { Width = 480, Height = 360 } }, //480x360
        { AVCaptureSession.Preset640x480, new CameraResolution { Width = 640, Height = 480 } },
        { AVCaptureSession.Preset1280x720, new CameraResolution { Width = 1280, Height = 720 } },
        { AVCaptureSession.Preset1920x1080, new CameraResolution { Width = 1920, Height = 1080 } }
    };

    // configure the capture session for low resolution, change this if your code
    // can cope with more data or volume
    session = new AVCaptureSession() {
        SessionPreset = AVCaptureSession.Preset640x480
    };

    // create a device input and attach it to the session
    // var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
    AVCaptureDevice captureDevice = null;
    var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    // Walk the video devices, keeping the last one seen; stop early when the
    // position (front/back) requested by ScanningOptions is found.
    foreach (var device in devices)
    {
        captureDevice = device;
        if (ScanningOptions.UseFrontCameraIfAvailable.HasValue &&
            ScanningOptions.UseFrontCameraIfAvailable.Value &&
            device.Position == AVCaptureDevicePosition.Front)
        {
            break; //Front camera successfully set
        }
        else if (device.Position == AVCaptureDevicePosition.Back &&
                 (!ScanningOptions.UseFrontCameraIfAvailable.HasValue ||
                  !ScanningOptions.UseFrontCameraIfAvailable.Value))
        {
            break; //Back camera successfully set
        }
    }
    if (captureDevice == null)
    {
        Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
        // Still show the overlay so the UI isn't blank on the simulator.
        if (overlayView != null)
        {
            this.AddSubview(overlayView);
            this.BringSubviewToFront(overlayView);
        }
        return(false);
    }

    CameraResolution resolution = null;

    // Find resolution
    // Go through the resolutions we can even consider
    foreach (var cr in consideredResolutions)
    {
        // Now check to make sure our selected device supports the resolution
        // so we can add it to the list to pick from
        if (captureDevice.SupportsAVCaptureSessionPreset(cr.Key))
        {
            availableResolutions.Add(cr.Value);
        }
    }

    // Let the options selector pick from the supported resolutions.
    resolution = ScanningOptions.GetResolution(availableResolutions);

    // See if the user selected a resolution
    if (resolution != null)
    {
        // Now get the preset string from the resolution chosen
        var preset = (from c in consideredResolutions
                      where c.Value.Width == resolution.Width
                         && c.Value.Height == resolution.Height
                      select c.Key).FirstOrDefault();

        // If we found a matching preset, let's set it on the session
        if (!string.IsNullOrEmpty(preset))
        {
            session.SessionPreset = preset;
        }
    }

    var input = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (input == null)
    {
        Console.WriteLine("No input - this won't work on the simulator, try a physical device");
        if (overlayView != null)
        {
            this.AddSubview(overlayView);
            this.BringSubviewToFront(overlayView);
        }
        return(false);
    }
    else
    {
        session.AddInput(input);
    }

    var startedAVPreviewLayerAlloc = PerformanceCounter.Start();
    previewLayer = new AVCaptureVideoPreviewLayer(session);
    PerformanceCounter.Stop(startedAVPreviewLayerAlloc, "Alloc AVCaptureVideoPreviewLayer took {0} ms.");

    // //Framerate set here (15 fps)
    // if (UIDevice.CurrentDevice.CheckSystemVersion (7, 0))
    // {
    //     var perf1 = PerformanceCounter.Start ();
    //     NSError lockForConfigErr = null;
    //     captureDevice.LockForConfiguration (out lockForConfigErr);
    //     if (lockForConfigErr == null)
    //     {
    //         captureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 10);
    //         captureDevice.UnlockForConfiguration ();
    //     }
    //     PerformanceCounter.Stop (perf1, "PERF: ActiveVideoMinFrameDuration Took {0} ms");
    // }
    // else
    //     previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10);

    var perf2 = PerformanceCounter.Start();
    // Unified vs classic Xamarin.iOS API surface differ only in property name.
#if __UNIFIED__
    previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#else
    previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#endif
    // Fill this view with the preview, centered.
    previewLayer.Frame = new CGRect(0, 0, this.Frame.Width, this.Frame.Height);
    previewLayer.Position = new CGPoint(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));

    // Host the preview layer in a dedicated, auto-resizing subview.
    layerView = new UIView(new CGRect(0, 0, this.Frame.Width, this.Frame.Height));
    layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
    layerView.Layer.AddSublayer(previewLayer);
    this.AddSubview(layerView);

    ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);

    // Keep any overlay (viewfinder UI) above the camera preview.
    if (overlayView != null)
    {
        this.AddSubview(overlayView);
        this.BringSubviewToFront(overlayView);
        //overlayView.LayoutSubviews ();
    }
    PerformanceCounter.Stop(perf2, "PERF: Setting up layers took {0} ms");

    var perf3 = PerformanceCounter.Start();
    session.StartRunning();
    PerformanceCounter.Stop(perf3, "PERF: session.StartRunning() took {0} ms");

    var perf4 = PerformanceCounter.Start();

    // Request BGRA frames so the decoder can consume them directly.
    var videoSettings = NSDictionary.FromObjectAndKey(new NSNumber((int)CVPixelFormatType.CV32BGRA),
                                                      CVPixelBuffer.PixelFormatTypeKey);

    // create a VideoDataOutput and add it to the session
    output = new AVCaptureVideoDataOutput {
        WeakVideoSettings = videoSettings
    };

    // configure the output
    queue = new DispatchQueue("ZxingScannerView"); // (Guid.NewGuid().ToString());

    var barcodeReader = ScanningOptions.BuildBarcodeReader();

    // Per-frame callback: rotate if needed, attempt a decode, report a hit
    // via resultCallback. Returns true when a barcode was found.
    outputRecorder = new OutputRecorder(this, img =>
    {
        var ls = img;

        // Skip frames entirely while analysis is paused.
        if (!IsAnalyzing)
        {
            return(false);
        }

        try
        {
            var perfDecode = PerformanceCounter.Start();

            if (shouldRotatePreviewBuffer)
            {
                ls = ls.rotateCounterClockwise();
            }

            var result = barcodeReader.Decode(ls);

            PerformanceCounter.Stop(perfDecode, "Decode Time: {0} ms");

            if (result != null)
            {
                resultCallback(result);
                return(true);
            }
        }
        catch (Exception ex)
        {
            // Decode failures are expected on frames without a barcode;
            // log and keep scanning.
            Console.WriteLine("DECODE FAILED: " + ex);
        }

        return(false);
    });

    output.AlwaysDiscardsLateVideoFrames = true;
    output.SetSampleBufferDelegate(outputRecorder, queue);

    PerformanceCounter.Stop(perf4, "PERF: SetupCamera Finished. Took {0} ms.");

    session.AddOutput(output);
    //session.StartRunning ();

    var perf5 = PerformanceCounter.Start();
    NSError err = null;
    if (captureDevice.LockForConfiguration(out err))
    {
        // Focus: honor the explicit disable flag, otherwise prefer
        // continuous autofocus, then one-shot autofocus.
        if (ScanningOptions.DisableAutofocus)
        {
            captureDevice.FocusMode = AVCaptureFocusMode.Locked;
        }
        else
        {
            if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
            {
                captureDevice.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
            }
            else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.AutoFocus))
            {
                captureDevice.FocusMode = AVCaptureFocusMode.AutoFocus;
            }
        }

        // Exposure: continuous if available, otherwise one-shot.
        if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
        {
            captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
        }
        else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose))
        {
            captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose;
        }

        // White balance: same continuous-then-one-shot preference.
        if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
        {
            captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
        }
        else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance))
        {
            captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance;
        }

        // Barcodes are scanned close to the lens; restrict autofocus to near
        // range where the OS (iOS 7+) and hardware support it.
        if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported)
        {
            captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
        }

        // Aim focus/exposure at the center of the frame.
        if (captureDevice.FocusPointOfInterestSupported)
        {
            captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f);
        }

        if (captureDevice.ExposurePointOfInterestSupported)
        {
            captureDevice.ExposurePointOfInterest = new PointF(0.5f, 0.5f);
        }

        captureDevice.UnlockForConfiguration();
    }
    else
    {
        Console.WriteLine("Failed to Lock for Config: " + err.Description);
    }
    PerformanceCounter.Stop(perf5, "PERF: Setup Focus in {0} ms.");

    return(true);
}
// Configures the (already opened) Android camera for scanning: preview
// format, focus mode, preview FPS range, and preview resolution. Opens the
// camera first if needed and silently returns when no camera is available.
private void ApplyCameraSettings()
{
    if (Camera == null)
    {
        OpenCamera();
    }

    // do nothing if something wrong with camera
    if (Camera == null)
    {
        return;
    }

    var parameters = Camera.GetParameters();
    // NV21 is the format the decoder expects from the preview callback.
    parameters.PreviewFormat = ImageFormatType.Nv21;

    // Focus mode: explicit disable wins; otherwise prefer continuous modes,
    // then one-shot auto, then fixed.
    var supportedFocusModes = parameters.SupportedFocusModes;
    if (_scannerHost.ScanningOptions.DisableAutofocus)
    {
        parameters.FocusMode = Camera.Parameters.FocusModeFixed;
    }
    else if (Build.VERSION.SdkInt >= BuildVersionCodes.IceCreamSandwich &&
             supportedFocusModes.Contains(Camera.Parameters.FocusModeContinuousPicture))
    {
        parameters.FocusMode = Camera.Parameters.FocusModeContinuousPicture;
    }
    else if (supportedFocusModes.Contains(Camera.Parameters.FocusModeContinuousVideo))
    {
        parameters.FocusMode = Camera.Parameters.FocusModeContinuousVideo;
    }
    else if (supportedFocusModes.Contains(Camera.Parameters.FocusModeAuto))
    {
        parameters.FocusMode = Camera.Parameters.FocusModeAuto;
    }
    else if (supportedFocusModes.Contains(Camera.Parameters.FocusModeFixed))
    {
        parameters.FocusMode = Camera.Parameters.FocusModeFixed;
    }

    var selectedFps = parameters.SupportedPreviewFpsRange.FirstOrDefault();
    if (selectedFps != null)
    {
        // This will make sure we select a range with the lowest minimum FPS
        // and maximum FPS which still has the lowest minimum
        // This should help maximize performance / support for hardware
        // NOTE(review): the condition below actually prefers ranges with a
        // HIGHER maximum FPS (fpsRange[1] > selectedFps[1]) among those with a
        // minimum no higher than the current pick — confirm the comment above
        // matches the intended behavior.
        foreach (var fpsRange in parameters.SupportedPreviewFpsRange)
        {
            if (fpsRange[0] <= selectedFps[0] && fpsRange[1] > selectedFps[1])
            {
                selectedFps = fpsRange;
            }
        }
        parameters.SetPreviewFpsRange(selectedFps[0], selectedFps[1]);
    }

    // Preview resolution: SupportedPreviewSizes can be null on some devices,
    // hence the guard.
    CameraResolution resolution = null;
    var supportedPreviewSizes = parameters.SupportedPreviewSizes;
    if (supportedPreviewSizes != null)
    {
        var availableResolutions = supportedPreviewSizes.Select(sps => new CameraResolution
        {
            Width = sps.Width,
            Height = sps.Height
        });

        // Try and get a desired resolution from the options selector
        resolution = _scannerHost.ScanningOptions.GetResolution(availableResolutions.ToList());

        // If the user did not specify a resolution, let's try and find a suitable one
        // (a mid-range size: decode-friendly without being too large).
        if (resolution == null)
        {
            foreach (var sps in supportedPreviewSizes)
            {
                if (sps.Width >= 640 && sps.Width <= 1000 && sps.Height >= 360 && sps.Height <= 1000)
                {
                    resolution = new CameraResolution
                    {
                        Width = sps.Width,
                        Height = sps.Height
                    };
                    break;
                }
            }
        }
    }

    // Google Glass requires this fix to display the camera output correctly
    if (Build.Model.Contains("Glass"))
    {
        resolution = new CameraResolution
        {
            Width = 640,
            Height = 360
        };
        // Glass requires 30fps
        parameters.SetPreviewFpsRange(30000, 30000);
    }

    // Hopefully a resolution was selected at some point
    if (resolution != null)
    {
        Android.Util.Log.Debug(MobileBarcodeScanner.TAG,
            "Selected Resolution: " + resolution.Width + "x" + resolution.Height);
        parameters.SetPreviewSize(resolution.Width, resolution.Height);
    }

    Camera.SetParameters(parameters);

    SetCameraDisplayOrientation();
}
// Configures the Android camera for scanning: preview format, focus mode,
// preview FPS range, and preview resolution. Safe to call even when the
// camera is unavailable or reports no capability lists.
private void ApplyCameraSettings()
{
    // Camera can be null when opening failed or it was already released;
    // mirror the guard used by the other ApplyCameraSettings variant in
    // this source instead of throwing a NullReferenceException.
    if (Camera == null)
        return;

    var parameters = Camera.GetParameters();
    // NV21 is the format the decoder expects from the preview callback.
    parameters.PreviewFormat = ImageFormatType.Nv21;

    // Focus mode: prefer continuous modes, then one-shot auto, then fixed.
    // getSupportedFocusModes() is documented to return null when focus mode
    // setting is not supported, so guard before calling Contains.
    var supportedFocusModes = parameters.SupportedFocusModes;
    if (supportedFocusModes != null)
    {
        if (Build.VERSION.SdkInt >= BuildVersionCodes.IceCreamSandwich &&
            supportedFocusModes.Contains(Camera.Parameters.FocusModeContinuousPicture))
            parameters.FocusMode = Camera.Parameters.FocusModeContinuousPicture;
        else if (supportedFocusModes.Contains(Camera.Parameters.FocusModeContinuousVideo))
            parameters.FocusMode = Camera.Parameters.FocusModeContinuousVideo;
        else if (supportedFocusModes.Contains(Camera.Parameters.FocusModeAuto))
            parameters.FocusMode = Camera.Parameters.FocusModeAuto;
        else if (supportedFocusModes.Contains(Camera.Parameters.FocusModeFixed))
            parameters.FocusMode = Camera.Parameters.FocusModeFixed;
    }

    var selectedFps = parameters.SupportedPreviewFpsRange.FirstOrDefault();
    if (selectedFps != null)
    {
        // This will make sure we select a range with the lowest minimum FPS
        // and maximum FPS which still has the lowest minimum
        // This should help maximize performance / support for hardware
        foreach (var fpsRange in parameters.SupportedPreviewFpsRange)
        {
            if (fpsRange[0] <= selectedFps[0] && fpsRange[1] > selectedFps[1])
                selectedFps = fpsRange;
        }
        parameters.SetPreviewFpsRange(selectedFps[0], selectedFps[1]);
    }

    // Preview resolution: getSupportedPreviewSizes() may return null on some
    // devices (the sibling ApplyCameraSettings already guards this), so only
    // attempt selection when a list is actually available.
    CameraResolution resolution = null;
    var supportedPreviewSizes = parameters.SupportedPreviewSizes;
    if (supportedPreviewSizes != null)
    {
        var availableResolutions = supportedPreviewSizes.Select(sps => new CameraResolution
        {
            Width = sps.Width,
            Height = sps.Height
        });

        // Try and get a desired resolution from the options selector
        resolution = _scanningOptions.GetResolution(availableResolutions.ToList());

        // If the user did not specify a resolution, let's try and find a
        // suitable mid-range one (decode-friendly without being too large).
        if (resolution == null)
        {
            foreach (var sps in supportedPreviewSizes)
            {
                if (sps.Width >= 640 && sps.Width <= 1000 && sps.Height >= 360 && sps.Height <= 1000)
                {
                    resolution = new CameraResolution
                    {
                        Width = sps.Width,
                        Height = sps.Height
                    };
                    break;
                }
            }
        }
    }

    // Google Glass requires this fix to display the camera output correctly
    if (Build.Model.Contains("Glass"))
    {
        resolution = new CameraResolution
        {
            Width = 640,
            Height = 360
        };
        // Glass requires 30fps
        parameters.SetPreviewFpsRange(30000, 30000);
    }

    // Hopefully a resolution was selected at some point
    if (resolution != null)
    {
        Android.Util.Log.Debug(MobileBarcodeScanner.TAG,
            "Selected Resolution: " + resolution.Width + "x" + resolution.Height);
        parameters.SetPreviewSize(resolution.Width, resolution.Height);
    }

    Camera.SetParameters(parameters);

    SetCameraDisplayOrientation();
}