/// <summary>
/// Wires the capture pipeline to <paramref name="view"/>: asks for camera
/// permission, configures and starts the session on the session queue, and
/// builds the music/object processors from the bundled and recorded sounds.
/// </summary>
public FrameExtractor(UIView view) : base()
{
    background = view;
    CheckPermission(true);

    // Session configuration and StartRunning are serialized on the session
    // queue so the calling (UI) thread is never blocked.
    sessionQueue.DispatchAsync(ConfigureSession);
    sessionQueue.DispatchAsync(captureSession.StartRunning);

    musicProcessor = new MusicProcessor(26);

    var soundNames = Utils.GetSupportedSounds("sounds");
    Console.WriteLine("supported sounds");
    foreach (var name in soundNames)
        Console.WriteLine(name);

    var recordingNames = Utils.GetSupportedSounds(Path.GetTempPath());
    Console.WriteLine("supported recordings");
    foreach (var name in recordingNames)
        Console.WriteLine(name);

    customObjectsProcessor = new CustomObjectsProcessor(soundNames, recordingNames);
    objectsProcessor = new ObjectsProcessor(soundNames, recordingNames);
}
/// <summary>
/// Resolves camera authorization (requesting it when undetermined, suspending
/// the session queue until the user answers), then configures the session and
/// applies the given video orientation on the session queue.
/// </summary>
public void Prepare(AVCaptureVideoOrientation captureVideoOrientation)
{
    var authStatus = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
    switch (authStatus)
    {
        case AVAuthorizationStatus.NotDetermined:
            // Suspend the queue so ConfigureSession below runs only after the
            // permission prompt has been answered.
            _sessionQueue.Suspend();
            AVCaptureDevice.RequestAccessForMediaType(AVAuthorizationMediaType.Video, granted =>
            {
                if (granted)
                {
                    DispatchQueue.MainQueue.DispatchAsync(
                        () => _captureSessionDelegate.CaptureGrantedSession(AVAuthorizationStatus.Authorized));
                }
                else
                {
                    _setupResult = SessionSetupResult.NotAuthorized;
                }
                _sessionQueue.Resume();
            });
            break;

        case AVAuthorizationStatus.Authorized:
            // Nothing to do; proceed straight to configuration.
            break;

        default:
            // Previously denied or restricted.
            _setupResult = SessionSetupResult.NotAuthorized;
            break;
    }

    _sessionQueue.DispatchAsync(() =>
    {
        ConfigureSession();
        UpdateVideoOrientation(captureVideoOrientation);
    });
}
/// <summary>
/// Verifies that blocks dispatched to a private queue run off the calling
/// thread, regardless of retargeting the default global queue in between.
/// </summary>
public void SetTargetQueue()
{
    var completed = 0;
    var callerThread = Thread.CurrentThread.ManagedThreadId;
    // Seed with the caller's id; each dispatch overwrites its slot.
    var workerThreads = new[] { callerThread, callerThread, callerThread };

    using (var q = new DispatchQueue("my"))
    {
        Console.WriteLine();
        q.DispatchAsync(() => { workerThreads[0] = Thread.CurrentThread.ManagedThreadId; completed++; });
        DispatchQueue.DefaultGlobalQueue.SetTargetQueue(DispatchQueue.MainQueue);
        q.DispatchAsync(() => { workerThreads[1] = Thread.CurrentThread.ManagedThreadId; completed++; });
        DispatchQueue.DefaultGlobalQueue.SetTargetQueue(null);
        q.DispatchAsync(() => { workerThreads[2] = Thread.CurrentThread.ManagedThreadId; completed++; });
        Assert.That(q.Label, Is.EqualTo("my"), "label");
    }

    // Pump the run loop until all three dispatched blocks have executed.
    while (completed != 3)
        NSRunLoop.Current.RunUntil(NSDate.FromTimeIntervalSinceNow(1.0));

    // ensure async dispatches were done on another thread
    Assert.That(callerThread, Is.Not.EqualTo(workerThreads[0]), "t0");
    Assert.That(callerThread, Is.Not.EqualTo(workerThreads[1]), "t1");
    Assert.That(callerThread, Is.Not.EqualTo(workerThreads[2]), "t2");
}
/// <summary>
/// Writes the recorded movie file to the camera roll, then resets recording
/// state on the movie-writing queue and raises <c>RecordingDidStop</c>.
/// </summary>
void SaveMovieToCameraRoll()
{
    using (var assetsLibrary = new ALAssetsLibrary())
    {
        assetsLibrary.WriteVideoToSavedPhotosAlbum(movieURL, (savedUrl, saveError) =>
        {
            if (saveError != null)
                ShowError(saveError);
            else
                RemoveFile(movieURL); // saved successfully; the temp file is no longer needed

            // State mutation happens on the writing queue to stay serialized
            // with the rest of the recording pipeline.
            movieWritingQueue.DispatchAsync(() =>
            {
                recordingWillBeStopped = false;
                IsRecording = false;
                RecordingDidStop?.Invoke();
            });
        });
    }
}
/// <summary>
/// Captures a photo on the session queue: auto flash when supported, high
/// resolution, an embedded thumbnail (iOS 10+), and optionally a Live Photo
/// movie when the configuration allows it.
/// </summary>
public void CapturePhoto(LivePhotoMode livePhotoMode, bool saveToPhotoLibrary)
{
    _sessionQueue.DispatchAsync(() =>
    {
        var settings = AVCapturePhotoSettings.Create();

        // Prefer automatic flash when the output supports it.
        var autoFlash = NSNumber.FromInt32((int)AVCaptureFlashMode.Auto);
        if (_photoOutput.SupportedFlashModes.Contains(autoFlash))
        {
            settings.FlashMode = AVCaptureFlashMode.Auto;
        }

        settings.IsHighResolutionPhotoEnabled = true;

        // Embed a thumbnail using the first available codec (iOS 10+).
        var thumbnailCodecs = settings.AvailableEmbeddedThumbnailPhotoCodecTypes;
        if (UIDevice.CurrentDevice.CheckSystemVersion(10, 0) && thumbnailCodecs.Length > 0)
        {
            settings.EmbeddedThumbnailPhotoFormat = new NSMutableDictionary
            {
                { AVVideo.CodecKey, thumbnailCodecs[0].GetConstant() }
            };
        }

        if (livePhotoMode == LivePhotoMode.On)
        {
            var livePhotosAvailable = _presetConfiguration == SessionPresetConfiguration.LivePhotos
                && _photoOutput.IsLivePhotoCaptureSupported;
            if (livePhotosAvailable)
            {
                settings.LivePhotoMovieFileUrl = NSUrl.CreateFileUrl(new[] { Path.GetTempPath(), $"{Guid.NewGuid()}.mov" });
            }
            else
            {
                Console.WriteLine(
                    "capture session: warning - trying to capture live photo but it's not supported by current configuration, capturing regular photo instead");
            }
        }

        // Use a separate object for the photo capture delegate to isolate each capture life cycle.
        var captureDelegate = new PhotoCaptureDelegate(settings,
            () => WillCapturePhotoAnimationAction(settings),
            CapturingLivePhotoAction,
            CapturingCompletedAction)
        {
            ShouldSavePhotoToLibrary = saveToPhotoLibrary
        };

        _photoOutput.CapturePhoto(settings, captureDelegate);
    });
}
/// <summary>
/// Stores the current completion prefix, then refreshes the suggestion list
/// on a high-priority global queue so the UI thread stays responsive.
/// </summary>
public override void CompletionForString(TKAutoCompleteTextView autocomplete, NSString input)
{
    prefix = input;
    var backgroundQueue = DispatchQueue.GetGlobalQueue(DispatchQueuePriority.High);
    backgroundQueue.DispatchAsync(() => ReloadData(autocomplete));
}
/// <summary>
/// Raises <c>FlagsChanged</c> on the dedicated queue rather than on the
/// reachability notifier's thread.
/// </summary>
private void NotifyFlagsChanged(NetworkReachabilityFlags flags)
{
    _queue.DispatchAsync(() => FlagsChanged(flags));
}
/// <summary>
/// Completion handler for loading newer posts: on success, records the newest
/// server record, inserts the posts into the table (newest first), and stops
/// the refresh spinner; retryable errors are retried, others logged.
/// </summary>
void OnLoadNewPostComplted(CKQueryCursor cursor, NSError operationError, List<Post> newPosts, Post lastRecordInOperation, Post retryPost)
{
    switch (HandleError(operationError))
    {
        case Error.Success:
            // Remember the most recent record seen on the server for the next
            // push-triggered fetch.
            if (lastRecordInOperation != null)
                lastPostSeenOnServer = lastRecordInOperation;

            // Ascending order, so inserting each at index 0 leaves newest first.
            newPosts.Sort(PostComparison);

            // Serialize table-array mutation on the update queue, one item at a time.
            foreach (var post in newPosts)
            {
                updateCellArrayQueue.DispatchAsync(() =>
                {
                    PostCells.Insert(0, post);
                    DispatchQueue.MainQueue.DispatchAsync(reloadHandler);
                });
            }
            DispatchQueue.MainQueue.DispatchAsync(RefreshControl.EndRefreshing);
            break;

        case Error.Retry:
            Utils.Retry(() => LoadNewPosts(retryPost), operationError);
            break;

        case Error.Ignore:
            Console.WriteLine("Error: {0}", operationError.Description);
            DispatchQueue.MainQueue.DispatchAsync(RefreshControl.EndRefreshing);
            break;

        default:
            throw new NotImplementedException();
    }
}
/// <summary>
/// Lays out the UI; on a real device starts camera authorization and session
/// setup, on the simulator shows a 16:9 placeholder explaining the camera is
/// unavailable.
/// </summary>
public override void LayoutSubviews()
{
    base.LayoutSubviews();
    SetupUserInterface();

    if (!OnDevice)
    {
        // Simulator: letterbox a 16:9 placeholder in the middle of the view.
        const double aspect = (double)9 / 16;
        var videoHeight = NativeView.Frame.Width * aspect;
        var top = (NativeView.Frame.Height / 2) - (videoHeight / 2);
        liveCameraStream.Frame = new CGRect(0f, top, NativeView.Bounds.Width, videoHeight);
        liveCameraStream.BackgroundColor = UIColor.Clear;
        liveCameraStream.Add(new UILabel(new CGRect(0f, 0f, NativeView.Bounds.Width, 20))
        {
            Text = "The Emulator does not support Camera Usage.",
            TextColor = UIColor.White
        });
        return;
    }

    AuthorizeCameraUse();
    // Session setup runs off the main thread.
    sessionQueue.DispatchAsync(SetupSession);
}
/// <summary>
/// Presents a saved-photos video picker. When a movie is picked, playback
/// state is reset, any in-flight asset read is cancelled, and sample-buffer
/// reading for the new asset starts on the background queue.
/// </summary>
partial void ChooseVideoTapped(UIBarButtonItem sender)
{
    var videoPicker = new UIImagePickerController
    {
        ModalPresentationStyle = UIModalPresentationStyle.CurrentContext,
        SourceType = UIImagePickerControllerSourceType.SavedPhotosAlbum,
        MediaTypes = new string[] { UTType.Movie }
    };

    videoPicker.FinishedPickingMedia += (object s, UIImagePickerMediaPickedEventArgs e) =>
    {
        displayLink.Paused = true;
        playButton.Title = "Play";
        // FIX: `popover` is only created in the iPad branch below; guard the
        // dismissal so a selection on other idioms cannot NullReference.
        popover?.Dismiss(true);
        outputFrames.Clear();
        presentationTimes.Clear();
        lastCallbackTime = 0.0;
        var asset = AVAsset.FromUrl(e.MediaUrl);
        if (assetReader != null && assetReader.Status == AVAssetReaderStatus.Reading)
        {
            // Unblock the reader before cancelling so CancelReading can proceed.
            bufferSemaphore.Release();
            assetReader.CancelReading();
        }
        backgroundQueue.DispatchAsync(() => ReadSampleBuffers(asset));
    };
    videoPicker.Canceled += (object s, EventArgs e) => DismissViewController(true, null);

    // NOTE(review): the picker is only ever presented on iPad (via popover);
    // other idioms currently get no presentation — confirm this is intentional.
    if (UIDevice.CurrentDevice.UserInterfaceIdiom == UIUserInterfaceIdiom.Pad)
    {
        popover = new UIPopoverController(videoPicker);
        popover.PresentFromBarButtonItem(sender, UIPopoverArrowDirection.Down, true);
    }
}
/// <summary>
/// Initial setup for the barcode scanner screen: disables the UI until the
/// session runs, wires the preview view, resolves camera authorization, and
/// dispatches session configuration to the session queue.
/// </summary>
public override void ViewDidLoad()
{
    base.ViewDidLoad();

    // The UI stays disabled until the session is confirmed running.
    MetadataObjectTypesButton.Enabled = false;
    SessionPresetsButton.Enabled = false;
    CameraButton.Enabled = false;
    ZoomSlider.Enabled = false;

    // Tap recognizer that opens a scanned barcode URL inside the region of interest.
    PreviewView.AddGestureRecognizer(OpenBarcodeURLGestureRecognizer);

    // Hook the preview layer up to our capture session.
    PreviewView.Session = session;

    // Video access is required (audio is optional). If undetermined, suspend
    // the session queue so setup waits for the user's answer.
    var authorization = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
    if (authorization == AVAuthorizationStatus.NotDetermined)
    {
        sessionQueue.Suspend();
        AVCaptureDevice.RequestAccessForMediaType(AVMediaType.Video, granted =>
        {
            if (!granted)
            {
                setupResult = SessionSetupResult.NotAuthorized;
            }
            sessionQueue.Resume();
        });
    }
    else if (authorization != AVAuthorizationStatus.Authorized)
    {
        // Previously denied or restricted.
        setupResult = SessionSetupResult.NotAuthorized;
    }

    // Configure off the main queue: StartRunning blocks, and the session must
    // only be mutated from a single serial queue.
    sessionQueue.DispatchAsync(ConfigureSession);
}
/// <summary>
/// Wraps a CloudKit record in an <c>Image</c> and appends it to
/// <c>imageRecords</c> on the serial update queue, reloading the UI on the
/// main thread afterwards.
/// </summary>
public void AddImageFromRecord(CKRecord toAdd)
{
    var fetched = new Image(toAdd);
    // The serial queue guarantees only one mutation of imageRecords at a time.
    updateArrayQueue.DispatchAsync(() =>
    {
        imageRecords.Add(fetched);
        InvokeOnMainThread(ReloadData);
    });
}
/// <summary>
/// Exercises both DispatchAsync overloads (Action and DispatchBlock) on a
/// private queue and verifies the callback actually ran.
/// </summary>
public void DispatchAsync()
{
    TestRuntime.AssertSystemVersion(PlatformName.iOS, 8, 0, throwIfOtherPlatform: false);
    TestRuntime.AssertSystemVersion(PlatformName.MacOSX, 10, 10, throwIfOtherPlatform: false);

    using (var queue = new DispatchQueue("DispatchAsync"))
    {
        var invoked = false;
        Action mark = () => invoked = true;

        // Plain Action overload.
        queue.DispatchAsync(mark);
        TestRuntime.RunAsync(TimeSpan.FromSeconds(5), () => { }, () => invoked);
        Assert.IsTrue(invoked, "Called");

        // DispatchBlock overload; Wait blocks until the block has executed.
        invoked = false;
        using (var block = new DispatchBlock(mark))
        {
            queue.DispatchAsync(block);
            block.Wait(TimeSpan.FromSeconds(5));
        }
        Assert.IsTrue(invoked, "Called DispatchBlock");
    }
}
/// <summary>
/// Cancels pending composition requests: ones still queued observe the flag
/// and finish cancelled; ones already rendering finish with their frame.
/// </summary>
public override void CancelAllPendingVideoCompositionRequests()
{
    shouldCancelAllRequests = true;
    // This block is queued behind any in-flight work, so by the time it runs
    // every pending request has been drained and we can accept new ones.
    renderingQueue.DispatchAsync(() => shouldCancelAllRequests = false);
}
/// <summary>
/// Forwards a reachability change to the native replicator on the dispatch
/// queue, skipping the call if the replicator has already been torn down.
/// </summary>
private void ReachabilityChanged(object sender, NetworkReachabilityChangeEventArgs e)
{
    Debug.Assert(e != null);
    DispatchQueue.DispatchAsync(() =>
    {
        // _repl may have been freed between the event and this block running.
        if (_repl != null)
        {
            Native.c4repl_setHostReachable(_repl, e.Status == NetworkReachabilityStatus.Reachable);
        }
    });
}
/// <summary>
/// Renders a composition request on the rendering queue: finishes it
/// cancelled when a cancel is pending, otherwise renders a pixel buffer and
/// completes with either the frame or the rendering error.
/// </summary>
public override void StartVideoCompositionRequest(AVAsynchronousVideoCompositionRequest asyncVideoCompositionRequest)
{
    renderingQueue.DispatchAsync(() =>
    {
        if (shouldCancelAllRequests)
        {
            asyncVideoCompositionRequest.FinishCancelledRequest();
            return;
        }

        NSError renderError = null;
        var renderedFrame = newRenderedPixelBufferForRequest(asyncVideoCompositionRequest, out renderError);
        if (renderedFrame == null)
        {
            asyncVideoCompositionRequest.FinishWithError(renderError);
            return;
        }

        asyncVideoCompositionRequest.FinishWithComposedVideoFrame(renderedFrame);
        renderedFrame.Dispose();
    });
}
/// <summary>
/// Starts movie recording into a uniquely named temporary .mov file on the
/// session queue. Does nothing when no movie output is configured or a
/// recording is already in progress.
/// </summary>
public void StartVideoRecording(bool shouldSaveVideoToLibrary)
{
    if (_videoFileOutput == null)
    {
        Console.WriteLine("capture session: trying to record a video but no movie file output is set");
        return;
    }

    _sessionQueue.DispatchAsync(() =>
    {
        // Ignore the request while a recording is already running.
        if (_videoFileOutput.Recording)
        {
            Console.WriteLine(
                "capture session: trying to record a video but there is one already being recorded");
            return;
        }

        // Unique temporary destination for this recording.
        var tempFileUrl = NSFileManager.DefaultManager.GetTemporaryDirectory()
            .Append(new NSUuid().AsString(), false)
            .AppendPathExtension("mov");

        var captureDelegate = new VideoCaptureDelegate(
            DidStartCaptureAction,
            finished => DidFinishCaptureAction(finished, tempFileUrl),
            (failed, error) => DidCaptureFail(failed, error, tempFileUrl))
        {
            ShouldSaveVideoToLibrary = shouldSaveVideoToLibrary
        };

        _videoFileOutput.StartRecordingToOutputFile(tempFileUrl, captureDelegate);
        // Keep the delegate alive for the duration of the recording.
        _videoCaptureDelegate = captureDelegate;
    });
}
/// <summary>
/// Schedules the loop file on the player and re-schedules itself from the
/// completion handler (serialized on the state-change queue) while playback
/// is active, producing gapless looping.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when <c>file</c> has not been loaded before scheduling.
/// </exception>
void ScheduleLoop()
{
    // FIX: an unloaded file is a usage/state error; NullReferenceException
    // must never be thrown explicitly (CA2201) — it should only ever come
    // from the runtime on an actual null dereference.
    if (file == null)
    {
        throw new InvalidOperationException("`file` must not be null in 'scheduleLoop'");
    }

    player.ScheduleFile(file, null, () =>
        stateChangeQueue.DispatchAsync(() =>
        {
            // Keep looping only while playback hasn't been stopped.
            if (isPlaying)
            {
                ScheduleLoop();
            }
        })
    );
}
/// <summary>
/// Builds Activity entries for each segment of meaningful motion activity,
/// querying pedometer data per segment. Results are filled in asynchronously;
/// once every query finishes, RecentActivities is published (reversed) and
/// the completion handler invoked. The returned list is the same instance
/// that is populated asynchronously.
/// </summary>
List<Activity> CreateActivityDataWithActivities(CMMotionActivity[] activities, Action completionHandler)
{
    var collected = new List<Activity>();
    var pending = DispatchGroup.Create();
    var serialQueue = new DispatchQueue("resultQueue");

    // Drop stationary, low-confidence, and unsignatured samples before segmenting.
    var usable = activities
        .Where(a => a.HasActivitySignature() && !a.Stationary && a.Confidence != CMMotionActivityConfidence.Low)
        .ToArray<CMMotionActivity>();
    var segments = FindActivitySegments(usable);

    foreach (var segment in segments)
    {
        pending.Enter();
        pedometer.QueryPedometerData(segment.Item1.StartDate, (NSDate)segment.Item2, (pedometerData, error) =>
        {
            // Serialize list additions so concurrent callbacks never race.
            serialQueue.DispatchAsync(() =>
            {
                var entry = new Activity(segment.Item1,
                    ((DateTime)segment.Item1.StartDate).ToLocalTime(),
                    segment.Item2.ToLocalTime(),
                    pedometerData);
                collected.Add(entry);
            });
            if (error != null)
            {
                HandleError(error);
            }
            pending.Leave();
        });
    }

    // After the last Leave, publish the results on the main queue; the
    // DispatchSync ensures all pending Add operations have drained first.
    pending.Notify(DispatchQueue.MainQueue, () =>
    {
        serialQueue.DispatchSync(() =>
        {
            RecentActivities = collected;
            RecentActivities.Reverse();
            completionHandler?.Invoke();
        });
    });
    return collected;
}
/// <summary>
/// Serializes the managed backing data and writes it to
/// <c>NSUserDefaults</c> on the serial access queue, suppressing the local
/// change notification while the write is in flight.
/// </summary>
/// <exception cref="Exception">Wraps any archiving/write failure.</exception>
protected void WriteData()
{
    UserDefaultsAccessQueue.DispatchAsync(() =>
    {
        try
        {
            NSData encodedData = NSKeyedArchiver.ArchivedDataWithRootObject(ManagedDataBackingInstance);
            IgnoreLocalUserDefaultsChanges = true;
            try
            {
                UserDefaults.SetValueForKey(encodedData, (NSString)StorageDescriptor.Key);
            }
            finally
            {
                // FIX: reset in finally — previously a throwing SetValueForKey
                // left the flag stuck true, permanently suppressing local
                // change notifications.
                IgnoreLocalUserDefaultsChanges = false;
            }
            NotifyClientsDataChanged();
        }
        catch (Exception e)
        {
            // FIX: preserve the original failure as the inner exception.
            throw new Exception($"Could not save data. Reason: {e.Message}", e);
        }
    });
}
/// <summary>
/// Copies one camera frame's raw bytes into <c>buffer</c> and dispatches
/// <c>ReadTask</c>. The <c>ready</c> flag drops frames while a previous one
/// is still being processed.
/// </summary>
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    if (ready)
    {
        ready = false;
        CVPixelBuffer cVPixelBuffer = (CVPixelBuffer)sampleBuffer.GetImageBuffer();
        cVPixelBuffer.Lock(CVPixelBufferLock.ReadOnly);
        nint dataSize = cVPixelBuffer.DataSize;
        width = cVPixelBuffer.Width;
        height = cVPixelBuffer.Height;
        IntPtr baseAddress = cVPixelBuffer.BaseAddress;
        bpr = cVPixelBuffer.BytesPerRow;
        // FIX: copy the pixel bytes BEFORE unlocking. BaseAddress is only
        // valid while the buffer is locked; the previous code unlocked first
        // and then read from the (no longer guaranteed) address.
        buffer = NSData.FromBytes(baseAddress, (nuint)dataSize);
        cVPixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
        cVPixelBuffer.Dispose();
        queue.DispatchAsync(ReadTask);
    }
    // The sample buffer must always be disposed, processed or not.
    sampleBuffer.Dispose();
}
/// <summary>
/// Verifies DispatchBlock.Wait semantics: waiting on an undispatched block
/// times out (non-zero), and waiting after dispatch succeeds (zero).
/// </summary>
public void Wait_DispatchTime()
{
    TestRuntime.AssertXcodeVersion(8, 0);

    var invoked = false;
    Action mark = () => invoked = true;

    using (var block = new DispatchBlock(mark))
    using (var queue = new DispatchQueue("Background"))
    {
        queue.Activate();

        // Nothing dispatched yet: the short wait must time out.
        var rv = block.Wait(new DispatchTime(DispatchTime.Now, TimeSpan.FromSeconds(0.1)));
        Assert.AreNotEqual(0, rv, "Timed Out");

        // After dispatch, the wait should complete within 5 seconds.
        queue.DispatchAsync(block);
        rv = block.Wait(new DispatchTime(DispatchTime.Now, TimeSpan.FromSeconds(5)));
        Assert.AreEqual(0, rv, "Timed Out 2");
        Assert.IsTrue(invoked, "Called");
    }
}
// Converts each camera frame into a UIImage (via a CGBitmapContext over the
// pixel buffer) and dispatches ReadTask on the worker queue. The `ready` flag
// drops frames while a previous frame is still being processed.
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    if (ready)
    {
        ready = false;
        pixelBuffer = (CVPixelBuffer)sampleBuffer.GetImageBuffer();
        // Base address is only valid while the buffer is locked.
        pixelBuffer.Lock(CVPixelBufferLock.None);
        width = pixelBuffer.Width;
        height = pixelBuffer.Height;
        bytesPerRow = pixelBuffer.BytesPerRow;
        // Render the raw pixel bytes into a bitmap context and snapshot an image
        // while the buffer is still locked.
        context = new CGBitmapContext(pixelBuffer.BaseAddress, width, height, 8, bytesPerRow, colorSpace, CGImageAlphaInfo.PremultipliedFirst);
        cgImage = context.ToImage();
        uiImage = new UIImage(cgImage);
        pixelBuffer.Unlock(CVPixelBufferLock.None);
        pixelBuffer.Dispose();
        // NOTE(review): the previous `context`/`cgImage`/`uiImage` field values
        // are overwritten without Dispose here — confirm they are released in
        // ReadTask (or elsewhere), otherwise native memory leaks per frame.
        queue.DispatchAsync(ReadTask);
    }
    // The sample buffer must always be disposed, processed or not.
    sampleBuffer.Dispose();
}
/// <summary>
/// Moves a virtual object toward the world position under a screen point.
/// Hit-testing runs on the main queue; the actual position update is then
/// dispatched to the worker queue with the current camera transform.
/// </summary>
public void Translate(VirtualObject vObject, ARSCNView sceneView, CGPoint screenPos, bool instantly, bool infinitePlane)
{
    DispatchQueue.MainQueue.DispatchAsync(() =>
    {
        var result = WorldPositionFromScreenPosition(screenPos, sceneView, vObject.Position, infinitePlane);
        var newPosition = result.Item1;
        if (newPosition == null)
        {
            // FIX: always bail out when no world position was found. Previously
            // the method only returned when a delegate was attached and otherwise
            // fell through to dereference the null position below.
            this.Delegate?.CouldNotPlace(this, vObject);
            return;
        }

        var currentFrame = ViewController.CurrentFrame;
        if (currentFrame == null || currentFrame.Camera == null)
        {
            // No tracked frame to take a camera transform from.
            return;
        }
        var cameraTransform = currentFrame.Camera.Transform;
        queue.DispatchAsync(() => SetPosition(vObject, newPosition.Value, instantly, result.Item3, cameraTransform));
    });
}
/// <summary>
/// Builds the sound-classification pipeline: loads the model, creates the
/// stream analyser with a classify request, taps the microphone input, and
/// starts the audio engine. Buffers are analysed on a dedicated queue.
/// </summary>
private void StartAnalysing()
{
    Model = GetModel();
    AudioEngine = new AVAudioEngine();
    AnalysisQueue = new DispatchQueue("com.r2.SoundAnalysis", false);

    var inputFormat = AudioEngine.InputNode.GetBusInputFormat(0);

    var request = new SNClassifySoundRequest(Model, out var soundRequestError);
    // FIX: these NSError out-parameters were silently discarded, making any
    // setup failure undiagnosable. Surface them in the log.
    if (soundRequestError != null)
        Console.WriteLine("sound analysis: failed to create classify request: {0}", soundRequestError);

    Analyser = new SNAudioStreamAnalyzer(inputFormat);
    Analyser.AddRequest(request, this, out var addRequestError);
    if (addRequestError != null)
        Console.WriteLine("sound analysis: failed to add request: {0}", addRequestError);

    // Hand microphone buffers off to the analyser away from the render thread.
    AudioEngine.InputNode.InstallTapOnBus(
        bus: 0,
        bufferSize: 8192,
        format: inputFormat,
        tapBlock: (buffer, when) =>
            AnalysisQueue.DispatchAsync(() => Analyser.Analyze(buffer, when.SampleTime)));

    AudioEngine.Prepare();
    AudioEngine.StartAndReturnError(out var initEngineError);
    if (initEngineError != null)
        Console.WriteLine("sound analysis: failed to start audio engine: {0}", initEngineError);
}
/// <summary>
/// Requests location permission, starts location updates and beacon scanning,
/// then loads the beacons near the user and begins monitoring and ranging a
/// region for each of them.
/// </summary>
public async Task Start()
{
    if (!await BeaconsUtil.RequestPermissionAsync())
    {
        return;
    }

    _locationMgr.DidRangeBeacons += HandleDidRangeBeacons;
    _locationMgr.DidDetermineState += HandleDidDetermineState;
    _locationMgr.PausesLocationUpdatesAutomatically = false;
    _locationMgr.StartUpdatingLocation();

    beacon_operations_queue.DispatchAsync(StartScanningSynchronized);

    var location = await BeaconsUtil.GetCurrentLocationAsync();
    var ibeacons = await BeaconsService.Instance.LoadBeaconsByUserLocation(
        location.Coordinate.Latitude,
        location.Coordinate.Longitude);

    // Begin monitoring and ranging each nearby beacon region.
    foreach (var ibeacon in ibeacons)
    {
        var region = new CLBeaconRegion(
            new NSUuid(ibeacon.UUID),
            (ushort)ibeacon.Major,
            (ushort)ibeacon.Minor,
            $"{BEACONS_REGION_HEADER}.{ibeacon}")
        {
            NotifyEntryStateOnDisplay = true,
            NotifyOnEntry = true,
            NotifyOnExit = true
        };

        _listOfCLBeaconRegion.Add(region);
        _locationMgr.StartMonitoring(region);
        _locationMgr.StartRangingBeacons(region);
        MvxTrace.TaggedTrace(MvxTraceLevel.Diagnostic, "Beacons", "Start monitoring " + JsonConvert.SerializeObject(ibeacon));
    }
}
// Initial setup for the AVCam screen: resolves camera authorization, then
// configures inputs (video + audio) and outputs (movie + still image) for the
// capture session on the dedicated session queue.
public async override void ViewDidLoad ()
{
	base.ViewDidLoad ();

	// Disable UI. The UI is enabled if and only if the session starts running.
	CameraButton.Enabled = false;
	RecordButton.Enabled = false;
	StillButton.Enabled = false;

	// Create the AVCaptureSession.
	Session = new AVCaptureSession ();

	// Setup the preview view.
	PreviewView.Session = Session;

	// Communicate with the session and other session objects on this queue.
	SessionQueue = new DispatchQueue ("session queue");
	SetupResult = AVCamSetupResult.Success;

	// Check video authorization status. Video access is required and audio access is optional.
	// If audio access is denied, audio is not recorded during movie recording.
	switch (AVCaptureDevice.GetAuthorizationStatus (AVMediaType.Video)) {
	// The user has previously granted access to the camera.
	case AVAuthorizationStatus.Authorized:
		break;
	// The user has not yet been presented with the option to grant video access.
	// Suspend the session queue to delay session setup until the access request has
	// completed, to avoid asking for audio access if video access is denied.
	// Note that audio access will be implicitly requested when we create an
	// AVCaptureDeviceInput for audio during session setup.
	case AVAuthorizationStatus.NotDetermined:
		SessionQueue.Suspend ();
		var granted = await AVCaptureDevice.RequestAccessForMediaTypeAsync (AVMediaType.Video);
		if (!granted)
			SetupResult = AVCamSetupResult.CameraNotAuthorized;
		SessionQueue.Resume ();
		break;
	// The user has previously denied access.
	default:
		SetupResult = AVCamSetupResult.CameraNotAuthorized;
		break;
	}

	// Setup the capture session off the main queue: mutating an AVCaptureSession (or its
	// inputs/outputs/connections) from multiple threads at once is unsafe, and
	// AVCaptureSession.StartRunning is a blocking call that can take a long time, so
	// doing this on the session queue keeps the main queue (and the UI) responsive.
	SessionQueue.DispatchAsync (() => {
		if (SetupResult != AVCamSetupResult.Success)
			return;

		backgroundRecordingID = -1;
		NSError error;
		AVCaptureDevice videoDevice = CreateDevice (AVMediaType.Video, AVCaptureDevicePosition.Back);
		AVCaptureDeviceInput videoDeviceInput = AVCaptureDeviceInput.FromDevice (videoDevice, out error);
		if (videoDeviceInput == null)
			Console.WriteLine ("Could not create video device input: {0}", error);

		Session.BeginConfiguration ();
		if (Session.CanAddInput (videoDeviceInput)) {
			Session.AddInput (VideoDeviceInput = videoDeviceInput);
			DispatchQueue.MainQueue.DispatchAsync (() => {
				// Dispatched to the main queue because AVCaptureVideoPreviewLayer is the
				// backing layer for PreviewView and UIView can only be manipulated on the
				// main thread. (Exception to the rule above: video orientation changes on
				// the preview layer's connection need not be serialized with other session
				// manipulation.)
				// Use the status bar orientation as the initial video orientation; later
				// changes are handled by ViewWillTransitionToSize.
				UIInterfaceOrientation statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation;
				AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
				if (statusBarOrientation != UIInterfaceOrientation.Unknown)
					initialVideoOrientation = (AVCaptureVideoOrientation)(long)statusBarOrientation;
				var previewLayer = (AVCaptureVideoPreviewLayer)PreviewView.Layer;
				previewLayer.Connection.VideoOrientation = initialVideoOrientation;
			});
		} else {
			Console.WriteLine ("Could not add video device input to the session");
			SetupResult = AVCamSetupResult.SessionConfigurationFailed;
		}

		// Audio input (optional; recording proceeds without it on failure).
		AVCaptureDevice audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio);
		AVCaptureDeviceInput audioDeviceInput = AVCaptureDeviceInput.FromDevice (audioDevice, out error);
		if (audioDeviceInput == null)
			Console.WriteLine ("Could not create audio device input: {0}", error);
		if (Session.CanAddInput (audioDeviceInput))
			Session.AddInput (audioDeviceInput);
		else
			Console.WriteLine ("Could not add audio device input to the session");

		// Movie file output, with automatic video stabilization when supported.
		var movieFileOutput = new AVCaptureMovieFileOutput ();
		if (Session.CanAddOutput (movieFileOutput)) {
			Session.AddOutput (MovieFileOutput = movieFileOutput);
			AVCaptureConnection connection = movieFileOutput.ConnectionFromMediaType (AVMediaType.Video);
			if (connection.SupportsVideoStabilization)
				connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
		} else {
			Console.WriteLine ("Could not add movie file output to the session");
			SetupResult = AVCamSetupResult.SessionConfigurationFailed;
		}

		// Still image output using JPEG compression.
		var stillImageOutput = new AVCaptureStillImageOutput ();
		if (Session.CanAddOutput (stillImageOutput)) {
			stillImageOutput.CompressedVideoSetting = new AVVideoSettingsCompressed {
				Codec = AVVideoCodec.JPEG
			};
			Session.AddOutput (StillImageOutput = stillImageOutput);
		} else {
			Console.WriteLine ("Could not add still image output to the session");
			SetupResult = AVCamSetupResult.SessionConfigurationFailed;
		}

		Session.CommitConfiguration ();
	});
}
// Synthesizes `sampleLength` frames of an FM (frequency-modulation) tone and
// schedules them on PlayerNode, notifying listeners which color the tone maps
// to. Work is serialized on AudioQueue; Semaphore throttles reuse of the
// rotating AudioBuffers (released in the ScheduleBuffer completion handler).
unsafe void PlayToneForColor(float carrierFrequency, float modulatorFrequency, uint sampleLength, float pan, NSColor color)
{
	const float modulatorAmplitude = .8f;
	// Phase advance (radians) per sample for a 1 Hz tone at the current sample rate.
	var unitVelocity = 2 * Math.PI / AudioFormat.SampleRate;
	var carrierVelocity = carrierFrequency * unitVelocity;
	var modulatorVelocity = modulatorFrequency * unitVelocity;
	AudioQueue.DispatchAsync(() => {
		var sampleTime = 0f;
		// Wait for a free buffer — unless a stop was requested, in which case bail out.
		if (!ForceStop) {
			Semaphore.Wait();
		} else {
			return;
		}
		var outChannels = AudioFormat.ChannelCount;
		var outDataPointers = new float *[outChannels];
		var buffer = AudioBuffers[BufferIndex];
		for (int i = 0; i < outChannels; i++) {
			// buffer.FloatChannelData is a native array of pointers to audio data.
			// convert that into a managed array of pointers to audio data.
			outDataPointers[i] = (float *)Marshal.ReadIntPtr(buffer.FloatChannelData, i * IntPtr.Size);
		}
		// NOTE(review): indexing [0] and [1] assumes at least two output channels
		// (stereo) — confirm AudioFormat is always configured that way.
		var leftChannel = outDataPointers[0];
		var rightChannel = outDataPointers[1];
		for (int sampleIndex = 0; sampleIndex < sampleLength; sampleIndex++) {
			// FM synthesis: the modulator perturbs the carrier's phase each sample.
			var sample = (float)Math.Sin(carrierVelocity * sampleTime + modulatorAmplitude * Math.Sin(modulatorVelocity * sampleTime));
			// Same signal on both channels; pan is applied by the player node below.
			leftChannel[sampleIndex] = sample;
			rightChannel[sampleIndex] = sample;
			sampleTime++;
		}
		buffer.FrameLength = sampleLength;
		PlayerNode.Pan = pan;
		NotePlayedForColor?.Invoke(color);
		// Return the buffer to the pool once the engine has consumed it.
		PlayerNode.ScheduleBuffer(buffer, () => {
			Semaphore.Release();
		});
		BufferIndex = (BufferIndex + 1) % AudioBuffers.Count;
	});
}
/// <summary>
/// Builds an Activity per segment of meaningful motion, attaching pedometer
/// data queried per segment. The returned list is populated asynchronously;
/// when all queries complete, RecentActivities is published (reversed) and
/// the completion handler is invoked on the main queue.
/// </summary>
List<Activity> CreateActivityDataWithActivities (CMMotionActivity[] activities, Action completionHandler)
{
	var output = new List<Activity> ();
	var workGroup = DispatchGroup.Create ();
	var resultsQueue = new DispatchQueue ("resultQueue");

	// Keep only confident, non-stationary samples that carry an activity signature.
	var filtered = activities
		.Where (sample => sample.HasActivitySignature () && !sample.Stationary && sample.Confidence != CMMotionActivityConfidence.Low)
		.ToArray<CMMotionActivity> ();
	var segments = FindActivitySegments (filtered);

	foreach (var segment in segments) {
		workGroup.Enter ();
		pedometer.QueryPedometerData (segment.Item1.StartDate, (NSDate)segment.Item2, (pedometerData, error) => {
			// Funnel all additions through one serial queue to avoid races.
			resultsQueue.DispatchAsync (() => {
				var entry = new Activity (segment.Item1,
					((DateTime)segment.Item1.StartDate).ToLocalTime (),
					segment.Item2.ToLocalTime (),
					pedometerData);
				output.Add (entry);
			});
			if (error != null)
				HandleError (error);
			workGroup.Leave ();
		});
	}

	// Fires on the main queue after the final Leave; the DispatchSync drains
	// any still-queued Add operations before publishing.
	workGroup.Notify (DispatchQueue.MainQueue, () => {
		resultsQueue.DispatchSync (() => {
			RecentActivities = output;
			RecentActivities.Reverse ();
			completionHandler?.Invoke ();
		});
	});
	return output;
}
/// <summary>
/// Kicks off capture-session configuration asynchronously on the dedicated
/// session queue so the caller is never blocked.
/// </summary>
public void Initalize()
{
    // Method group in place of a wrapping lambda; behavior is identical.
    sessionQueue.DispatchAsync(ConfigureSession);
}
/// <summary>
/// Queues <paramref name="action"/> for asynchronous execution on the
/// dispatch queue; always reports that the dispatch was accepted.
/// </summary>
bool DispatchImplementation(Action action)
{
    _dispatchQueue.DispatchAsync(delegate
    {
        action();
    });
    return true;
}
/// <summary>
/// Initial setup for the manual-camera screen: disables controls and hides
/// the HUD panels until the session runs, creates the capture session and
/// its serial queue, checks authorization, and dispatches configuration.
/// </summary>
public override void ViewDidLoad ()
{
	base.ViewDidLoad ();

	// Every control stays disabled until the session is confirmed running.
	CameraButton.Enabled = false;
	RecordButton.Enabled = false;
	PhotoButton.Enabled = false;
	CaptureModeControl.Enabled = false;
	HUDButton.Enabled = false;

	// All manual-control HUD panels start hidden.
	ManualHUD.Hidden = true;
	ManualHUDPhotoView.Hidden = true;
	ManualHUDFocusView.Hidden = true;
	ManualHUDExposureView.Hidden = true;
	ManualHUDWhiteBalanceView.Hidden = true;
	ManualHUDLensStabilizationView.Hidden = true;

	// Create the session and attach it to the preview view.
	Session = new AVCaptureSession ();
	PreviewView.Session = Session;

	// Serial queue that owns all session mutation; StartRunning blocks, so the
	// session is never touched from the main queue.
	sessionQueue = new DispatchQueue ("session queue");
	setupResult = SetupResult.Success;

	// Video access is required, audio optional (movies record without audio if denied).
	CheckDeviceAuthorizationStatus ();

	// Configure on the session queue to keep the UI responsive.
	sessionQueue.DispatchAsync (ConfigureSession);
}
/// <summary>
/// Exercises every DispatchBlock.Create overload: null callbacks throw
/// ArgumentNullException, invalid flags/QoS/priority throw, and valid blocks
/// (including one derived via db.Create) execute when dispatched.
/// </summary>
public void Create()
{
    TestRuntime.AssertXcodeVersion(8, 0);

    var called = false;
    var callback = new Action(() => called = true);
    DispatchBlockFlags flags;

    // Null callbacks are rejected up front.
    Assert.Throws<ArgumentNullException>(() => DispatchBlock.Create(null), "ANE 1");
    Assert.Throws<ArgumentNullException>(() => DispatchBlock.Create(null, DispatchBlockFlags.AssignCurrent), "ANE 2");
    Assert.Throws<ArgumentNullException>(() => DispatchBlock.Create((Action)null, DispatchBlockFlags.AssignCurrent, DispatchQualityOfService.Background, 2), "ANE 3");
    Assert.Throws<ArgumentNullException>(() => DispatchBlock.Create((DispatchBlock)null, DispatchBlockFlags.AssignCurrent, DispatchQualityOfService.Background, 2), "ANE 4");

    // Invalid input results in NULL from the native API and an exception.
    Assert.Throws<Exception>(() => DispatchBlock.Create(callback, (DispatchBlockFlags)12345678), "E 1");
    Assert.Throws<Exception>(() => DispatchBlock.Create(callback, (DispatchBlockFlags)12345678, DispatchQualityOfService.UserInteractive, 0), "E 2");
    Assert.Throws<Exception>(() => DispatchBlock.Create(callback, DispatchBlockFlags.None, (DispatchQualityOfService)12345678, 0), "E 3");
    Assert.Throws<Exception>(() => DispatchBlock.Create(callback, DispatchBlockFlags.None, DispatchQualityOfService.Default, 12345678), "E 4");

    // Dispatch `block` on a fresh activated queue and assert it ran within 5 s.
    void DispatchAndVerify(DispatchBlock block, string suffix)
    {
        using (var queue = new DispatchQueue("Background"))
        {
            queue.Activate();
            queue.DispatchAsync(block);
            var rv = block.Wait(new DispatchTime(DispatchTime.Now, TimeSpan.FromSeconds(5)));
            Assert.AreEqual(0, rv, "Timed Out " + suffix);
            Assert.IsTrue(called, "Called " + suffix);
        }
    }

    called = false;
    using (var db = DispatchBlock.Create(callback))
        DispatchAndVerify(db, "A");

    called = false;
    flags = DispatchBlockFlags.None;
    using (var db = DispatchBlock.Create(callback, flags))
        DispatchAndVerify(db, flags.ToString());

    called = false;
    flags = DispatchBlockFlags.AssignCurrent;
    using (var db = DispatchBlock.Create(callback, flags))
        DispatchAndVerify(db, flags.ToString());

    called = false;
    flags = DispatchBlockFlags.Detached;
    using (var db = DispatchBlock.Create(callback, flags))
        DispatchAndVerify(db, flags.ToString());

    called = false;
    flags = DispatchBlockFlags.Detached;
    using (var db = DispatchBlock.Create(callback, flags, DispatchQualityOfService.Background, -8))
        DispatchAndVerify(db, "Background 8" + flags);

    // A derived block (db.Create) must also execute.
    called = false;
    flags = DispatchBlockFlags.Detached;
    using (var db = DispatchBlock.Create(callback, flags, DispatchQualityOfService.Background, -8))
    using (var db2 = db.Create(DispatchBlockFlags.EnforceQosClass, DispatchQualityOfService.Unspecified, -7))
        DispatchAndVerify(db2, "Background DB" + flags);
}