void UpdateDeviceFocus(AVCaptureFocusMode focusMode, AVCaptureExposureMode exposureMode, CGPoint point, bool monitorSubjectAreaChange)
{
	// Applies a focus/exposure point of interest to the current capture device on the session queue.
	SessionQueue.DispatchAsync(() => {
		if (VideoDeviceInput == null)
			return;

		AVCaptureDevice captureDevice = VideoDeviceInput.Device;
		NSError lockError;
		if (!captureDevice.LockForConfiguration(out lockError)) {
			Console.WriteLine("Could not lock device for configuration: {0}", lockError);
			return;
		}

		// Setting the point of interest alone does not start a focus/exposure pass;
		// the corresponding mode must be (re)assigned to apply the new point.
		if (captureDevice.FocusPointOfInterestSupported && captureDevice.IsFocusModeSupported(focusMode)) {
			captureDevice.FocusPointOfInterest = point;
			captureDevice.FocusMode = focusMode;
		}

		if (captureDevice.ExposurePointOfInterestSupported && captureDevice.IsExposureModeSupported(exposureMode)) {
			captureDevice.ExposurePointOfInterest = point;
			captureDevice.ExposureMode = exposureMode;
		}

		captureDevice.SubjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange;
		captureDevice.UnlockForConfiguration();
	});
}
void ChangeCamera(CameraViewController sender)
{
	// Switches between the front and back camera. Controls are disabled for the
	// duration of the reconfiguration and re-enabled on the main queue afterwards.
	CameraButton.Enabled = false;
	RecordButton.Enabled = false;
	StillButton.Enabled = false;

	SessionQueue.DispatchAsync(() => {
		AVCaptureDevice activeDevice = VideoDeviceInput.Device;
		AVCaptureDevicePosition targetPosition = AVCaptureDevicePosition.Unspecified;

		// Toggle position; an unspecified camera switches to the back one.
		switch (activeDevice.Position) {
		case AVCaptureDevicePosition.Unspecified:
		case AVCaptureDevicePosition.Front:
			targetPosition = AVCaptureDevicePosition.Back;
			break;
		case AVCaptureDevicePosition.Back:
			targetPosition = AVCaptureDevicePosition.Front;
			break;
		}

		AVCaptureDevice replacementDevice = CreateDevice(AVMediaType.Video, targetPosition);
		AVCaptureDeviceInput replacementInput = AVCaptureDeviceInput.FromDevice(replacementDevice);

		Session.BeginConfiguration();
		// Remove the existing device input first; using front and back cameras
		// simultaneously is not supported.
		Session.RemoveInput(VideoDeviceInput);

		if (Session.CanAddInput(replacementInput)) {
			subjectSubscriber.Dispose();
			SetFlashModeForDevice(AVCaptureFlashMode.Auto, replacementDevice);
			subjectSubscriber = NSNotificationCenter.DefaultCenter.AddObserver(AVCaptureDevice.SubjectAreaDidChangeNotification, SubjectAreaDidChange, replacementDevice);
			Session.AddInput(replacementInput);
			VideoDeviceInput = replacementInput;
		} else {
			// The new camera could not be attached; restore the previous input.
			Session.AddInput(VideoDeviceInput);
		}

		AVCaptureConnection videoConnection = MovieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
		if (videoConnection.SupportsVideoStabilization)
			videoConnection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
		Session.CommitConfiguration();

		DispatchQueue.MainQueue.DispatchAsync(() => {
			CameraButton.Enabled = true;
			RecordButton.Enabled = true;
			StillButton.Enabled = true;
		});
	});
}
void ResumeInterruptedSession(CameraViewController sender)
{
	SessionQueue.DispatchAsync(() => {
		// The session might fail to start running (e.g. a phone or FaceTime call still
		// owns audio or video). Such a failure is reported via a session runtime error
		// notification; we only retry there when not already trying to resume, which
		// avoids repeated failed restarts.
		Session.StartRunning();
		SessionRunning = Session.Running;

		if (Session.Running) {
			// Resumed successfully: hide the resume affordance.
			DispatchQueue.MainQueue.DispatchAsync(() => {
				ResumeButton.Hidden = true;
			});
		} else {
			DispatchQueue.MainQueue.DispatchAsync(() => {
				const string message = "Unable to resume";
				UIAlertController alert = UIAlertController.Create("AVCam", message, UIAlertControllerStyle.Alert);
				alert.AddAction(UIAlertAction.Create("OK", UIAlertActionStyle.Cancel, null));
				PresentViewController(alert, true, null);
			});
		}
	});
}
void SessionRuntimeError(NSNotification notification)
{
	// Handles AVCaptureSession runtime errors. If media services were reset and the
	// session was running before, it is restarted automatically; otherwise the user
	// is offered the Resume button.
	var error = (NSError)notification.UserInfo [AVCaptureSession.ErrorKey];
	Console.WriteLine("Capture session runtime error: {0}", error);

	if (error.Code == (int)AVError.MediaServicesWereReset) {
		SessionQueue.DispatchAsync(() => {
			if (SessionRunning) {
				Session.StartRunning();
				SessionRunning = Session.Running;
			} else {
				DispatchQueue.MainQueue.DispatchAsync(() => {
					ResumeButton.Hidden = false;
				});
			}
		});
	} else {
		// Fix: runtime-error notifications can be delivered on an arbitrary thread, so
		// hop to the main queue before touching UIKit state (matching the branch above,
		// which already dispatched; this branch previously mutated the button directly).
		DispatchQueue.MainQueue.DispatchAsync(() => {
			ResumeButton.Hidden = false;
		});
	}
}
static void Main(string[] args)
{
	// Interactive test harness: C = send patient check-in FHIR messages,
	// P = fetch the pending patient queue, M = MongoDB round-trip tests,
	// F = fingerprint identity web service test, Q = quit.
	string commandLine = "";
	Console.WriteLine("Enter C for checkin patient, P for pending patient queue, M for Mongo tests, F for fingerprint identity and Q to quit");
	while (commandLine != "q")
	{
		if (commandLine == "c")
		{
			// call PatientCheckinUri
			Console.WriteLine("Sending test patient FHIR message.");
			Patient testPt = TestPatient();
			SendJSON(testPt);
			Console.WriteLine("Sending FHIR message from file.");
			Patient readPt = ReadPatient(@"C:\JSONTest\sample-new-patient.json");
			SendJSON(readPt);
		}
		else if (commandLine == "p") //send profiles
		{
			// call PendingPatientsUri
			IList<PatientProfile> patientProfiles = GetCheckinList();
			Console.WriteLine("Patient profiles received.");
		}
		else if (commandLine == "m") // MongoDB tests
		{
			MongoDBWrapper dbwrapper = new MongoDBWrapper(NoIDMongoDBAddress, SparkMongoDBAddress);
			SessionQueue seq = new SessionQueue();
			seq._id = Guid.NewGuid().ToString();
			seq.ClinicArea = "Test Clinic";
			seq.LocalReference = "123456";
			seq.SparkReference = "spark5";
			seq.ApprovalStatus = "pending";
			seq.PatientStatus = "new";
			seq.RemoteHubReference = "rem440403";
			seq.SessionComputerName = "Prototype Computer 1";
			seq.SubmitDate = DateTime.UtcNow.AddMinutes(-15);
			seq.PatientBeginDate = DateTime.UtcNow.AddMinutes(-19);
			Console.WriteLine(seq.Serialize());
			dbwrapper.AddPendingPatient(seq);
			List<SessionQueue> PendingPatients = dbwrapper.GetPendingPatients();
			dbwrapper.UpdateSessionQueueRecord(seq._id, "approved", "TestUser", "TestComputer");
		}
		else if (commandLine == "f") // test fingerprint identity web service
		{
			Media readMedia = ReadMedia(@"C:\JSONTest\sample-media-fhir-message.json");
			SendJSON(readMedia);
		}

		string previousCommand = commandLine;
		commandLine = Console.ReadLine();
		if (commandLine == null)
		{
			// Fix: Console.ReadLine returns null at end-of-input; the original then
			// crashed with a NullReferenceException on the Length check below.
			// Treat EOF as a request to quit.
			break;
		}
		if (commandLine.Length > 0)
		{
			// Only the first character (case-insensitive) selects the command.
			commandLine = commandLine.ToLower().Substring(0, 1);
		}
		else
		{
			// A blank entry repeats the previous command.
			commandLine = previousCommand;
		}
	}
}
public override void ViewDidDisappear(bool animated)
{
	// Tear down the capture session on its own queue when leaving the screen,
	// but only if setup completed successfully.
	SessionQueue.DispatchAsync(() => {
		if (SetupResult != AVCamSetupResult.Success)
			return;
		Session.StopRunning();
		RemoveObservers();
	});
	base.ViewDidDisappear(animated);
}
public override void ViewWillAppear(bool animated)
{
	base.ViewWillAppear(animated);

	// Start the session (or surface a setup failure to the user) on the session queue.
	SessionQueue.DispatchAsync(() => {
		switch (SetupResult) {
		case AVCamSetupResult.Success:
			// Only set up observers and start the session running if setup succeeded.
			AddObservers();
			Session.StartRunning();
			SessionRunning = Session.Running;
			break;

		case AVCamSetupResult.CameraNotAuthorized:
			DispatchQueue.MainQueue.DispatchAsync(() => {
				string message = "AVCam doesn't have permission to use the camera, please change privacy settings";
				UIAlertController alert = UIAlertController.Create("AVCam", message, UIAlertControllerStyle.Alert);
				alert.AddAction(UIAlertAction.Create("OK", UIAlertActionStyle.Cancel, null));
				// Provide quick access to Settings.
				UIAlertAction settingsAction = UIAlertAction.Create("Settings", UIAlertActionStyle.Default, action => {
					UIApplication.SharedApplication.OpenUrl(new NSUrl(UIApplication.OpenSettingsUrlString));
				});
				alert.AddAction(settingsAction);
				PresentViewController(alert, true, null);
			});
			break;

		case AVCamSetupResult.SessionConfigurationFailed:
			DispatchQueue.MainQueue.DispatchAsync(() => {
				string message = "Unable to capture media";
				UIAlertController alert = UIAlertController.Create("AVCam", message, UIAlertControllerStyle.Alert);
				alert.AddAction(UIAlertAction.Create("OK", UIAlertActionStyle.Cancel, null));
				PresentViewController(alert, true, null);
			});
			break;
		}
	});
}
public static SessionQueue PatientToSessionQueue(Patient pt, string sparkReference, string localNoID, string status, string approval)
{
	// Builds a pending SessionQueue entry from a Patient FHIR message.
	// The queue id comes from the patient's "session" identifier when present
	// (the last matching identifier wins); otherwise the Spark server reference
	// is used. Returns null when pt is null.
	//
	// Fixes: the original wrapped the body in try { ... } catch (Exception ex) { throw ex; },
	// which only rethrows and resets the stack trace — exceptions now propagate naturally.
	// The former separate null and empty checks on seq._id are merged.
	SessionQueue seq = null;
	if (pt != null)
	{
		seq = new SessionQueue();
		if (pt.Identifier.Count > 0)
		{
			foreach (Identifier id in pt.Identifier)
			{
				if (id.System.ToString().ToLower().Contains("session"))
				{
					seq._id = id.Value.ToString();
				}
			}
		}
		// Fall back to the Spark reference when no session identifier was found.
		if (string.IsNullOrEmpty(seq._id))
		{
			seq._id = sparkReference;
		}
		seq.LocalReference = localNoID;
		seq.SparkReference = sparkReference;
		seq.PatientStatus = status;       // e.g. "new"
		seq.ApprovalStatus = approval;    // e.g. "pending"
	}
	return seq;
}
void ToggleMovieRecording(CameraViewController sender)
{
	// Disable the Camera button until recording finishes, and disable the Record
	// button until recording starts or finishes.
	CameraButton.Enabled = false;
	RecordButton.Enabled = false;

	SessionQueue.DispatchAsync(() => {
		if (MovieFileOutput.Recording) {
			MovieFileOutput.StopRecording();
			return;
		}

		if (UIDevice.CurrentDevice.IsMultitaskingSupported) {
			// Request background execution time so the
			// IAVCaptureFileOutputRecordingDelegate.FinishedRecording callback can
			// still fire — and the movie can be written to the photo library — if
			// AVCam is backgrounded mid-recording. The matching
			// UIApplication.SharedApplication.EndBackgroundTask call happens in
			// FinishedRecording once the recorded file has been saved.
			backgroundRecordingID = UIApplication.SharedApplication.BeginBackgroundTask(null);
		}

		// Align the movie file output connection's orientation with the preview
		// before recording starts.
		AVCaptureConnection videoConnection = MovieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
		var preview = (AVCaptureVideoPreviewLayer)PreviewView.Layer;
		videoConnection.VideoOrientation = preview.Connection.VideoOrientation;

		// Turn OFF flash for video recording.
		SetFlashModeForDevice(AVCaptureFlashMode.Off, VideoDeviceInput.Device);

		// Start recording to a temporary file.
		MovieFileOutput.StartRecordingToOutputFile(new NSUrl(GetTmpFilePath("mov"), false), this);
	});
}
public void ProcessRequest(HttpContext context)
{
	// Enrolls a new pending patient: parses the inbound Patient FHIR message, stores
	// it in the Spark FHIR server, saves fingerprint templates (or the alternate Q&A
	// answers when no fingerprints were provided) and queues the patient for approval
	// in MongoDB. Writes a status string to the HTTP response.
	//
	// Fix: the original unconditionally assigned _responseText = "Successful." at the
	// end, clobbering the "Error adding a fingerprint..." and "Critical Error!..."
	// messages set earlier. "Successful." is now only written when no error occurred.
	try
	{
		bool biometricsSaved = false;
		bool errorOccurred = false;
		string missingReason = "";
		string question1 = "";
		string question2 = "";
		string answer1 = "";
		string answer2 = "";

		if (uint.TryParse(MinimumAcceptedMatchScore, out _minimumAcceptedMatchScore) == false)
		{
			_minimumAcceptedMatchScore = 30; // default threshold when the configured value is unparsable
		}

		Stream httpStream = context.Request.InputStream;
		StreamReader httpStreamReader = new StreamReader(httpStream);
		Resource newResource = FHIRUtilities.StreamToFHIR(httpStreamReader);
		_patient = (Patient)newResource;

		//TODO: make sure this FHIR message has a new pending status.
		//TODO: make this an atomic transaction.
		// delete the FHIR message from Spark if there is an error in the minutia.
		Patient ptSaved = (Patient)SendPatientToSparkServer();
		if (ptSaved == null)
		{
			_responseText = "Error sending Patient FHIR message to the Spark FHIR endpoint. " + ExceptionString;
			// NOTE(review): returns without writing the response body — confirm callers expect that.
			return;
		}

		SourceAFIS.Templates.NoID noID = new SourceAFIS.Templates.NoID();
		noID.SessionID = ptSaved.Id.ToString();
		//TODO: Add Argon2d hash here:
		noID.LocalNoID = "noid://" + DomainName + "/" + StringUtilities.SHA256(DomainName + noID.SessionID + NodeSalt);

		SessionQueue seq = Utilities.PatientToSessionQueue(_patient, ptSaved.Id.ToString(), noID.LocalNoID, "new", "pending");
		seq.SubmitDate = DateTime.UtcNow;
		//TODO: send to selected match hub and get the remote hub ID.
		// Hub ID in the same format: noid://domain/LocalID

		if (_patient.Photo.Count > 0)
		{
			// Fingerprint pathway: each attachment carries a Media FHIR message with minutias.
			dbMinutia = new FingerPrintMatchDatabase(DatabaseDirectory, BackupDatabaseDirectory, _minimumAcceptedMatchScore);
			foreach (var minutia in _patient.Photo)
			{
				byte[] byteMinutias = minutia.Data;
				Stream stream = new MemoryStream(byteMinutias);
				Media media = (Media)FHIRUtilities.StreamToFHIR(new StreamReader(stream));
				// Save minutias for matching.
				Template fingerprintTemplate = ConvertFHIR.FHIRToTemplate(media);
				fingerprintTemplate.NoID = noID;
				try
				{
					dbMinutia.LateralityCode = (FHIRUtilities.LateralitySnoMedCode)fingerprintTemplate.NoID.LateralitySnoMedCode;
					dbMinutia.CaptureSite = (FHIRUtilities.CaptureSiteSnoMedCode)fingerprintTemplate.NoID.CaptureSiteSnoMedCode;
				}
				catch { } // best effort: codes keep their previous values if the template lacks them
				if (dbMinutia.AddTemplate(fingerprintTemplate) == false)
				{
					_responseText = "Error adding a fingerprint to the match database.";
					errorOccurred = true;
				}
			}
			dbMinutia.Dispose();
			biometricsSaved = true;
		}
		else
		{
			// Alternate pathway: no fingerprints, so look for the Q&A extension on a
			// "biometric" identifier and accept enrollment when all five fields are present.
			foreach (var id in _patient.Identifier)
			{
				if (id.System.ToLower().Contains("biometric") == true)
				{
					Extension extExceptionQA = id.Extension[0];
					foreach (var ext in extExceptionQA.Extension)
					{
						if (ext.Url.ToLower().Contains("reason") == true)
						{
							missingReason = ext.Value.ToString();
						}
						else if (ext.Url.ToLower().Contains("question 1") == true)
						{
							question1 = ext.Value.ToString();
						}
						else if (ext.Url.ToLower().Contains("answer 1") == true)
						{
							answer1 = ext.Value.ToString();
						}
						else if (ext.Url.ToLower().Contains("question 2") == true)
						{
							question2 = ext.Value.ToString();
						}
						else if (ext.Url.ToLower().Contains("answer 2") == true)
						{
							answer2 = ext.Value.ToString();
						}
					}
					if (missingReason.Length > 0 && question1.Length > 0 && answer1.Length > 0 && question2.Length > 0 && answer2.Length > 0)
					{
						// Map the stated reason to a patient status flag.
						if (missingReason != "I am permanently physically unable to provide fingerprints")
						{
							if (missingReason == "I am temporarily physically unable to provide fingerprints")
							{
								seq.PatientStatus = "hold**";
							}
							else if (missingReason == "I attempted the fingerprint scan process, but I could not get a successful scan on either hand")
							{
								seq.PatientStatus = "hold";
							}
						}
						else
						{
							seq.PatientStatus = "new***";
						}
						biometricsSaved = true;
					}
				}
			}
			// log patient in alternatesearch container
		}

		if (biometricsSaved)
		{
			MongoDBWrapper dbwrapper = new MongoDBWrapper(NoIDMongoDBAddress, SparkMongoDBAddress);
			dbwrapper.AddPendingPatient(seq);
		}
		else
		{
			_responseText = "Critical Error! No biometrics or alternates provided. Can not complete enrollment.";
			LogUtilities.LogEvent(_responseText);
			errorOccurred = true;
		}

		//TODO: end atomic transaction.
		if (!errorOccurred)
		{
			_responseText = "Successful.";
		}
		//LogUtilities.LogEvent("Ending AddNewPatient.ashx");
	}
	catch (Exception ex)
	{
		_responseText = "Error in AddNewPatient::ProcessRequest: " + ex.Message;
		LogUtilities.LogEvent(_responseText);
	}
	context.Response.Write(_responseText);
	context.Response.End();
}
public FHIRMessageRouter(HttpContext context)
{
	// Routes an inbound FHIR message by resource type: "patient" messages are stored
	// in the Spark server and their fingerprint attachments added to the local match
	// database; "media" messages are matched against the existing fingerprint
	// database. The outcome is recorded in _responseText.
	//
	// Fix: in the patient branch, _responseText = "Successful." used to be assigned
	// unconditionally after the template loop, overwriting the
	// "Error adding a fingerprint..." message; it is now only set when every
	// AddTemplate call succeeded.
	try
	{
		if (uint.TryParse(MinimumAcceptedMatchScore, out _minimumAcceptedMatchScore) == false)
		{
			_minimumAcceptedMatchScore = 30; // default threshold when the configured value is unparsable
		}
		Resource newResource = FHIRUtilities.StreamToFHIR(new StreamReader(context.Request.InputStream));
		switch (newResource.TypeName.ToLower())
		{
			case "patient":
				//if new patient. TODO: check meta for NoID status
				SessionQueue seq = new SessionQueue();
				_patient = (Patient)newResource;
				// NOTE(review): sessionID is collected but never used below — presumably
				// intended for the SessionQueue; kept to preserve behavior.
				string sessionID = "";
				if (_patient.Identifier.Count > 0)
				{
					foreach (Identifier id in _patient.Identifier)
					{
						if (id.System.ToString().ToLower().Contains("session") == true)
						{
							sessionID = id.Value.ToString();
						}
					}
				}
				Patient ptSaved = (Patient)SendPatientToSparkServer();
				if (ptSaved == null)
				{
					_responseText = "Error sending Patient FHIR message to the Spark FHIR endpoint. " + ExceptionString;
					return;
				}
				string LocalNoID = ptSaved.Id.ToString();
				//TODO: make this an atomic transaction.
				// delete the FHIR message from Spark if there is an error in the minutia.
				//TODO check for existing patient and expire old messages for the patient.
				if (_patient.Photo.Count > 0)
				{
					bool addFailed = false;
					dbMinutia = new FingerPrintMatchDatabase(_databaseDirectory, _backupDatabaseDirectory, _minimumAcceptedMatchScore);
					foreach (var minutia in _patient.Photo)
					{
						byte[] byteMinutias = minutia.Data;
						Stream stream = new MemoryStream(byteMinutias);
						Media media = (Media)FHIRUtilities.StreamToFHIR(new StreamReader(stream));
						// Save minutias for matching.
						Template fingerprintTemplate = ConvertFHIR.FHIRToTemplate(media);
						fingerprintTemplate.NoID.LocalNoID = LocalNoID;
						try
						{
							dbMinutia.LateralityCode = (FHIRUtilities.LateralitySnoMedCode)fingerprintTemplate.NoID.LateralitySnoMedCode;
							dbMinutia.CaptureSite = (FHIRUtilities.CaptureSiteSnoMedCode)fingerprintTemplate.NoID.CaptureSiteSnoMedCode;
						}
						catch { } // best effort: codes keep their previous values if the template lacks them
						if (dbMinutia.AddTemplate(fingerprintTemplate) == false)
						{
							_responseText = "Error adding a fingerprint to the match database.";
							addFailed = true;
						}
					}
					dbMinutia.Dispose();
					if (!addFailed)
					{
						_responseText = "Successful.";
					}
				}
				break;
			case "media":
				_biometics = (Media)newResource;
				// TODO send to biometric match engine. If found, add patient reference to FHIR message.
				// convert FHIR fingerprint message (_biometics) to AFIS template class
				Template probe = ConvertFHIR.FHIRToTemplate(_biometics);
				dbMinutia = new FingerPrintMatchDatabase(_databaseDirectory, _backupDatabaseDirectory, _minimumAcceptedMatchScore);
				try
				{
					dbMinutia.LateralityCode = (FHIRUtilities.LateralitySnoMedCode)probe.NoID.LateralitySnoMedCode;
					dbMinutia.CaptureSite = (FHIRUtilities.CaptureSiteSnoMedCode)probe.NoID.CaptureSiteSnoMedCode;
				}
				catch { }
				MinutiaResult minutiaResult = dbMinutia.SearchPatients(probe);
				if (minutiaResult != null && minutiaResult.NoID != null && minutiaResult.NoID.Length > 0)
				{
					// Fingerprint found in database
					_responseText = minutiaResult.NoID; //TODO: for now, it returns the localNoID. should return a FHIR response.
				}
				else
				{
					_responseText = "No local database match.";
				}
				dbMinutia.Dispose();
				if (!(SendBiometicsToSparkServer()))
				{
					_responseText = "Error sending Biometric Media FHIR message to the Spark FHIR endpoint. " + ExceptionString;
				}
				break;
			default:
				_responseText = newResource.TypeName.ToLower() + " not supported.";
				break;
		}
	}
	catch (Exception ex)
	{
		_responseText = "Error in FHIRMessageRouter::FHIRMessageRouter: " + ex.Message;
	}
}
public void ProcessRequest(HttpContext context)
{
	// Confirms a returning patient's identity challenge. Query-string parameters select
	// the pending patient (localnoid) and the field being confirmed; a matching answer
	// queues the patient as "return"/"pending" in MongoDB and answers "yes", a mismatch
	// answers "no". A failed challenge is queued with the flagged "return**" status.
	//
	// Fix: corrected the "occured" -> "occurred" typo in the catch-block error response.
	context.Response.ContentType = "text/plain";
	try
	{
		foreach (String key in context.Request.QueryString.AllKeys)
		{
			switch (key.ToLower())
			{
				case "localnoid":
					_localNoID = context.Request.QueryString[key];
					break;
				case "fieldname":
					_confirmFieldName = context.Request.QueryString[key];
					break;
				case "confirmreponse": // NOTE(review): key is misspelled in the wire protocol; kept for compatibility.
					_confirmReponse = context.Request.QueryString[key];
					break;
				case "computername":
					_computerName = context.Request.QueryString[key];
					break;
				case "clinicarea":
					_clinicArea = context.Request.QueryString[key];
					break;
			}
		}
		MongoDBWrapper dbwrapper = new MongoDBWrapper(NoIDMongoDBAddress, SparkMongoDBAddress);
		FhirClient client = new FhirClient(sparkEndpointAddress);
		string sparkReference = dbwrapper.GetSparkID(_localNoID);
		string sparkAddress = sparkEndpointAddress.ToString() + "/Patient/" + sparkReference;
		Patient pendingPatient = (Patient)client.Get(sparkAddress);
		if (pendingPatient != null)
		{
			if (_confirmFieldName == "birthdate")
			{
				if (pendingPatient.BirthDate != null && _confirmReponse == pendingPatient.BirthDate)
				{
					SessionQueue seq = Utilities.PatientToSessionQueue(pendingPatient, sparkReference, _localNoID, "return", "pending");
					seq.SubmitDate = DateTime.UtcNow;
					seq._id = StringUtilities.SHA256(DomainName + Guid.NewGuid().ToString() + NodeSalt);
					seq.SessionComputerName = _computerName;
					seq.ClinicArea = _clinicArea;
					dbwrapper.AddPendingPatient(seq);
					context.Response.Write("yes");
				}
				else
				{
					context.Response.Write("no");
				}
			}
			else if (_confirmFieldName == "lastname")
			{
				//TODO: implement lastname, use metaphone or just accept exact matches?
				context.Response.Write("Error occurred. " + _confirmFieldName + " is not implemented yet!");
			}
			else if (_confirmFieldName == "firstname")
			{
				//TODO: implement firstname, use root or just accept exact matches?
				context.Response.Write("Error occurred. " + _confirmFieldName + " is not implemented yet!");
			}
			else if (_confirmFieldName == "failedchallenge")
			{
				// Patient failed the identity challenge: queue with the flagged
				// "return**" status for manual review.
				SessionQueue seq = Utilities.PatientToSessionQueue(pendingPatient, sparkReference, _localNoID, "return**", "pending");
				seq.SubmitDate = DateTime.UtcNow;
				seq._id = StringUtilities.SHA256(DomainName + Guid.NewGuid().ToString() + NodeSalt);
				seq.SessionComputerName = _computerName;
				seq.ClinicArea = _clinicArea;
				dbwrapper.AddPendingPatient(seq);
				context.Response.Write("yes");
			}
		}
	}
	catch (Exception ex)
	{
		context.Response.Write("no. Error occurred for LocalNoID = " + _localNoID + ". UpdatePendingStatus::ProcessRequest: " + ex.Message);
	}
	context.Response.End();
}
void SnapStillImage(CameraViewController sender)
{
	// Captures a still image with auto flash and saves it to the photo library,
	// preserving the JPEG metadata.
	//
	// Fix: corrected the "occured" -> "occurred" typo in the temporary-file log
	// message, matching the two other (correctly spelled) save-error messages.
	SessionQueue.DispatchAsync(async () => {
		AVCaptureConnection connection = StillImageOutput.ConnectionFromMediaType(AVMediaType.Video);
		var previewLayer = (AVCaptureVideoPreviewLayer)PreviewView.Layer;

		// Update the orientation on the still image output video connection before capturing.
		connection.VideoOrientation = previewLayer.Connection.VideoOrientation;

		// Flash set to Auto for Still Capture.
		SetFlashModeForDevice(AVCaptureFlashMode.Auto, VideoDeviceInput.Device);

		try
		{
			var imageDataSampleBuffer = await StillImageOutput.CaptureStillImageTaskAsync(connection);
			// The sample buffer is not retained; create the JPEG data now so it survives
			// the asynchronous save to the photo library.
			NSData imageData = AVCaptureStillImageOutput.JpegStillToNSData(imageDataSampleBuffer);

			PHPhotoLibrary.RequestAuthorization(status => {
				if (status != PHAuthorizationStatus.Authorized)
					return;

				// To preserve the metadata we create the asset from the JPEG NSData
				// representation (an asset created from a UIImage discards metadata).
				// iOS 9 can add the data directly via PHAssetCreationRequest; iOS 8
				// must round-trip through a temporary file.
				if (UIDevice.CurrentDevice.CheckSystemVersion(9, 0))
				{
					PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
						var request = PHAssetCreationRequest.CreationRequestForAsset();
						request.AddResource(PHAssetResourceType.Photo, imageData, null);
					}, (success, err) => {
						if (!success)
							Console.WriteLine("Error occurred while saving image to photo library: {0}", err);
					});
				}
				else
				{
					var temporaryFileUrl = new NSUrl(GetTmpFilePath("jpg"), false);
					PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
						NSError error = null;
						if (imageData.Save(temporaryFileUrl, NSDataWritingOptions.Atomic, out error))
							PHAssetChangeRequest.FromImage(temporaryFileUrl);
						else
							Console.WriteLine("Error occurred while writing image data to a temporary file: {0}", error);
					}, (success, error) => {
						if (!success)
							Console.WriteLine("Error occurred while saving image to photo library: {0}", error);
						// Delete the temporary file whether or not the save succeeded.
						NSError deleteError;
						NSFileManager.DefaultManager.Remove(temporaryFileUrl, out deleteError);
					});
				}
			});
		}
		catch (NSErrorException ex)
		{
			Console.WriteLine("Could not capture still image: {0}", ex.Error);
		}
	});
}