/// <summary>
/// Formats the names in <c>Persons</c> as a human-readable list,
/// e.g. "A", "A and B", "A, B and C".
/// </summary>
/// <returns>The formatted name list, or an empty string when there are no persons.</returns>
public string GetPersonsBeautyfied()
{
    if (Persons == null || !Persons.Any())
    {
        return "";
    }

    // Materialize once: the original re-evaluated Count()/ElementAt(i) on every iteration.
    var names = Persons.Select(p => p.FullName).ToList();
    if (names.Count == 1)
    {
        return names[0];
    }

    // BUGFIX: the old loop's "if (i != 0)" guard skipped the separator after the
    // first name, yielding "AB and C" for three persons instead of "A, B and C".
    return string.Join(", ", names.Take(names.Count - 1)) + " and " + names[names.Count - 1];
}
/// <summary>
/// Renders the latest analysis results (face rectangles and identified person names)
/// on top of the given video frame and returns the composed image.
/// </summary>
/// <param name="frame">The frame to visualize; its UserData may carry client-side face rects.</param>
/// <returns>The frame image, with face/identity overlays drawn when results are available.</returns>
private BitmapSource VisualizeResult(VideoFrame frame)
{
    // Draw any results on top of the image.
    BitmapSource visImage = frame.Image.ToBitmapSource();

    var result = _latestResultsToDisplay;
    if (result != null)
    {
        // See if we have local face detections for this image.
        var clientFaces = (OpenCvSharp.Rect[])frame.UserData;
        if (clientFaces != null && result.Faces != null)
        {
            // If so, then the analysis results might be from an older frame. We need to match
            // the client-side face detections (computed on this frame) with the analysis
            // results (computed on the older frame) that we want to display.
            MatchAndReplaceFaceRectangles(result.Faces, clientFaces);
        }

        if (result.IdentifyResults != null && result.IdentifyResults.Length > 0)
        {
            // BUGFIX: allocate the names array once. The original allocated a fresh array
            // on EVERY loop iteration, discarding the names assigned for earlier faces,
            // so only the last face's name survived to rendering.
            result.PersonNames = new string[result.IdentifyResults.Length];

            for (int idx = 0; idx < result.IdentifyResults.Length; idx++)
            {
                var res = result.IdentifyResults[idx];

                // Default to "Unknown"; replaced below when a trusted match is found.
                string name = "Unknown";
                if (res.Candidates.Length > 0)
                {
                    var candidateId = res.Candidates[0].PersonId.ToString();
                    // Only trust the candidate when the face ids line up between the
                    // detection results and the identification results.
                    if (result.Faces[idx].FaceId == res.FaceId)
                    {
                        var person = Persons.FirstOrDefault(p => p.PersonId == candidateId);
                        if (person != null)
                        {
                            name = person.PersonName;
                        }
                    }
                }
                result.PersonNames[idx] = name;
            }

            // Draw once with the complete name set instead of re-drawing per face.
            visImage = Visualization.DrawFaces(visImage, result.Faces, result.PersonNames);
        }

        // DB Operation
        if (result.PersonNames != null)
        {
            DB_Operation(result.PersonNames);
        }
    }

    return visImage;
}
/// <summary>
/// Returns the PersonId field value from the first non-empty record collection,
/// probed in a fixed priority order.
/// </summary>
/// <returns>The PersonId of the first available record.</returns>
/// <exception cref="Exception">Thrown when no collection contains any records.</exception>
public string GetPersonIdFieldName()
{
    // Probe each collection in priority order; the first one holding data wins.
    // "X?.Any() == true" is false both for a null collection and an empty one.
    if (Persons?.Any() == true) return Persons[0].PersonId;
    if (PayerPlanPeriods?.Any() == true) return PayerPlanPeriods[0].PersonId;
    if (ConditionOccurrence?.Any() == true) return ConditionOccurrence[0].PersonId;
    if (Death?.Any() == true) return Death[0].PersonId;
    if (DrugExposure?.Any() == true) return DrugExposure[0].PersonId;
    if (ProcedureOccurrence?.Any() == true) return ProcedureOccurrence[0].PersonId;
    if (Observation?.Any() == true) return Observation[0].PersonId;
    if (Measurement?.Any() == true) return Measurement[0].PersonId;
    if (VisitOccurrence?.Any() == true) return VisitOccurrence[0].PersonId;
    if (Cohort?.Any() == true) return Cohort[0].PersonId;
    if (DeviceExposure?.Any() == true) return DeviceExposure[0].PersonId;

    throw new Exception("Cant find PersonId FieldName " + this.FileName);
}
/// <summary>
/// Looks up a person by id, lazily initializing the backing collection on first use.
/// </summary>
/// <param name="id">The person id to search for.</param>
/// <returns>The matching person, or null when no person has that id.</returns>
public async Task<Person> GetById(int id)
{
    // Load the data the first time the collection is consulted.
    if (Persons.Any() == false)
    {
        await Init();
    }

    return Persons.Where(p => p.id == id).FirstOrDefault();
}
/// <summary>
/// Returns every person with the given age, lazily initializing the backing
/// collection on first use.
/// </summary>
/// <param name="age">The age to filter by.</param>
/// <returns>A list of persons of that age (possibly empty).</returns>
public async Task<IList<Person>> GetByAge(int age)
{
    // Load the data the first time the collection is consulted.
    if (Persons.Any() == false)
    {
        await Init();
    }

    var matches = from p in Persons
                  where p.age == age
                  select p;
    return matches.ToList();
}
/// <summary>
/// CanExecute handler for the Show command: enabled only while the person list is empty.
/// </summary>
/// <param name="obj">Command parameter (unused).</param>
/// <returns>true when there are no persons; otherwise false.</returns>
private bool CommandBinding_ShowCanExecute(object obj)
{
    // Idiom fix: return the negated condition directly instead of an
    // if/else pair that returns boolean constants.
    return !Persons.Any();
}
/// <summary>
/// Seeds the store with default persons when the Persons set is empty;
/// a no-op when data already exists.
/// </summary>
public void EnsureSeeded()
{
    // Guard clause: nothing to do when data is already present.
    if (Persons.Any())
    {
        return;
    }

    AddRange(
        new Person { Name = "John Doe" },
        new Person { Name = "Joe Bloggs" });
    SaveChanges();
}
/// <summary>
/// Gets all employees: raises the GetAll event, refreshes the filtered person
/// list, and toggles the paging buttons based on whether any persons were found.
/// (Original doc comment was in Russian: "Получение всех сотрудников.")
/// </summary>
private void GetAllPersons()
{
    // Idiom fix: null-conditional invoke instead of null-check + call
    // (also avoids a race between the check and the invocation).
    GetAll?.Invoke();

    Persons = Filters();

    // Idiom fix: compute the condition once and assign it to both buttons,
    // replacing duplicated if/else branches.
    bool hasPersons = Persons.Any();
    btnNext.Enabled = hasPersons;
    btnPrev.Enabled = hasPersons;
}
/// <summary>
/// Builds per-age gender statistics (male/female/other counts), ordered by
/// ascending age, lazily initializing the person collection on first use.
/// </summary>
/// <returns>One GenderStatistics entry per distinct age, in ascending age order.</returns>
public async Task<IList<GenderStatistics>> GenderStatistics()
{
    // Load the data the first time the collection is consulted.
    if (!Persons.Any())
    {
        await Init();
    }

    var genderStats = new List<GenderStatistics>();

    // Bucket persons by age (ascending), then tally genders within each bucket.
    foreach (var ageGroup in Persons.GroupBy(p => p.age).OrderBy(g => g.Key))
    {
        var stat = new GenderStatistics { age = ageGroup.Key };
        foreach (var person in ageGroup)
        {
            switch (person.gender)
            {
                case "M":
                    stat.male++;
                    break;
                case "F":
                    stat.female++;
                    break;
                default:
                    stat.others++;
                    break;
            }
        }
        genderStats.Add(stat);
    }

    return genderStats;
}
/// <summary>
/// Shows a file-open dialog, loads persons from the selected JSON file,
/// selects the first loaded person, and updates the window title.
/// </summary>
private void OnOpenFile()
{
    var openFileDialog = new OpenFileDialog()
    {
        Title = "Datei öffnen",
        Filter = "Json |*.json|Alle Dateien|*.*",
        FilterIndex = 0,
        InitialDirectory = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments)
    };
    if (openFileDialog.ShowDialog(App.Current.MainWindow) != true)
    {
        return;
    }

    fileName = openFileDialog.FileName;
    try
    {
        Persons.Clear();
        var items = JsonSerializer.Deserialize<Person[]>(File.ReadAllText(fileName, Encoding.UTF8));
        if (items != null && items.Any())
        {
            Persons.AddRange(items);
            // Persons was just cleared and items is non-empty, so index 0 exists;
            // the original's extra Persons.Any() re-check was redundant.
            SelectedPerson = Persons[0];
        }
        ApplyTitle(fileName);
    }
    catch (Exception e)
    {
        // BUGFIX: the original swallowed all exceptions silently, hiding I/O and
        // JSON parsing failures from both the user and the developer. At minimum,
        // record the failure; consider surfacing it to the user as well.
        System.Diagnostics.Debug.WriteLine($"Failed to load '{fileName}': {e}");
    }
}
/// <summary>
/// Pick image, detect and identify all faces detected.
/// Detection and identification calls are wrapped in a basic retry on
/// "RateLimitExceeded".
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
private async void Identify_Click(object sender, RoutedEventArgs e)
{
    // Show file picker
    Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
    dlg.DefaultExt = ".jpg";
    dlg.Filter = "Image files(*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
    var result = dlg.ShowDialog();
    if (result.HasValue && result.Value)
    {
        // User picked one image.
        // Clear previous detection and identification results.
        TargetFaces.Clear();
        var pickedImagePath = dlg.FileName;
        var renderingImage = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
        var imageInfo = UIHelper.GetImageInfoForRendering(renderingImage);
        SelectedFile = renderingImage;

        var sw = Stopwatch.StartNew();

        MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
        string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
        string subscriptionEndpoint = mainWindow._scenariosControl.SubscriptionEndpoint;
        var faceServiceClient = new FaceServiceClient(subscriptionKey, subscriptionEndpoint);

        // Call detection REST API
        using (var fStream = File.OpenRead(pickedImagePath))
        {
            try
            {
                var faces = await RetryHelper.OperationWithBasicRetryAsync(
                    async () => await faceServiceClient.DetectAsync(fStream),
                    new[] { "RateLimitExceeded" },
                    traceWriter: _mainWindowLogTraceWriter);

                // Convert detection result into UI binding object for rendering
                foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                {
                    TargetFaces.Add(face);
                }

                MainWindow.Log("Request: Identifying {0} face(s) in group \"{1}\"", faces.Length, this.GroupId);

                // Identify each face.
                // Call identify REST API, the result contains identified person information.
                var identifyResult = await RetryHelper.OperationWithBasicRetryAsync(
                    async () => await faceServiceClient.IdentifyAsync(faces.Select(ff => ff.FaceId).ToArray(), largePersonGroupId: this.GroupId),
                    new[] { "RateLimitExceeded" },
                    traceWriter: _mainWindowLogTraceWriter);

                for (int idx = 0; idx < faces.Length; idx++)
                {
                    // Update identification result for rendering.
                    var face = TargetFaces[idx];
                    var res = identifyResult[idx];

                    // Idiom fix: one FirstOrDefault lookup replaces the original's
                    // Any(...) followed by Where(...).First() (two scans of Persons).
                    var person = res.Candidates.Length > 0
                        ? Persons.FirstOrDefault(p => p.PersonId == res.Candidates[0].PersonId.ToString())
                        : null;
                    face.PersonName = person != null ? person.PersonName : "Unknown";
                }

                var outString = new StringBuilder();
                foreach (var face in TargetFaces)
                {
                    outString.AppendFormat("Face {0} is identified as {1}. ", face.FaceId, face.PersonName);
                }

                MainWindow.Log("Response: Success. {0}", outString);
            }
            catch (FaceAPIException ex)
            {
                MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
            }
        }
    }

    // NOTE(review): forcing a full GC per click is an anti-pattern; kept only for
    // behavior parity with the original — consider removing it.
    GC.Collect();
}
/// <summary>
/// CanExecute for the Commit command: allowed only while no person has
/// validation errors.
/// </summary>
/// <returns>true when every person is error-free; otherwise false.</returns>
private bool CommitCommandCanExecute()
{
    // Equivalent to !Any(has errors): every person must have zero error properties.
    return Persons.All(n => n.PropertiesWithError.Count == 0);
}
/// <summary>
/// Pick image, detect and identify all faces detected (newer SDK variant using
/// DetectWithStreamAsync and the shared FaceServiceClientHelper instance).
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
private async void Identify_Click(object sender, RoutedEventArgs e)
{
    // Show file picker
    Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
    dlg.DefaultExt = ".jpg";
    dlg.Filter = "Image files(*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
    var result = dlg.ShowDialog();
    if (result.HasValue && result.Value)
    {
        // User picked one image.
        // Clear previous detection and identification results.
        TargetFaces.Clear();
        var pickedImagePath = dlg.FileName;
        var renderingImage = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
        var imageInfo = UIHelper.GetImageInfoForRendering(renderingImage);
        SelectedFile = renderingImage;

        var sw = Stopwatch.StartNew();

        var faceServiceClient = FaceServiceClientHelper.GetInstance(this);

        // Call detection REST API
        using (var fStream = File.OpenRead(pickedImagePath))
        {
            try
            {
                var faces = await faceServiceClient.Face.DetectWithStreamAsync(fStream, recognitionModel: recognitionModel, detectionModel: detectionModel);

                // Convert detection result into UI binding object for rendering
                foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                {
                    TargetFaces.Add(face);
                }

                MainWindow.Log("Request: Identifying {0} face(s) in group \"{1}\"", faces.Count, GroupName);

                // Identify each face.
                // Call identify REST API, the result contains identified person information.
                var identifyResult = await faceServiceClient.Face.IdentifyAsync(
                    (from face in faces where face.FaceId != null select face.FaceId.Value).ToList(), null, GroupName);

                for (int idx = 0; idx < faces.Count; idx++)
                {
                    // Update identification result for rendering.
                    var face = TargetFaces[idx];
                    var res = identifyResult[idx];

                    // Idiom fix: one FirstOrDefault lookup replaces the original's
                    // Any(...) followed by Where(...).First() (two scans of Persons).
                    var person = res.Candidates.Count > 0
                        ? Persons.FirstOrDefault(p => p.PersonId == res.Candidates[0].PersonId.ToString())
                        : null;
                    face.PersonName = person != null ? person.PersonName : "Unknown";
                }

                var outString = new StringBuilder();
                foreach (var face in TargetFaces)
                {
                    outString.AppendFormat("Face {0} is identified as {1}. ", face.FaceId, face.PersonName);
                }

                MainWindow.Log("Response: Success. {0}", outString);
            }
            catch (APIErrorException ex)
            {
                MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
            }
        }
    }

    // NOTE(review): forcing a full GC per click is an anti-pattern; kept only for
    // behavior parity with the original — consider removing it.
    GC.Collect();
}
/// <summary>
/// Pick image, detect and identify all faces detected (App.Instance client
/// variant; results are appended to the Output log text).
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
private async void Identify_Click(object sender, RoutedEventArgs e)
{
    // Show file picker
    Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
    dlg.DefaultExt = ".jpg";
    dlg.Filter = "Image files(*.jpg) | *.jpg";
    var result = dlg.ShowDialog();
    if (result.HasValue && result.Value)
    {
        // User picked one image.
        // Clear previous detection and identification results.
        TargetFaces.Clear();
        SelectedFile = dlg.FileName;
        var sw = Stopwatch.StartNew();
        var imageInfo = UIHelper.GetImageInfoForRendering(dlg.FileName);

        // Call detection REST API
        using (var fileStream = File.OpenRead(dlg.FileName))
        {
            try
            {
                var faces = await App.Instance.DetectAsync(fileStream);

                // Convert detection result into UI binding object for rendering
                foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                {
                    TargetFaces.Add(face);
                }

                Output = Output.AppendLine(string.Format("Request: Identifying {0} face(s) in group \"{1}\"", faces.Length, GroupName));

                // Identify each face.
                // Call identify REST API, the result contains identified person information.
                var identifyResult = await App.Instance.IdentifyAsync(GroupName, faces.Select(ff => ff.FaceId).ToArray());

                for (int idx = 0; idx < faces.Length; idx++)
                {
                    // Update identification result for rendering.
                    var face = TargetFaces[idx];
                    var res = identifyResult[idx];

                    // Idiom fix: one FirstOrDefault lookup replaces the original's
                    // Any(...) followed by Where(...).First() (two scans of Persons).
                    var person = res.Candidates.Length > 0
                        ? Persons.FirstOrDefault(p => p.PersonId == res.Candidates[0].PersonId.ToString())
                        : null;
                    face.PersonName = person != null ? person.PersonName : "Unknown";
                }

                var outString = new StringBuilder();
                foreach (var face in TargetFaces)
                {
                    outString.AppendFormat("Face {0} is identified as {1}. ", face.FaceId, face.PersonName);
                }

                Output = Output.AppendLine(string.Format("Response: Success. {0}", outString));
            }
            catch (ClientException ex)
            {
                Output = Output.AppendLine(string.Format("Response: {0}. {1}", ex.Error.Code, ex.Error.Message));
            }
        }
    }
}
/// <summary>
/// Pick image, detect faces with attributes (age, gender, smile, hair, head pose,
/// glasses, emotion), identify each face against the group, and log the results.
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
private async void Identify_Click(object sender, RoutedEventArgs e)
{
    // Show file picker
    Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
    dlg.DefaultExt = ".jpg";
    dlg.Filter = "Image files(*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
    var result = dlg.ShowDialog();
    if (result.HasValue && result.Value)
    {
        // User picked one image.
        // Clear previous detection and identification results.
        TargetFaces.Clear();
        var pickedImagePath = dlg.FileName;
        var renderingImage = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
        var imageInfo = UIHelper.GetImageInfoForRendering(renderingImage);
        SelectedFile = renderingImage;

        var sw = Stopwatch.StartNew();

        MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
        // SECURITY(review): the subscription key and endpoint are hard-coded in
        // source. Move them to configuration/secret storage and rotate the leaked key.
        string subscriptionKey = "7f9f9eb9d73e4606bbfca54abcf93996";
        string subscriptionEndpoint = "https://westcentralus.api.cognitive.microsoft.com/face/v1.0";
        var faceServiceClient = new FaceServiceClient(subscriptionKey, subscriptionEndpoint);

        // Attributes requested from the detection call.
        var requiredFaceAttributes = new FaceAttributeType[]
        {
            FaceAttributeType.Age,
            FaceAttributeType.Gender,
            FaceAttributeType.Smile,
            FaceAttributeType.Hair,
            FaceAttributeType.HeadPose,
            FaceAttributeType.Glasses,
            FaceAttributeType.Emotion
        };

        // Call detection REST API
        using (var fStream = File.OpenRead(pickedImagePath))
        {
            try
            {
                var faces = await faceServiceClient.DetectAsync(fStream, true, true, requiredFaceAttributes);

                // Convert detection result into UI binding object for rendering
                foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                {
                    TargetFaces.Add(face);
                }

                MainWindow.Log("Request: Identifying {0} face(s) in group \"{1}\"", faces.Length, this.GroupId);

                // Identify each face.
                // Call identify REST API, the result contains identified person information.
                var identifyResult = await faceServiceClient.IdentifyAsync(faces.Select(ff => ff.FaceId).ToArray(), largePersonGroupId: this.GroupId);
                for (int idx = 0; idx < faces.Length; idx++)
                {
                    // Update identification result for rendering.
                    var face = TargetFaces[idx];
                    var res = identifyResult[idx];

                    // Idiom fix: one FirstOrDefault lookup replaces the original's
                    // Any(...) followed by Where(...).First() (two scans of Persons).
                    var person = res.Candidates.Length > 0
                        ? Persons.FirstOrDefault(p => p.PersonId == res.Candidates[0].PersonId.ToString())
                        : null;
                    face.PersonName = person != null ? person.PersonName : "Unknown";
                }

                var outString = new StringBuilder();
                foreach (var face in faces)
                {
                    MainWindow.Log("\nFace ID : {0}", face.FaceId.ToString());
                    StringBuilder sb = new StringBuilder();

                    // Add the gender, age, and smile.
                    sb.Append("Gender: ");
                    sb.Append(face.FaceAttributes.Gender);
                    sb.Append("\n ");
                    sb.Append("Age: ");
                    sb.Append(face.FaceAttributes.Age);
                    sb.Append("\n ");
                    sb.Append(String.Format("smile {0:F1}%\n ", face.FaceAttributes.Smile * 100));

                    // Add the emotions. Display all emotions over 10%.
                    sb.Append("Emotion: ");
                    Microsoft.ProjectOxford.Common.Contract.EmotionScores emotionScores = face.FaceAttributes.Emotion;
                    if (emotionScores.Anger >= 0.1f) { sb.Append(String.Format("anger {0:F1}%, ", emotionScores.Anger * 100)); }
                    if (emotionScores.Contempt >= 0.1f) { sb.Append(String.Format("contempt {0:F1}%, ", emotionScores.Contempt * 100)); }
                    if (emotionScores.Disgust >= 0.1f) { sb.Append(String.Format("disgust {0:F1}%, ", emotionScores.Disgust * 100)); }
                    if (emotionScores.Fear >= 0.1f) { sb.Append(String.Format("fear {0:F1}%, ", emotionScores.Fear * 100)); }
                    if (emotionScores.Happiness >= 0.1f) { sb.Append(String.Format("happiness {0:F1}%, ", emotionScores.Happiness * 100)); }
                    if (emotionScores.Neutral >= 0.1f) { sb.Append(String.Format("neutral {0:F1}%, ", emotionScores.Neutral * 100)); }
                    if (emotionScores.Sadness >= 0.1f) { sb.Append(String.Format("sadness {0:F1}%, ", emotionScores.Sadness * 100)); }
                    if (emotionScores.Surprise >= 0.1f) { sb.Append(String.Format("surprise {0:F1}%, ", emotionScores.Surprise * 100)); }
                    sb.Append("\n ");

                    // Add glasses.
                    sb.Append(face.FaceAttributes.Glasses);
                    sb.Append("\n ");

                    // Add hair.
                    sb.Append("Hair: ");
                    var hair = face.FaceAttributes.Hair;
                    if (hair.Bald >= 0.01f) { sb.Append(String.Format("bald {0:F1}% ", hair.Bald * 100)); }

                    // Display all hair color attributes over 10%.
                    // (A large block of commented-out legacy hair-color code was removed here.)
                    var hairColors = hair.HairColor;
                    foreach (var hairColor in hairColors)
                    {
                        if (hairColor.Confidence >= 0.1)
                        {
                            sb.Append(hairColor.Color.ToString());
                            sb.Append(String.Format(" {0:F1}% ", hairColor.Confidence * 100));
                        }
                    }
                    sb.Append("\n");

                    MainWindow.Log("Face Attributes : {0}", sb);
                    MainWindow.Log("Face MouthLeft(X) : {0}", face.FaceLandmarks.MouthLeft.X);
                }

                foreach (var face in TargetFaces)
                {
                    outString.AppendFormat("Face {0} is identified as {1}. ", face.FaceId, face.PersonName);
                }

                MainWindow.Log("Response: Success. {0}", outString);
            }
            catch (FaceAPIException ex)
            {
                MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
            }
        }
    }

    // NOTE(review): forcing a full GC per click is an anti-pattern; kept only for
    // behavior parity with the original — consider removing it.
    GC.Collect();
}
/// <summary>
/// Downloads the given blob image to a local folder, detects the faces in it,
/// identifies them against the configured large person group, and logs the results.
/// </summary>
/// <param name="blob">Blob whose image should be analyzed.</param>
private async Task Identify_Click(IListBlobItem blob)
{
    // Idiom fix: removed the original's dead "if (true)" wrapper.

    // Clear previous detection and identification results.
    TargetFaces.Clear();

    // NOTE(review): the download directory is hard-coded to a user-specific path;
    // consider Path.GetTempPath() or configuration instead.
    // (Path expression was previously duplicated; computed once here.)
    var pickedImagePath = @"C:\Users\Thinksysuser\Pictures\Saved Pictures\" + blob.Uri.Segments.Last();
    using (var client = new WebClient())
    {
        client.DownloadFile(blob.Uri.AbsoluteUri, pickedImagePath);
    }

    var renderingImage = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
    var imageInfo = UIHelper.GetImageInfoForRendering(renderingImage);
    SelectedFile = renderingImage;

    var sw = Stopwatch.StartNew();

    // SECURITY(review): credentials are blank placeholders; load them from
    // configuration/secret storage before shipping.
    string subscriptionKey = "";
    string subscriptionEndpoint = "";
    var faceServiceClient = new FaceServiceClient(subscriptionKey, subscriptionEndpoint);

    // Call detection REST API
    using (var fStream = File.OpenRead(pickedImagePath))
    {
        try
        {
            var faces = await faceServiceClient.DetectAsync(fStream);
            count++;
            System.Diagnostics.Debug.WriteLine("-----after detect----" + count);

            // Convert detection result into UI binding object for rendering
            foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
            {
                TargetFaces.Add(face);
            }

            System.Diagnostics.Debug.WriteLine("Request: Identifying {0} face(s) in group \"{1}\"", faces.Length, this.GroupId);

            // Identify each face.
            // Call identify REST API, the result contains identified person information.
            var identifyResult = await faceServiceClient.IdentifyAsync(faces.Select(ff => ff.FaceId).ToArray(), largePersonGroupId: this.GroupId);
            count++;
            System.Diagnostics.Debug.WriteLine("-----after identify----" + count);

            for (int idx = 0; idx < faces.Length; idx++)
            {
                // Update identification result for rendering.
                var face = TargetFaces[idx];
                var res = identifyResult[idx];

                // Idiom fix: one FirstOrDefault lookup replaces the original's
                // Any(...) followed by Where(...).First() (two scans of Persons).
                var person = res.Candidates.Length > 0
                    ? Persons.FirstOrDefault(p => p.PersonId == res.Candidates[0].PersonId.ToString())
                    : null;
                face.PersonName = person != null ? person.PersonName : "Unknown";
            }

            var outString = new StringBuilder();
            foreach (var face in TargetFaces)
            {
                outString.AppendFormat("Face {0} is identified as {1}. ", face.FaceId, face.PersonName);
            }
            System.Diagnostics.Debug.WriteLine("Response: Success. {0}", outString);
        }
        catch (FaceAPIException ex)
        {
            System.Diagnostics.Debug.WriteLine("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
        }
    }

    // NOTE(review): forcing a full GC per call is an anti-pattern; kept only for
    // behavior parity with the original — consider removing it.
    GC.Collect();
}
/// <summary>
/// Initializes the subordinate view model: resolves services from the locator,
/// wires the chat client, and sets up the select / search / clear-search /
/// send-visitor reactive commands.
/// </summary>
/// <param name="mainWindowViewModel">Owning main-window view model, also passed to the base class.</param>
public SubordinateViewModel(IMainViewModel mainWindowViewModel) : base(nameof(SubordinateViewModel), mainWindowViewModel)
{
    SetNotification("Loading subordinate data", NotificationType.Refreshing);
    _mainWindowViewModel = mainWindowViewModel;
    SearchText = string.Empty;

    // Resolve services from the Splat locator only when not already assigned.
    _networkServiceOfPersons ??= Locator.Current.GetService<INetworkService<Person>>();
    _networkServiceOfFileData ??= Locator.Current.GetService<INetworkService<FileData>>();
    _settingsService ??= Locator.Current.GetService<ISettingsService>();

    #region Init Chat service
    _clientService ??= Locator.Current.GetService<IClientService>();
    _clientService.MessageReceived += MessageReceived;
    #endregion

    #region Init SelectPersonCommand
    SelectPersonCommand = ReactiveCommand.CreateFromTask<Person, bool>(SelectPersonExecutedAsync);
    SelectPersonCommand.ThrownExceptions.Subscribe(exception =>
    {
        // Reset the photo spinner before surfacing the error.
        IsPhotoLoading = false;
        ErrorHandler(nameof(SelectPersonCommand)).Invoke(exception);
    });
    // Changing the selection triggers the select command.
    this.WhenAnyValue(x => x.SelectedPerson).InvokeCommand(SelectPersonCommand);
    #endregion

    #region Init SearchPersonCommand
    // Search is enabled only for non-blank queries.
    var canSearch = this.WhenAnyValue(x => x.SearchText, query => !string.IsNullOrWhiteSpace(query));
    SearchPersonCommand = ReactiveCommand.CreateFromTask<string, IEnumerable<Person>>(
        async query => await SearchPersonExecuteAsync(query), canSearch);
    SearchPersonCommand.IsExecuting.ToProperty(this, x => x.IsSearching, out _isSearching);
    SearchPersonCommand.ThrownExceptions.Subscribe(ErrorHandler(nameof(SearchPersonCommand)));
    // Command output feeds the Persons property.
    _searchedPersons = SearchPersonCommand.ToProperty(this, x => x.Persons);
    // Debounce typing by one second before triggering a search.
    this.WhenAnyValue(x => x.SearchText)
        .Throttle(TimeSpan.FromSeconds(1), RxApp.MainThreadScheduler)
        .InvokeCommand(SearchPersonCommand);
    #endregion

    #region Init ClearSearchPersonCommand
    // Clearing is meaningful while a query is present or results are showing.
    var canClearSearch = this.WhenAnyValue(x => x.SearchText,
        query => !string.IsNullOrWhiteSpace(query) || Persons.Any());
    ClearSearchPersonCommand = ReactiveCommand.CreateFromTask<Unit, bool>(ClearSearchPersonsAsync, canClearSearch);
    ClearSearchPersonCommand.ThrownExceptions.Subscribe(ErrorHandler(nameof(ClearSearchPersonCommand)));
    #endregion

    #region Init SendPersonCommand
    // Sending requires a populated visitor and no load in progress; the visitor
    // property arguments exist so the observable re-evaluates on any field change.
    var canSendPerson = this.WhenAnyValue(
        x => x.IsLoading,
        x => x.Visitor.Comment,
        x => x.Visitor.FirstName,
        x => x.Visitor.Message,
        x => x.Visitor.MiddleName,
        x => x.Visitor.Post,
        x => x.Visitor.SecondName,
        selector: (isLoading, _, __, ___, ____, _____, ______) => !Visitor.IsNullOrEmpty() && !isLoading);
    SendVisitorCommand = ReactiveCommand.CreateFromTask<Visitor, bool>(SendVisitorExecuteAsync, canSendPerson);
    SendVisitorCommand.ThrownExceptions.Subscribe(ErrorHandler(nameof(SendVisitorCommand)));
    #endregion

    Initialized += OnSubordinateViewModelInitialized;
    OnInitialized();
}
/// <summary>
/// CanExecute handler for the Clear command: enabled while the person list
/// contains at least one entry.
/// </summary>
/// <param name="parameter">Command parameter (unused).</param>
private bool ClearCmndCanExecute(object parameter) => Persons.Any();
/// <summary>
/// Initializes the subordinate routable view model: resolves services from the
/// locator, wires the chat client, and sets up the select / search /
/// clear-search / send-visitor reactive commands.
/// </summary>
/// <param name="screen">Host screen used for ReactiveUI routing.</param>
public SubordinateViewModel(IScreen screen)
{
    UrlPathSegment = nameof(SubordinateViewModel);
    HostScreen = screen;
    SearchText = string.Empty;

    // Resolve the person network service from the Splat locator unless already set.
    _networkServiceOfPersons ??= Locator.Current.GetService<INetworkService<Person>>();

    #region Init Chat service
    _clientService ??= Locator.Current.GetService<IClientService>();
    _clientService.MessageReceived += MessageReceived;
    #endregion

    #region Init SelectPersonCommand
    SelectPersonCommand = ReactiveCommand.Create<Person, bool>(FillVisitorBySelected);
    // Changing the selection triggers the select command.
    this.WhenAnyValue(x => x.SelectedPerson)
        .InvokeCommand(SelectPersonCommand);
    #endregion

    #region Init SearchPersonCommand
    // Search is enabled only for non-blank queries.
    var canSearch = this.WhenAnyValue(x => x.SearchText, query => !string.IsNullOrWhiteSpace(query));
    SearchPersonCommand = ReactiveCommand.CreateFromTask<string, IEnumerable<Person>>(
        async query => await SearchPersons(query), canSearch);
    SearchPersonCommand.ThrownExceptions.Subscribe(error => ShowError(error));
    // Command output feeds the Persons property.
    _searchedPersons = SearchPersonCommand.ToProperty(this, x => x.Persons);
    // Debounce typing by one second before triggering a search.
    this.WhenAnyValue(x => x.SearchText)
        .Throttle(TimeSpan.FromSeconds(1), RxApp.MainThreadScheduler)
        .InvokeCommand(SearchPersonCommand);
    #endregion

    #region Init ClearSearchPersonCommand
    // Clearing is meaningful while a query is present or results are showing.
    var canClearSearch = this.WhenAnyValue(x => x.SearchText,
        query => !string.IsNullOrWhiteSpace(query) || Persons.Any());
    ClearSearchPersonCommand = ReactiveCommand.CreateFromTask<Unit, bool>(ClearSearchPersons, canClearSearch);
    #endregion

    #region Init SendPersonCommand
    // Sending requires a populated visitor; the visitor property arguments exist
    // so the observable re-evaluates whenever any field changes.
    var canSendPerson = this.WhenAnyValue(
        x => x.Visitor,
        x => x.Visitor.Comment,
        x => x.Visitor.FirstName,
        x => x.Visitor.Message,
        x => x.Visitor.MiddleName,
        x => x.Visitor.Post,
        x => x.Visitor.SecondName,
        (person, _, __, ___, ____, _____, ______) => !person.IsNullOrEmpty());
    SendVisitorCommand = ReactiveCommand.CreateFromTask<Visitor, bool>(SendVisitor, canSendPerson);
    #endregion

    Initialized = SubordinateViewModel_Initialized;
    Initialized.Invoke();
}