/// <summary>
/// Begins the work: validates the boot parameters, deserializes the
/// analytics project configuration and loads the dataset details for the job.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (AnalyticsProjectInfo) XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    // DatasetId is machine-generated data, not user input; parse it with the
    // invariant culture (CA1305) rather than the thread's current culture.
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(
        Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.InvariantCulture));
}
/// <summary>
/// Begins the work: validates the boot parameters, deserializes the
/// categorize configuration, reads the update batch size from configuration
/// and loads the analytics project info.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (CategorizeInfo) XmlUtility.DeserializeObject(BootParameters, typeof(CategorizeInfo));
    // Configuration values are machine-written; parse with the invariant
    // culture (CA1305) instead of the ambient thread culture.
    _batchSize = Convert.ToInt32(
        ApplicationConfigurationManager.GetValue("UpdateFieldsBatchSize", "AnalyticsProject"),
        CultureInfo.InvariantCulture);
    _projectInfo = AnalyticsProject.Get(
        _jobParameter.MatterId.ToString(CultureInfo.InvariantCulture),
        _jobParameter.DatasetId.ToString(CultureInfo.InvariantCulture),
        _jobParameter.ProjectId.ToString(CultureInfo.InvariantCulture));
}
/// <summary>
/// Begins the work: deserializes the project configuration, loads the dataset,
/// wires the document-source collection id and marks the commit-index status
/// as initialized for the matter.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (AnalyticsProjectInfo) XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    _analyticProject = new AnalyticsProject();
    // DatasetId is machine-generated; parse culture-invariantly (CA1305).
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(
        Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.InvariantCulture));
    _jobParameter.DocumentSource.CollectionId = _dataset.CollectionId;
    // NOTE(review): "SetCommiyIndexStatusToInitialized" is misspelled but is
    // declared elsewhere; renaming it requires touching all call sites.
    SetCommiyIndexStatusToInitialized(_jobParameter.MatterId);
}
/// <summary>
/// Begins the work: deserializes the categorize job configuration, resolves
/// the dataset and project details, and marks the commit-index status as
/// initialized for the matter.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();

    _jobParameter = (CategorizeInfo) XmlUtility.DeserializeObject(BootParameters, typeof(CategorizeInfo));
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(_jobParameter.DatasetId);

    // The analytics service addresses entities by culture-invariant string keys.
    var matterKey = _jobParameter.MatterId.ToString(CultureInfo.InvariantCulture);
    var datasetKey = _jobParameter.DatasetId.ToString(CultureInfo.InvariantCulture);
    var projectKey = _jobParameter.ProjectId.ToString(CultureInfo.InvariantCulture);
    _projectInfo = AnalyticsProject.Get(matterKey, datasetKey, projectKey);

    SetCommiyIndexStatusToInitialized(_jobParameter.MatterId);
}
/// <summary>
/// Begins the work: deserializes the reprocess job parameters, then loads the
/// original job's boot parameters and the dataset details they reference.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _reprocessJobParameter = (ConversionReprocessJobBeo) XmlUtility.DeserializeObject(BootParameters, typeof(ConversionReprocessJobBeo));
    // Job ids are machine-generated; parse culture-invariantly (CA1305).
    // NOTE(review): "OrginialJobId" is a misspelled property declared elsewhere
    // and cannot be renamed here without updating its declaration and callers.
    var baseConfig = ReconversionDAO.GetJobConfigInfo(
        Convert.ToInt32(_reprocessJobParameter.OrginialJobId, CultureInfo.InvariantCulture));
    _jobParameter = (AnalyticsProjectInfo) XmlUtility.DeserializeObject(baseConfig.BootParameters, typeof(AnalyticsProjectInfo));
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(_jobParameter.DatasetId);
}
/// <summary>
/// Prepares the categorize job: reads the boot parameters, fetches dataset
/// and project metadata, and flags the commit-index status as initialized.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();

    var parameters = (CategorizeInfo) XmlUtility.DeserializeObject(BootParameters, typeof(CategorizeInfo));
    _jobParameter = parameters;
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(parameters.DatasetId);
    _projectInfo = AnalyticsProject.Get(
        parameters.MatterId.ToString(CultureInfo.InvariantCulture),
        parameters.DatasetId.ToString(CultureInfo.InvariantCulture),
        parameters.ProjectId.ToString(CultureInfo.InvariantCulture));

    SetCommiyIndexStatusToInitialized(parameters.MatterId);
}
/// <summary>
/// Begins the work: deserializes the project configuration, loads the dataset,
/// counts the project documents, and resolves the batch size and the project
/// field id.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (AnalyticsProjectInfo) XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    // Ids and configuration values are machine-generated; parse them with the
    // invariant culture (CA1305) instead of the thread's current culture.
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(
        Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.InvariantCulture));
    _analyticProject = new AnalyticsProject();
    _totalDocumentCount = _analyticProject.GetProjectDocumentsCount(
        Convert.ToInt64(_jobParameter.MatterId, CultureInfo.InvariantCulture),
        _jobParameter.ProjectCollectionId);
    _batchSize = Convert.ToInt32(
        ApplicationConfigurationManager.GetValue("UpdateFieldsBatchSize", "AnalyticsProject"),
        CultureInfo.InvariantCulture);
    _projectFieldId = AnalyticsProject.GetProjectFieldId(_jobParameter.MatterId, _dataset.CollectionId);
}
/// <summary>
/// Begins the work: deserializes the project configuration, loads the dataset
/// details, determines the total project document count and reads the update
/// batch size and project field id.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (AnalyticsProjectInfo) XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    // Machine-generated ids and config values must not be parsed with the
    // ambient culture; use InvariantCulture (CA1305).
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(
        Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.InvariantCulture));
    _analyticProject = new AnalyticsProject();
    _totalDocumentCount = _analyticProject.GetProjectDocumentsCount(
        Convert.ToInt64(_jobParameter.MatterId, CultureInfo.InvariantCulture),
        _jobParameter.ProjectCollectionId);
    _batchSize = Convert.ToInt32(
        ApplicationConfigurationManager.GetValue("UpdateFieldsBatchSize", "AnalyticsProject"),
        CultureInfo.InvariantCulture);
    _projectFieldId = AnalyticsProject.GetProjectFieldId(_jobParameter.MatterId, _dataset.CollectionId);
}
/// <summary>
/// Begins the work: deserializes the project configuration, loads dataset and
/// project field details, and (for add-additional-documents jobs) resolves the
/// include job ids once.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (AnalyticsProjectInfo) XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    _analyticProject = new AnalyticsProject();
    // Ids and configuration values are machine-generated; parse with the
    // invariant culture (CA1305).
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(
        Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.InvariantCulture));
    _projectFieldId = AnalyticsProject.GetProjectFieldId(_jobParameter.MatterId, _dataset.CollectionId);
    _documentBachSize = Convert.ToInt32(
        ApplicationConfigurationManager.GetValue("IncludeDocumentsIntoProjectJobBatchSize", "AnalyticsProject"),
        CultureInfo.InvariantCulture);
    _jobParameter.DocumentSource.CollectionId = _dataset.CollectionId;
    // Only add-additional-documents jobs need the include job ids, and only
    // when they have not already been resolved.
    if (!_jobParameter.IsAddAdditionalDocuments || !string.IsNullOrEmpty(_jobIds))
    {
        return;
    }
    _jobIds = GetIncludeJobIds();
}
/// <summary>
/// Creates an analytic project after stamping the server-side default
/// thresholds and the routing identifiers onto the supplied project info.
/// </summary>
/// <param name="orgId">The organization identifier (unused here).</param>
/// <param name="matterId">The matter identifier.</param>
/// <param name="datasetId">The dataset identifier.</param>
/// <param name="projectId">The project identifier.</param>
/// <param name="projectInfo">The project info to create.</param>
/// <returns>The created project info returned by the analytics service.</returns>
public AnalyticsProjectInfo PostCreateProject(long orgId, long matterId, long datasetId, long projectId, AnalyticsProjectInfo projectInfo)
{
    // Apply the project default values before handing off to the service.
    projectInfo.Confidence = ConfidenceDefaultValue;
    projectInfo.MarginOfError = MarginOfErrorDefaultValue;
    projectInfo.OverturnErrorThreshold = OverturnErrorThresholdValue;
    projectInfo.TargetF1 = TargetF1DefaultValue;
    projectInfo.TargetPrecision = TargetPrecisionDefaultValue;
    projectInfo.TargetRecall = TargetRecallDefaultValue;

    // Stamp the identifiers the service expects on the payload.
    projectInfo.MatterId = matterId;
    projectInfo.DatasetId = datasetId;
    projectInfo.Id = System.Convert.ToInt32(projectId);

    var restClient = GetAnalyticsRestClient();
    // Presumably invalidates the cached admin tree so it is rebuilt after
    // project creation — confirm against the session consumer.
    HttpContext.Current.Session.Remove(SessionAdminTree);

    var matterKey = matterId.ToString(CultureInfo.InvariantCulture);
    var datasetKey = datasetId.ToString(CultureInfo.InvariantCulture);
    return restClient.CreateAnalyticProject(matterKey, datasetKey, projectInfo);
}
/// <summary>
/// Begins the work: deserializes the project configuration, loads the dataset,
/// counts the selected documents, records the initial job progress and wires
/// the document-source collection id.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (AnalyticsProjectInfo) XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    _analyticProject = new AnalyticsProject();
    // Ids and configuration values are machine-generated; parse with the
    // invariant culture (CA1305) instead of the thread culture.
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(
        Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.InvariantCulture));
    _totalDocumentCount = _analyticProject.GetSelectedDocumentsCount(_dataset.CollectionId, _jobParameter, WorkAssignment.JobId);

    // Update job log initial state.
    var jobSummaryKeyValuePairs = new EVKeyValuePairs();
    JobMgmtBO.UpdateJobResult(WorkAssignment.JobId, 0, _totalDocumentCount, jobSummaryKeyValuePairs);

    _documentBachSize = Convert.ToInt32(
        ApplicationConfigurationManager.GetValue("IncludeDocumentsIntoProjectJobBatchSize", "AnalyticsProject"),
        CultureInfo.InvariantCulture);
    _jobParameter.DocumentSource.CollectionId = _dataset.CollectionId;
    IncreaseProcessedDocumentsCount(_totalDocumentCount);
}
/// <summary>
/// Begins the work: loads the job configuration and dataset, records the
/// initial job progress, reuses or creates the analytical index id, and asks
/// the analytics engine to create the index.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (AnalyticsProjectInfo) XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    _analyticProject = new AnalyticsProject();
    // Ids and configuration values are machine-generated; parse with the
    // invariant culture (CA1305) instead of the ambient culture.
    _documentBachSize = Convert.ToInt32(
        ApplicationConfigurationManager.GetValue("IncludeDocumentsIntoProjectInSubSystemJobBatchSize", "AnalyticsProject"),
        CultureInfo.InvariantCulture);
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(
        Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.InvariantCulture));
    _jobParameter.DocumentSource.CollectionId = _dataset.CollectionId;
    _totalDocumentCount = _analyticProject.GetProjectDocumentsCountByTaskId(
        Convert.ToInt64(_jobParameter.MatterId, CultureInfo.InvariantCulture),
        _jobParameter.ProjectCollectionId,
        _jobParameter.PrimarySystemJobId);

    // Update job log initial state.
    var jobSummaryKeyValuePairs = new EVKeyValuePairs();
    JobMgmtBO.UpdateJobResult(WorkAssignment.JobId, 0, _totalDocumentCount, jobSummaryKeyValuePairs);

    // Rerun job or add-additional-documents job: reuse the existing index id
    // if one was already created for this project.
    if (_jobParameter.IsRerunJob || _jobParameter.IsAddAdditionalDocuments)
    {
        _indexId = AnalyticsProject.GetIndexIdForProject(_jobParameter.MatterId, WorkAssignment.JobId,
            _dataset.CollectionId, _jobParameter.ProjectCollectionId, false);
    }
    if (string.IsNullOrEmpty(_indexId))
    {
        _indexId = "idx-" + Guid.NewGuid().ToString().ToLowerInvariant();
        _analyticProject.InsertIndexId(_jobParameter.MatterId, WorkAssignment.JobId,
            _dataset.CollectionId, _jobParameter.ProjectCollectionId, _indexId);
    }
    // Create the index in Spark SVM.
    AnalyticsProject.CreateAnalyticalIndex(_jobParameter.MatterId, WorkAssignment.JobId, _indexId);
    IncreaseProcessedDocumentsCount(_totalDocumentCount);
}
/// <summary>
/// Begins the work: deserializes the project configuration, resolves dataset
/// and project-field details, and lazily resolves the include job ids for
/// add-additional-documents jobs.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (AnalyticsProjectInfo) XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    _analyticProject = new AnalyticsProject();
    // DatasetId and config values are machine-generated; parse with the
    // invariant culture (CA1305).
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(
        Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.InvariantCulture));
    _projectFieldId = AnalyticsProject.GetProjectFieldId(_jobParameter.MatterId, _dataset.CollectionId);
    _documentBachSize = Convert.ToInt32(
        ApplicationConfigurationManager.GetValue("IncludeDocumentsIntoProjectJobBatchSize", "AnalyticsProject"),
        CultureInfo.InvariantCulture);
    _jobParameter.DocumentSource.CollectionId = _dataset.CollectionId;
    // Include job ids are only needed for add-additional-documents jobs and
    // only when not already resolved.
    if (!_jobParameter.IsAddAdditionalDocuments || !string.IsNullOrEmpty(_jobIds))
    {
        return;
    }
    _jobIds = GetIncludeJobIds();
}
/// <summary>
/// Begins the work: loads the job configuration, dataset and document count,
/// records the initial job progress, resolves (or generates) the analytical
/// index id and creates the index in the analytics engine.
/// </summary>
protected override void BeginWork()
{
    BootParameters.ShouldNotBe(null);
    base.BeginWork();
    _jobParameter = (AnalyticsProjectInfo) XmlUtility.DeserializeObject(BootParameters, typeof(AnalyticsProjectInfo));
    _analyticProject = new AnalyticsProject();
    // Machine-generated ids and config values are parsed culture-invariantly
    // (CA1305) rather than with the thread's current culture.
    _documentBachSize = Convert.ToInt32(
        ApplicationConfigurationManager.GetValue("IncludeDocumentsIntoProjectInSubSystemJobBatchSize", "AnalyticsProject"),
        CultureInfo.InvariantCulture);
    _dataset = DataSetBO.GetDataSetDetailForDataSetId(
        Convert.ToInt64(_jobParameter.DatasetId, CultureInfo.InvariantCulture));
    _jobParameter.DocumentSource.CollectionId = _dataset.CollectionId;
    _totalDocumentCount = _analyticProject.GetProjectDocumentsCountByTaskId(
        Convert.ToInt64(_jobParameter.MatterId, CultureInfo.InvariantCulture),
        _jobParameter.ProjectCollectionId,
        _jobParameter.PrimarySystemJobId);

    // Update job log initial state.
    var jobSummaryKeyValuePairs = new EVKeyValuePairs();
    JobMgmtBO.UpdateJobResult(WorkAssignment.JobId, 0, _totalDocumentCount, jobSummaryKeyValuePairs);

    // Rerun or add-additional-documents jobs reuse an existing index id when
    // one was already created for this project.
    if (_jobParameter.IsRerunJob || _jobParameter.IsAddAdditionalDocuments)
    {
        _indexId = AnalyticsProject.GetIndexIdForProject(_jobParameter.MatterId, WorkAssignment.JobId,
            _dataset.CollectionId, _jobParameter.ProjectCollectionId, false);
    }
    if (string.IsNullOrEmpty(_indexId))
    {
        _indexId = "idx-" + Guid.NewGuid().ToString().ToLowerInvariant();
        _analyticProject.InsertIndexId(_jobParameter.MatterId, WorkAssignment.JobId,
            _dataset.CollectionId, _jobParameter.ProjectCollectionId, _indexId);
    }
    // Create the index in Spark SVM.
    AnalyticsProject.CreateAnalyticalIndex(_jobParameter.MatterId, WorkAssignment.JobId, _indexId);
    IncreaseProcessedDocumentsCount(_totalDocumentCount);
}
/// <summary>
/// Mock implementation of the analytics workflow-state update. When more than
/// one state is supplied, resets all cached mock data and then seeds each
/// analysis set (control, training, QC, predict) so its documents match the
/// requested create/review status before delegating to
/// MockWorkflowState.UpdateStates.
/// </summary>
/// <param name="matterId">The matter identifier.</param>
/// <param name="datasetId">The dataset identifier.</param>
/// <param name="projectId">The project identifier.</param>
/// <param name="binderId">The binder identifier (not used by the mock).</param>
/// <param name="workflowState">The desired workflow states to apply.</param>
/// <returns>The updated list of workflow states.</returns>
public List<AnalyticsWorkflowState> UpdateAnalyticWorkflowState(long matterId, long datasetId, long projectId, string binderId, List<AnalyticsWorkflowState> workflowState)
{
    if (workflowState.Count > 1)
    {
        // Reset all cached mock state so this call rebuilds it from scratch.
        mockDocuments = null;
        mockAnalyticsProjectInfo = null;
        mockQcSts = null;
        mockTrainingSetAdditionalDocuments = null;
        analysisSets = null;
        addTrainingDocumentIndex = 0;
        trainingSetCount = 0;
        mockPredictAllSummaryInfo = null;
        resultTrainingSetSummary = new TrainingSetSummary();
        resultTrainingSetSummary.CompletedRoundsSummary = new AnalysisSet();
        resultTrainingSetSummary.CompletedRoundDetails = new List<AnalysisSet>();
        resultTrainingSetSummary.CurrentRoundProgress = new AnalysisSet();
        MockWorkflowState.Initialize();
        MockWorkflowState.ChangeToState = new AnalyticsWorkflowState();

        // Ensure data is initialized as expected for the updated workflow state.
        var random = new Random();

        // ControlSet: seed reviewer coding to match the requested review status.
        // NOTE(review): Find may return null if the state list lacks an entry;
        // the mock assumes each state is always present.
        AnalyticsWorkflowState state = workflowState.Find(p => p.Name == State.ControlSet);
        if (state.ReviewStatus == Status.Inprogress)
        {
            var controlSetDocs = this.GetDocuments(matterId, datasetId, projectId, "controlset", null);
            // ReviewStatus in-progress means some control-set documents should be coded.
            if (controlSetDocs.Documents.Find(d => d.Fields[ReviewerCategoryIndex].Value == NotCoded) != null)
            {
                foreach (var doc in controlSetDocs.Documents)
                {
                    doc.Fields[ReviewerCategoryIndex].Value = (random.Next(2) == 0) ? Relevant : NotCoded;
                }
            }
            UpdateControlSetSummary(this.GetAllAnalysisSets(matterId, datasetId, projectId));
        }
        if (state.ReviewStatus == Status.Completed)
        {
            var controlSetDocs = this.GetDocuments(matterId, datasetId, projectId, "controlset", null);
            // ReviewStatus Completed means all control-set documents should be coded.
            if (controlSetDocs.Documents.Find(d => d.Fields[ReviewerCategoryIndex].Value == NotCoded) != null)
            {
                foreach (var doc in controlSetDocs.Documents)
                {
                    doc.Fields[ReviewerCategoryIndex].Value = (random.Next(2) == 0) ? NotRelevant : Relevant;
                }
            }
            UpdateControlSetSummary(this.GetAllAnalysisSets(matterId, datasetId, projectId));
        }

        // TrainingSet: create rounds and/or code documents per the requested status.
        state = workflowState.Find(p => p.Name == State.TrainingSet);
        if (state.CreateStatus == Status.Completed)
        {
            this.CreateTrainingset(matterId.ToString(CultureInfo.InvariantCulture), datasetId.ToString(CultureInfo.InvariantCulture), projectId.ToString(CultureInfo.InvariantCulture));
            //MockWorkflowState.ChangeToState = workflowState.Find(p => p.Name == State.ControlSet);
            //resultTrainingSetSummary.CompletedRoundsSummary = new AnalysisSet();
            //resultTrainingSetSummary.CompletedRoundDetails = new List<AnalysisSet>();
            //resultTrainingSetSummary.CompletedRoundsSummary.Type = AnalysisSetType.TrainingSet;
            //resultTrainingSetSummary.RoundsCompleted = 0;
            //resultTrainingSetSummary.CurrentRound = 1;
        }
        if (state.ReviewStatus == Status.Inprogress)
        {
            // In-progress: code a fixed sample of three documents with the three
            // possible reviewer outcomes.
            var trainingSetDocs = this.GetDocuments(matterId, datasetId, projectId, "trainingset", CreateQueryContext(AnalysisSetType.TrainingSet, "Training Set 001"));
            trainingSetDocs.Documents[0].Fields[ReviewerCategoryIndex].Value = Relevant;
            trainingSetDocs.Documents[1].Fields[ReviewerCategoryIndex].Value = NotRelevant;
            trainingSetDocs.Documents[2].Fields[ReviewerCategoryIndex].Value = Skipped;
            UpdateTrainingSetSummary(this.GetAllAnalysisSets(matterId, datasetId, projectId), "current");
            resultTrainingSetSummary.CompletedRoundsSummary = new AnalysisSet();
            resultTrainingSetSummary.RoundsCompleted = 0;
            resultTrainingSetSummary.CurrentRound = 1;
        }
        if (state.ReviewStatus == Status.Completed)
        {
            // Completed: code round 001 fully, create round 002, code it fully,
            // then create the next round.
            var queryContext = CreateQueryContext(AnalysisSetType.TrainingSet, "Training Set 001");
            var trainingSetDocs = this.GetDocuments(matterId, datasetId, projectId, "trainingset", queryContext);
            foreach (var doc in trainingSetDocs.Documents)
            {
                doc.Fields[ReviewerCategoryIndex].Value = (random.Next(2) == 0) ? NotRelevant : Relevant;
            }
            this.CreateTrainingset(matterId.ToString(CultureInfo.InvariantCulture), datasetId.ToString(CultureInfo.InvariantCulture), projectId.ToString(CultureInfo.InvariantCulture));
            queryContext.AnalysisSet.Name = "Training Set 002";
            trainingSetDocs = this.GetDocuments(matterId, datasetId, projectId, "trainingset", queryContext);
            foreach (var doc in trainingSetDocs.Documents)
            {
                doc.Fields[ReviewerCategoryIndex].Value = (random.Next(2) == 0) ? NotRelevant : Relevant;
            }
            this.CreateTrainingset(matterId.ToString(CultureInfo.InvariantCulture), datasetId.ToString(CultureInfo.InvariantCulture), projectId.ToString(CultureInfo.InvariantCulture));
        }

        // QcSet: create and/or code the QC set documents per the requested status.
        state = workflowState.Find(p => p.Name == State.QcSet);
        if (state.CreateStatus == Status.Completed)
        {
            this.CreateQcSet(matterId, datasetId, projectId, new QcSet());
        }
        if (state.ReviewStatus == Status.Inprogress)
        {
            var queryContext = CreateQueryContext(AnalysisSetType.QcSet, "QCSet01");
            var qcSetDocs = this.GetDocuments(matterId, datasetId, projectId, "qcset", queryContext);
            foreach (var doc in qcSetDocs.Documents)
            {
                doc.Fields[ReviewerCategoryIndex].Value = (random.Next(2) == 0) ? Relevant : NotCoded;
            }
        }
        if (state.ReviewStatus == Status.Completed)
        {
            var queryContext = CreateQueryContext(AnalysisSetType.QcSet, "QCSet01");
            var qcSetDocs = this.GetDocuments(matterId, datasetId, projectId, "qcset", queryContext);
            foreach (var doc in qcSetDocs.Documents)
            {
                doc.Fields[ReviewerCategoryIndex].Value = (random.Next(2) == 0) ? NotRelevant : Relevant;
            }
        }

        // PredictSet: when completed, every not-yet-coded predicted category
        // becomes Relevant; already-coded values are left untouched.
        state = workflowState.Find(p => p.Name == State.PredictSet);
        if (state.ReviewStatus == Status.Completed)
        {
            if (mockDocuments != null)
            {
                foreach (var doc in mockDocuments.Documents)
                {
                    doc.Fields[PredictedCategoryIndex].Value = doc.Fields[PredictedCategoryIndex].Value == NotCoded ? Relevant : doc.Fields[PredictedCategoryIndex].Value;
                }
            }
        }
    }
    return MockWorkflowState.UpdateStates(workflowState);
}
/// <summary>
/// Validate create project info (mock): the project name "Test" and the field
/// prefix "PC" are treated as invalid; anything else passes.
/// </summary>
/// <param name="matterId">The matter identifier.</param>
/// <param name="dataSetId">The data set identifier.</param>
/// <param name="project">Project Info</param>
/// <returns>The same project instance with its validity flags set.</returns>
public AnalyticsProjectInfo ValidateCreateProjectInfo(string matterId, string dataSetId, AnalyticsProjectInfo project)
{
    var nameIsReserved = project.Name == "Test";
    var prefixIsReserved = project.FieldPreFix == "PC";
    project.IsValidProjectName = !nameIsReserved;
    project.IsValidFieldPrefix = !prefixIsReserved;
    return project;
}
/// <summary>
/// Creates an analytic project: fills in the default analysis thresholds,
/// copies the identifiers onto the payload, clears the cached admin tree from
/// the session and forwards the request to the analytics REST client.
/// </summary>
/// <param name="orgId">The organization identifier (unused here).</param>
/// <param name="matterId">The matter identifier.</param>
/// <param name="datasetId">The dataset identifier.</param>
/// <param name="projectId">The project identifier.</param>
/// <param name="projectInfo">The project info payload to send.</param>
/// <returns>The project info returned by the analytics service.</returns>
public AnalyticsProjectInfo PostCreateProject(long orgId, long matterId, long datasetId, long projectId, AnalyticsProjectInfo projectInfo)
{
    // Set project default values.
    projectInfo.Confidence = ConfidenceDefaultValue;
    projectInfo.MarginOfError = MarginOfErrorDefaultValue;
    projectInfo.OverturnErrorThreshold = OverturnErrorThresholdValue;
    projectInfo.TargetF1 = TargetF1DefaultValue;
    projectInfo.TargetPrecision = TargetPrecisionDefaultValue;
    projectInfo.TargetRecall = TargetRecallDefaultValue;
    projectInfo.MatterId = matterId;
    projectInfo.DatasetId = datasetId;
    projectInfo.Id = System.Convert.ToInt32(projectId);

    var client = GetAnalyticsRestClient();
    // NOTE(review): presumably forces the admin tree to be reloaded after the
    // project is created — confirm against the session consumer.
    HttpContext.Current.Session.Remove(SessionAdminTree);
    return client.CreateAnalyticProject(
        matterId.ToString(CultureInfo.InvariantCulture),
        datasetId.ToString(CultureInfo.InvariantCulture),
        projectInfo);
}
/// <summary>
/// Validates the create-project info via the analytics REST client after
/// stamping the matter and dataset identifiers onto the payload.
/// </summary>
/// <param name="orgId">The organization identifier (unused here).</param>
/// <param name="matterId">The matter identifier.</param>
/// <param name="datasetId">The dataset identifier.</param>
/// <param name="projectId">The project identifier (unused here).</param>
/// <param name="projectInfo">The project info to validate.</param>
/// <returns>The validated project info returned by the service.</returns>
public AnalyticsProjectInfo PostValidateCreateProjectInfo(long orgId, long matterId, long datasetId, long projectId, AnalyticsProjectInfo projectInfo)
{
    projectInfo.MatterId = matterId;
    projectInfo.DatasetId = datasetId;
    var restClient = GetAnalyticsRestClient();
    var matterKey = matterId.ToString(CultureInfo.InvariantCulture);
    var datasetKey = datasetId.ToString(CultureInfo.InvariantCulture);
    return restClient.ValidateCreateProjectInfo(matterKey, datasetKey, projectInfo);
}
/// <summary>
/// Mock analytic project creation. On the first call it marks the
/// project-setup state complete; on subsequent calls it appends five randomly
/// chosen documents from the additional-documents mock resource and bumps the
/// document counters.
/// </summary>
/// <param name="matterId">The matter identifier (numeric string).</param>
/// <param name="dataSetId">The data set identifier (numeric string).</param>
/// <param name="project">The project info (not read by the mock).</param>
/// <returns>The cached mock project info.</returns>
public AnalyticsProjectInfo CreateAnalyticProject(string matterId, string dataSetId, AnalyticsProjectInfo project)
{
    UpdateAnalyticProjectState(matterId, dataSetId);
    if (MockWorkflowState.ProjectSetup.CreateStatus == Status.NotStarted)
    {
        MockWorkflowState.UpdateState(name: State.ProjectSetup, createStatus: Status.Completed, reviewStatus: Status.Completed, isCurrent: true);
    }
    else
    {
        var projectId = 1;
        // Ids are machine-generated numeric strings; parse culture-invariantly (CA1305).
        this.GetDocuments(Convert.ToInt64(matterId, CultureInfo.InvariantCulture), Convert.ToInt64(dataSetId, CultureInfo.InvariantCulture), projectId, "projectsetup", null);
        var resourceName = string.Format(CultureInfo.InvariantCulture, "{0}.matter_{1}-dataset_{2}-project_{3}-additionaldocuments.json", MockDataNameSpace, matterId, dataSetId, projectId);
        var mockData = GetEmbeddedResource(resourceName);
        var mockAddtionalDocuments = JsonConvert.DeserializeObject<DocumentList>(mockData);
        // BUG FIX: Random was previously constructed inside the loop; on .NET
        // Framework, time-seeded instances created in quick succession produce
        // identical sequences, so the same document could be added five times.
        // Construct it once before the loop.
        var random = new Random();
        for (int i = 1; i <= 5; i++)
        {
            // NOTE(review): Next(1, 14) picks indices 1..13, skipping index 0 —
            // presumably intentional for the fixed mock data; confirm.
            mockDocuments.Documents.Add(mockAddtionalDocuments.Documents[random.Next(1, 14)]);
        }
        mockDocuments.Total = mockDocuments.Total + 5;
        mockAnalyticsProjectInfo.TotalDocumentCount = mockAnalyticsProjectInfo.TotalDocumentCount + 5;
    }
    return mockAnalyticsProjectInfo;
}
/// <summary>
/// Get AnalyticsProjectInfo data: lazily loads the mock project info from the
/// embedded JSON resource for the given matter/dataset pair. No-op when the
/// cache is already populated.
/// </summary>
/// <param name="matterId">The matter identifier.</param>
/// <param name="dataSetId">The data set identifier.</param>
private static void UpdateAnalyticProjectState(string matterId, string dataSetId)
{
    if (mockAnalyticsProjectInfo != null)
    {
        return; // Already loaded; keep the cached instance.
    }
    var resourceName = string.Format(
        CultureInfo.InvariantCulture,
        "{0}.matter_{1}-dataset_{2}-create-analytic-project.json",
        MockDataNameSpace, matterId, dataSetId);
    var mockData = GetEmbeddedResource(resourceName);
    mockAnalyticsProjectInfo = JsonConvert.DeserializeObject<AnalyticsProjectInfo>(mockData);
}
/// <summary>
/// Forwards project-info validation to the analytics REST client, first
/// copying the matter and dataset identifiers onto the payload.
/// </summary>
/// <param name="orgId">The organization identifier (unused here).</param>
/// <param name="matterId">The matter identifier.</param>
/// <param name="datasetId">The dataset identifier.</param>
/// <param name="projectId">The project identifier (unused here).</param>
/// <param name="projectInfo">The project info to validate.</param>
/// <returns>The validated project info returned by the service.</returns>
public AnalyticsProjectInfo PostValidateCreateProjectInfo(long orgId, long matterId, long datasetId, long projectId, AnalyticsProjectInfo projectInfo)
{
    projectInfo.MatterId = matterId;
    projectInfo.DatasetId = datasetId;
    return GetAnalyticsRestClient().ValidateCreateProjectInfo(
        matterId.ToString(CultureInfo.InvariantCulture),
        datasetId.ToString(CultureInfo.InvariantCulture),
        projectInfo);
}