public static int AddOriginalDateTag(string[] argv)
{
    ApplicationContext.Current.Start();
    ApplicationServiceContext.Current = ApplicationContext.Current;
    AuthenticationContext.Current = new AuthenticationContext(AuthenticationContext.SystemPrincipal);
    EntitySource.Current = new EntitySource(new PersistenceServiceEntitySource());

    var warehouseService = ApplicationContext.Current.GetService<IAdHocDatawarehouseService>();
    var planService = ApplicationContext.Current.GetService<ICarePlanService>();
    var actPersistence = ApplicationContext.Current.GetService<IStoredQueryDataPersistenceService<SubstanceAdministration>>();
    var tagService = ApplicationContext.Current.GetService<ITagPersistenceService>();

    var oizcpDm = warehouseService.GetDatamart("oizcp");
    if (oizcpDm == null)
    {
        Console.WriteLine("OIZCP datamart does not exist!");
        return -1;
    }

    WaitThreadPool wtp = new WaitThreadPool();
    Guid queryId = Guid.NewGuid();
    int tr = 0, ofs = 0;
    var acts = actPersistence.Query(o => !o.Tags.Any(t => t.TagKey == "originalDate"), queryId, 0, 100, AuthenticationContext.SystemPrincipal, out tr);

    while (ofs < tr)
    {
        foreach (var itm in acts)
        {
            wtp.QueueUserWorkItem((o) =>
            {
                var act = o as Act;
                Console.WriteLine("Set originalDate for {0}", act.Key);
                AuthenticationContext.Current = new AuthenticationContext(AuthenticationContext.SystemPrincipal);

                var actProtocol = act.LoadCollection<ActProtocol>("Protocols").FirstOrDefault();
                if (actProtocol != null)
                {
                    // Get the original date
                    var warehouseObj = warehouseService.AdhocQuery(oizcpDm.Id, new { protocol_id = actProtocol.ProtocolKey, sequence_id = actProtocol.Sequence });
                    if (warehouseObj.Any())
                    {
                        DateTime originalDate = warehouseObj.FirstOrDefault().act_date;
                        var originalEpochTime = (originalDate.ToUniversalTime().Ticks - 621355968000000000) / 10000000;
                        tagService.Save(act.Key.Value, new ActTag("originalDate", originalEpochTime.ToString()));
                    }
                }
            }, itm);
        }

        ofs += 100;
        acts = actPersistence.Query(o => !o.Tags.Any(t => t.TagKey == "originalDate"), queryId, ofs, 100, AuthenticationContext.SystemPrincipal, out tr);
    }

    wtp.WaitOne();
    return 0;
}
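// A minimal sketch of the paging pattern used above, reduced to its skeleton: page through a
// persistence query, queue one work item per record, and block on WaitOne() once everything is
// queued. Only WaitThreadPool members already used in these examples are assumed
// (QueueUserWorkItem(WaitCallback, object) and WaitOne()); FetchPage and ProcessRecord are
// hypothetical placeholders for the persistence query and the per-record work, and the using
// directive for WaitThreadPool comes from whichever implementation the host project references.
using System;
using System.Collections.Generic;

static class PagedQueueSketch
{
    // Hypothetical data source: returns one page of records and the total count.
    static IList<string> FetchPage(int offset, int pageSize, out int total)
    {
        total = 0;
        return new List<string>();
    }

    // Matches the WaitCallback shape; 'state' is the record queued below.
    static void ProcessRecord(object state)
    {
        Console.WriteLine("Processing {0}", state);
    }

    public static void Run()
    {
        var wtp = new WaitThreadPool();
        int total, offset = 0;
        var page = FetchPage(offset, 100, out total);
        while (offset < total)
        {
            foreach (var record in page)
                wtp.QueueUserWorkItem(ProcessRecord, record);   // queue each record with its state
            offset += 100;
            page = FetchPage(offset, 100, out total);
        }
        wtp.WaitOne();   // block until every queued record has been processed
    }
}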
public static void GenerateStock(String[] args)
{
    ApplicationServiceContext.Current = ApplicationContext.Current;
    //cp.Repository = new SeederProtocolRepositoryService();
    ApplicationContext.Current.Start();

    var idp = ApplicationContext.Current.GetService<IDataPersistenceService<Place>>();
    WaitThreadPool wtp = new WaitThreadPool();

    var mat = ApplicationContext.Current.GetService<IDataPersistenceService<Material>>().Query(o => o.ClassConceptKey == EntityClassKeys.Material, AuthenticationContext.SystemPrincipal);
    Console.WriteLine("Database has {0} materials", mat.Count());

    int tr = 0, ofs = 0;
    Console.WriteLine("Querying for places");
    var results = idp.Query(o => o.ClassConceptKey == EntityClassKeys.ServiceDeliveryLocation, ofs, 1000, AuthenticationContext.SystemPrincipal, out tr);
    Console.WriteLine("Will create fake stock for {0} places", tr);

    var r = new Random();
    while (ofs < tr)
    {
        foreach (var p in results)
        {
            wtp.QueueUserWorkItem((parm) =>
            {
                try
                {
                    Place target = parm as Place;
                    Console.WriteLine("Starting seeding for {0} currently {1} relationships", target.Names.FirstOrDefault().Component.FirstOrDefault().Value, target.Relationships.Count);

                    // Add some stock!!! :)
                    foreach (var m in mat)
                    {
                        var mmats = m.Relationships.Where(o => o.RelationshipTypeKey == EntityRelationshipTypeKeys.Instance).OrderBy(o => r.Next()).FirstOrDefault();
                        Console.WriteLine("Selected {0} out of {1} materials", mmats, m.Relationships.Count);

                        var rdp = ApplicationContext.Current.GetService<IDataPersistenceService<EntityRelationship>>();
                        if (mmats != null)
                        {
                            var er = new EntityRelationship(EntityRelationshipTypeKeys.OwnedEntity, mmats.TargetEntityKey)
                            {
                                Quantity = r.Next(0, 100),
                                SourceEntityKey = target.Key,
                                EffectiveVersionSequenceId = target.VersionSequence
                            };
                            Console.WriteLine("{0} > {1} {2}", target.Names.FirstOrDefault().Component.FirstOrDefault().Value, er.Quantity, m.Names.FirstOrDefault().Component.FirstOrDefault().Value);
                            rdp.Insert(er, AuthenticationContext.SystemPrincipal, TransactionMode.Commit);
                        }
                    }
                }
                catch (Exception e)
                {
                    Console.WriteLine(e);
                }
            }, p);
        }

        wtp.WaitOne();
        ofs += 25;
        results = idp.Query(o => o.ClassConceptKey == EntityClassKeys.ServiceDeliveryLocation, ofs, 25, AuthenticationContext.SystemPrincipal, out tr);
    }
}
/// <summary>
/// Scan engine has completed
/// </summary>
private void m_scanEngine_ScanCompleted(object sender, ScanCompletedEventArgs e)
{
    // Enqueue the data
    m_threadPool.QueueUserWorkItem(ProcessImageWorker, e.Image);
    if (!bwUpdate.IsBusy)
    {
        bwUpdate.RunWorkerAsync();
    }
}
public static int Careplan(string[] argv)
{
    var parms = new ParameterParser<CareplanParameters>().Parse(argv);
    ApplicationContext.Current.Start();
    ApplicationServiceContext.Current = ApplicationContext.Current;
    AuthenticationContext.Current = new AuthenticationContext(AuthenticationContext.SystemPrincipal);
    EntitySource.Current = new EntitySource(new PersistenceServiceEntitySource());

    var warehouseService = ApplicationContext.Current.GetService<IAdHocDatawarehouseService>();
    var planService = ApplicationContext.Current.GetService<ICarePlanService>();
    if (warehouseService == null)
        throw new InvalidOperationException("Ad-hoc data warehouse service is not registered");
    if (planService == null)
        throw new InvalidOperationException("Missing care plan service");

    // Warehouse service
    foreach (var cp in planService.Protocols)
        Console.WriteLine("Loaded {0}...", cp.Name);

    // Deploy schema?
    var dataMart = warehouseService.GetDatamart("oizcp");
    if (dataMart == null)
    {
        if (parms.Create)
        {
            Console.WriteLine("Datamart for care plan service doesn't exist, will have to create it...");
            dataMart = warehouseService.CreateDatamart("oizcp", DatamartSchema.Load(typeof(Warehouse).Assembly.GetManifestResourceStream("OizDevTool.Resources.CarePlanWarehouseSchema.xml")));
        }
        else
            throw new InvalidOperationException("Warehouse schema does not exist!");
    }

    // Truncate?
    if (parms.Truncate)
        warehouseService.Truncate(dataMart.Id);

    // Now we want to calculate
    var patientPersistence = ApplicationContext.Current.GetService<IStoredQueryDataPersistenceService<Patient>>();
    var lastRefresh = DateTime.Parse(parms.Since ?? "0001-01-01");

    // Should we calculate?
    var warehousePatients = warehouseService.StoredQuery(dataMart.Id, "consistency", new { });
    Guid queryId = Guid.NewGuid();
    int tr = 1, ofs = 0, calc = 0, tq = 0;
    WaitThreadPool wtp = new WaitThreadPool(Environment.ProcessorCount * 2);
    DateTime start = DateTime.Now;

    // Type filters
    List<Guid> typeFilter = new List<Guid>();
    if (parms.ActTypes?.Count > 0)
    {
        var cpcr = ApplicationContext.Current.GetService<IConceptRepositoryService>();
        foreach (var itm in parms.ActTypes)
            typeFilter.Add(cpcr.GetConcept(itm).Key.Value);
    }

    while (ofs < tr)
    {
        var prodPatients = patientPersistence.Query(o => o.StatusConcept.Mnemonic != "OBSOLETE" && o.ModifiedOn > lastRefresh, queryId, ofs, 100, AuthenticationContext.SystemPrincipal, out tr);
        ofs += 100;

        foreach (var p in prodPatients.Where(o => !warehousePatients.Any(w => w.patient_id == o.Key)))
        {
            tq++;
            wtp.QueueUserWorkItem(state =>
            {
                AuthenticationContext.Current = new AuthenticationContext(AuthenticationContext.SystemPrincipal);
                Patient pState = (Patient)state;
                List<dynamic> warehousePlan = new List<dynamic>();
                Interlocked.Increment(ref calc);

                lock (parms)
                {
                    var ips = (((double)(DateTime.Now - start).Ticks / calc) * (tq - calc));
                    Console.CursorLeft = 0;
                    Console.Write(" Calculating care plan {0}/{1} <<Scan: {4} ({5:0%})>> ({2:0%}) [ETA: {3}] {6:0.##} R/S ", calc, tq, (float)calc / tq, new TimeSpan((long)ips).ToString("hh'h 'mm'm 'ss's'"), ofs, (float)ofs / tr, ((double)calc / (double)(DateTime.Now - start).TotalSeconds));
                }

                var data = p; // ApplicationContext.Current.GetService<IDataPersistenceService<Patient>>().Get(p.Key.Value);

                // First, we want a copy of the warehouse
                var existing = warehouseService.AdhocQuery(dataMart.Id, new { patient_id = data.Key.Value });
                warehouseService.Delete(dataMart.Id, new { patient_id = data.Key.Value });
                var careplanService = ApplicationContext.Current.GetService<ICarePlanService>();

                // Now calculate the care plan...
                var carePlan = careplanService.CreateCarePlan(data, false, new Dictionary<String, Object>() { { "isBackground", true } });
                warehousePlan.AddRange(carePlan.Action.Select(o => new
                {
                    creation_date = DateTime.Now,
                    patient_id = data.Key.Value,
                    location_id = data.Relationships.FirstOrDefault(r => r.RelationshipTypeKey == EntityRelationshipTypeKeys.DedicatedServiceDeliveryLocation || r.RelationshipType?.Mnemonic == "DedicatedServiceDeliveryLocation")?.TargetEntityKey.Value,
                    act_id = o.Key.Value,
                    class_id = o.ClassConceptKey.Value,
                    type_id = o.TypeConceptKey.Value,
                    protocol_id = o.Protocols.FirstOrDefault()?.ProtocolKey,
                    min_date = o.StartTime?.DateTime.Date,
                    max_date = o.StopTime?.DateTime.Date,
                    act_date = o.ActTime.DateTime.Date,
                    product_id = o.Participations?.FirstOrDefault(r => r.ParticipationRoleKey == ActParticipationKey.Product || r.ParticipationRole?.Mnemonic == "Product")?.PlayerEntityKey.Value,
                    sequence_id = o.Protocols.FirstOrDefault()?.Sequence,
                    dose_seq = (o as SubstanceAdministration)?.SequenceId,
                    fulfilled = false
                }));

                var fulfillCalc = data.LoadCollection<ActParticipation>("Participations");
                if (typeFilter.Count > 0)
                    fulfillCalc = fulfillCalc.Where(o => typeFilter.Contains(o.LoadProperty<Act>("Act").TypeConceptKey ?? Guid.Empty));

                // Are there existing care plan items that don't appear in the calculated care plan? If so, the patient has completed those steps and we need to indicate that.
                if (existing.Any(o => !o.fulfilled)) // != true is needed because it can be null.
                {
                    var fulfilled = existing.Where(o => !warehousePlan.Any(pl => pl.protocol_id == o.protocol_id && pl.sequence_id == o.sequence_id));
                    warehousePlan.AddRange(fulfilled.Select(o => new
                    {
                        creation_date = o.creation_date,
                        patient_id = o.patient_id,
                        location_id = o.location_id,
                        act_id = data.LoadCollection<ActParticipation>("Participations").FirstOrDefault(ap => ap.LoadProperty<Act>("Act").LoadCollection<ActProtocol>("Protocols").Any(pr => pr.ProtocolKey == o.protocol_id && pr.Sequence == o.sequence_id))?.Key ?? o.act_id,
                        class_id = o.class_id,
                        type_id = o.type_id,
                        protocol_id = o.protocol_id,
                        min_date = o.min_date,
                        max_date = o.max_date,
                        act_date = o.act_date,
                        product_id = o.product_id,
                        sequence_id = o.sequence_id,
                        dose_seq = o.dose_seq,
                        fulfilled = true
                    }));
                }
                else if (!parms.NoFulfill && fulfillCalc.Any()) // nothing calculated yet but there are previously completed steps, this is a little more complex
                {
                    // When nothing has been calculated yet, strip away each act done as part of the protocol and re-calculate when that action was supposed to occur.
                    // For example: for PCV we strip away PCV2 and re-run the plan to get the proposal of PCV3, then strip away PCV1 and re-run the plan to get the proposal of PCV2.
                    var acts = fulfillCalc.Select(o => o.LoadProperty<Act>("Act"));
                    foreach (var itm in acts.GroupBy(o => o.LoadCollection<ActProtocol>("Protocols").FirstOrDefault()?.ProtocolKey ?? Guid.Empty))
                    {
                        var steps = itm.OrderByDescending(o => o.LoadCollection<ActProtocol>("Protocols").FirstOrDefault()?.Sequence);
                        var patientClone = data.Clone() as Patient;
                        patientClone.Participations = new List<ActParticipation>(data.Participations);

                        foreach (var s in steps)
                        {
                            patientClone.Participations.RemoveAll(o => o.ActKey == s.Key);

                            // Run protocol
                            IEnumerable<Act> candidate;
                            if (itm.Key == Guid.Empty) // There is no protocol identifier
                            {
                                var tempPlan = careplanService.CreateCarePlan(patientClone, false, new Dictionary<String, Object>() { { "isBackground", true }, { "ignoreEntry", true } });
                                if (tempPlan.Action.Count == 0)
                                    continue;
                                candidate = tempPlan.Action.OfType<SubstanceAdministration>().Where(o => o.SequenceId == (s as SubstanceAdministration)?.SequenceId && o.Participations.FirstOrDefault(pt => pt.ParticipationRoleKey == ActParticipationKey.Product).PlayerEntityKey == s.Participations.FirstOrDefault(pt => pt.ParticipationRoleKey == ActParticipationKey.Product).PlayerEntityKey);
                                if (candidate.Count() != 1)
                                    continue;
                            }
                            else
                            {
                                var tempPlan = careplanService.CreateCarePlan(patientClone, false, new Dictionary<String, Object>() { { "isBackground", true }, { "ignoreEntry", true } }, itm.Key);
                                if (tempPlan.Action.Count == 0)
                                    continue;
                                candidate = tempPlan.Action.Where(o => o.Protocols.FirstOrDefault().Sequence == s.Protocols.FirstOrDefault().Sequence);
                                if (candidate.Count() != 1)
                                {
                                    candidate = tempPlan.Action.OfType<SubstanceAdministration>().Where(o => o.SequenceId == (s as SubstanceAdministration)?.SequenceId);
                                    if (candidate.Count() != 1)
                                        continue;
                                }
                            }

                            var planned = candidate.FirstOrDefault();
                            warehousePlan.Add(new
                            {
                                creation_date = DateTime.Now,
                                patient_id = data.Key.Value,
                                location_id = data.Relationships.FirstOrDefault(r => r.RelationshipTypeKey == EntityRelationshipTypeKeys.DedicatedServiceDeliveryLocation || r.RelationshipType?.Mnemonic == "DedicatedServiceDeliveryLocation")?.TargetEntityKey.Value,
                                act_id = s.Key,
                                class_id = planned.ClassConceptKey.Value,
                                type_id = planned.TypeConceptKey.Value,
                                protocol_id = itm.Key,
                                min_date = planned.StartTime?.DateTime.Date,
                                max_date = planned.StopTime?.DateTime.Date,
                                act_date = planned.ActTime.DateTime.Date,
                                product_id = planned.Participations?.FirstOrDefault(r => r.ParticipationRoleKey == ActParticipationKey.Product || r.ParticipationRole?.Mnemonic == "Product")?.PlayerEntityKey.Value,
                                sequence_id = planned.Protocols.FirstOrDefault()?.Sequence,
                                dose_seq = (planned as SubstanceAdministration)?.SequenceId,
                                fulfilled = true
                            });
                        }
                    }
                }

                // Insert plans
                warehouseService.Add(dataMart.Id, warehousePlan);
            }, p);
        }
    }

    wtp.WaitOne();
    return 0;
}
/// <summary>
/// Apply template
/// </summary>
/// <param name="template"></param>
/// <param name="image"></param>
public OmrPageOutput ApplyTemplate(OmrTemplate template, ScannedImage image)
{
    // Image ready for scan
    if (!image.IsReadyForScan)
    {
        if (!image.IsScannable)
            image.Analyze();
        image.PrepareProcessing();
    }

    // Page output
    OmrPageOutput retVal = new OmrPageOutput()
    {
        Id = image.TemplateName + DateTime.Now.ToString("yyyyMMddHHmmss"),
        TemplateId = image.TemplateName,
        Parameters = image.Parameters,
        StartTime = DateTime.Now,
        Template = template
    };

    // Save directory for output images
    string saveDirectory = String.Empty;
    var parmStr = new StringBuilder();
    if (this.SaveIntermediaryImages)
    {
        if (image.Parameters != null)
            foreach (var pv in image.Parameters)
                parmStr.AppendFormat("{0}.", pv);

        retVal.RefImages = new List<string>()
        {
            String.Format("{0}-{1}-init.bmp", retVal.Id, parmStr),
            String.Format("{0}-{1}-tx.bmp", retVal.Id, parmStr),
            String.Format("{0}-{1}-fields.bmp", retVal.Id, parmStr),
            String.Format("{0}-{1}-gs.bmp", retVal.Id, parmStr),
            String.Format("{0}-{1}-bw.bmp", retVal.Id, parmStr),
            String.Format("{0}-{1}-inv.bmp", retVal.Id, parmStr)
        };

        saveDirectory = Path.Combine(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), "imgproc");
        if (!Directory.Exists(saveDirectory))
            Directory.CreateDirectory(saveDirectory);
        image.Image.Save(Path.Combine(saveDirectory, string.Format("{0}-{1}-init.bmp", DateTime.Now.ToString("yyyyMMddHHmmss"), parmStr)));
    }

    // First, we want to get the image from the scanned image and translate it to the original
    // position in the template
    Bitmap bmp = null;
    try
    {
        bmp = new Bitmap((int)template.BottomRight.X, (int)template.BottomRight.Y, System.Drawing.Imaging.PixelFormat.Format24bppRgb);

        // Scale
        float width = template.TopRight.X - template.TopLeft.X,
              height = template.BottomLeft.Y - template.TopLeft.Y;

        // Translate to original
        using (Graphics g = Graphics.FromImage(bmp))
        {
            ResizeBicubic bc = new ResizeBicubic((int)width, (int)height);
            g.DrawImage(bc.Apply((Bitmap)image.Image), template.TopLeft.X, template.TopLeft.Y);
        }

        if (this.SaveIntermediaryImages)
            bmp.Save(Path.Combine(saveDirectory, string.Format("{0}-{1}-tx.bmp", DateTime.Now.ToString("yyyyMMddHHmmss"), parmStr)));

        // Now try to do hit from the template
        if (this.SaveIntermediaryImages)
        {
            using (var tbmp = bmp.Clone() as Bitmap)
            {
                using (Graphics g = Graphics.FromImage(tbmp))
                {
                    foreach (var field in template.Fields)
                    {
                        g.DrawRectangle(Pens.Black, field.TopLeft.X, field.TopLeft.Y, field.TopRight.X - field.TopLeft.X, field.BottomLeft.Y - field.TopLeft.Y);
                        g.DrawString(field.Id, SystemFonts.CaptionFont, Brushes.Black, field.TopLeft);
                    }
                }
                tbmp.Save(Path.Combine(saveDirectory, string.Format("{0}-{1}-fields.bmp", DateTime.Now.ToString("yyyyMMddHHmmss"), parmStr)));
            }
        }

        // Now convert to Grayscale
        GrayscaleY grayFilter = new GrayscaleY();
        var gray = grayFilter.Apply(bmp);
        bmp.Dispose();
        bmp = gray;

        if (this.SaveIntermediaryImages)
            bmp.Save(Path.Combine(saveDirectory, string.Format("{0}-{1}-gs.bmp", DateTime.Now.ToString("yyyyMMddHHmmss"), parmStr)));

        // Prepare answers
        Dictionary<OmrQuestionField, OmrOutputData> hitFields = new Dictionary<OmrQuestionField, OmrOutputData>();
        BarcodeReader barScan = new BarcodeReader();
        barScan.Options.UseCode39ExtendedMode = true;
        barScan.Options.UseCode39RelaxedExtendedMode = true;
        barScan.Options.TryHarder = true;
        barScan.TryInverted = true;
        barScan.Options.PureBarcode = false;
        barScan.AutoRotate = true;

        foreach (var itm in template.Fields.Where(o => o is OmrBarcodeField))
        {
            PointF position = itm.TopLeft;
            SizeF size = new SizeF(itm.TopRight.X - itm.TopLeft.X, itm.BottomLeft.Y - itm.TopLeft.Y);
            using (var areaOfInterest = new Crop(new Rectangle((int)position.X, (int)position.Y, (int)size.Width, (int)size.Height)).Apply(bmp))
            {
                // Scan the barcode
                var result = barScan.Decode(areaOfInterest);
                if (result != null)
                    hitFields.Add(itm, new OmrBarcodeData()
                    {
                        BarcodeData = result.Text,
                        Format = result.BarcodeFormat,
                        Id = itm.Id,
                        TopLeft = new PointF(result.ResultPoints[0].X + position.X, result.ResultPoints[0].Y + position.Y),
                        BottomRight = new PointF(result.ResultPoints[1].X + position.X, result.ResultPoints[0].Y + position.Y + 10)
                    });
            }
        }

        // Now binarize
        Threshold binaryThreshold = new Threshold(template.ScanThreshold);
        binaryThreshold.ApplyInPlace(bmp);
        if (this.SaveIntermediaryImages)
            bmp.Save(Path.Combine(saveDirectory, string.Format("{0}-{1}-bw.bmp", DateTime.Now.ToString("yyyyMMddHHmmss"), parmStr)));

        // Set return parameters
        String tAnalyzeFile = Path.Combine(Path.GetTempPath(), Path.GetTempFileName());
        bmp.Save(tAnalyzeFile, System.Drawing.Imaging.ImageFormat.Jpeg);
        retVal.AnalyzedImage = tAnalyzeFile;
        retVal.BottomRight = new PointF(bmp.Width, bmp.Height);

        // Now Invert
        Invert invertFilter = new Invert();
        invertFilter.ApplyInPlace(bmp);
        if (this.SaveIntermediaryImages)
            bmp.Save(Path.Combine(saveDirectory, string.Format("{0}-{1}-inv.bmp", DateTime.Now.ToString("yyyyMMddHHmmss"), parmStr)));

        // Crop out areas of interest
        List<KeyValuePair<OmrQuestionField, Bitmap>> areasOfInterest = new List<KeyValuePair<OmrQuestionField, Bitmap>>();
        foreach (var itm in template.Fields.Where(o => o is OmrBubbleField))
        {
            PointF position = itm.TopLeft;
            SizeF size = new SizeF(itm.TopRight.X - itm.TopLeft.X, itm.BottomLeft.Y - itm.TopLeft.Y);
            areasOfInterest.Add(new KeyValuePair<OmrQuestionField, Bitmap>(
                itm,
                new Crop(new Rectangle((int)position.X, (int)position.Y, (int)size.Width, (int)size.Height)).Apply(bmp))
            );
        }

        // Queue analysis
        WaitThreadPool wtp = new WaitThreadPool();
        Object syncLock = new object();

        foreach (var itm in areasOfInterest)
        {
            wtp.QueueUserWorkItem(img =>
            {
                var parm = (KeyValuePair<OmrQuestionField, Bitmap>)itm;
                try
                {
                    var areaOfInterest = parm.Value;
                    var field = parm.Key;

                    BlobCounter blobCounter = new BlobCounter();
                    blobCounter.FilterBlobs = true;

                    // Check for circles
                    blobCounter.ProcessImage(areaOfInterest);
                    Blob[] blobs = blobCounter.GetObjectsInformation();
                    var blob = blobs.FirstOrDefault(o => o.Area == blobs.Max(b => b.Area));
                    if (blob != null)
                    {
                        //var area = new AForge.Imaging.ImageStatistics(blob).PixelsCountWithoutBlack;
                        if (blob.Area < 30)
                            return;

                        var bubbleField = field as OmrBubbleField;
                        lock (syncLock)
                            hitFields.Add(field, new OmrBubbleData()
                            {
                                Id = field.Id,
                                Key = bubbleField.Question,
                                Value = bubbleField.Value,
                                TopLeft = new PointF(blob.Rectangle.X + field.TopLeft.X, blob.Rectangle.Y + field.TopLeft.Y),
                                BottomRight = new PointF(blob.Rectangle.X + blob.Rectangle.Width + field.TopLeft.X, blob.Rectangle.Y + blob.Rectangle.Height + field.TopLeft.Y),
                                BlobArea = blob.Area
                            });
                    }
                }
                catch (Exception e)
                {
                    Trace.TraceError(e.ToString());
                }
                finally
                {
                    parm.Value.Dispose();
                }
            }, itm);
        }
        wtp.WaitOne();

        // Organize the response
        foreach (var res in hitFields)
        {
            if (String.IsNullOrEmpty(res.Key.AnswerRowGroup))
                this.AddAnswerToOutputCollection(retVal, res);
            else
            {
                // Rows of data
                OmrRowData rowGroup = retVal.Details.Find(o => o.Id == res.Key.AnswerRowGroup) as OmrRowData;
                if (rowGroup == null)
                {
                    rowGroup = new OmrRowData()
                    {
                        Id = res.Key.AnswerRowGroup
                    };
                    retVal.Details.Add(rowGroup);
                }
                this.AddAnswerToOutputCollection(rowGroup, res);
            }
        }

        // Remove temporary images
        //foreach (var f in retVal.RefImages)
        //    File.Delete(Path.Combine(saveDirectory, f));

        // Outcome is success
        retVal.Outcome = OmrScanOutcome.Success;
    }
    catch (Exception e)
    {
        retVal.Outcome = OmrScanOutcome.Failure;
        retVal.ErrorMessage = e.Message;
        Trace.TraceError(e.ToString());
    }
    finally
    {
        retVal.StopTime = DateTime.Now;
        bmp.Dispose();
    }

    return retVal;
}
/// <summary>
/// Get recent activity
/// </summary>
public RegistrationEventCollection GetRecentActivity(TimestampSet timeRange, int offset, int count, bool identifierOnly)
{
    // Get all Services
    IAuditorService auditSvc = ApplicationContext.CurrentContext.GetService(typeof(IAuditorService)) as IAuditorService;
    IDataRegistrationService regSvc = ApplicationContext.CurrentContext.GetService(typeof(IDataRegistrationService)) as IDataRegistrationService;
    IDataPersistenceService repSvc = ApplicationContext.CurrentContext.GetService(typeof(IDataPersistenceService)) as IDataPersistenceService;

    // Audit message
    AuditData audit = this.ConstructAuditData(ActionType.Read, EventIdentifierType.Export);
    audit.EventTypeCode = new CodeValue("ADM_GetRegistrations");

    try
    {
        // Result identifiers
        VersionedDomainIdentifier[] vids = null;
        var dummyQuery = new QueryEvent();
        dummyQuery.Add(new RegistrationEvent() { EventClassifier = RegistrationEventType.Register, EffectiveTime = timeRange }, "SUBJ", SVC.Core.ComponentModel.HealthServiceRecordSiteRoleType.SubjectOf, null);
        vids = regSvc.QueryRecord(dummyQuery);

        RegistrationEventCollection retVal = new RegistrationEventCollection();
        Object syncLock = new object();
        retVal.Count = vids.Length;

        // Now fetch each one asynchronously
        if (!identifierOnly)
        {
            using (WaitThreadPool thdPool = new WaitThreadPool(Environment.ProcessorCount * 2))
            {
                foreach (var id in vids.Skip(offset).Take(count))
                {
                    thdPool.QueueUserWorkItem(
                        delegate(object state)
                        {
                            try
                            {
                                var itm = repSvc.GetContainer(state as VersionedDomainIdentifier, true);
                                lock (syncLock)
                                    retVal.Event.Add(itm as RegistrationEvent);
                            }
                            catch (Exception e)
                            {
                                Trace.TraceError("Could not fetch result {0} : {1}", (state as VersionedDomainIdentifier).Identifier, e.ToString());
                            }
                        }, id);
                }

                // Wait until fetch is done
                thdPool.WaitOne(new TimeSpan(0, 0, 30), false);
            }

            //retVal.Event.Sort((a, b) => b.Timestamp.CompareTo(a.Timestamp));

            // Add audit data
            foreach (var res in retVal.Event)
            {
                audit.AuditableObjects.Add(new AuditableObject()
                {
                    IDTypeCode = AuditableObjectIdType.ReportNumber,
                    LifecycleType = AuditableObjectLifecycle.Export,
                    ObjectId = String.Format("{0}^^^&{1}&ISO", res.AlternateIdentifier.Identifier, res.AlternateIdentifier.Domain),
                    Role = AuditableObjectRole.MasterFile,
                    Type = AuditableObjectType.SystemObject,
                    QueryData = Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes("loadFast=true"))
                });
            }
        }

        return retVal;
    }
    catch (Exception e)
    {
        Trace.TraceError("Could not execute GetRegistrations : {0}", e.ToString());
        audit.Outcome = OutcomeIndicator.EpicFail;
#if DEBUG
        throw new FaultException(new FaultReason(e.ToString()), new FaultCode(e.GetType().Name));
#else
        throw new FaultException(new FaultReason(e.Message), new FaultCode(e.GetType().Name));
#endif
    }
    finally
    {
        if (auditSvc != null)
            auditSvc.SendAudit(audit);
    }
}
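// A minimal sketch of the bounded fan-out used above: a pool sized to the machine, results
// collected under a lock, a timed WaitOne so a slow backend cannot hang the caller, and the pool
// disposed via 'using'. Only WaitThreadPool members already shown in these examples are assumed
// (the int constructor, QueueUserWorkItem(WaitCallback, object), WaitOne(TimeSpan, bool) and
// Dispose()); FetchOne is a hypothetical stand-in for the repository call.
using System;
using System.Collections.Generic;

static class BoundedFetchSketch
{
    // Hypothetical backend call executed once per identifier.
    static string FetchOne(Guid id)
    {
        return id.ToString("N");
    }

    public static List<string> FetchAll(IEnumerable<Guid> ids)
    {
        var results = new List<string>();
        var syncLock = new object();

        using (var thdPool = new WaitThreadPool(Environment.ProcessorCount * 2))
        {
            foreach (var id in ids)
                thdPool.QueueUserWorkItem(state =>
                {
                    var item = FetchOne((Guid)state);
                    lock (syncLock)               // the result list is shared across pool threads
                        results.Add(item);
                }, id);

            // Give the whole batch 30 seconds rather than waiting indefinitely
            thdPool.WaitOne(new TimeSpan(0, 0, 30), false);
        }
        return results;
    }
}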
static void Main(string[] args)
{
    Console.WriteLine("XML ITS1 Formatter Pregenerator Utility");
    Console.WriteLine("Copyright (C) 2012, Mohawk College of Applied Arts and Technology");

    ParameterParser<Parameters> parser = new ParameterParser<Parameters>();
    try
    {
        var arguments = parser.Parse(args);
        if (arguments.ShowHelp)
        {
            ShowHelp();
            return;
        }

        // Generate formatter utility
        MARC.Everest.Formatters.XML.ITS1.CodeGen.TypeFormatterCreator creator = new MARC.Everest.Formatters.XML.ITS1.CodeGen.TypeFormatterCreator();

        // Create code namespace
        CodeNamespace ns = new CodeNamespace(arguments.TargetNs);

        // Load assembly
        Assembly rmimAssembly = Assembly.LoadFile(arguments.AssemblyFile);
        List<Type> rmimTypes = new List<Type>();
        if (arguments.Interactions != null)
        {
            foreach (var s in arguments.Interactions)
                rmimTypes.Add(rmimAssembly.GetTypes().First(o => o.Name == s));
        }
        else
            rmimTypes.AddRange(rmimAssembly.GetTypes());

        // Validate parameters
        if (rmimTypes.Count == 0)
            throw new ArgumentException("Type array must have at least one element", "t");

        // Create a list of types (a todo list) that represent the types we want to format
        List<Type> types = new List<Type>(200);

        // Iterate through the types and create formatters
        foreach (Type type in rmimTypes)
        {
            if (type.Assembly != rmimAssembly)
                throw new InvalidOperationException("All types must belong to the same revision assembly");
            GetUniqueTypes(type, types, true);
        }

        // Wait thread pool
        WaitThreadPool wtp = new WaitThreadPool();
        try
        {
            // Create type definitions
            foreach (Type t in types)
            {
                // Check if we need to gen this type
                if (t.GetCustomAttributes(typeof(StructureAttribute), false).Length == 0 || s_formatterGenerated.Contains(t))
                    continue;
                s_formatterGenerated.Add(t);

                // Structure Attribute
                StructureAttribute sta = t.GetCustomAttributes(typeof(StructureAttribute), false)[0] as StructureAttribute;

                // Type formatter creator
                TypeFormatterCreator crtr = new TypeFormatterCreator();

                // Reset event
                crtr.CodeTypeDeclarationCompleted += new CreateTypeFormatterCompletedDelegate(delegate(CodeTypeDeclaration result)
                {
                    // Add to the code currently created
                    if (result != null)
                        lock (ns)
                            ns.Types.Add(result);
                });

                // Helper result
                wtp.QueueUserWorkItem(crtr.CreateTypeFormatter, t);
            }

            // Wait for final pool to clear
            wtp.WaitOne();
        }
        finally
        {
            wtp.Dispose();
        }

        if (ns.Types.Count == 0)
        {
            Console.WriteLine("Didn't create any formatter helpers...");
            return;
        }

        // Setup compiler and referenced assemblies
        CSharpCodeProvider csharpCodeProvider = new CSharpCodeProvider();
        using (TextWriter tw = File.CreateText(arguments.Output ?? "output.cs"))
            csharpCodeProvider.GenerateCodeFromNamespace(ns, tw, new System.CodeDom.Compiler.CodeGeneratorOptions() { IndentString = "\t" });
    }
    catch (ArgumentNullException)
    {
        ShowHelp();
    }
    catch (Exception e)
    {
        Console.WriteLine(e.ToString());
        return;
    }
    finally
    {
    }

#if DEBUG
    Console.ReadKey();
#endif
}
public IdentifiedData Search(string resourceType)
{
    this.ThrowIfNotReady();
    try
    {
        var handler = ResourceHandlerUtil.Current.GetResourceHandler(resourceType);
        if (handler != null)
        {
            String offset = WebOperationContext.Current.IncomingRequest.UriTemplateMatch.QueryParameters["_offset"],
                   count = WebOperationContext.Current.IncomingRequest.UriTemplateMatch.QueryParameters["_count"];

            var query = WebOperationContext.Current.IncomingRequest.UriTemplateMatch.QueryParameters.ToQuery();

            // Modified on?
            if (WebOperationContext.Current.IncomingRequest.IfModifiedSince.HasValue)
                query.Add("modifiedOn", ">" + WebOperationContext.Current.IncomingRequest.IfModifiedSince.Value.ToString("o"));

            // No obsoletion time?
            if (typeof(BaseEntityData).IsAssignableFrom(handler.Type) && !query.ContainsKey("obsoletionTime"))
                query.Add("obsoletionTime", "null");

            int totalResults = 0;

            // Lean mode
            var lean = WebOperationContext.Current.IncomingRequest.UriTemplateMatch.QueryParameters["_lean"];
            bool parsedLean = false;
            bool.TryParse(lean, out parsedLean);

            var retVal = handler.Query(query, Int32.Parse(offset ?? "0"), Int32.Parse(count ?? "100"), out totalResults).Select(o => o.GetLocked()).ToList();
            WebOperationContext.Current.OutgoingResponse.LastModified = retVal.OrderByDescending(o => o.ModifiedOn).FirstOrDefault()?.ModifiedOn.DateTime ?? DateTime.Now;

            // Last modification time and not modified conditions
            if ((WebOperationContext.Current.IncomingRequest.IfModifiedSince.HasValue || WebOperationContext.Current.IncomingRequest.IfNoneMatch != null) && totalResults == 0)
            {
                WebOperationContext.Current.OutgoingResponse.StatusCode = HttpStatusCode.NotModified;
                return null;
            }
            else
            {
                if (query.ContainsKey("_all") || query.ContainsKey("_expand") || query.ContainsKey("_exclude"))
                {
                    using (WaitThreadPool wtp = new WaitThreadPool())
                    {
                        foreach (var itm in retVal)
                        {
                            wtp.QueueUserWorkItem((o) =>
                            {
                                try
                                {
                                    var i = o as IdentifiedData;
                                    ObjectExpander.ExpandProperties(i, query);
                                    ObjectExpander.ExcludeProperties(i, query);
                                }
                                catch (Exception e)
                                {
                                    this.m_traceSource.TraceEvent(TraceEventType.Error, e.HResult, "Error setting properties: {0}", e);
                                }
                            }, itm);
                        }
                        wtp.WaitOne();
                    }
                }
                return BundleUtil.CreateBundle(retVal, totalResults, Int32.Parse(offset ?? "0"), parsedLean);
            }
        }
        else
            throw new FileNotFoundException(resourceType);
    }
    catch (Exception e)
    {
        var remoteEndpoint = OperationContext.Current.IncomingMessageProperties[RemoteEndpointMessageProperty.Name] as RemoteEndpointMessageProperty;
        this.m_traceSource.TraceEvent(TraceEventType.Error, e.HResult, String.Format("{0} - {1}", remoteEndpoint?.Address, e.ToString()));
        throw;
    }
}
public ArticleCollection Process(ClassRepository rep)
{
    ArticleCollection artc = new ArticleCollection();

    List<Feature> features = new List<Feature>();
    foreach (KeyValuePair<string, Feature> kv in rep)
        features.Add(kv.Value);

    // Sort so classes are processed first
    features.Sort(delegate(Feature a, Feature b)
    {
        if ((a is SubSystem) && !(b is SubSystem))
            return -1;
        else if ((b is SubSystem) && !(a is SubSystem))
            return 1;
        else if ((a is Class) && !(b is Class))
            return 1;
        else if ((b is Class) && !(a is Class))
            return -1;
        else
            return a.GetType().Name.CompareTo(b.GetType().Name);
    });

    //var vocabArticle = new MohawkCollege.EHR.gpmr.Pipeline.Renderer.Deki.Article.Article()
    //{
    //    Title = "Vocabulary",
    //    Children = new ArticleCollection()
    //};
    //vocabArticle.Children.Add(new Article.Article() { Title = "Code Systems" });
    //vocabArticle.Children.Add(new Article.Article() { Title = "Concept Domains" });
    //vocabArticle.Children.Add(new Article.Article() { Title = "Value Sets" });
    //artc.Add(vocabArticle);

    WaitThreadPool wtp = new WaitThreadPool();

    // A thread that does the doohickey thing
    Thread doohickeyThread = new Thread((ThreadStart)delegate()
    {
        string[] hickeythings = { "|", "/", "-", "\\" };
        int hickeyThingCount = 0;
        try
        {
            while (true)
            {
                int cPosX = Console.CursorLeft, cPosY = Console.CursorTop;
                Console.SetCursorPosition(1, cPosY);
                Console.Write(hickeythings[hickeyThingCount++ % hickeythings.Length]);
                Console.SetCursorPosition(cPosX, cPosY);
                Thread.Sleep(1000);
            }
        }
        catch { }
    });
    doohickeyThread.Start();

    // Loop through each feature
    foreach (Feature f in features)
    {
        // Find the feature template
        FeatureTemplate ftpl = NonParameterizedTemplate.Spawn(FindTemplate(f.GetType().FullName, f) as NonParameterizedTemplate, this, f) as FeatureTemplate;

        if (ftpl == null)
            System.Diagnostics.Trace.WriteLine(string.Format("Feature '{0}' won't be published as no feature template could be located", f.Name), "warn");
        else if (f.Annotations.Find(o => o is SuppressBrowseAnnotation) != null)
            System.Diagnostics.Trace.WriteLine(String.Format("Feature '{0}' won't be published as a SuppressBrowse annotation was found", f.Name), "warn");
        else if (ftpl.NewPage)
        {
            System.Diagnostics.Trace.WriteLine(string.Format("Queueing ({1}) '{0}'...", f.Name, f.GetType().Name), "debug");

            // Create a new worker
            Worker w = new Worker();
            w.ArticleCollection = artc;
            w.FeatureTemplate = ftpl;
            w.OnComplete += delegate(object sender, EventArgs e)
            {
                Worker wrkr = sender as Worker;
                System.Diagnostics.Trace.WriteLine(String.Format("Rendered ({1}) '{0}'...", (wrkr.FeatureTemplate.Context as Feature).Name, wrkr.FeatureTemplate.Context.GetType().Name), "debug");
            };
            wtp.QueueUserWorkItem(w.Start);
        }
    }

    System.Diagnostics.Trace.WriteLine("Waiting for work items to complete...", "debug");
    wtp.WaitOne();
    doohickeyThread.Abort();

    ArticleCollection retVal = new ArticleCollection();
    Article.Article MasterTOC = new MohawkCollege.EHR.gpmr.Pipeline.Renderer.Deki.Article.Article();
    MasterTOC.Children = artc;

    System.Diagnostics.Trace.WriteLine("Creating Table of Contents...", "information");
    PrepareTOC(MasterTOC);
    MasterTOC.Children = null;
    artc.Add(MasterTOC);

    return artc;
}
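// A minimal sketch of the worker-object variant used above: each unit of work is an object whose
// Start method matches the pool's callback shape, queued with the single-argument
// QueueUserWorkItem overload shown in this example. RenderWorkerSketch is hypothetical; only
// WaitThreadPool members already demonstrated here (QueueUserWorkItem(WaitCallback), WaitOne())
// are assumed.
using System;

class RenderWorkerSketch
{
    public string Name { get; set; }
    public event EventHandler OnComplete;

    // Signature-compatible with WaitCallback so the instance method can be queued directly.
    public void Start(object state)
    {
        Console.WriteLine("Rendering {0}...", this.Name);
        this.OnComplete?.Invoke(this, EventArgs.Empty);
    }

    public static void RenderAll(string[] names)
    {
        WaitThreadPool wtp = new WaitThreadPool();
        foreach (var n in names)
        {
            var w = new RenderWorkerSketch() { Name = n };
            w.OnComplete += (s, e) => Console.WriteLine("Rendered {0}", ((RenderWorkerSketch)s).Name);
            wtp.QueueUserWorkItem(w.Start);   // method group binds to the WaitCallback overload
        }
        wtp.WaitOne();
    }
}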
public static void GeneratePatients(String[] args)
{
    var parameters = new ParameterParser<ConsoleParameters>().Parse(args);
    int populationSize = Int32.Parse(parameters.PopulationSize ?? "10");
    int maxAge = Int32.Parse(parameters.MaxAge ?? "500");

    Console.WriteLine("Adding minimal service providers...");
    ApplicationContext.Current.AddServiceProvider(typeof(SimpleCarePlanService));
    ApplicationContext.Current.AddServiceProvider(typeof(LocalPlaceRepositoryService));
    ApplicationContext.Current.AddServiceProvider(typeof(LocalActRepositoryService));
    ApplicationServiceContext.Current = ApplicationContext.Current;
    ApplicationContext.Current.RemoveServiceProvider(typeof(AppletBusinessRulesDaemon));
    //cp.Repository = new SeederProtocolRepositoryService();
    ApplicationContext.Current.Start();

    foreach (var i in ApplicationContext.Current.Configuration.ServiceProviders.Where(o => o.GetInterfaces().Any(i => i.Name.StartsWith("IBusinessRules"))).ToArray())
        ApplicationContext.Current.RemoveServiceProvider(i);
    //ApplicationContext.Current.Configuration.ServiceProviders.RemoveAll(o => o.GetInterfaces().Any(i => i.Name.StartsWith("IBusinessRules")));

    int tr = 0;
    Console.WriteLine("Loading places...");
    IEnumerable<Place> places = null;
    Guid facId = Guid.Empty;
    if (!String.IsNullOrEmpty(parameters.Facility) && Guid.TryParse(parameters.Facility, out facId))
        places = (ApplicationContext.Current.GetService<IPlaceRepositoryService>() as IFastQueryRepositoryService).FindFast<Place>(o => o.Key == facId, 0, 1, out tr, Guid.Empty);
    else
        places = (ApplicationContext.Current.GetService<IPlaceRepositoryService>() as IFastQueryRepositoryService).FindFast<Place>(o => o.StatusConceptKey == StatusKeys.Active && o.ClassConceptKey == EntityClassKeys.ServiceDeliveryLocation, 0, Int32.Parse(parameters.FacilityCount ?? "1000"), out tr, Guid.Empty);

    places = places.Union((ApplicationContext.Current.GetService<IPlaceRepositoryService>() as IFastQueryRepositoryService).FindFast<Place>(o => o.StatusConceptKey == StatusKeys.Active && o.ClassConceptKey != EntityClassKeys.ServiceDeliveryLocation, 0, Int32.Parse(parameters.FacilityCount ?? "1000"), out tr, Guid.Empty));

    WaitThreadPool wtp = new WaitThreadPool(Environment.ProcessorCount);
    Random r = new Random();
    int npatients = 0;

    Console.WriteLine("Generating Patients...");
    DateTime startTime = DateTime.Now;

    WaitCallback genFunc = (s) =>
    {
        try
        {
            AuthenticationContext.Current = new AuthenticationContext(AuthenticationContext.SystemPrincipal);
            var patient = GeneratePatient(maxAge, parameters.BarcodeAuth, places, r);
            if (patient == null)
                return;

            var persistence = ApplicationContext.Current.GetService<IDataPersistenceService<Patient>>();

            // Insert
            int pPatient = Interlocked.Increment(ref npatients);
            var ips = (((double)(DateTime.Now - startTime).Ticks / pPatient) * (populationSize - pPatient));
            var remaining = new TimeSpan((long)ips);
            patient = persistence.Insert(patient, AuthenticationContext.SystemPrincipal, TransactionMode.Commit);
            Console.WriteLine("#{2:#,###,###}({4:0%} - ETA:{5}): {0} ({1:#0} mo) [{3}]", patient.Identifiers.First().Value, DateTime.Now.Subtract(patient.DateOfBirth.Value).TotalDays / 30, pPatient, places.FirstOrDefault(p => p.Key == patient.Relationships.FirstOrDefault(o => o.RelationshipTypeKey == EntityRelationshipTypeKeys.DedicatedServiceDeliveryLocation).TargetEntityKey).Names.FirstOrDefault().ToString(), (float)pPatient / populationSize, remaining.ToString("hh'h 'mm'm 'ss's'"));

            // Schedule
            if (!parameters.PatientOnly)
            {
                var acts = ApplicationContext.Current.GetService<ICarePlanService>().CreateCarePlan(patient).Action.Where(o => o.ActTime <= DateTime.Now).Select(o => o.Copy() as Act);
                Bundle bundle = new Bundle();
                foreach (var act in acts)
                {
                    if (act.Key.Value.ToByteArray()[0] > 200)
                        continue;

                    act.MoodConceptKey = ActMoodKeys.Eventoccurrence;
                    act.StatusConceptKey = StatusKeys.Completed;
                    act.ActTime = act.ActTime.AddDays(r.Next(0, 5));
                    act.StartTime = null;
                    act.StopTime = null;

                    if (act is QuantityObservation)
                        (act as QuantityObservation).Value = (r.Next((int)(act.ActTime - patient.DateOfBirth.Value).TotalDays, (int)(act.ActTime - patient.DateOfBirth.Value).TotalDays + 10) / 10) + 4;
                    else
                    {
                        act.Tags.AddRange(new ActTag[]
                        {
                            new ActTag("catchmentIndicator", "True"),
                            new ActTag("hasRunAdjustment", "True")
                        });
                        act.Participations.Add(new ActParticipation(ActParticipationKey.Location, patient.Relationships.First(l => l.RelationshipTypeKey == EntityRelationshipTypeKeys.DedicatedServiceDeliveryLocation).TargetEntityKey));
                    }

                    // Persist the act
                    bundle.Item.Add(act);
                }

                Console.WriteLine("\t {0} acts", bundle.Item.Count());
                ApplicationContext.Current.GetService<IBatchRepositoryService>().Insert(bundle);
            }
        }
        catch { }
    };

    genFunc(null);
    for (int i = 0; i < populationSize; i++)
        wtp.QueueUserWorkItem(genFunc);
    wtp.WaitOne();
}
/// <summary>
/// Creates the formatter assembly for the specified types
/// </summary>
public Assembly CreateFormatterAssembly(Type[] rmimTypes, List<IStructureFormatter> aides, bool generateDeep)
{
    // Enter and lock
    lock (m_syncObject)
    {
        while (m_codeGenBlocking)
            Monitor.Wait(m_syncObject);
        m_codeGenBlocking = true;
    }

    // Create code namespace
    CodeNamespace ns = new CodeNamespace(String.Format("MARC.Everest.Formatters.XML.ITS1.d{0}", Guid.NewGuid().ToString("N")));
    List<Assembly> rmimAssemblies = new List<Assembly>() { rmimTypes[0].Assembly };

    try
    {
        // Validate parameters
        if (rmimTypes.Length == 0)
            throw new ArgumentException("Type array must have at least one element", "t");

        // Scan all classes in any graph aides
        List<string> graphAidesClasses = new List<string>(200);
        foreach (IStructureFormatter isf in aides)
            graphAidesClasses.AddRange(isf.HandleStructure);

        // Create a list of types (a todo list) that represent the types we want to format
        List<Type> types = new List<Type>(200);

        // Iterate through the types and create formatters
        if (generateDeep)
        {
            foreach (var type in Array.FindAll<Type>(rmimAssemblies[0].GetTypes(), o => o.IsClass && !o.IsAbstract && o.GetCustomAttributes(typeof(StructureAttribute), false).Length > 0))
            {
                //if (!rmimAssemblies.Contains(type.Assembly))
                //    rmimAssemblies.Add(type.Assembly);
                GetUniqueTypes(type, types, true);
            }
        }
        else
        {
            // Iterate through the types
            foreach (Type type in rmimTypes)
            {
                //if (!rmimAssemblies.Contains(type.Assembly))
                //    throw new InvalidOperationException("All types must belong to the same revision assembly");
                GetUniqueTypes(type, types, false);
            }
        }

        // Wait thread pool
        WaitThreadPool wtp = new WaitThreadPool();
        try
        {
            // Create type definitions
            foreach (Type t in types)
            {
                // Check if we need to gen this type
                if (t.GetCustomAttributes(typeof(StructureAttribute), false).Length == 0 || s_formatterGenerated.Contains(t))
                    continue;
                s_formatterGenerated.Add(t);

                // Scan and add base type
                Type dScan = t.BaseType;
                while (dScan != null && dScan != typeof(System.Object))
                {
                    if (!rmimAssemblies.Contains(dScan.Assembly))
                        rmimAssemblies.Add(dScan.Assembly);
                    dScan = dScan.BaseType;
                }

                // Structure Attribute
                StructureAttribute sta = t.GetCustomAttributes(typeof(StructureAttribute), false)[0] as StructureAttribute;

                // Is this type already handled by a helper formatter?
                bool hasHelper = graphAidesClasses.Contains(sta.Name);

                // Compile if helper is not available
                if (!hasHelper)
                {
                    // Type formatter creator
                    TypeFormatterCreator crtr = new TypeFormatterCreator();

                    // Reset event
                    crtr.CodeTypeDeclarationCompleted += new CreateTypeFormatterCompletedDelegate(delegate(CodeTypeDeclaration result)
                    {
                        // Add to the code currently created
                        if (result != null)
                            lock (ns)
                                ns.Types.Add(result);
                    });

                    // Helper result
                    wtp.QueueUserWorkItem(crtr.CreateTypeFormatter, t);
                }
            }

            // Wait for final pool to clear
            wtp.WaitOne();
        }
        finally
        {
            wtp.Dispose();
        }

        if (ns.Types.Count == 0)
            return null;
    }
    finally
    {
        m_codeGenBlocking = false;
        lock (m_syncObject)
            Monitor.Pulse(m_syncObject);
    }

    // Setup compiler and referenced assemblies
    CSharpCodeProvider csharpCodeProvider = new CSharpCodeProvider();
    CodeCompileUnit compileUnit = new CodeCompileUnit();
    compileUnit.Namespaces.Add(ns);
    compileUnit.ReferencedAssemblies.Add(typeof(II).Assembly.Location);
    compileUnit.ReferencedAssemblies.Add(typeof(ITypeFormatter).Assembly.Location);

    // Was this assembly loaded directly from disk or from a byte array
    foreach (var asm in rmimAssemblies)
        compileUnit.ReferencedAssemblies.Add(asm.Location);
    compileUnit.ReferencedAssemblies.Add("System.dll");
    compileUnit.ReferencedAssemblies.Add("System.Xml.dll");

    // Assembly info
    CodeAttributeDeclaration cadecl = new CodeAttributeDeclaration("System.Reflection.AssemblyVersion", new CodeAttributeArgument[] { new CodeAttributeArgument(new CodePrimitiveExpression("1.0.*")) });
    compileUnit.AssemblyCustomAttributes.Add(cadecl);

    // Setup compiler
    CompilerParameters compilerParms = new CompilerParameters();
    compilerParms.GenerateInMemory = !generateDeep;
    compilerParms.WarningLevel = 1;
    compilerParms.TempFiles.KeepFiles = generateDeep;

    // Compile the code dom.
    // To see the generated code, set a breakpoint on the next line,
    // then take a look at the results.TempFiles array to find the
    // path to the generated C# files.
    CompilerResults results = csharpCodeProvider.CompileAssemblyFromDom(compilerParms, new CodeCompileUnit[] { compileUnit });
    if (results.Errors.HasErrors)
        throw new Exceptions.FormatterCompileException(results);
    else
    {
        Assembly a = !generateDeep ? results.CompiledAssembly : Assembly.LoadFile(results.PathToAssembly);
        AddFormatterAssembly(a);
        return a;
    }
}
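// A minimal sketch of the gating pattern wrapped around the code generation above: a
// Monitor-based gate so only one caller generates at a time, per-item work queued to a
// WaitThreadPool, WaitOne() to drain it, and Dispose() in a finally. Names such as
// m_syncObject, m_busy and GenerateOne are illustrative only; the WaitThreadPool surface assumed
// here (parameterless constructor, QueueUserWorkItem(WaitCallback, object), WaitOne(), Dispose())
// is limited to what the examples above already use.
using System;
using System.Threading;

class GatedGeneratorSketch
{
    private readonly object m_syncObject = new object();
    private bool m_busy;   // true while some thread is inside the generation section

    // Hypothetical per-type work item.
    private void GenerateOne(object state)
    {
        Console.WriteLine("Generating for {0}", state);
    }

    public void GenerateAll(Type[] types)
    {
        // Enter the gate: wait until no other caller is generating
        lock (m_syncObject)
        {
            while (m_busy)
                Monitor.Wait(m_syncObject);
            m_busy = true;
        }

        try
        {
            WaitThreadPool wtp = new WaitThreadPool();
            try
            {
                foreach (Type t in types)
                    wtp.QueueUserWorkItem(GenerateOne, t);
                wtp.WaitOne();   // drain the pool before leaving the gated section
            }
            finally
            {
                wtp.Dispose();
            }
        }
        finally
        {
            // Leave the gate and wake one waiting caller
            lock (m_syncObject)
            {
                m_busy = false;
                Monitor.Pulse(m_syncObject);
            }
        }
    }
}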