// Builds a nested OSC packet each frame - two time-stamped child bundles plus
// one root-level message - and sends it through the attached uOscClient.
void Update()
{
    var client = GetComponent<uOscClient>();

    // First child bundle, stamped with the current time.
    var bundle1 = new Bundle(Timestamp.Now);
    bundle1.Add(new Message("/uOSC/root/bundle1/message1", 123, "hoge", new byte[] { 1, 2, 3, 4 }));
    bundle1.Add(new Message("/uOSC/root/bundle1/message2", 1.2345f));
    bundle1.Add(new Message("/uOSC/root/bundle1/message3", "abcdefghijklmn"));

    // Second child bundle, scheduled ten seconds in the future.
    var futureTime = System.DateTime.UtcNow.AddSeconds(10);
    var futureStamp = Timestamp.CreateFromDateTime(futureTime);
    var bundle2 = new Bundle(futureStamp);
    bundle2.Add(new Message("/uOSC/root/bundle2/message1", 234, "fuga", new byte[] { 2, 3, 4 }));
    bundle2.Add(new Message("/uOSC/root/bundle2/message2", 2.3456f));
    bundle2.Add(new Message("/uOSC/root/bundle2/message3", "opqrstuvwxyz"));

    // Root bundle dispatches immediately and carries both children.
    var root = new Bundle(Timestamp.Immediate);
    root.Add(bundle1);
    root.Add(bundle2);
    root.Add(new Message("/uOSC/root/message2"));
    client.Send(root);
}
// Adds as much of the given bundle as possible; everything that could not be
// stored is collected into a new bundle and returned to the caller.
public Bundle AddBundleAndReturnLeftovers(Bundle bundle, Settlement settlement)
{
    var remainder = new Bundle();
    foreach (var (resource, amount) in bundle)
    {
        // Assets need settlement-aware handling; plain resources do not.
        if (resource is Asset asset)
        {
            remainder.Add(asset, AddAssetAndReturnLeftovers(asset, amount, settlement));
        }
        else
        {
            remainder.Add(resource, AddResourceAndReturnLeftovers(resource, amount));
        }
    }
    return remainder;
}
/// <summary>
/// Adds a randomly-named bundle with the given tags, registers it for
/// cleanup, and asserts the add succeeded.
/// </summary>
/// <param name="tags">Tags to associate with the new bundle.</param>
/// <returns>The generated bundle name.</returns>
internal static string AddTestBundle(string tags)
{
    string bundleName = GetRandomString();
    bool added = Bundle.Add(bundleName, tags);
    // Register for cleanup before asserting so stray state is removed even on failure.
    CleanupBundleList.Add(bundleName);
    // Fix: original message misspelled "sucessfully"; also use interpolation.
    Assert.IsTrue(added, $"The Bundle '{bundleName}' was not successfully added.");
    return bundleName;
}
/// <summary>
/// Rebuilds the outgoing OSC bundle from the current transform, remapping each
/// enabled channel (position / rotation / scale) from its input range into its
/// output range before packing it into a message.
/// </summary>
private void PrepBundle()
{
    _bundle = new Bundle(Timestamp.Now);
    // You have to add each value separately so that it gets type tagged in the message
    if (sendPosition)
    {
        var p = MapVector(transform.position, posInMin, posInMax, posOutMin, posOutMax);
        _position = new Message(String.Format("{0}/{1}/position", address, objectId), p.x, p.y, p.z);
        _bundle.Add(_position);
    }
    if (sendRotation)
    {
        var r = MapVector(transform.rotation.eulerAngles, rotInMin, rotInMax, rotOutMin, rotOutMax);
        _rotation = new Message(String.Format("{0}/{1}/rotation", address, objectId), r.x, r.y, r.z);
        _bundle.Add(_rotation);
    }
    if (sendScale)
    {
        var s = MapVector(transform.localScale, sclInMin, sclInMax, sclOutMin, sclOutMax);
        _scale = new Message(String.Format("{0}/{1}/scale", address, objectId), s.x, s.y, s.z);
        _bundle.Add(_scale);
    }
}

// Remaps each component of v from [inMin..inMax] into [outMin..outMax].
// Extracted to remove the triplicated per-component Map() calls above.
// (v is a struct copy, so mutating it here does not affect the caller.)
private static Vector3 MapVector(Vector3 v, Vector3 inMin, Vector3 inMax, Vector3 outMin, Vector3 outMax)
{
    v.x = Klak.Math.BasicMath.Map(v.x, inMin.x, inMax.x, outMin.x, outMax.x);
    v.y = Klak.Math.BasicMath.Map(v.y, inMin.y, inMax.y, outMin.y, outMax.y);
    v.z = Klak.Math.BasicMath.Map(v.z, inMin.z, inMax.z, outMin.z, outMax.z);
    return v;
}
// Builds a bundle that sets every vibrator channel (1..7) to zero volume.
Bundle bundleResetVolume()
{
    Bundle bundle = new Bundle(Timestamp.Now);
    for (int channel = 1; channel <= 7; channel++)
    {
        bundle.Add(new Message("/vibrators/" + channel, 0));
    }
    return bundle;
}
// Builds a bundle whose per-channel volume is the current measurement's
// pattern value scaled by that channel's configured vibration volume.
Bundle bundleVolumePattern(Dictionary<string, List<int>> vP)
{
    Bundle bundle = new Bundle(Timestamp.Now);
    // Channels and pattern keys are 1-based; vibrationVol is 0-based.
    for (int channel = 1; channel <= 7; channel++)
    {
        var level = vP["vibrator" + channel][measurementNum - 1] * vibrationVol[channel - 1];
        bundle.Add(new Message("/vibrators/" + channel, level));
    }
    return bundle;
}
// Adds a VMC tracker position/rotation message for the given humanoid bone to
// the bundle, tagged with the device serial; does nothing when the animator
// has no transform for that bone.
void SendBoneTransformForTracker(ref Bundle bundle, HumanBodyBones bone, string DeviceSerial)
{
    var boneTransform = animator.GetBoneTransform(bone);
    if (boneTransform == null)
    {
        return;
    }
    var pos = boneTransform.position;
    var rot = boneTransform.rotation;
    bundle.Add(new Message("/VMC/Ext/Tra/Pos",
        DeviceSerial,
        pos.x, pos.y, pos.z,
        rot.x, rot.y, rot.z, rot.w));
}
// Recursively enumerates every possible grouping of the given books.
// eIn holds the partial groupings built so far; each step distributes
// books[0] into every compatible bundle of every existing grouping (plus the
// option of starting a fresh bundle), then recurses on the remaining books.
private Ensemble GenerateGroupings(Ensemble eIn, List<Books> books)
{
    // Base case: no books left - the accumulated ensemble is the answer.
    if (books.Count == 0)
    {
        return (eIn);
    }
    var e = new Ensemble();
    var book = books[0];
    var otherBooks = books.Skip(1).ToList();
    if (eIn.Count == 0)
    {
        // Seed case: the very first book always starts its own bundle/grouping.
        var b = new Bundle();
        b.Add(book);
        var gAlone = new Grouping();
        gAlone.Add(b);
        e.Add(gAlone);
        return (GenerateGroupings(e, otherBooks));
    }
    else
    {
        foreach (var grouping in eIn)
        {
            var nbBundles = grouping.Count();
            // Variant 1: add the book to each bundle that can accept it,
            // producing one enriched clone of the grouping per such bundle.
            for (var iBundle = 0; iBundle < nbBundles; iBundle++)
            {
                var bundle = grouping[iBundle];
                if (bundle.CanAccept(book))
                {
                    var enrichedGrouping = grouping.Clone();
                    var clonedBundle = enrichedGrouping[iBundle];
                    clonedBundle.Add(book);
                    e.Add(enrichedGrouping);
                }
            }
            // Variant 2: the book starts a brand-new bundle in this grouping.
            var clonedGrouping = grouping.Clone();
            var b = new Bundle();
            b.Add(book);
            clonedGrouping.Add(b);
            e.Add(clonedGrouping);
        }
        // Release superseded partial results before recursing.
        eIn.Clear();
        return (GenerateGroupings(e, otherBooks));
    }
}
// Packs the pose of every virtual tracker into one VMC message per tracker
// and sends them all as a single OSC bundle each frame.
private void Update()
{
    var bundle = new Bundle();
    foreach (var tracker in VirtualTrackers)
    {
        var position = tracker.transform.position;
        var rotation = tracker.transform.rotation;
        bundle.Add(new Message("/VMC/Ext/Tra/Pos",
            tracker.name,
            position.x, position.y, position.z,
            rotation.x, rotation.y, rotation.z, rotation.w));
    }
    OscClient.Send(bundle);
}
/// <summary>
/// Create a bundle from the specified result set.
/// </summary>
/// <param name="resourceRoot">The resources to place in the bundle (may be null).</param>
/// <param name="totalResults">Total number of results in the complete result set.</param>
/// <param name="offset">Offset of <paramref name="resourceRoot"/> within the complete result set.</param>
/// <param name="lean">When true, skip deep model processing of each item.</param>
public static Bundle CreateBundle(IEnumerable<IdentifiedData> resourceRoot, int totalResults, int offset, bool lean)
{
    try
    {
        Bundle retVal = new Bundle();
        retVal.Key = Guid.NewGuid();
        retVal.Offset = offset;
        retVal.TotalResults = totalResults;

        // Fix: the original called resourceRoot.Count() (for tracing and
        // retVal.Count) BEFORE this null guard, throwing NRE on null input.
        if (resourceRoot == null)
        {
            retVal.Count = 0;
            return retVal;
        }

        // Materialize once - the original enumerated the sequence three times
        // (two Count() calls plus the foreach), which re-runs deferred queries.
        var resources = resourceRoot.ToList();
        retVal.Count = resources.Count;
        m_traceSource.TraceEvent(TraceEventType.Verbose, 0, "Creating bundle for results {0}..{1} of {2}", offset, offset + resources.Count, totalResults);

        foreach (var itm in resources)
        {
            if (itm == null)
            {
                continue;
            }
            // Only include keyed items not already present in the bundle.
            if (!retVal.HasTag(itm.Tag) && itm.Key.HasValue)
            {
                retVal.Add(itm);
                if (!lean)
                {
                    // Deep-process related model objects into the bundle.
                    Bundle.ProcessModel(itm.GetLocked(), retVal, !lean);
                }
            }
        }
        return retVal;
    }
    catch (Exception e)
    {
        m_traceSource.TraceEvent(TraceEventType.Verbose, 0, "Error building bundle: {0}", e);
        throw;
    }
}
// Fetches store metadata for the given configured products through the native
// in-app purchase plugin. Only one listing request may be in flight at a time;
// a second concurrent call is rejected with ErrorCode.AlreadyInProgress.
// The returned promise is resolved later by GetInformationAboutProducts_Done.
Promise <List <ProductInfo> > IStore.GetInformationAboutProducts(List <ConfiguredProduct> products)
{
    // Already in progress? Refuse immediately.
    // NOTE(review): locking on `this` is discouraged (external code could take
    // the same lock); a private lock object would be safer - confirm before changing.
    lock (this)
    {
        if (LastGetInformationAboutProductsPromise != null)
        {
            return (Promise <List <ProductInfo> > .Rejected(new CotcException(ErrorCode.AlreadyInProgress, "Listing products")));
        }
        LastGetInformationAboutProductsPromise = new Promise <List <ProductInfo> >();
    }
    // Serialize for Android (passed as a JSON string)
    Bundle interop = Bundle.CreateArray();
    foreach (ConfiguredProduct pi in products)
    {
        interop.Add(pi.AsBundle());
    }
    // Will call back the CotcInappPurchaseGameObject (done like this to prevent AOT issues on iOS)
    CotcInappPurchase_listProducts(interop.ToJson(), (this as IStore).GetInformationAboutProducts_Done);
    return (LastGetInformationAboutProductsPromise);
}
/// <summary>
/// Perform the operation
/// </summary>
/// <remarks>
/// Two modes of operation:
///  - No scoping key: start a background MDM match job for the whole scoping
///    type (optionally clearing previous results via the "clear" parameter).
///  - A Guid scoping key: re-run candidate detection for that single MDM
///    master and persist the resulting transaction bundle.
/// </remarks>
/// <param name="scopingType">The type under MDM management.</param>
/// <param name="scopingKey">Null for whole-type matching, or the Guid of a master record.</param>
/// <param name="parameters">Operation parameters; "clear" (bool) is honored.</param>
/// <returns>Null for the job mode; the persisted bundle for the single-master mode.</returns>
public override object Invoke(Type scopingType, object scopingKey, ParameterCollection parameters)
{
    var dataManager = MdmDataManagerFactory.GetDataManager(scopingType);
    if (dataManager == null)
    {
        throw new NotSupportedException($"MDM is not configured for {scopingType}");
    }
    if (scopingKey == null)
    {
        // Whole-type mode: kick off the generic match job in the background.
        parameters.TryGet <bool>("clear", out bool clear);
        this.m_jobManager.StartJob(typeof(MdmMatchJob <>).MakeGenericType(scopingType), new object[] { clear });
        return (null);
    }
    else if (scopingKey is Guid scopingObjectKey)
    {
        // Load the current master from @scopingKey
        if (!dataManager.IsMaster(scopingObjectKey))
        {
            throw new KeyNotFoundException($"{scopingObjectKey} is not an MDM Master");
        }

        // Now - we want to prepare a transaction
        Bundle retVal = new Bundle();
        // When "clear" is set, queue deletion of all automatically-classified
        // candidate links so detection starts from a clean slate.
        if (parameters.TryGet <bool>("clear", out bool clear) && clear)
        {
            foreach (var itm in dataManager.GetCandidateLocals(scopingObjectKey).Where(o => o.ClassificationKey == MdmConstants.AutomagicClassification))
            {
                if (itm is EntityRelationship er)
                {
                    er.BatchOperation = Core.Model.DataTypes.BatchOperationType.Delete;
                    retVal.Add(er);
                }
                else if (itm is ActRelationship ar)
                {
                    ar.BatchOperation = Core.Model.DataTypes.BatchOperationType.Delete;
                    retVal.Add(ar);
                }
            }
        }

        // Re-run candidate detection against the synthesized master record.
        retVal.AddRange(dataManager.MdmTxDetectCandidates(dataManager.MdmGet(scopingObjectKey).Synthesize(AuthenticationContext.Current.Principal) as Entity, retVal.Item));

        // Persist the whole transaction atomically.
        try
        {
            retVal = this.m_batchService.Insert(retVal, TransactionMode.Commit, AuthenticationContext.Current.Principal);
        }
        catch (Exception e)
        {
            this.m_tracer.TraceError("Error persisting re-match: {0}", e.Message);
            throw new MdmException("Error persisting re-match operation", e);
        }
        return (retVal);
    }
    else
    {
        throw new InvalidOperationException("Cannot determine the operation");
    }
}
/// <summary>
/// Merge the specified records together
/// </summary>
/// <remarks>
/// Merges each of <paramref name="linkedDuplicates"/> into
/// <paramref name="survivorKey"/>. The strategy depends on whether each side
/// is an MDM master or a local record:
///  MASTER&gt;MASTER - master-merge transaction,
///  LOCAL&gt;MASTER  - link the local to the master,
///  LOCAL&gt;LOCAL   - replace/obsolete the victim and move its identifiers.
/// If the caller lacks master-write permission, the operation falls back to
/// the principal's own local record and the result status becomes Alternate.
/// All changes are staged into one bundle and committed atomically at the end.
/// </remarks>
public override RecordMergeResult Merge(Guid survivorKey, IEnumerable <Guid> linkedDuplicates)
{
    try
    {
        // Pre-event handlers may veto the merge entirely.
        if (this.FireMerging(survivorKey, linkedDuplicates))
        {
            this.m_tracer.TraceWarning("Pre-Event Handler for merge indicated cancel on {0}", survivorKey);
            return (new RecordMergeResult(RecordMergeStatus.Cancelled, null, null));
        }

        // We want to get the target
        RecordMergeStatus recordMergeStatus = RecordMergeStatus.Success;
        var survivor = this.m_dataManager.GetRaw(survivorKey) as Entity;
        bool isSurvivorMaster = this.m_dataManager.IsMaster(survivorKey);
        if (isSurvivorMaster)
        {
            try
            {
                // Trying to write to master - do they have permission?
                this.m_pepService.Demand(MdmPermissionPolicyIdentifiers.WriteMdmMaster);
            }
            catch (PolicyViolationException e) when (e.PolicyId == MdmPermissionPolicyIdentifiers.WriteMdmMaster)
            {
                // No master-write permission: fall back to the caller's own local.
                survivor = this.m_dataManager.GetLocalFor(survivorKey, AuthenticationContext.Current.Principal);
                if (survivor == null)
                {
                    throw new DetectedIssueException(Core.BusinessRules.DetectedIssuePriorityType.Error, MdmConstants.INVALID_MERGE_ISSUE, $"Principal has no authority to merge into {survivorKey}", DetectedIssueKeys.SecurityIssue, e);
                }
                recordMergeStatus = RecordMergeStatus.Alternate;
                isSurvivorMaster = false;
            }
        }

        Bundle transactionBundle = new Bundle();

        // For each linked duplicate
        var replaced = linkedDuplicates.Select(itm =>
        {
            var victim = this.m_dataManager.GetRaw(itm) as Entity;
            var isVictimMaster = this.m_dataManager.IsMaster(itm);
            if (isVictimMaster)
            {
                try
                {
                    // Trying to write to master - do they have permission?
                    this.m_pepService.Demand(MdmPermissionPolicyIdentifiers.MergeMdmMaster);
                }
                catch (PolicyViolationException e) when (e.PolicyId == MdmPermissionPolicyIdentifiers.MergeMdmMaster)
                {
                    // Same fallback as the survivor: use the principal's local.
                    victim = this.m_dataManager.GetLocalFor(itm, AuthenticationContext.Current.Principal);
                    if (victim == null)
                    {
                        throw new DetectedIssueException(Core.BusinessRules.DetectedIssuePriorityType.Error, MdmConstants.INVALID_MERGE_ISSUE, $"Principal has no authority to merge {itm}", DetectedIssueKeys.SecurityIssue, e);
                    }
                    isVictimMaster = false;
                    recordMergeStatus = RecordMergeStatus.Alternate;
                }
            }

            // Sanity check - a record cannot be merged into itself.
            if (victim.Key == survivor.Key)
            {
                throw new DetectedIssueException(DetectedIssuePriorityType.Error, MdmConstants.INVALID_MERGE_ISSUE, "Records cannot be merged into themselves", DetectedIssueKeys.FormalConstraintIssue, null);
            }

            if (isSurvivorMaster && isVictimMaster) // MASTER>MASTER
            {
                this.m_tracer.TraceInfo("MASTER({0})>MASTER({0}) MERGE", victim.Key, survivor.Key);
                transactionBundle.AddRange(this.m_dataManager.MdmTxMergeMasters(survivorKey, itm, transactionBundle.Item));
            }
            else if (isSurvivorMaster && !isVictimMaster) // LOCAL>MASTER = LINK
            {
                // Ensure that the local manipulation is allowed
                if (!this.m_dataManager.IsOwner((TEntity)victim, AuthenticationContext.Current.Principal))
                {
                    this.m_pepService.Demand(MdmPermissionPolicyIdentifiers.UnrestrictedMdm); // MUST BE ABLE TO MANIPULATE OTHER LOCALS
                }
                this.m_tracer.TraceInfo("LOCAL({0})>MASTER({0}) MERGE", victim.Key, survivor.Key);
                transactionBundle.AddRange(this.m_dataManager.MdmTxMasterLink(survivorKey, victim.Key.Value, transactionBundle.Item, true));
            }
            else if (!isSurvivorMaster && !isVictimMaster) // LOCAL>LOCAL = MERGE
            {
                // First, target replaces victim
                transactionBundle.Add(new EntityRelationship(EntityRelationshipTypeKeys.Replaces, survivor.Key, victim.Key, null)
                {
                    RelationshipRoleKey = EntityRelationshipTypeKeys.Duplicate
                });
                this.m_tracer.TraceInfo("LOCAL({0})>LOCAL({0}) MERGE", victim.Key, survivor.Key);

                // Obsolete the victim - the victim is obsolete since it was accurate and is no longer the accurate
                victim.StatusConceptKey = StatusKeys.Inactive;
                transactionBundle.Add(victim);

                // Delete the victim's identifiers that the survivor does not already carry...
                transactionBundle.AddRange(
                    victim.LoadCollection(o => o.Identifiers).Where(i => !survivor.LoadCollection(o => o.Identifiers).Any(e => e.SemanticEquals(i))).Select(o =>
                    {
                        o.BatchOperation = BatchOperationType.Delete;
                        return (o);
                    })
                );

                // ...and re-create those same identifiers on the survivor (i.e. move them over).
                transactionBundle.AddRange(
                    victim.LoadCollection(o => o.Identifiers).Where(i => !survivor.LoadCollection(o => o.Identifiers).Any(e => e.SemanticEquals(i))).Select(o => new EntityIdentifier(o.Authority, o.Value)
                    {
                        SourceEntityKey = survivor.Key,
                        IssueDate = o.IssueDate,
                        ExpiryDate = o.ExpiryDate
                    })
                );

                // Remove MDM links from the victim.
                foreach (var rel in this.m_dataManager.GetAllMdmAssociations(victim.Key.Value).OfType <EntityRelationship>())
                {
                    rel.BatchOperation = BatchOperationType.Delete;
                    transactionBundle.Add(rel);
                }

                // Recheck the master to ensure that it isn't dangling out here:
                // if no other local points at the victim's master, delete it too.
                var otherLocals = this.m_relationshipPersistence.Count(o => o.RelationshipTypeKey == MdmConstants.MasterRecordRelationship && o.TargetEntityKey == itm && o.SourceEntityKey != victim.Key, AuthenticationContext.SystemPrincipal);
                if (otherLocals == 0)
                {
                    transactionBundle.Add(new Entity()
                    {
                        BatchOperation = BatchOperationType.Delete,
                        Key = itm
                    });
                }
            }
            else
            {
                throw new MdmException($"Cannot determine viable merge/link strategy between {survivor.Key} and {victim.Key}", null);
            }
            return (victim.Key.Value);
        }).ToArray();

        // Commit all staged changes atomically, then raise the post-event.
        this.m_batchPersistence.Insert(transactionBundle, TransactionMode.Commit, AuthenticationContext.Current.Principal);
        this.FireMerged(survivor.Key.Value, replaced);
        return (new RecordMergeResult(recordMergeStatus, new Guid[] { survivor.Key.Value }, replaced));
    }
    catch (Exception ex)
    {
        this.m_tracer.TraceError("Error performing MDM merging operation on {0}: {1}", survivorKey, ex);
        throw new MdmException($"Error performing MDM merge on {survivorKey}", ex);
    }
}
/// <summary>
/// The issue despatch advice message will insert a new shipped order into the TImR system.
/// </summary>
/// <remarks>
/// For each advice: resolves shipper/receiver locations, optionally completes
/// the original order it fulfills, skips despatches already imported (matched
/// by identifier), then creates a Supply act with one Consumable participation
/// per line item. All acts are committed in one batch at the end.
/// </remarks>
public void IssueDespatchAdvice(DespatchAdviceMessageType advice)
{
    if (advice == null || advice.despatchAdvice == null)
    {
        throw new InvalidOperationException("Invalid message sent");
    }

    // TODO: Validate the standard header
    // Loop
    Bundle orderTransaction = new Bundle();
    foreach (var adv in advice.despatchAdvice)
    {
        // Resolve shipper and receiver to known Places - both are mandatory.
        Place sourceLocation = this.m_gs1Util.GetLocation(adv.shipper),
              destinationLocation = this.m_gs1Util.GetLocation(adv.receiver);
        if (sourceLocation == null)
        {
            throw new KeyNotFoundException($"Shipper location not found");
        }
        else if (destinationLocation == null)
        {
            throw new KeyNotFoundException($"Receiver location not found");
        }

        // Find the original order which this despatch advice is fulfilling
        Act orderRequestAct = null;
        if (adv.orderResponse != null || adv.purchaseOrder != null)
        {
            orderRequestAct = this.m_gs1Util.GetOrder(adv.orderResponse ?? adv.purchaseOrder, ActMoodKeys.Request);
            // Null here means an orderless despatch - nothing to complete.
            if (orderRequestAct != null)
            {
                // Mark the original order request as complete.
                orderRequestAct.StatusConceptKey = StatusKeys.Completed;
                orderTransaction.Add(orderRequestAct);
            }
        }

        // Find the author of the shipment: content-owner GLN OID first,
        // then the configured default assigning authority.
        var oidService = ApplicationContext.Current.GetService <IOidRegistrarService>();
        var gln = oidService.GetOid("GLN");
        var issuingAuthority = oidService.FindData($"{gln.Oid}.{adv.despatchAdviceIdentification.contentOwner?.gln}");
        if (issuingAuthority == null)
        {
            issuingAuthority = oidService.GetOid(this.m_configuration.DefaultContentOwnerAssigningAuthority);
        }
        if (issuingAuthority == null)
        {
            throw new KeyNotFoundException("Cannot find default issuing authority for advice identification. Please configure a valid OID");
        }

        // Skip despatches already imported (matched on the advice identifier).
        int tr = 0;
        var existing = this.m_actRepository.Find <Act>(o => o.Identifiers.Any(i => i.Authority.DomainName == issuingAuthority.Mnemonic && i.Value == adv.despatchAdviceIdentification.entityIdentification), 0, 1, out tr);
        if (existing.Any())
        {
            this.m_tracer.TraceWarning("Duplicate despatch {0} will be ignored", adv.despatchAdviceIdentification.entityIdentification);
            continue;
        }

        // Now we want to create a new Supply act which fulfills the old act
        Act fulfillAct = new Act()
        {
            CreationTime = DateTimeOffset.Now,
            MoodConceptKey = ActMoodKeys.Eventoccurrence,
            ClassConceptKey = ActClassKeys.Supply,
            StatusConceptKey = StatusKeys.Active,
            TypeConceptKey = Guid.Parse("14d69b32-f6c4-4a49-a527-a74893dbcf4a"), // Order
            ActTime = adv.despatchInformation.despatchDateTimeSpecified ? adv.despatchInformation.despatchDateTime : DateTime.Now,
            Extensions = new List <ActExtension>()
            {
                new ActExtension(Gs1ModelExtensions.ActualShipmentDate, typeof(DateExtensionHandler), adv.despatchInformation.actualShipDateTime),
                new ActExtension(Gs1ModelExtensions.ExpectedDeliveryDate, typeof(DateExtensionHandler), adv.despatchInformation.estimatedDeliveryDateTime)
            },
            Tags = new List <ActTag>()
            {
                new ActTag("orderNumber", adv.despatchAdviceIdentification.entityIdentification),
                new ActTag("orderStatus", "shipped"),
                new ActTag("http://openiz.org/tags/contrib/importedData", "true")
            },
            Identifiers = new List <ActIdentifier>()
            {
                new ActIdentifier(new AssigningAuthority(issuingAuthority.Mnemonic, issuingAuthority.Name, issuingAuthority.Oid), adv.despatchAdviceIdentification.entityIdentification)
            },
            Participations = new List <ActParticipation>()
            {
                // TODO: Author
                // TODO: Performer
                new ActParticipation(ActParticipationKey.Location, sourceLocation.Key),
                new ActParticipation(ActParticipationKey.Destination, destinationLocation.Key)
            }
        };
        orderTransaction.Add(fulfillAct);

        // Fulfillment link back to the original order request, when one exists.
        if (orderRequestAct != null)
        {
            fulfillAct.Relationships = new List <ActRelationship>()
            {
                new ActRelationship(ActRelationshipTypeKeys.Fulfills, orderRequestAct.Key)
            };
        }

        // Now add participations for each material in the despatch
        foreach (var dal in adv.despatchAdviceLogisticUnit)
        {
            foreach (var line in dal.despatchAdviceLineItem)
            {
                if (line.despatchedQuantity.measurementUnitCode != "dose" && line.despatchedQuantity.measurementUnitCode != "unit")
                {
                    throw new InvalidOperationException("Despatched quantity must be reported in units or doses");
                }
                var material = this.m_gs1Util.GetManufacturedMaterial(line.transactionalTradeItem, this.m_configuration.AutoCreateMaterials);

                // Add a participation
                fulfillAct.Participations.Add(new ActParticipation(ActParticipationKey.Consumable, material.Key)
                {
                    Quantity = (int)line.despatchedQuantity.Value
                });
            }
        }
    }

    // insert transaction (only when at least one advice produced acts)
    if (orderTransaction.Item.Count > 0)
    {
        try
        {
            ApplicationContext.Current.GetService <IBatchRepositoryService>().Insert(orderTransaction);
        }
        catch (Exception e)
        {
            this.m_tracer.TraceError("Error issuing despatch advice: {0}", e);
            throw new Exception($"Error issuing despatch advice: {e.Message}", e);
        }
    }
}
/// <summary>
/// This method will construct a three folder structure inside <paramref name="targetDirectory"/> containing: Html, Index, and Source
/// </summary>
/// <param name="sourceDirectory">
/// Directory containing ldoc files
/// </param>
/// <param name="targetDirectory">
/// Output directory (must be empty or non-existent)
/// </param>
/// <remarks>
/// Pipeline: copy .ldoc sources, merge them into one document, generate the
/// templated HTML, build a Lucene full-text index from the generated XML
/// summaries, and finally write an info.xml manifest. State transitions are
/// raised via OnStateChanged at each phase.
/// </remarks>
public void Build(string sourceDirectory, string targetDirectory)
{
    if (Directory.Exists(targetDirectory) && Directory.EnumerateFileSystemEntries(targetDirectory).Any())
        throw new InvalidOperationException("Target path is not empty.");

    this.OnStateChanged(State.Preparing);

    // Lay out the output folder structure (plus a Logs folder for trace output).
    string htmlRoot = Path.Combine(targetDirectory, "Html");
    string indexRoot = Path.Combine(targetDirectory, "Index");
    string sourceRoot = Path.Combine(targetDirectory, "Source");
    string logRoot = Path.Combine(targetDirectory, "Logs");
    DirectoryInfo htmlDir = Directory.CreateDirectory(htmlRoot);
    DirectoryInfo indexDir = Directory.CreateDirectory(indexRoot);
    DirectoryInfo sourceDir = Directory.CreateDirectory(sourceRoot);
    DirectoryInfo logDir = Directory.CreateDirectory(logRoot);

    var sourceFiles = Directory.EnumerateFiles(sourceDirectory, "*.ldoc", SearchOption.TopDirectoryOnly);

    // copy all source files to output directory and add to bundle
    Bundle bundle = new Bundle(this.IgnoreVersionComponent);
    foreach (var sourceFile in sourceFiles)
    {
        string targetFile = Path.Combine(sourceDir.FullName, Path.GetFileName(sourceFile));
        File.Copy(sourceFile, targetFile);
        bundle.Add(XDocument.Load(targetFile));
    }

    TemplateOutput templateOutput;

    // wire up logging for the template phase
    string templateLogFile = Path.Combine(logDir.FullName, string.Format("template_{0:yyyy'_'MM'_'dd'__'HH'_'mm'_'ss}.log", DateTime.Now));
    using (TextWriterTraceListener traceListener = new TextWriterTraceListener(templateLogFile))
    {
        // log everything
        traceListener.Filter = new EventTypeFilter(SourceLevels.All);
        LostDoc.Diagnostics.TraceSources.TemplateSource.Switch.Level = SourceLevels.All;
        LostDoc.Diagnostics.TraceSources.BundleSource.Switch.Level = SourceLevels.All;
        LostDoc.Diagnostics.TraceSources.AssetResolverSource.Switch.Level = SourceLevels.All;
        LostDoc.Diagnostics.TraceSources.TemplateSource.Listeners.Add(traceListener);
        LostDoc.Diagnostics.TraceSources.BundleSource.Listeners.Add(traceListener);
        LostDoc.Diagnostics.TraceSources.AssetResolverSource.Listeners.Add(traceListener);

        // merge ldoc files
        this.OnStateChanged(State.Merging);
        AssetRedirectCollection assetRedirects;
        var mergedDoc = bundle.Merge(out assetRedirects);

        // generate output
        var templateData = new TemplateData(mergedDoc)
        {
            AssetRedirects = assetRedirects,
            IgnoredVersionComponent = this.IgnoreVersionComponent,
            OutputFileProvider = new ScopedFileProvider(new DirectoryFileProvider(), htmlDir.FullName),
            //TargetDirectory = htmlDir.FullName,
            Arguments = new Dictionary<string, object> { { "SearchUri", "/search/" } },
            KeepTemporaryFiles = true,
            TemporaryFilesPath = Path.Combine(logDir.FullName, "temp")
        };

        this.OnStateChanged(State.Templating);
        templateOutput = this.Template.Generate(templateData);

        // Detach listeners before the trace listener is disposed.
        LostDoc.Diagnostics.TraceSources.TemplateSource.Listeners.Remove(traceListener);
        LostDoc.Diagnostics.TraceSources.BundleSource.Listeners.Remove(traceListener);
        LostDoc.Diagnostics.TraceSources.AssetResolverSource.Listeners.Remove(traceListener);
    }

    this.OnStateChanged(State.Indexing);

    // wire up logging for the indexing phase
    string indexLogFile = Path.Combine(logDir.FullName, string.Format("index_{0:yyyy'_'MM'_'dd'__'HH'_'mm'_'ss}.log", DateTime.Now));
    using (TextWriterTraceListener traceListener = new TextWriterTraceListener(indexLogFile))
    {
        // log everything
        traceListener.Filter = new EventTypeFilter(SourceLevels.All);
        TraceSources.ContentBuilderSource.Switch.Level = SourceLevels.All;
        TraceSources.ContentBuilderSource.Listeners.Add(traceListener);

        // one stop-word per line
        StringReader stopWordsReader = new StringReader(@"missing");

        // index output
        using (var directory = FSDirectory.Open(indexDir))
        using (stopWordsReader)
        {
            // "title" gets its own analyzer; everything else uses the standard one.
            Analyzer analyzer = new StandardAnalyzer(global::Lucene.Net.Util.Version.LUCENE_30, stopWordsReader);
            Analyzer titleAnalyzer = new TitleAnalyzer();
            IDictionary<string, Analyzer> fieldAnalyzers = new Dictionary<string, Analyzer>
            {
                { "title", titleAnalyzer }
            };
            PerFieldAnalyzerWrapper analyzerWrapper = new PerFieldAnalyzerWrapper(analyzer, fieldAnalyzers);

            using (var writer = new IndexWriter(directory, analyzerWrapper, IndexWriter.MaxFieldLength.UNLIMITED))
            {
                // Index every generated .xml summary produced by the template run.
                var saResults = templateOutput.Results.Select(wur => wur.WorkUnit).OfType<StylesheetApplication>();
                var saDict = saResults.ToDictionary(sa => sa.Asset);
                var indexResults = saDict.Values.Where(sa => sa.SaveAs.EndsWith(".xml"));
                foreach (var sa in indexResults)
                {
                    string absPath = Path.Combine(htmlDir.FullName, sa.SaveAs);
                    XDocument indexDoc = XDocument.Load(absPath);

                    string assetId = indexDoc.Root.Attribute("assetId").Value;
                    string title = indexDoc.Root.Element("title").Value.Trim();
                    string summary = indexDoc.Root.Element("summary").Value.Trim();
                    string text = indexDoc.Root.Element("text").Value.Trim();

                    var ssApplication = saDict[AssetIdentifier.Parse(assetId)];

                    var doc = new Document();
                    doc.Add(new Field("uri", new Uri(ssApplication.SaveAs, UriKind.Relative).ToString(), Field.Store.YES, Field.Index.NO));
                    doc.Add(new Field("aid", ssApplication.Asset, Field.Store.YES, Field.Index.NOT_ANALYZED));
                    foreach (AssetIdentifier aid in ssApplication.Aliases)
                        doc.Add(new Field("alias", aid, Field.Store.NO, Field.Index.NOT_ANALYZED));
                    foreach (var section in ssApplication.Sections)
                    {
                        doc.Add(new Field("section", section.AssetIdentifier, Field.Store.NO, Field.Index.NOT_ANALYZED));
                    }
                    doc.Add(new Field("title", title, Field.Store.YES, Field.Index.ANALYZED));
                    doc.Add(new Field("summary", summary, Field.Store.YES, Field.Index.ANALYZED));
                    doc.Add(new Field("content", text, Field.Store.NO, Field.Index.ANALYZED));

                    TraceSources.ContentBuilderSource.TraceVerbose("Indexing document: {0}", doc.ToString());
                    writer.AddDocument(doc);
                }
                writer.Optimize();
                writer.Commit();
            }
            analyzerWrapper.Close();
            analyzer.Close();
        }
        TraceSources.ContentBuilderSource.Listeners.Remove(traceListener);
    }

    this.OnStateChanged(State.Finalizing);

    // Record what was produced in info.xml at the target root.
    var infoDoc = new XDocument(
        new XElement("content",
                     new XAttribute("created", XmlConvert.ToString(DateTime.UtcNow, XmlDateTimeSerializationMode.Utc)),
                     templateOutput.Results.Select(this.ConvertToXml)));
    infoDoc.Save(Path.Combine(targetDirectory, "info.xml"));

    this.OnStateChanged(State.Idle);
}
/// <summary>
/// Runs the LostDoc template command: gathers .ldoc files from this.Path
/// (a single file or a whole directory), merges them into one bundle, and
/// generates templated output. Console trace verbosity is controlled by the
/// Quiet / Verbose switches.
/// </summary>
public void Invoke(CompositionContainer container)
{
    // Map fully-qualified trace source names to short console prefixes.
    var traceListener = new ConsolidatedConsoleTraceListener(
        new Dictionary <string, string>
        {
            { "LostDoc.Core.Template", "Template" },
            { "LostDoc.Core.Bundle", "Bundle" },
            { "LostDoc.Core.Template.AssetResolver", "Resolve" }
        });
    TraceSources.TemplateSource.Listeners.Add(traceListener);
    TraceSources.AssetResolverSource.Listeners.Add(traceListener);
    try
    {
        if (this.Quiet.IsPresent)
        {
            const SourceLevels quietLevel = SourceLevels.Error | SourceLevels.Warning | SourceLevels.Critical;
            TraceSources.TemplateSource.Switch.Level = quietLevel;
            TraceSources.AssetResolverSource.Switch.Level = quietLevel;
            TraceSources.BundleSource.Listeners.Add(traceListener);
        }
        else if (this.Verbose.IsPresent)
        {
            const SourceLevels verboseLevel = SourceLevels.All;
            TraceSources.TemplateSource.Switch.Level = verboseLevel;
            TraceSources.AssetResolverSource.Switch.Level = verboseLevel;
            TraceSources.BundleSource.Listeners.Add(traceListener);
        }
        else
        {
            const SourceLevels normalLevel = SourceLevels.Information | SourceLevels.Warning | SourceLevels.Error | SourceLevels.ActivityTracing;
            TraceSources.TemplateSource.Switch.Level = normalLevel;
            TraceSources.AssetResolverSource.Switch.Level = normalLevel;
            // NOTE(review): the BundleSource listener is only attached in the
            // Quiet/Verbose branches, not here - confirm this is intentional.
        }

        // Collect .ldoc input: a single file, or every .ldoc under a directory.
        LinkedList <FileInfo> includedFiles = new LinkedList <FileInfo>();
        if (File.Exists(this.Path))
        {
            includedFiles.AddLast(new FileInfo(this.Path));
        }
        else if (Directory.Exists(this.Path))
        {
            Directory.GetFiles(this.Path, "*.ldoc", SearchOption.AllDirectories)
                     .Aggregate(includedFiles, (l, f) => l.AddLast(new FileInfo(f)).List);
        }
        else
        {
            throw new FileNotFoundException(System.IO.Path.GetFullPath(this.Path));
        }

        Bundle bundle = new Bundle(this.IgnoreVersionComponent);

        TraceSources.TemplateSource.TraceInformation("Merging LostDoc files into bundle.");
        foreach (FileInfo file in includedFiles)
        {
            TraceSources.TemplateSource.TraceEvent(TraceEventType.Information, 0, "Source: {0}", file.Name);
            XDocument fileDoc = XDocument.Load(file.FullName);
            bundle.Add(fileDoc);
        }

        // Resolve template providers exported via MEF, then load the template.
        var lazyProviders = container.GetExports <IFileProvider>(ContractNames.TemplateProvider);
        var realProviders = lazyProviders.Select(lazy => lazy.Value);
        TemplateResolver templateResolver = new TemplateResolver(realProviders.ToArray());
        Template template = new Template(container);
        template.Load(templateResolver, this.Template);

        // Default output: next to the input path when no Output was given.
        string outputDir = this.Output ?? (Directory.Exists(this.Path) ? this.Path : System.IO.Path.GetDirectoryName(this.Path));

        AssetRedirectCollection assetRedirects;
        XDocument mergedDoc = bundle.Merge(out assetRedirects);
        var templateData = new TemplateData(mergedDoc)
        {
            AssetRedirects = assetRedirects,
            OverwriteExistingFiles = this.Force.IsPresent,
            IgnoredVersionComponent = this.IgnoreVersionComponent,
            Arguments = this.Arguments,
            OutputFileProvider = new ScopedFileProvider(new DirectoryFileProvider(), outputDir)
        };
        template.Generate(templateData);
    }
    finally
    {
        // Always detach the console listener, even on failure.
        TraceSources.TemplateSource.Listeners.Remove(traceListener);
        TraceSources.AssetResolverSource.Listeners.Remove(traceListener);
    }
}
/// <summary>
/// Issues the order response message which will mark the requested order as underway
/// </summary>
/// <remarks>
/// For each order response: resolves the original order request, attaches the
/// seller as Distributor when known, adds the response identifier under the
/// resolved assigning authority, and sets the "orderStatus" tag to
/// accepted/rejected. All updated orders are committed in one batch.
/// </remarks>
public void IssueOrderResponse(OrderResponseMessageType orderResponse)
{
    // TODO: Validate the standard header
    Bundle orderTransaction = new Bundle();

    // Loop
    foreach (var resp in orderResponse.orderResponse)
    {
        // Find the original order which this response refers to - mandatory.
        Act orderRequestAct = this.m_gs1Util.GetOrder(resp.originalOrder, ActMoodKeys.Request);
        if (orderRequestAct == null)
        {
            throw new KeyNotFoundException("Could not find originalOrder");
        }

        // Update the supplier if it exists and isn't already a Distributor.
        Place sourceLocation = this.m_gs1Util.GetLocation(resp.seller);
        if (sourceLocation != null && !orderRequestAct.Participations.Any(o => o.ParticipationRoleKey == ActParticipationKey.Distributor))
        {
            // Add participation
            orderRequestAct.Participations.Add(new ActParticipation()
            {
                ActKey = orderRequestAct.Key,
                PlayerEntityKey = sourceLocation.Key,
                ParticipationRoleKey = ActParticipationKey.Distributor
            });
        }
        else if (resp.seller != null && sourceLocation == null)
        {
            throw new KeyNotFoundException($"Could not find seller id with {resp.seller?.additionalPartyIdentification?.FirstOrDefault()?.Value ?? resp.seller.gln}");
        }

        // Resolve the assigning authority for the response identifier:
        // content-owner GLN OID first, then the configured default.
        var oidService = ApplicationContext.Current.GetService <IOidRegistrarService>();
        var gln = oidService.GetOid("GLN");
        var issuingAuthority = oidService.FindData($"{gln.Oid}.{resp.orderResponseIdentification.contentOwner.gln}");
        if (issuingAuthority == null)
        {
            issuingAuthority = oidService.GetOid(this.m_configuration.DefaultContentOwnerAssigningAuthority);
        }
        if (issuingAuthority == null)
        {
            throw new KeyNotFoundException("Cannot find default issuing authority for advice identification. Please configure a valid OID");
        }

        orderRequestAct.Identifiers.Add(new ActIdentifier(new AssigningAuthority(issuingAuthority.Mnemonic, issuingAuthority.Name, issuingAuthority.Oid), resp.orderResponseIdentification.entityIdentification));

        // Ensure the order has an "orderStatus" tag we can update.
        var existingTag = orderRequestAct.Tags.FirstOrDefault(o => o.TagKey == "orderStatus");
        if (existingTag == null)
        {
            existingTag = new ActTag("orderStatus", "");
            orderRequestAct.Tags.Add(existingTag);
        }

        // Accepted or not
        if (resp.responseStatusCode?.Value == "ACCEPTED")
        {
            existingTag.Value = "accepted";
        }
        else if (resp.responseStatusCode?.Value == "REJECTED")
        {
            existingTag.Value = "rejected";
        }
        orderTransaction.Add(orderRequestAct);
    }

    // insert transaction
    try
    {
        ApplicationContext.Current.GetService <IBatchRepositoryService>().Insert(orderTransaction);
    }
    catch (Exception e)
    {
        this.m_tracer.TraceError("Error issuing despatch advice: {0}", e);
        throw new Exception($"Error issuing despatch advice: {e.Message}", e);
    }
}
/// <summary>
/// This method will construct a three folder structure inside <paramref name="targetDirectory"/> containing: Html, Index, and Source
/// </summary>
/// <param name="sourceDirectory">
/// Directory containing ldoc files
/// </param>
/// <param name="targetDirectory">
/// Output directory
/// </param>
public void Build(string sourceDirectory, string targetDirectory)
{
    // Refuse to build into a non-empty target so existing output is never clobbered
    if (Directory.Exists(targetDirectory) && Directory.EnumerateFileSystemEntries(targetDirectory).Any())
    {
        throw new InvalidOperationException("Target path is not empty.");
    }

    this.OnStateChanged(State.Preparing);

    // Output layout: Html (rendered docs), Index (Lucene search index),
    // Source (copies of the input .ldoc files), Logs (per-phase trace logs)
    string htmlRoot = Path.Combine(targetDirectory, "Html");
    string indexRoot = Path.Combine(targetDirectory, "Index");
    string sourceRoot = Path.Combine(targetDirectory, "Source");
    string logRoot = Path.Combine(targetDirectory, "Logs");
    DirectoryInfo htmlDir = Directory.CreateDirectory(htmlRoot);
    DirectoryInfo indexDir = Directory.CreateDirectory(indexRoot);
    DirectoryInfo sourceDir = Directory.CreateDirectory(sourceRoot);
    DirectoryInfo logDir = Directory.CreateDirectory(logRoot);

    var sourceFiles = Directory.EnumerateFiles(sourceDirectory, "*.ldoc", SearchOption.TopDirectoryOnly);

    // copy all source files to output directory and add to bundle
    Bundle bundle = new Bundle(this.IgnoreVersionComponent);
    foreach (var sourceFile in sourceFiles)
    {
        string targetFile = Path.Combine(sourceDir.FullName, Path.GetFileName(sourceFile));
        File.Copy(sourceFile, targetFile);
        bundle.Add(XDocument.Load(targetFile));
    }

    TemplateOutput templateOutput;

    // wire up logging for the merge/templating phase; the listener is detached
    // again before the using block disposes it
    string templateLogFile = Path.Combine(logDir.FullName, string.Format("template_{0:yyyy'_'MM'_'dd'__'HH'_'mm'_'ss}.log", DateTime.Now));
    using (TextWriterTraceListener traceListener = new TextWriterTraceListener(templateLogFile))
    {
        // log everything
        traceListener.Filter = new EventTypeFilter(SourceLevels.All);
        LostDoc.Diagnostics.TraceSources.TemplateSource.Switch.Level = SourceLevels.All;
        LostDoc.Diagnostics.TraceSources.BundleSource.Switch.Level = SourceLevels.All;
        LostDoc.Diagnostics.TraceSources.AssetResolverSource.Switch.Level = SourceLevels.All;
        LostDoc.Diagnostics.TraceSources.TemplateSource.Listeners.Add(traceListener);
        LostDoc.Diagnostics.TraceSources.BundleSource.Listeners.Add(traceListener);
        LostDoc.Diagnostics.TraceSources.AssetResolverSource.Listeners.Add(traceListener);

        // merge ldoc files
        this.OnStateChanged(State.Merging);
        AssetRedirectCollection assetRedirects;
        var mergedDoc = bundle.Merge(out assetRedirects);

        // generate output
        var templateData = new TemplateData(mergedDoc)
        {
            AssetRedirects = assetRedirects,
            IgnoredVersionComponent = this.IgnoreVersionComponent,
            OutputFileProvider = new ScopedFileProvider(new DirectoryFileProvider(), htmlDir.FullName),
            //TargetDirectory = htmlDir.FullName,
            Arguments = new Dictionary<string, object> { { "SearchUri", "/search/" } },
            KeepTemporaryFiles = true,
            TemporaryFilesPath = Path.Combine(logDir.FullName, "temp")
        };

        this.OnStateChanged(State.Templating);
        templateOutput = this.Template.Generate(templateData);

        // Detach before dispose so the sources never hold a dead listener
        LostDoc.Diagnostics.TraceSources.TemplateSource.Listeners.Remove(traceListener);
        LostDoc.Diagnostics.TraceSources.BundleSource.Listeners.Remove(traceListener);
        LostDoc.Diagnostics.TraceSources.AssetResolverSource.Listeners.Remove(traceListener);
    }

    this.OnStateChanged(State.Indexing);

    // Same listener pattern for the indexing phase, logged to its own file
    string indexLogFile = Path.Combine(logDir.FullName, string.Format("index_{0:yyyy'_'MM'_'dd'__'HH'_'mm'_'ss}.log", DateTime.Now));
    using (TextWriterTraceListener traceListener = new TextWriterTraceListener(indexLogFile))
    {
        // log everything
        traceListener.Filter = new EventTypeFilter(SourceLevels.All);
        TraceSources.ContentBuilderSource.Switch.Level = SourceLevels.All;
        TraceSources.ContentBuilderSource.Listeners.Add(traceListener);

        // one stop-word per line
        StringReader stopWordsReader = new StringReader(@"missing");

        // index output
        using (var directory = FSDirectory.Open(indexDir))
        using (stopWordsReader)
        {
            // "title" gets its own analyzer; everything else uses the standard analyzer
            Analyzer analyzer = new StandardAnalyzer(global::Lucene.Net.Util.Version.LUCENE_30, stopWordsReader);
            Analyzer titleAnalyzer = new TitleAnalyzer();
            IDictionary<string, Analyzer> fieldAnalyzers = new Dictionary<string, Analyzer>
            {
                { "title", titleAnalyzer }
            };
            PerFieldAnalyzerWrapper analyzerWrapper = new PerFieldAnalyzerWrapper(analyzer, fieldAnalyzers);

            using (var writer = new IndexWriter(directory, analyzerWrapper, IndexWriter.MaxFieldLength.UNLIMITED))
            {
                // Index only the stylesheet applications that produced .xml output
                var saResults = templateOutput.Results.Select(wur => wur.WorkUnit).OfType<StylesheetApplication>();
                var saDict = saResults.ToDictionary(sa => sa.Asset);
                var indexResults = saDict.Values.Where(sa => sa.SaveAs.EndsWith(".xml"));
                foreach (var sa in indexResults)
                {
                    string absPath = Path.Combine(htmlDir.FullName, sa.SaveAs);
                    XDocument indexDoc = XDocument.Load(absPath);

                    // Pull the indexable fields out of the generated document
                    string assetId = indexDoc.Root.Attribute("assetId").Value;
                    string title = indexDoc.Root.Element("title").Value.Trim();
                    string summary = indexDoc.Root.Element("summary").Value.Trim();
                    string text = indexDoc.Root.Element("text").Value.Trim();

                    var ssApplication = saDict[AssetIdentifier.Parse(assetId)];
                    var doc = new Document();
                    // uri/aid/title/summary are stored for result display; alias/section/content are search-only
                    doc.Add(new Field("uri", new Uri(ssApplication.SaveAs, UriKind.Relative).ToString(), Field.Store.YES, Field.Index.NO));
                    doc.Add(new Field("aid", ssApplication.Asset, Field.Store.YES, Field.Index.NOT_ANALYZED));
                    foreach (AssetIdentifier aid in ssApplication.Aliases)
                    {
                        doc.Add(new Field("alias", aid, Field.Store.NO, Field.Index.NOT_ANALYZED));
                    }
                    foreach (var section in ssApplication.Sections)
                    {
                        doc.Add(new Field("section", section.AssetIdentifier, Field.Store.NO, Field.Index.NOT_ANALYZED));
                    }
                    doc.Add(new Field("title", title, Field.Store.YES, Field.Index.ANALYZED));
                    doc.Add(new Field("summary", summary, Field.Store.YES, Field.Index.ANALYZED));
                    doc.Add(new Field("content", text, Field.Store.NO, Field.Index.ANALYZED));
                    TraceSources.ContentBuilderSource.TraceVerbose("Indexing document: {0}", doc.ToString());
                    writer.AddDocument(doc);
                }
                writer.Optimize();
                writer.Commit();
            }

            analyzerWrapper.Close();
            analyzer.Close();
        }

        TraceSources.ContentBuilderSource.Listeners.Remove(traceListener);
    }

    this.OnStateChanged(State.Finalizing);

    // Write an info.xml manifest describing everything that was generated
    var infoDoc = new XDocument(
        new XElement("content",
                     new XAttribute("created", XmlConvert.ToString(DateTime.UtcNow, XmlDateTimeSerializationMode.Utc)),
                     templateOutput.Results.Select(this.ConvertToXml)));
    infoDoc.Save(Path.Combine(targetDirectory, "info.xml"));

    this.OnStateChanged(State.Idle);
}
void Update()
{
    // Re-resolve component references only when the model reference has been swapped
    if (Model != null && OldModel != Model)
    {
        animator = Model.GetComponent<Animator>();
        blendShapeProxy = Model.GetComponent<VRMBlendShapeProxy>();
        OldModel = Model;
    }

    if (Model != null && animator != null && uClient != null)
    {
        // Root transform
        var rootTransform = Model.transform;
        if (rootTransform != null)
        {
            uClient.Send("/VMC/Ext/Root/Pos",
                         "root",
                         rootTransform.position.x, rootTransform.position.y, rootTransform.position.z,
                         rootTransform.rotation.x, rootTransform.rotation.y, rootTransform.rotation.z, rootTransform.rotation.w);
        }

        // Bones: one message per mapped humanoid bone, batched into a single bundle
        var boneBundle = new Bundle(Timestamp.Now);
        foreach (HumanBodyBones bone in Enum.GetValues(typeof(HumanBodyBones)))
        {
            if (bone == HumanBodyBones.LastBone)
            {
                continue; // sentinel enum value, not an actual bone
            }
            var boneTransform = animator.GetBoneTransform(bone);
            if (boneTransform == null)
            {
                continue; // bone not mapped on this avatar
            }
            boneBundle.Add(new Message("/VMC/Ext/Bone/Pos",
                                       bone.ToString(),
                                       boneTransform.localPosition.x, boneTransform.localPosition.y, boneTransform.localPosition.z,
                                       boneTransform.localRotation.x, boneTransform.localRotation.y, boneTransform.localRotation.z, boneTransform.localRotation.w));
        }
        uClient.Send(boneBundle);

        // Send selected bone positions as virtual trackers
        var trackerBundle = new Bundle(Timestamp.Now);
        SendBoneTransformForTracker(ref trackerBundle, HumanBodyBones.Head, "Head");
        SendBoneTransformForTracker(ref trackerBundle, HumanBodyBones.Spine, "Spine");
        SendBoneTransformForTracker(ref trackerBundle, HumanBodyBones.LeftHand, "LeftHand");
        SendBoneTransformForTracker(ref trackerBundle, HumanBodyBones.RightHand, "RightHand");
        SendBoneTransformForTracker(ref trackerBundle, HumanBodyBones.LeftFoot, "LeftFoot");
        SendBoneTransformForTracker(ref trackerBundle, HumanBodyBones.RightFoot, "RightFoot");
        uClient.Send(trackerBundle);

        // Blend shapes: all current values followed by a single Apply message
        if (blendShapeProxy != null)
        {
            var blendShapeBundle = new Bundle(Timestamp.Now);
            foreach (var entry in blendShapeProxy.GetValues())
            {
                blendShapeBundle.Add(new Message("/VMC/Ext/Blend/Val", entry.Key.ToString(), (float)entry.Value));
            }
            blendShapeBundle.Add(new Message("/VMC/Ext/Blend/Apply"));
            uClient.Send(blendShapeBundle);
        }

        // Signal that a full frame was sent
        uClient.Send("/VMC/Ext/OK", 1);
    }
    else
    {
        // Model or components not ready
        uClient.Send("/VMC/Ext/OK", 0);
    }

    // Frame timestamp, sent regardless of availability
    uClient.Send("/VMC/Ext/T", Time.time);
}
/// <summary>
/// Downloads the patient identified by the <c>_id</c> query parameter from the remote HDSI
/// service, persists them locally, subscribes them to the configured facility, pulls related
/// persons, and pages through and persists the patient's acts.
/// </summary>
/// <returns>The downloaded patient.</returns>
/// <exception cref="ArgumentNullException">No <c>_id</c> parameter was supplied.</exception>
/// <exception cref="KeyNotFoundException">The patient does not exist on the remote service.</exception>
public IdentifiedData DownloadPatient()
{
    var search = NameValueCollection.ParseQueryString(MiniHdsiServer.CurrentContext.Request.Url.Query);
    if (!search.ContainsKey("_id"))
    {
        throw new ArgumentNullException("Missing _id parameter");
    }
    Guid patientId = Guid.Parse(search["_id"][0]);

    // Get the patient
    var hdsiIntegrationService = ApplicationContext.Current.GetService<IClinicalIntegrationService>();
    var pdp = ApplicationContext.Current.GetService<IDataPersistenceService<Bundle>>();

    // We shove the data onto the queue for import!!! :)
    ApplicationContext.Current.SetProgress(Strings.locale_downloadingExternalPatient, 0.1f);
    var dbundle = hdsiIntegrationService.Find<Patient>(o => o.Key == patientId, 0, 1);
    dbundle.Item.RemoveAll(o => !(o is Patient || o is Person));
    pdp.Insert(dbundle);
    var patient = dbundle.Item.OfType<Patient>().FirstOrDefault();

    // Fix: a missing remote patient previously fell through to a NullReferenceException below
    if (patient == null)
    {
        throw new KeyNotFoundException($"Patient {patientId} was not found on the remote service");
    }

    // We now want to subscribe this patient to our facility
    var facilityId = ApplicationContext.Current.Configuration.GetSection<SynchronizationConfigurationSection>().Facilities?.FirstOrDefault();
    if (facilityId != null)
    {
        patient.Relationships.Add(new EntityRelationship(EntityRelationshipTypeKeys.IncidentalServiceDeliveryLocation, Guid.Parse(facilityId))
        {
            SourceEntityKey = patient.Key
        });
        hdsiIntegrationService.Update(patient);
    }

    // Fetch and persist any related persons
    var personBundle = new Bundle();
    foreach (var rel in patient.Relationships)
    {
        // Fix: guard against relationships with no target key (previously threw on .Value)
        if (!rel.TargetEntityKey.HasValue)
        {
            continue;
        }
        var person = hdsiIntegrationService.Get<Entity>(rel.TargetEntityKey.Value, null);
        if (person != null && person.Type == "Person")
        {
            personBundle.Add(person as Person);
        }
    }
    if (personBundle.Item.Count > 0)
    {
        pdp.Insert(personBundle);
    }

    // Page through the patient's acts 20 at a time and persist each page
    int tr = 1, ofs = 0;
    while (ofs < tr)
    {
        var bundle = hdsiIntegrationService.Find<Act>(o => o.Participations.Where(p => p.ParticipationRole.Mnemonic == "RecordTarget").Any(p => p.PlayerEntityKey == patientId), ofs, 20);
        //bundle.Reconstitute();
        tr = bundle.TotalResults;
        // Fix: Math.Max avoids a NaN progress value when the patient has no acts (tr == 0)
        ApplicationContext.Current.SetProgress(Strings.locale_downloadingExternalPatient, ((float)ofs / Math.Max(tr, 1)) * 0.9f + 0.1f);
        ofs += 20;
        pdp.Insert(bundle);
    }
    return patient;
}
public override void Invoke(CompositionContainer container)
{
    // Console trace listener mapping each LostDoc trace source to a short channel label
    var consoleListener = new ConsolidatedConsoleTraceListener
    {
        { TraceSources.TemplateSource, "Template" },
        { TraceSources.BundleSource, "Bundle" },
        { TraceSources.AssetResolverSource, "Resolve" }
    };

    using (consoleListener)
    {
        this.ConfigureTraceLevels(consoleListener);

        // Collect the .ldoc inputs: a single file, or every file under a directory tree
        LinkedList<FileInfo> sources = new LinkedList<FileInfo>();
        if (File.Exists(this.Path))
        {
            sources.AddLast(new FileInfo(this.Path));
        }
        else if (Directory.Exists(this.Path))
        {
            foreach (string found in Directory.GetFiles(this.Path, "*.ldoc", SearchOption.AllDirectories))
            {
                sources.AddLast(new FileInfo(found));
            }
        }
        else
        {
            throw new FileNotFoundException(System.IO.Path.GetFullPath(this.Path));
        }

        // Merge every source document into one bundle
        Bundle bundle = new Bundle(this.IgnoreVersionComponent);
        TraceSources.TemplateSource.TraceInformation("Merging LostDoc files into bundle.");
        foreach (FileInfo sourceFile in sources)
        {
            TraceSources.TemplateSource.TraceEvent(TraceEventType.Information, 0, "Source: {0}", sourceFile.Name);
            bundle.Add(XDocument.Load(sourceFile.FullName));
        }

        // Resolve and load the requested template via the MEF-exported file providers
        var providers = container.GetExports<IFileProvider>(ContractNames.TemplateProvider)
                                 .Select(lazy => lazy.Value)
                                 .ToArray();
        TemplateResolver templateResolver = new TemplateResolver(providers);
        TemplateInfo templateInfo = templateResolver.Resolve(this.Template);
        Template template = templateInfo.Load(container);

        // Default the output location next to the input when none was given explicitly
        string outputDir = this.Output
                           ?? (Directory.Exists(this.Path) ? this.Path : System.IO.Path.GetDirectoryName(this.Path));

        // Merge the bundle and generate the templated output
        AssetRedirectCollection assetRedirects;
        XDocument mergedDoc = bundle.Merge(out assetRedirects);
        var templateData = new TemplateData(mergedDoc)
        {
            AssetRedirects = assetRedirects,
            OverwriteExistingFiles = this.Force.IsPresent,
            IgnoredVersionComponent = this.IgnoreVersionComponent,
            Arguments = this.Arguments,
            OutputFileProvider = new ScopedFileProvider(new DirectoryFileProvider(), outputDir)
        };
        template.Generate(templateData);
    }
}
public void Invoke(CompositionContainer container)
{
    // Console listener that maps fully-qualified trace source names to short display labels
    var traceListener = new ConsolidatedConsoleTraceListener(
        new Dictionary<string, string>
        {
            { "LostDoc.Core.Template", "Template" },
            { "LostDoc.Core.Bundle", "Bundle" },
            { "LostDoc.Core.Template.AssetResolver", "Resolve" }
        });
    TraceSources.TemplateSource.Listeners.Add(traceListener);
    TraceSources.AssetResolverSource.Listeners.Add(traceListener);
    try
    {
        // Trace verbosity is selected by the Quiet/Verbose command-line switches
        if (this.Quiet.IsPresent)
        {
            const SourceLevels quietLevel = SourceLevels.Error | SourceLevels.Warning | SourceLevels.Critical;
            TraceSources.TemplateSource.Switch.Level = quietLevel;
            TraceSources.AssetResolverSource.Switch.Level = quietLevel;
            // NOTE(review): BundleSource receives the listener only in the quiet/verbose
            // branches, and it is never removed in the finally block below — confirm
            // whether this asymmetry is intentional
            TraceSources.BundleSource.Listeners.Add(traceListener);
        }
        else if (this.Verbose.IsPresent)
        {
            const SourceLevels verboseLevel = SourceLevels.All;
            TraceSources.TemplateSource.Switch.Level = verboseLevel;
            TraceSources.AssetResolverSource.Switch.Level = verboseLevel;
            TraceSources.BundleSource.Listeners.Add(traceListener);
        }
        else
        {
            const SourceLevels normalLevel = SourceLevels.Information | SourceLevels.Warning | SourceLevels.Error | SourceLevels.ActivityTracing;
            TraceSources.TemplateSource.Switch.Level = normalLevel;
            TraceSources.AssetResolverSource.Switch.Level = normalLevel;
        }

        // Gather the input .ldoc files: a single file, or all files under a directory tree
        LinkedList<FileInfo> includedFiles = new LinkedList<FileInfo>();
        if (File.Exists(this.Path))
            includedFiles.AddLast(new FileInfo(this.Path));
        else if (Directory.Exists(this.Path))
        {
            // Aggregate is used purely for its side effect of appending to the linked list
            Directory.GetFiles(this.Path, "*.ldoc", SearchOption.AllDirectories)
                     .Aggregate(includedFiles, (l, f) => l.AddLast(new FileInfo(f)).List);
        }
        else
            throw new FileNotFoundException(System.IO.Path.GetFullPath(this.Path));

        // Merge every source document into one bundle
        Bundle bundle = new Bundle(this.IgnoreVersionComponent);
        TraceSources.TemplateSource.TraceInformation("Merging LostDoc files into bundle.");
        foreach (FileInfo file in includedFiles)
        {
            TraceSources.TemplateSource.TraceEvent(TraceEventType.Information, 0, "Source: {0}", file.Name);
            XDocument fileDoc = XDocument.Load(file.FullName);
            bundle.Add(fileDoc);
        }

        // Resolve the template through the MEF-exported file providers and load it
        var lazyProviders = container.GetExports<IFileProvider>(ContractNames.TemplateProvider);
        var realProviders = lazyProviders.Select(lazy => lazy.Value);
        TemplateResolver templateResolver = new TemplateResolver(realProviders.ToArray());
        Template template = new Template(container);
        template.Load(templateResolver, this.Template);

        // Output defaults to the input location when not given explicitly
        string outputDir = this.Output ?? (Directory.Exists(this.Path) ? this.Path : System.IO.Path.GetDirectoryName(this.Path));
        AssetRedirectCollection assetRedirects;
        XDocument mergedDoc = bundle.Merge(out assetRedirects);
        var templateData = new TemplateData(mergedDoc)
        {
            AssetRedirects = assetRedirects,
            OverwriteExistingFiles = this.Force.IsPresent,
            IgnoredVersionComponent = this.IgnoreVersionComponent,
            Arguments = this.Arguments,
            OutputFileProvider = new ScopedFileProvider(new DirectoryFileProvider(), outputDir)
        };
        template.Generate(templateData);
    }
    finally
    {
        // Detach the console listener from the sources it was attached to unconditionally
        // (BundleSource: see the review note above)
        TraceSources.TemplateSource.Listeners.Remove(traceListener);
        TraceSources.AssetResolverSource.Listeners.Remove(traceListener);
    }
}
/// <summary>
/// Merges the specified duplicates into the master
/// </summary>
/// <param name="masterKey">Key of the surviving (master) record; event subscribers may substitute a different survivor.</param>
/// <param name="linkedDuplicates">Keys of the duplicate records to fold into the master.</param>
/// <returns>The merge result with status, surviving key, and merged keys.</returns>
public virtual RecordMergeResult Merge(Guid masterKey, IEnumerable<Guid> linkedDuplicates)
{
    // Pre-merge event: subscribers may cancel, or redirect the merge to a different survivor
    var mergeEventArgs = new DataMergingEventArgs<TModel>(masterKey, linkedDuplicates);
    this.Merging?.Invoke(this, mergeEventArgs);
    if (mergeEventArgs.Cancel)
    {
        this.m_tracer.TraceInfo("Pre-Event trigger indicated cancel merge");
        return (new RecordMergeResult(RecordMergeStatus.Cancelled, null, null));
    }

    // The invoke may have changed the master
    masterKey = mergeEventArgs.SurvivorKey;
    var master = ApplicationServiceContext.Current.GetService<IDataPersistenceService<TModel>>().Get(masterKey, null, true, AuthenticationContext.Current.Principal);

    // We'll update the parameters from the candidate to create a single master record
    // TODO: Verify this in edge cases
    Bundle persistenceBundle = new Bundle();
    foreach (var l in linkedDuplicates)
    {
        var local = ApplicationServiceContext.Current.GetService<IDataPersistenceService<TModel>>().Get(l, null, true, AuthenticationContext.Current.Principal);
        master.CopyObjectData(local, false); // Copy data which is different

        // Add replaces and nullify
        // NOTE(review): the branch comments suggest this distinguishes already-persisted
        // duplicates from not-yet-persisted ones, but Guid.Empty is an unusual sentinel for
        // "persisted" — confirm the condition is not inverted relative to the intent
        if (l == Guid.Empty)
        {
            if (master is Act actMaster)
            {
                actMaster.Relationships.Add(new ActRelationship(ActRelationshipTypeKeys.Replaces, l));
            }
            else if (master is Entity entityMaster)
            {
                entityMaster.Relationships.Add(new EntityRelationship(EntityRelationshipTypeKeys.Replaces, l));
            }
            persistenceBundle.Add(local);
        }
        else // Not persisted yet
        {
            if (master is Act actMaster)
            {
                actMaster.Relationships.Add(new ActRelationship(ActRelationshipTypeKeys.Replaces, masterKey)
                {
                    TargetActKey = l
                });
            }
            else if (master is Entity entityMaster)
            {
                entityMaster.Relationships.Add(new EntityRelationship(EntityRelationshipTypeKeys.Replaces, masterKey)
                {
                    TargetEntityKey = l
                });
            }
        }

        // NOTE(review): the cast yields null (and an NRE) if TModel does not implement
        // IHasState; also, the nullification only reaches storage for duplicates that were
        // added to persistenceBundle in the first branch above — verify both are intended
        (local as IHasState).StatusConceptKey = StatusKeys.Nullified;
    }

    master.Key = masterKey;
    persistenceBundle.Add(master);

    // Persist the whole merge as a single committed bundle under the system principal
    ApplicationServiceContext.Current.GetService<IDataPersistenceService<Bundle>>().Update(persistenceBundle, TransactionMode.Commit, AuthenticationContext.SystemPrincipal);
    this.Merged?.Invoke(this, new DataMergeEventArgs<TModel>(masterKey, linkedDuplicates));
    return (new RecordMergeResult(RecordMergeStatus.Success, new Guid[] { masterKey }, linkedDuplicates.ToArray()));
}
/// <summary>
/// Builds the patch archives for this project: a generated_patch.big under Patches for
/// regular files and regenerated init tables, and one under BundleTarget for bundle files
/// and regenerated bundle manifests.
/// </summary>
/// <param name="package">Package supplying the name, base path and main archive to patch against.</param>
public void Build(Package package)
{
    HashSet<UInt32> types = new HashSet<uint>();
    Dictionary<UInt32, List<ProjectFile>> files = new Dictionary<uint, List<ProjectFile>>();

    Big big = new Big();
    Big bundle_patch = new Big();

    // Group project files by type and route them into the correct patch archive:
    // type 0x100 files are bundles, everything else goes to the main patch
    foreach (var file in Files)
    {
        // Fix: a redundant ContainsKey check duplicated what HashSet.Add already tells us
        if (types.Add(file.Type))
        {
            files[file.Type] = new List<ProjectFile>();
        }
        files[file.Type].Add(file);

        String full_path = Path.GetFullPath(Path.Combine(DirPath, file.Filename));
        if (file.Type == 0x100) // bundle
        {
            bundle_patch.AddFile(file.FileID, full_path, file.Flags);
        }
        else
        {
            big.AddFile(file.FileID, full_path, file.Flags);
        }
    }

    // Fix: ensure the temp directory for regenerated init/bundle files exists
    // (the original FileMode.Create would throw if "tmp" was missing)
    Directory.CreateDirectory("tmp");

    foreach (var t in types)
    {
        var init_id = Utils.SH(String.Format(".\\{0}\\{1}_init.bin", package.PackageName, Utils.BundleTypeName(t)));
        var bundle_id = Utils.SH(String.Format(".\\{0}\\bundles\\{1}.bundle", package.PackageName, Utils.BundleMgrName(t)));
        var init_file = package.Main.FindFirst(init_id);
        var bundle_file = package.Main.FindFirst(bundle_id);

        // Regenerate the init table for this type if any entries were added
        if (init_file != null)
        {
            Init init = new Init();
            init.Load(init_file.GetStream());
            bool changed = false;
            foreach (var file in files[t])
            {
                if (init.Add(file.FileID, file.SymbolName.GetHash()))
                {
                    changed = true;
                }
            }
            if (changed)
            {
                String init_filename = String.Format("tmp\\{0}_init.bin", Utils.BundleTypeName(t));
                // Fix: dispose the FileStream so the handle is flushed and released
                // before AddFile re-reads the file (assumes Save does not retain the
                // stream — TODO confirm)
                using (var stream = new FileStream(init_filename, FileMode.Create))
                {
                    init.Save(stream);
                }
                big.AddFile(init_id, init_filename, 0x14);
            }
        }

        // Regenerate the bundle manifest for this type if any entries were added
        if (bundle_file != null)
        {
            Bundle bundle = new Bundle();
            bundle.Load(bundle_file.GetStream());
            bool changed = false;
            foreach (var file in files[t])
            {
                if (bundle.Add(file.FileID, (byte)file.Type, false))
                {
                    changed = true;
                }
            }
            if (changed)
            {
                String bundle_filename = String.Format("tmp\\{0}.bundle", Utils.BundleMgrName(t));
                // Fix: same stream-leak fix as for the init table above
                using (var stream = new FileStream(bundle_filename, FileMode.Create))
                {
                    bundle.Save(stream);
                }
                bundle_patch.AddFile(bundle_id, bundle_filename, 0x14);
            }
        }
    }

    // Fix: also ensure BundleTarget exists — only Patches was created before,
    // so the second Build call could fail on a fresh base path
    Directory.CreateDirectory(Path.Combine(package.BasePath, "Patches"));
    Directory.CreateDirectory(Path.Combine(package.BasePath, "BundleTarget"));
    big.Build(Path.Combine(package.BasePath, "Patches", "generated_patch.big"));
    bundle_patch.Build(Path.Combine(package.BasePath, "BundleTarget", "generated_patch.big"));
}