/// <summary>
/// Creates an instance of this class. One instance is maintained per service principal.
/// </summary>
/// <param name="tenantId">ID of the Azure AD tenant</param>
/// <param name="appId">App ID of the service principal</param>
/// <param name="key">Symmetric key of the service principal</param>
/// <returns>RmsContentPublisher instance</returns>
public static RmsContentPublisher Create(string tenantId, string appId, string key)
{
    ServicePrincipalTuple servicePrincipalTuple;
    servicePrincipalTuple.AppId = appId;
    servicePrincipalTuple.TenantId = tenantId;
    servicePrincipalTuple.Key = key;

    // Check whether an instance already exists in the cache. The lookup stays inside
    // the lock so the dictionary is never read while another thread is adding to it.
    lock (LockRmsContentPublisherInstances)
    {
        if (!RmsContentPublisherInstances.ContainsKey(servicePrincipalTuple))
        {
            RmsContentPublisherInstances.Add(
                servicePrincipalTuple,
                new RmsContentPublisher(servicePrincipalTuple.GetSymmetricKey()));
        }

        return RmsContentPublisherInstances[servicePrincipalTuple] as RmsContentPublisher;
    }
}
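// NOTE: The ServicePrincipalTuple type is not part of this snippet. The sketch below is a
// hypothetical minimal definition, assuming a struct with public fields: that is what lets
// Create assign AppId/TenantId/Key without calling a constructor, and value-type equality
// is what makes two tuples with the same values hit the same cache entry.
internal struct ServicePrincipalTuple
{
    public string TenantId;
    public string AppId;
    public string Key;

    // Assumed helper: wraps the stored values in the symmetric-key credential that the
    // RmsContentPublisher constructor consumes. SymmetricKeyCredential here refers to the
    // RMS SDK 2.1 managed type (Microsoft.InformationProtectionAndControl); if a different
    // credential type is used, adjust accordingly.
    public SymmetricKeyCredential GetSymmetricKey()
    {
        return new SymmetricKeyCredential
        {
            AppPrincipalId = AppId,
            Base64Key = Key,
            BcpDomain = TenantId
        };
    }
}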
private void ProcessQueueMessage(object state)
{
    CloudQueueMessage msg = state as CloudQueueMessage;
    try
    {
        // Log and delete if this is a "poison" queue message (repeatedly processed
        // and always causes an error that prevents processing from completing).
        // Production applications should move the "poison" message to a "dead message"
        // queue for analysis rather than deleting the message.
        if (msg.DequeueCount > 5)
        {
            Trace.TraceError("Deleting poison message: message id {0} Role Instance {1}.",
                msg.Id, GetRoleInstance());
            DataModel.StorageFactory.Instance.IpcAzureAppWorkerJobQueue.DeleteMessage(msg);
            return;
        }

        RmsCommand rmsCommand = new RmsCommand(msg.AsString);
        switch (rmsCommand.RmsOperationCommand)
        {
            case RmsCommand.Command.GetTemplate:
            {
                ServicePrincipalModel sp = ServicePrincipalModel.GetFromStorage(
                    rmsCommand.Parameters.First<object>().ToString());
                RMS.RmsContentPublisher rmsPublisher = RMS.RmsContentPublisher.Create(sp.TenantId, sp.AppId, sp.Key);
                var templates = rmsPublisher.GetRmsTemplates();

                List<TemplateModel> templateEntityList = new List<TemplateModel>();
                foreach (var temp in templates)
                {
                    TemplateModel templateEntity = new TemplateModel();
                    templateEntity.TenantId = sp.TenantId;
                    templateEntity.TemplateId = temp.TemplateId;
                    templateEntity.TemplateName = temp.Name;
                    templateEntity.TemplateDescription = temp.Description;
                    templateEntityList.Add(templateEntity);
                }

                TemplateModel.SaveToStorage(templateEntityList);
                break;
            }

            case RmsCommand.Command.PublishTemplate:
            {
                PublishModel publishJob = PublishModel.GetFromStorage(
                    rmsCommand.Parameters[0].ToString(), rmsCommand.Parameters[1].ToString());
                ServicePrincipalModel sp = ServicePrincipalModel.GetFromStorage(rmsCommand.Parameters[0].ToString());
                CloudBlockBlob originalFileBlob = DataModel.StorageFactory.Instance.IpcAzureAppFileBlobContainer
                    .GetBlockBlobReference(publishJob.OriginalFileBlobRef);

                Stream sinkStream = null;
                string tempFilePath = null;
                try
                {
                    // If the file is smaller than 100,000 bytes, keep the protected output in
                    // memory; otherwise spill it to a temp file on local disk.
                    if (publishJob.OriginalFileSizeInBytes < 100000)
                    {
                        sinkStream = new MemoryStream();
                    }
                    else
                    {
                        tempFilePath = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
                        sinkStream = new FileStream(tempFilePath, FileMode.Create);
                    }

                    using (Stream sourceStream = originalFileBlob.OpenRead())
                    using (sinkStream)
                    {
                        RMS.RmsContent rmsContent = new RMS.RmsContent(sourceStream, sinkStream);
                        rmsContent.RmsTemplateId = publishJob.TemplateId;
                        rmsContent.OriginalFileNameWithExtension = publishJob.OriginalFileName;

                        RMS.RmsContentPublisher rmsContentPublisher = RMS.RmsContentPublisher.Create(sp.TenantId, sp.AppId, sp.Key);
                        rmsContentPublisher.PublishContent(rmsContent);
                        publishJob.PublishedFileName = rmsContent.PublishedFileNameWithExtension;

                        sinkStream.Flush();
                        sinkStream.Seek(0, SeekOrigin.Begin);

                        // The published file is uploaded to blob storage.
                        // Note: this sample code doesn't manage the lifetime of the original and
                        // published file blobs; actual code must manage their lifetime as appropriate.
                        CloudBlockBlob destFileBlob = DataModel.StorageFactory.Instance.IpcAzureAppFileBlobContainer
                            .GetBlockBlobReference(publishJob.PublishedFileBlobRef);
                        using (CloudBlobStream blobStream = destFileBlob.OpenWrite())
                        {
                            int tempSize = 1024;
                            byte[] tempBuffer = new byte[tempSize];
                            while (true)
                            {
                                int readSize = sinkStream.Read(tempBuffer, 0, tempSize);
                                if (readSize <= 0)
                                {
                                    break;
                                }
                                blobStream.Write(tempBuffer, 0, readSize);
                            }
                            blobStream.Flush();
                        }
                    }

                    publishJob.JState = PublishModel.JobState.Completed.ToString();
                    publishJob.SaveToStorage();
                    break;
                }
                finally
                {
                    // Clean up the temp file used for large payloads.
                    if (!string.IsNullOrWhiteSpace(tempFilePath) && File.Exists(tempFilePath))
                    {
                        File.Delete(tempFilePath);
                    }
                }
            }
        }

        // Delete the message from the queue once the command has been processed.
        DataModel.StorageFactory.Instance.IpcAzureAppWorkerJobQueue.DeleteMessage(msg);
    }
    catch (Exception ex)
    {
        string err = ex.Message;
        if (ex.InnerException != null)
        {
            err += " Inner Exception: " + ex.InnerException.Message;
        }
        if (msg != null)
        {
            err += " Last queue message retrieved: " + msg.AsString;
        }
        Trace.TraceError(err);
    }
}
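// NOTE: The polling loop that feeds ProcessQueueMessage is not shown in this snippet.
// The sketch below is one plausible worker-role Run loop, assuming the job queue exposed by
// DataModel.StorageFactory is a CloudQueue (Microsoft.WindowsAzure.Storage.Queue) and that the
// object parameter on ProcessQueueMessage exists for ThreadPool-style dispatch (WaitCallback).
public override void Run()
{
    CloudQueue jobQueue = DataModel.StorageFactory.Instance.IpcAzureAppWorkerJobQueue;

    while (true)
    {
        // GetMessage hides the message from other consumers for the visibility timeout;
        // ProcessQueueMessage deletes it only after the command has been handled.
        CloudQueueMessage msg = jobQueue.GetMessage();
        if (msg != null)
        {
            // Hand the message off so the loop can keep polling.
            ThreadPool.QueueUserWorkItem(ProcessQueueMessage, msg);
        }
        else
        {
            // Back off briefly when the queue is empty to avoid hammering the storage account.
            Thread.Sleep(TimeSpan.FromSeconds(1));
        }
    }
}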