/// <summary>
/// Creates a form file backed by <paramref name="st"/>: wraps the stream in a
/// <see cref="StreamMuxer"/> and captures the stream's current length.
/// </summary>
/// <param name="st">
/// Source stream. Must support <see cref="Stream.Length"/> (i.e. be seekable) —
/// a non-seekable stream will throw <see cref="NotSupportedException"/> here.
/// </param>
/// <exception cref="ArgumentNullException"><paramref name="st"/> is null.</exception>
public BasicFormFile(Stream st)
{
    // Fail fast with a clear exception instead of a NullReferenceException
    // from st.Length (or from inside StreamMuxer) further down.
    if (st == null)
    {
        throw new ArgumentNullException(nameof(st));
    }
    // NOTE(review): the second StreamMuxer argument is presumably an
    // ownership/leave-open flag — confirm against StreamMuxer's constructor.
    Muxer = new StreamMuxer(st, false);
    Length = st.Length;
}
/// <summary>
/// Job entry point for sending a communication blast to contacts selected from
/// eligibility data. CURRENTLY INCOMPLETE: it loads the tenant and then throws
/// <see cref="NotImplementedException"/>. The intended implementation is kept
/// below under '#if false' for reference.
/// </summary>
/// <param name="job">The job being executed; TenantId/JobId are assumed non-null — TODO confirm caller guarantees this.</param>
/// <param name="result">Accumulates counters (skips, misses, exceptions) for the run.</param>
/// <exception cref="NotImplementedException">Always, until the disabled body is finished.</exception>
protected async override Task OnGoAsync(Job job, ContactsFromEligibilityJobResult result)
{
    var tenant = await Rdb.Tenants.FindAsync(job.TenantId.Value);
    // Everything past this throw is dead code, excluded from compilation below.
    throw new NotImplementedException();
#if false
    // ---- DISABLED DRAFT IMPLEMENTATION (never compiled) ----
    // Locate the blast record and its message template (with all body/subject templates eagerly loaded).
    var blast = Rdb.ZCommunicationBlasts.Single(z => z.TenantId == job.TenantId && z.JobId == job.JobId);
    var messageTemplate = Rdb.MessageTemplates.Include(z => z.HtmlBodyTemplate).Include(z => z.SubjectTemplate).Include(z => z.TextBodyTemplate).FirstOrDefault(z => z.MessageTemplateId == blast.MessageTemplateId);

    // Download each template attachment into memory once; StreamMuxer lets each
    // outgoing message get its own independent read stream over the same bytes.
    var streamByCloubBlob = new Dictionary<CloudBlob, StreamMuxer>();
    foreach (var blob in await messageTemplate.GetFileAttachmentInfosAsync(Blobs))
    {
        var st = new MemoryStream();
        await blob.DownloadToStreamAsync(st);
        st.Position = 0;
        streamByCloubBlob[blob] = new StreamMuxer(st);
    }

    // Recipients already contacted for this job are skipped (idempotent resend support).
    var alreadySentRecipientContactIds = new HashSet<int>();
    foreach (var sent in from z in Crm.CommunicationLogs where z.TenantId == job.TenantId.Value && z.JobId == job.JobId select z)
    {
        alreadySentRecipientContactIds.Add(int.Parse(sent.RecipientContactId));
    }
    result.AlreadySentRecipientContactIdCount = alreadySentRecipientContactIds.Count;

    // Build the recipient filter: person contacts that are "Employee" eligibility rows,
    // scoped to this tenant, with a non-null primary email.
    var ands = new List<TestExpression>()
    {
        new TestExpression(CollectionNames.Contacts, nameof(Contact.ContactType), Operators.Equals, Contact.ContactTypes.Person),
        new TestExpression(CollectionNames.Eligibility, nameof(Eligibility.mbr_relationship_desc), Operators.Equals, "Employee"),
    };
    foreach (var collection in ands.ConvertAll(a => a.Collection).Distinct().ToArray())
    {
        if (collection == CollectionNames.Contacts)
        {
            ands.Add(new TestExpression(collection, nameof(Contact.PrimaryEmail), Operators.IsNotNull));
        }
        ands.Add(new TestExpression(collection, "TenantId", Operators.Equals, job.TenantId));
    }
    var contactIds = TestExpression.MatchingContactIds(Crm, ands);
    result.MatchingContactIdCount = contactIds.Count;

    var tm = new TemplateManager(new DbTemplateFinder(Rdb, job.TenantId.Value), tenant.TenantSettings.ReusableValues);
    var messages = new List<MimeMessage>();
    var wq = new WorkQueue(1);
    // NOTE(review): MaxDegreeOfParallelism = 1 makes this effectively sequential;
    // presumably a debugging setting — the lock statements below only matter if it is raised.
    Parallel.ForEach(contactIds, new ParallelOptions { MaxDegreeOfParallelism = 1 }, delegate(int contactId)
    {
        if (alreadySentRecipientContactIds.Contains(contactId))
        {
            result.IncrementSkippedDueToPreviousSendAttempt();
            return;
        }
        try
        {
            var c = Rdb.Contacts.FirstOrDefault(z => z.TenantId == job.TenantId.Value && z.ContactId == contactId);
            if (c == null)
            {
                result.IncrementMissingContactCount();
                return;
            }
            // NOTE(review): the eligibility filter is commented out — this fetches an
            // arbitrary row, then throws if one exists. Clearly unfinished.
            var e = Rdb.Eligibility.FirstOrDefault();// z => z.ContactId == c.Id);
            if (e != null)
            {
                throw new NotImplementedException();
            }
            var model = Template.ModelTypes.CreateContactSummaryPhiModel(e);

            // Compose the outgoing message: sender/recipient, tracking headers, templated body.
            var m = new MimeMessage();
            m.From.Add(new MailboxAddress(tenant.TenantSettings.EmailSenderName, tenant.TenantSettings.EmailSenderAddress));
            m.Headers.Add(MailHelpers.ContactIdHeader, contactId.ToString());
            m.Headers.Add(MailHelpers.TopicHeader, blast.TopicName);
            m.Headers.Add(MailHelpers.CampaignHeader, blast.CampaignName);
            m.Headers.Add(MailHelpers.JobId, blast.JobId?.ToString());
            m.To.Add(new MailboxAddress(c.FullName, c.PrimaryEmail));
            // Template fill is serialized — presumably messageTemplate/tm are not thread-safe.
            lock (messageTemplate)
            {
                m.Fill(tm, messageTemplate, null, c, model);
            }

            // Wrap body + attachments in a multipart/mixed when attachments exist.
            if (streamByCloubBlob.Count > 0)
            {
                var multipart = new Multipart("mixed");
                multipart.Add(m.Body);
                foreach (var kvp in streamByCloubBlob)
                {
                    var attachment = new MimePart(kvp.Key.Properties.ContentType)
                    {
                        // Each message gets its own read-only view over the shared attachment bytes.
                        ContentObject = new ContentObject(kvp.Value.Create(true, false), ContentEncoding.Default),
                        ContentDisposition = new ContentDisposition(ContentDisposition.Attachment),
                        ContentTransferEncoding = ContentEncoding.Base64,
                        FileName = Path.GetFileName(kvp.Key.Name)
                    };
                    multipart.Add(attachment);
                }
                m.Body = multipart;
            }

            // Batch completed messages; each full batch is handed to the send queue.
            lock (messages)
            {
                messages.Add(m);
                if (messages.Count >= MessagesPerBlock)
                {
                    var b = messages.ToArray();
                    messages.Clear();
                    wq.Enqueue(() => Emailer.SendEmailAsync(b));
                }
            }
        }
        catch (Exception ex)
        {
            // Best effort per-contact: count and log, keep processing the rest.
            result.IncrementExceptionsDuringMessageCreation();
            Trace.WriteLine(ex);
        }
    });
    // Flush the final partial batch, wait for all sends, then release attachment buffers.
    if (messages.Count > 0)
    {
        wq.Enqueue(() => Emailer.SendEmailAsync(messages));
    }
    wq.WaitTillDone();
    streamByCloubBlob.Values.ForEach(sm => sm.Dispose());
#endif
}
/// <summary>
/// Demo/driver for StreamMuxer: writes 31 serialized TestObj records into a
/// MemoryStream with a hand-rolled framing format, then reads every other one
/// back through independent muxer sub-streams, deliberately oversizing some of
/// them to provoke (and survive) deserialization failures.
/// </summary>
static void Main(string[] args)
{
    //The format is #Objects, SizeObj0, Obj0... SizeObjN, ObjN
    //This is a combo of binary and xml data
    MemoryStream st = new MemoryStream();
    //Create a stream the old fashioned way
    Console.WriteLine("Creating Stream==========");
    Debug.WriteLine("Creating Stream==========");
    BinaryWriter w = new BinaryWriter(st);
    for (int z = 0; z < 31; ++z)
    {
        // Rewrite the running object count at offset 0 on every iteration.
        st.Position = 0;
        w.Write(z + 1);
        // Reserve 8 bytes for this object's size; it is backfilled after serialization.
        long objSizeOffset = st.Length;
        st.SetLength(st.Length + 8);
        st.Seek(0, SeekOrigin.End);
        TestObj o = new TestObj(z, string.Format("Test Object #{0}", z));
        Console.WriteLine(o);
        Debug.WriteLine(o);
        // Presumably an XmlSerializer (see "xml data" note above) — TODO confirm.
        TestObj.Serializer.Serialize(st, o);
        // Actual serialized size = growth since the reserved slot, minus the slot itself.
        long size = st.Length - objSizeOffset - 8;
        st.Position = objSizeOffset;
        w.Write(size);
        st.Flush();
    }
    //So you can examine the stream contents in the memory window
    byte[] buf = st.ToArray();
    //now let's de-serialize every other object with the muxer
    Console.WriteLine("Reading Stream==========");
    Debug.WriteLine("Reading Stream==========");
    st.Position = 0;
    using (StreamMuxer muxer = new StreamMuxer(st))
    {
        // One sub-stream walks the binary framing (counts and sizes)...
        using (Stream binaryPartsStream = muxer.Create(true, false))
        {
            BinaryReader r = new BinaryReader(binaryPartsStream);
            int objCnt = r.ReadInt32();
            // basePos tracks absolute offsets independently: 4 bytes consumed by the count.
            long basePos = 4;
            for (int z = 0; z < objCnt; ++z)
            {
                long size = r.ReadInt64();
                basePos += 8;
                //jump the binary stream past the xml data
                //note that I am seeking past the data, not setting the position.
                //this should show the independence of the 2 streams!
                binaryPartsStream.Seek(size, SeekOrigin.Current);
                //if every other...
                if (z % 2 == 0)
                {
                    //create a new readonly stream positioned at the current spot
                    //But every now and then, artificially expand the new stream to a larger size
                    //to force an exception. In doing so, the outer stream of course retains
                    //its state, and is unaffected!
                    long s = size + ((z % 8 == 2) ? 40 : 0);
                    using (Stream xmlPartStream = muxer.Create(true, false, basePos, s))
                    {
                        try
                        {
                            Debug.WriteLine(string.Format("XmlPartStream: BEFORE read of obj {0}. xmlSize={1} size={2} pos={3}", z, size, xmlPartStream.Length, xmlPartStream.Position));
                            TestObj testObj = (TestObj)TestObj.Serializer.Deserialize(xmlPartStream);
                            Console.WriteLine(testObj);
                            Debug.WriteLine(testObj);
                            Debug.WriteLine(string.Format("XmlPartStream: AFTER read of obj {0}. xmlSize={1} size={2} pos={3}", z, size, xmlPartStream.Length, xmlPartStream.Position));
                        }
                        catch (Exception ex)
                        {
                            // Expected for the artificially oversized windows above.
                            Debug.WriteLine("Object creation failed since stream size was expanded past bounds and serializer could not recognize binary data after the xml data!");
                        }
                    }
                }
                basePos += size;
            }
        }
    }
}
/// <summary>
/// Fetches one remote item: de-duplicates it against previously recorded evidence
/// (first by source-side metadata, then by content hashes), uploads new content to
/// secure blob storage, records a <see cref="DataSourceFetchItem"/> row (always —
/// even on duplicate or error, via the finally block), and finally recurses to
/// process PGP-decrypted payloads and unpacked zip entries.
/// </summary>
/// <param name="fetch">The parent fetch operation this item belongs to.</param>
/// <param name="details">Source-side metadata: name, folder, size, timestamps, MD5/ETag.</param>
/// <param name="dataSourceFetchItemType">How this item was produced (fetched, decrypted, unpacked, ...).</param>
/// <param name="parentFetchItem">The item this one was derived from, or null for a root fetch.</param>
/// <param name="fetchAsync">Callback that materializes the item into a local temp file and returns its path.</param>
private async Task FetchTheItemAsync(DataSourceFetche fetch, FileDetails details, DataSourceFetchItem.DataSourceFetchItemTypes dataSourceFetchItemType, DataSourceFetchItem parentFetchItem, Func<FileDetails, Task<string>> fetchAsync)
{
    // NOTE(review): tfn (the downloaded temp file) is never deleted here; it cannot be
    // removed in the finally because the PGP/zip branches below read it afterwards, but
    // someone should confirm cleanup happens elsewhere.
    string tfn = null;
    var item = new DataSourceFetchItem
    {
        DataSourceFetch = fetch,
        DataSourceFetchItemType = dataSourceFetchItemType,
        ParentDataSourceFetchItem = parentFetchItem,
        Size = details.Size,
        Name = details.Name,
    };
    item.DataSourceFetchItemProperties.LastModifiedAtUtc = details.LastModifiedAtUtc;
    item.DataSourceFetchItemProperties.ContentMD5 = details.ContentMD5;
    item.DataSourceFetchItemProperties.ETag = details.ETag;
    try
    {
        Trace.WriteLine($"Checking {details.FullName} size={details.Size} LastWriteTimeUtc={details.LastModifiedAtUtc}");
        // Cheap duplicate check first: source-side evidence (metadata), before downloading anything.
        var sameDataSourceReplicatedDataSourceFetchItem = FindEvidenceItems(details.CreateEvidence()).FirstOrDefault();
        if (sameDataSourceReplicatedDataSourceFetchItem != null)
        {
            item.DataSourceFetchItemType = DataSourceFetchItem.DataSourceFetchItemTypes.Duplicate;
            item.SameDataSourceReplicatedDataSourceFetchItem = sameDataSourceReplicatedDataSourceFetchItem;
            return; // the finally block still records the duplicate item
        }
        // Logger.LogInformation("Downloading", file.FullName, file.Length, tfn);
        tfn = await fetchAsync(details);
        using (var st = File.OpenRead(tfn))
        {
            item.Size = st.Length;
            using (var muxer = new StreamMuxer(st, true))
            {
                var p = new BlobStorageServices.FileProperties { LastModifiedAtUtc = details.LastModifiedAtUtc };
                p.Metadata[BlobStorageServices.MetaKeyNames.SourcePath] = details.Folder;
                p.Metadata[BlobStorageServices.MetaKeyNames.SourceFullName] = details.FullName;
                // Compute MD5/SHA1/SHA512 concurrently; each hash reads its own muxer sub-stream.
                var urns = new List<string>();
                Parallel.ForEach(
                    new[]
                    {
                        Hash.CommonHashAlgorithmNames.Md5,
                        Hash.CommonHashAlgorithmNames.Sha1,
                        Hash.CommonHashAlgorithmNames.Sha512,
                    },
                    hashAlgName =>
                    {
                        var urn = Hash.Compute(muxer.OpenRead(), hashAlgName).Urn;
                        if (urn == null)
                        {
                            return; //yes... in some cases this somehow happens...
                        }
                        // BUGFIX: these callbacks run in parallel and List<T>.Add is not
                        // thread-safe — unsynchronized concurrent Adds can corrupt the list
                        // or silently drop urns (defeating duplicate detection below).
                        lock (urns)
                        {
                            urns.Add(urn);
                        }
                    });
                if (urns.Count > 0)
                {
                    p.Metadata[BlobStorageServices.MetaKeyNames.Urns] = CSV.FormatLine(urns, false);
                    // Second duplicate check: content hashes, which also catch renamed copies.
                    sameDataSourceReplicatedDataSourceFetchItem = FindEvidenceItems(urns).FirstOrDefault();
                    if (sameDataSourceReplicatedDataSourceFetchItem != null)
                    {
                        item.DataSourceFetchItemType = DataSourceFetchItem.DataSourceFetchItemTypes.Duplicate;
                        item.SameDataSourceReplicatedDataSourceFetchItem = sameDataSourceReplicatedDataSourceFetchItem;
                        return;
                    }
                }
                var res = await BlobStorageServices.StoreStreamAsync(
                    Runner.BlobConfig,
                    BlobStorageServices.ContainerNames.Secure,
                    $"{BlobRootPath}{details.Folder.Substring(1)}{details.Name}",
                    muxer.OpenRead(),
                    p,
                    amt => Trace.WriteLine($"Uploading {amt}/{muxer.Length}")
                );
                // NOTE(review): this replaces the properties object populated at the top of
                // the method (LastModifiedAtUtc/ContentMD5/ETag) — confirm Set(p) carries
                // everything that still matters.
                item.DataSourceFetchItemProperties = new DataSourceFetchItemProperties();
                item.DataSourceFetchItemProperties.Set(p);
                item.Url = res.Uri.ToString();
                PopulateEvidence(item);
            }
        }
    }
    catch (Exception ex)
    {
        // Best effort: mark the item errored, attach the exception, keep going so the
        // row is still persisted and sibling items can still be fetched.
        item.DataSourceFetchItemType = DataSourceFetchItem.DataSourceFetchItemTypes.Errored;
        item.DataSourceFetchItemProperties.Error = new ExceptionError(ex);
        Trace.WriteLine(ex);
    }
    finally
    {
        // Persist the item row no matter which path was taken above.
        if (item != null)
        {
            await GdbLocker.GoAsync(async () =>
            {
                Gdb.DataSourceFetchItems.Add(item);
                await Gdb.SaveChangesAsync();
            });
        }
    }
    var ext = Path.GetExtension(details.Name).ToLower();
    if (ext == ".pgp" || details.Name.ToLower().Contains(".pgp."))
    {
        // Strip the pgp marker from the name (".pgp", ".pgp.asc", or an interior ".pgp.")
        // and recurse with the decrypted payload as a child item.
        var name = details.Name;
        if (name.ToLower().EndsWith(".pgp"))
        {
            name = name.Left(name.Length - 4);
        }
        else if (name.ToLower().EndsWith(".pgp.asc"))
        {
            name = name.Left(name.Length - 8);
        }
        else if (name.ToLower().Contains(".pgp."))
        {
            name = new Regex(@"\.pgp\.", RegexOptions.IgnoreCase).Replace(name, ".");
        }
        await FetchTheItemAsync(
            fetch,
            new FileDetails(details, name),
            DataSourceFetchItem.DataSourceFetchItemTypes.Decrypted,
            item,
            async _ =>
            {
                var utfp = Path.GetTempFileName();
                using (var st = File.OpenRead(tfn))
                {
                    await Runner.DecryptAsync(st, utfp);
                }
                return (utfp);
            }
        );
    }
    else if (
        MimeType.Application.Zip.DoesExtensionMatch(details.Name) &&
        DS.DataSourceSettings.DecompressItems &&
        dataSourceFetchItemType != DataSourceFetchItem.DataSourceFetchItemTypes.UnpackedRecompressedSingleton)
    {
        var relUnzipFolder = Path.GetFileNameWithoutExtension(details.Name);
        var unzipFolder = Path.Combine(Path.GetDirectoryName(tfn), relUnzipFolder);
        using (var st = File.OpenRead(tfn))
        {
            using (var za = new ZipArchive(st, ZipArchiveMode.Read))
            {
                // NOTE(review): archives with fewer than 2 entries are skipped entirely
                // (whole method returns) — presumably single-entry zips are treated as
                // already-atomic; confirm this is intentional.
                if (za.Entries.Count < 2)
                {
                    return;
                }
            }
        }
        ZipFile.ExtractToDirectory(tfn, unzipFolder);
        // Each extracted file is re-wrapped as a single-entry zip (unless already a zip)
        // and fetched as a child item; the Singleton type guard above stops infinite recursion.
        await TaskWhenAllOneAtATime(
            Directory.GetFiles(unzipFolder, "*.*", SearchOption.AllDirectories).ConvertAll(
                unzipped =>
                {
                    string rezipped = unzipped;
                    bool isRezipped = false;
                    if (!MimeType.Application.Zip.DoesExtensionMatch(unzipped))
                    {
                        rezipped = unzipped + MimeType.Application.Zip.PrimaryFileExtension;
                        using (var st = File.Create(rezipped))
                        {
                            using (var za = new ZipArchive(st, ZipArchiveMode.Create))
                            {
                                za.CreateEntryFromFile(unzipped, Path.GetFileName(unzipped));
                            }
                            isRezipped = true;
                        }
                    }
                    return (FetchTheItemAsync(
                        fetch,
                        new FileDetails(new FileInfo(rezipped), Path.Combine(details.Folder, relUnzipFolder)),
                        isRezipped ? DataSourceFetchItem.DataSourceFetchItemTypes.UnpackedRecompressedSingleton : DataSourceFetchItem.DataSourceFetchItemTypes.Unpacked,
                        item,
                        _ => Task.FromResult(rezipped)
                    ));
                }));
        Stuff.Noop();
    }
}