/// <summary>
/// Reads bytes for a specific document from the local disk cache, populating the cache
/// from the database on first access.
/// </summary>
/// <param name="dataContext">The DataContext for the database; only used when the cache file must be created.</param>
/// <param name="documentID">The ID of the document to read the content for.</param>
/// <param name="offset">The starting offset to read the content bytes from.</param>
/// <param name="length">The length of content bytes to read.</param>
/// <returns>A byte array of <paramref name="length"/> bytes containing the document content for the
/// specified range; trailing bytes remain zero when the range extends past the end of the document.</returns>
public async Task<byte[]> ReadChunk(Data.DataContext dataContext, Guid documentID, int offset, int length)
{
    string filename = Path.Combine(_uploadPath, documentID.ToString("D") + ".part");
    if (!File.Exists(filename))
    {
        // Double-checked locking: only one thread in this process materializes the cache file.
        lock (_lock)
        {
            if (!File.Exists(filename))
            {
                string tempFileName = Path.Combine(_uploadPath, "temp_" + documentID.ToString("D") + ".part");
                // FileMode.Create (not Append): truncate any stale temp file left by a crashed
                // run, otherwise the document content would be duplicated in the cache file.
                // FileShare.None: nobody should observe the half-written temp file.
                using (var fs = new FileStream(tempFileName, FileMode.Create, FileAccess.Write, FileShare.None))
                using (var dbStream = new Data.Documents.DocumentStream(dataContext, documentID))
                {
                    dbStream.CopyTo(fs);
                    fs.Flush();
                }
                File.Move(tempFileName, filename);
            }
        }
    }

    byte[] buffer = new byte[length];
    using (var stream = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        stream.Seek(offset, SeekOrigin.Begin);
        // ReadAsync may return fewer bytes than requested; loop until the requested length
        // has been read or the end of the file is reached.
        int totalRead = 0;
        while (totalRead < length)
        {
            int read = await stream.ReadAsync(buffer, totalRead, length - totalRead);
            if (read == 0)
            {
                break;
            }
            totalRead += read;
        }
    }
    return buffer;
}
/// <summary>
/// Returns a readonly stream for the specified document from the local disk cache,
/// populating the cache from the database on first access.
/// </summary>
/// <param name="dataContext">The current datacontext to use if the cache needs to be populated.</param>
/// <param name="documentID">The ID of the document.</param>
/// <returns>A read-only <see cref="FileStream"/> over the cached document content; the caller owns and must dispose it.</returns>
public Stream GetStream(Data.DataContext dataContext, Guid documentID)
{
    string filename = Path.Combine(_uploadPath, documentID.ToString("D") + ".part");
    if (!File.Exists(filename))
    {
        // Double-checked locking: only one thread in this process materializes the cache file.
        lock (_lock)
        {
            if (!File.Exists(filename))
            {
                string tempFileName = Path.Combine(_uploadPath, "temp_" + documentID.ToString("D") + ".part");
                // FileMode.Create (not Append): truncate any stale temp file left by a crashed
                // run, otherwise the document content would be duplicated in the cache file.
                // FileShare.None: nobody should observe the half-written temp file.
                using (var fs = new FileStream(tempFileName, FileMode.Create, FileAccess.Write, FileShare.None))
                using (var dbStream = new Data.Documents.DocumentStream(dataContext, documentID))
                {
                    dbStream.CopyTo(fs);
                    fs.Flush();
                }
                File.Move(tempFileName, filename);
            }
        }
    }
    // FileShare.ReadWrite preserves the original contract: other handles may already have the file open.
    return new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
}
/// <summary>
/// Downloads the content of a document from the CNDS API and writes it into the
/// document content stream in the database.
/// </summary>
/// <param name="documentID">The ID of the document whose content should be transferred.</param>
async Task GetDocumentContent(Guid documentID)
{
    using (var db = new DataContext())
    using (var cndsAPI = new Lpp.CNDS.ApiClient.CNDSClient(CNDSurl))
    using (var docStream = new Data.Documents.DocumentStream(db, documentID))
    {
        var httpResponse = await cndsAPI.Requests.ReadDocument(documentID);
        // Fail fast on a non-success status instead of silently writing an error body
        // into the document content.
        httpResponse.EnsureSuccessStatusCode();
        await httpResponse.Content.CopyToAsync(docStream);
        await docStream.FlushAsync();
    }
}
/// <summary>
/// Replaces the stored content of a hard-coded document with the contents of a local
/// file from the Playpen folder.
/// </summary>
public async Task OverwriteDocumentContent()
{
    var targetDocumentID = new Guid("B7024973-02F7-4477-AABB-A9C900F71CDE");
    var sourcePath = Path.Combine("Playpen", "response.json");

    using (var db = new DataContext())
    using (var documentStream = new Data.Documents.DocumentStream(db, targetDocumentID))
    using (var source = System.IO.File.OpenRead(sourcePath))
    {
        // Stream the file content directly into the document's database-backed stream.
        await documentStream.WriteStreamAsync(source);
    }

    Console.WriteLine("Finished updating document content.");
}
/// <summary>
/// Copies the content of a hard-coded document from the database into a file under the
/// Playpen folder, named using the document's stored file name.
/// </summary>
public async Task DownloadSpecificDocument()
{
    var targetDocumentID = new Guid("12D6B147-3FA6-400A-9F9A-A9C900F6F88C");

    using (var db = new DataContext())
    {
        // Look up the original file name recorded for the document.
        string storedFileName = db.Documents.Where(d => d.ID == targetDocumentID).Select(d => d.FileName).Single();

        using (var source = new Data.Documents.DocumentStream(db, targetDocumentID))
        using (var destination = System.IO.File.Create(Path.Combine("Playpen", storedFileName)))
        {
            await source.CopyToAsync(destination);
            await destination.FlushAsync();
        }
    }
}
/// <summary>
/// Downloads the content of a hard-coded document from the database to a file in the
/// current working directory, named using the document's stored file name.
/// </summary>
public void DownloadDocument()
{
    Guid documentID = new Guid("6F1D10BB-B3E2-4028-A1E5-A84D011E4249");
    using (var db = new DataContext())
    {
        var document = db.Documents.Single(d => d.ID == documentID);
        // NOTE(review): document.FileName comes from the database; if it can contain path
        // separators this writes outside the current directory — confirm it is a bare file name.
        // File.Create (not OpenWrite): OpenWrite does not truncate, so an existing larger file
        // would keep stale trailing bytes after the copy.
        using (var writer = File.Create(document.FileName))
        using (var reader = new Data.Documents.DocumentStream(db, documentID))
        {
            reader.CopyTo(writer);
            writer.Flush();
            // writer is closed by the using block; the original redundant reader.Flush()
            // (a no-op on a read stream) and writer.Close() were removed.
        }
    }
}
/// <summary>
/// Builds a JSON result containing the DP/NDC columns of the first result table of the
/// query composer response document associated with the specified response.
/// </summary>
/// <param name="responseID">The ID of the response whose result document should be read.</param>
/// <returns>A JSON payload with a Table property, or null when no document exists for the response.</returns>
public JsonResult GetResponseDataset(Guid responseID)
{
    DataTable dt = null;
    using (var db = new DataContext())
    {
        var document = db.Documents.Where(r => r.ItemID == responseID).FirstOrDefault();
        if (document == null)
        {
            return null;
        }

        var serializationSettings = new Newtonsoft.Json.JsonSerializerSettings();
        serializationSettings.Converters.Add(new DTO.QueryComposer.QueryComposerResponsePropertyDefinitionConverter());
        var deserializer = Newtonsoft.Json.JsonSerializer.Create(serializationSettings);

        Type queryComposerResponseDTOType = typeof(DTO.QueryComposer.QueryComposerResponseDTO);
        Lpp.Dns.DTO.QueryComposer.QueryComposerResponseDTO rsp;
        // Stream the document content straight from the database into the deserializer.
        using (var documentStream = new Data.Documents.DocumentStream(db, document.ID))
        using (var streamReader = new System.IO.StreamReader(documentStream))
        {
            rsp = (Lpp.Dns.DTO.QueryComposer.QueryComposerResponseDTO)deserializer.Deserialize(streamReader, queryComposerResponseDTOType);
        }

        dt = CreateTable(rsp.Queries.First().Results.First());
        // (removed unused local "json" — it was never read)
    }

    // Project only the DP and NDC columns for the client.
    var respDT = (from item in dt.AsEnumerable()
                  select new { DP = item["DP"], NDC = item["NDC"] }).ToArray();

    return Json(new { Table = respDT }, JsonRequestBehavior.AllowGet);
}
/// <summary>
/// Builds a JSON result containing the first result table and the column (property)
/// definitions of the query composer response document associated with the specified response.
/// </summary>
/// <param name="responseID">The ID of the response whose result document should be read.</param>
/// <returns>A JSON payload with Results and Header properties, or null when no document exists for the response.</returns>
public JsonResult GetResponseDataset(Guid responseID)
{
    var dt = new List<IEnumerable<Dictionary<string, object>>>();
    var ht = new List<QueryComposerResponsePropertyDefinitionDTO>();
    using (var db = new DataContext())
    {
        var document = db.Documents.Where(r => r.ItemID == responseID).FirstOrDefault();
        if (document == null)
        {
            return null;
        }

        var serializationSettings = new Newtonsoft.Json.JsonSerializerSettings();
        serializationSettings.Converters.Add(new DTO.QueryComposer.QueryComposerResponsePropertyDefinitionConverter());
        var deserializer = Newtonsoft.Json.JsonSerializer.Create(serializationSettings);

        Type queryComposerResponseDTOType = typeof(DTO.QueryComposer.QueryComposerResponseDTO);
        Lpp.Dns.DTO.QueryComposer.QueryComposerResponseDTO rsp;
        // Stream the document content straight from the database into the deserializer.
        using (var documentStream = new Data.Documents.DocumentStream(db, document.ID))
        using (var streamReader = new System.IO.StreamReader(documentStream))
        {
            rsp = (Lpp.Dns.DTO.QueryComposer.QueryComposerResponseDTO)deserializer.Deserialize(streamReader, queryComposerResponseDTOType);
        }

        dt.Add(rsp.Queries.First().Results.First());
        ht.AddRange(rsp.Queries.First().Properties);
        // (removed unused local "json" — it was never read)
    }

    return Json(new SqlDTO() { Results = dt, Header = ht }, JsonRequestBehavior.AllowGet);
}
/// <summary>
/// Loads the query composer response document for the specified response, converts the
/// first result set to a DataTable, and returns the calculated metrics for it.
/// </summary>
/// <param name="responseID">The ID of the response whose result document should be processed.</param>
/// <returns>The metrics JSON result, or null when no document exists or no table could be built.</returns>
public JsonResult ProcessMetricsByResponse(Guid responseID)
{
    DataTable dt = null;
    using (var db = new DataContext())
    {
        var document = db.Documents.Where(r => r.ItemID == responseID).FirstOrDefault();
        if (document == null)
        {
            return null;
        }

        var serializationSettings = new Newtonsoft.Json.JsonSerializerSettings();
        serializationSettings.Converters.Add(new DTO.QueryComposer.QueryComposerResponsePropertyDefinitionConverter());
        var deserializer = Newtonsoft.Json.JsonSerializer.Create(serializationSettings);

        Type queryComposerResponseDTOType = typeof(DTO.QueryComposer.QueryComposerResponseDTO);
        Lpp.Dns.DTO.QueryComposer.QueryComposerResponseDTO rsp;
        // Stream the document content straight from the database into the deserializer.
        using (var documentStream = new Data.Documents.DocumentStream(db, document.ID))
        using (var streamReader = new System.IO.StreamReader(documentStream))
        {
            rsp = (Lpp.Dns.DTO.QueryComposer.QueryComposerResponseDTO)deserializer.Deserialize(streamReader, queryComposerResponseDTOType);
        }

        // NOTE(review): this reads rsp.Results directly while the GetResponseDataset methods
        // read rsp.Queries.First().Results — confirm which shape this response kind uses.
        dt = CreateTable(rsp.Results.First());
        // (removed unused local "json" — it was never read)
    }

    if (dt == null)
    {
        return null;
    }

    return GetMetrics(dt);
}
/// <summary>
/// Builds and logs an enhanced (merged) event timeline for a hard-coded distributed regression
/// request: request status changes, routing status changes, output document uploads, and
/// SAS tracking-table entries are combined via EnhancedEventLogBuilder and written to the logger.
/// </summary>
public async Task CreateLogForNonDistributedRegressionRequest()
{
    // Hard-coded request and acting identity — this looks like a diagnostic/playpen routine.
    Guid requestID = Guid.Parse("C8702FA2-C9AC-4CC4-82BE-A84D0118904C");
    var Identity = new Lpp.Utilities.Security.ApiIdentity(Guid.Parse("9F400001-FAD6-4E84-8933-A2380151C648"), "jmalenfant", null);
    using (var DataContext = new Dns.Data.DataContext())
    {
        // Security-trimmed query for the request; embedded as requestQuery.Any() in each event
        // query below so rows are only returned when the identity can see the request.
        var requestQuery = DataContext.Secure <Request>(Identity).Where(r => r.ID == requestID);

        var builder = new Lpp.Dns.Data.DistributedRegressionTracking.EnhancedEventLogBuilder();

        builder.RequestStatusChangeEvents = async() => {
            var evts = await(from l in DataContext.LogsRequestStatusChanged
                             // DbFunctions.CreateDateTime truncates the log timestamp to whole
                             // seconds for comparison against response submission times.
                             let dtTimestamp = DbFunctions.CreateDateTime(l.TimeStamp.Year, l.TimeStamp.Month, l.TimeStamp.Day, l.TimeStamp.Hour, l.TimeStamp.Minute, l.TimeStamp.Second)
                             where requestQuery.Any() && l.RequestID == requestID
                             select new
                             {
                                 l.TimeStamp,
                                 //treat the step as the lowest response count where the response is submitted after the status change log item timestamp or zero
                                 Step = DataContext.Responses.Where(rsp => rsp.RequestDataMart.RequestID == l.RequestID && rsp.SubmittedOn >= dtTimestamp).Select(rsp => (int?)rsp.Count).Min() ?? 0,
                                 l.Description
                             }).ToArrayAsync();
            return(evts.Select(l => new EnhancedEventLogItemDTO { Timestamp = l.TimeStamp.DateTime, Source = string.Empty, Step = l.Step, Description = l.Description }));
        };

        builder.RoutingStatusChangeEvents = async() => {
            var evts = await(from l in DataContext.LogsRoutingStatusChange
                             let dtTimestamp = DbFunctions.CreateDateTime(l.TimeStamp.Year, l.TimeStamp.Month, l.TimeStamp.Day, l.TimeStamp.Hour, l.TimeStamp.Minute, l.TimeStamp.Second)
                             where requestQuery.Any() && l.RequestDataMart.RequestID == requestID
                             select new
                             {
                                 Timestamp = l.TimeStamp,
                                 Source = l.RequestDataMart.DataMart.Name,
                                 Description = l.Description,
                                 //treat the step as the maximum response count where the response is submitted before the status change log item timestamp or zero
                                 Step = l.RequestDataMart.Responses.Where(rsp => rsp.SubmittedOn <= dtTimestamp).Select(rsp => (int?)rsp.Count).Max() ?? 0
                             }).ToArrayAsync();
            return(evts.Select(l => new EnhancedEventLogItemDTO { Timestamp = l.Timestamp.DateTime, Source = l.Source, Step = l.Step, Description = l.Description }));
        };

        builder.DocumentUploadEvents = async() => {
            // For each completed response, find the most recently created Output document
            // (joined via revision set) and emit an upload-finished event for it.
            var lastDocumentUpload = await(from rsp in DataContext.Responses
                                           let lastDoc = (from rd in DataContext.RequestDocuments
                                                          join doc in DataContext.Documents on rd.RevisionSetID equals doc.RevisionSetID
                                                          where rd.ResponseID == rsp.ID && rd.DocumentType == RequestDocumentType.Output
                                                          orderby doc.CreatedOn descending
                                                          select doc).FirstOrDefault()
                                           where rsp.RequestDataMart.RequestID == requestID && requestQuery.Any() && rsp.ResponseTime != null && lastDoc != null
                                           select new { Iteration = rsp.Count, DataMart = rsp.RequestDataMart.DataMart.Name, DocumentCreatedOn = lastDoc.CreatedOn }).ToArrayAsync();
            return(lastDocumentUpload.Select(l => new EnhancedEventLogItemDTO { Timestamp = l.DocumentCreatedOn, Source = l.DataMart, Step = l.Iteration, Description = "Files finished uploading" }));
        };

        ////parse latest AC tracking table
        ////parse any DP tracking tables that are iteration ahead of AC

        // Map data partner identifier (falling back to acronym) -> datamart info, used to
        // translate tracking-table partner codes into datamart names below.
        var dataPartners = await DataContext.RequestDataMarts.Where(rdm => rdm.RequestID == requestID).Select(rdm => new { rdm.DataMart.Name, Identifier = (rdm.DataMart.DataPartnerIdentifier ?? rdm.DataMart.Acronym), rdm.RoutingType }).ToDictionaryAsync(k => k.Identifier);

        builder.TrackingTableEvents = async() => {
            //get the ID of the latest Analysis tracking document
            var latestACTrackingDocumentID = await(from rd in DataContext.RequestDocuments
                                                   join doc in DataContext.Documents on rd.RevisionSetID equals doc.RevisionSetID
                                                   where rd.Response.RequestDataMart.RequestID == requestID
                                                   && requestQuery.Any()
                                                   && rd.Response.RequestDataMart.RoutingType == RoutingType.AnalysisCenter
                                                   && rd.Response.Count == rd.Response.RequestDataMart.Responses.Max(rsp => rsp.Count)
                                                   && doc.Kind == "DistributedRegression.TrackingTable"
                                                   orderby doc.MajorVersion, doc.MinorVersion, doc.BuildVersion, doc.RevisionVersion descending
                                                   select doc.ID).FirstOrDefaultAsync();
            if (latestACTrackingDocumentID == default(Guid))
            {
                // No tracking table yet — nothing to contribute to the merged log.
                return(Array.Empty <EnhancedEventLogItemDTO>());
            }

            IEnumerable <Data.DistributedRegressionTracking.TrackingTableItem> trackingTableItems;
            // A fresh DataContext is used here so the document stream does not interfere with
            // the outer context captured by the other event lambdas.
            using (var db = new DataContext())
            using (var stream = new Data.Documents.DocumentStream(db, latestACTrackingDocumentID))
            {
                trackingTableItems = await DistributedRegressionTrackingTableProcessor.Read(stream);
            }

            List <EnhancedEventLogItemDTO> logItems = new List <EnhancedEventLogItemDTO>(trackingTableItems.Count());
            int lastIteration = trackingTableItems.Max(t => t.Iteration);
            // Emit a begin and an end event per partner per iteration.
            foreach (var partnerEntries in trackingTableItems.GroupBy(k => k.DataPartnerCode))
            {
                var dataPartnerName = dataPartners[TranslatePartnerIdentifier(partnerEntries.Key)].Name;
                foreach (var iteration in partnerEntries.GroupBy(k => k.Iteration))
                {
                    if (iteration.Key == 0 || iteration.Key == lastIteration)
                    {
                        //read from the last start time
                        logItems.Add(new EnhancedEventLogItemDTO { Step = iteration.Key, Description = "SAS program execution begin", Source = dataPartnerName, Timestamp = iteration.Max(l => l.Start) });
                    }
                    else
                    {
                        //if DP read the latest start
                        //if AC read the 2nd last start time
                        //TODO: talk to Qoua - I don't think this is valid logic/rule
                        // NOTE(review): both branches are currently identical (Max of Start).
                        logItems.Add(new EnhancedEventLogItemDTO { Step = iteration.Key, Description = "SAS program execution begin", Source = dataPartnerName, Timestamp = iteration.Max(l => l.Start) });
                    }
                    //read the last end time
                    logItems.Add(new EnhancedEventLogItemDTO { Step = iteration.Key, Description = "SAS program execution complete, output files written.", Source = dataPartnerName, Timestamp = iteration.Max(l => l.End) });
                }
                ; // (stray empty statement preserved from original)
            }
            ; // (stray empty statement preserved from original)
            return(logItems);
        };

        //builder.AdapterLoggedEvents = async () => {
        //    List<EnhancedEventLogItemDTO> logItems = new List<EnhancedEventLogItemDTO>();
        //    //get the adapter event logs, will need to know the response iteration, and the datamart name
        //    var adapterLogs = await (from rd in DataContext.RequestDocuments
        //                             let doc = (from d in DataContext.Documents where d.RevisionSetID == rd.RevisionSetID && d.Kind == "DistributedRegression.AdapterEventLog" select d).DefaultIfEmpty()
        //                             where rd.DocumentType == RequestDocumentType.Output
        //                             && rd.Response.RequestDataMart.RequestID == requestID
        //                             && requestQuery.Any()
        //                             && doc.Any()
        //                             select
        //                             new
        //                             {
        //                                 ResponseID = rd.ResponseID,
        //                                 ResponseIteration = rd.Response.Count,
        //                                 DataMart = rd.Response.RequestDataMart.DataMart.Name,
        //                                 DocumentID = doc.OrderByDescending(d => d.MajorVersion).ThenByDescending(d => d.MinorVersion).ThenByDescending(d => d.BuildVersion).ThenByDescending(d => d.RevisionVersion).Select(d => d.ID).FirstOrDefault()
        //                             }).ToArrayAsync();
        //    foreach (var log in adapterLogs)
        //    {
        //        //get the log content
        //        using (var db = new DataContext())
        //        using (var streamReader = new StreamReader(new Data.Documents.DocumentStream(db, log.DocumentID)))
        //        using (var reader = new Newtonsoft.Json.JsonTextReader(streamReader))
        //        {
        //            var serializer = new Newtonsoft.Json.JsonSerializer();
        //            var adapterEvents = serializer.Deserialize<IEnumerable<EventLogItem>>(reader)
        //                                .Select(al => new EnhancedEventLogItemDTO
        //                                {
        //                                    Step = log.ResponseIteration,
        //                                    Source = log.DataMart,
        //                                    Description = al.Description,
        //                                    Timestamp = al.Timestamp
        //                                }).ToArray();
        //            if (adapterEvents.Length > 0)
        //            {
        //                logItems.AddRange(adapterEvents);
        //            }
        //        }
        //    }
        //    return logItems;
        //};

        // Merge all the registered event sources into a single ordered log and write it out.
        var eventLog = await builder.GetItems();
        StringBuilder sb = new StringBuilder();
        foreach (var item in eventLog)
        {
            //Console.WriteLine("{0}\t{1:o}\t{2}\t{3}", item.Step, item.Timestamp, item.Source, item.Description);
            //Logger.InfoFormat("{0}\t{1:o}\t{2}\t{3}\r\n", item.Step, item.Timestamp, item.Source, item.Description);
            sb.AppendLine(string.Format("{0}\t{1:o}\t{2}\t{3}", item.Step, item.Timestamp, item.Source, item.Description));
        }
        Logger.Info("Event Log:\n" + sb.ToString());
    }
}