private void SetKeyValue(string KeyName, object TheValue, RegistryValueKind type)
{
    if (this.readJson())
    {
        JToken token;
        bool write = true;
        if (AIDASettings.TryGetValue(KeyName, out token))
        {
            // Only rewrite the JSON file if the stored value actually differs from the new one
            string currentValue = token.Value<object>().ToString();
            if (type == RegistryValueKind.String || type == RegistryValueKind.DWord || type == RegistryValueKind.ExpandString || type == RegistryValueKind.MultiString || type == RegistryValueKind.QWord)
            {
                write = !string.IsNullOrEmpty(currentValue) && currentValue.ToLower() != TheValue.ToString().ToLower();
            }
        }
        if (write)
        {
            using (JTokenWriter tokenWriter = new JTokenWriter())
            {
                tokenWriter.WriteValue(TheValue);
                tokenWriter.Flush();
                AIDASettings[KeyName] = tokenWriter.Token;
                using (StreamWriter streamWriter = new StreamWriter(AIDASettingsLocation, false))
                using (Newtonsoft.Json.JsonTextWriter jsonWriter = new Newtonsoft.Json.JsonTextWriter(streamWriter))
                {
                    AIDASettings.WriteTo(jsonWriter);
                    jsonWriter.Flush();
                }
                this.jsonLastWrite = DateTime.Now;
                File.SetLastWriteTime(AIDASettingsLocation, this.jsonLastWrite);
            }
        }
    }
    // Always mirror the value into the registry as well
    this.theReg.SetValue(KeyName, TheValue, type);
    this.theReg.Flush();
}
protected override System.Threading.Tasks.Task SerializeToStreamAsync(System.IO.Stream stream, System.Net.TransportContext context)
{
    var jw = new Newtonsoft.Json.JsonTextWriter(new System.IO.StreamWriter(stream)) { Formatting = Newtonsoft.Json.Formatting.Indented };
    _value.WriteTo(jw);
    jw.Flush();
    return System.Threading.Tasks.Task.FromResult<object>(null);
}
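The override above belongs to an HttpContent subclass; below is a minimal sketch of the enclosing class it implies, assuming _value is a Newtonsoft.Json.Linq.JToken. The class name JTokenContent, the content-type header, and the TryComputeLength behavior are illustrative assumptions, not part of the original snippet.

class JTokenContent : System.Net.Http.HttpContent
{
    private readonly Newtonsoft.Json.Linq.JToken _value;

    public JTokenContent(Newtonsoft.Json.Linq.JToken value)
    {
        _value = value;
        Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json");
    }

    protected override System.Threading.Tasks.Task SerializeToStreamAsync(System.IO.Stream stream, System.Net.TransportContext context)
    {
        var jw = new Newtonsoft.Json.JsonTextWriter(new System.IO.StreamWriter(stream)) { Formatting = Newtonsoft.Json.Formatting.Indented };
        _value.WriteTo(jw);
        jw.Flush();
        return System.Threading.Tasks.Task.FromResult<object>(null);
    }

    protected override bool TryComputeLength(out long length)
    {
        // The serialized length is not known ahead of time
        length = -1;
        return false;
    }
}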
public void NewtonsoftJson1()
{
    using Stream stream = new MemoryStream();
    using StreamWriter textWriter = new StreamWriter(stream);
    using JsonTextWriter jsonWriter = new JsonTextWriter(textWriter);
    var serializer = new Newtonsoft.Json.JsonSerializer();
    serializer.Serialize(jsonWriter, Model);
    jsonWriter.Flush();
}
static byte[] SerializeWithJsonNet<T>(T obj)
{
    using (var memStream = new MemoryStream())
    using (var sw = new StreamWriter(memStream))
    using (var writer = new Newtonsoft.Json.JsonTextWriter(sw))
    {
        var serializer = new Newtonsoft.Json.JsonSerializer();
        serializer.Serialize(writer, obj);
        writer.Flush();
        return memStream.ToArray();
    }
}
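The example above only covers the write side; a minimal counterpart sketch for reading such a buffer back with Json.NET follows. The method name DeserializeWithJsonNet is illustrative, and the round trip assumes the default UTF-8 encoding that StreamWriter used when the bytes were produced.

static T DeserializeWithJsonNet<T>(byte[] data)
{
    using (var memStream = new MemoryStream(data))
    using (var sr = new StreamReader(memStream))
    using (var reader = new Newtonsoft.Json.JsonTextReader(sr))
    {
        // JsonSerializer.Deserialize<T> walks the token stream produced by JsonTextReader
        var serializer = new Newtonsoft.Json.JsonSerializer();
        return serializer.Deserialize<T>(reader);
    }
}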
public static string Serialize(TimingDebugger TimingDebuggerObj) { var writer = new StringWriter(); var jwriter = new Newtonsoft.Json.JsonTextWriter(writer); Serialize(jwriter, TimingDebuggerObj); jwriter.Flush(); writer.Flush(); return(writer.ToString()); }
private static string GetExpectedString(bool prettyPrint, bool isUtf8, int[] data) { MemoryStream ms = new MemoryStream(); StreamWriter streamWriter = new StreamWriter(ms, new UTF8Encoding(false), 1024, true); StringBuilder sb = new StringBuilder(); StringWriter stringWriter = new StringWriter(sb); TextWriter writer = isUtf8 ? streamWriter : (TextWriter)stringWriter; var json = new Newtonsoft.Json.JsonTextWriter(writer) { Formatting = prettyPrint ? Newtonsoft.Json.Formatting.Indented : Newtonsoft.Json.Formatting.None }; json.WriteStartObject(); json.WritePropertyName("age"); json.WriteValue(42); json.WritePropertyName("first"); json.WriteValue("John"); json.WritePropertyName("last"); json.WriteValue("Smith"); json.WritePropertyName("phoneNumbers"); json.WriteStartArray(); json.WriteValue("425-000-1212"); json.WriteValue("425-000-1213"); json.WriteEnd(); json.WritePropertyName("address"); json.WriteStartObject(); json.WritePropertyName("street"); json.WriteValue("1 Microsoft Way"); json.WritePropertyName("city"); json.WriteValue("Redmond"); json.WritePropertyName("zip"); json.WriteValue(98052); json.WriteEnd(); // Add a large array of values json.WritePropertyName("ExtraArray"); json.WriteStartArray(); for (var i = 0; i < ExtraArraySize; i++) { json.WriteValue(data[i]); } json.WriteEnd(); json.WriteEnd(); json.Flush(); return(isUtf8 ? Encoding.UTF8.GetString(ms.ToArray()) : sb.ToString()); }
public void ProcessRequest(System.Web.HttpContext context) { context.Response.ContentType = "application/json"; Newtonsoft.Json.JsonTextWriter jsonWriter = new Newtonsoft.Json.JsonTextWriter(context.Response.Output); Newtonsoft.Json.JsonSerializer ser = new Newtonsoft.Json.JsonSerializer(); using (System.Data.DataTable dt = Basic_SQL.SQL.GetDataTable("SELECT COL_Hex FROM T_SYS_ApertureColorToHex ORDER BY COL_Aperture")) { jsonWriter.Formatting = Newtonsoft.Json.Formatting.Indented; ser.Serialize(jsonWriter, dt); jsonWriter.Flush(); } // End Using dt }
public Message SerializeRequest(MessageVersion messageVersion, object[] parameters)
{
    byte[] body;
    Newtonsoft.Json.JsonSerializer serializer = endpoint.NewtonsoftSettings().JsonSerializer;
    using (MemoryStream ms = new MemoryStream())
    {
        using (StreamWriter sw = new StreamWriter(ms, Encoding.UTF8))
        {
            using (Newtonsoft.Json.JsonWriter writer = new Newtonsoft.Json.JsonTextWriter(sw))
            {
                if (parameters.Length == 1)
                {
                    // Single parameter, assuming bare
                    serializer.Serialize(sw, parameters[0]);
                }
                else
                {
                    writer.WriteStartObject();
                    for (int i = 0; i < this.operation.Messages[0].Body.Parts.Count; i++)
                    {
                        writer.WritePropertyName(this.operation.Messages[0].Body.Parts[i].Name);
                        serializer.Serialize(writer, parameters[i]);
                    }
                    writer.WriteEndObject();
                }
                writer.Flush();
                sw.Flush();
                body = ms.ToArray();
            }
        }
    }
    if (traceSource.Switch.ShouldTrace(TraceEventType.Information))
    {
        traceSource.TraceEvent(TraceEventType.Information, 1004, System.Text.Encoding.UTF8.GetString(body));
    }
    Message requestMessage = Message.CreateMessage(messageVersion, operation.Messages[0].Action, new RawBodyWriter(body));
    requestMessage.Headers.To = operationUri;
    requestMessage.Properties.Add(WebBodyFormatMessageProperty.Name, new WebBodyFormatMessageProperty(WebContentFormat.Raw));
    HttpRequestMessageProperty reqProp = new HttpRequestMessageProperty();
    reqProp.Headers[HttpRequestHeader.ContentType] = "application/json";
    requestMessage.Properties.Add(HttpRequestMessageProperty.Name, reqProp);
    return requestMessage;
}
public static void SetupNewtonsoftJson( out Action <object, ChunkedMemoryStream> serialize, out Func <ChunkedMemoryStream, Type, object> deserialize) { var serializer = new Newtonsoft.Json.JsonSerializer(); serializer.TypeNameAssemblyFormat = FormatterAssemblyStyle.Simple; serializer.TypeNameHandling = Newtonsoft.Json.TypeNameHandling.Auto; serialize = (obj, stream) => { var sw = new Newtonsoft.Json.JsonTextWriter(stream.GetWriter()); serializer.Serialize(sw, obj); sw.Flush(); }; deserialize = (stream, type) => serializer.Deserialize(new Newtonsoft.Json.JsonTextReader(stream.GetReader()), type); }
/// <summary>
/// Writes Protocol in JSON format
/// </summary>
/// <returns>JSON string</returns>
public override string ToString()
{
    using (System.IO.StringWriter sw = new System.IO.StringWriter())
    using (Newtonsoft.Json.JsonTextWriter writer = new Newtonsoft.Json.JsonTextWriter(sw))
    {
#if DEBUG
        writer.Formatting = Newtonsoft.Json.Formatting.Indented;
#endif
        WriteJson(writer, new SchemaNames());
        writer.Flush();
        return sw.ToString();
    }
}
public override void ExecuteResult(System.Web.Mvc.ControllerContext context) { if (context == null) { throw (new System.ArgumentNullException("context")); } //if ((JsonRequestBehavior == System.Web.Mvc.JsonRequestBehavior.DenyGet) && // (string.Equals(context.HttpContext.Request.HttpMethod, "GET", System.StringComparison.OrdinalIgnoreCase))) //{ // throw (new System.InvalidOperationException // ("This request has been blocked because sensitive information could be disclosed to third party web sites when this is used in a GET request. To allow GET requests, set JsonRequestBehavior to AllowGet.")); //} System.Web.HttpResponseBase response = context.HttpContext.Response; if (string.IsNullOrWhiteSpace(ContentType) == false) { response.ContentType = ContentType; } else { response.ContentType = "application/json"; } if (ContentEncoding != null) { response.ContentEncoding = ContentEncoding; } if (Data != null) { var writer = new Newtonsoft.Json.JsonTextWriter(response.Output) { Formatting = Formatting }; var serializer = Newtonsoft.Json.JsonSerializer.Create(SerializerSettings); serializer.Serialize(writer, Data); writer.Flush(); } }
public Message SerializeRequest(MessageVersion messageVersion, object[] parameters)
{
    byte[] body;
    var serializer = NewtonsoftJsonSettings.GetSerializer();
    using (var ms = new MemoryStream())
    {
        using (var sw = new StreamWriter(ms))
        {
            using (Newtonsoft.Json.JsonWriter writer = new Newtonsoft.Json.JsonTextWriter(sw))
            {
                writer.Formatting = NewtonsoftJsonSettings.GetFormatting();
                if (parameters.Length == 1)
                {
                    // Single parameter, assuming bare
                    serializer.Serialize(sw, parameters[0]);
                }
                else
                {
                    // Wrapped: write each message part as a property, paired with the matching parameter
                    writer.WriteStartObject();
                    for (int i = 0; i < this.m_operation.Messages[0].Body.Parts.Count; i++)
                    {
                        writer.WritePropertyName(this.m_operation.Messages[0].Body.Parts[i].Name);
                        serializer.Serialize(writer, parameters[i]);
                    }
                    writer.WriteEndObject();
                }
                writer.Flush();
                sw.Flush();
                body = ms.ToArray();
            }
        }
    }
    Message requestMessage = Message.CreateMessage(messageVersion, m_operation.Messages[0].Action, new RawBodyWriter(body));
    requestMessage.Headers.To = m_operationUri;
    requestMessage.Properties.Add(WebBodyFormatMessageProperty.Name, new WebBodyFormatMessageProperty(WebContentFormat.Raw));
    HttpRequestMessageProperty reqProp = new HttpRequestMessageProperty();
    reqProp.Headers[HttpRequestHeader.ContentType] = "application/json";
    requestMessage.Properties.Add(HttpRequestMessageProperty.Name, reqProp);
    return requestMessage;
}
public static void Serialize(System.IO.TextWriter tw, object value)
{
    // if (value == null)
    System.Type type = value.GetType();

    Newtonsoft.Json.JsonSerializer json = new Newtonsoft.Json.JsonSerializer();
    json.NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore;
    json.ObjectCreationHandling = Newtonsoft.Json.ObjectCreationHandling.Replace;
    json.MissingMemberHandling = Newtonsoft.Json.MissingMemberHandling.Ignore;
    json.ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Ignore;

    if (type == typeof(System.Data.DataRow))
    {
        json.Converters.Add(new DataRowConverter());
    }
    else if (type == typeof(System.Data.DataTable))
    {
        json.Converters.Add(new DataTableConverter());
    }
    else if (type == typeof(System.Data.DataSet))
    {
        json.Converters.Add(new DataSetConverter());
    }

    using (Newtonsoft.Json.JsonTextWriter writer = new Newtonsoft.Json.JsonTextWriter(tw))
    {
        // if (this.FormatJsonOutput)
#if DEBUG
        writer.Formatting = Newtonsoft.Json.Formatting.Indented;
#else
        writer.Formatting = Newtonsoft.Json.Formatting.None;
#endif
        writer.QuoteChar = '"';
        json.Serialize(writer, value);
        writer.Flush();
        tw.Flush();
        tw.Close();
        writer.Close();
    } // End Using writer
} // End Sub Serialize
public static void Serialize(object value, System.IO.Stream s, Newtonsoft.Json.Serialization.IContractResolver resolver) { using (System.IO.StreamWriter writer = new System.IO.StreamWriter(s, System.Text.Encoding.UTF8)) using (Newtonsoft.Json.JsonTextWriter jsonWriter = new Newtonsoft.Json.JsonTextWriter(writer)) { Newtonsoft.Json.JsonSerializer ser = new Newtonsoft.Json.JsonSerializer(); ser.Formatting = Newtonsoft.Json.Formatting.Indented; if (resolver != null) { ser.ContractResolver = resolver; } ser.Serialize(jsonWriter, value); jsonWriter.Flush(); }// End Using jsonWriter } // End Sub Serialize
public Message SerializeRequest(MessageVersion messageVersion, object[] parameters)
{
    byte[] body;
    Newtonsoft.Json.JsonSerializer serializer = new Newtonsoft.Json.JsonSerializer();
    using (MemoryStream ms = new MemoryStream())
    {
        using (StreamWriter sw = new StreamWriter(ms, Encoding.UTF8))
        {
            using (Newtonsoft.Json.JsonWriter writer = new Newtonsoft.Json.JsonTextWriter(sw))
            {
                writer.Formatting = Newtonsoft.Json.Formatting.Indented;
                if (parameters.Length == 1)
                {
                    // Single parameter, assuming bare
                    serializer.Serialize(sw, parameters[0]);
                }
                else
                {
                    writer.WriteStartObject();
                    for (int i = 0; i < this.operation.Messages[0].Body.Parts.Count; i++)
                    {
                        writer.WritePropertyName(this.operation.Messages[0].Body.Parts[i].Name);
                        serializer.Serialize(writer, parameters[i]);
                    }
                    writer.WriteEndObject();
                }
                writer.Flush();
                sw.Flush();
                body = ms.ToArray();
            }
        }
    }
    Message requestMessage = Message.CreateMessage(messageVersion, operation.Messages[0].Action, new RawBodyWriter(body));
    requestMessage.Headers.To = operationUri;
    requestMessage.Properties.Add(WebBodyFormatMessageProperty.Name, new WebBodyFormatMessageProperty(WebContentFormat.Raw));
    HttpRequestMessageProperty reqProp = new HttpRequestMessageProperty();
    reqProp.Headers[HttpRequestHeader.ContentType] = "application/json";
    requestMessage.Properties.Add(HttpRequestMessageProperty.Name, reqProp);
    return requestMessage;
}
/// <summary>
/// Stored procedure that returns results
/// </summary>
/// <param name="strSql">Any SQL statement</param>
/// <param name="parameters">Parameter values</param>
/// <returns></returns>
public override string ExecuteJson(string strSql, params DbParameter[] parameters)
{
    try
    {
        DbCommand cmd = BuilderQueryCommand(strSql, parameters);
        System.IO.StringWriter sw = new System.IO.StringWriter();
        Newtonsoft.Json.JsonWriter writer = new Newtonsoft.Json.JsonTextWriter(sw);
        writer.WriteStartArray();
        using (DbDataReader dr = cmd.ExecuteReader())
        {
            do
            {
                // One JSON object per row, property names taken from the column names
                while (dr.Read())
                {
                    writer.WriteStartObject();
                    for (int i = 0; i < dr.FieldCount; i++)
                    {
                        writer.WritePropertyName(dr.GetName(i));
                        writer.WriteValue(Convert.ToString(dr[i]));
                    }
                    writer.WriteEndObject();
                }
            } while (dr.NextResult());
        }
        writer.WriteEndArray();
        writer.Flush();
        return sw.GetStringBuilder().ToString();
    }
    catch (DbException)
    {
        // Rethrow without resetting the stack trace
        throw;
    }
    catch
    {
        throw;
    }
}
/// <summary>
/// Serializes a <see cref="RpcRequest"/> for transmission to the server.
/// </summary>
/// <param name="request">The <see cref="RpcRequest"/> to serialize.</param>
/// <param name="outputStream">A <see cref="Stream"/> to write the serialized output to.</param>
/// <remarks>
/// <para>The <see cref="RpcRequest.Arguments"/> must be one of the following types;
/// <list type="bullet">
/// <item>Dictionary&lt;string, object&gt;</item>
/// <item>object[]</item>
/// <item>IEnumerable&lt;KeyValuePair&lt;string, object&gt;&gt;</item>
/// </list>
/// </para>
/// </remarks>
public void Serialize(RpcRequest request, System.IO.Stream outputStream)
{
    if (request == null) { throw new ArgumentNullException(nameof(request)); }
    if (outputStream == null) { throw new ArgumentNullException(nameof(outputStream)); }

    object jsonRpcRequest = RequestToJsonRequest(request);

    using (var textWriter = new System.IO.StreamWriter(outputStream, _TextEncoding))
    using (var writer = new Newtonsoft.Json.JsonTextWriter(textWriter))
    {
        _Serializer.Serialize(writer, jsonRpcRequest);
        writer.Flush();
        textWriter.Flush();
    }
}
public static void SerializeObject(System.IO.Stream strm, object value, bool pretty) { Newtonsoft.Json.JsonSerializer ser = new Newtonsoft.Json.JsonSerializer(); using (System.IO.TextWriter sw = new System.IO.StreamWriter(strm, System.Text.Encoding.UTF8) ) { using (Newtonsoft.Json.JsonTextWriter jsonWriter = new Newtonsoft.Json.JsonTextWriter(sw)) { if (pretty) { jsonWriter.Formatting = Newtonsoft.Json.Formatting.Indented; } ser.Serialize(jsonWriter, value); jsonWriter.Flush(); } // End Using jsonWriter sw.Flush(); } // End Using sw } // End Sub SerializeObject
private static string GetHelloWorldExpectedString(bool prettyPrint, bool isUtf8)
{
    MemoryStream ms = new MemoryStream();
    StreamWriter streamWriter = new StreamWriter(ms, new UTF8Encoding(false), 1024, true);
    StringBuilder sb = new StringBuilder();
    StringWriter stringWriter = new StringWriter(sb);
    TextWriter writer = isUtf8 ? streamWriter : (TextWriter)stringWriter;

    var json = new Newtonsoft.Json.JsonTextWriter(writer)
    {
        Formatting = prettyPrint ? Newtonsoft.Json.Formatting.Indented : Newtonsoft.Json.Formatting.None
    };

    json.WriteStartObject();
    json.WritePropertyName("message");
    json.WriteValue("Hello, World!");
    json.WriteEnd();
    json.Flush();

    return isUtf8 ? Encoding.UTF8.GetString(ms.ToArray()) : sb.ToString();
}
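Several of these benchmark helpers compare Json.NET output against System.Text.Json, which appears elsewhere in the suite. Below is a minimal sketch of the same hello-world payload written with Utf8JsonWriter; the helper name and the MemoryStream handling are illustrative, and the indented output differs from Json.NET's in whitespace details.

static string GetHelloWorldWithUtf8JsonWriter(bool prettyPrint)
{
    using (var ms = new MemoryStream())
    {
        using (var json = new System.Text.Json.Utf8JsonWriter(ms, new System.Text.Json.JsonWriterOptions { Indented = prettyPrint }))
        {
            json.WriteStartObject();
            json.WriteString("message", "Hello, World!");
            json.WriteEndObject();
            json.Flush();
        }
        return Encoding.UTF8.GetString(ms.ToArray());
    }
}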
public override void ExecuteResult(System.Web.Mvc.ControllerContext context) { System.Web.HttpResponseBase response = context.HttpContext.Response; response.ContentType = "application/json"; response.ContentEncoding = System.Text.Encoding.UTF8; Newtonsoft.Json.JsonSerializer serializer = new Newtonsoft.Json.JsonSerializer(); using (System.IO.StreamWriter sw = new System.IO.StreamWriter(response.OutputStream)) { using (Newtonsoft.Json.JsonTextWriter writer = new Newtonsoft.Json.JsonTextWriter(sw)) { writer.WriteStartArray(); foreach (object item in itemsToSerialize) { Newtonsoft.Json.Linq.JObject obj = Newtonsoft.Json.Linq.JObject.FromObject(item, serializer); obj.WriteTo(writer); writer.Flush(); } // Next item writer.WriteEndArray(); } // End using writer } // End Using sw }
public override string ToString()
{
    using (StringWriter sw = new StringWriter())
    using (Newtonsoft.Json.JsonTextWriter writer = new Newtonsoft.Json.JsonTextWriter(sw))
    {
#if DEBUG
        writer.Formatting = Newtonsoft.Json.Formatting.Indented;
#endif
        writeJson(writer);
        writer.Flush();
        return sw.ToString();
    }
}
public void Setup() { Model.Initialize(); T deserializedModel; // Newtonsoft using (MemoryStream stream = new MemoryStream()) { using StreamWriter textWriter = new StreamWriter(stream); using JsonTextWriter jsonWriter = new JsonTextWriter(textWriter); var serializer = new Newtonsoft.Json.JsonSerializer(); serializer.Serialize(jsonWriter, Model); jsonWriter.Flush(); NewtonsoftJsonData = stream.GetBuffer(); stream.Seek(0, SeekOrigin.Begin); using StreamReader textReader = new StreamReader(stream); using JsonTextReader jsonReader = new JsonTextReader(textReader); deserializedModel = serializer.Deserialize <T>(jsonReader); if (!Model.Equals(deserializedModel)) { throw new InvalidOperationException("Failed comparison with Newtonsoft.Json"); } } // Binary formatter using (MemoryStream stream = new MemoryStream()) { var formatter = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter(); formatter.Serialize(stream, Model); BinaryFormatterData = stream.GetBuffer(); stream.Seek(0, SeekOrigin.Begin); deserializedModel = (T)formatter.Deserialize(stream); if (!Model.Equals(deserializedModel)) { throw new InvalidOperationException("Failed comparison with BinaryFormatter"); } } // .NETCore JSON using (MemoryStream stream = new MemoryStream()) { using Utf8JsonWriter jsonWriter = new Utf8JsonWriter(stream); System.Text.Json.JsonSerializer.Serialize(jsonWriter, Model); DotNetCoreJsonData = stream.GetBuffer(); stream.Seek(0, SeekOrigin.Begin); deserializedModel = System.Text.Json.JsonSerializer.DeserializeAsync <T>(stream).Result; if (!Model.Equals(deserializedModel)) { throw new InvalidOperationException("Failed comparison with System.Text.Json"); } } // DataContractJson using (MemoryStream stream = new MemoryStream()) { var serializer = new System.Runtime.Serialization.Json.DataContractJsonSerializer(typeof(T)); serializer.WriteObject(stream, Model); DataContractJsonData = stream.GetBuffer(); stream.Seek(0, SeekOrigin.Begin); deserializedModel = (T)serializer.ReadObject(stream); if (!Model.Equals(deserializedModel)) { throw new InvalidOperationException("Failed comparison with DataContractJson"); } } // XML serializer using (MemoryStream stream = new MemoryStream()) { var serializer = new System.Xml.Serialization.XmlSerializer(typeof(T)); serializer.Serialize(stream, Model); XmlSerializerData = stream.GetBuffer(); stream.Seek(0, SeekOrigin.Begin); deserializedModel = (T)serializer.Deserialize(stream); if (!Model.Equals(deserializedModel)) { throw new InvalidOperationException("Failed comparison with XmlSerializer"); } } // Portable Xaml using (MemoryStream stream = new MemoryStream()) { Portable.Xaml.XamlServices.Save(stream, Model); PortableXamlData = stream.GetBuffer(); stream.Seek(0, SeekOrigin.Begin); _ = Portable.Xaml.XamlServices.Load(stream); if (!Model.Equals(deserializedModel)) { throw new InvalidOperationException("Failed comparison with Portable.Xaml"); } } // Utf8Json using (MemoryStream stream = new MemoryStream()) { Utf8JsonSerializer.Serialize(stream, Model); Utf8JsonData = stream.GetBuffer(); stream.Seek(0, SeekOrigin.Begin); deserializedModel = Utf8JsonSerializer.Deserialize <T>(stream); if (!Model.Equals(deserializedModel)) { throw new InvalidOperationException("Failed comparison with Utf8Json"); } } // MessagePack using (MemoryStream stream = new MemoryStream()) { MessagePack.MessagePackSerializer.Serialize(stream, Model, MessagePack.Resolvers.ContractlessStandardResolver.Instance); MessagePackData = stream.GetBuffer(); stream.Seek(0, SeekOrigin.Begin); deserializedModel = 
MessagePack.MessagePackSerializer.Deserialize <T>(stream, MessagePack.Resolvers.ContractlessStandardResolver.Instance); if (!Model.Equals(deserializedModel)) { throw new InvalidOperationException("Failed comparison with MessagePack"); } } // BinaryPack using (MemoryStream stream = new MemoryStream()) { BinaryConverter.Serialize(Model, stream); BinaryPackData = stream.GetBuffer(); stream.Seek(0, SeekOrigin.Begin); deserializedModel = BinaryConverter.Deserialize <T>(stream); if (!Model.Equals(deserializedModel)) { throw new InvalidOperationException("Failed comparison with BinaryPack"); } } }
public static void StreamJsonZip(Stream stream, IStreamController<Newtonsoft.Json.JsonTextWriter> streamController, Encoding encoding) { try { Stream zipStream = new System.IO.Compression.GZipStream(stream, System.IO.Compression.CompressionMode.Compress); using (var writer = new Newtonsoft.Json.JsonTextWriter(new StreamWriter(zipStream, encoding))) { streamController.StreamTo(writer, new Queue<Action>()); writer.Flush(); } } catch (Exception e) { _logger.Error(e.Message, e); throw _builder.Build(e); } }
} // End Sub SerializeLargeDataset public void SerializeLargeTable(HttpContext context) { Newtonsoft.Json.JsonSerializer ser = new Newtonsoft.Json.JsonSerializer(); using (Newtonsoft.Json.JsonTextWriter jsonWriter = new Newtonsoft.Json.JsonTextWriter(context.Response.Output)) { jsonWriter.Formatting = Newtonsoft.Json.Formatting.Indented; jsonWriter.WriteStartObject(); using (System.Data.Common.DbConnection con = SQL.CreateConnection()) { if (con.State != System.Data.ConnectionState.Open) { con.Open(); } using (System.Data.Common.DbCommand cmd = con.CreateCommand()) { cmd.CommandText = "SELECT TOP 10000 * FROM T_LOG_SAP_Interface"; using (System.Data.Common.DbDataReader dr = cmd.ExecuteReader(System.Data.CommandBehavior.SequentialAccess | System.Data.CommandBehavior.CloseConnection )) { jsonWriter.WritePropertyName("Columns"); jsonWriter.WriteStartArray(); for (int i = 0; i < dr.FieldCount; i++) { string colName = dr.GetName(i); System.Type t = dr.GetFieldType(i); jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName("ColumnName"); jsonWriter.WriteValue(colName); jsonWriter.WritePropertyName("DataType"); jsonWriter.WriteValue(GetAssemblyQualifiedNoVersionName(t)); // jsonWriter.WritePropertyName("DateTimeMode"); // jsonWriter.WriteValue(column.DateTimeMode.ToString()); jsonWriter.WriteEndObject(); } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Rows"); jsonWriter.WriteStartArray(); if (dr.HasRows) { int fieldCount = dr.FieldCount; while (dr.Read()) { jsonWriter.WriteStartArray(); for (int i = 0; i < fieldCount; ++i) { object obj = dr.GetValue(i); jsonWriter.WriteValue(obj); } // Next i jsonWriter.WriteEndArray(); jsonWriter.Flush(); context.Response.Output.Flush(); context.Response.Flush(); } // Whend while (dr.Read()) } // End if (dr.HasRows) dr.Close(); jsonWriter.WriteEndArray(); } // End using dr } // End using cmd if (con.State != System.Data.ConnectionState.Closed) { con.Close(); } } // End using con jsonWriter.WriteEndObject(); jsonWriter.Flush(); context.Response.Output.Flush(); context.Response.OutputStream.Flush(); context.Response.Flush(); } // End Using jsonWriter } // End Sub SerializeLargeTable
/// <summary> /// The process that serializes data from a <see cref="CoCoL.Network.PendingNetworkRequest" /> and sends it into the channel. /// </summary> /// <returns>The awaitable task.</returns> /// <param name="client">The <see cref="System.Net.Sockets.TcpClient"/> to read data from.</param> /// <param name="stream">The stream to write data to.</param> /// <param name="channel">The channel to read requests from.</param> /// <param name="selfid">A string used to identify this process in logs</param> private static async Task WriterProcess(TcpClient client, Stream stream, IReadChannelEnd <PendingNetworkRequest> channel, string selfid) { try { var headbuffer = new byte[SMALL_MESSAGE_SIZE]; var json = new Newtonsoft.Json.JsonSerializer(); using (client) using (stream) using (channel) { while (true) { var prnq = await channel.ReadAsync(); var header = new RequestHeader() { ChannelID = prnq.ChannelID, ChannelDataType = prnq.ChannelDataType.AssemblyQualifiedName, RequestID = prnq.RequestID, SourceID = prnq.SourceID, RequestType = prnq.RequestType, Timeout = prnq.Timeout, PayloadClassName = prnq.Value == null ? null : prnq.Value.GetType().AssemblyQualifiedName, NoOffer = prnq.Offer == null }; ushort headlen; using (var ms = new MemoryStream(headbuffer, true)) { // Make space for the size fields ms.Write(headbuffer, 0, 8 + 2); using (var tw = new StreamWriter(ms)) using (var jw = new Newtonsoft.Json.JsonTextWriter(tw)) { json.Serialize(jw, header); jw.Flush(); await tw.FlushAsync(); headlen = (ushort)(ms.Position - 8 - 2); } } // We write it all into the array before writing to the stream if (headlen > SMALL_MESSAGE_SIZE - 8 - 2 - 8) { throw new Exception("Too larger header"); } // Make a memory stream for the payload using (var ms = new MemoryStream()) using (var tw = new StreamWriter(ms)) using (var jw = new Newtonsoft.Json.JsonTextWriter(tw)) { ulong payloadlen = 0; if (prnq.Value != null) { json.Serialize(jw, prnq.Value); jw.Flush(); await tw.FlushAsync(); payloadlen = (ulong)ms.Length; ms.Position = 0; } if (payloadlen > MAX_MESSAGE_SIZE) { throw new Exception("Too large message payload"); } ulong packlen = 8uL + 2uL + headlen + 8uL + payloadlen; Array.Copy(BitConverter.GetBytes(packlen), headbuffer, 8); Array.Copy(BitConverter.GetBytes(headlen), 0, headbuffer, 8, 2); Array.Copy(BitConverter.GetBytes(payloadlen), 0, headbuffer, 8 + 2 + headlen, 8); LOG.DebugFormat("{2}: Sending {0} - {1} request", prnq.RequestID, prnq.RequestType, selfid); await stream.WriteAsync(headbuffer, 0, headlen + 8 + 2 + 8); if (payloadlen != 0) { await ms.CopyToAsync(stream); } LOG.DebugFormat("{4}: Sent {0} - {1} request with {2} bytes to {3}", prnq.RequestID, prnq.RequestType, packlen, client.Client.RemoteEndPoint, selfid); await stream.FlushAsync(); } } } } catch (Exception ex) { if (!ex.IsRetiredException()) { LOG.Error("Crashed network client writer side", ex); throw; } else { LOG.Info("Stopped network client writer"); } } }
/// <summary> /// For the specified response processes any associated tracking table documents, converting from csv to json array. /// The json array is set on the ResponseData property of the response, save is _not_ called. /// </summary> /// <param name="response"></param> /// <returns></returns> public async Task Process(Response response) { Guid[] trackingTableDocuments = await _db.Documents.AsNoTracking().Where(d => d.ItemID == response.ID && d.Kind == TrackingTableDocumentKind).Select(d => d.ID).ToArrayAsync(); if (trackingTableDocuments.Length == 0) { return; } StringBuilder buffer = new StringBuilder(); string[] tableHeader; string[] currentLine; using (var writer = new Newtonsoft.Json.JsonTextWriter(new StringWriter(buffer))) { writer.QuoteName = true; writer.WriteStartArray(); foreach (Guid trackingTableDocumentID in trackingTableDocuments) { //read the tracking table csv into a dictionary, each dictionary represents a row in the csv document using (var dsDataContext = new DataContext()) using (var reader = new StreamReader(new Data.Documents.DocumentStream(dsDataContext, trackingTableDocumentID))) using (var csv = new Microsoft.VisualBasic.FileIO.TextFieldParser(reader)) { csv.SetDelimiters(","); csv.TrimWhiteSpace = true; tableHeader = csv.ReadFields(); while (csv.EndOfData == false) { currentLine = csv.ReadFields(); if (currentLine.Length == 0) { continue; } writer.WriteStartObject(); for (int i = 0; i < currentLine.Length; i++) { writer.WritePropertyName(tableHeader[i]); writer.WriteValue(currentLine[i]); } writer.WriteEndObject(); } reader.Close(); } } writer.WriteEndArray(); writer.Flush(); response.ResponseData = buffer.ToString(); } }
public void SerializeLargeDataset(HttpContext context) { string strSQL = @" SELECT TOP 10 * FROM T_Benutzer; SELECT TOP 10 * FROM T_Benutzergruppen; -- SELECT * FROM T_Benutzer LIMIT 10; -- SELECT * FROM T_Benutzergruppen LIMIT 10; -- SELECT * FROM T_Benutzer OFFSET 0 FETCH NEXT 10 ROWS ONLY; -- SELECT * FROM T_Benutzergruppen OFFSET 0 FETCH NEXT 10 ROWS ONLY; "; Newtonsoft.Json.JsonSerializer ser = new Newtonsoft.Json.JsonSerializer(); using (Newtonsoft.Json.JsonTextWriter jsonWriter = new Newtonsoft.Json.JsonTextWriter(context.Response.Output)) { jsonWriter.Formatting = Newtonsoft.Json.Formatting.Indented; jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName("Tables"); jsonWriter.WriteStartArray(); using (System.Data.Common.DbConnection con = SQL.CreateConnection()) { if (con.State != System.Data.ConnectionState.Open) { con.Open(); } using (System.Data.Common.DbCommand cmd = con.CreateCommand()) { cmd.CommandText = strSQL; using (System.Data.Common.DbDataReader dr = cmd.ExecuteReader(System.Data.CommandBehavior.SequentialAccess | System.Data.CommandBehavior.CloseConnection )) { do { jsonWriter.WriteStartObject(); // tbl = new Table(); jsonWriter.WritePropertyName("Columns"); jsonWriter.WriteStartArray(); for (int i = 0; i < dr.FieldCount; ++i) { jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName("ColumnName"); jsonWriter.WriteValue(dr.GetName(i)); jsonWriter.WritePropertyName("FieldType"); jsonWriter.WriteValue(GetAssemblyQualifiedNoVersionName(dr.GetFieldType(i))); jsonWriter.WriteEndObject(); } // Next i jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Rows"); jsonWriter.WriteStartArray(); if (dr.HasRows) { while (dr.Read()) { object[] thisRow = new object[dr.FieldCount]; jsonWriter.WriteStartArray(); // object[] thisRow = new object[dr.FieldCount]; for (int i = 0; i < dr.FieldCount; ++i) { jsonWriter.WriteValue(dr.GetValue(i)); } // Next i jsonWriter.WriteEndArray(); // tbl.Rows.Add(thisRow); } // Whend } // End if (dr.HasRows) jsonWriter.WriteEndArray(); jsonWriter.WriteEndObject(); // ser.Tables.Add(tbl); } while (dr.NextResult()); } // End using dr } // End using cmd if (con.State != System.Data.ConnectionState.Closed) { con.Close(); } } // End using con jsonWriter.WriteEndArray(); jsonWriter.WriteEndObject(); jsonWriter.Flush(); } // End Using jsonWriter context.Response.Output.Flush(); context.Response.OutputStream.Flush(); context.Response.Flush(); } // End Sub SerializeLargeDataset
public ActionResult OpenNBG(string fname, bool antrag, bool sign, bool do_xtraData) { string err = null; string rawHtml = null; string StartID = null; if (string.IsNullOrWhiteSpace(fname)) { err = "Der übergebene Parameter fname darf n icht leer sein. Starten Sie am besten die Anwendung über die Index.cshtml."; } else { var stream = System.Reflection.Assembly.GetExecutingAssembly().GetManifestResourceStream(fname); byte[] BT4allFile = null; if (fname.EndsWith(".xml", StringComparison.CurrentCultureIgnoreCase)) { var biproString = new System.IO.StreamReader(stream).ReadToEnd(); using (var mem = new System.IO.MemoryStream()) { var writer = new Newtonsoft.Json.JsonTextWriter( new System.IO.StreamWriter(mem)); writer.WriteStartObject(); writer.WritePropertyName("module"); writer.WriteValue("YA"); writer.WritePropertyName("data"); writer.WriteValue(biproString); writer.WritePropertyName("callBackURL"); writer.WriteValue("https://nuernberger.de"); writer.WriteEndObject(); writer.Flush(); BT4allFile = mem.GetBuffer().Take((int)mem.Length).ToArray(); } } else if (fname.EndsWith(".txt", StringComparison.CurrentCultureIgnoreCase)) { var biproString = new System.IO.StreamReader(stream).ReadToEnd(); using (var mem = new System.IO.MemoryStream()) { var writer = new Newtonsoft.Json.JsonTextWriter( new System.IO.StreamWriter(mem)); writer.WriteStartObject(); writer.WritePropertyName("module"); writer.WriteValue("LVLebenPrivat_6"); writer.WritePropertyName("data"); writer.WriteValue(biproString); writer.WritePropertyName("callBackURL"); writer.WriteValue("https://nuernberger.de"); writer.WriteEndObject(); writer.Flush(); BT4allFile = mem.GetBuffer().Take((int)mem.Length).ToArray(); } } else //PDF auspacken { iTextSharp.text.pdf.PdfReader reader; try { reader = new iTextSharp.text.pdf.PdfReader(stream); iTextSharp.text.pdf.PdfDictionary root = reader.Catalog; iTextSharp.text.pdf.PdfDictionary names = root.GetAsDict(iTextSharp.text.pdf.PdfName.NAMES); if (names != null) { iTextSharp.text.pdf.PdfDictionary embeddedFiles = names.GetAsDict(iTextSharp.text.pdf.PdfName.EMBEDDEDFILES); if (embeddedFiles != null) { var en = embeddedFiles.Keys.GetEnumerator(); while (en.MoveNext()) { var obj = embeddedFiles.GetAsArray(en.Current as iTextSharp.text.pdf.PdfName); iTextSharp.text.pdf.PdfDictionary fileSpec = obj.GetAsDict(1); iTextSharp.text.pdf.PdfDictionary file = fileSpec.GetAsDict(iTextSharp.text.pdf.PdfName.EF); foreach (iTextSharp.text.pdf.PdfName key in file.Keys) { iTextSharp.text.pdf.PRStream innerstream = (iTextSharp.text.pdf.PRStream) iTextSharp.text.pdf.PdfReader.GetPdfObject(file.GetAsIndirectObject(key)); if (innerstream != null) { BT4allFile = iTextSharp.text.pdf.PdfReader.GetStreamBytes(innerstream); break; } } } } } } catch (Exception ex) { err = ex.ToString(); } } //Request mit Post des Falls und Response mit der ID if (BT4allFile != null) { try { //var Req = System.Net.WebRequest.Create("http://localhost/BT4All/SV/d.svc/m?i=getStartID_SLS_NoSign"); var Req = System.Net.WebRequest.Create("https://test.nuernberger-bt4all.de/BT4All/SV/d.svc/m?i=getStartID_SLS_NoSign"); Req.Method = "POST"; //Req.UseDefaultCredentials = true; var xtraData = new byte[0]; if (do_xtraData) { var xstream = System.Reflection.Assembly.GetExecutingAssembly().GetManifestResourceStream( "AufrufVergleicher.170728_NuernbergerVorschlag.xml"); var bList = new List <byte>(); int b = xstream.ReadByte(); while (b != -1) { bList.Add((byte)b); b = xstream.ReadByte(); } } Req.ContentLength = BT4allFile.Length + xtraData.Length; if 
(xtraData.Length > 0) { Req.Headers.Add("xtraDataLen", xtraData.Length.ToString()); } var reqS = Req.GetRequestStream(); reqS.Write(BT4allFile, 0, BT4allFile.Length); reqS.Write(xtraData, 0, xtraData.Length); var resp = Req.GetResponse(); var respStream = resp.GetResponseStream(); var respData = new byte[resp.ContentLength]; respStream.Read(respData, 0, respData.Length); try { using (var mem = new System.IO.MemoryStream(respData)) { Newtonsoft.Json.JsonTextReader jReader = new Newtonsoft.Json.JsonTextReader( new System.IO.StreamReader(mem)); while (jReader.Read()) { var tp = jReader.TokenType; var val = jReader.Value; if (tp == Newtonsoft.Json.JsonToken.PropertyName && (val as string) == "sid") { StartID = jReader.ReadAsString(); break; } } } } catch { using (var strReader = new System.IO.StreamReader(new System.IO.MemoryStream(respData))) { err = "Fehler beim Parsen des JSON Objekts. Serverresponse: "; rawHtml = strReader.ReadToEnd(); } } } catch (Exception ex) { err = ex.ToString(); } } } //=>iframe wird in der Views/Home/INDEX.cshtml aufgebaut ViewBag.err = err; ViewBag.rawHtml = rawHtml; ViewBag.StartID = StartID; ViewBag.antrag = antrag; ViewBag.sign = sign; return(View()); }
}// End Sub public static void SerializeDataTableAsAssociativeJsonArray( System.Data.Common.DbCommand cmd , Microsoft.AspNetCore.Http.HttpContext context , bool pretty , System.Text.Encoding enc) { SqlService service = (SqlService)context.RequestServices.GetService(typeof(SqlService)); using (System.IO.TextWriter sw = new System.IO.StreamWriter(context.Response.Body, enc)) { using (Newtonsoft.Json.JsonTextWriter jsonWriter = new Newtonsoft.Json.JsonTextWriter(sw)) { if (pretty) { jsonWriter.Formatting = Newtonsoft.Json.Formatting.Indented; } // jsonWriter.WriteStartObject(); // jsonWriter.WritePropertyName("tables"); // jsonWriter.WriteStartArray(); using (System.Data.Common.DbConnection con = service.Connection) { cmd.Connection = con; if (con.State != System.Data.ConnectionState.Open) { con.Open(); } try { using (System.Data.Common.DbDataReader dr = cmd.ExecuteReader(System.Data.CommandBehavior.SequentialAccess | System.Data.CommandBehavior.CloseConnection )) { do { // jsonWriter.WriteStartObject(); // tbl = new Table(); //jsonWriter.WritePropertyName("columns"); //// WriteArray(jsonWriter, dr); //WriteAssociativeArray(jsonWriter, dr); //jsonWriter.WritePropertyName("rows"); jsonWriter.WriteStartArray(); if (dr.HasRows) { string[] columns = new string[dr.FieldCount]; for (int i = 0; i < dr.FieldCount; i++) { columns[i] = dr.GetName(i); } // Next i while (dr.Read()) { // jsonWriter.WriteStartArray(); // object[] thisRow = new object[dr.FieldCount]; jsonWriter.WriteStartObject(); // tbl = new Table(); for (int i = 0; i < dr.FieldCount; ++i) { jsonWriter.WritePropertyName(columns[i]); object obj = dr.GetValue(i); if (obj == System.DBNull.Value) { obj = null; } jsonWriter.WriteValue(obj); } // Next i // jsonWriter.WriteEndArray(); // tbl.Rows.Add(thisRow); jsonWriter.WriteEndObject(); } // Whend } // End if (dr.HasRows) jsonWriter.WriteEndArray(); // jsonWriter.WriteEndObject(); // ser.Tables.Add(tbl); } while (dr.NextResult()); } // End using dr } catch (System.Exception ex) { System.Console.WriteLine(ex.Message); throw; } if (con.State != System.Data.ConnectionState.Closed) { con.Close(); } } // End using con // jsonWriter.WriteEndArray(); // jsonWriter.WriteEndObject(); jsonWriter.Flush(); } // End Using jsonWriter } // End Using sw } // End Sub SerializeDataTableAsAssociativeJsonArray
public void AnalysisCenterWithNewPayloadFromDataPartners() { _adapter.Initialize(CreateSettings(MonitorFolderAnalysisCenter), Guid.NewGuid().ToString("D")); //remove all the directories var directories = Directory.GetDirectories(MonitorFolderAnalysisCenter); foreach (var dir in directories) { Directory.Delete(dir, true); } //create the inputfiles folder string inputfilesFolderPath = Path.Combine(MonitorFolderAnalysisCenter, "inputfiles"); if (!Directory.Exists(inputfilesFolderPath)) { Directory.CreateDirectory(inputfilesFolderPath); } //payload will consist of input files from each data partner, need to be extracted to partner specific folders and then monitor the inputfiles folder List <DTO.QueryComposer.DistributedRegressionAnalysisCenterManifestItem> manifestItems = new List <DTO.QueryComposer.DistributedRegressionAnalysisCenterManifestItem>(); List <DocumentWithStream> datapartnerDocuments = new List <DocumentWithStream>(); for (int i = 1; i <= 5; i++) { Guid documentID = Guid.NewGuid(); datapartnerDocuments.Add(new DocumentWithStream(documentID, new Document(documentID.ToString("D"), "text/plain", "payload_document.txt"), new MemoryStream(System.Text.Encoding.UTF8.GetBytes("Hello!")))); manifestItems.Add(new DTO.QueryComposer.DistributedRegressionAnalysisCenterManifestItem { DataMart = "Data Partner " + i, DataMartID = Guid.NewGuid(), DataPartnerIdentifier = "msoc" + i, DocumentID = documentID, RequestDataMartID = Guid.NewGuid(), ResponseID = Guid.NewGuid(), RevisionSetID = documentID }); } //create the analysis center manifest MemoryStream manifestStream; using (var ms = new MemoryStream()) using (var sr = new StreamWriter(ms)) using (var jr = new Newtonsoft.Json.JsonTextWriter(sr)) { var serializer = new Newtonsoft.Json.JsonSerializer(); serializer.Serialize(jr, manifestItems); jr.Flush(); manifestStream = new MemoryStream(ms.ToArray()); } Guid manifestID = Guid.NewGuid(); datapartnerDocuments.Add(new DocumentWithStream(manifestID, new Document(manifestID, "application/json", "manifest.json", false, Convert.ToInt32(manifestStream.Length), Lpp.Dns.DTO.Enums.DocumentKind.SystemGeneratedNoLog), manifestStream)); var timer = new System.Timers.Timer(TimeSpan.FromSeconds(1).TotalMilliseconds); timer.AutoReset = true; timer.Elapsed += (object sender, System.Timers.ElapsedEventArgs e) => { if (Directory.Exists(Path.Combine(MonitorFolderAnalysisCenter, "inputfiles"))) { timer.Stop(); using (var fs = File.Create(Path.Combine(MonitorFolderAnalysisCenter, "inputfiles", ExecutionCompleteFilename))) { fs.Close(); } } }; timer.Start(); var responseDocuments = _adapter.StartRequest(datapartnerDocuments.ToArray()); for (int i = 1; i <= 5; i++) { Assert.IsTrue(Directory.Exists(Path.Combine(MonitorFolderAnalysisCenter, "msoc" + i))); } }
} // End Sub WriteArray public static void AnyDataReaderToAnyJson( string sql , SqlService service , System.Collections.Generic.Dictionary <string, object> pars , System.Web.HttpContext context , RenderType_t format) { using (System.Data.Common.DbConnection con = service.Connection) { using (System.Data.Common.DbCommand cmd = con.CreateCommand()) { cmd.CommandText = sql; service.AddParameterList(pars, cmd); // cmd.ExecuteNonQuery // cmd.ExecuteReader // cmd.ExecuteScalar using (System.Data.Common.DbDataReader dr = cmd.ExecuteReader( System.Data.CommandBehavior.SequentialAccess | System.Data.CommandBehavior.CloseConnection)) { using (System.IO.StreamWriter output = new System.IO.StreamWriter(context.Response.OutputStream)) { using (Newtonsoft.Json.JsonTextWriter jsonWriter = new Newtonsoft.Json.JsonTextWriter(output)) // context.Response.Output) { if (format.HasFlag(RenderType_t.Indented)) { jsonWriter.Formatting = Newtonsoft.Json.Formatting.Indented; } context.Response.StatusCode = (int)System.Net.HttpStatusCode.OK; context.Response.ContentType = "application/json"; jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName("tables"); jsonWriter.WriteStartArray(); do { if (!format.HasFlag(RenderType_t.Data_Only) && !format.HasFlag(RenderType_t.DataTable)) { jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName("columns"); if (format.HasFlag(RenderType_t.Columns_Associative)) { WriteAssociativeColumnsArray(jsonWriter, dr, format); } else if (format.HasFlag(RenderType_t.Columns_ObjectArray)) { WriteComplexArray(jsonWriter, dr, format); } else // (format.HasFlag(RenderType_t.Array)) { WriteArray(jsonWriter, dr); } } // End if (!format.HasFlag(RenderType_t.Data_Only)) if (!format.HasFlag(RenderType_t.Data_Only) && !format.HasFlag(RenderType_t.DataTable)) { jsonWriter.WritePropertyName("rows"); } // End if (!format.HasFlag(RenderType_t.Data_Only)) jsonWriter.WriteStartArray(); if (dr.HasRows) { string[] columns = null; if (format.HasFlag(RenderType_t.DataTable)) { columns = new string[dr.FieldCount]; for (int i = 0; i < dr.FieldCount; i++) { columns[i] = dr.GetName(i); } // Next i } // End if (format.HasFlag(RenderType_t.DataTable)) while (dr.Read()) { if (format.HasFlag(RenderType_t.DataTable)) { jsonWriter.WriteStartObject(); } else { jsonWriter.WriteStartArray(); } for (int i = 0; i <= dr.FieldCount - 1; i++) { object obj = dr.GetValue(i); if (obj == System.DBNull.Value) { obj = null; } if (columns != null && format.HasFlag(RenderType_t.DataTable)) { jsonWriter.WritePropertyName(columns[i]); } jsonWriter.WriteValue(obj); } // Next i if (format.HasFlag(RenderType_t.DataTable)) { jsonWriter.WriteEndObject(); } else { jsonWriter.WriteEndArray(); } } // Whend } // End if (dr.HasRows) jsonWriter.WriteEndArray(); if (!format.HasFlag(RenderType_t.Data_Only) && !format.HasFlag(RenderType_t.DataTable)) { jsonWriter.WriteEndObject(); } // End if (!format.HasFlag(RenderType_t.Data_Only)) } while (dr.NextResult()); jsonWriter.WriteEndArray(); jsonWriter.WriteEndObject(); jsonWriter.Flush(); output.Flush(); } // jsonWriter } // output } // dr } // End Using cmd if (con.State != System.Data.ConnectionState.Closed) { con.Close(); } } // con } // End Sub WriteArray
/// <summary> /// /// </summary> /// <param name="request"></param> /// <returns></returns> async Task FromDataPartner(RequestDataMart reqDM) { //close the current task var currentTask = await PmnTask.GetActiveTaskForRequestActivityAsync(reqDM.Request.ID, reqDM.Request.WorkFlowActivityID.Value, DB); CompleteTask(currentTask); //open new task and set the request to the new activity var task = DB.Actions.Add(PmnTask.CreateForWorkflowActivity(reqDM.Request.ID, ConductAnalysisActivityID, WorkflowID, DB)); reqDM.Request.WorkFlowActivityID = ConductAnalysisActivityID; //create new routing var analysisCenterRouting = await DB.RequestDataMarts.Include(rdm => rdm.Responses).Where(rdm => rdm.RequestID == reqDM.Request.ID && rdm.RoutingType == RoutingType.AnalysisCenter).FirstOrDefaultAsync(); Lpp.Dns.Data.Response analysisCenterResponse = null; if (analysisCenterRouting.Status == RoutingStatus.Draft && analysisCenterRouting.Responses.Count == 1 && analysisCenterRouting.Responses.Where(rsp => rsp.ResponseTime.HasValue == false).Any()) { //if the initial status of the routing is draft, and there is only a single response assume this is the first time hitting the analysis center. //use the existing response to submit to the analysis center analysisCenterResponse = analysisCenterRouting.Responses.First(); } else if (analysisCenterRouting.Status != RoutingStatus.Draft) { analysisCenterRouting.Status = RoutingStatus.Draft; } if (analysisCenterResponse == null) { analysisCenterResponse = analysisCenterRouting.AddResponse(IdentityID); } //use all the dp output documents to be the input documents for the AC routing //build a manifest for where the documents are coming from List <Lpp.Dns.DTO.QueryComposer.DistributedRegressionAnalysisCenterManifestItem> manifestItems = new List <DTO.QueryComposer.DistributedRegressionAnalysisCenterManifestItem>(); var q = from rd in DB.RequestDocuments join rsp in DB.Responses on rd.ResponseID equals rsp.ID join rdm in DB.RequestDataMarts on rsp.RequestDataMartID equals rdm.ID join dm in DB.DataMarts on rdm.DataMartID equals dm.ID join doc in DB.Documents on rd.RevisionSetID equals doc.RevisionSetID where rsp.Count == rsp.RequestDataMart.Responses.Max(r => r.Count) && rdm.RequestID == reqDM.Request.ID && rd.DocumentType == RequestDocumentType.Output && rdm.RoutingType == RoutingType.DataPartner && doc.ItemID == rsp.ID && doc.ID == DB.Documents.Where(dd => dd.RevisionSetID == doc.RevisionSetID && doc.ItemID == rsp.ID).OrderByDescending(dd => dd.MajorVersion).ThenByDescending(dd => dd.MinorVersion).ThenByDescending(dd => dd.BuildVersion).ThenByDescending(dd => dd.RevisionVersion).Select(dd => dd.ID).FirstOrDefault() select new { DocumentID = doc.ID, DocumentKind = doc.Kind, DocumentFileName = doc.FileName, ResponseID = rd.ResponseID, RevisionSetID = rd.RevisionSetID, RequestDataMartID = rsp.RequestDataMartID, DataMartID = rdm.DataMartID, DataPartnerIdentifier = dm.DataPartnerIdentifier, DataMart = dm.Name }; var documents = await(q).ToArrayAsync(); // further filtering based on if a output filelist document was included needs to be done. 
Only the files indicated should be passed on to the analysis center foreach (var dpDocuments in documents.GroupBy(k => k.RequestDataMartID)) { var filelistDocument = dpDocuments.Where(d => !string.IsNullOrEmpty(d.DocumentKind) && string.Equals("DistributedRegression.FileList", d.DocumentKind, StringComparison.OrdinalIgnoreCase)).FirstOrDefault(); if (filelistDocument != null) { //only include the files indicated in the filelist document using (var ds = new Lpp.Dns.Data.Documents.DocumentStream(DB, filelistDocument.DocumentID)) using (var reader = new System.IO.StreamReader(ds)) { //read the header line reader.ReadLine(); string line, filename; bool includeInDistribution = false; while (!reader.EndOfStream) { line = reader.ReadLine(); string[] split = line.Split(','); if (split.Length > 0) { filename = split[0].Trim(); if (split.Length > 1) { includeInDistribution = string.Equals(split[1].Trim(), "1"); } else { includeInDistribution = false; } if (includeInDistribution == false) { continue; } if (!string.IsNullOrEmpty(filename)) { Guid?revisionSetID = dpDocuments.Where(d => string.Equals(d.DocumentFileName, filename, StringComparison.OrdinalIgnoreCase)).Select(d => d.RevisionSetID).FirstOrDefault(); if (revisionSetID.HasValue) { DB.RequestDocuments.AddRange(dpDocuments.Where(d => d.RevisionSetID == revisionSetID.Value).Select(d => new RequestDocument { DocumentType = RequestDocumentType.Input, ResponseID = analysisCenterResponse.ID, RevisionSetID = d.RevisionSetID }).ToArray()); manifestItems.AddRange(dpDocuments.Where(d => d.RevisionSetID == revisionSetID.Value).Select(d => new DTO.QueryComposer.DistributedRegressionAnalysisCenterManifestItem { DocumentID = d.DocumentID, DataMart = d.DataMart, DataMartID = d.DataMartID, DataPartnerIdentifier = d.DataPartnerIdentifier, RequestDataMartID = d.RequestDataMartID, ResponseID = d.ResponseID, RevisionSetID = d.RevisionSetID }).ToArray()); } } } } reader.Close(); } } else { var inputDocuments = dpDocuments.Where(d => d.DocumentKind != "DistributedRegression.AdapterEventLog" && d.DocumentKind != "DistributedRegression.TrackingTable"); if (inputDocuments.Count() > 0) { DB.RequestDocuments.AddRange(inputDocuments.Select(d => new RequestDocument { DocumentType = RequestDocumentType.Input, ResponseID = analysisCenterResponse.ID, RevisionSetID = d.RevisionSetID }).ToArray()); manifestItems.AddRange(inputDocuments.Select(d => new DTO.QueryComposer.DistributedRegressionAnalysisCenterManifestItem { DocumentID = d.DocumentID, DataMart = d.DataMart, DataMartID = d.DataMartID, DataPartnerIdentifier = d.DataPartnerIdentifier, RequestDataMartID = d.RequestDataMartID, ResponseID = d.ResponseID, RevisionSetID = d.RevisionSetID }).ToArray()); } } } //serialize the manifest of dataparter documents to the analysis center byte[] buffer; using (var ms = new System.IO.MemoryStream()) using (var sw = new System.IO.StreamWriter(ms)) using (var jw = new Newtonsoft.Json.JsonTextWriter(sw)) { Newtonsoft.Json.JsonSerializer serializer = new Newtonsoft.Json.JsonSerializer(); serializer.Serialize(jw, manifestItems); jw.Flush(); buffer = ms.ToArray(); } //create and add the manifest file Document analysisCenterManifest = DB.Documents.Add(new Document { Description = "Contains information about the input documents and the datamart they came from.", Name = "Internal: Analysis Center Manifest", FileName = "manifest.json", ItemID = task.ID, Kind = DocumentKind.SystemGeneratedNoLog, UploadedByID = IdentityID, Viewable = false, MimeType = "application/json", Length = buffer.Length }); 
analysisCenterManifest.RevisionSetID = analysisCenterManifest.ID; //TODO:determine if there is a parent document to make the manifest a revision of. If there is update the revisionset id, and version numbers //chances are there should not be unless this is a resubmit for the same task var allTasks = await DB.ActionReferences.Where(tr => tr.ItemID == reqDM.Request.ID && tr.Type == DTO.Enums.TaskItemTypes.Request && tr.Task.Type == DTO.Enums.TaskTypes.Task ) .Select(tr => tr.Task.ID).ToArrayAsync(); var attachments = await(from doc in DB.Documents.AsNoTracking() join x in ( DB.Documents.Where(dd => allTasks.Contains(dd.ItemID)) .GroupBy(k => k.RevisionSetID) .Select(k => k.OrderByDescending(d => d.MajorVersion).ThenByDescending(d => d.MinorVersion).ThenByDescending(d => d.BuildVersion).ThenByDescending(d => d.RevisionVersion).Select(y => y.ID).Distinct().FirstOrDefault()) ) on doc.ID equals x where allTasks.Contains(doc.ItemID) && doc.Kind == "Attachment.Input" orderby doc.ItemID descending, doc.RevisionSetID descending, doc.CreatedOn descending select doc).ToArrayAsync(); DB.RequestDocuments.Add(new RequestDocument { DocumentType = RequestDocumentType.Input, ResponseID = analysisCenterResponse.ID, RevisionSetID = analysisCenterManifest.RevisionSetID.Value }); foreach (var attachment in attachments) { DB.RequestDocuments.Add(new RequestDocument { RevisionSetID = attachment.RevisionSetID.Value, ResponseID = analysisCenterResponse.ID, DocumentType = DTO.Enums.RequestDocumentType.AttachmentInput }); } await DB.SaveChangesAsync(); await DB.Entry(analysisCenterRouting).ReloadAsync(); //post the manifest content analysisCenterManifest.SetData(DB, buffer); //submit the routing analysisCenterRouting.Status = analysisCenterResponse.Count > 1 ? RoutingStatus.Resubmitted : RoutingStatus.Submitted; await DB.SaveChangesAsync(); //Check for Job Fail trigger, and update Data Partner Routing to Failed. //Request status will not be affected. if (documents.Any(d => d.DocumentKind == JobFailTriggerFileKind)) { //Reload the entity before updating the status, else EF will report an exception. await DB.Entry(reqDM).ReloadAsync(); reqDM.Status = RoutingStatus.Failed; await DB.SaveChangesAsync(); } //change the status of the request to conducting analysis //manually override the request status using sql direct, EF does not allow update of computed await DB.Database.ExecuteSqlCommandAsync("UPDATE Requests SET Status = @status WHERE ID = @ID", new System.Data.SqlClient.SqlParameter("@status", (int)RequestStatuses.ConductingAnalysis), new System.Data.SqlClient.SqlParameter("@ID", reqDM.Request.ID)); await DB.Entry(reqDM.Request).ReloadAsync(); }
/// <summary>
/// Streams the structural metadata.
/// </summary>
/// <param name="stream">The stream.</param>
/// <param name="streamController">The stream controller.</param>
/// <param name="encoding">The response encoding.</param>
public static void StreamJson(Stream stream, IStreamController<Newtonsoft.Json.JsonTextWriter> streamController, Encoding encoding)
{
    try
    {
        using (var writer = new Newtonsoft.Json.JsonTextWriter(new StreamWriter(stream, encoding)))
        {
            streamController.StreamTo(writer, new Queue<Action>());
            writer.Flush();
        }
    }
    catch (Exception e)
    {
        _logger.Error(e.Message, e);
        throw _builder.Build(e);
    }
}
public override async Task Start(string comment)
{
    var task = await PmnTask.GetActiveTaskForRequestActivityAsync(_entity.ID, ID, db);
    if (task == null)
    {
        task = db.Actions.Add(PmnTask.CreateForWorkflowActivity(_entity.ID, ID, _workflow.ID, db, CustomTaskSubject));
        await db.SaveChangesAsync();

        var analysisCenterRouting = await db.RequestDataMarts.Include(rdm => rdm.Responses).Where(rdm => rdm.RequestID == _entity.ID && rdm.RoutingType == RoutingType.AnalysisCenter).FirstOrDefaultAsync();

        Lpp.Dns.Data.Response analysisCenterResponse = null;
        if (analysisCenterRouting.Status == RoutingStatus.Draft && analysisCenterRouting.Responses.Count == 1 && analysisCenterRouting.Responses.Where(rsp => rsp.ResponseTime.HasValue == false).Any())
        {
            //If the initial status of the routing is draft and there is only a single response, assume this is the first time
            //hitting the analysis center: use the existing response to submit to the analysis center.
            analysisCenterResponse = analysisCenterRouting.Responses.First();
        }
        else if (analysisCenterRouting.Status != RoutingStatus.Draft)
        {
            analysisCenterRouting.Status = RoutingStatus.Draft;
        }

        if (analysisCenterResponse == null)
        {
            analysisCenterResponse = analysisCenterRouting.AddResponse(_workflow.Identity.ID);
        }

        if (db.Entry(task).Collection(t => t.References).IsLoaded == false)
        {
            await db.Entry(task).Collection(t => t.References).LoadAsync();
        }

        if (task.References.Any(tr => tr.ItemID == analysisCenterResponse.ID) == false)
        {
            //add a reference to the response to be able to link the task to the iteration
            task.References.Add(new TaskReference { ItemID = analysisCenterResponse.ID, TaskID = task.ID, Type = TaskItemTypes.Response });
        }

        //use all the data partner output documents as the input documents for the analysis center routing,
        //and build a manifest recording where the documents are coming from
        List<Lpp.Dns.DTO.QueryComposer.DistributedRegressionAnalysisCenterManifestItem> manifestItems = new List<DTO.QueryComposer.DistributedRegressionAnalysisCenterManifestItem>();

        var documents = await (from rd in db.RequestDocuments
                               join rsp in db.Responses on rd.ResponseID equals rsp.ID
                               join rdm in db.RequestDataMarts on rsp.RequestDataMartID equals rdm.ID
                               join dm in db.DataMarts on rdm.DataMartID equals dm.ID
                               join doc in db.Documents on rd.RevisionSetID equals doc.RevisionSetID
                               where rsp.Count == rsp.RequestDataMart.Responses.Max(r => r.Count)
                               && rdm.RequestID == _entity.ID
                               && rd.DocumentType == RequestDocumentType.Output
                               && rdm.RoutingType == RoutingType.DataPartner
                               && doc.ID == db.Documents.Where(dd => dd.RevisionSetID == doc.RevisionSetID).OrderByDescending(dd => dd.MajorVersion).ThenByDescending(dd => dd.MinorVersion).ThenByDescending(dd => dd.BuildVersion).ThenByDescending(dd => dd.RevisionVersion).Select(dd => dd.ID).FirstOrDefault()
                               select new
                               {
                                   DocumentID = doc.ID,
                                   DocumentKind = doc.Kind,
                                   DocumentFileName = doc.FileName,
                                   ResponseID = rd.ResponseID,
                                   RevisionSetID = rd.RevisionSetID,
                                   RequestDataMartID = rsp.RequestDataMartID,
                                   DataMartID = rdm.DataMartID,
                                   DataPartnerIdentifier = dm.DataPartnerIdentifier,
                                   DataMart = dm.Name
                               }).ToArrayAsync();

        //Further filtering is needed when an output file-list document was included:
        //only the files indicated there should be passed on to the analysis center.
        foreach (var dpDocuments in documents.GroupBy(k => k.RequestDataMartID))
        {
            var filelistDocument = dpDocuments.Where(d => !string.IsNullOrEmpty(d.DocumentKind) && string.Equals("DistributedRegression.FileList", d.DocumentKind, StringComparison.OrdinalIgnoreCase)).FirstOrDefault();
            if (filelistDocument != null)
            {
                //only include the files indicated in the file-list document
                using (var ds = new Lpp.Dns.Data.Documents.DocumentStream(db, filelistDocument.DocumentID))
                using (var reader = new System.IO.StreamReader(ds))
                {
                    //read the header line
                    reader.ReadLine();

                    string line, filename;
                    bool includeInDistribution = false;
                    while (!reader.EndOfStream)
                    {
                        line = reader.ReadLine();
                        string[] split = line.Split(',');
                        if (split.Length > 0)
                        {
                            filename = split[0].Trim();
                            if (split.Length > 1)
                            {
                                includeInDistribution = string.Equals(split[1].Trim(), "1");
                            }
                            else
                            {
                                includeInDistribution = false;
                            }

                            if (includeInDistribution == false)
                            {
                                continue;
                            }

                            if (!string.IsNullOrEmpty(filename))
                            {
                                Guid? revisionSetID = dpDocuments.Where(d => string.Equals(d.DocumentFileName, filename, StringComparison.OrdinalIgnoreCase)).Select(d => d.RevisionSetID).FirstOrDefault();
                                if (revisionSetID.HasValue)
                                {
                                    db.RequestDocuments.AddRange(dpDocuments.Where(d => d.RevisionSetID == revisionSetID.Value).Select(d => new RequestDocument { DocumentType = RequestDocumentType.Input, ResponseID = analysisCenterResponse.ID, RevisionSetID = d.RevisionSetID }).ToArray());
                                    manifestItems.AddRange(dpDocuments.Where(d => d.RevisionSetID == revisionSetID.Value).Select(d => new DTO.QueryComposer.DistributedRegressionAnalysisCenterManifestItem { DocumentID = d.DocumentID, DataMart = d.DataMart, DataMartID = d.DataMartID, DataPartnerIdentifier = d.DataPartnerIdentifier, RequestDataMartID = d.RequestDataMartID, ResponseID = d.ResponseID, RevisionSetID = d.RevisionSetID }).ToArray());
                                }
                            }
                        }
                    }

                    reader.Close();
                }
            }
            else
            {
                db.RequestDocuments.AddRange(dpDocuments.Select(d => new RequestDocument { DocumentType = RequestDocumentType.Input, ResponseID = analysisCenterResponse.ID, RevisionSetID = d.RevisionSetID }).ToArray());
                manifestItems.AddRange(dpDocuments.Select(d => new DTO.QueryComposer.DistributedRegressionAnalysisCenterManifestItem { DocumentID = d.DocumentID, DataMart = d.DataMart, DataMartID = d.DataMartID, DataPartnerIdentifier = d.DataPartnerIdentifier, RequestDataMartID = d.RequestDataMartID, ResponseID = d.ResponseID, RevisionSetID = d.RevisionSetID }).ToArray());
            }
        }

        //serialize the manifest of data partner documents for the analysis center
        byte[] buffer;
        using (var ms = new System.IO.MemoryStream())
        using (var sw = new System.IO.StreamWriter(ms))
        using (var jw = new Newtonsoft.Json.JsonTextWriter(sw))
        {
            Newtonsoft.Json.JsonSerializer serializer = new Newtonsoft.Json.JsonSerializer();
            serializer.Serialize(jw, manifestItems);
            jw.Flush();
            buffer = ms.ToArray();
        }

        //create and add the manifest file
        Document analysisCenterManifest = db.Documents.Add(new Document
        {
            Description = "Contains information about the input documents and the datamart they came from.",
            Name = "Internal: Analysis Center Manifest",
            FileName = "manifest.json",
            ItemID = task.ID,
            Kind = DocumentKind.SystemGeneratedNoLog,
            UploadedByID = _workflow.Identity.ID,
            Viewable = false,
            MimeType = "application/json",
            Length = buffer.Length
        });
        analysisCenterManifest.RevisionSetID = analysisCenterManifest.ID;
        db.RequestDocuments.Add(new RequestDocument { DocumentType = RequestDocumentType.Input, ResponseID = analysisCenterResponse.ID, RevisionSetID = analysisCenterManifest.RevisionSetID.Value });
        await db.SaveChangesAsync();
        await db.Entry(analysisCenterRouting).ReloadAsync();

        //post the manifest content
        analysisCenterManifest.SetData(db, buffer);

        //submit the routing
        analysisCenterRouting.Status = analysisCenterResponse.Count > 1 ? RoutingStatus.Resubmitted : RoutingStatus.Submitted;
        await db.SaveChangesAsync();

        //change the status of the request to conducting analysis:
        //manually override the request status with direct SQL, since EF does not allow updating a computed column
        await db.Database.ExecuteSqlCommandAsync("UPDATE Requests SET Status = @status WHERE ID = @ID",
            new System.Data.SqlClient.SqlParameter("@status", (int)RequestStatuses.ConductingAnalysis),
            new System.Data.SqlClient.SqlParameter("@ID", _entity.ID));
        await db.Entry(_entity).ReloadAsync();
    }

    if (!string.IsNullOrWhiteSpace(comment))
    {
        var cmt = db.Comments.Add(new Comment { CreatedByID = _workflow.Identity.ID, ItemID = _entity.ID, Text = comment });
        db.CommentReferences.Add(new CommentReference { CommentID = cmt.ID, Type = DTO.Enums.CommentItemTypes.Task, ItemTitle = task.Subject, ItemID = task.ID });
        await db.SaveChangesAsync();
    }
}
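For reference, the file-list parsing in the method above expects a header line followed by comma-separated rows of the form filename,flag, where a flag of "1" marks a file for distribution; that convention is inferred from the loop itself. A standalone sketch of the same logic follows; the helper name and sample content are illustrative only.

// Hypothetical helper mirroring the file-list loop above: returns the file names flagged with "1".
static List<string> ReadIncludedFiles(System.IO.TextReader reader)
{
    reader.ReadLine(); // skip the header line
    var included = new List<string>();
    string line;
    while ((line = reader.ReadLine()) != null)
    {
        string[] split = line.Split(',');
        if (split.Length > 1 && split[0].Trim().Length > 0 && string.Equals(split[1].Trim(), "1"))
        {
            included.Add(split[0].Trim());
        }
    }
    return included;
}

// Example with made-up content: only estimates.csv would be forwarded.
// var files = ReadIncludedFiles(new System.IO.StringReader("filename,include\r\nestimates.csv,1\r\nlog.txt,0"));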
public override void ExecuteResult(System.Web.Mvc.ControllerContext context)
{
    context.HttpContext.Response.ContentType = "application/json";
    context.HttpContext.Response.ContentEncoding = System.Text.Encoding.UTF8;

    using (Newtonsoft.Json.JsonTextWriter jsonWriter = new Newtonsoft.Json.JsonTextWriter(context.HttpContext.Response.Output))
    {
        jsonWriter.Formatting = Newtonsoft.Json.Formatting.Indented;

        jsonWriter.WriteStartObject();
        jsonWriter.WritePropertyName("Tables");
        jsonWriter.WriteStartArray();

        using (System.Data.Common.DbConnection con = SQL.CreateConnection())
        {
            if (con.State != System.Data.ConnectionState.Open)
            {
                con.Open();
            }

            using (System.Data.Common.DbCommand cmd = this.GetCommand(con))
            using (System.Data.Common.DbDataReader dr = cmd.ExecuteReader(System.Data.CommandBehavior.SequentialAccess | System.Data.CommandBehavior.CloseConnection))
            {
                // One object per result set: column metadata followed by the rows.
                do
                {
                    jsonWriter.WriteStartObject();

                    jsonWriter.WritePropertyName("Columns");
                    jsonWriter.WriteStartArray();
                    for (int i = 0; i < dr.FieldCount; ++i)
                    {
                        jsonWriter.WriteStartObject();
                        jsonWriter.WritePropertyName("ColumnName");
                        jsonWriter.WriteValue(dr.GetName(i));
                        jsonWriter.WritePropertyName("FieldType");
                        jsonWriter.WriteValue(SQL.GetAssemblyQualifiedNoVersionName(dr.GetFieldType(i)));
                        jsonWriter.WriteEndObject();
                    } // Next i
                    jsonWriter.WriteEndArray();

                    jsonWriter.WritePropertyName("Rows");
                    jsonWriter.WriteStartArray();
                    if (dr.HasRows)
                    {
                        while (dr.Read())
                        {
                            // Each row is written directly as an array of values, never materialized in memory.
                            jsonWriter.WriteStartArray();
                            for (int i = 0; i < dr.FieldCount; ++i)
                            {
                                jsonWriter.WriteValue(dr.GetValue(i));
                            } // Next i
                            jsonWriter.WriteEndArray();
                        } // End while dr.Read()
                    } // End if (dr.HasRows)
                    jsonWriter.WriteEndArray();

                    jsonWriter.WriteEndObject();
                } while (dr.NextResult());
            } // End using cmd, dr

            if (con.State != System.Data.ConnectionState.Closed)
            {
                con.Close();
            }
        } // End using con

        jsonWriter.WriteEndArray();
        jsonWriter.WriteEndObject();
        jsonWriter.Flush();
    } // End using jsonWriter

    context.HttpContext.Response.Output.Flush();
    context.HttpContext.Response.OutputStream.Flush();
    context.HttpContext.Response.Flush();
} // End ExecuteResult
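A brief usage sketch: the snippet above does not show the ActionResult's class name or constructor, so the names below are placeholders. Returning such a result from a controller action streams each row straight to the response instead of buffering the whole result set in a DataSet.

public class ReportController : System.Web.Mvc.Controller
{
    public System.Web.Mvc.ActionResult LargeDataset()
    {
        // Hypothetical: LargeDatasetJsonResult wraps the ExecuteResult implementation shown above
        // and supplies SQL.CreateConnection()/GetCommand() with the query whose results should be streamed.
        return new LargeDatasetJsonResult("SELECT * FROM SomeLargeTable");
    }
}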