// Requires: System.Data.SqlClient, System.IO, System.Text, System.Text.RegularExpressions,
// System.Xml, System.Web (HttpUtility) and Newtonsoft.Json / Newtonsoft.Json.Linq.
private void BuildResults(SqlDataReader dr, int subqueryIndex = 0)
{
    // paging
    switch (_pagingDirection)
    {
        case DatabaseQueryPagingDirection.Top:
            _currentPage = 1;
            break;
        case DatabaseQueryPagingDirection.Next:
            _currentPage++;
            break;
        case DatabaseQueryPagingDirection.Previous:
            _currentPage--;
            if (_currentPage < 1) { _currentPage = 1; }
            break;
    }

    int startread = (_currentPage * _pagingLimit) - _pagingLimit;
    int endread = startread + _pagingLimit;

    // build result node
    MemoryStream stream = new MemoryStream();
    XmlTextWriter w = new XmlTextWriter(stream, Encoding.UTF8);
    XmlNodeList fieldNodes;

    // begin writing the xml result
    w.WriteStartElement("result");

    try
    {
        // field definitions for the current result set
        if (subqueryIndex > 0)
        {
            fieldNodes = _querynode.SelectNodes(String.Format(@"subquery[{0}]/fields/field", subqueryIndex));
        }
        else
        {
            fieldNodes = _querynode.SelectNodes(@"fields/field");
        }

        // add rows to result node
        int currentrow = 0;
        _resultPages = getNewResultPages(_querynode);
        int pagecount = 1;
        int inpagecount = 0;

        while (dr.Read())
        {
            bool flginpage = false;
            if ((currentrow >= startread && currentrow < endread) || _pagingLimit == -1)
            {
                flginpage = true;
            }

            #region in page write results
            if (flginpage)
            {
                w.WriteStartElement("row");

                // if we come across JSON fields, they are translated to XML and added as children of the
                // row node; keep track of them and add them after all attributes have been processed
                List<string> innerXml = new List<string>();

                // attributes (fields)
                if (fieldNodes.Count > 0)
                {
                    foreach (XmlElement m in fieldNodes)
                    {
                        try
                        {
                            // json / tryjson fields: validate the value and emit it as an attribute
                            if (m.GetAttribute("type") == "json" || m.GetAttribute("type") == "tryjson")
                            {
                                try
                                {
                                    var val = dr[m.GetAttribute("name")].ToString();
                                    string json = null;
                                    if (string.IsNullOrEmpty(val))
                                    {
                                        json = "{}";
                                    }
                                    else
                                    {
                                        // parse only to validate; the original value is written out
                                        Newtonsoft.Json.Linq.JObject.Parse(val);
                                        json = val;
                                    }
                                    w.WriteAttributeString(m.GetAttribute("name"), json);
                                }
                                catch (IndexOutOfRangeException)
                                {
                                    // column not found: rethrow so the outer handler flags the field
                                    throw;
                                }
                                catch (Exception unhandled)
                                {
                                    w.WriteAttributeString(m.GetAttribute("name"), "{\"error\":\"" + unhandled.Message + "\"}");
                                }
                            }
                            else
                            {
                                string val = dr[m.GetAttribute("name")].ToString();
                                if (!(m.HasAttribute("encode") || m.HasAttribute("regx")))
                                {
                                    val = dr[m.GetAttribute("name")].ToString().Trim();
                                }
                                if (m.HasAttribute("encode"))
                                {
                                    // "encode" names the column whose value is URL-encoded
                                    val = System.Web.HttpUtility.UrlEncode(dr[m.GetAttribute("encode")].ToString().TrimEnd()).Replace("+", "%20");
                                }
                                if (m.HasAttribute("regx") && m.HasAttribute("replace") && m.HasAttribute("field"))
                                {
                                    // "field" names the column, "regx" the pattern, "replace" the replacement
                                    val = dr[m.GetAttribute("field")].ToString().Trim();
                                    val = Regex.Replace(val, m.GetAttribute("regx"), m.GetAttribute("replace"));
                                }
                                w.WriteAttributeString(m.GetAttribute("name"),
                                    String.IsNullOrEmpty(val) ? "" : safeXmlCharacters(val.Trim()));
                            }

                            // json fields are additionally converted to XML and kept for the row's inner xml
                            if (m.GetAttribute("type") == "json")
                            {
                                string xml = null;
                                try
                                {
                                    var details = dr[m.GetAttribute("name")].ToString();
                                    // wrap the value so the field name becomes the root element
                                    string jsonDetails = String.Format("{{\"{0}\":{1}}}", m.GetAttribute("name"), details);
                                    xml = JsonConvert.DeserializeXmlNode(jsonDetails).OuterXml;
                                    if (xml.StartsWith("<?"))
                                    {
                                        xml = xml.Substring(xml.IndexOf("?>") + 2);
                                    }
                                }
                                catch (JsonReaderException) { }
                                catch (JsonSerializationException) { }
                                finally
                                {
                                    if (xml != null) { innerXml.Add(xml); }
                                }
                            }
                        }
                        catch (IndexOutOfRangeException)
                        {
                            w.WriteAttributeString(m.GetAttribute("name"), "#field not found#");
                        }
                    }
                }
                else
                {
                    // no field definitions: emit every column as an attribute
                    for (int i = 0; i < dr.FieldCount; i++)
                    {
                        w.WriteAttributeString(dr.GetName(i), dr[i].ToString().Trim());
                    }
                }

                // add inner xml
                foreach (String s in innerXml)
                {
                    w.WriteRaw(s);
                }

                w.WriteEndElement();
            }
            #endregion

            _rowsaffected = currentrow++;
            inpagecount++;
            if (inpagecount >= _pagingLimit)
            {
                _resultPages.Add(new ResultPaging(pagecount++, inpagecount));
                inpagecount = 0;
            }
        }

        // record the last (partial) page for _resultPages
        if (inpagecount > 0)
        {
            _resultPages.Add(new ResultPaging(pagecount++, inpagecount));
        }

        _results = true;

        // reset a couple of paging items
        _currentPage = 1;
        _pagingDirection = DatabaseQueryPagingDirection.None;
    }
    catch (IndexOutOfRangeException ie)
    {
        string errmsg = string.Format("One or more invalid Fields or Parameters for QueryName: {0}", _querynode.Attributes["name"].InnerText);
        ProjectExceptionArgs args = new ProjectExceptionArgs(errmsg, "Database.cs", "BuildResults", null, SeverityLevel.Fatal, LogLevel.Event);
        throw new ProjectException(args, ie);
    }
    catch (SqlException se)
    {
        string errmsg = string.Format("ExecuteReader Error For QueryName: {0}", _querynode.Attributes["name"].InnerText);
        ProjectExceptionArgs args = new ProjectExceptionArgs(errmsg, "Database.cs", "BuildResults", null, SeverityLevel.Fatal, LogLevel.Event);
        throw new ProjectException(args, se);
    }

    // end result node
    w.WriteEndElement();
    w.Flush();

    // include sub results (stored procedure returns more than one result set)
    while (dr.NextResult())
    {
        subqueryIndex++;
        BuildResults(dr, subqueryIndex);
    }

    // add stream xml to return xml
    XmlDocument xmStreamObj = new XmlDocument();
    stream.Seek(0, SeekOrigin.Begin);
    xmStreamObj.Load(stream);
    pushToTree(xmStreamObj, subqueryIndex);
}
// Variant of BuildResults that handles only the "json" field type (no "tryjson"), writes field
// values without the safeXmlCharacters filter, and inlines the result-import step instead of
// calling pushToTree.
private void BuildResults(SqlDataReader dr, int subqueryIndex = 0)
{
    // paging
    switch (_pagingDirection)
    {
        case DatabaseQueryPagingDirection.Top:
            _currentPage = 1;
            break;
        case DatabaseQueryPagingDirection.Next:
            _currentPage++;
            break;
        case DatabaseQueryPagingDirection.Previous:
            _currentPage--;
            if (_currentPage < 1) { _currentPage = 1; }
            break;
    }

    int startread = (_currentPage * _pagingLimit) - _pagingLimit;
    int endread = startread + _pagingLimit;

    // build result node
    MemoryStream stream = new MemoryStream();
    XmlTextWriter w = new XmlTextWriter(stream, Encoding.UTF8);
    XmlNodeList fieldNodes;

    // begin writing the xml result
    w.WriteStartElement("result");

    try
    {
        // field definitions for the current result set
        if (subqueryIndex > 0)
        {
            fieldNodes = _querynode.SelectNodes(String.Format(@"subquery[{0}]/fields/field", subqueryIndex));
        }
        else
        {
            fieldNodes = _querynode.SelectNodes(@"fields/field");
        }

        // add rows to result node
        long currentrow = 0;
        _resultPages = getNewResultPages(_querynode);
        int pagecount = 1;
        int inpagecount = 0;

        while (dr.Read())
        {
            bool flginpage = false;
            if ((currentrow >= startread && currentrow < endread) || _pagingLimit == -1)
            {
                flginpage = true;
            }

            #region in page write results
            if (flginpage)
            {
                w.WriteStartElement("row");

                // if we come across JSON fields, they are translated to XML and added as children of the
                // row node; keep track of them and add them after all attributes have been processed
                List<string> innerXml = new List<string>();

                // attributes (fields)
                if (fieldNodes.Count > 0)
                {
                    foreach (XmlElement m in fieldNodes)
                    {
                        try
                        {
                            // json fields: validate the value and emit it as an attribute
                            if (m.GetAttribute("type") == "json")
                            {
                                try
                                {
                                    var val = dr[m.GetAttribute("name")].ToString();
                                    string json = null;
                                    if (string.IsNullOrEmpty(val))
                                    {
                                        json = "{}";
                                    }
                                    else
                                    {
                                        // parse only to validate; the original value is written out
                                        Newtonsoft.Json.Linq.JObject.Parse(val);
                                        json = val;
                                    }
                                    w.WriteAttributeString(m.GetAttribute("name"), json);
                                }
                                catch (IndexOutOfRangeException)
                                {
                                    // column not found: rethrow so the outer handler flags the field
                                    throw;
                                }
                                catch (Exception unhandled)
                                {
                                    w.WriteAttributeString(m.GetAttribute("name"), "{\"error\":\"" + unhandled.Message + "\"}");
                                }
                            }
                            else
                            {
                                string val = null;
                                if (!(m.HasAttribute("encode") || m.HasAttribute("regx")))
                                {
                                    val = dr[m.GetAttribute("name")].ToString().Trim();
                                }
                                if (m.HasAttribute("encode"))
                                {
                                    // "encode" names the column whose value is URL-encoded
                                    val = System.Web.HttpUtility.UrlEncode(dr[m.GetAttribute("encode")].ToString().TrimEnd()).Replace("+", "%20");
                                }
                                if (m.HasAttribute("regx") && m.HasAttribute("replace") && m.HasAttribute("field"))
                                {
                                    // "field" names the column, "regx" the pattern, "replace" the replacement
                                    val = dr[m.GetAttribute("field")].ToString().Trim();
                                    val = Regex.Replace(val, m.GetAttribute("regx"), m.GetAttribute("replace"));
                                }
                                w.WriteAttributeString(m.GetAttribute("name"),
                                    String.IsNullOrEmpty(val) ? "" : val.Trim());
                            }

                            // json fields are additionally converted to XML and kept for the row's inner xml
                            if (m.GetAttribute("type") == "json")
                            {
                                string xml = null;
                                try
                                {
                                    var details = dr[m.GetAttribute("name")].ToString();
                                    // wrap the value so the field name becomes the root element
                                    string jsonDetails = String.Format("{{\"{0}\":{1}}}", m.GetAttribute("name"), details);
                                    xml = JsonConvert.DeserializeXmlNode(jsonDetails).OuterXml;
                                    if (xml.StartsWith("<?"))
                                    {
                                        xml = xml.Substring(xml.IndexOf("?>") + 2);
                                    }
                                }
                                catch (JsonReaderException) { }
                                catch (JsonSerializationException) { }
                                finally
                                {
                                    if (xml != null) { innerXml.Add(xml); }
                                }
                            }
                        }
                        catch (IndexOutOfRangeException)
                        {
                            w.WriteAttributeString(m.GetAttribute("name"), "#field not found#");
                        }
                    }
                }
                else
                {
                    // no field definitions: emit every column as an attribute
                    for (int i = 0; i < dr.FieldCount; i++)
                    {
                        w.WriteAttributeString(dr.GetName(i), dr[i].ToString().Trim());
                    }
                }

                // add inner xml
                foreach (String s in innerXml)
                {
                    w.WriteRaw(s);
                }

                w.WriteEndElement();
            }
            #endregion

            _rowsaffected = (Int32)currentrow++;
            inpagecount++;
            if (inpagecount >= _pagingLimit)
            {
                _resultPages.Add(new ResultPaging(pagecount++, inpagecount));
                inpagecount = 0;
            }
        }

        // record the last (partial) page for _resultPages
        if (inpagecount > 0)
        {
            _resultPages.Add(new ResultPaging(pagecount++, inpagecount));
        }

        _results = true;

        // reset a couple of paging items
        _currentPage = 1;
        _pagingDirection = DatabaseQueryPagingDirection.None;
    }
    catch (IndexOutOfRangeException ie)
    {
        string errmsg = string.Format("One or more invalid Fields or Parameters for QueryName: {0}", _querynode.Attributes["name"].InnerText);
        ProjectExceptionArgs args = new ProjectExceptionArgs(errmsg, "Database.cs", "BuildResults", null, SeverityLevel.Fatal, LogLevel.Event);
        throw new ProjectException(args, ie);
    }
    catch (SqlException se)
    {
        string errmsg = string.Format("ExecuteReader Error For QueryName: {0}", _querynode.Attributes["name"].InnerText);
        ProjectExceptionArgs args = new ProjectExceptionArgs(errmsg, "Database.cs", "BuildResults", null, SeverityLevel.Fatal, LogLevel.Event);
        throw new ProjectException(args, se);
    }

    // end result node
    w.WriteEndElement();
    w.Flush();

    // include sub results (stored procedure returns more than one result set)
    while (dr.NextResult())
    {
        subqueryIndex++;
        BuildResults(dr, subqueryIndex);
    }

    // add stream xml to return xml
    XmlDocument xmStreamObj = new XmlDocument();
    stream.Seek(0, SeekOrigin.Begin);
    xmStreamObj.Load(stream);

    // import the result xml into the original xml document
    XmlNode import = _xmresult.ImportNode(xmStreamObj.DocumentElement, true);
    XmlNode elm;
    if (subqueryIndex > 0 && _xmresult.SelectSingleNode("results/subquery") == null)
    {
        // first additional result set: create the <subquery> container under <results>
        elm = _xmresult.SelectSingleNode("results").AppendChild(_xmresult.CreateElement("subquery"));
    }
    else
    {
        elm = _xmresult.SelectSingleNode("results");
    }
    elm.AppendChild(import);
}
// Records the paging direction to apply the next time the query is executed.
public void PageMove(DatabaseQueryPagingDirection pagingDirection)
{
    _pagingDirection = pagingDirection;
}
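// Usage sketch (hypothetical caller; ExecuteQuery is the assumed helper sketched above).
// PageMove only records the requested direction; BuildResults applies it on the next execution
// and then resets _pagingDirection to None, so the direction must be set before every page request.
public void NextPage(SqlCommand cmd)
{
    PageMove(DatabaseQueryPagingDirection.Next); // request the next page
    ExecuteQuery(cmd);                           // re-run the query so BuildResults pages forward
}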