/// <summary>
/// Loads correction points from a CSV file into the supplied DataTable.
/// Each data row must contain two integer columns followed by four double columns;
/// the table is cleared first and must already define (at least) six compatible columns.
/// </summary>
/// <param name="fullFileName">Full path of the CSV file to read.</param>
/// <param name="dt">Target table; cleared and refilled, then returned for convenience.</param>
/// <returns>The populated <paramref name="dt"/>.</returns>
public DataTable LoadCorrectionPointsFromCSV(string fullFileName, DataTable dt)
{
    dt.Clear();
    using (StreamReader sr = new StreamReader(fullFileName))
    {
        // ReadToEnd consumes the whole stream, so the EndOfStream loop was redundant.
        string fullText = sr.ReadToEnd();
        string[] rows = fullText.Split('\n');
        foreach (string raw in rows)
        {
            string line = raw.TrimEnd('\r'); // tolerate CRLF line endings
            // Skip blank lines. The original always dropped the last split entry,
            // which lost the final data row whenever the file had no trailing newline.
            if (line.Length == 0) continue;

            string[] rowValues = line.Split(',');
            DataRow dr = dt.NewRow();
            dr[0] = Convert.ToInt32(rowValues[0]);
            dr[1] = Convert.ToInt32(rowValues[1]);
            // NOTE(review): Convert.ToDouble uses the current culture — confirm the CSV
            // always uses the same decimal separator as the host machine.
            dr[2] = Convert.ToDouble(rowValues[2]);
            dr[3] = Convert.ToDouble(rowValues[3]);
            dr[4] = Convert.ToDouble(rowValues[4]);
            dr[5] = Convert.ToDouble(rowValues[5]);
            dt.Rows.Add(dr);
        }
    }
    return dt;
}
/// <summary>
/// Reads a semicolon-delimited CSV stream into a DataTable.
/// The first non-empty row supplies column headers; subsequent rows become data rows,
/// with any literal "&lt;br /&gt;" in a value replaced by a space.
/// </summary>
/// <param name="csvreader">Open reader positioned at the start of the CSV content.</param>
/// <returns>The populated table.</returns>
public static DataTable ReadCsvFile(StreamReader csvreader)
{
    DataTable dtCsv = new DataTable();
    // Single ReadToEnd; the EndOfStream loop in the original could never run twice.
    string fullText = csvreader.ReadToEnd();
    string[] rows = fullText.Split('\n');
    bool headerAdded = false;
    foreach (string raw in rows)
    {
        // CRLF files used to leave a stray '\r' in the last header/cell of each row.
        string line = raw.TrimEnd('\r');
        // Skip blanks; the original always dropped the last split entry, losing
        // the final data row when the file had no trailing newline.
        if (line.Length == 0) continue;

        string[] rowValues = line.Split(';'); // values are semicolon-separated (not comma)
        if (!headerAdded)
        {
            foreach (string header in rowValues)
                dtCsv.Columns.Add(header); // add headers
            headerAdded = true;
        }
        else
        {
            DataRow dr = dtCsv.NewRow();
            for (int k = 0; k < rowValues.Length; k++)
                dr[k] = rowValues[k].Replace("<br />", " ");
            dtCsv.Rows.Add(dr); // add data rows
        }
    }
    return dtCsv;
}
/// <summary>
/// Reads "Deal.csv" ('||'-delimited) into a DataTable, echoes it to the console,
/// and hands it to DataTableToJSON. The first line defines the columns and is also
/// stored as the first data row (so the headers appear in the row data as well).
/// </summary>
/// <returns>The populated table.</returns>
public static DataTable ReadFile()
{
    // DataTable keeps the CSV structured, which makes the JSON conversion easier.
    DataTable dtCsv = new DataTable();
    using (StreamReader sr = new StreamReader("Deal.csv"))
    {
        string fullText = sr.ReadToEnd();
        string[] rows = fullText.Split('\n');
        for (int i = 0; i < rows.Length; i++)
        {
            // Skip blank lines; the original turned the empty entry after the final
            // newline into a spurious one-cell data row.
            if (string.IsNullOrEmpty(rows[i])) continue;

            string[] rowValues = rows[i].Split("||");
            if (i == 0)
            {
                // Column count = number of values on the first line.
                foreach (string header in rowValues)
                    dtCsv.Columns.Add(header);
                // Header text is intentionally stored as the first data row too.
                // (Rows are created only after the columns exist.)
                DataRow dh = dtCsv.NewRow();
                for (int j = 0; j < rowValues.Length; j++)
                    dh[j] = rowValues[j];
                dtCsv.Rows.Add(dh);
            }
            else
            {
                DataRow dr = dtCsv.NewRow();
                for (int k = 0; k < rowValues.Length; k++)
                    dr[k] = rowValues[k];
                dtCsv.Rows.Add(dr); // data rows under the matching headers
            }
        }
    }
    // Echo the table to the console to eyeball correctness.
    foreach (DataRow row in dtCsv.Rows)
    {
        Console.WriteLine();
        for (int x = 0; x < dtCsv.Columns.Count; x++)
        {
            Console.Write(row[x].ToString() + " ");
        }
    }
    DataTableToJSON(dtCsv); // side effect: convert the table to JSON
    return dtCsv;
}
/// <summary>
/// Converts a CSV file into a DataTable; the first row supplies the column headers.
/// </summary>
/// <param name="fileSaveWithPath">Path to the CSV file; a null/empty path yields an empty table.</param>
/// <returns>The populated table.</returns>
/// <exception cref="InvalidDataException">Wraps any parsing or I/O failure.</exception>
public DataTable ReadCsvFile(string fileSaveWithPath)
{
    DataTable dtCsv = new DataTable();
    try
    {
        if (!String.IsNullOrEmpty(fileSaveWithPath))
        {
            using (StreamReader sr = new StreamReader(fileSaveWithPath))
            {
                // Read once and normalize CRLF so '\r' never leaks into cell values.
                string fullText = sr.ReadToEnd().Replace("\r", "");
                string[] rows = fullText.Split('\n');
                bool headerAdded = false;
                foreach (string line in rows)
                {
                    // Skip blanks; the original always dropped the last split entry,
                    // losing the final data row when the file had no trailing newline.
                    if (line.Length == 0) continue;

                    string[] rowValues = line.Split(',');
                    if (!headerAdded)
                    {
                        foreach (string header in rowValues)
                            dtCsv.Columns.Add(header); // add headers
                        headerAdded = true;
                    }
                    else
                    {
                        DataRow dr = dtCsv.NewRow();
                        // Bounded by the column count, as in the original, so extra
                        // trailing cells on a row are ignored rather than throwing.
                        for (int k = 0; k < dtCsv.Columns.Count; k++)
                            dr[k] = rowValues[k];
                        dtCsv.Rows.Add(dr);
                    }
                }
            }
        }
    }
    catch (Exception e)
    {
        // Preserve the root cause as InnerException; the original discarded it,
        // which made parse failures undiagnosable.
        throw new InvalidDataException("CSVFile content is not correct ", e);
    }
    return dtCsv;
}
// bind CSV upload
/// <summary>
/// Parses the CSV posted through the bulkupload control into a DataTable;
/// the first row supplies the column headers.
/// </summary>
/// <returns>The populated table (empty when no file was uploaded).</returns>
public DataTable ReadCsvFile()
{
    DataTable dtCsv = new DataTable();
    if (bulkupload.HasFile)
    {
        // bulkupload.SaveAs(FileSaveWithPath);
        using (StreamReader sr = new StreamReader(bulkupload.PostedFile.InputStream))
        {
            // BUG FIX: the original did Replace(",", " ") on the whole text and then
            // split each row on ',' — after the replace no commas remain, so every
            // row collapsed into a single value. Read the text unmodified.
            string fullText = sr.ReadToEnd();
            string[] rows = fullText.Split('\n');
            // The entry after the final newline is skipped, as before.
            for (int i = 0; i < rows.Length - 1; i++)
            {
                string[] rowValues = rows[i].Split(',');
                if (i == 0)
                {
                    for (int j = 0; j < rowValues.Length; j++)
                        dtCsv.Columns.Add(rowValues[j]); // add headers
                }
                else
                {
                    DataRow dr = dtCsv.NewRow();
                    for (int k = 0; k < rowValues.Length; k++)
                    {
                        // Only overwrite the cell default when the value is non-empty.
                        if (!string.IsNullOrEmpty(rowValues[k]))
                            dr[k] = rowValues[k];
                    }
                    dtCsv.Rows.Add(dr); // add data rows
                }
            }
        }
    }
    return dtCsv;
}
/// <summary>
/// Reads a CSV file into a DataTable and reports, via <paramref name="phoneCol"/>,
/// the last header that matches one of the configured filter columns (filterCols).
/// Data cells are passed through LeadsHelpers.ProcessNumber.
/// </summary>
/// <param name="path">Path of the CSV file.</param>
/// <param name="phoneCol">Receives the matching phone-column header, if any.</param>
/// <returns>The populated table.</returns>
private DataTable ReadCsvFile(string path, ref string phoneCol)
{
    DataTable dtCsv = new DataTable();
    using (StreamReader sr = new StreamReader(path))
    {
        string fullText = sr.ReadToEnd();
        string[] rows = fullText.Split('\n');
        // Parse the filter list once instead of re-splitting it for every header.
        List<string> filters = filterCols.Split(',').ToList();
        // The entry after the final newline is skipped, as before.
        for (int i = 0; i < rows.Length - 1; i++)
        {
            string[] rowValues = rows[i].Split(',');
            if (i == 0)
            {
                for (int j = 0; j < rowValues.Length; j++)
                {
                    string header = rowValues[j].Replace("\r", "");
                    dtCsv.Columns.Add(header); // add headers
                    // The original tested this identical condition twice ("a || a"),
                    // an apparent copy/paste slip; one check is equivalent.
                    if (filters.Contains(header))
                    {
                        phoneCol = header;
                    }
                }
            }
            else
            {
                DataRow dr = dtCsv.NewRow();
                for (int k = 0; k < rowValues.Length; k++)
                    dr[k] = LeadsHelpers.ProcessNumber(rowValues[k].Replace("\r", ""));
                dtCsv.Rows.Add(dr); // add data rows
            }
        }
    }
    return dtCsv;
}
/// <summary>
/// Saves the file posted through FileUploader to ~/Upload with a timestamped name,
/// then parses it into a DataTable. The first row supplies headers (spaces and CR
/// characters stripped from header names).
/// </summary>
/// <returns>The populated table (empty when no file was uploaded).</returns>
public DataTable ReadCsvFile()
{
    DataTable dtCsv = new DataTable();
    if (!FileUploader.HasFile)
        return dtCsv;

    string savedPath = Server.MapPath("\\Upload\\Import" + System.DateTime.Now.ToString("ddMMyyyy_hhmmss") + ".csv");
    FileUploader.SaveAs(savedPath);

    using (StreamReader reader = new StreamReader(savedPath))
    {
        while (!reader.EndOfStream)
        {
            string[] lines = reader.ReadToEnd().Split('\n');
            // Text after the final newline is not parsed.
            for (int lineNo = 0; lineNo < lines.Length - 1; lineNo++)
            {
                string[] cells = lines[lineNo].Split(',');
                if (lineNo == 0)
                {
                    // Header names are normalized: spaces and CR removed.
                    foreach (string cell in cells)
                        dtCsv.Columns.Add(cell.Replace(" ", "").Replace("\r", ""));
                }
                else
                {
                    DataRow dataRow = dtCsv.NewRow();
                    for (int c = 0; c < cells.Length; c++)
                        dataRow[c] = cells[c];
                    dtCsv.Rows.Add(dataRow);
                }
            }
        }
    }
    return dtCsv;
}
/// <summary>
/// Parses an uploaded transaction CSV into a list of TransectionDB records.
/// Expected columns per line: TransactionId, Amount (decimal), CurrencyCode,
/// Date in "dd/MM/yyyy HH:mm:ss" (invariant culture), Status.
/// </summary>
/// <param name="file">Path of the CSV file to read.</param>
/// <returns>The parsed list; null on any failure (ValidationMessage is set).</returns>
public List <TransectionDB> ReadCsvFile(string file)
{
    try
    {
        List <TransectionDB> tdbL = new List <TransectionDB>();
        if (FileUpload1.HasFile)
        {
            using (StreamReader sr = new StreamReader(file))
            {
                string[] rows = sr.ReadToEnd().Split('\n');
                // Iterate every line and skip blanks: the original stopped one entry
                // early, losing the final transaction when the file had no trailing
                // newline, and a line containing only "\r" slipped past its check.
                for (int i = 0; i < rows.Length; i++)
                {
                    string line = rows[i].Replace("\r", "");
                    if (line == "") continue;

                    string[] rowValues = line.Split(',');
                    TransectionDB tdb = new TransectionDB();
                    tdb.Id = Guid.NewGuid();
                    tdb.Transaction_Id = rowValues[0];
                    tdb.Amount = decimal.Parse(rowValues[1]);
                    tdb.Currency_Code = rowValues[2];
                    tdb.Transaction_Date = DateTime.ParseExact(rowValues[3], "dd/MM/yyyy HH:mm:ss", CultureInfo.InvariantCulture);
                    tdb.Status = rowValues[4];
                    tdbL.Add(tdb);
                }
            }
        }
        return tdbL;
    }
    catch (Exception ex)
    {
        ValidationMessage = "UnValidation format." + ex.Message;
        return null;
    }
}
/// <summary>
/// Loads the customer Data.csv from a hard-coded documents folder into a DataTable;
/// the first row supplies the column headers.
/// </summary>
/// <returns>The populated table.</returns>
public static DataTable ReadCsvFile()
{
    DataTable dtCsv = new DataTable();
    // NOTE(review): hard-coded per-user path; consider making this a parameter.
    string fileSaveWithPath = "C:\\Users\\heman\\Documents\\Customer Info\\Data.csv";
    using (StreamReader sr = new StreamReader(fileSaveWithPath))
    {
        string[] rows = sr.ReadToEnd().Split('\n');
        bool headerAdded = false;
        foreach (string raw in rows)
        {
            string line = raw.TrimEnd('\r'); // CRLF files left '\r' in the last cell
            // Skip blanks; the original always dropped the last split entry, losing
            // the final data row when the file had no trailing newline.
            if (line.Length == 0) continue;

            string[] rowValues = line.Split(',');
            if (!headerAdded)
            {
                foreach (string header in rowValues)
                    dtCsv.Columns.Add(header); // add headers
                headerAdded = true;
            }
            else
            {
                // NewRow never returns null, so the original null-check was dead code.
                DataRow dr = dtCsv.NewRow();
                for (int k = 0; k < rowValues.Length; k++)
                    dr[k] = rowValues[k];
                dtCsv.Rows.Add(dr); // add data rows
            }
        }
    }
    return dtCsv;
}
/// <summary>
/// Saves the file posted through FileUpload1 as "Employee.csv" and reads it into
/// a DataTable; the first row supplies the column headers.
/// </summary>
/// <returns>The populated table (empty when no file was uploaded).</returns>
public DataTable ReadCsvFile()
{
    DataTable dtCsv = new DataTable();
    if (FileUpload1.HasFile)
    {
        string savedPath = Server.MapPath("Employee.csv");
        FileUpload1.SaveAs(savedPath);
        using (StreamReader reader = new StreamReader(savedPath))
        {
            while (!reader.EndOfStream)
            {
                string[] lines = reader.ReadToEnd().Split('\n');
                // Text after the final newline is not parsed.
                for (int lineNo = 0; lineNo < lines.Length - 1; lineNo++)
                {
                    string[] cells = lines[lineNo].Split(',');
                    if (lineNo == 0)
                    {
                        foreach (string header in cells)
                            dtCsv.Columns.Add(header);
                    }
                    else
                    {
                        DataRow row = dtCsv.NewRow();
                        for (int c = 0; c < cells.Length; c++)
                            row[c] = cells[c];
                        dtCsv.Rows.Add(row);
                    }
                }
            }
        }
    }
    return dtCsv;
}
/// <summary>
/// Reads the CSV at <paramref name="FileName"/> into a DataTable; the first row
/// supplies the column headers. Parsing only runs when FileUpload1 reports a file.
/// </summary>
/// <param name="FileName">Path of the CSV file to read.</param>
/// <returns>The populated table (empty when no file was uploaded).</returns>
public DataTable ReadCsvFile(string FileName)
{
    DataTable dtCsv = new DataTable();
    // NOTE(review): the gate checks FileUpload1.HasFile but reads FileName —
    // confirm both refer to the same upload.
    if (!FileUpload1.HasFile)
        return dtCsv;

    using (StreamReader reader = new StreamReader(FileName))
    {
        while (!reader.EndOfStream)
        {
            string[] lines = reader.ReadToEnd().Split('\n');
            // Text after the final newline is not parsed.
            for (int lineNo = 0; lineNo < lines.Length - 1; lineNo++)
            {
                string[] cells = lines[lineNo].Split(',');
                if (lineNo == 0)
                {
                    foreach (string header in cells)
                        dtCsv.Columns.Add(header);
                }
                else
                {
                    DataRow row = dtCsv.NewRow();
                    for (int c = 0; c < cells.Length; c++)
                        row[c] = cells[c];
                    dtCsv.Rows.Add(row);
                }
            }
        }
    }
    return dtCsv;
}
/// <summary>
/// Reads {CurrentDirectory}/Data/{fileName} into a DataTable; the first row
/// supplies the column headers.
/// </summary>
/// <param name="fileName">File name under the Data folder; null/empty yields an empty table.</param>
/// <returns>The populated table.</returns>
private DataTable ReadCsvFile(string fileName)
{
    DataTable dtCsv = new DataTable();
    if (!string.IsNullOrEmpty(fileName))
    {
        // Path.Combine is portable; the original hand-built the path with "\\",
        // which only works on Windows.
        string fileSaveWithPath = Path.Combine(Directory.GetCurrentDirectory(), "Data", fileName);
        using (StreamReader sr = new StreamReader(fileSaveWithPath))
        {
            string[] rows = sr.ReadToEnd().Split('\n');
            bool headerAdded = false;
            foreach (string raw in rows)
            {
                string line = raw.TrimEnd('\r'); // tolerate CRLF line endings
                // Skip blanks; the original always dropped the last split entry,
                // losing the final data row when the file had no trailing newline.
                if (line.Length == 0) continue;

                string[] rowValues = line.Split(',');
                if (!headerAdded)
                {
                    foreach (string header in rowValues)
                        dtCsv.Columns.Add(header); // add headers
                    headerAdded = true;
                }
                else
                {
                    DataRow dr = dtCsv.NewRow();
                    for (int k = 0; k < rowValues.Length; k++)
                        dr[k] = rowValues[k];
                    dtCsv.Rows.Add(dr); // add data rows
                }
            }
        }
    }
    return dtCsv;
}
/// <summary>
/// Reads a semicolon-delimited book-list CSV into a DataTable; the first row
/// supplies the headers (Name, Author, Publisher, Series, Genre, PublishDate,
/// No, Cilt, RackNumber, Shelf — per the original file layout note).
/// </summary>
/// <param name="filepath">Path of the CSV file to read.</param>
/// <returns>The populated table.</returns>
public DataTable ReadCSVFile(string filepath)
{
    DataTable table = new DataTable();
    using (StreamReader reader = new StreamReader(filepath))
    {
        string[] rows = reader.ReadToEnd().Split('\n');
        bool headerAdded = false;
        foreach (string raw in rows)
        {
            string line = raw.TrimEnd('\r'); // CRLF files left '\r' in the last cell
            // Skip blanks; the original always dropped the last split entry, losing
            // the final data row when the file had no trailing newline.
            if (line.Length == 0) continue;

            // Values are semicolon-separated (the original comment claimed comma).
            string[] rowValues = line.Split(';');
            if (!headerAdded)
            {
                foreach (string header in rowValues)
                    table.Columns.Add(header); // add headers
                headerAdded = true;
            }
            else
            {
                DataRow dr = table.NewRow();
                for (int k = 0; k < rowValues.Length; k++)
                    dr[k] = rowValues[k];
                table.Rows.Add(dr); // add data rows
            }
        }
    }
    return table;
}
/// <summary>
/// Loads {assembly directory}/Datasets/{csvFileName} into a DataTable;
/// the first row supplies the column headers.
/// </summary>
/// <param name="csvFileName">File name of the dataset under the Datasets folder.</param>
/// <returns>The populated table.</returns>
private static DataTable GetDatasetFromCsv(string csvFileName)
{
    DataTable dtCsv = new DataTable();
    // Assembly.Location yields a plain file path; the obsolete CodeBase property
    // returned a "file:" URI that the original had to strip by hand.
    var path = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
    string fileSaveWithPath = Path.Combine(path, "Datasets", csvFileName);
    using (StreamReader sr = new StreamReader(fileSaveWithPath))
    {
        // Normalize CRLF once so '\r' never leaks into headers or cells.
        string fullText = sr.ReadToEnd().Replace("\r", "");
        string[] rows = fullText.Split('\n');
        bool headerAdded = false;
        foreach (string line in rows)
        {
            // Skip blanks; the original always dropped the last split entry, losing
            // the final data row when the file had no trailing newline.
            if (line.Length == 0) continue;

            string[] rowValues = line.Split(',');
            if (!headerAdded)
            {
                foreach (string header in rowValues)
                    dtCsv.Columns.Add(header); // add headers
                headerAdded = true;
            }
            else
            {
                DataRow dr = dtCsv.NewRow();
                for (int k = 0; k < rowValues.Length; k++)
                    dr[k] = rowValues[k];
                dtCsv.Rows.Add(dr); // add data rows
            }
        }
    }
    return dtCsv;
}
/// <summary>
/// Reads every line of the CSV at <paramref name="filePath"/> into a table whose
/// columns come from AddColumns(); blank rows are stripped by RemoveBlankRows()
/// afterwards. Any exception is swallowed and the table built so far is returned.
/// </summary>
/// <param name="filePath">Path of the CSV file to read.</param>
/// <returns>The populated (possibly partial) table.</returns>
public static DataTable ReadCsvFile(string filePath)
{
    DataTable dtCsv = AddColumns();
    try
    {
        using (StreamReader sr = new StreamReader(filePath))
        {
            while (!sr.EndOfStream)
            {
                string[] lines = sr.ReadToEnd().Split('\n');
                foreach (string line in lines)
                {
                    string[] cells = line.Split(',');
                    DataRow row = dtCsv.NewRow();
                    for (int c = 0; c < cells.Length; c++)
                        row[c] = cells[c].Replace("\r", "");
                    dtCsv.Rows.Add(row);
                }
            }
            sr.Close(); // redundant inside using; kept for parity
        }
        dtCsv = RemoveBlankRows(dtCsv);
    }
    catch (Exception ex)
    {
        // NOTE(review): failures are silently swallowed and a partial table is
        // returned — confirm this best-effort behaviour is intended; at minimum
        // consider logging ex.
    }
    return dtCsv;
}
/// <summary>
/// Read the Csv File
/// </summary>
/// <param name="sourceFile">sourceFile path of csv</param>
/// <returns>returns datatable; empty when the file does not exist</returns>
#region ReadCsvFile
public static DataTable ReadCsvFile(string sourceFile)
{
    DataTable dtCsv = new DataTable();
    try
    {
        if (File.Exists(sourceFile))
        {
            using (StreamReader sr = new StreamReader(File.OpenRead(sourceFile)))
            {
                // Drop blank/whitespace lines up front (handles trailing newline).
                string[] rows = sr.ReadToEnd().Split('\n')
                    .Where(x => !string.IsNullOrWhiteSpace(x))
                    .ToArray();
                for (int i = 0; i < rows.Length; i++)
                {
                    string[] rowValues = rows[i].Split(',');
                    if (i == 0)
                    {
                        for (int j = 0; j < rowValues.Length; j++)
                        {
                            dtCsv.Columns.Add(rowValues[j].Replace("\r", "")); // add headers
                        }
                    }
                    else
                    {
                        DataRow dr = dtCsv.NewRow();
                        for (int k = 0; k < rowValues.Length; k++)
                        {
                            dr[k] = rowValues[k];
                        }
                        dtCsv.Rows.Add(dr); // add data rows
                    }
                }
            }
        }
        else
        {
            Console.WriteLine("File Not Found");
        }
    }
    catch (FileNotFoundException ex)
    {
        Console.WriteLine(ex.Message);
        ErrorLog.LogError(ex);
        throw; // rethrow without resetting the stack trace (was: throw ex)
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        ErrorLog.LogError(ex);
        throw; // rethrow without resetting the stack trace (was: throw ex)
    }
    return dtCsv;
}
/// <summary>
/// Imports a spreadsheet into a DataTable. ".csv" files are parsed by hand
/// (comma-separated, first row = headers); any other extension is read through
/// the ACE OLE DB provider, after which the first sheet row is promoted to headers.
/// </summary>
/// <param name="filePath">Path to a .csv or Excel file.</param>
/// <returns>The populated table, or an empty table when OLE DB loading fails.</returns>
public static DataTable ImportDataTableFromExcel(string filePath)
{
    try
    {
        DataTable dt = new DataTable();
        if (Path.GetExtension(filePath) == ".csv")
        {
            string Fulltext;
            using (StreamReader sr = new StreamReader(filePath))
            {
                while (!sr.EndOfStream)
                {
                    Fulltext = sr.ReadToEnd().ToString(); //read full file text
                    string[] rows = Fulltext.Split('\n'); //split full file text into rows
                    // note: the last split entry (text after the final newline) is not parsed
                    for (int i = 0; i < rows.Count() - 1; i++)
                    {
                        string[] rowValues = rows[i].Split(','); //split each row with comma to get individual values
                        {
                            if (i == 0)
                            {
                                for (int j = 0; j < rowValues.Count(); j++)
                                {
                                    dt.Columns.Add(rowValues[j]); //add headers
                                }
                            }
                            else
                            {
                                DataRow dr = dt.NewRow();
                                for (int k = 0; k < rowValues.Count(); k++)
                                {
                                    dr[k] = rowValues[k].ToString();
                                }
                                dt.Rows.Add(dr); //add other rows
                            }
                        }
                    }
                }
            }
            return (dt);
        }
        else
        {
            string fullPath = Path.GetFullPath(filePath);
            // HDR=NO: the header row comes back as ordinary data and is promoted
            // to column headers by the helper call below.
            string connString = @"Provider=Microsoft.ACE.OLEDB.12.0;Data Source=" + filePath + ";Extended Properties='Excel 12.0;HDR=NO;IMEX=1;TypeGuessRows=0;ImportMixedTypes=Text';";
            string sql = @"SELECT * FROM [sheet1$]"; // always reads the sheet named "sheet1"
            using (OleDbDataAdapter dataAdapter = new OleDbDataAdapter(sql, connString))
            {
                dataAdapter.Fill(dt);
            }
            dt = BuildHeadersFromFirstRowThenRemoveFirstRow(dt);
            return (dt);
        }
    }
    catch (InvalidOperationException ioe)
    {
        // Message is Korean for "Unable to load the Excel file."
        MessageBox.Show("Excel 파일을 불러올 수 없습니다.\n" + ioe.Message);
        return (new DataTable());
    }
}
/// <summary>
/// Reads a product-export CSV into a DataTable using a quote-aware comma splitter.
/// The header row is trimmed of quotes/spaces and extended with bookkeeping columns
/// (old standard price, sync state/date, published flag). Data rows are filtered:
/// rows failing the type / sale_ok / special-product checks below are skipped.
/// </summary>
/// <param name="filePath">Path of the CSV file; a missing file yields an empty table.</param>
/// <returns>The filtered product table.</returns>
public DataTable ReadCsvFile(string filePath)
{
    // Lookahead regex: split only on commas that sit outside double-quoted fields.
    Regex CSVParser = new Regex(",(?=(?:[^\"]*\"[^\"]*\")*(?![^\"]*\"))");
    DataTable dtCsv = new DataTable();
    string Fulltext;
    if (File.Exists(filePath))
    {
        string FileSaveWithPath = filePath;
        using (StreamReader sr = new StreamReader(FileSaveWithPath))
        {
            while (!sr.EndOfStream)
            {
                Fulltext = sr.ReadToEnd().ToString(); //read full file text
                string[] rows = Fulltext.Split(new string[] { "\r\n" }, StringSplitOptions.None); //split full file text into rows
                // note: the last split entry (text after the final CRLF) is not parsed
                for (int i = 0; i < rows.Count() - 1; i++)
                {
                    string[] rowValues = CSVParser.Split(rows[i]); //quote-aware split of each row
                    {
                        if (i == 0)
                        {
                            for (int j = 0; j < rowValues.Count(); j++)
                            {
                                string token = rowValues[j].Trim(' ', '"'); // strip surrounding spaces/quotes
                                dtCsv.Columns.Add(token); //add headers
                                //Columns: ProductVariant.DESCRIPTION,"lst_price","id","name",ProductVariant.STANDARD_PRICE
                            }
                            // Bookkeeping columns, added only when the CSV did not provide them.
                            if (!dtCsv.Columns.Contains(Product.OLD_STANDARD_PRICE))
                            {
                                dtCsv.Columns.Add(Product.OLD_STANDARD_PRICE); //old cost (price)
                            }
                            if (!dtCsv.Columns.Contains(Product.SYNC_STATE))
                            {
                                dtCsv.Columns.Add(Product.SYNC_STATE);
                            }
                            if (!dtCsv.Columns.Contains(Product.SYNC_DATE))
                            {
                                dtCsv.Columns.Add(Product.SYNC_DATE);
                            }
                            if (!dtCsv.Columns.Contains(Product.IS_PUBLISHED))
                            {
                                dtCsv.Columns.Add(Product.IS_PUBLISHED);
                            }
                        }
                        else
                        {
                            DataRow dr = dtCsv.NewRow();
                            for (int k = 0; k < rowValues.Count(); k++)
                            {
                                string token = rowValues[k].Trim(' ', '"'); // strip surrounding spaces/quotes
                                dr[k] = token;
                            }
                            //dr[ProductVariant.OLD_STANDARD_PRICE] = dr[ProductVariant.STANDARD_PRICE];
                            //dr[ProductVariant.STANDARD_PRICE] = null;
                            // skip product type "Service" which is not ok to sale
                            // skip not consumable
                            // skip "is_special_product"
                            // Each check only applies when its column exists in the CSV.
                            if (
                                (dtCsv.Columns.Contains(Product.TYPE) && dr[Product.TYPE].ToString() != Product.CONSUMABLE) ||
                                (dtCsv.Columns.Contains(Product.SALE_OK) && dr[Product.SALE_OK].ToString().ToUpper() != Product.TRUE) ||
                                (dtCsv.Columns.Contains(Product.IS_SPECIAL_PRODUCT) && dr[Product.IS_SPECIAL_PRODUCT].ToString().ToUpper() == Product.TRUE)
                                )
                            {
                                continue; // filtered out — this product is not imported
                            }
                            dtCsv.Rows.Add(dr); //add other rows
                        }
                    }
                }
            }
        }
    }
    return (dtCsv);
}
/// <summary>
/// Builds a JSON array from the first three '||'-delimited lines of the input file
/// (row 0 = field names, rows 1 and 2 = values), validating selected fields through
/// the Validations helpers, writes the result to "Vali.json", and returns the text.
/// </summary>
/// <returns>The accumulated JSON string.</returns>
public static StringBuilder ReadFile()
{
    var JSONString = new StringBuilder();
    string Fulltext;
    using (StreamReader sr = new StreamReader(FileType()))
    {
        while (!sr.EndOfStream)
        {
            JSONString.Append("[");
            Fulltext = sr.ReadToEnd().ToString();
            string[] rows = Fulltext.Split('\n');
            string a = rows[0]; // header line (field names)
            string b = rows[1]; // first data line
            string c = rows[2]; // second data line
            string[] AValues = a.Split("||");
            string[] BValues = b.Split("||");
            string[] CValues = c.Split("||");
            for (int i = 0; i <= rows.Count() - 1; i++)
            {
                JSONString.Append("{");
                for (int j = 0; j < AValues.Count(); j++)
                {
                    if (i == 0)
                    {
                        // NOTE(review): for j == 5/7/12 the trailing else below also runs,
                        // so those fields are appended twice — confirm this is intended.
                        if (j == 7 || j == 5)
                        {
                            Nullable <int> v = Validations.IntVal(BValues[j].ToString());
                            JSONString.Append("\"" + AValues[j].ToString() + "\":" + "\"" + v + "\"");
                            JSONString.Append(",");
                        }
                        if (j == 12)
                        {
                            Nullable <double> w = Validations.DoubVal(BValues[j].ToString());
                            JSONString.Append("\"" + AValues[j].ToString() + "\":" + "\"" + w + "\"");
                            JSONString.Append(",");
                        }
                        if (j == 0 || j == 1 || j == 2 || j == 8)
                        {
                            // Mandatory fields: abort this row on an empty value.
                            if (BValues[j].ToString() == "")
                            {
                                Console.WriteLine("Error: ");
                                string empty = Validations.Error(BValues[j].ToString());
                                Console.WriteLine(AValues[j].ToString() + empty + " in Row 1");
                                break;
                            }
                            else
                            {
                                string m = Validations.StriVal(BValues[j].ToString());
                                JSONString.Append("\"" + AValues[j].ToString() + "\":" + "\"" + m + "\"");
                                JSONString.Append(",");
                            }
                        }
                        else
                        {
                            string x = Validations.StriVal(BValues[j].ToString());
                            JSONString.Append("\"" + AValues[j].ToString() + "\":" + "\"" + x + "\"");
                            JSONString.Append(",");
                        }
                    }
                }
                if (i == rows.Count() - 1)
                {
                    // Last iteration emits the second data row (CValues).
                    for (int k = 0; k < AValues.Count(); k++)
                    {
                        if (k == 7)
                        {
                            Nullable <int> v = Validations.IntVal(CValues[k].ToString());
                            JSONString.Append("\"" + AValues[k].ToString() + "\":" + "\"" + v + "\"");
                            JSONString.Append(",");
                        }
                        if (k == 12)
                        {
                            Nullable <double> w = Validations.DoubVal(CValues[k].ToString());
                            JSONString.Append("\"" + AValues[k].ToString() + "\":" + "\"" + w + "\"");
                            JSONString.Append(",");
                        }
                        if (k == 5 || k == 11 || k == 8 || k == 2)
                        {
                            // Mandatory fields: abort this row on an empty value.
                            if (CValues[k].ToString() == "")
                            {
                                Console.WriteLine("Error: ");
                                string empty = Validations.Error(CValues[k].ToString());
                                Console.WriteLine(AValues[k].ToString() + empty + " in Row 2");
                                break;
                            }
                            else
                            {
                                string m = Validations.StriVal(CValues[k].ToString());
                                JSONString.Append("\"" + AValues[k].ToString() + "\":" + "\"" + m + "\"");
                                JSONString.Append(",");
                            }
                        }
                        else
                        {
                            string x = Validations.StriVal(CValues[k].ToString());
                            JSONString.Append("\"" + AValues[k].ToString() + "\":" + "\"" + x + "\"");
                            JSONString.Append(",");
                        }
                    }
                }
                JSONString.Append("}");
            }
        }
        JSONString.Append("]");
    }
    Console.WriteLine(JSONString.ToString());
    FileStream Sjson = new FileStream("Vali.json", FileMode.OpenOrCreate, FileAccess.Write);
    StreamWriter addJ = new StreamWriter(Sjson);
    TextWriter standardOutput = Console.Out; // BUG FIX: capture the real stdout BEFORE redirecting.
    Console.SetOut(addJ);                    // The original restored with Console.SetOut(Console.Out),
    Console.Write(JSONString.ToString());    // which is a no-op once Out already points at the file,
    Console.SetOut(standardOutput);          // leaving Console bound to a writer that is closed next.
    addJ.Close();
    Sjson.Close();
    return (JSONString);
}