//Convenience overload: reads a record set using the file location, header flag and
//delimiter already captured in aInputRecordSetMD.FileMatter, delegating to the
//full mReadRecordSet overload.
//NOTE(review): assumes aInputRecordSetMD.FileMatter is non-null — a null FileMatter
//throws here before the delegate is reached; confirm against call sites.
public void mReadRecordSet(RecordSetMD aInputRecordSetMD, RecordSet aInputRecordSet, PrintWriter pw) //throws com.WDataSci.WDS.WDSException
{
    this.mReadRecordSet(aInputRecordSetMD
            , aInputRecordSet
            , aInputRecordSetMD.FileMatter.Path
            , aInputRecordSetMD.FileMatter.FileName
            , aInputRecordSetMD.FileMatter.hasHeaderRow
            , aInputRecordSetMD.FileMatter.Dlm
            , pw
    );
}
public void Dispose() //throws com.WDataSci.WDS.WDSException, Exception { if (this.RecordSet != null) { this.RecordSet.Dispose(); } this.RecordSet = null; if (this.RecordSetMD != null) { this.RecordSetMD.Dispose(); } this.RecordSetMD = null; }
public void Dispose() //throws com.WDataSci.WDS.WDSException, Exception { this._XSDDoc = null; this._XSDFileName = null; this._XSDString = null; if (this.RecordSet != null) { this.RecordSet.Dispose(); } this.RecordSet = null; if (this.RecordSetMD != null) { this.RecordSetMD.Dispose(); } this.RecordSetMD = null; }
public void mWriteRecordSet(RecordSetMD aOutputRecordSetMD, RecordSet aOutputRecordSet) //throws com.WDataSci.WDS.WDSException { int i = -1; int j = -1; int k = -1; int jj = -1; int nInputMap = aOutputRecordSetMD.ModeMatter.nInputFields; int nColumns = aOutputRecordSetMD.Column.Length; int nResultColumns = nColumns; int nRows = aOutputRecordSet.Records.size(); if (aOutputRecordSetMD.Type.bIn(RecordSetMDEnums.eType.CSV, RecordSetMDEnums.eType.TXT, RecordSetMDEnums.eType.Dlm)) { try { String aPathAndName = null; aPathAndName = com.WDataSci.WDS.Util.PathAndName(aOutputRecordSetMD.FileMatter.Path, aOutputRecordSetMD.FileMatter.FileName); //Java BufferedWriter outBW = Files.newBufferedWriter(Paths.get(aPathAndName)); //C# StreamWriter outBW = new StreamWriter(aPathAndName); //Java CSVPrinter outCSV = null; //C# CsvWriter outCSV = null; if (aOutputRecordSetMD.FileMatter.hasHeaderRow) { String[] hr = new String[nColumns]; for (k = 0; k < nColumns; k++) { hr[k] = aOutputRecordSetMD.Column[k].Name; } /* Java >>> * * outCSV = new CSVPrinter(outBW, CSVFormat.DEFAULT.withHeader(hr).withDelimiter(aOutputRecordSetMD.FileMatter.Dlm.charAt(0))); * /* <<< Java */ /* C# >>> */ outCSV = new CsvWriter(outBW); outCSV.printRecord(hr); /* <<< C# */ } else { //Java outCSV = new CSVPrinter(outBW, CSVFormat.DEFAULT.withDelimiter(aOutputRecordSetMD.FileMatter.Dlm.charAt(0))); //C# outCSV = new CsvWriter(outBW); } outCSV.flush(); for (i = 0; i < aOutputRecordSet.Records.size(); i++) { //Java List<String> outputrow = new ArrayList<>(0); //C# List <String> outputrow = new List <String>(0); jj = 0; for (k = 0, j = jj; k < nResultColumns; k++, j++) { Object obj = aOutputRecordSet.Records.get(i).get(aOutputRecordSetMD.Column[j].MapKey); if (obj == null) { outputrow.add(null); } else { outputrow.add(aOutputRecordSet.Records.get(i).get(aOutputRecordSetMD.Column[j].MapKey).toString()); } } outCSV.printRecord(outputrow); } outCSV.flush(); outCSV.close(); outBW.close(); } catch (Exception e) { throw 
new com.WDataSci.WDS.WDSException("Error in WranglerFlatFile.mWriteRecordSet", e); } } }
public void mReadRecordSet(RecordSetMD aInputRecordSetMD , RecordSet aInputRecordSet , String aPath , String aFileName , Boolean hasHeaderRow , String dlm , PrintWriter pw ) //throws com.WDataSci.WDS.WDSException { try { if (aInputRecordSetMD.FileMatter == null) { aInputRecordSetMD.FileMatter.cPointToFile(aInputRecordSetMD, aPath, aFileName, hasHeaderRow, dlm); } int nInputMap = aInputRecordSetMD.nColumns(); int i = -1; int j = -1; String aPathAndName = null; //Java if ( this.__CSV == null || this.__CSV.isClosed() || !this.Path.equals(aPath) || !this.FileName.equals(aFileName) ) //C# if (this.__CSV == null || !this.Path.equals(aPath) || !this.FileName.equals(aFileName)) { aPathAndName = com.WDataSci.WDS.Util.PathAndName(aPath, aFileName); //Java this.__CSV = new CSVParser(new FileReader(aPathAndName), CSVFormat.EXCEL); //Java this.__CSVParserIterator = this.__CSVParser.iterator(); //C# this.__CSV = new CsvParser(new StreamReader(aPathAndName)); } if (aInputRecordSet.isEmpty()) { //Java aInputRecordSet.Records = new ArrayList<>(0); //C# aInputRecordSet.Records = new List <Map <FieldName, Object> >(0); //Java aInputRecordSet.Records_Orig = new ArrayList<>(0); //C# aInputRecordSet.Records_Orig = new List <Object[]>(0); } //Java CSVRecord inputLine; //C# String[] inputLine; //Java if ( hasHeaderRow && this.__CSVParser.getCurrentLineNumber() == 0 ) //C# if (hasHeaderRow && this.__CSV.Context.Row < 1) { //Java inputLine = this.__CSVParserIterator.next(); //C# inputLine = this.__CSV.Read(); } int row = -1; //Java while (this.__CSVParserIterator.hasNext()) //C# while (true) { //Java inputLine = this.__CSVParserIterator.next(); //C# inputLine = this.__CSV.Read(); if (inputLine == null) { break; } int nIncomingFields = inputLine.size(); if (nIncomingFields == 0 || (nIncomingFields == 1 && nInputMap > 1)) { break; } //if nIncomingFields<nInputMap, pad with nulls, otherwise throw if (nIncomingFields > nInputMap) { //Java throw new 
com.WDataSci.WDS.WDSException(String.format("Error reading from delimited file, row %d has insufficient columns, stopping input!\n", row + 1)); //C# throw new com.WDataSci.WDS.WDSException("Error reading from delimited file, row " + (row + 1) + " has insufficient columns, stopping input!\n"); } row++; //Java Map<FieldName, Object> inputRow = new LinkedHashMap<>(); //C# Map <FieldName, Object> inputRow = new Map <FieldName, Object>(); Object[] inputRow_orig = new Object[nInputMap]; for (j = 0; j < nInputMap; j++) { String s = null; if (j < nIncomingFields) { s = inputLine.get(j); } if (s == null || s.isEmpty()) { inputRow_orig[j] = null; if (aInputRecordSetMD.Column[j].hasMapKey()) { inputRow.put(aInputRecordSetMD.Column[j].MapKey, null); } } else if (aInputRecordSetMD.Column[j].DTyp.equals(FieldMDEnums.eDTyp.Dbl)) { //Java Double lv = Double.parseDouble(inputLine.get(j)); /* C# >>> */ double tmplv = double.NaN; double?lv = null; if (double.TryParse(inputLine.get(j), out tmplv)) { lv = tmplv; } /* <<< C# */ inputRow_orig[j] = lv; if (aInputRecordSetMD.Column[j].hasMapKey()) { inputRow.put(aInputRecordSetMD.Column[j].MapKey, lv); } } else if (aInputRecordSetMD.Column[j].DTyp.equals(FieldMDEnums.eDTyp.Lng)) { //Java Long lv = Long.parseLong(inputLine.get(j)); /* C# >>> */ long tmplv = long.MinValue; long?lv = null; if (long.TryParse(inputLine.get(j), out tmplv)) { lv = tmplv; } /* <<< C# */ inputRow_orig[j] = lv; if (aInputRecordSetMD.Column[j].hasMapKey()) { inputRow.put(aInputRecordSetMD.Column[j].MapKey, lv); } } else if (aInputRecordSetMD.Column[j].DTyp.equals(FieldMDEnums.eDTyp.Int)) { //Java Integer lv = Integer.parseInt(inputLine.get(j)); /* C# >>> */ int tmplv = int.MinValue; int?lv = null; if (int.TryParse(inputLine.get(j), out tmplv)) { lv = tmplv; } /* <<< C# */ inputRow_orig[j] = lv; if (aInputRecordSetMD.Column[j].hasMapKey()) { inputRow.put(aInputRecordSetMD.Column[j].MapKey, lv); } } else { //Java String lv = 
com.WDataSci.JniPMML.Util.CleanAsToken(inputLine.get(j)); //C# String lv = com.WDataSci.WDS.Util.CleanAsToken(inputLine.get(j)); inputRow_orig[j] = lv; if (aInputRecordSetMD.Column[j].hasMapKey()) { inputRow.put(aInputRecordSetMD.Column[j].MapKey, lv); } } } aInputRecordSet.Records.add(inputRow); aInputRecordSet.Records_Orig.add(inputRow_orig); } //Java this.__CSV.close(); //C# this.__CSV.Dispose(); //C#? this.__StreamReader.Dispose(); } catch (com.WDataSci.WDS.WDSException e) { throw new com.WDataSci.WDS.WDSException("Error reading from delimited file:", e); } catch (Exception e) { throw new com.WDataSci.WDS.WDSException("Error reading from delimited file:", e); } }
//Serializes aOutputRecordSet into this.RecordSet.Buffer (a DBB layered byte
//buffer): writes the "WDSD" leading block with the buffer's size/record counters,
//then one fixed-length record per row. When ModeMatter.bRepeatInputFields is set,
//the first nInputColumns of each record are copied from the positional input row
//(Records_Orig) before the mapped output columns. Null values are encoded as
//sentinels (NaN for Dbl/Dte/DTm, MinValue for Lng/Int). Returns 0 on success;
//wraps any failure in a WDSException.
public int mWriteRecordSet(RecordSetMD outRecordSetMD
        , RecordSet aOutputRecordSet
        , RecordSetMD inRecordSetMD
        , RecordSet aInputRecordSet
) //throws com.WDataSci.WDS.WDSException
{
    try {
        if (this.RecordSet.Buffer == null) {
            throw new com.WDataSci.WDS.WDSException("Error, RecordSet buffer not set before WriteSet!");
        }
        DBB buf = this.RecordSet.Buffer;
        int nRows = aOutputRecordSet.Records.size();
        //size/prepare the buffer for nRows records before any writes
        this.mWritePrepFor(outRecordSetMD, nRows);
        buf.position(0, 0, 0);
        int bptr = 0;
        //write leading data
        buf.PutLayerFLenString(0, "WDSD", 8, 0);
        buf.PutLayerLong(0, buf.nDBBRequiredBytes);
        buf.PutLayerLong(0, buf.nDBBLeadingBytes);
        buf.PutLayerLong(0, buf.nDBBFLenBytes);
        buf.PutLayerLong(0, buf.nDBBVLenBytes);
        buf.PutLayerLong(0, buf.nRecords);
        buf.PutLayerLong(0, buf.nRecordFLenBytes);
        buf.PutLayerLong(0, buf.nRecordVLenBytes);
        int nColumns = outRecordSetMD.nColumns();
        int nInputColumns = inRecordSetMD.nColumns();
        bptr = 0;
        //bptr advances by one fixed-length record per row
        for (int i = 0; i < nRows; i++, bptr += (int)buf.nRecordFLenBytes) {
            buf.position(buf.ptr, bptr, buf.vlenptr);
            Map <FieldName, Object> outRow = aOutputRecordSet.Records.get(i);
            Object[] inRow = null;
            if (outRecordSetMD.ModeMatter.bRepeatInputFields) {
                inRow = aInputRecordSet.Records_Orig.get(i);
            }
            //while bInInputSet, jj indexes the output layout and j the input row;
            //j resets to 0 when the repeated input columns are exhausted
            Boolean bInInputSet = outRecordSetMD.ModeMatter.bRepeatInputFields;
            int j = 0;
            int jj = 0;
            for (jj = 0, j = 0; jj < nColumns; jj++, j++) {
                if (bInInputSet && jj == nInputColumns) {
                    bInInputSet = false;
                    j = 0;
                }
                Object lv = 0;
                if (bInInputSet) {
                    lv = inRow[j];
                }
                else {
                    lv = outRow.get(outRecordSetMD.Column[j].MapKey);
                }
                //encode the value per the OUTPUT column type (indexed by jj)
                switch (outRecordSetMD.Column[jj].DTyp) {
                    //case Dbl:
                    case FieldMDEnums.eDTyp.Dbl:
                        if (lv == null) { lv = Double.NaN; }
                        buf.PutLayerDouble(1, (double)lv);
                        break;
                    //case Lng:
                    case FieldMDEnums.eDTyp.Lng:
                        //Java if ( lv == null ) lv = Long.MIN_VALUE;
                        //C# if (lv == null) { lv = long.MinValue; }
                        buf.PutLayerLong(1, (long)lv);
                        break;
                    //case Int:
                    case FieldMDEnums.eDTyp.Int:
                        //Java if ( lv == null ) lv = Integer.MIN_VALUE;
                        //C# if (lv == null) { lv = int.MinValue; }
                        buf.PutLayerInt(1, (int)lv);
                        break;
                    //case Dte:
                    case FieldMDEnums.eDTyp.Dte:
                        //dates are stored as doubles; NaN marks null
                        if (lv == null) { lv = Double.NaN; }
                        buf.PutLayerDouble(1, (double)lv);
                        break;
                    //case DTm:
                    case FieldMDEnums.eDTyp.DTm:
                        if (lv == null) { lv = Double.NaN; }
                        buf.PutLayerDouble(1, (double)lv);
                        break;
                    //case Str:
                    case FieldMDEnums.eDTyp.Str:
                        buf.PutLayerFLenString(1, (String)lv, (int)outRecordSetMD.Column[jj].ByteMaxLength, 2);
                        break;
                    //case VLS:
                    case FieldMDEnums.eDTyp.VLS:
                        buf.PutLayerVLenString(1, (String)lv, (int)outRecordSetMD.Column[jj].ByteMaxLength, 2);
                        break;
                    default:
                        throw new com.WDataSci.WDS.WDSException("Error, unImplemented column type" + outRecordSetMD.Column[jj].DTyp.ToString());
                }
            }
        }
        return(0);
    }
    catch (Exception e) {
        throw new com.WDataSci.WDS.WDSException("Error in writing output map to DBB", e);
    }
}
//Reads records out of the DBB (layered byte buffer) owned by
//aInputRecordSetMD.DBBMatter into aInputRecordSet, decoding each column per its
//declared type. Sentinel values (NaN/infinity for doubles, MinValue/MaxValue for
//integral types) decode to null. Each row is stored both as a MapKey-keyed map
//(Records) and as a positional Object[] (Records_Orig).
public void mReadRecordSet(RecordSetMD aInputRecordSetMD, RecordSet aInputRecordSet, PrintWriter pw) //throws WDSException
{
    int ri = 0;  //last row index attempted; reported in the error message
    try {
        if (aInputRecordSet.isEmpty()) {
            //Java aInputRecordSet.Records = new ArrayList<>(0);
            //C# aInputRecordSet.Records = new List <Map <FieldName, Object> >(0);
            //Java aInputRecordSet.Records_Orig = new ArrayList<>(0);
            //C# aInputRecordSet.Records_Orig = new List <Object []>(0);
        }
        int nColumns = aInputRecordSetMD.nColumns();
        int i = -1;
        int j = -1;
        int jj = -1;
        DBB buf = aInputRecordSetMD.DBBMatter.RecordSet.Buffer;
        int nRows = (int)aInputRecordSetMD.DBBMatter.RecordSet.Buffer.nRecords;
        /* Java >>> *
         * Double lvd = 0.0;
         * Long lvl = 0L;
         * Integer lvi = 0;
         * Boolean lvbln = false;
         * /* <<< Java */
        /* C# >>> */
        double? lvd = 0.0;
        long? lvl = 0L;
        int? lvi = 0;
        Boolean?lvbln = false;
        /* <<< C# */
        String lvs = "";
        int bptr = 0;
        //bptr advances by one fixed-length record per row
        for (i = 0; i < nRows; i++, bptr += (int)aInputRecordSetMD.DBBMatter.RecordSet.Buffer.nRecordFLenBytes) {
            ri = i;
            buf.position(buf.ptr, bptr, buf.vlenptr);
            Object[] inputRow_orig = new Object[nColumns];
            //Java Map<FieldName, Object> inputRow = new LinkedHashMap<>();
            //C# Map <FieldName, Object> inputRow = new Map <FieldName, Object>();
            for (jj = 0; jj < nColumns; jj++) {
                switch (aInputRecordSetMD.Column[jj].DTyp) {
                    //case Dbl:
                    case FieldMDEnums.eDTyp.Dbl:
                        lvd = buf.GetLayerDouble(1);
                        //NaN/infinity (C# also Min/Max) are "missing" sentinels -> null
                        //Java if (lvd==null || lvd.isNaN() || Double.isInfinite(lvd) ) lvd=null;
                        //C# if (lvd == null || Double.IsNaN(lvd.Value) || lvd == Double.MinValue || lvd == Double.MaxValue || Double.IsNegativeInfinity(lvd.Value) || Double.IsPositiveInfinity(lvd.Value)) { lvd = null; }
                        inputRow_orig[jj] = lvd;
                        if (aInputRecordSetMD.Column[jj].hasMapKey()) {
                            inputRow.put(aInputRecordSetMD.Column[jj].MapKey, lvd);
                        }
                        break;
                    //case Lng:
                    case FieldMDEnums.eDTyp.Lng:
                        lvl = buf.GetLayerLong(1);
                        //Java if ( lvl == null || lvl.equals(Long.MIN_VALUE) || lvl.equals(Long.MAX_VALUE) ) lvl = null;
                        //C# if (lvl == null || lvl == long.MinValue || lvl == long.MaxValue) { lvl = null; }
                        inputRow_orig[jj] = lvl;
                        if (aInputRecordSetMD.Column[jj].hasMapKey()) {
                            inputRow.put(aInputRecordSetMD.Column[jj].MapKey, lvl);
                        }
                        break;
                    //case Int:
                    case FieldMDEnums.eDTyp.Int:
                        lvi = buf.GetLayerInt(1);
                        //Java if ( lvi == null || lvi.equals(Integer.MIN_VALUE) || lvi.equals(Integer.MAX_VALUE) ) lvi = null;
                        //C# if (lvi == null || lvi == int.MinValue || lvi == int.MaxValue) { lvi = null; }
                        inputRow_orig[jj] = lvi;
                        if (aInputRecordSetMD.Column[jj].hasMapKey()) {
                            inputRow.put(aInputRecordSetMD.Column[jj].MapKey, lvi);
                        }
                        break;
                    //case Str:
                    case FieldMDEnums.eDTyp.Str:
                        //fixed-length string field
                        lvs = buf.GetLayerFLenString(1, aInputRecordSetMD.Column[jj].ByteMaxLength);
                        inputRow_orig[jj] = lvs;
                        if (aInputRecordSetMD.Column[jj].hasMapKey()) {
                            inputRow.put(aInputRecordSetMD.Column[jj].MapKey, lvs);
                        }
                        break;
                    //case VLS:
                    case FieldMDEnums.eDTyp.VLS:
                        //variable-length string field
                        lvs = buf.GetLayerVLenString(1, aInputRecordSetMD.Column[jj].ByteMaxLength);
                        inputRow_orig[jj] = lvs;
                        if (aInputRecordSetMD.Column[jj].hasMapKey()) {
                            inputRow.put(aInputRecordSetMD.Column[jj].MapKey, lvs);
                        }
                        break;
                    //case Bln:
                    case FieldMDEnums.eDTyp.Bln:
                        //booleans are stored as ints: nonzero -> true, sentinel -> null
                        lvi = buf.GetLayerInt(1);
                        //Java if ( lvi == null || lvi.equals(Integer.MIN_VALUE) || lvi.equals(Integer.MAX_VALUE) ) lvi = null;
                        //Fix(review): toggle tag below was "//C$" (typo), which would drop
                        //the C# sentinel check during preprocessing — corrected to "//C#".
                        //C# if (lvi == null || lvi == int.MinValue || lvi == int.MaxValue) { lvi = null; }
                        lvbln = null;
                        if (lvi != null) {
                            lvbln = (lvi != 0);
                        }
                        inputRow_orig[jj] = lvbln;
                        if (aInputRecordSetMD.Column[jj].hasMapKey()) {
                            inputRow.put(aInputRecordSetMD.Column[jj].MapKey, lvbln);
                        }
                        break;
                    //case Byt:
                    case FieldMDEnums.eDTyp.Byt:
                    default:
                        throw new com.WDataSci.WDS.WDSException("Error, column type not implemented");
                }
            }
            aInputRecordSet.Records.add(inputRow);
            aInputRecordSet.Records_Orig.add(inputRow_orig);
        }
    }
    catch (Exception e) {
        throw new com.WDataSci.WDS.WDSException("Error reading from DBB, row " + ri + ":", e);
    }
}
//Dispatches a record-set write to the handler matching the output metadata's type:
//delimited file (FileMatter) for CSV/TXT/Dlm, HDF5 (HDF5Matter), or direct byte
//buffer (DBBMatter). Any other type falls through without writing.
//Returns this to allow chaining.
public RecordSet mWriteRecordSet(RecordSetMD aOutputRecordSetMD, RecordSetMD aInputRecordSetMD, RecordSet aInputRecordSet) //throws com.WDataSci.WDS.WDSException
{
    if (aOutputRecordSetMD.Type.bIn(RecordSetMDEnums.eType.CSV, RecordSetMDEnums.eType.TXT, RecordSetMDEnums.eType.Dlm)) {
        aOutputRecordSetMD.FileMatter.mWriteRecordSet(aOutputRecordSetMD, this, aInputRecordSetMD, aInputRecordSet);
    }
    else if (aOutputRecordSetMD.Type.bIn(RecordSetMDEnums.eType.HDF5)) {
        aOutputRecordSetMD.HDF5Matter.mWriteRecordSet(aOutputRecordSetMD, this, aInputRecordSetMD, aInputRecordSet);
    }
    else if (aOutputRecordSetMD.Type.bIn(RecordSetMDEnums.eType.DBB)) {
        aOutputRecordSetMD.DBBMatter.mWriteRecordSet(aOutputRecordSetMD, this, aInputRecordSetMD, aInputRecordSet);
    }
    return(this);
}