// Replaces this metadata's column set with deep copies of the columns in arg.
// Each copied column is consistency-checked after construction.
// Returns this instance to allow call chaining.
public RecordSetMD mCopyColumnsFrom(RecordSetMD arg) //throws com.WDataSci.WDS.WDSException, Exception
{
    this.Column = null;
    int nCols = arg.nColumns();
    this.Column = new FieldMD[nCols];
    for (int col = 0; col < nCols; col++) {
        var copy = new FieldMD(arg.Column[col]);
        copy.Consistency();
        this.Column[col] = copy;
    }
    return (this);
}
// Writes the header "map" (per-column metadata) for aRecordSetMD into the Header DBB buffer.
// Layer 0 carries the leading fields (the "WDSH" magic plus seven layout longs);
// layer 1 carries one fixed-length record per column: name, map key, type code,
// ByteMemLength and ByteMaxLength.
// Returns 0 on success; any failure is wrapped in a WDSException.
public int mWriteMap(RecordSetMD aRecordSetMD) //throws com.WDataSci.WDS.WDSException
{
    try {
        if (this.Header.Buffer == null) {
            throw new com.WDataSci.WDS.WDSException("Error, Header buffer not set before WriteMap!");
        }
        long nColumns = aRecordSetMD.nColumns();
        // name/key strings are bounded by the header's max string byte length
        int nColumnNameMaxByteLength = (int)(this.Header.MaxStringByteLength);
        DBB buf = this.Header.Buffer;
        // 40 fixed bytes per column record, plus room for two variable-length strings per column
        buf.cAsUsualLayout("WDSH", nColumns, 40, 2 * nColumnNameMaxByteLength);
        buf.position(0, 0, 0);
        int bptr = 0;
        //write leading data
        buf.PutLayerFLenString(0, "WDSH", 8, 0);
        buf.PutLayerLong(0, buf.nDBBRequiredBytes);
        buf.PutLayerLong(0, buf.nDBBLeadingBytes);
        buf.PutLayerLong(0, buf.nDBBFLenBytes);
        buf.PutLayerLong(0, buf.nDBBVLenBytes);
        buf.PutLayerLong(0, buf.nRecords);
        buf.PutLayerLong(0, buf.nRecordFLenBytes);
        buf.PutLayerLong(0, buf.nRecordVLenBytes);
        buf.position(buf.ptr, 0, 0);
        //here bptr is relative only to layer 1
        bptr = 0;
        for (int jj = 0; jj < nColumns; jj++, bptr += (int)buf.nRecordFLenBytes) {
            aRecordSetMD.Column[jj].Consistency();
            buf.position(buf.ptr, bptr, buf.vlenptr);
            buf.PutLayerVLenString(1, aRecordSetMD.Column[jj].Name, nColumnNameMaxByteLength, 2);
            // a missing map key is serialized as an empty string so the record shape stays fixed
            if (aRecordSetMD.Column[jj].MapKey != null) {
                buf.PutLayerVLenString(1, aRecordSetMD.Column[jj].MapKey.getValue(), nColumnNameMaxByteLength, 2);
            }
            else {
                buf.PutLayerVLenString(1, "", nColumnNameMaxByteLength, 2);
            }
            buf.PutLayerInt(1, aRecordSetMD.Column[jj].DTyp.AsInt());
            buf.PutLayerLong(1, aRecordSetMD.Column[jj].ByteMemLength);
            buf.PutLayerLong(1, aRecordSetMD.Column[jj].ByteMaxLength);
        }
        return(0);
    }
    catch (Exception e) {
        throw new com.WDataSci.WDS.WDSException("Error in writing output map to DBB", e);
    }
}
/* <<< C# */
// Sizes and positions the RecordSet DBB buffer for writing nRecords records described by
// aRecordSetMD: accumulates per-record fixed (ByteMemLength) and variable (ByteMaxLength,
// VLen columns only) byte totals, fills in the buffer layout fields, verifies that the
// underlying byte[] has capacity, and sets the fixed/variable split offsets.
// Throws WDSException when capacity is insufficient.
public void mWritePrepFor(RecordSetMD aRecordSetMD, long nRecords) //throws com.WDataSci.WDS.WDSException
{
    long rflen = 0;
    long rvlen = 0;
    for (int jj = 0; jj < aRecordSetMD.nColumns(); jj++) {
        aRecordSetMD.Column[jj].Consistency();
        rflen += aRecordSetMD.Column[jj].ByteMemLength;
        if (aRecordSetMD.Column[jj].isVLen()) {
            rvlen += aRecordSetMD.Column[jj].ByteMaxLength;
        }
    }
    this.RecordSet.Buffer.nRecords = nRecords;
    this.RecordSet.Buffer.nRecordFLenBytes = rflen;
    this.RecordSet.Buffer.nRecordVLenBytes = rvlen;
    this.RecordSet.Buffer.LayoutStyle = "WDSD";
    this.RecordSet.Buffer.nDBBLeadingBytes = 8 * 8;
    this.RecordSet.Buffer.nDBBFLenBytes = nRecords * rflen;
    this.RecordSet.Buffer.nDBBVLenBytes = nRecords * rvlen;
    this.RecordSet.Buffer.nDBBRequiredBytes = this.RecordSet.Buffer.nDBBLeadingBytes + this.RecordSet.Buffer.nDBBFLenBytes + this.RecordSet.Buffer.nDBBVLenBytes;
    if (this.RecordSet.Buffer.nDBBRequiredBytes > this.RecordSet.Buffer.Length
        || (this.RecordSet.Buffer.nDBBRequiredBytes + this.RecordSet.Buffer.offset) > this.RecordSet.Buffer.data.Length) {
        throw new com.WDataSci.WDS.WDSException("Error, capacity of underlying byte[] is insufficient in DBB.ProcessRecordSetParameters");
    }
    this.RecordSet.Buffer.bHasFLenVLenSplit = true;
    this.RecordSet.Buffer.flenoffset = this.RecordSet.Buffer.offset + this.RecordSet.Buffer.nDBBLeadingBytes;
    this.RecordSet.Buffer.flenlength = this.RecordSet.Buffer.nDBBFLenBytes;
    this.RecordSet.Buffer.vlenoffset = this.RecordSet.Buffer.offset + this.RecordSet.Buffer.nDBBLeadingBytes + this.RecordSet.Buffer.nDBBFLenBytes;
    // BUG FIX: this line previously re-assigned flenlength (= nDBBVLenBytes), clobbering the
    // fixed-length span set two lines above and leaving the variable-length span unset.
    this.RecordSet.Buffer.vlenlength = this.RecordSet.Buffer.nDBBVLenBytes;
    this.RecordSet.Buffer.position(0, 0, 0);
}
/* C# >>> */
// Computes the byte capacity needed to serialize nRecords records described by
// aRecordSetMD: rflensize/rvlensize are the per-record fixed and variable byte
// totals, rsize the overall record-set payload (leading bytes excluded).
public void mBytesRequired(RecordSetMD aRecordSetMD, long nRecords
    , out long rsize, out long rflensize, out long rvlensize
    )
{
    rsize = 0;
    rflensize = 0;
    rvlensize = 0;
    int count = aRecordSetMD.nColumns();
    for (int col = 0; col < count; col++) {
        var fld = aRecordSetMD.Column[col];
        fld.Consistency();
        rflensize += fld.ByteMemLength;
        if (fld.isVLen()) {
            rvlensize += fld.ByteMaxLength;
        }
    }
    rsize = nRecords * rflensize + nRecords * rvlensize;
}
/* <<< C# */
/* Java >>>
 *
 * public void mBytesRequired(RecordSetMD aRecordSetMD, long nRecords, long[] csize, long[] hsize, long[] rsize
 * , long[] cleadsize
 * , long[] hleadsize, long[] hflensize, long[] hvlensize
 * , long[] rleadsize, long[] rflensize, long[] rvlensize
 * )
 * throws com.WDataSci.WDS.WDSException
 * {
 * csize[0] = 0;
 * hsize[0] = 0;
 * rsize[0] = 0;
 * cleadsize[0] = 8 * 8;
 * hleadsize[0] = 8 * 8;
 * hflensize[0] = 40;
 * hvlensize[0] = 2 * aRecordSetMD.nHeaderByteMaxLength();
 * rleadsize[0] = 8 * 8;
 *
 * rflensize[0] = 0;
 * rvlensize[0] = 0;
 * int nColumns = aRecordSetMD.nColumns();
 * for ( int jj = 0; jj < nColumns; jj++ ) {
 * aRecordSetMD.Column[jj].Consistency();
 * rflensize[0] += aRecordSetMD.Column[jj].ByteMemLength;
 * if ( aRecordSetMD.Column[jj].isVLen() )
 * rvlensize[0] += aRecordSetMD.Column[jj].ByteMaxLength;
 * }
 *
 * hsize[0] = hleadsize[0] + nColumns * 40 + 2 * nColumns * aRecordSetMD.nHeaderByteMaxLength();
 * rsize[0] = rleadsize[0] + nRecords * rflensize[0] + nRecords * rvlensize[0];
 * csize[0] = cleadsize[0] + hsize[0] + rsize[0];
 *
 * }
 *
 * /* <<< Java */
/* C# >>> */
// Full-layout byte requirement: computes the container (csize), header (hsize) and
// record-set (rsize) byte totals, plus the leading/fixed/variable components of each
// section. The per-record accumulation is delegated to the three-out overload.
public void mBytesRequired(RecordSetMD aRecordSetMD, long nRecords, out long csize, out long hsize, out long rsize
    , out long cleadsize
    , out long hleadsize, out long hflensize, out long hvlensize
    , out long rleadsize, out long rflensize, out long rvlensize
    )
{
    csize = 0;
    hsize = 0;
    rsize = 0;
    cleadsize = 8 * 8;  // eight 8-byte leading slots (magic + seven longs)
    hleadsize = 8 * 8;
    hflensize = 40;     // fixed bytes per header column record (see mWriteMap)
    hvlensize = 2 * aRecordSetMD.nHeaderByteMaxLength();
    rleadsize = 8 * 8;
    int nColumns = aRecordSetMD.nColumns();
    this.mBytesRequired(aRecordSetMD, nRecords, out rsize, out rflensize, out rvlensize);
    hsize = hleadsize + nColumns * 40 + 2 * nColumns * aRecordSetMD.nHeaderByteMaxLength();
    // the delegated rsize excludes leading bytes; recompute it including rleadsize
    rsize = rleadsize + nRecords * rflensize + nRecords * rvlensize;
    csize = cleadsize + hsize + rsize;
}
// Writes an output RecordSet (optionally prefixed by the repeated input fields) to a
// delimited text file (CSV/TXT/Dlm). When bRepeatInputFields is set, the output column
// metadata already contains suffixed copies of the input columns; nResultColumns is the
// count of pure result columns. Non-delimited types are silently ignored by the Type guard.
// Throws WDSException on any I/O or mapping failure.
public void mWriteRecordSet(RecordSetMD aOutputRecordSetMD, RecordSet aOutputRecordSet, RecordSetMD aInputRecordSetMD, RecordSet aInputRecordSet) //throws com.WDataSci.WDS.WDSException
{
    int i = -1;
    int j = -1;
    int k = -1;
    int jj = -1;
    int nInputMap = aOutputRecordSetMD.ModeMatter.nInputFields;
    int nColumns = aOutputRecordSetMD.Column.Length;
    int nResultColumns = nColumns;
    if (aOutputRecordSetMD.ModeMatter.bRepeatInputFields) {
        nResultColumns -= nInputMap;
    }
    int nRows = aOutputRecordSet.Records.size();
    if (aOutputRecordSetMD.Type.bIn(RecordSetMDEnums.eType.CSV, RecordSetMDEnums.eType.TXT, RecordSetMDEnums.eType.Dlm)) {
        try {
            String aPathAndName = null;
            aPathAndName = com.WDataSci.WDS.Util.PathAndName(aOutputRecordSetMD.FileMatter.Path, aOutputRecordSetMD.FileMatter.FileName);
            //Java BufferedWriter outBW = Files.newBufferedWriter(Paths.get(aPathAndName));
            //C# StreamWriter outBW = new StreamWriter(aPathAndName);
            //Java CSVPrinter outCSV = null;
            //C# CsvWriter outCSV = null;
            if (aOutputRecordSetMD.FileMatter.hasHeaderRow) {
                // header holds the raw input names first (when repeating), then all output names
                int nTotalColumns = nColumns;
                if (aOutputRecordSetMD.ModeMatter.bRepeatInputFields) {
                    nTotalColumns += aInputRecordSetMD.nColumns();
                }
                String[] hr = new String[nTotalColumns];
                jj = 0;
                if (aOutputRecordSetMD.ModeMatter.bRepeatInputFields) {
                    // BUG FIX: loop bound was nColumns (the output count, which already includes
                    // the repeated fields); that over-ran aInputRecordSetMD.Column and could not
                    // fill hr consistently with its nColumns + nInput sizing.
                    for (k = 0; k < aInputRecordSetMD.nColumns(); k++, jj++) {
                        hr[jj] = aInputRecordSetMD.Column[k].Name;
                    }
                }
                for (k = 0; k < nColumns; k++, jj++) {
                    hr[jj] = aOutputRecordSetMD.Column[k].Name;
                }
                /* Java >>>
                 *
                 * outCSV = new CSVPrinter(outBW, CSVFormat.DEFAULT.withHeader(hr).withDelimiter(aOutputRecordSetMD.FileMatter.Dlm.charAt(0)));
                 * /* <<< Java */
                /* C# >>> */
                outCSV = new CsvWriter(outBW);
                outCSV.printRecord(hr);
                /* <<< C# */
            }
            else {
                //Java outCSV = new CSVPrinter(outBW, CSVFormat.DEFAULT.withDelimiter(aOutputRecordSetMD.FileMatter.Dlm.charAt(0)));
                //C# outCSV = new CsvWriter(outBW);
            }
            outCSV.flush();
            // NOTE(review): rows are driven by the INPUT record count while values are read from
            // the output set at the same index — assumes the two sets are parallel; confirm.
            for (i = 0; i < aInputRecordSet.Records.size(); i++) {
                //Java List<String> outputrow = new ArrayList<>(0);
                //C# List <String> outputrow = new List <String>(0);
                jj = 0;
                if (aOutputRecordSetMD.ModeMatter.bRepeatInputFields) {
                    //Java for (Object obj : aInputRecordSet.Records_Orig.get(i))
                    //C# foreach (Object obj in aInputRecordSet.Records_Orig.get(i))
                    {
                        if (obj == null) {
                            outputrow.add(null);
                        }
                        else {
                            outputrow.add(obj.toString());
                        }
                    }
                    jj = nInputMap;
                }
                for (k = 0, j = jj; k < nResultColumns; k++, j++) {
                    Object obj = aOutputRecordSet.Records.get(i).get(aOutputRecordSetMD.Column[j].MapKey);
                    if (obj == null) {
                        outputrow.add(null);
                    }
                    else {
                        outputrow.add(aOutputRecordSet.Records.get(i).get(aOutputRecordSetMD.Column[j].MapKey).toString());
                    }
                }
                outCSV.printRecord(outputrow);
            }
            outCSV.flush();
            outCSV.close();
            outBW.close();
        }
        catch (Exception e) {
            throw new com.WDataSci.WDS.WDSException("Error in WranglerFlatFile.mWriteRecordSet", e);
        }
    }
}
// Reads a delimited file into aInputRecordSet: each row becomes both a raw Object[]
// (Records_Orig) and a key→value map (Records, keyed by each column's MapKey).
// Values are parsed per column DTyp (Dbl/Lng/Int, else cleaned string); unparsable or
// empty cells become null. Rows shorter than the column map are padded with nulls;
// rows LONGER than the map abort with a WDSException.
public void mReadRecordSet(RecordSetMD aInputRecordSetMD
    , RecordSet aInputRecordSet
    , String aPath
    , String aFileName
    , Boolean hasHeaderRow
    , String dlm
    , PrintWriter pw
    ) //throws com.WDataSci.WDS.WDSException
{
    try {
        // NOTE(review): this branch dereferences FileMatter right after testing it for null —
        // it will always NRE when taken. Presumably FileMatter should be constructed first
        // (or the condition inverted); confirm intent before changing, type is not visible here.
        if (aInputRecordSetMD.FileMatter == null) {
            aInputRecordSetMD.FileMatter.cPointToFile(aInputRecordSetMD, aPath, aFileName, hasHeaderRow, dlm);
        }
        int nInputMap = aInputRecordSetMD.nColumns();
        int i = -1;
        int j = -1;
        String aPathAndName = null;
        // (re)open the parser when none exists or the target file changed
        //Java if ( this.__CSV == null || this.__CSV.isClosed() || !this.Path.equals(aPath) || !this.FileName.equals(aFileName) )
        //C# if (this.__CSV == null || !this.Path.equals(aPath) || !this.FileName.equals(aFileName))
        {
            aPathAndName = com.WDataSci.WDS.Util.PathAndName(aPath, aFileName);
            //Java this.__CSV = new CSVParser(new FileReader(aPathAndName), CSVFormat.EXCEL);
            //Java this.__CSVParserIterator = this.__CSVParser.iterator();
            //C# this.__CSV = new CsvParser(new StreamReader(aPathAndName));
        }
        if (aInputRecordSet.isEmpty()) {
            //Java aInputRecordSet.Records = new ArrayList<>(0);
            //C# aInputRecordSet.Records = new List <Map <FieldName, Object> >(0);
            //Java aInputRecordSet.Records_Orig = new ArrayList<>(0);
            //C# aInputRecordSet.Records_Orig = new List <Object[]>(0);
        }
        //Java CSVRecord inputLine;
        //C# String[] inputLine;
        // consume the header row once, only at the start of the file
        //Java if ( hasHeaderRow && this.__CSVParser.getCurrentLineNumber() == 0 )
        //C# if (hasHeaderRow && this.__CSV.Context.Row < 1)
        {
            //Java inputLine = this.__CSVParserIterator.next();
            //C# inputLine = this.__CSV.Read();
        }
        int row = -1;
        //Java while (this.__CSVParserIterator.hasNext())
        //C# while (true)
        {
            //Java inputLine = this.__CSVParserIterator.next();
            //C# inputLine = this.__CSV.Read();
            if (inputLine == null) {
                break;
            }
            int nIncomingFields = inputLine.size();
            // a blank line (or a lone field where several are expected) ends the data
            if (nIncomingFields == 0 || (nIncomingFields == 1 && nInputMap > 1)) {
                break;
            }
            //if nIncomingFields<nInputMap, pad with nulls, otherwise throw
            if (nIncomingFields > nInputMap) {
                // BUG FIX: the message said "insufficient columns" but this guard fires when the
                // row has MORE fields than the column map.
                //Java throw new com.WDataSci.WDS.WDSException(String.format("Error reading from delimited file, row %d has too many columns, stopping input!\n", row + 1));
                //C# throw new com.WDataSci.WDS.WDSException("Error reading from delimited file, row " + (row + 1) + " has too many columns, stopping input!\n");
            }
            row++;
            //Java Map<FieldName, Object> inputRow = new LinkedHashMap<>();
            //C# Map <FieldName, Object> inputRow = new Map <FieldName, Object>();
            Object[] inputRow_orig = new Object[nInputMap];
            for (j = 0; j < nInputMap; j++) {
                String s = null;
                if (j < nIncomingFields) {
                    s = inputLine.get(j);
                }
                if (s == null || s.isEmpty()) {
                    inputRow_orig[j] = null;
                    if (aInputRecordSetMD.Column[j].hasMapKey()) {
                        inputRow.put(aInputRecordSetMD.Column[j].MapKey, null);
                    }
                }
                else if (aInputRecordSetMD.Column[j].DTyp.equals(FieldMDEnums.eDTyp.Dbl)) {
                    //Java Double lv = Double.parseDouble(inputLine.get(j));
                    /* C# >>> */
                    double tmplv = double.NaN;
                    double? lv = null;
                    if (double.TryParse(inputLine.get(j), out tmplv)) {
                        lv = tmplv;
                    }
                    /* <<< C# */
                    inputRow_orig[j] = lv;
                    if (aInputRecordSetMD.Column[j].hasMapKey()) {
                        inputRow.put(aInputRecordSetMD.Column[j].MapKey, lv);
                    }
                }
                else if (aInputRecordSetMD.Column[j].DTyp.equals(FieldMDEnums.eDTyp.Lng)) {
                    //Java Long lv = Long.parseLong(inputLine.get(j));
                    /* C# >>> */
                    long tmplv = long.MinValue;
                    long? lv = null;
                    if (long.TryParse(inputLine.get(j), out tmplv)) {
                        lv = tmplv;
                    }
                    /* <<< C# */
                    inputRow_orig[j] = lv;
                    if (aInputRecordSetMD.Column[j].hasMapKey()) {
                        inputRow.put(aInputRecordSetMD.Column[j].MapKey, lv);
                    }
                }
                else if (aInputRecordSetMD.Column[j].DTyp.equals(FieldMDEnums.eDTyp.Int)) {
                    //Java Integer lv = Integer.parseInt(inputLine.get(j));
                    /* C# >>> */
                    int tmplv = int.MinValue;
                    int? lv = null;
                    if (int.TryParse(inputLine.get(j), out tmplv)) {
                        lv = tmplv;
                    }
                    /* <<< C# */
                    inputRow_orig[j] = lv;
                    if (aInputRecordSetMD.Column[j].hasMapKey()) {
                        inputRow.put(aInputRecordSetMD.Column[j].MapKey, lv);
                    }
                }
                else {
                    // default: treat as (cleaned) string
                    //Java String lv = com.WDataSci.JniPMML.Util.CleanAsToken(inputLine.get(j));
                    //C# String lv = com.WDataSci.WDS.Util.CleanAsToken(inputLine.get(j));
                    inputRow_orig[j] = lv;
                    if (aInputRecordSetMD.Column[j].hasMapKey()) {
                        inputRow.put(aInputRecordSetMD.Column[j].MapKey, lv);
                    }
                }
            }
            aInputRecordSet.Records.add(inputRow);
            aInputRecordSet.Records_Orig.add(inputRow_orig);
        }
        //Java this.__CSV.close();
        //C# this.__CSV.Dispose();
        //C#? this.__StreamReader.Dispose();
    }
    catch (com.WDataSci.WDS.WDSException e) {
        throw new com.WDataSci.WDS.WDSException("Error reading from delimited file:", e);
    }
    catch (Exception e) {
        throw new com.WDataSci.WDS.WDSException("Error reading from delimited file:", e);
    }
}
// Builds this RecordSetMD's output column set from a PMML model's output fields plus,
// when bRepeatInputFields is set, suffixed copies of the input columns.
// Implemented only on the Java side (body below is the Java twin, kept as a comment);
// the C# build throws a not-implemented WDSException.
//Java public RecordSetMD mPrepForOutput(RecordSetMD aInputRecordSetMD, JniPMML aJniPMML, List<Map<FieldName, Object>> Results)
//C# public RecordSetMD mPrepForOutput <T>(RecordSetMD aInputRecordSetMD, JniPMMLItem aJniPMML, List <Map <T, Object> > Results)
//throws Exception
{
    int i = -1;
    int j = -1;
    int k = -1;
    int jj = -1;
    int nInputMap = aInputRecordSetMD.nColumns();
    //C# throw new com.WDataSci.WDS.WDSException("Error, not implemented on the C# side");
    /* Java >>>
 *
 * //Output
 * List<Model> m = aJniPMML.PMMLMatter.Doc.getModels();
 * org.dmg.pmml.Output mo = m.get(0).getOutput();
 * List<org.dmg.pmml.OutputField> mol = mo.getOutputFields();
 *
 * //Java Set<FieldName> ks = Results.get(0).keySet();
 * //Java FieldName[] ksa = new FieldName[ks.size()];
 * //Java ks.toArray(ksa);
 *
 * //C#
 * FieldName[] ksa = Results.get(0).keyArray();
 *
 * int nResultColumns = ks.size();
 * int nColumns = nResultColumns;
 * int nRows = Results.size();
 *
 * if ( this.ModeMatter == null ) this.ModeMatter = new __ModeMatter();
 *
 * if ( this.ModeMatter.bRepeatInputFields ) {
 * nColumns += nInputMap;
 * this.ModeMatter.nInputFields = nInputMap;
 * this.Column = new FieldMD[nColumns];
 * for (j = 0; j < nInputMap; j++) {
 * this.Column[j] = new FieldMD(aInputRecordSetMD.Column[j]);
 * this.Column[j].Name = aInputRecordSetMD.Column[j].Name + this.ModeMatter.CompositeNameDlm + this.ModeMatter.CompositeInputNameSuffix;
 * }
 * jj = nInputMap;
 * }
 * else {
 * this.Column = new FieldMD[nColumns];
 * jj = 0;
 * }
 *
 * for (k = 0, j = jj; k < nResultColumns; k++, j++) {
 * this.Column[j] = new FieldMD();
 * this.Column[j].Name = ksa[k].toString();
 * this.Column[j].MapToMapKey(ksa[k]);
 *
 * org.dmg.pmml.OutputField of = mol.get(k);
 * org.dmg.pmml.DataType ofdtyp = of.getDataType();
 *
 * if ( nResultColumns != mol.size() )
 * throw new WDSException("Error, difference between result field count and model output field count!");
 *
 * //if the output field does not have a type, does it match an input field? This can happen with a result feature.
 * Boolean found = false;
 * if ( ofdtyp == null ) {
 * for (i = 0; !found && i < nInputMap; i++) {
 * if ( aInputRecordSetMD.Column[i].hasMapKey() && aInputRecordSetMD.Column[i].MapKey.getValue().equals(this.Column[j].Name) ) {
 * found = true;
 * this.Column[j].Copy(aInputRecordSetMD.Column[i]);
 * break;
 * }
 * }
 * if ( found && of.getResultFeature() != null ) {
 * this.Column[j].Name = this.Column[j].Name + this.ModeMatter.CompositeNameDlm + of.getResultFeature().toString();
 * }
 * }
 * if ( !found ) {
 * //If not found as an input field or a feature of one, extract the rest of the X mapping info from the PMML
 * if ( of.getDataType().equals(org.dmg.pmml.DataType.DOUBLE) || of.getDataType().equals(org.dmg.pmml.DataType.FLOAT) ) {
 * this.Column[j].DTyp = FieldMDEnums.eDTyp.Dbl;
 * }
 * else if ( of.getDataType().equals(org.dmg.pmml.DataType.INTEGER) ) {
 * this.Column[j].DTyp = FieldMDEnums.eDTyp.Int;
 * //there may not be a long PMML output type, double check if field is named like an input long
 * for (found = false, i = 0; !found && i < nInputMap; i++) {
 * if ( this.Column[i].hasMapKey() && this.Column[i].MapKey.getValue().equals(this.Column[j].Name) ) {
 * found = true;
 * if ( this.Column[i].DTyp.equals(FieldMDEnums.eDTyp.Lng) ) {
 * this.Column[j].DTyp = FieldMDEnums.eDTyp.Lng;
 * }
 * }
 * }
 * }
 * else if ( of.getDataType().equals(org.dmg.pmml.DataType.DATE) ) {
 * this.Column[j].DTyp = FieldMDEnums.eDTyp.Dte;
 * }
 * else if ( of.getDataType().equals(org.dmg.pmml.DataType.DATE_TIME) ) {
 * this.Column[j].DTyp = FieldMDEnums.eDTyp.DTm;
 * }
 * else if ( of.getDataType().equals(org.dmg.pmml.DataType.STRING) ) {
 * this.Column[j].DTyp = FieldMDEnums.eDTyp.VLS;
 * }
 * else if ( of.getDataType().equals(org.dmg.pmml.DataType.BOOLEAN) ) {
 * throw new WDSException("Error, OutputColumn DataType for Boolean not implemented!");
 * }
 * else {
 * throw new WDSException("Error, un-implemented OutputColumn DataType !");
 * }
 * }
 * }
 *
 * return this;
 * /* <<< Java
 */
}
// Structural equality for RecordSetMD: compares Mode/Type/SchemaType, the per-column
// metadata, and the FileMatter/SchemaMatter/ModeMatter sub-objects (null-tolerant via
// MatchingNullity). bIgnoreMode skips both the Mode and the ModeMatter comparisons.
public Boolean Equals(RecordSetMD arg, Boolean bIgnoreMode)
{
    if (!bIgnoreMode && !this.Mode.equals(arg.Mode)) { return(false); }
    if (!this.Type.equals(arg.Type)) { return(false); }
    if (!this.SchemaType.equals(arg.SchemaType)) { return(false); }
    if (this.Column == null) {
        if (arg.Column != null) { return(false); }
    }
    else {
        if (arg.Column == null) { return(false); }
        if (this.nColumns() != arg.nColumns()) { return(false); }
        // BUG FIX: this loop previously ran outside the null guards, so a pair of null
        // Column arrays could still reach nColumns()/Column[jj] and dereference null.
        // Both counts are equal here, so element-wise comparison is safe.
        for (int jj = 0; jj < this.nColumns(); jj++) {
            if (!this.Column[jj].Equals(arg.Column[jj])) { return(false); }
        }
    }
    if (!MatchingNullity(this.FileMatter, arg.FileMatter)) { return(false); }
    if (this.FileMatter != null && !this.FileMatter.Equals(arg.FileMatter)) { return(false); }
    if (!MatchingNullity(this.SchemaMatter, arg.SchemaMatter)) { return(false); }
    if (this.SchemaMatter != null && !this.SchemaMatter.Equals(arg.SchemaMatter)) { return(false); }
    if (!bIgnoreMode) {
        if (!MatchingNullity(this.ModeMatter, arg.ModeMatter)) { return(false); }
        if (this.ModeMatter != null && !this.ModeMatter.Equals(arg.ModeMatter)) { return(false); }
    }
    return(true);
}
// Serializes a 2-D Object array (e.g. an Excel range's values) into the RecordSet DBB
// buffer: leading "WDSD" fields on layer 0, then one fixed+variable record per row on
// layer 1, with each cell written per its column DTyp. Honors the array's actual lower
// bounds and optionally skips a leading header row. Returns 0 on success.
public int mWriteRecordSet(RecordSetMD inRecordSetMD
    , Object[,] r
    , Boolean bIncludesHeaderRow
    )
{
    try {
        if (this.RecordSet.Buffer == null) {
            throw new com.WDataSci.WDS.WDSException("Error, RecordSet buffer not set before WriteSet!");
        }
        DBB buf = this.RecordSet.Buffer;
        int rowstartindex = r.GetLowerBound(0);
        int nRows = r.GetUpperBound(0) - rowstartindex + 1;
        if (bIncludesHeaderRow) {
            nRows -= 1;
            rowstartindex += 1;
        }
        int nColumns = r.GetUpperBound(1) - r.GetLowerBound(1) + 1;
        if (nColumns != inRecordSetMD.nColumns()) {
            throw new com.WDataSci.WDS.WDSException("Error, Excel ListObject #columns does not match RecordSetMD #columns");
        }
        this.mWritePrepFor(inRecordSetMD, nRows);
        buf.position(0, 0, 0);
        //write leading data
        buf.PutLayerFLenString(0, "WDSD", 8, 0);
        buf.PutLayerLong(0, buf.nDBBRequiredBytes);
        buf.PutLayerLong(0, buf.nDBBLeadingBytes);
        buf.PutLayerLong(0, buf.nDBBFLenBytes);
        buf.PutLayerLong(0, buf.nDBBVLenBytes);
        buf.PutLayerLong(0, buf.nRecords);
        buf.PutLayerLong(0, buf.nRecordFLenBytes);
        buf.PutLayerLong(0, buf.nRecordVLenBytes);
        int bptr = 0;
        //bptr = (int) buf.nDBBLeadingBytes;
        int nInputColumns = inRecordSetMD.nColumns();
        for (int i = 0, ii = rowstartindex; i < nRows; i++, ii++) {
            // advance one fixed-length record per row (bptr is layer-1 relative)
            if (i > 0) {
                bptr += (int)this.RecordSet.Buffer.nRecordFLenBytes;
            }
            long lbptr = bptr;
            buf.position(buf.ptr, (int)bptr, buf.vlenptr);
            for (int j = 0, jj = r.GetLowerBound(1); j < nColumns; j++, jj++) {
                Object obj = r [ii, jj];
                switch (inRecordSetMD.Column [j].DTyp) {
                case FieldMDEnums.eDTyp.Dbl:
                    buf.PutLayerDouble(1, obj);
                    break;
                case FieldMDEnums.eDTyp.Lng:
                    buf.PutLayerLong(1, obj);
                    break;
                // dates/datetimes are stored as doubles (Excel serial-style values)
                case FieldMDEnums.eDTyp.Dte:
                    buf.PutLayerDouble(1, obj);
                    break;
                case FieldMDEnums.eDTyp.DTm:
                    buf.PutLayerDouble(1, obj);
                    break;
                case FieldMDEnums.eDTyp.Int:
                    buf.PutLayerInt(1, obj);
                    break;
                case FieldMDEnums.eDTyp.Str:
                    buf.PutLayerFLenString(1, Convert.ToString(obj), (int)inRecordSetMD.Column [j].ByteMaxLength, 2);
                    break;
                case FieldMDEnums.eDTyp.VLS:
                    buf.PutLayerVLenString(1, Convert.ToString(obj), (int)inRecordSetMD.Column [j].ByteMaxLength, 2);
                    break;
                default:
                    // NOTE(review): placeholder message for an unimplemented DTyp
                    throw new Exception("Hey");
                }
            }
        }
        return(0);
    }
    catch (Exception e) {
        // NOTE(review): message says "output map" but this method writes the record set
        throw new com.WDataSci.WDS.WDSException("Error in writing output map to DBB", e);
    }
}
/* C# >>> */
// Serializes an Excel ListObject (via its Range.Value2 snapshot) into the RecordSet DBB
// buffer, same layout as the Object[,] overload. Value2 is 1-based and row 1 is the
// header, so data rows start at index 2 (iP2) and columns at 1 (jP1).
// Returns 0 on success.
public int mWriteRecordSet(RecordSetMD inRecordSetMD
    , MOIE.ListObject aListObject
    )
{
    try {
        if (this.RecordSet.Buffer == null) {
            throw new com.WDataSci.WDS.WDSException("Error, RecordSet buffer not set before WriteSet!");
        }
        DBB buf = this.RecordSet.Buffer;
        int nRows = aListObject.ListRows.Count;
        int nColumns = aListObject.ListColumns.Count;
        if (nColumns != inRecordSetMD.nColumns()) {
            throw new com.WDataSci.WDS.WDSException("Error, Excel ListObject #columns does not match RecordSetMD #columns");
        }
        this.mWritePrepFor(inRecordSetMD, nRows);
        buf.position(0, 0, 0);
        //write leading data
        buf.PutLayerFLenString(0, "WDSD", 8, 0);
        buf.PutLayerLong(0, buf.nDBBRequiredBytes);
        buf.PutLayerLong(0, buf.nDBBLeadingBytes);
        buf.PutLayerLong(0, buf.nDBBFLenBytes);
        buf.PutLayerLong(0, buf.nDBBVLenBytes);
        buf.PutLayerLong(0, buf.nRecords);
        buf.PutLayerLong(0, buf.nRecordFLenBytes);
        buf.PutLayerLong(0, buf.nRecordVLenBytes);
        int bptr = 0;
        //bptr = (int) buf.nDBBLeadingBytes;
        int nInputColumns = inRecordSetMD.nColumns();
        // single COM round-trip: pull the whole range into a local 2-D array
        object[,] r = aListObject.Range.Value2;
        for (int i = 0, iP2 = 2; i < nRows; i++, iP2++) {
            // advance one fixed-length record per row (bptr is layer-1 relative)
            if (i > 0) {
                bptr += (int)this.RecordSet.Buffer.nRecordFLenBytes;
            }
            long lbptr = bptr;
            buf.position(buf.ptr, (int)bptr, buf.vlenptr);
            for (int j = 0, jP1 = 1; j < nColumns; j++, jP1++) {
                Object obj = r[iP2, jP1];
                switch (inRecordSetMD.Column[j].DTyp) {
                case FieldMDEnums.eDTyp.Dbl:
                    buf.PutLayerDouble(1, obj);
                    break;
                case FieldMDEnums.eDTyp.Lng:
                    buf.PutLayerLong(1, obj);
                    break;
                // dates/datetimes are stored as doubles (Excel serial-style values)
                case FieldMDEnums.eDTyp.Dte:
                    buf.PutLayerDouble(1, obj);
                    break;
                case FieldMDEnums.eDTyp.DTm:
                    buf.PutLayerDouble(1, obj);
                    break;
                case FieldMDEnums.eDTyp.Int:
                    buf.PutLayerInt(1, obj);
                    break;
                case FieldMDEnums.eDTyp.Str:
                    buf.PutLayerFLenString(1, Convert.ToString(obj), (int)inRecordSetMD.Column[j].ByteMaxLength, 2);
                    break;
                case FieldMDEnums.eDTyp.VLS:
                    buf.PutLayerVLenString(1, Convert.ToString(obj), (int)inRecordSetMD.Column[j].ByteMaxLength, 2);
                    break;
                default:
                    // NOTE(review): placeholder message for an unimplemented DTyp
                    throw new Exception("Hey");
                }
            }
        }
        return(0);
    }
    catch (Exception e) {
        // NOTE(review): message says "output map" but this method writes the record set
        throw new com.WDataSci.WDS.WDSException("Error in writing output map to DBB", e);
    }
}
// Serializes an output RecordSet into the RecordSet DBB buffer. When bRepeatInputFields
// is set, each record first carries the raw input values (Records_Orig) for the first
// nInputColumns slots, then the mapped output values; null values are replaced with
// type-specific sentinels (NaN / MinValue). Returns 0 on success.
public int mWriteRecordSet(RecordSetMD outRecordSetMD
    , RecordSet aOutputRecordSet
    , RecordSetMD inRecordSetMD
    , RecordSet aInputRecordSet
    ) //throws com.WDataSci.WDS.WDSException
{
    try {
        if (this.RecordSet.Buffer == null) {
            throw new com.WDataSci.WDS.WDSException("Error, RecordSet buffer not set before WriteSet!");
        }
        DBB buf = this.RecordSet.Buffer;
        int nRows = aOutputRecordSet.Records.size();
        this.mWritePrepFor(outRecordSetMD, nRows);
        buf.position(0, 0, 0);
        int bptr = 0;
        //write leading data
        buf.PutLayerFLenString(0, "WDSD", 8, 0);
        buf.PutLayerLong(0, buf.nDBBRequiredBytes);
        buf.PutLayerLong(0, buf.nDBBLeadingBytes);
        buf.PutLayerLong(0, buf.nDBBFLenBytes);
        buf.PutLayerLong(0, buf.nDBBVLenBytes);
        buf.PutLayerLong(0, buf.nRecords);
        buf.PutLayerLong(0, buf.nRecordFLenBytes);
        buf.PutLayerLong(0, buf.nRecordVLenBytes);
        int nColumns = outRecordSetMD.nColumns();
        int nInputColumns = inRecordSetMD.nColumns();
        bptr = 0;
        for (int i = 0; i < nRows; i++, bptr += (int)buf.nRecordFLenBytes) {
            buf.position(buf.ptr, bptr, buf.vlenptr);
            Map <FieldName, Object> outRow = aOutputRecordSet.Records.get(i);
            Object[] inRow = null;
            if (outRecordSetMD.ModeMatter.bRepeatInputFields) {
                inRow = aInputRecordSet.Records_Orig.get(i);
            }
            // jj walks the output columns; j indexes within the current source
            // (input array first, then resets to 0 for the mapped output values)
            Boolean bInInputSet = outRecordSetMD.ModeMatter.bRepeatInputFields;
            int j = 0;
            int jj = 0;
            for (jj = 0, j = 0; jj < nColumns; jj++, j++) {
                if (bInInputSet && jj == nInputColumns) {
                    bInInputSet = false;
                    j = 0;
                }
                Object lv = 0;
                if (bInInputSet) {
                    lv = inRow[j];
                }
                else {
                    lv = outRow.get(outRecordSetMD.Column[j].MapKey);
                }
                switch (outRecordSetMD.Column[jj].DTyp) {
                //case Dbl:
                case FieldMDEnums.eDTyp.Dbl:
                    if (lv == null) { lv = Double.NaN; }
                    buf.PutLayerDouble(1, (double)lv);
                    break;
                //case Lng:
                case FieldMDEnums.eDTyp.Lng:
                    //Java if ( lv == null ) lv = Long.MIN_VALUE;
                    //C# if (lv == null) { lv = long.MinValue; }
                    buf.PutLayerLong(1, (long)lv);
                    break;
                //case Int:
                case FieldMDEnums.eDTyp.Int:
                    //Java if ( lv == null ) lv = Integer.MIN_VALUE;
                    //C# if (lv == null) { lv = int.MinValue; }
                    buf.PutLayerInt(1, (int)lv);
                    break;
                //case Dte:
                case FieldMDEnums.eDTyp.Dte:
                    if (lv == null) { lv = Double.NaN; }
                    buf.PutLayerDouble(1, (double)lv);
                    break;
                //case DTm:
                case FieldMDEnums.eDTyp.DTm:
                    if (lv == null) { lv = Double.NaN; }
                    buf.PutLayerDouble(1, (double)lv);
                    break;
                //case Str:
                case FieldMDEnums.eDTyp.Str:
                    buf.PutLayerFLenString(1, (String)lv, (int)outRecordSetMD.Column[jj].ByteMaxLength, 2);
                    break;
                //case VLS:
                case FieldMDEnums.eDTyp.VLS:
                    buf.PutLayerVLenString(1, (String)lv, (int)outRecordSetMD.Column[jj].ByteMaxLength, 2);
                    break;
                default:
                    throw new com.WDataSci.WDS.WDSException("Error, unImplemented column type" + outRecordSetMD.Column[jj].DTyp.ToString());
                }
            }
        }
        return(0);
    }
    catch (Exception e) {
        // NOTE(review): message says "output map" but this method writes the record set
        throw new com.WDataSci.WDS.WDSException("Error in writing output map to DBB", e);
    }
}
// Deserializes records from the metadata's DBB buffer into aInputRecordSet: each row
// becomes both a raw Object[] (Records_Orig) and a MapKey-keyed map (Records).
// Sentinel values (NaN/Infinity for doubles, MinValue/MaxValue for integral types)
// are normalized to null; Bln is stored as an int (nonzero == true).
// Throws WDSException (with the failing row index) on any error.
public void mReadRecordSet(RecordSetMD aInputRecordSetMD, RecordSet aInputRecordSet, PrintWriter pw) //throws WDSException
{
    int ri = 0;
    try {
        if (aInputRecordSet.isEmpty()) {
            //Java aInputRecordSet.Records = new ArrayList<>(0);
            //C# aInputRecordSet.Records = new List <Map <FieldName, Object> >(0);
            //Java aInputRecordSet.Records_Orig = new ArrayList<>(0);
            //C# aInputRecordSet.Records_Orig = new List <Object []>(0);
        }
        int nColumns = aInputRecordSetMD.nColumns();
        int i = -1;
        int j = -1;
        int jj = -1;
        DBB buf = aInputRecordSetMD.DBBMatter.RecordSet.Buffer;
        int nRows = (int)aInputRecordSetMD.DBBMatter.RecordSet.Buffer.nRecords;
        /* Java >>>
         *
         * Double lvd = 0.0;
         * Long lvl = 0L;
         * Integer lvi = 0;
         * Boolean lvbln = false;
         * /* <<< Java */
        /* C# >>> */
        double? lvd = 0.0;
        long? lvl = 0L;
        int? lvi = 0;
        Boolean? lvbln = false;
        /* <<< C# */
        String lvs = "";
        int bptr = 0;
        for (i = 0; i < nRows; i++, bptr += (int)aInputRecordSetMD.DBBMatter.RecordSet.Buffer.nRecordFLenBytes) {
            ri = i; // remembered for the error message
            buf.position(buf.ptr, bptr, buf.vlenptr);
            Object[] inputRow_orig = new Object[nColumns];
            //Java Map<FieldName, Object> inputRow = new LinkedHashMap<>();
            //C# Map <FieldName, Object> inputRow = new Map <FieldName, Object>();
            for (jj = 0; jj < nColumns; jj++) {
                switch (aInputRecordSetMD.Column[jj].DTyp) {
                //case Dbl:
                case FieldMDEnums.eDTyp.Dbl:
                    lvd = buf.GetLayerDouble(1);
                    //Java if (lvd==null || lvd.isNaN() || Double.isInfinite(lvd) ) lvd=null;
                    //C# if (lvd == null || Double.IsNaN(lvd.Value) || lvd == Double.MinValue || lvd == Double.MaxValue || Double.IsNegativeInfinity(lvd.Value) || Double.IsPositiveInfinity(lvd.Value)) { lvd = null; }
                    inputRow_orig[jj] = lvd;
                    if (aInputRecordSetMD.Column[jj].hasMapKey()) {
                        inputRow.put(aInputRecordSetMD.Column[jj].MapKey, lvd);
                    }
                    break;
                //case Lng:
                case FieldMDEnums.eDTyp.Lng:
                    lvl = buf.GetLayerLong(1);
                    //Java if ( lvl == null || lvl.equals(Long.MIN_VALUE) || lvl.equals(Long.MAX_VALUE) ) lvl = null;
                    //C# if (lvl == null || lvl == long.MinValue || lvl == long.MaxValue) { lvl = null; }
                    inputRow_orig[jj] = lvl;
                    if (aInputRecordSetMD.Column[jj].hasMapKey()) {
                        inputRow.put(aInputRecordSetMD.Column[jj].MapKey, lvl);
                    }
                    break;
                //case Int:
                case FieldMDEnums.eDTyp.Int:
                    lvi = buf.GetLayerInt(1);
                    //Java if ( lvi == null || lvi.equals(Integer.MIN_VALUE) || lvi.equals(Integer.MAX_VALUE) ) lvi = null;
                    //C# if (lvi == null || lvi == int.MinValue || lvi == int.MaxValue) { lvi = null; }
                    inputRow_orig[jj] = lvi;
                    if (aInputRecordSetMD.Column[jj].hasMapKey()) {
                        inputRow.put(aInputRecordSetMD.Column[jj].MapKey, lvi);
                    }
                    break;
                //case Str:
                case FieldMDEnums.eDTyp.Str:
                    lvs = buf.GetLayerFLenString(1, aInputRecordSetMD.Column[jj].ByteMaxLength);
                    inputRow_orig[jj] = lvs;
                    if (aInputRecordSetMD.Column[jj].hasMapKey()) {
                        inputRow.put(aInputRecordSetMD.Column[jj].MapKey, lvs);
                    }
                    break;
                //case VLS:
                case FieldMDEnums.eDTyp.VLS:
                    lvs = buf.GetLayerVLenString(1, aInputRecordSetMD.Column[jj].ByteMaxLength);
                    inputRow_orig[jj] = lvs;
                    if (aInputRecordSetMD.Column[jj].hasMapKey()) {
                        inputRow.put(aInputRecordSetMD.Column[jj].MapKey, lvs);
                    }
                    break;
                //case Bln:
                case FieldMDEnums.eDTyp.Bln:
                    lvi = buf.GetLayerInt(1);
                    //Java if ( lvi == null || lvi.equals(Integer.MIN_VALUE) || lvi.equals(Integer.MAX_VALUE) ) lvi = null;
                    // BUG FIX: marker below was "//C$", which the C#-side generator would not
                    // recognize, silently dropping this sentinel check from the C# build.
                    //C# if (lvi == null || lvi == int.MinValue || lvi == int.MaxValue) { lvi = null; }
                    lvbln = null;
                    if (lvi != null) {
                        lvbln = (lvi != 0);
                    }
                    inputRow_orig[jj] = lvbln;
                    if (aInputRecordSetMD.Column[jj].hasMapKey()) {
                        inputRow.put(aInputRecordSetMD.Column[jj].MapKey, lvbln);
                    }
                    break;
                //case Byt:
                case FieldMDEnums.eDTyp.Byt:
                default:
                    throw new com.WDataSci.WDS.WDSException("Error, column type not implemented");
                }
            }
            aInputRecordSet.Records.add(inputRow);
            aInputRecordSet.Records_Orig.add(inputRow_orig);
        }
    }
    catch (Exception e) {
        throw new com.WDataSci.WDS.WDSException("Error reading from DBB, row " + ri + ":", e);
    }
}