/// <summary>
/// Reads rows from the input buffer, converts each non-NULL selected column to a
/// FieldValue keyed by field name, and hands the whole batch to the SharePoint
/// accessor (CreateDataset / AddClassRows). Optionally clears existing list rows first.
/// </summary>
/// <param name="inputID">ID of the input being processed.</param>
/// <param name="buffer">Pipeline buffer containing the rows to process.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    // Removed: unused locals (sharepointUrl/sharepointList/sharepointListView/batchSize/batchType),
    // an unused Stopwatch, and an unused XElement declaration.
    if (buffer.EndOfRowset)
    {
        return;
    }
    // Queue the data up for batching by the sharepoint accessor object
    var dataQueue = new List<Dictionary<string, FieldValue>>();
    while (buffer.NextRow())
    {
        var rowData = new Dictionary<string, FieldValue>();
        foreach (var fieldName in _bufferLookup.Keys)
        {
            // NULL source values are simply omitted from the row.
            if (buffer.IsNull(_bufferLookup[fieldName]))
            {
                continue;
            }
            var fieldObj = new FieldValue();
            switch (_bufferLookupDataType[fieldName])
            {
                case DataType.DT_STR:
                case DataType.DT_WSTR:
                    fieldObj.value = buffer.GetString(_bufferLookup[fieldName]);
                    fieldObj.type = "string";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_NTEXT:
                    // DT_NTEXT arrives as a UTF-16 blob.
                    int colDataLength = (int)buffer.GetBlobLength(_bufferLookup[fieldName]);
                    byte[] stringData = buffer.GetBlobData(_bufferLookup[fieldName], 0, colDataLength);
                    fieldObj.value = Encoding.Unicode.GetString(stringData);
                    fieldObj.type = "string";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_R4:
                    fieldObj.value = buffer.GetSingle(_bufferLookup[fieldName]).ToString(_culture);
                    fieldObj.type = "Double";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_CY:
                    fieldObj.value = buffer.GetDecimal(_bufferLookup[fieldName]).ToString(_culture);
                    fieldObj.type = "Double";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_R8:
                    fieldObj.value = buffer.GetDouble(_bufferLookup[fieldName]).ToString(_culture);
                    fieldObj.type = "Double";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_UI1:
                case DataType.DT_I1:
                case DataType.DT_BOOL:
                    fieldObj.value = buffer.GetBoolean(_bufferLookup[fieldName]).ToString(_culture);
                    fieldObj.type = "Boolean";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_UI2:
                case DataType.DT_I2:
                    fieldObj.value = buffer.GetInt16(_bufferLookup[fieldName]).ToString(_culture);
                    fieldObj.type = "Int64";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_UI4:
                case DataType.DT_I4:
                    fieldObj.value = buffer.GetInt32(_bufferLookup[fieldName]).ToString(_culture);
                    fieldObj.type = "Int64";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_UI8:
                case DataType.DT_I8:
                    fieldObj.value = buffer.GetInt64(_bufferLookup[fieldName]).ToString(_culture);
                    fieldObj.type = "Int64";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_GUID:
                    fieldObj.value = buffer.GetGuid(_bufferLookup[fieldName]).ToString();
                    fieldObj.type = "String";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_DBTIMESTAMP:
                    // Sortable "u" format with the space replaced by 'T' (ISO-8601-like).
                    fieldObj.value = buffer.GetDateTime(_bufferLookup[fieldName]).ToString("u").Replace(" ", "T");
                    fieldObj.type = "Datetime";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_DATE:
                    fieldObj.value = buffer.GetDateTime(_bufferLookup[fieldName]).ToString("yyyy-MM-dd");
                    fieldObj.type = "Datetime";
                    rowData.Add(fieldName, fieldObj);
                    break;
            }
        }
        dataQueue.Add(rowData);
    }
    bool fireAgain = false;
    var removeRecords = (Enums.TrueFalseValue)ComponentMetaData.CustomPropertyCollection[C_REMOVERECORDS].Value;
    if (removeRecords == Enums.TrueFalseValue.True)
    {
        ClearRows();
    }
    if (dataQueue.Count > 0)  // Count property, not LINQ Count()
    {
        CreateDataset(dataQueue);
        AddClassRows(dataQueue);
    }
    else
    {
        ComponentMetaData.FireInformation(0, ComponentMetaData.Name, "No rows found to update in destination.", "", 0, ref fireAgain);
    }
}
/// <summary>
/// Deserializes each incoming XML blob into an Order, re-serializes it as JSON
/// (truncated to fit the 4000-character output column), and emits one output row per input row.
/// </summary>
/// <param name="inputID">ID of the input being processed.</param>
/// <param name="inputBuffer">Pipeline buffer containing the rows to process.</param>
public override void ProcessInput(int inputID, PipelineBuffer inputBuffer)
{
    base.ProcessInput(inputID, inputBuffer);
    var serializer = new JavaScriptSerializer();
    while (inputBuffer.NextRow())
    {
        var blobLength = (int)inputBuffer.GetBlobLength(0);
        var xmlBytes = inputBuffer.GetBlobData(0, 0, blobLength);
        var order = XmlSerializer.XmlDeserialize<Order>(Encoding.UTF8.GetString(xmlBytes));
        var json = serializer.Serialize(order);
        // Leave room for the " ..." continuation marker within the 4000-char column.
        const int maxLength = 4000 - 4;
        if (json.Length > maxLength)
        {
            json = json.Substring(0, maxLength) + " ...";
        }
        outputBuffer.AddRow();
        outputBuffer.SetString(0, json);
    }
    if (inputBuffer.EndOfRowset)
    {
        outputBuffer.SetEndOfRowset();
    }
}
/// <summary>
/// For each input row: deserializes the XML blob column into a TSource, runs the
/// typed ProcessInput transformation, and writes the re-serialized result to the
/// asynchronous output's blob column.
/// </summary>
/// <param name="inputID">ID of the input being processed.</param>
/// <param name="buffer">Pipeline buffer containing the rows to process.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    base.ProcessInput(inputID, buffer);
    while (buffer.NextRow())
    {
        var sourceColumn = GetInputColumnId(OutputObjectColumnName);
        var sourceBytes = buffer.GetBlobData(sourceColumn, 0, (int)buffer.GetBlobLength(sourceColumn));
        var sourceObject = XmlSerializer.XmlDeserialize<TSource>(Encoding.UTF8.GetString(sourceBytes));
        var resultObject = ProcessInput(sourceObject);
        var resultBytes = Encoding.UTF8.GetBytes(XmlSerializer.XmlSerialize(resultObject));
        var targetColumn = GetOutputColumnId(OutputObjectColumnName);
        AsynchronousOutputBuffer.AddRow();
        AsynchronousOutputBuffer.AddBlobData(targetColumn, resultBytes);
    }
    if (buffer.EndOfRowset)
    {
        AsynchronousOutputBuffer.SetEndOfRowset();
    }
}
/// <summary>
/// Reads a geometry blob from the buffer and materializes it as a SqlGeometry.
/// Tries WKB first; on a FormatException falls back to the native SqlGeometry
/// binary serialization format. Returns null for NULL columns.
/// </summary>
/// <param name="bufferIndex">Index of the geometry column in the buffer.</param>
/// <param name="buffer">Pipeline buffer to read from.</param>
/// <returns>A valid geometry (MakeValid applied), or null.</returns>
static protected SqlGeometry GetGeometryData(int bufferIndex, PipelineBuffer buffer)
{
    if (buffer.IsNull(bufferIndex))
    {
        return null;
    }
    SqlGeometry geometry = new SqlGeometry();
    byte[] blobData = buffer.GetBlobData(bufferIndex, 0, (int)buffer.GetBlobLength(bufferIndex));
    try
    {
        geometry = SqlGeometry.STGeomFromWKB(new System.Data.SqlTypes.SqlBytes(blobData), 0);
    }
    catch (FormatException)
    {
        // Not WKB — assume SqlGeometry's own serialization format.
        // Streams are now disposed (the original leaked the MemoryStream/BinaryReader).
        using (var memoryStream = new System.IO.MemoryStream(blobData))
        using (var binaryReader = new System.IO.BinaryReader(memoryStream))
        {
            geometry.Read(binaryReader);
        }
    }
    return geometry.MakeValid();
}
/// <summary>
/// Adds a new row to the output buffer and copies the leading input columns
/// (everything before the JSON column) straight through to it.
/// </summary>
/// <param name="inputbuffer">Input buffer positioned on the row to copy.</param>
private void AddOutputRow(ref PipelineBuffer inputbuffer)
{
    // NOTE(review): historical workaround for an intermittent "output row not ready"
    // failure. The catch rethrows, so this loop never actually retries — it only
    // fires a warning before propagating the exception.
    while (true)
    {
        try
        {
            _outputBuffer.AddRow();
            break;
        }
        catch (Exception)
        {
            ComponentMetaData.FireWarning(ComponentConstants.RUNTIME_GENERIC_ERROR, ComponentMetaData.Name, "Outputrow was not ready. ", null, 0);
            throw; // was 'throw e;', which destroyed the original stack trace
        }
    }
    // Copy the inputs into outputs; blob columns need the explicit blob API.
    for (var i = 0; i < _startOfJsonColIndex; i++)
    {
        if (inputbuffer[i] is BlobColumn)
        {
            _outputBuffer.AddBlobData(i, inputbuffer.GetBlobData(i, 0, (int)inputbuffer.GetBlobLength(i)));
        }
        else
        {
            _outputBuffer[i] = inputbuffer[i];
        }
    }
}
/// <summary>
/// Computes the area of the WKB geometry in the input column and writes it
/// to the output column, then directs the row to the default output.
/// </summary>
public override void transform(ref PipelineBuffer buffer, int defaultOutputId, int inputColumnBufferIndex, int outputColumnBufferIndex)
{
    // Read the WKB blob directly; the original pre-allocated a byte[] only to
    // immediately overwrite the reference with GetBlobData's result.
    byte[] geomBytes = buffer.GetBlobData(inputColumnBufferIndex, 0, (int)buffer.GetBlobLength(inputColumnBufferIndex));
    Geometry geom = Geometry.CreateFromWkb(geomBytes);
    double area = geom.GetArea();
    buffer.SetDouble(outputColumnBufferIndex, area);
    buffer.DirectRow(defaultOutputId);
}
/// <summary>
/// Exports the WKB geometry in the input column as KML and writes it to the
/// output column, then directs the row to the default output.
/// </summary>
public override void transform(ref PipelineBuffer buffer, int defaultOutputId, int inputColumnBufferIndex, int outputColumnBufferIndex)
{
    // Read the WKB blob directly (removes the original's throwaway array allocation).
    byte[] geomBytes = buffer.GetBlobData(inputColumnBufferIndex, 0, (int)buffer.GetBlobLength(inputColumnBufferIndex));
    Geometry geom = Geometry.CreateFromWkb(geomBytes);
    string kml = geom.ExportToKML(null); // null = default export options
    buffer.SetString(outputColumnBufferIndex, kml);
    buffer.DirectRow(defaultOutputId);
}
/// <summary>
/// Exports the WKB geometry in the input column as GeoJSON and writes it to
/// the output column, then directs the row to the default output.
/// </summary>
public override void transform(ref PipelineBuffer buffer, int defaultOutputId, int inputColumnBufferIndex, int outputColumnBufferIndex)
{
    // Read the WKB blob directly (removes the original's throwaway array allocation).
    byte[] geomBytes = buffer.GetBlobData(inputColumnBufferIndex, 0, (int)buffer.GetBlobLength(inputColumnBufferIndex));
    Geometry geom = Geometry.CreateFromWkb(geomBytes);
    string json = geom.ExportToJson(null); // null = default export options
    buffer.SetString(outputColumnBufferIndex, json);
    buffer.DirectRow(defaultOutputId);
}
/// <summary>
/// Simplifies the WKB geometry in the input column (topology-preserving, using
/// the configured tolerance) and writes it back over the same blob column.
/// </summary>
public override void transform(ref PipelineBuffer buffer, int defaultOutputId, int inputColumnBufferIndex, int outputColumnBufferIndex)
{
    // Read the WKB blob directly (removes the original's throwaway array allocation).
    byte[] geomBytes = buffer.GetBlobData(inputColumnBufferIndex, 0, (int)buffer.GetBlobLength(inputColumnBufferIndex));
    Geometry geom = Geometry.CreateFromWkb(geomBytes);
    geom = geom.SimplifyPreserveTopology(this.tolerance);
    byte[] simplifiedBytes = new byte[geom.WkbSize()];
    geom.ExportToWkb(simplifiedBytes);
    // Replace the column's blob with the simplified geometry.
    buffer.ResetBlobData(inputColumnBufferIndex);
    buffer.AddBlobData(inputColumnBufferIndex, simplifiedBytes);
    // Direct row to default output
    buffer.DirectRow(defaultOutputId);
}
/// <summary>
/// Flattens the WKB geometry in the input column to 2D (drops Z/M values)
/// and writes it back over the same blob column.
/// </summary>
public override void transform(ref PipelineBuffer buffer, int defaultOutputId, int inputColumnBufferIndex, int outputColumnBufferIndex)
{
    // Read the WKB blob directly (removes the original's throwaway array allocation).
    byte[] geomBytes = buffer.GetBlobData(inputColumnBufferIndex, 0, (int)buffer.GetBlobLength(inputColumnBufferIndex));
    Geometry geom = Geometry.CreateFromWkb(geomBytes);
    geom.FlattenTo2D(); // in-place mutation
    byte[] flattenedBytes = new byte[geom.WkbSize()];
    geom.ExportToWkb(flattenedBytes);
    // Replace the column's blob with the flattened geometry.
    buffer.ResetBlobData(inputColumnBufferIndex);
    buffer.AddBlobData(inputColumnBufferIndex, flattenedBytes);
    // Direct row to default output
    buffer.DirectRow(defaultOutputId);
}
/// <summary>
/// Writes one XML element per input column to the writer: an optional name
/// attribute, the column value as element text (blob text columns decoded from
/// their bytes), or a null-marker attribute when the column is NULL.
/// </summary>
/// <param name="buffer">
/// The buffer, positioned on the row being serialized.
/// </param>
/// <param name="writer">
/// The writer receiving the column elements.
/// </param>
private void FormatElementOutput(PipelineBuffer buffer, XmlWriter writer)
{
    // Iterate the columns in the columnInfos array.
    // (Removed the unused 'columnValue' local from the original.)
    for (int i = 0; i < this.inputColumnInfos.Length; i++)
    {
        ColumnInfo colInfo = this.inputColumnInfos[i];
        writer.WriteStartElement(this.columnElementName);
        if (this.includeColumnName)
        {
            writer.WriteAttributeString(this.nameAttributeName, colInfo.Name);
        }
        // Is the column null?
        if (!buffer.IsNull(colInfo.BufferColumnIndex))
        {
            BufferColumn bufferColumn = buffer.GetColumnInfo(colInfo.BufferColumnIndex);
            if (bufferColumn.DataType == DataType.DT_NTEXT || bufferColumn.DataType == DataType.DT_TEXT)
            {
                byte[] blob = buffer.GetBlobData(colInfo.BufferColumnIndex, 0, (int)buffer.GetBlobLength(colInfo.BufferColumnIndex));
                // DT_NTEXT is UTF-16; DT_TEXT uses the column's code page.
                // The original used Encoding.Default, which mis-decodes both on most systems.
                Encoding encoding = bufferColumn.DataType == DataType.DT_NTEXT
                    ? Encoding.Unicode
                    : Encoding.GetEncoding(bufferColumn.CodePage);
                writer.WriteValue(encoding.GetString(blob));
            }
            else
            {
                writer.WriteValue(buffer[colInfo.BufferColumnIndex].ToString());
            }
        }
        else
        {
            writer.WriteAttributeString(this.nullAttributeName, "true");
        }
        writer.WriteEndElement();
    }
}
/// <summary>
/// Caches one column's value into the values array: null for NULL columns,
/// the raw byte[] for blob types (TEXT/NTEXT/IMAGE), the boxed value otherwise.
/// </summary>
/// <param name="buffer">Buffer positioned on the row being cached.</param>
/// <param name="inputBufferIndex">Column index to read from.</param>
/// <param name="outputBufferIndex">Slot in the values array to write to.</param>
private void CacheColumnData(PipelineBuffer buffer, int inputBufferIndex, int outputBufferIndex)
{
    if (buffer.IsNull(inputBufferIndex))
    {
        values[outputBufferIndex] = null;
        return;
    }
    BufferColumn columnInfo = buffer.GetColumnInfo(inputBufferIndex);
    bool isBlob = columnInfo.DataType == DataType.DT_TEXT
        || columnInfo.DataType == DataType.DT_NTEXT
        || columnInfo.DataType == DataType.DT_IMAGE;
    this.values[outputBufferIndex] = isBlob
        ? buffer.GetBlobData(inputBufferIndex, 0, (int)buffer.GetBlobLength(inputBufferIndex))
        : buffer[inputBufferIndex];
}
/// <summary>
/// For each input row: deserializes the XML blob in column 0 into a TSource,
/// runs the typed ProcessInput transformation, and writes the re-serialized
/// result to column 0 of the output buffer.
/// </summary>
/// <param name="inputID">ID of the input being processed.</param>
/// <param name="inputBuffer">Pipeline buffer containing the rows to process.</param>
public override void ProcessInput(int inputID, PipelineBuffer inputBuffer)
{
    base.ProcessInput(inputID, inputBuffer);
    while (inputBuffer.NextRow())
    {
        var blobLength = (int)inputBuffer.GetBlobLength(0);
        var sourceXml = Encoding.UTF8.GetString(inputBuffer.GetBlobData(0, 0, blobLength));
        var sourceObject = XmlSerializer.XmlDeserialize<TSource>(sourceXml);
        var resultObject = ProcessInput(sourceObject);
        var resultBytes = Encoding.UTF8.GetBytes(XmlSerializer.XmlSerialize(resultObject));
        outputBuffer.AddRow();
        outputBuffer.AddBlobData(0, resultBytes);
    }
    if (inputBuffer.EndOfRowset)
    {
        outputBuffer.SetEndOfRowset();
    }
}
/// <summary>
/// Deserializes each incoming XML blob into an Order, serializes it back out as
/// JSON (capped at the 4000-character output column, with a " ..." marker when
/// truncated), and emits one output row per input row.
/// </summary>
/// <param name="inputID">ID of the input being processed.</param>
/// <param name="inputBuffer">Pipeline buffer containing the rows to process.</param>
public override void ProcessInput(int inputID, PipelineBuffer inputBuffer)
{
    base.ProcessInput(inputID, inputBuffer);
    var jsonSerializer = new JavaScriptSerializer();
    while (inputBuffer.NextRow())
    {
        var xmlBytes = inputBuffer.GetBlobData(0, 0, (int)inputBuffer.GetBlobLength(0));
        var order = XmlSerializer.XmlDeserialize<Order>(Encoding.UTF8.GetString(xmlBytes));
        var json = jsonSerializer.Serialize(order);
        const int limit = 4000 - 4; // room for the continuation marker
        if (json.Length > limit)
        {
            json = json.Substring(0, limit) + " ...";
        }
        outputBuffer.AddRow();
        outputBuffer.SetString(0, json);
    }
    if (inputBuffer.EndOfRowset)
    {
        outputBuffer.SetEndOfRowset();
    }
}
/// <summary>
/// Reads the configured email-address column from each row (decoding blob text
/// types as needed), validates it, and writes the result to the IsValid output column.
/// </summary>
/// <param name="inputID">ID of the input being processed.</param>
/// <param name="buffer">Pipeline buffer containing the rows to process.</param>
public override void ProcessInput(Int32 inputID, PipelineBuffer buffer)
{
    IDTSInput100 input = ComponentMetaData.InputCollection.GetObjectByID(inputID);
    Int32 emailAddressInputColumnId = input.InputColumnCollection[_emailAddressInputColumnName].ID;
    IDTSInputColumn100 emailAddressInputColumn = input.InputColumnCollection.GetObjectByID(emailAddressInputColumnId);
    Int32 emailAddressInputColumnIndex = input.InputColumnCollection.GetObjectIndexByID(emailAddressInputColumnId);
    IDTSOutput100 output = ComponentMetaData.OutputCollection[OUTPUT_NAME];
    Int32 isValidColumnId = output.OutputColumnCollection[IS_VALID_COLUMN_NAME].ID;
    IDTSOutputColumn100 isValidColumn = output.OutputColumnCollection.GetObjectByID(isValidColumnId);
    Int32 isValidColumnIndex = BufferManager.FindColumnByLineageID(input.Buffer, isValidColumn.LineageID);
    while (buffer.NextRow())
    {
        String emailAddress;
        switch (emailAddressInputColumn.DataType)
        {
            case DataType.DT_NTEXT:
                // NTEXT blobs are UTF-16.
                emailAddress = Encoding.Unicode.GetString(buffer.GetBlobData(emailAddressInputColumnIndex, 0, (Int32)buffer.GetBlobLength(emailAddressInputColumnIndex)));
                break;
            case DataType.DT_TEXT:
                // BUG FIX: read the actual blob length from the buffer. The original used
                // the column's declared maximum length, over- or under-reading the blob.
                emailAddress = Encoding.GetEncoding(emailAddressInputColumn.CodePage).GetString(buffer.GetBlobData(emailAddressInputColumnIndex, 0, (Int32)buffer.GetBlobLength(emailAddressInputColumnIndex)));
                break;
            default:
                emailAddress = buffer.GetString(emailAddressInputColumnIndex);
                break;
        }
        buffer.SetBoolean(isValidColumnIndex, this.IsValidEmail(emailAddress));
    }
    // (Removed the original's empty 'if (buffer.EndOfRowset) { }' block.)
}
/// <summary>
/// Splits a multipart WKB geometry into its parts, emitting one output row per
/// part (the blob column is rewritten for each). Single-part geometries pass
/// through unchanged.
/// </summary>
public override void transform(ref PipelineBuffer buffer, int defaultOutputId, int inputColumnBufferIndex, int outputColumnBufferIndex)
{
    // Read the WKB blob directly (removes the original's throwaway array allocation).
    byte[] geomBytes = buffer.GetBlobData(inputColumnBufferIndex, 0, (int)buffer.GetBlobLength(inputColumnBufferIndex));
    Geometry geom = Geometry.CreateFromWkb(geomBytes);
    if (this.isMultipart(geom))
    {
        for (int i = 0; i < geom.GetGeometryCount(); i++)
        {
            Geometry geomPart = geom.GetGeometryRef(i);
            // BUG FIX: size the export buffer from the PART's WKB size. The original
            // used geom.WkbSize() (the whole parent), producing oversized blobs
            // padded with garbage bytes after the part's WKB.
            byte[] partBytes = new byte[geomPart.WkbSize()];
            geomPart.ExportToWkb(partBytes);
            buffer.ResetBlobData(inputColumnBufferIndex);
            buffer.AddBlobData(inputColumnBufferIndex, partBytes);
            buffer.DirectRow(defaultOutputId);
        }
    }
    else
    {
        buffer.DirectRow(defaultOutputId);
    }
}
/// <summary>
/// Reads one column from the buffer using the accessor appropriate for its SSIS
/// data type and returns the value boxed as object. Blob types (IMAGE/NTEXT/TEXT)
/// come back as byte[]. Returns null for NULL columns and unhandled types.
/// </summary>
/// <param name="buffer">Buffer positioned on the row to read.</param>
/// <param name="col">Column metadata (buffer index and SSIS data type).</param>
private static object GetBufferColumnValue(PipelineBuffer buffer, ColumnInfo col)
{
    if (buffer.IsNull(col.BufferIndex))
    {
        return null;
    }
    int index = col.BufferIndex;
    switch (col.ColumnDataType)
    {
        case DataType.DT_BOOL:
            return buffer.GetBoolean(index);
        case DataType.DT_BYTES:
            return buffer.GetBytes(index);
        case DataType.DT_CY:
        case DataType.DT_DECIMAL:
        case DataType.DT_NUMERIC:
            return buffer.GetDecimal(index);
        case DataType.DT_DATE:
        case DataType.DT_DBTIMESTAMP:
        case DataType.DT_DBTIMESTAMP2:
        case DataType.DT_FILETIME:
            return buffer.GetDateTime(index);
        case DataType.DT_DBDATE:
            return buffer.GetDate(index);
        case DataType.DT_DBTIME:
        case DataType.DT_DBTIME2:
            return buffer.GetTime(index);
        case DataType.DT_DBTIMESTAMPOFFSET:
            return buffer.GetDateTimeOffset(index);
        case DataType.DT_GUID:
            return buffer.GetGuid(index);
        case DataType.DT_I1:
            return buffer.GetSByte(index);
        case DataType.DT_I2:
            return buffer.GetInt16(index);
        case DataType.DT_I4:
            return buffer.GetInt32(index);
        case DataType.DT_I8:
            return buffer.GetInt64(index);
        case DataType.DT_UI1:
            return buffer.GetByte(index);
        case DataType.DT_UI2:
            return buffer.GetUInt16(index);
        case DataType.DT_UI4:
            return buffer.GetUInt32(index);
        case DataType.DT_UI8:
            return buffer.GetUInt64(index);
        case DataType.DT_IMAGE:
        case DataType.DT_NTEXT:
        case DataType.DT_TEXT:
            // Blob types: return the raw bytes.
            return buffer.GetBlobData(index, 0, (int)buffer.GetBlobLength(index));
        case DataType.DT_R4:
            return buffer.GetSingle(index);
        case DataType.DT_R8:
            return buffer.GetDouble(index);
        case DataType.DT_STR:
        case DataType.DT_WSTR:
            return buffer.GetString(index);
        default:
            return null;
    }
}
/// <summary>
/// Processes rows in the input buffer: serializes the selected input columns of
/// each row into an XML "row" element and writes the result into each configured
/// output column (as a string for DT_WSTR, as a UTF-16 blob otherwise).
/// </summary>
/// <param name="inputID">ID of the input to process the input rows</param>
/// <param name="buffer">Pipeline buffer with rows to process</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    if (!buffer.EndOfRowset)
    {
        while (buffer.NextRow())
        {
            rowsProcessed++;
            if (outputColumns.Count == 0)
            {
                continue;
            }
            XmlColumn lastCol = null;
            XElement rowElement = null;
            foreach (XmlColumn outCol in outputColumns)
            {
                // Rebuild the <row> element only when this output column's serialization
                // settings differ from the previous one; otherwise reuse the XML already built.
                if (rowElement == null || lastCol == null || lastCol.SerializeLineage != outCol.SerializeLineage || lastCol.SerializeDataType != outCol.SerializeDataType)
                {
                    rowElement = new XElement("row");
                    //Add sourceID and sourceName attributes to the row node if those are specified in the component properties
                    if (!string.IsNullOrEmpty(outCol.SourceID))
                    {
                        rowElement.Add(new XAttribute("sourceID", outCol.SourceID));
                    }
                    if (!string.IsNullOrEmpty(outCol.SourceName))
                    {
                        rowElement.Add(new XAttribute("sourceName", outCol.SourceName));
                    }
                    XElement columnElement = null;
                    //Process XML for selected input columns
                    for (int i = 0; i < outCol.XmlInputColumns.Count; i++)
                    {
                        int colIdx = outCol.XmlInputColumns[i];
                        InputBufferColumnInfo bci = inputBufferColumns[colIdx];
                        BufferColumn col = buffer.GetColumnInfo(bci.Index);
                        byte[] bdata;
                        columnElement = new XElement("Column");
                        //add name, id and lineageId attributes to the column node
                        columnElement.Add(new XAttribute("name", bci.Name));
                        if (outCol.SerializeLineage) //Serialize ID and LineageId
                        {
                            columnElement.Add(new XAttribute("id", bci.ID), new XAttribute("lineageId", bci.LineageID));
                        }
                        if (outCol.SerializeDataType) //Serialize Data Type Information
                        {
                            columnElement.Add(
                                new XAttribute("dataType", bci.DataType.ToString())
                                , new XAttribute("length", bci.Length)
                                , new XAttribute("precision", bci.Precision)
                                , new XAttribute("scale", bci.Scale)
                            );
                        }
                        //if column value is null add isNull attribute to the column node
                        if (buffer.IsNull(bci.Index))
                        {
                            // NOTE(review): the isNull attribute is set, but columnElement is only
                            // added to rowElement in the else branch below — so NULL columns are
                            // silently dropped from the output XML. Confirm this is intended.
                            columnElement.Add(new XAttribute("isNull", true));
                        }
                        else //get data for the column and store them as data of the column node
                        {
                            string colData = string.Empty;
                            switch (col.DataType)
                            {
                                case DataType.DT_BYTES:
                                    bdata = buffer.GetBytes(bci.Index);
                                    colData = BytesToHexString(bdata); //convert binary data to a hexadecimal string
                                    break;
                                case DataType.DT_IMAGE:
                                    bdata = buffer.GetBlobData(bci.Index, 0, (int)buffer.GetBlobLength(bci.Index));
                                    colData = BytesToHexString(bdata); //convert binary data to a hexadecimal string
                                    break;
                                case DataType.DT_NTEXT:
                                    // NTEXT blobs are UTF-16
                                    bdata = buffer.GetBlobData(bci.Index, 0, (int)buffer.GetBlobLength(bci.Index));
                                    colData = Encoding.Unicode.GetString(bdata);
                                    break;
                                case DataType.DT_TEXT:
                                    // TEXT blobs are decoded with the column's code page
                                    bdata = buffer.GetBlobData(bci.Index, 0, (int)buffer.GetBlobLength(bci.Index));
                                    colData = Encoding.GetEncoding(col.CodePage).GetString(bdata);
                                    break;
                                default:
                                    colData = Convert.ToString(buffer[bci.Index], CultureInfo.InvariantCulture);
                                    break;
                            }
                            columnElement.SetValue(colData);
                            rowElement.Add(columnElement);
                        }
                    }
                }
                if (outCol.DataType == DataType.DT_WSTR)
                {
                    string str = rowElement.ToString(outCol.SaveOptions);
                    // Guard against truncation of the fixed-length string output column.
                    if (str.Length > outCol.DataLen)
                    {
                        bool cancel = false;
                        string msg = string.Format("Data Truncation has occured when processing row {0}. Could not write {1} characters into column [{2}] of length {3}", rowsProcessed, str.Length, outCol.Name, outCol.DataLen);
                        this.ComponentMetaData.FireError(0, this.ComponentMetaData.Name, msg, string.Empty, 0, out cancel);
                        if (cancel)
                        {
                            throw new System.Exception(msg);
                        }
                        break;
                    }
                    buffer.SetString(outCol.Index, str);
                }
                else
                {
                    // Non-string output columns receive the XML as a UTF-16 blob.
                    buffer.AddBlobData(outCol.Index, Encoding.Unicode.GetBytes(rowElement.ToString(outCol.SaveOptions)));
                }
                lastCol = outCol;
            }
        }
    }
}
/// <summary>
/// Caches rows from the two inputs (target and join) into in-memory caches and,
/// once both inputs have delivered their end-of-rowset, runs the configured
/// spatial relation and writes each matching row pair to the output buffer.
/// </summary>
/// <param name="inputID">ID of the input this buffer belongs to (target or join).</param>
/// <param name="buffer">Pipeline buffer containing the rows to cache.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    // Select the cache and geometry-column index for whichever input is delivering rows.
    OGRBufferCache cache = null;
    int geomIndex = -1;
    if (inputID == this._targetID)
    {
        cache = this._targetCache;
        geomIndex = this._targetGeomIndex;
    }
    else if (inputID == this._joinID)
    {
        cache = this._joinCache;
        geomIndex = this._joinGeomIndex;
    }
    while (buffer.NextRow())
    {
        // Copy the full row out of the pipeline buffer; blob columns are materialized as byte[].
        object[] bufferRow = new object[buffer.ColumnCount];
        for (int i = 0; i < buffer.ColumnCount; i++)
        {
            if (buffer[i] is BlobColumn)
            {
                int blobSize = (int)buffer.GetBlobLength(i);
                byte[] blob = buffer.GetBlobData(i, 0, blobSize);
                bufferRow[i] = blob;
            }
            else
            {
                bufferRow[i] = buffer[i];
            }
        }
        // The geometry column holds WKB bytes.
        Geometry geom = Geometry.CreateFromWkb((byte[])bufferRow[geomIndex]);
        OGRBufferCacheRow row = new OGRBufferCacheRow(bufferRow, geom);
        cache.add(row);
    }
    // Each input signals EndOfRowset once; after both, the join can run.
    if (buffer.EndOfRowset)
    {
        this._inputCount += 1;
    }
    if (this._inputCount == 2)
    {
        this._targetCache.createSpatialIndex();
        foreach (OGRBufferCacheRow row in this._joinCache)
        {
            // Query the target cache with the configured spatial relation.
            List<OGRBufferCacheRow> results = null;
            switch (this._relation)
            {
                case relationType.contains:
                    results = this._targetCache.contains(row);
                    break;
                case relationType.crosses:
                    results = this._targetCache.crosses(row);
                    break;
                case relationType.equals:
                    results = this._targetCache.equals(row);
                    break;
                case relationType.intersects:
                    results = this._targetCache.intersects(row);
                    break;
                case relationType.overlaps:
                    results = this._targetCache.overlaps(row);
                    break;
                case relationType.touches:
                    results = this._targetCache.touches(row);
                    break;
                case relationType.within:
                    results = this._targetCache.within(row);
                    break;
            }
            if (results.Count > 0)
            {
                // Emit one output row per (join row, matching target row) pair,
                // mapping each side's columns through its columnInfoMap list.
                foreach (OGRBufferCacheRow resultRow in results)
                {
                    this._outputBuffer.AddRow();
                    foreach (columnInfoMap ci in this._joinColumnInfoMapList)
                    {
                        this._outputBuffer[ci.outputBufferIndex] = row[ci.inputBufferIndex];
                    }
                    foreach (columnInfoMap ci in this._targetColumnInfoMapList)
                    {
                        this._outputBuffer[ci.outputBufferIndex] = resultRow[ci.inputBufferIndex];
                    }
                }
            }
        }
        this._outputBuffer.SetEndOfRowset();
    }
}
/// <summary>
/// Caches rows from the two inputs (target and join) into in-memory caches and,
/// once both inputs have delivered their end-of-rowset, runs the configured
/// spatial relation and writes each matching row pair to the output buffer.
/// </summary>
/// <param name="inputID">ID of the input this buffer belongs to (target or join).</param>
/// <param name="buffer">Pipeline buffer containing the rows to cache.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    // Select the cache and geometry-column index for whichever input is delivering rows.
    OGRBufferCache cache = null;
    int geomIndex = -1;
    if (inputID == this._targetID)
    {
        cache = this._targetCache;
        geomIndex = this._targetGeomIndex;
    }
    else if (inputID == this._joinID)
    {
        cache = this._joinCache;
        geomIndex = this._joinGeomIndex;
    }
    while (buffer.NextRow())
    {
        // Copy the full row out of the pipeline buffer; blob columns are materialized as byte[].
        object[] bufferRow = new object[buffer.ColumnCount];
        for (int i = 0; i < buffer.ColumnCount; i++)
        {
            if (buffer[i] is BlobColumn)
            {
                int blobSize = (int)buffer.GetBlobLength(i);
                byte[] blob = buffer.GetBlobData(i, 0, blobSize);
                bufferRow[i] = blob;
            }
            else
            {
                bufferRow[i] = buffer[i];
            }
        }
        // The geometry column holds WKB bytes.
        Geometry geom = Geometry.CreateFromWkb((byte[])bufferRow[geomIndex]);
        OGRBufferCacheRow row = new OGRBufferCacheRow(bufferRow, geom);
        cache.add(row);
    }
    // Each input signals EndOfRowset once; after both, the join can run.
    if (buffer.EndOfRowset)
    {
        this._inputCount += 1;
    }
    if (this._inputCount == 2)
    {
        this._targetCache.createSpatialIndex();
        foreach (OGRBufferCacheRow row in this._joinCache)
        {
            // Query the target cache with the configured spatial relation.
            List<OGRBufferCacheRow> results = null;
            switch (this._relation)
            {
                case relationType.contains:
                    results = this._targetCache.contains(row);
                    break;
                case relationType.crosses:
                    results = this._targetCache.crosses(row);
                    break;
                case relationType.equals:
                    results = this._targetCache.equals(row);
                    break;
                case relationType.intersects:
                    results = this._targetCache.intersects(row);
                    break;
                case relationType.overlaps:
                    results = this._targetCache.overlaps(row);
                    break;
                case relationType.touches:
                    results = this._targetCache.touches(row);
                    break;
                case relationType.within:
                    results = this._targetCache.within(row);
                    break;
            }
            if (results.Count > 0)
            {
                // Emit one output row per (join row, matching target row) pair,
                // mapping each side's columns through its columnInfoMap list.
                foreach (OGRBufferCacheRow resultRow in results)
                {
                    this._outputBuffer.AddRow();
                    foreach (columnInfoMap ci in this._joinColumnInfoMapList)
                    {
                        this._outputBuffer[ci.outputBufferIndex] = row[ci.inputBufferIndex];
                    }
                    foreach (columnInfoMap ci in this._targetColumnInfoMapList)
                    {
                        this._outputBuffer[ci.outputBufferIndex] = resultRow[ci.inputBufferIndex];
                    }
                }
            }
        }
        this._outputBuffer.SetEndOfRowset();
    }
}
/// <summary>
/// For each row, concatenates the byte representation of every selected input
/// column (in column order, NULLs optionally replaced by the configured nullValue
/// marker), computes a SHA-1 hash of the concatenation, writes the hex digest to
/// the output column, and directs the row to the default output.
/// </summary>
/// <param name="inputID">ID of the input being processed.</param>
/// <param name="buffer">Pipeline buffer containing the rows to process.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    if (buffer == null)
    {
        throw new ArgumentNullException("buffer");
    }
    if (buffer.EndOfRowset)
    {
        return;
    }
    IDTSInput100 input = ComponentMetaData.InputCollection.GetObjectByID(inputID);
    int errorOutputID = -1;
    int errorOutputIndex = -1;
    GetErrorOutputInfo(ref errorOutputID, ref errorOutputIndex);
    // The non-error output is whichever of the two outputs is not the error output.
    int defaultOutputId = errorOutputIndex == 0 ? ComponentMetaData.OutputCollection[1].ID : ComponentMetaData.OutputCollection[0].ID;
    while (buffer.NextRow())
    {
        // No columns selected: nothing to hash, pass the row through.
        if (inputColumnInfos.Length == 0)
        {
            buffer.DirectRow(defaultOutputId);
            continue;
        }
        var inputByteBuffer = new byte[1000];
        var bufferUsed = 0;
        for (int i = 0; i < inputColumnInfos.Length; i++)
        {
            var columnToProcessID = inputColumnInfos[i].bufferColumnIndex;
            if (buffer.IsNull(columnToProcessID))
            {
                // NULL columns contribute the configured marker (if any) so that
                // NULL and empty values hash differently.
                if (!string.IsNullOrEmpty(nullValue))
                {
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, nullValue, Encoding.ASCII);
                }
                continue;
            }
            switch (buffer.GetColumnInfo(columnToProcessID).DataType)
            {
                case DataType.DT_BOOL:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetBoolean(columnToProcessID));
                    break;
                case DataType.DT_IMAGE:
                    uint blobLength = buffer.GetBlobLength(columnToProcessID);
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetBlobData(columnToProcessID, 0, (int)blobLength));
                    break;
                case DataType.DT_BYTES:
                    byte[] bytesFromBuffer = buffer.GetBytes(columnToProcessID);
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, bytesFromBuffer);
                    break;
                case DataType.DT_CY:
                case DataType.DT_DECIMAL:
                case DataType.DT_NUMERIC:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDecimal(columnToProcessID));
                    break;
                //case DataType.DT_DBTIMESTAMPOFFSET:
                //    DateTimeOffset dateTimeOffset = buffer.GetDateTimeOffset(columnToProcessID);
                //    Utility.Append(ref inputByteBuffer, ref bufferUsed, dateTimeOffset);
                //    break;
                case DataType.DT_DBDATE:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDate(columnToProcessID), millisecondHandling);
                    break;
                case DataType.DT_DATE:
                case DataType.DT_DBTIMESTAMP:
                case DataType.DT_DBTIMESTAMP2:
                case DataType.DT_FILETIME:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDateTime(columnToProcessID), millisecondHandling);
                    break;
                case DataType.DT_DBTIME:
                case DataType.DT_DBTIME2:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetTime(columnToProcessID));
                    break;
                case DataType.DT_GUID:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetGuid(columnToProcessID));
                    break;
                case DataType.DT_I1:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetSByte(columnToProcessID));
                    break;
                case DataType.DT_I2:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetInt16(columnToProcessID));
                    break;
                case DataType.DT_I4:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetInt32(columnToProcessID));
                    break;
                case DataType.DT_I8:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetInt64(columnToProcessID));
                    break;
                case DataType.DT_STR:
                case DataType.DT_TEXT:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetString(columnToProcessID), Encoding.ASCII);
                    break;
                case DataType.DT_NTEXT:
                case DataType.DT_WSTR:
                    // (Removed the original's unused 'wstr' local that caused a second GetString call.)
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetString(columnToProcessID), Encoding.Unicode);
                    break;
                case DataType.DT_R4:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetSingle(columnToProcessID));
                    break;
                case DataType.DT_R8:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDouble(columnToProcessID));
                    break;
                case DataType.DT_UI1:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetByte(columnToProcessID));
                    break;
                case DataType.DT_UI2:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetUInt16(columnToProcessID));
                    break;
                case DataType.DT_UI4:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetUInt32(columnToProcessID));
                    break;
                case DataType.DT_UI8:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetUInt64(columnToProcessID));
                    break;
                case DataType.DT_EMPTY:
                case DataType.DT_NULL:
                default:
                    break;
            }
        }
        // Hash only the bytes actually written.
        var trimmedByteBuffer = new byte[bufferUsed];
        Array.Copy(inputByteBuffer, trimmedByteBuffer, bufferUsed);
        // Dispose the hash provider (the original leaked it on every row).
        using (var sha1 = new SHA1CryptoServiceProvider())
        {
            var hash = BitConverter.ToString(sha1.ComputeHash(trimmedByteBuffer)).Replace("-", "");
            buffer.SetString(outputColumnInfos[0].bufferColumnIndex, hash);
        }
        // (The original guarded this with an 'isError' flag that was never set.)
        buffer.DirectRow(defaultOutputId);
    }
}
/// <summary>
/// Copies one column from the input buffer to the output buffer: NULL is
/// propagated with SetNull, blob types (TEXT/NTEXT/IMAGE) are copied via the
/// blob API, and everything else is assigned directly.
/// </summary>
/// <param name="buffer">Buffer positioned on the row being copied.</param>
/// <param name="inputBufferIndex">Source column index.</param>
/// <param name="outputBufferIndex">Destination column index in outputBuffer.</param>
private void CopyColumnData(PipelineBuffer buffer, int inputBufferIndex, int outputBufferIndex)
{
    if (buffer.IsNull(inputBufferIndex))
    {
        this.outputBuffer.SetNull(outputBufferIndex);
        return;
    }
    DataType dataType = buffer.GetColumnInfo(inputBufferIndex).DataType;
    if (dataType == DataType.DT_TEXT || dataType == DataType.DT_NTEXT || dataType == DataType.DT_IMAGE)
    {
        byte[] blob = buffer.GetBlobData(inputBufferIndex, 0, (int)buffer.GetBlobLength(inputBufferIndex));
        this.outputBuffer.AddBlobData(outputBufferIndex, blob);
    }
    else
    {
        this.outputBuffer[outputBufferIndex] = buffer[inputBufferIndex];
    }
}
/// <summary>
/// Reads a geometry blob from the buffer and materializes it as a SqlGeometry.
/// Tries WKB first; on a FormatException falls back to the native SqlGeometry
/// binary serialization format. Returns null for NULL columns.
/// </summary>
/// <param name="bufferIndex">Index of the geometry column in the buffer.</param>
/// <param name="buffer">Pipeline buffer to read from.</param>
/// <returns>A valid geometry (MakeValid applied), or null.</returns>
static protected SqlGeometry GetGeometryData(int bufferIndex, PipelineBuffer buffer)
{
    if (buffer.IsNull(bufferIndex))
    {
        return null;
    }
    SqlGeometry geometry = new SqlGeometry();
    byte[] blobData = buffer.GetBlobData(bufferIndex, 0, (int)buffer.GetBlobLength(bufferIndex));
    try
    {
        geometry = SqlGeometry.STGeomFromWKB(new System.Data.SqlTypes.SqlBytes(blobData), 0);
    }
    catch (FormatException)
    {
        // Not WKB — assume SqlGeometry's own serialization format.
        // Streams are now disposed (the original leaked the MemoryStream/BinaryReader).
        using (var memoryStream = new System.IO.MemoryStream(blobData))
        using (var binaryReader = new System.IO.BinaryReader(memoryStream))
        {
            geometry.Read(binaryReader);
        }
    }
    return geometry.MakeValid();
}
/// <summary>
/// Called when a PipelineBuffer is passed to the component. Serializes every selected
/// input column of the current row into a byte buffer, then writes two SHA1 hashes to
/// the output columns: one over the serialized bytes and one over those bytes reversed.
/// Rows with no selected input columns are passed straight to the default output.
/// </summary>
/// <param name="inputID">The ID of the Input that the buffer contains rows for.</param>
/// <param name="buffer">The PipelineBuffer containing the columns defined in the IDTSInput100.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    if (buffer == null)
    {
        throw new ArgumentNullException("buffer");
    }

    if (!buffer.EndOfRowset)
    {
        var errorOutputID = -1;
        var errorOutputIndex = -1;
        GetErrorOutputInfo(ref errorOutputID, ref errorOutputIndex);

        // The non-error output is the default destination for processed rows.
        var defaultOutputId = errorOutputIndex == 0 ? ComponentMetaData.OutputCollection[1].ID : ComponentMetaData.OutputCollection[0].ID;

        // FIX: the hash object is IDisposable and was previously created once per row
        // and never disposed; create it once per buffer and dispose it deterministically.
        using (var sha1 = new SHA1CryptoServiceProvider())
        {
            while (buffer.NextRow())
            {
                // If no input columns have been selected for the component,
                // direct the row to the default output untouched.
                if (inputColumnInfos.Length == 0)
                {
                    buffer.DirectRow(defaultOutputId);
                    continue;
                }

                var isError = false;
                var inputByteBuffer = new byte[1000];
                var bufferUsed = 0;

                // Records, per column, whether the value was NULL ("Y") or not ("N"),
                // plus the length of variable-size values, so different column splits
                // of the same raw bytes do not hash identically.
                var nullHandling = String.Empty;

                foreach (var columnToProcessID in inputColumnInfos.Select(info => info.bufferColumnIndex))
                {
                    if (!buffer.IsNull(columnToProcessID))
                    {
                        nullHandling += "N";
                        switch (buffer.GetColumnInfo(columnToProcessID).DataType)
                        {
                            case DataType.DT_BOOL:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetBoolean(columnToProcessID));
                                break;
                            case DataType.DT_IMAGE:
                                uint blobLength = buffer.GetBlobLength(columnToProcessID);
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetBlobData(columnToProcessID, 0, (int)blobLength));
                                nullHandling += blobLength.ToString(CultureInfo.InvariantCulture);
                                break;
                            case DataType.DT_BYTES:
                                byte[] bytesFromBuffer = buffer.GetBytes(columnToProcessID);
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, bytesFromBuffer);
                                nullHandling += bytesFromBuffer.GetLength(0).ToString(CultureInfo.InvariantCulture);
                                break;
                            case DataType.DT_CY:
                            case DataType.DT_DECIMAL:
                            case DataType.DT_NUMERIC:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDecimal(columnToProcessID));
                                break;
                            case DataType.DT_DBDATE:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDate(columnToProcessID), millisecondHandling);
                                break;
                            case DataType.DT_DATE:
                            case DataType.DT_DBTIMESTAMP:
#if SQL2005
#else
                            case DataType.DT_DBTIMESTAMP2:
                            case DataType.DT_FILETIME:
#endif
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDateTime(columnToProcessID), millisecondHandling);
                                break;
#if SQL2005
#else
                            case DataType.DT_DBTIME:
                            case DataType.DT_DBTIME2:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetTime(columnToProcessID));
                                break;
#endif
                            case DataType.DT_GUID:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetGuid(columnToProcessID));
                                break;
                            case DataType.DT_I1:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetSByte(columnToProcessID));
                                break;
                            case DataType.DT_I2:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetInt16(columnToProcessID));
                                break;
                            case DataType.DT_I4:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetInt32(columnToProcessID));
                                break;
                            case DataType.DT_I8:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetInt64(columnToProcessID));
                                break;
                            case DataType.DT_NTEXT:
                            case DataType.DT_STR:
                            case DataType.DT_TEXT:
                            case DataType.DT_WSTR:
                                String stringFromBuffer = buffer.GetString(columnToProcessID);
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, stringFromBuffer, Encoding.UTF8);
                                nullHandling += stringFromBuffer.Length.ToString(CultureInfo.InvariantCulture);
                                break;
                            case DataType.DT_R4:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetSingle(columnToProcessID));
                                break;
                            case DataType.DT_R8:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDouble(columnToProcessID));
                                break;
                            case DataType.DT_UI1:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetByte(columnToProcessID));
                                break;
                            case DataType.DT_UI2:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetUInt16(columnToProcessID));
                                break;
                            case DataType.DT_UI4:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetUInt32(columnToProcessID));
                                break;
                            case DataType.DT_UI8:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetUInt64(columnToProcessID));
                                break;
                            case DataType.DT_EMPTY:
                            case DataType.DT_NULL:
                            default:
                                break;
                        }
                    }
                    else
                    {
                        nullHandling += "Y";
                    }
                }

                Utility.Append(ref inputByteBuffer, ref bufferUsed, nullHandling, Encoding.UTF8);

                // FIX: hash only the bytes actually written. The original hashed the whole
                // backing array, so the result depended on the array's current capacity and
                // its zero padding rather than on the row data alone (the sibling components
                // in this file hash ComputeHash(buffer, 0, bufferUsed)).
                var fhash = sha1.ComputeHash(inputByteBuffer, 0, bufferUsed);
                var reverseByteBuffer = inputByteBuffer.Take(bufferUsed).Reverse().ToArray();
                var rhash = sha1.ComputeHash(reverseByteBuffer);

                var hash1 = BitConverter.ToString(fhash).Replace("-", "");
                var hash2 = BitConverter.ToString(rhash).Replace("-", "");
                buffer.SetString(outputColumnInfos[0].bufferColumnIndex, hash1);
                buffer.SetString(outputColumnInfos[1].bufferColumnIndex, hash2);

                // If an error occurred the row has already been directed to the error
                // output (when configured); otherwise send it to the default output.
                if (!isError)
                {
                    buffer.DirectRow(defaultOutputId);
                }
            }
        }
    }
}
/// <summary>
/// Reads every row from the input buffer, converts each mapped column to its string
/// representation, and pushes the whole batch to SharePoint: either an update of list
/// items (BatchType.Modification) or a deletion keyed by the rows' "ID" values.
/// Error rows returned by SharePoint are reported and fail the component.
/// </summary>
/// <param name="inputID">The ID of the input the buffer belongs to (not used here).</param>
/// <param name="buffer">The pipeline buffer containing the rows to send.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    // Destination configuration comes from the component's custom properties.
    string sharepointUrl = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTSITEURL].Value;
    string sharepointList = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTLISTNAME].Value;
    string sharepointListView = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTLISTVIEWNAME].Value;
    short batchSize = (short)ComponentMetaData.CustomPropertyCollection[C_BATCHSIZE].Value;
    Enums.BatchType batchType = (Enums.BatchType)ComponentMetaData.CustomPropertyCollection[C_BATCHTYPE].Value;
    if (!buffer.EndOfRowset)
    {
        // Queue the data up for batching by the sharepoint accessor object.
        // Each row becomes a fieldName -> string-value map; NULL columns become "".
        var dataQueue = new List<Dictionary<string, string>>();
        while (buffer.NextRow())
        {
            var rowData = new Dictionary<string, string>();
            foreach (var fieldName in _bufferLookup.Keys)
            {
                switch (_bufferLookupDataType[fieldName])
                {
                    case DataType.DT_STR:
                    case DataType.DT_WSTR:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, string.Empty);
                        else
                            rowData.Add(fieldName, buffer.GetString(_bufferLookup[fieldName]));
                        break;
                    case DataType.DT_NTEXT:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, string.Empty);
                        else
                        {
                            // NTEXT arrives as a BLOB of UTF-16 bytes; decode it back to a string.
                            int colDataLength = (int)buffer.GetBlobLength(_bufferLookup[fieldName]);
                            byte[] stringData = buffer.GetBlobData(_bufferLookup[fieldName], 0, colDataLength);
                            rowData.Add(fieldName, Encoding.Unicode.GetString(stringData));
                        }
                        break;
                    case DataType.DT_R4:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, string.Empty);
                        else
                            rowData.Add(fieldName, buffer.GetSingle(_bufferLookup[fieldName]).ToString(_culture));
                        break;
                    case DataType.DT_R8:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, string.Empty);
                        else
                            rowData.Add(fieldName, buffer.GetDouble(_bufferLookup[fieldName]).ToString(_culture));
                        break;
                    case DataType.DT_UI1:
                    case DataType.DT_I1:
                    case DataType.DT_BOOL:
                        // Single-byte and boolean columns are all read as booleans here.
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, string.Empty);
                        else
                            rowData.Add(fieldName, buffer.GetBoolean(_bufferLookup[fieldName]).ToString(_culture));
                        break;
                    case DataType.DT_UI2:
                    case DataType.DT_I2:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, string.Empty);
                        else
                            rowData.Add(fieldName, buffer.GetInt16(_bufferLookup[fieldName]).ToString(_culture));
                        break;
                    case DataType.DT_UI4:
                    case DataType.DT_I4:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, string.Empty);
                        else
                            rowData.Add(fieldName, buffer.GetInt32(_bufferLookup[fieldName]).ToString(_culture));
                        break;
                    case DataType.DT_UI8:
                    case DataType.DT_I8:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, string.Empty);
                        else
                            rowData.Add(fieldName, buffer.GetInt64(_bufferLookup[fieldName]).ToString(_culture));
                        break;
                    case DataType.DT_GUID:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, String.Empty);
                        else
                            rowData.Add(fieldName, buffer.GetGuid(_bufferLookup[fieldName]).ToString());
                        break;
                    case DataType.DT_DBTIMESTAMP:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, String.Empty);
                        else
                            // ISO 8601: "u" yields "yyyy-MM-dd HH:mm:ssZ"; swap the space for 'T'.
                            rowData.Add(fieldName, buffer.GetDateTime(_bufferLookup[fieldName]).ToString("u").Replace(" ","T"));
                        break;
                }
            }
            dataQueue.Add(rowData);
        }
        bool fireAgain = false;
        if (dataQueue.Count() > 0)
        {
            System.Diagnostics.Stopwatch timer = new System.Diagnostics.Stopwatch();
            timer.Start();
            System.Xml.Linq.XElement resultData;
            if (batchType == Enums.BatchType.Modification)
            {
                // Perform the update
                resultData = SharePointUtility.ListServiceUtility.UpdateListItems(
                    new Uri(sharepointUrl), _credentials, sharepointList, sharepointListView, dataQueue, batchSize);
            }
            else
            {
                // Get the IDs read from the buffer (rows with a blank ID are skipped).
                var idList = from data in dataQueue
                             where data["ID"].Trim().Length > 0
                             select data["ID"];
                // Delete the list items with IDs
                resultData = SharePointUtility.ListServiceUtility.DeleteListItems(
                    new Uri(sharepointUrl), _credentials, sharepointList, sharepointListView, idList);
            }
            timer.Stop();
            // SharePoint reports failures as <errorCode> elements; their parents are the failed rows.
            var errorRows = from result in resultData.Descendants("errorCode")
                            select result.Parent;
            int successRowsWritten = resultData.Elements().Count() - errorRows.Count();
            string infoMsg = string.Format(CultureInfo.InvariantCulture,
                "Affected {0} records in list '{1}' at '{2}'. Elapsed time is {3}ms",
                successRowsWritten, sharepointList, sharepointUrl, timer.ElapsedMilliseconds);
            ComponentMetaData.FireInformation(0, ComponentMetaData.Name, infoMsg, "", 0, ref fireAgain);
            ComponentMetaData.IncrementPipelinePerfCounter(
                DTS_PIPELINE_CTR_ROWSWRITTEN, (uint)successRowsWritten);
            // Shovel any error rows to the error flow
            bool cancel;
            int errorIter = 0;
            foreach (var row in errorRows)
            {
                // Do not flood the error log.
                // NOTE(review): the unconditional throw at the bottom of this loop fires on
                // the FIRST error row, so this ">10" cap appears unreachable — confirm intent.
                errorIter++;
                if (errorIter > 10)
                {
                    ComponentMetaData.FireError(0, ComponentMetaData.Name,
                        "Total of " + errorRows.Count().ToString(_culture) + ", only showing first 10.", "", 0, out cancel);
                    return;
                }
                string idString = "";
                XAttribute attrib = row.Element("row").Attribute("ID");
                if (attrib != null)
                    idString = "(SP ID=" + attrib.Value + ")";
                string errorString = string.Format(CultureInfo.InvariantCulture,
                    "Error on row {0}: {1} - {2} {3}",
                    row.Attribute("ID"), row.Element("errorCode").Value,
                    row.Element("errorDescription").Value, idString);
                ComponentMetaData.FireError(0, ComponentMetaData.Name, errorString, "", 0, out cancel);
                // Need to throw an exception, or else this step's box is green (should be red), even though the flow
                // is marked as failure regardless.
                throw new PipelineProcessException("Errors detected in this component - see SSIS Errors");
            }
        }
        else
        {
            ComponentMetaData.FireInformation(0, ComponentMetaData.Name,
                "No rows found to update in destination.", "", 0, ref fireAgain);
        }
    }
}
/// <summary>
/// Writes each pipeline row to the OGR layer as a feature. Features are committed in
/// transactions of this.batchSize rows (0 = commit only once at the end). A row that
/// throws is either redirected to the error output or fails the component, according
/// to the error disposition of the column being processed at the time.
/// </summary>
/// <param name="inputID">ID of the input the buffer belongs to.</param>
/// <param name="buffer">The pipeline buffer containing the rows to write.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    Layer OGRLayer = this.getLayer();
    FeatureDefn OGRFeatureDef = OGRLayer.GetLayerDefn();
    int batchCount = 0;
    OGRLayer.StartTransaction();

    // Declared outside the row loop so the catch block can see which column was
    // being processed when an exception was thrown mid-row.
    columnInfo ci = new columnInfo();

    while (buffer.NextRow())
    {
        try
        {
            // Commit and restart the transaction at every batch boundary.
            if (this.batchSize != 0 && batchCount % this.batchSize == 0)
            {
                OGRLayer.CommitTransaction();
                OGRLayer.StartTransaction();
                batchCount = 0;
            }

            Feature OGRFeature = new Feature(OGRFeatureDef);
            for (int i = 0; i < this.columnInformation.Count; i++)
            {
                ci = this.columnInformation[i];
                if (buffer.IsNull(ci.bufferColumnIndex))
                {
                    continue; // NULL columns are simply not set on the feature.
                }

                if (ci.geom)
                {
                    // FIX: GetBlobData returns a freshly allocated array, so the original
                    // "new byte[GetBlobLength(...)]" pre-allocation was dead work.
                    byte[] geomBytes = buffer.GetBlobData(ci.bufferColumnIndex, 0, (int)buffer.GetBlobLength(ci.bufferColumnIndex));
                    Geometry geom = Geometry.CreateFromWkb(geomBytes);
                    OGRFeature.SetGeometry(geom);
                }
                else
                {
                    int OGRFieldIndex = OGRFeatureDef.GetFieldIndex(ci.columnName);
                    FieldDefn OGRFieldDef = OGRFeatureDef.GetFieldDefn(OGRFieldIndex);
                    FieldType OGRFieldType = OGRFieldDef.GetFieldType();
                    DateTime dt;
                    TimeSpan ts;
                    switch (OGRFieldType)
                    {
                        case FieldType.OFTDate:
                            dt = buffer.GetDate(ci.bufferColumnIndex);
                            OGRFeature.SetField(OGRFieldIndex, dt.Year, dt.Month, dt.Day, 0, 0, 0, 0);
                            break;
                        case FieldType.OFTDateTime:
                            // NOTE(review): the trailing argument (time-zone flag) is always 0 —
                            // confirm whether local/UTC information should be forwarded here.
                            dt = buffer.GetDateTime(ci.bufferColumnIndex);
                            OGRFeature.SetField(OGRFieldIndex, dt.Year, dt.Month, dt.Day, dt.Hour, dt.Minute, dt.Second, 0);
                            break;
                        case FieldType.OFTInteger:
                            OGRFeature.SetField(OGRFieldIndex, buffer.GetInt32(ci.bufferColumnIndex));
                            break;
                        case FieldType.OFTInteger64:
                            OGRFeature.SetField(OGRFieldIndex, buffer.GetInt64(ci.bufferColumnIndex));
                            break;
                        case FieldType.OFTReal:
                            OGRFeature.SetField(OGRFieldIndex, buffer.GetDouble(ci.bufferColumnIndex));
                            break;
                        case FieldType.OFTTime:
                            ts = buffer.GetTime(ci.bufferColumnIndex);
                            OGRFeature.SetField(OGRFieldIndex, 0, 0, 0, ts.Hours, ts.Minutes, ts.Seconds, 0);
                            break;
                        case FieldType.OFTString:
                        default:
                            OGRFeature.SetField(OGRFieldIndex, buffer.GetString(ci.bufferColumnIndex));
                            break;
                    }
                }
            }

            OGRLayer.CreateFeature(OGRFeature);
            batchCount++;
            // 103 = "rows written" pipeline performance counter, so SSIS reports correct row counts.
            ComponentMetaData.IncrementPipelinePerfCounter(103, 1);
        }
        catch (Exception ex)
        {
            // Redirect or fail based on the error disposition of the column that was
            // being processed when the exception occurred.
            IDTSInputColumn100 inputColumn = ComponentMetaData.InputCollection[0].InputColumnCollection.GetInputColumnByLineageID(ci.lineageID);
            IDTSOutput100 output = ComponentMetaData.OutputCollection[0];
            if (ci.errorDisposition == DTSRowDisposition.RD_RedirectRow)
            {
                int errorCode = System.Runtime.InteropServices.Marshal.GetHRForException(ex);
                buffer.DirectErrorRow(output.ID, errorCode, inputColumn.LineageID);
            }
            else if (ci.errorDisposition == DTSRowDisposition.RD_FailComponent || ci.errorDisposition == DTSRowDisposition.RD_NotUsed)
            {
                OGRLayer.RollbackTransaction();
                ComponentMetaData.FireError(0, ComponentMetaData.Name, ex.Message, string.Empty, 0, out cancel);
                // FIX: preserve the original exception as InnerException so the
                // root-cause stack trace is not lost.
                throw new Exception(ex.Message, ex);
            }
        }
    }

    OGRLayer.CommitTransaction();
}
/// <summary>
/// Gets the binary representation of the selected buffer column, according to its data type.
/// Byte and BLOB columns are returned as their raw bytes; every other type is converted to
/// its string form and returned as Unicode (UTF-16) bytes. A NULL column yields an empty array.
/// </summary>
/// <param name="dataType">The data type of the column.</param>
/// <param name="columnIndex">The index of the column.</param>
/// <param name="buffer">The reference to the input buffer.</param>
/// <returns>The column's value encoded as a byte array.</returns>
private Byte[] GetBytes(DataType dataType, int columnIndex, ref PipelineBuffer buffer)
{
    if (buffer.IsNull(columnIndex))
    {
        // Same result as converting the empty string below: a zero-length array.
        return Encoding.Unicode.GetBytes(String.Empty);
    }

    String text = String.Empty;
    switch (dataType)
    {
        // Raw binary types: pass the bytes through untouched.
        case DataType.DT_BYTES:
            return buffer.GetBytes(columnIndex);
        case DataType.DT_IMAGE:
        case DataType.DT_NTEXT:
        case DataType.DT_TEXT:
            return buffer.GetBlobData(columnIndex, (Int32)0, (Int32)buffer.GetBlobLength(columnIndex));

        // Everything else goes through a string conversion first.
        case DataType.DT_BOOL:
            text = buffer.GetBoolean(columnIndex).ToString();
            break;
        case DataType.DT_DBDATE:
            text = buffer.GetDate(columnIndex).ToString();
            break;
        case DataType.DT_DBTIME:
        case DataType.DT_DBTIME2:
            text = buffer.GetTime(columnIndex).ToString();
            break;
        case DataType.DT_DATE:
        case DataType.DT_DBTIMESTAMP:
        case DataType.DT_DBTIMESTAMP2:
        case DataType.DT_FILETIME:
            text = buffer.GetDateTime(columnIndex).ToString();
            break;
        case DataType.DT_DBTIMESTAMPOFFSET:
            text = buffer.GetDateTimeOffset(columnIndex).ToString();
            break;
        case DataType.DT_DECIMAL:
        case DataType.DT_NUMERIC:
        case DataType.DT_CY:
            text = buffer.GetDecimal(columnIndex).ToString();
            break;
        case DataType.DT_GUID:
            text = buffer.GetGuid(columnIndex).ToString();
            break;
        case DataType.DT_I1:
            text = buffer.GetSByte(columnIndex).ToString();
            break;
        case DataType.DT_I2:
            text = buffer.GetInt16(columnIndex).ToString();
            break;
        case DataType.DT_I4:
            text = buffer.GetInt32(columnIndex).ToString();
            break;
        case DataType.DT_I8:
            text = buffer.GetInt64(columnIndex).ToString();
            break;
        case DataType.DT_R4:
            text = buffer.GetSingle(columnIndex).ToString();
            break;
        case DataType.DT_R8:
            text = buffer.GetDouble(columnIndex).ToString();
            break;
        case DataType.DT_STR:
        case DataType.DT_WSTR:
            text = buffer.GetString(columnIndex);
            break;
        case DataType.DT_UI1:
            text = buffer.GetByte(columnIndex).ToString();
            break;
        case DataType.DT_UI2:
            text = buffer.GetUInt16(columnIndex).ToString();
            break;
        case DataType.DT_UI4:
            text = buffer.GetUInt32(columnIndex).ToString();
            break;
        case DataType.DT_UI8:
            text = buffer.GetUInt64(columnIndex).ToString();
            break;
        default:
            text = String.Empty;
            break;
    }

    return Encoding.Unicode.GetBytes(text);
}
/// <summary>
/// Reads every row from the input buffer into an in-memory queue of fieldName ->
/// FieldValue maps (NULL columns are omitted from a row), optionally clears the
/// destination, then loads the queued rows via CreateDataset/AddClassRows.
/// FIX: removed the dead locals the original declared and never used
/// (sharepointUrl/List/View, batchSize, batchType, resultData, stopwatch).
/// </summary>
/// <param name="inputID">The ID of the input the buffer belongs to (not used here).</param>
/// <param name="buffer">The pipeline buffer containing the rows to load.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    if (!buffer.EndOfRowset)
    {
        // Queue the data up for batching by the accessor object.
        var dataQueue = new List<Dictionary<string, FieldValue>>();
        while (buffer.NextRow())
        {
            var rowData = new Dictionary<string, FieldValue>();
            foreach (var fieldName in _bufferLookup.Keys)
            {
                // NULL columns are simply not included in the row payload.
                if (buffer.IsNull(_bufferLookup[fieldName]))
                {
                    continue;
                }

                FieldValue fieldValue = new FieldValue();
                switch (_bufferLookupDataType[fieldName])
                {
                    case DataType.DT_STR:
                    case DataType.DT_WSTR:
                        fieldValue.value = buffer.GetString(_bufferLookup[fieldName]);
                        fieldValue.type = "string";
                        rowData.Add(fieldName, fieldValue);
                        break;
                    case DataType.DT_NTEXT:
                        // NTEXT arrives as a BLOB of UTF-16 bytes; decode it back to a string.
                        int colDataLength = (int)buffer.GetBlobLength(_bufferLookup[fieldName]);
                        byte[] stringData = buffer.GetBlobData(_bufferLookup[fieldName], 0, colDataLength);
                        fieldValue.value = Encoding.Unicode.GetString(stringData);
                        fieldValue.type = "string";
                        rowData.Add(fieldName, fieldValue);
                        break;
                    case DataType.DT_R4:
                        fieldValue.value = buffer.GetSingle(_bufferLookup[fieldName]).ToString(_culture);
                        fieldValue.type = "Double";
                        rowData.Add(fieldName, fieldValue);
                        break;
                    case DataType.DT_CY:
                        fieldValue.value = buffer.GetDecimal(_bufferLookup[fieldName]).ToString(_culture);
                        fieldValue.type = "Double";
                        rowData.Add(fieldName, fieldValue);
                        break;
                    case DataType.DT_R8:
                        fieldValue.value = buffer.GetDouble(_bufferLookup[fieldName]).ToString(_culture);
                        fieldValue.type = "Double";
                        rowData.Add(fieldName, fieldValue);
                        break;
                    case DataType.DT_UI1:
                    case DataType.DT_I1:
                    case DataType.DT_BOOL:
                        // Single-byte and boolean columns are all read as booleans here.
                        fieldValue.value = buffer.GetBoolean(_bufferLookup[fieldName]).ToString(_culture);
                        fieldValue.type = "Boolean";
                        rowData.Add(fieldName, fieldValue);
                        break;
                    case DataType.DT_UI2:
                    case DataType.DT_I2:
                        fieldValue.value = buffer.GetInt16(_bufferLookup[fieldName]).ToString(_culture);
                        fieldValue.type = "Int64";
                        rowData.Add(fieldName, fieldValue);
                        break;
                    case DataType.DT_UI4:
                    case DataType.DT_I4:
                        fieldValue.value = buffer.GetInt32(_bufferLookup[fieldName]).ToString(_culture);
                        fieldValue.type = "Int64";
                        rowData.Add(fieldName, fieldValue);
                        break;
                    case DataType.DT_UI8:
                    case DataType.DT_I8:
                        fieldValue.value = buffer.GetInt64(_bufferLookup[fieldName]).ToString(_culture);
                        fieldValue.type = "Int64";
                        rowData.Add(fieldName, fieldValue);
                        break;
                    case DataType.DT_GUID:
                        fieldValue.value = buffer.GetGuid(_bufferLookup[fieldName]).ToString();
                        fieldValue.type = "String";
                        rowData.Add(fieldName, fieldValue);
                        break;
                    case DataType.DT_DBTIMESTAMP:
                        // ISO 8601: "u" yields "yyyy-MM-dd HH:mm:ssZ"; swap the space for 'T'.
                        fieldValue.value = buffer.GetDateTime(_bufferLookup[fieldName]).ToString("u").Replace(" ", "T");
                        fieldValue.type = "Datetime";
                        rowData.Add(fieldName, fieldValue);
                        break;
                    case DataType.DT_DATE:
                        fieldValue.value = buffer.GetDateTime(_bufferLookup[fieldName]).ToString("yyyy-MM-dd");
                        fieldValue.type = "Datetime";
                        rowData.Add(fieldName, fieldValue);
                        break;
                }
            }
            dataQueue.Add(rowData);
        }

        bool fireAgain = false;

        // Optionally clear the destination before loading the new batch.
        Enums.TrueFalseValue removeRecords = (Enums.TrueFalseValue)ComponentMetaData.CustomPropertyCollection[C_REMOVERECORDS].Value;
        if (removeRecords == Enums.TrueFalseValue.True)
        {
            ClearRows();
        }

        if (dataQueue.Count > 0)
        {
            CreateDataset(dataQueue);
            AddClassRows(dataQueue);
        }
        else
        {
            ComponentMetaData.FireInformation(0, ComponentMetaData.Name, "No rows found to update in destination.", "", 0, ref fireAgain);
        }
    }
}
/// <summary>
/// Computes one output hash column's value for the current row (run from a worker thread).
/// Serializes each of the output column's selected input columns into a byte buffer,
/// optionally appends a NULL/length marker string, hashes the used portion of the buffer
/// with the configured algorithm, and writes the result into the output column in the
/// configured representation (binary, Base64, or hex string).
/// </summary>
/// <param name="columnToProcess">The output column definition: input columns, hash algorithm, and output format.</param>
/// <param name="buffer">The pipeline buffer positioned on the current row.</param>
/// <param name="safeNullHandling">When true, the per-column NULL/length marker string is appended to the hashed bytes so different NULL patterns cannot collide.</param>
/// <param name="millisecondHandling">Passed to Utility.Append for date/time values; controls how sub-second precision is serialized.</param>
public static void CalculateHash(OutputColumn columnToProcess, PipelineBuffer buffer, bool safeNullHandling, bool millisecondHandling)
{
    byte[] inputByteBuffer = new byte[1000];  // grown by Utility.Append as needed
    Int32 bufferUsed = 0;                     // number of meaningful bytes in inputByteBuffer
    string nullHandling = String.Empty;       // "N"/"Y" per column, plus lengths of variable-size values
    uint blobLength = 0;
    Int32 columnToProcessID = 0;
    DataType columnDataType = DataType.DT_NULL;

    // Step through each input column for that output column
    for (int j = 0; j < columnToProcess.Count; j++)
    {
        columnToProcessID = columnToProcess[j].ColumnId;
        // Only call this once, as it appears to be "slow".
        columnDataType = columnToProcess[j].ColumnDataType;
        // Skip NULL values, as they "don't" exist...
        if (!buffer.IsNull(columnToProcessID))
        {
            nullHandling += "N";
            // Serialize the value using the getter that matches the column's data type.
            switch (columnDataType) //buffer.GetColumnInfo(columnToProcessID).DataType)
            {
                case DataType.DT_BOOL:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetBoolean(columnToProcessID));
                    break;
                case DataType.DT_IMAGE:
                    // Variable-size: record the length in the marker string as well.
                    blobLength = buffer.GetBlobLength(columnToProcessID);
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetBlobData(columnToProcessID, 0, (int)blobLength));
                    nullHandling += blobLength.ToString();
                    break;
                case DataType.DT_BYTES:
                    byte[] bytesFromBuffer = buffer.GetBytes(columnToProcessID);
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, bytesFromBuffer);
                    nullHandling += bytesFromBuffer.GetLength(0).ToString();
                    break;
                case DataType.DT_CY:
                case DataType.DT_DECIMAL:
                case DataType.DT_NUMERIC:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDecimal(columnToProcessID));
                    break;
                case DataType.DT_DBTIMESTAMPOFFSET:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDateTimeOffset(columnToProcessID), millisecondHandling);
                    break;
                case DataType.DT_DBDATE:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDate(columnToProcessID), millisecondHandling);
                    break;
                case DataType.DT_DATE:
                case DataType.DT_DBTIMESTAMP:
                case DataType.DT_DBTIMESTAMP2:
                case DataType.DT_FILETIME:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDateTime(columnToProcessID), millisecondHandling);
                    break;
                case DataType.DT_DBTIME:
                case DataType.DT_DBTIME2:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetTime(columnToProcessID));
                    break;
                case DataType.DT_GUID:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetGuid(columnToProcessID));
                    break;
                case DataType.DT_I1:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetSByte(columnToProcessID));
                    break;
                case DataType.DT_I2:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetInt16(columnToProcessID));
                    break;
                case DataType.DT_I4:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetInt32(columnToProcessID));
                    break;
                case DataType.DT_I8:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetInt64(columnToProcessID));
                    break;
                case DataType.DT_NTEXT:
                case DataType.DT_STR:
                case DataType.DT_TEXT:
                case DataType.DT_WSTR:
                    // Variable-size: record the string length in the marker string as well.
                    String stringFromBuffer = buffer.GetString(columnToProcessID);
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, stringFromBuffer, Encoding.UTF8);
                    nullHandling += stringFromBuffer.Length.ToString();
                    break;
                case DataType.DT_R4:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetSingle(columnToProcessID));
                    break;
                case DataType.DT_R8:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDouble(columnToProcessID));
                    break;
                case DataType.DT_UI1:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetByte(columnToProcessID));
                    break;
                case DataType.DT_UI2:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetUInt16(columnToProcessID));
                    break;
                case DataType.DT_UI4:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetUInt32(columnToProcessID));
                    break;
                case DataType.DT_UI8:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetUInt64(columnToProcessID));
                    break;
                case DataType.DT_EMPTY:
                case DataType.DT_NULL:
                default:
                    // Typeless/unknown columns contribute nothing to the hash input.
                    break;
            }
        }
        else
        {
            nullHandling += "Y";
        }
    }
    if (safeNullHandling)
    {
        Utility.Append(ref inputByteBuffer, ref bufferUsed, nullHandling, Encoding.UTF8);
    }
    // Ok, we have all the data in a Byte Buffer
    // So now generate the Hash
    byte[] hash;
    switch (columnToProcess.HashType)
    {
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.None:
            hash = new byte[1];
            break;
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.MD5:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.RipeMD160:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.SHA1:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.SHA256:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.SHA384:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.SHA512:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.CRC32:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.CRC32C:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.FNV1a32:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.FNV1a64:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.MurmurHash3a:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.xxHash:
            // Hash only the used portion of the buffer, not its full capacity.
            hash = columnToProcess.HashObject.ComputeHash(inputByteBuffer, 0, bufferUsed);
            break;
        default:
            hash = new byte[1];
            break;
    }
    // Write the hash into the output column in the configured representation.
    switch (columnToProcess.OutputHashDataType)
    {
        case MultipleHash.OutputTypeEnumerator.Binary:
            buffer.SetBytes(columnToProcess.OutputColumnId, hash);
            break;
        case MultipleHash.OutputTypeEnumerator.Base64String:
            buffer.SetString(columnToProcess.OutputColumnId, System.Convert.ToBase64String(hash, 0, hash.Length));
            break;
        case MultipleHash.OutputTypeEnumerator.HexString:
            buffer.SetString(columnToProcess.OutputColumnId, String.Format("0x{0}", ByteArrayToHexViaLookup32(hash)));
            break;
    }
}
/// <summary>
/// Retrieves binary data from the given pipeline input column of any type.
/// BLOB and byte columns return their raw bytes; every other type is converted to its
/// string form and returned as Unicode (UTF-16) bytes. NULL columns and unknown data
/// types yield a single DEFAULT_BYTE sentinel.
/// </summary>
/// <param name="buffer">The pipeline buffer positioned on the current row.</param>
/// <param name="inputColumn">The input column metadata (used for its data type).</param>
/// <param name="inputColumnIndex">The column's index in the buffer.</param>
/// <returns>The column value as a byte array.</returns>
private Byte[] GetColumnValueBytes(PipelineBuffer buffer, IDTSInputColumn100 inputColumn, Int32 inputColumnIndex)
{
    if (buffer.IsNull(inputColumnIndex))
    {
        return (new Byte[] { DEFAULT_BYTE });
    }
    else
    {
        switch (inputColumn.DataType)
        {
            case DataType.DT_TEXT:
            case DataType.DT_NTEXT:
            case DataType.DT_IMAGE:
                // FIX: read the actual stored blob length from the buffer. The original
                // passed inputColumn.Length (the column's declared metadata length),
                // which need not match the real BLOB size for this row.
                return (buffer.GetBlobData(inputColumnIndex, 0, (Int32)buffer.GetBlobLength(inputColumnIndex)));
            case DataType.DT_BYTES:
                return (buffer.GetBytes(inputColumnIndex));
            case DataType.DT_STR:
            case DataType.DT_WSTR:
                return (Encoding.Unicode.GetBytes(buffer.GetString(inputColumnIndex)));
            case DataType.DT_BOOL:
                return (Encoding.Unicode.GetBytes(buffer.GetBoolean(inputColumnIndex).ToString()));
            case DataType.DT_DBDATE:
                return (Encoding.Unicode.GetBytes(buffer.GetDate(inputColumnIndex).ToString()));
            case DataType.DT_DBTIMESTAMP:
            case DataType.DT_DBTIMESTAMP2:
            case DataType.DT_FILETIME:
                return (Encoding.Unicode.GetBytes(buffer.GetDateTime(inputColumnIndex).ToString()));
            case DataType.DT_DBTIME:
            case DataType.DT_DBTIME2:
                return (Encoding.Unicode.GetBytes(buffer.GetTime(inputColumnIndex).ToString()));
            case DataType.DT_DBTIMESTAMPOFFSET:
                return (Encoding.Unicode.GetBytes(buffer.GetDateTimeOffset(inputColumnIndex).ToString()));
            case DataType.DT_CY:
            case DataType.DT_DECIMAL:
            case DataType.DT_NUMERIC:
                return (Encoding.Unicode.GetBytes(buffer.GetDecimal(inputColumnIndex).ToString()));
            case DataType.DT_I1:
                return (Encoding.Unicode.GetBytes(buffer.GetSByte(inputColumnIndex).ToString()));
            case DataType.DT_I2:
                return (Encoding.Unicode.GetBytes(buffer.GetInt16(inputColumnIndex).ToString()));
            case DataType.DT_I4:
                return (Encoding.Unicode.GetBytes(buffer.GetInt32(inputColumnIndex).ToString()));
            case DataType.DT_I8:
                return (Encoding.Unicode.GetBytes(buffer.GetInt64(inputColumnIndex).ToString()));
            case DataType.DT_UI1:
                return (Encoding.Unicode.GetBytes(buffer.GetByte(inputColumnIndex).ToString()));
            case DataType.DT_UI2:
                return (Encoding.Unicode.GetBytes(buffer.GetUInt16(inputColumnIndex).ToString()));
            case DataType.DT_UI4:
                return (Encoding.Unicode.GetBytes(buffer.GetUInt32(inputColumnIndex).ToString()));
            case DataType.DT_UI8:
                return (Encoding.Unicode.GetBytes(buffer.GetUInt64(inputColumnIndex).ToString()));
            case DataType.DT_R4:
                return (Encoding.Unicode.GetBytes(buffer.GetSingle(inputColumnIndex).ToString()));
            case DataType.DT_R8:
                return (Encoding.Unicode.GetBytes(buffer.GetDouble(inputColumnIndex).ToString()));
            case DataType.DT_GUID:
                return (Encoding.Unicode.GetBytes(buffer.GetGuid(inputColumnIndex).ToString()));
            default:
                return (new Byte[] { DEFAULT_BYTE });
        }
    }
}
/// <summary>
/// Serializes one column of the current row into the hash memory buffer's BinaryWriter.
/// Field framing: a 0 start byte, then a NULL flag byte (1 = NULL, 0 = value), then for
/// most types a 4-byte length followed by the value bytes. Date/time, offset, and GUID
/// types are written directly via the BinaryWriter instead of the length-prefixed path.
/// </summary>
/// <param name="hCol">Hash column settings (used for string trimming via TrimString).</param>
/// <param name="bci">Input buffer column info; also used as the lock object while reading.</param>
/// <param name="buffer">The pipeline buffer positioned on the current row.</param>
/// <param name="mb">Per-hash memory buffers providing the BinaryWriter target.</param>
/// <param name="sw">NOTE(review): this StreamWriter parameter is never used in this method — confirm whether it can be dropped at the call sites.</param>
private static void WriteColumnToStreamBinary(HashColumnsTransformation.HashColumnInfo hCol, HashColumnsTransformation.InputBufferColumnInfo bci, PipelineBuffer buffer, HashMemoryBuffers mb, StreamWriter sw)
{
    int ci = bci.Index;
    byte[] bdata = null;
    byte[] decimalArray = new byte[16]; //Array for storing decimal numbers
    string asciiStr = null;
    BinaryWriter bw = mb.BinaryWriter;
    BufferColumn col = buffer.GetColumnInfo(bci.Index);
    bw.Write((byte)0);//Write byte (0) as start of field;
    if (buffer.IsNull(bci.Index))
    {
        bw.Write((byte)1); //Write 1 representing NULL
        bw.Write(0); //write length of 0 for NULL
    }
    else
    {
        bw.Write((byte)0); //write 0 representing NOT NULL
        //Get buffer data
        // NOTE(review): the lock on bci suggests the buffer is read from multiple
        // threads — confirm that locking the column-info object is sufficient here.
        lock (bci)
        {
            switch (col.DataType)
            {
                case DataType.DT_BOOL:
                    bdata = BitConverter.GetBytes(buffer.GetBoolean(ci));
                    break;
                case DataType.DT_BYTES:
                    bdata = buffer.GetBytes(ci);
                    break;
                case DataType.DT_IMAGE:
                    bdata = buffer.GetBlobData(ci, 0, (int)buffer.GetBlobLength(ci));
                    break;
                case DataType.DT_CY:
                case DataType.DT_DECIMAL:
                case DataType.DT_NUMERIC:
                    // Decimals are serialized via their invariant string form.
                    bdata = Encoding.ASCII.GetBytes(buffer.GetDecimal(ci).ToString(CultureInfo.InvariantCulture));
                    break;
                // The date/time branches below write directly through the BinaryWriter,
                // bypassing the length-prefixed bdata path (fixed-size representations).
                case DataType.DT_DATE:
                case DataType.DT_DBTIMESTAMP:
                case DataType.DT_DBTIMESTAMP2:
                    bw.Write(buffer.GetDateTime(ci).ToBinary());
                    break;
                case DataType.DT_FILETIME:
                    bw.Write(buffer.GetInt64(ci));
                    break;
                case DataType.DT_DBDATE:
                    bw.Write(buffer.GetDate(ci).ToBinary());
                    break;
                case DataType.DT_DBTIME:
                case DataType.DT_DBTIME2:
                    bw.Write(buffer.GetTime(ci).Ticks);
                    break;
                case DataType.DT_DBTIMESTAMPOFFSET:
                    // 16 bytes: the DateTime's binary form followed by the offset's ticks.
                    var dtoffset = buffer.GetDateTimeOffset(ci);
                    BitConverter.GetBytes(dtoffset.DateTime.ToBinary()).CopyTo(decimalArray, 0);
                    BitConverter.GetBytes(dtoffset.Offset.Ticks).CopyTo(decimalArray, 8);
                    bw.Write(decimalArray);
                    break;
                case DataType.DT_EMPTY:
                case DataType.DT_NULL:
                    bdata = new byte[0];
                    break;
                case DataType.DT_GUID:
                    // NOTE(review): written without a length prefix, unlike the bdata path —
                    // confirm this asymmetry is intentional.
                    bw.Write(Encoding.ASCII.GetBytes(buffer.GetGuid(ci).ToString()));
                    break;
                // Integer and floating-point types: serialized via their invariant
                // string form, converted to ASCII bytes after the switch.
                case DataType.DT_I1:
                    asciiStr = buffer.GetSByte(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_I2:
                    asciiStr = buffer.GetInt16(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_I4:
                    asciiStr = buffer.GetInt32(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_I8:
                    asciiStr = buffer.GetInt64(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_R4:
                    asciiStr = buffer.GetSingle(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_R8:
                    asciiStr = buffer.GetDouble(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_UI1:
                    asciiStr = buffer.GetByte(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_UI2:
                    asciiStr = buffer.GetUInt16(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_UI4:
                    asciiStr = buffer.GetUInt32(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_UI8:
                    asciiStr = buffer.GetUInt64(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_NTEXT:
                case DataType.DT_TEXT:
                case DataType.DT_STR:
                case DataType.DT_WSTR:
                    // Strings are trimmed per the hash column's settings, then UTF-16 encoded.
                    bdata = Encoding.Unicode.GetBytes(TrimString(hCol, buffer.GetString(ci)));
                    break;
                default:
                    bdata = new byte[0];
                    break;
            }
        }
        if (asciiStr != null)
        {
            bdata = Encoding.ASCII.GetBytes(asciiStr);
        }
        if (bdata != null)
        {
            bw.Write(bdata.Length); //write length of buffer
            bw.Write(bdata); //write bufferdata;
        }
    }
}
/// <summary>
/// Extracts this column's value from the given pipeline buffer, converted to a
/// CLR object suitable for downstream storage. Returns null for NULL columns,
/// for DT_NULL/DT_EMPTY columns, and for DT_IMAGE blobs at or above the 2 MB
/// limit (those rows are redirected to the error output instead).
/// </summary>
/// <param name="buffer">Pipeline buffer positioned on the current row.</param>
/// <returns>The converted value, or null as described above.</returns>
public object GetColumnValue(PipelineBuffer buffer)
{
    // NULL in the buffer maps straight to a null result.
    if (buffer.IsNull(Index))
    {
        return null;
    }

    object value = null;
    switch (DataType)
    {
        case DataType.DT_BOOL:
            value = buffer[Index];
            break;

        case DataType.DT_DATE:
        case DataType.DT_DBDATE:
        case DataType.DT_DBTIME:
        case DataType.DT_DBTIME2:
        case DataType.DT_DBTIMESTAMP:
        case DataType.DT_DBTIMESTAMP2:
            // Date/time family comes back as the buffer's native boxed value.
            value = buffer[Index];
            break;

        case DataType.DT_NTEXT:
        case DataType.DT_TEXT:
            value = buffer.GetString(Index);
            break;

        case DataType.DT_IMAGE:
            uint blobLength = buffer.GetBlobLength(Index);
            if (blobLength >= 2000000)
            {
                // The Limit of Entity size in CloudDB is 2M; reject the current
                // record (value stays null) if the blob exceeds the limit.
                buffer.DirectErrorRow(_errorOutputId, HResults.DTS_E_LOBLENGTHLIMITEXCEEDED, ID);
            }
            else
            {
                value = buffer.GetBlobData(Index, 0, (int)blobLength);
            }
            break;

        case DataType.DT_BYTES:
            value = buffer.GetBytes(Index);
            break;

        case DataType.DT_I1:
        case DataType.DT_I2:
        case DataType.DT_I4:
        case DataType.DT_I8:
        case DataType.DT_UI1:
        case DataType.DT_UI2:
        case DataType.DT_UI4:
        case DataType.DT_UI8:
        case DataType.DT_R4:
        case DataType.DT_R8:
        case DataType.DT_DECIMAL:
        case DataType.DT_NUMERIC:
        case DataType.DT_CY:
            // Every numeric type is normalized through an invariant decimal parse.
            value = decimal.Parse(buffer[Index].ToString(), CultureInfo.InvariantCulture);
            break;

        case DataType.DT_NULL:
        case DataType.DT_EMPTY:
            // Intentionally left null.
            break;

        default:
            value = buffer[Index].ToString();
            break;
    }

    return value;
}
/// <summary>
/// Writes one column to the hash source stream as delimited Unicode text.
/// Layout: optional field delimiter (when not the first column), then for the
/// "safe" implementation types a null flag (1/0) plus delimiter, then the value.
/// Binary types are hex-encoded; strings are trimmed per column settings and,
/// for UnicodeStringDelmitedSafe, length-prefixed.
/// FIX: the non-NET35 DT_DBTIME/DT_DBTIME2 cases called TimeSpan.ToString("HH:mm:ss"),
/// which throws FormatException at runtime ("HH" is not a TimeSpan format specifier
/// and ':' must be escaped in TimeSpan formats). Both targets now format through a
/// DateTime built from the TimeSpan's ticks, exactly as the NET35 path already did,
/// so the emitted text (and hashes) are unchanged from the working path.
/// </summary>
/// <param name="columnPosition">Zero-based position of this column within the hash; controls the leading delimiter.</param>
/// <param name="hCol">Hash column settings (implementation type, delimiters, null replacement, trimming).</param>
/// <param name="bci">Input buffer column info; also serves as the lock object.</param>
/// <param name="buffer">Pipeline buffer positioned on the current row.</param>
/// <param name="mb">Memory buffers providing the StreamWriter written to.</param>
private static void WriteColumnToStreamUnicodeDelimited(int columnPosition, HashColumnsTransformation.HashColumnInfo hCol, HashColumnsTransformation.InputBufferColumnInfo bci, PipelineBuffer buffer, HashMemoryBuffers mb)
{
    int ci = bci.Index;
    byte[] bdata = null;
    string strData = null;
    bool writeLen = false;
    bool trim = false;
    StreamWriter sw = mb.StreamWriter;
    BufferColumn col = buffer.GetColumnInfo(bci.Index);
    bool isNull = buffer.IsNull(ci);

    // When not first field, write field delimiter
    if (columnPosition > 0)
    {
        sw.Write(hCol.HashFieldsDelimiter);
    }

    // The two "safe" implementation types carry an explicit null flag per field.
    bool nullSafe = hCol.HashImplmentationType == HashColumnsTransformation.HashImplementationType.UnicodeStringDelimitedNullSafe
                 || hCol.HashImplmentationType == HashColumnsTransformation.HashImplementationType.UnicodeStringDelmitedSafe;

    if (isNull)
    {
        if (nullSafe)
        {
            // Safe handling: write 1 indicating null, followed by the field delimiter.
            sw.Write(1);
            sw.Write(hCol.HashFieldsDelimiter);
        }
        else
        {
            // Non-safe handling: write the configured replacement value instead.
            sw.Write(hCol.NullReplacement);
        }
        sw.Flush();
        return; // null value fully written; no further processing needed
    }
    else if (nullSafe)
    {
        // Safe handling: write 0 indicating a non-null value; the value follows the delimiter.
        sw.Write(0);
        sw.Write(hCol.HashFieldsDelimiter);
    }

    //Get buffer data
    lock (bci)
    {
        switch (col.DataType)
        {
            case DataType.DT_BOOL:
                sw.Write(buffer.GetBoolean(ci) ? 1 : 0);
                break;
            case DataType.DT_BYTES:
                bdata = buffer.GetBytes(ci);
                break;
            case DataType.DT_IMAGE:
                bdata = buffer.GetBlobData(ci, 0, (int)buffer.GetBlobLength(ci));
                break;
            case DataType.DT_CY:
            case DataType.DT_DECIMAL:
            case DataType.DT_NUMERIC:
                strData = buffer.GetDecimal(ci).ToString(CultureInfo.InvariantCulture);
                break;
            case DataType.DT_DATE:
                // NOTE(review): "yyyy-MM-dd HH" (date + bare hour) looks unintended —
                // left unchanged because altering it would change existing hash values.
                strData = buffer.GetDateTime(ci).ToString("yyyy-MM-dd HH");
                break;
            case DataType.DT_DBTIMESTAMP:
                strData = buffer.GetDateTime(ci).ToString("yyyy-MM-dd HH:mm:ss.fff");
                break;
            case DataType.DT_DBTIMESTAMP2:
                strData = buffer.GetDateTime(ci).ToString("yyyy-MM-dd HH:mm:ss.fffffff");
                break;
            case DataType.DT_FILETIME:
                sw.Write(buffer.GetInt64(ci));
                break;
            case DataType.DT_DBDATE:
                strData = buffer.GetDate(ci).ToString("yyyy-MM-dd");
                break;
            case DataType.DT_DBTIME:
                // FIX: GetTime returns a TimeSpan; TimeSpan.ToString("HH:mm:ss") throws
                // FormatException. Format via DateTime (same output as the old NET35 path).
                strData = new DateTime(buffer.GetTime(ci).Ticks).ToString("HH:mm:ss");
                break;
            case DataType.DT_DBTIME2:
                strData = new DateTime(buffer.GetTime(ci).Ticks).ToString("HH:mm:ss.fffffff");
                break;
            case DataType.DT_DBTIMESTAMPOFFSET:
                strData = buffer.GetDateTimeOffset(ci).ToString("yyyy-MM-dd HH:mm:ss.fffffff zzz");
                break;
            case DataType.DT_EMPTY:
            case DataType.DT_NULL:
                bdata = new byte[0];
                break;
            case DataType.DT_GUID:
                strData = buffer.GetGuid(ci).ToString();
                break;
            case DataType.DT_I1:
                sw.Write(buffer.GetSByte(ci));
                break;
            case DataType.DT_I2:
                sw.Write(buffer.GetInt16(ci));
                break;
            case DataType.DT_I4:
                sw.Write(buffer.GetInt32(ci));
                break;
            case DataType.DT_I8:
                sw.Write(buffer.GetInt64(ci));
                break;
            case DataType.DT_R4:
                sw.Write(buffer.GetSingle(ci));
                break;
            case DataType.DT_R8:
                sw.Write(buffer.GetDouble(ci));
                break;
            case DataType.DT_UI1:
                sw.Write(buffer.GetByte(ci));
                break;
            case DataType.DT_UI2:
                sw.Write(buffer.GetUInt16(ci));
                break;
            case DataType.DT_UI4:
                sw.Write(buffer.GetUInt32(ci));
                break;
            case DataType.DT_UI8:
                sw.Write(buffer.GetUInt64(ci));
                break;
            case DataType.DT_NTEXT:
            case DataType.DT_TEXT:
            case DataType.DT_STR:
            case DataType.DT_WSTR:
                trim = true;
                strData = buffer.GetString(ci);
                if (hCol.HashImplmentationType == HashColumnsTransformation.HashImplementationType.UnicodeStringDelmitedSafe)
                {
                    // Safe string handling adds a length prefix to defeat delimiter collisions.
                    writeLen = true;
                }
                break;
            default:
                bdata = new byte[0];
                break;
        }
    }

    // Binary payloads are emitted as uppercase hex without separators.
    if (bdata != null)
    {
        strData = BitConverter.ToString(bdata).Replace("-", "");
    }
    if (strData != null)
    {
        if (trim)
        {
            strData = TrimString(hCol, strData);
        }
        if (writeLen)
        {
            sw.Write(strData.Length);
            sw.Write(hCol.HashFieldsDelimiter);
        }
        sw.Write(strData);
    }
    sw.Flush();
}
/// <summary>
/// Writes one input column to the hash source stream in the original (v1) binary
/// layout: data-type code (int32), null-flag byte (1 = NULL, 0 = NOT NULL), then
/// for most types a 4-byte length followed by the value bytes.
/// FIX: the DT_DBTIMESTAMPOFFSET case copied into <c>bdata</c> while it was still
/// null, throwing NullReferenceException for every non-null value of that type.
/// It now packs into the 16-byte scratch array (mirroring WriteColumnToStreamBinary)
/// before assigning it to <c>bdata</c>. The unused StreamWriter local was removed.
/// </summary>
/// <param name="bci">Input buffer column info; also serves as the lock object.</param>
/// <param name="buffer">Pipeline buffer positioned on the current row.</param>
/// <param name="mb">Memory buffers providing the BinaryWriter and scratch array.</param>
private static void WriteColumnToStreamOriginal(HashColumnsTransformation.InputBufferColumnInfo bci, PipelineBuffer buffer, HashMemoryBuffers mb)
{
    int ci = bci.Index;
    byte[] bdata = null;
    byte[] decimalArray = mb.DecimalArray; //Shared 16-byte scratch array for decimals and offsets
    BinaryWriter bw = mb.BinaryWriter;
    BufferColumn col = buffer.GetColumnInfo(bci.Index);
    bw.Write((int)col.DataType); //write data type
    if (buffer.IsNull(bci.Index))
    {
        bw.Write((byte)1); //Write 1 representing NULL
        bw.Write(0); //write length of 0 for NULL
    }
    else
    {
        bw.Write((byte)0); //write 0 representing NOT NULL
        //Get buffer data
        lock (bci)
        {
            switch (col.DataType)
            {
                case DataType.DT_BOOL:
                    bdata = BitConverter.GetBytes(buffer.GetBoolean(ci));
                    break;
                case DataType.DT_BYTES:
                    bdata = buffer.GetBytes(ci);
                    break;
                case DataType.DT_IMAGE:
                case DataType.DT_NTEXT:
                case DataType.DT_TEXT:
                    bdata = buffer.GetBlobData(ci, 0, (int)buffer.GetBlobLength(ci));
                    break;
                case DataType.DT_CY:
                case DataType.DT_DECIMAL:
                case DataType.DT_NUMERIC:
                    // Serialize the decimal's four int32 components little-endian into the scratch array.
                    var ia = decimal.GetBits(buffer.GetDecimal(ci));
                    for (int j = 0; j < 4; j++)
                    {
                        int k = 4 * j;
                        decimalArray[k] = (byte)(ia[j] & 0xFF);
                        decimalArray[k + 1] = (byte)(ia[j] >> 8 & 0xFF);
                        decimalArray[k + 2] = (byte)(ia[j] >> 16 & 0xFF);
                        decimalArray[k + 3] = (byte)(ia[j] >> 24 & 0xFF);
                    }
                    bdata = decimalArray;
                    break;
                case DataType.DT_DATE:
                case DataType.DT_DBTIMESTAMP:
                case DataType.DT_DBTIMESTAMP2:
                    bdata = BitConverter.GetBytes(buffer.GetDateTime(ci).ToBinary());
                    break;
                case DataType.DT_FILETIME:
                    bdata = BitConverter.GetBytes(buffer.GetInt64(ci));
                    break;
                case DataType.DT_DBDATE:
                    // NOTE(review): writes directly with no length prefix, unlike the other
                    // date types here which route through bdata. Left as-is — changing it
                    // would change existing hash values; confirm original intent.
                    bw.Write(buffer.GetDate(ci).ToBinary());
                    break;
                case DataType.DT_DBTIME:
                case DataType.DT_DBTIME2:
                    bdata = BitConverter.GetBytes(DateTime.MinValue.Add(buffer.GetTime(ci)).ToBinary());
                    break;
                case DataType.DT_DBTIMESTAMPOFFSET:
                    // FIX: previously CopyTo(bdata, ...) with bdata still null (guaranteed
                    // NullReferenceException). Pack DateTime binary (8 bytes) + offset-as-
                    // DateTime binary (8 bytes) into the 16-byte scratch array instead.
                    var dtoffset = buffer.GetDateTimeOffset(ci);
                    BitConverter.GetBytes(dtoffset.DateTime.ToBinary()).CopyTo(decimalArray, 0);
                    BitConverter.GetBytes(DateTime.MinValue.Add(dtoffset.Offset).ToBinary()).CopyTo(decimalArray, 8);
                    bdata = decimalArray;
                    break;
                case DataType.DT_EMPTY:
                case DataType.DT_NULL:
                    bdata = new byte[0];
                    break;
                case DataType.DT_GUID:
                    bdata = buffer.GetGuid(ci).ToByteArray();
                    break;
                case DataType.DT_I1:
                    bdata = BitConverter.GetBytes(buffer.GetSByte(ci));
                    break;
                case DataType.DT_I2:
                    bdata = BitConverter.GetBytes(buffer.GetInt16(ci));
                    break;
                case DataType.DT_I4:
                    bdata = BitConverter.GetBytes(buffer.GetInt32(ci));
                    break;
                case DataType.DT_I8:
                    bdata = BitConverter.GetBytes(buffer.GetInt64(ci));
                    break;
                case DataType.DT_R4:
                    bdata = BitConverter.GetBytes(buffer.GetSingle(ci));
                    break;
                case DataType.DT_R8:
                    bdata = BitConverter.GetBytes(buffer.GetDouble(ci));
                    break;
                case DataType.DT_UI1:
                    bdata = BitConverter.GetBytes(buffer.GetByte(ci));
                    break;
                case DataType.DT_UI2:
                    bdata = BitConverter.GetBytes(buffer.GetUInt16(ci));
                    break;
                case DataType.DT_UI4:
                    bdata = BitConverter.GetBytes(buffer.GetUInt32(ci));
                    break;
                case DataType.DT_UI8:
                    bdata = BitConverter.GetBytes(buffer.GetUInt64(ci));
                    break;
                case DataType.DT_STR:
                case DataType.DT_WSTR:
                    bdata = Encoding.Unicode.GetBytes(buffer.GetString(ci));
                    break;
                default:
                    bdata = new byte[0];
                    break;
            }
        }
        if (bdata != null)
        {
            bw.Write(bdata.Length); //write length of buffer
            bw.Write(bdata); //write bufferdata;
        }
    }
}
/// <summary>
/// This is where the data is read from the input buffer. Each mapped column is
/// converted to its string representation, the rows are queued, and the batch is
/// sent to SharePoint as either an update or a deletion (per the BatchType property).
/// FIX: the error-reporting loop previously threw PipelineProcessException on the
/// FIRST error row (so the "show at most 10 errors" cap never ran), while the
/// more-than-10 branch returned WITHOUT throwing, leaving the step green despite
/// errors. Now up to 10 errors are logged, then a single failure exception is raised
/// whenever any error rows exist.
/// </summary>
/// <param name="inputID">ID of the input being processed.</param>
/// <param name="buffer">Pipeline buffer containing the rows to send.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    string sharepointUrl = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTSITEURL].Value;
    string sharepointList = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTLISTNAME].Value;
    string sharepointListView = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTLISTVIEWNAME].Value;
    short batchSize = (short)ComponentMetaData.CustomPropertyCollection[C_BATCHSIZE].Value;
    Enums.BatchType batchType = (Enums.BatchType)ComponentMetaData.CustomPropertyCollection[C_BATCHTYPE].Value;

    if (buffer.EndOfRowset)
    {
        return;
    }

    // Queue the data up for batching by the sharepoint accessor object.
    // NULL columns are represented as empty strings; unmapped data types are
    // silently skipped (unchanged behavior).
    var dataQueue = new List<Dictionary<string, string>>();
    while (buffer.NextRow())
    {
        var rowData = new Dictionary<string, string>();
        foreach (var fieldName in _bufferLookup.Keys)
        {
            int colIndex = _bufferLookup[fieldName];
            bool isNull = buffer.IsNull(colIndex);
            switch (_bufferLookupDataType[fieldName])
            {
                case DataType.DT_STR:
                case DataType.DT_WSTR:
                    rowData.Add(fieldName, isNull ? string.Empty : buffer.GetString(colIndex));
                    break;
                case DataType.DT_NTEXT:
                    if (isNull)
                    {
                        rowData.Add(fieldName, string.Empty);
                    }
                    else
                    {
                        int colDataLength = (int)buffer.GetBlobLength(colIndex);
                        byte[] stringData = buffer.GetBlobData(colIndex, 0, colDataLength);
                        rowData.Add(fieldName, Encoding.Unicode.GetString(stringData));
                    }
                    break;
                case DataType.DT_R4:
                    rowData.Add(fieldName, isNull ? string.Empty : buffer.GetSingle(colIndex).ToString(_culture));
                    break;
                case DataType.DT_R8:
                    rowData.Add(fieldName, isNull ? string.Empty : buffer.GetDouble(colIndex).ToString(_culture));
                    break;
                case DataType.DT_UI1:
                case DataType.DT_I1:
                case DataType.DT_BOOL:
                    rowData.Add(fieldName, isNull ? string.Empty : buffer.GetBoolean(colIndex).ToString(_culture));
                    break;
                case DataType.DT_UI2:
                case DataType.DT_I2:
                    rowData.Add(fieldName, isNull ? string.Empty : buffer.GetInt16(colIndex).ToString(_culture));
                    break;
                case DataType.DT_UI4:
                case DataType.DT_I4:
                    rowData.Add(fieldName, isNull ? string.Empty : buffer.GetInt32(colIndex).ToString(_culture));
                    break;
                case DataType.DT_UI8:
                case DataType.DT_I8:
                    rowData.Add(fieldName, isNull ? string.Empty : buffer.GetInt64(colIndex).ToString(_culture));
                    break;
                case DataType.DT_GUID:
                    rowData.Add(fieldName, isNull ? string.Empty : buffer.GetGuid(colIndex).ToString());
                    break;
                case DataType.DT_DBTIMESTAMP:
                    // "u" gives "yyyy-MM-dd HH:mm:ssZ"; SharePoint expects ISO 8601 with a 'T'.
                    rowData.Add(fieldName, isNull ? string.Empty : buffer.GetDateTime(colIndex).ToString("u").Replace(" ", "T"));
                    break;
            }
        }
        dataQueue.Add(rowData);
    }

    bool fireAgain = false;
    if (dataQueue.Count == 0)
    {
        ComponentMetaData.FireInformation(0, ComponentMetaData.Name,
            "No rows found to update in destination.", "", 0, ref fireAgain);
        return;
    }

    var timer = new System.Diagnostics.Stopwatch();
    timer.Start();
    System.Xml.Linq.XElement resultData;
    if (batchType == Enums.BatchType.Modification)
    {
        // Perform the update
        resultData = SharePointUtility.ListServiceUtility.UpdateListItems(
            new Uri(sharepointUrl), _credentials, sharepointList, sharepointListView, dataQueue, batchSize);
    }
    else
    {
        // Get the IDs read from the buffer
        var idList = from data in dataQueue
                     where data["ID"].Trim().Length > 0
                     select data["ID"];

        // Delete the list items with IDs
        resultData = SharePointUtility.ListServiceUtility.DeleteListItems(
            new Uri(sharepointUrl), _credentials, sharepointList, sharepointListView, idList);
    }
    timer.Stop();

    // Materialize once — the deferred query was previously re-evaluated by every Count().
    var errorRows = (from result in resultData.Descendants("errorCode") select result.Parent).ToList();
    int successRowsWritten = resultData.Elements().Count() - errorRows.Count;

    string infoMsg = string.Format(CultureInfo.InvariantCulture,
        "Affected {0} records in list '{1}' at '{2}'. Elapsed time is {3}ms",
        successRowsWritten, sharepointList, sharepointUrl, timer.ElapsedMilliseconds);
    ComponentMetaData.FireInformation(0, ComponentMetaData.Name, infoMsg, "", 0, ref fireAgain);
    ComponentMetaData.IncrementPipelinePerfCounter(DTS_PIPELINE_CTR_ROWSWRITTEN, (uint)successRowsWritten);

    // Shovel any error rows to the error flow.
    bool cancel;
    int errorIter = 0;
    foreach (var row in errorRows)
    {
        // Do not flood the error log.
        errorIter++;
        if (errorIter > 10)
        {
            ComponentMetaData.FireError(0, ComponentMetaData.Name,
                "Total of " + errorRows.Count.ToString(_culture) + ", only showing first 10.", "", 0, out cancel);
            break; // FIX: was 'return', which skipped the failure exception below
        }

        string idString = "";
        XAttribute attrib = row.Element("row").Attribute("ID");
        if (attrib != null)
        {
            idString = "(SP ID=" + attrib.Value + ")";
        }

        string errorString = string.Format(CultureInfo.InvariantCulture,
            "Error on row {0}: {1} - {2} {3}",
            row.Attribute("ID"), row.Element("errorCode").Value, row.Element("errorDescription").Value, idString);
        ComponentMetaData.FireError(0, ComponentMetaData.Name, errorString, "", 0, out cancel);
    }

    if (errorRows.Count > 0)
    {
        // Need to throw an exception, or else this step's box is green (should be red), even though the flow
        // is marked as failure regardless. FIX: moved out of the loop so all (up to 10) errors are logged first.
        throw new PipelineProcessException("Errors detected in this component - see SSIS Errors");
    }
}
/// <summary>
/// Reads this column out of the pipeline buffer and boxes it into a CLR object.
/// NULL columns, DT_NULL/DT_EMPTY columns, and over-limit DT_IMAGE blobs (which
/// get redirected to the error output) all yield null.
/// </summary>
/// <param name="buffer">Pipeline buffer positioned on the current row.</param>
/// <returns>The boxed/converted column value, or null as described above.</returns>
public object GetColumnValue(PipelineBuffer buffer)
{
    if (buffer.IsNull(Index))
    {
        return null; // NULL column -> null result
    }

    object value = null;
    switch (DataType)
    {
        case DataType.DT_BOOL:
            value = buffer[Index];
            break;

        case DataType.DT_DATE:
        case DataType.DT_DBDATE:
        case DataType.DT_DBTIME:
        case DataType.DT_DBTIME2:
        case DataType.DT_DBTIMESTAMP:
        case DataType.DT_DBTIMESTAMP2:
            // Date/time types are taken as the buffer's native boxed value.
            value = buffer[Index];
            break;

        case DataType.DT_NTEXT:
        case DataType.DT_TEXT:
            value = buffer.GetString(Index);
            break;

        case DataType.DT_IMAGE:
            uint blobLen = buffer.GetBlobLength(Index);
            if (blobLen >= 2000000)
            {
                // The Limit of Entity size in CloudDB is 2M; reject this record
                // via the error output (value stays null) when it is exceeded.
                buffer.DirectErrorRow(_errorOutputId, HResults.DTS_E_LOBLENGTHLIMITEXCEEDED, ID);
            }
            else
            {
                value = buffer.GetBlobData(Index, 0, (int)blobLen);
            }
            break;

        case DataType.DT_BYTES:
            value = buffer.GetBytes(Index);
            break;

        case DataType.DT_I1:
        case DataType.DT_I2:
        case DataType.DT_I4:
        case DataType.DT_I8:
        case DataType.DT_UI1:
        case DataType.DT_UI2:
        case DataType.DT_UI4:
        case DataType.DT_UI8:
        case DataType.DT_R4:
        case DataType.DT_R8:
        case DataType.DT_DECIMAL:
        case DataType.DT_NUMERIC:
        case DataType.DT_CY:
            // All numerics are normalized through an invariant-culture decimal parse.
            value = decimal.Parse(buffer[Index].ToString(), CultureInfo.InvariantCulture);
            break;

        case DataType.DT_NULL:
        case DataType.DT_EMPTY:
            // Deliberately null.
            break;

        default:
            value = buffer[Index].ToString();
            break;
    }

    return value;
}