/// <summary>
/// Reads each input row, invokes the X3 web service "read" operation with the
/// row's parameters, and writes the parsed response fields to the output buffer.
/// </summary>
public override void ProcessInput(int inputID, PipelineBuffer inputBuffer)
{
    while (inputBuffer.NextRow())
    {
        var parameters = GetWebServiceParameters(inputBuffer);
        var response = WebService.read(X3WebService.Context, PublicIdentifierValue, parameters);

        var responseDocument = new XmlDocument();
        responseDocument.LoadXml(response.resultXml);
        var wrapper = new X3ObjectResponseWrapper(responseDocument);

        outputBuffer.AddRow();

        // Flatten every group/field pair of the response into output columns.
        foreach (var group in wrapper.ResultGroups)
        {
            foreach (var field in group.Fields)
            {
                var columnName = GenerateOutputColumnName(group.Id, field.Name);
                var columnIndex = GetOutputColumnIndex(columnName);
                var column = GetOutputColumn(columnName);
                SetColumnData(outputBuffer, columnIndex, field.Value, column.DataType);
            }
        }
    }

    if (inputBuffer.EndOfRowset)
        outputBuffer.SetEndOfRowset();
}
/// <summary>
/// Drains the input buffer without acting on any row (no-op consumer).
/// </summary>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    // Advance through every row; nothing is read or written.
    while (buffer.NextRow())
    {
    }
}
/// <summary>
/// Queries the X3 web service (no filter, capped at 10 rows) and writes each
/// returned line's fields to the matching output columns.
/// </summary>
public override void PrimeOutput(int outputs, int[] outputIDs, PipelineBuffer[] buffers)
{
    var outputBuffer = buffers[0];

    // Empty parameter list = unfiltered query; 10 = maximum row count.
    var emptyParameters = new List<CAdxParamKeyValue>();
    var response = WebService.query(X3WebService.Context, PublicIdentifierValue, emptyParameters.ToArray(), 10);

    var document = new XmlDocument();
    document.LoadXml(response.resultXml);
    var wrapper = new X3ListResponseWrapper(document);

    foreach (var line in wrapper.Lines)
    {
        outputBuffer.AddRow();
        foreach (var field in line.Fields)
        {
            var columnIndex = GetOutputColumnIndex(field.Name);
            var column = GetOutputColumn(field.Name);
            SetColumnData(outputBuffer, columnIndex, field.Value, column.DataType);
        }
    }

    outputBuffer.SetEndOfRowset();
}
/// <summary>
/// Creates a buffer service wrapping the component's main and (optional) error buffers.
/// </summary>
/// <param name="mainBuffer">Required primary output buffer.</param>
/// <param name="errorBuffer">Error output buffer; may be null.</param>
public ComponentBufferService(PipelineBuffer mainBuffer, PipelineBuffer errorBuffer)
{
    // Only the main buffer is mandatory; the error buffer may legitimately be absent.
    ArgumentVerifier.CheckObjectArgument(mainBuffer, "mainBuffer");

    this.mainBuffer = mainBuffer;
    this.errorBuffer = errorBuffer;
}
/// <summary>
/// Appends the "TEST7" marker to column 0 of every row, in place.
/// </summary>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    base.ProcessInput(inputID, buffer);

    while (buffer.NextRow())
    {
        var original = buffer.GetString(0);
        buffer.SetString(0, original + "TEST7");
    }
}
/// <summary>
/// Converts the WKT text in the input column to WKB in the output blob column,
/// then directs the row to the default output.
/// </summary>
public override void transform(ref PipelineBuffer buffer, int defaultOutputId, int inputColumnBufferIndex, int outputColumnBufferIndex)
{
    // Read the WKT text and parse it into an OGR geometry.
    string wkt = buffer.GetString(inputColumnBufferIndex);
    Geometry geometry = Geometry.CreateFromWkt(wkt);

    // Serialize the geometry as WKB into the output blob column.
    byte[] wkb = new byte[geometry.WkbSize()];
    geometry.ExportToWkb(wkb);
    buffer.AddBlobData(outputColumnBufferIndex, wkb);

    buffer.DirectRow(defaultOutputId);
}
/// <summary>
/// Computes the area of the WKB geometry in the input blob column, writes it to
/// the output double column, and directs the row to the default output.
/// </summary>
public override void transform(ref PipelineBuffer buffer, int defaultOutputId, int inputColumnBufferIndex, int outputColumnBufferIndex)
{
    // Fix: GetBlobData allocates and returns its own array; the original also
    // pre-allocated a same-sized array that was immediately discarded.
    int blobLength = (int)buffer.GetBlobLength(inputColumnBufferIndex);
    byte[] geomBytes = buffer.GetBlobData(inputColumnBufferIndex, 0, blobLength);

    Geometry geom = Geometry.CreateFromWkb(geomBytes);
    double area = geom.GetArea();
    buffer.SetDouble(outputColumnBufferIndex, area);

    buffer.DirectRow(defaultOutputId);
}
/// <summary>
/// For each row: copies column 0 into column 1 with an " OK" suffix, then
/// overwrites column 0 with a fixed two-byte blob payload.
/// </summary>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    base.ProcessInput(inputID, buffer);

    while (buffer.NextRow())
    {
        var firstColumn = buffer.GetString(0);
        buffer.SetString(1, firstColumn + " OK");
        buffer.AddBlobData(0, new byte[] { 56, 67 });
    }
}
/// <summary>
/// Upper-cases the string in column 0 of every row, in place.
/// </summary>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    base.ProcessInput(inputID, buffer);

    // Fix: hoisted out of the loop — the original allocated a new converter for
    // every row, which was wasted work for a per-value conversion.
    var upperCaseConverter = new UpperCaseConverter();

    while (buffer.NextRow())
    {
        var value = buffer.GetString(0);
        buffer.SetString(0, upperCaseConverter.Convert(value));
    }
}
/// <summary>
/// Assigns a 1-based sequence number to the row-number column of each row.
/// </summary>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    if (buffer.EndOfRowset)
        return;

    var rowNumber = 0;
    while (buffer.NextRow())
    {
        // Write the running row number into the [row number] column.
        rowNumber++;
        buffer.SetInt32(_rowCountColumnIndex, rowNumber);
    }
}
/// <summary>
/// Converts the WKB geometry in the input blob column to KML text in the output
/// string column, then directs the row to the default output.
/// </summary>
public override void transform(ref PipelineBuffer buffer, int defaultOutputId, int inputColumnBufferIndex, int outputColumnBufferIndex)
{
    // Fix: GetBlobData allocates and returns its own array; the original also
    // pre-allocated a same-sized array that was immediately discarded.
    int blobLength = (int)buffer.GetBlobLength(inputColumnBufferIndex);
    byte[] geomBytes = buffer.GetBlobData(inputColumnBufferIndex, 0, blobLength);

    Geometry geom = Geometry.CreateFromWkb(geomBytes);

    // null = default KML export options.
    string kml = geom.ExportToKML(null);
    buffer.SetString(outputColumnBufferIndex, kml);

    buffer.DirectRow(defaultOutputId);
}
/// <summary>
/// Builds the X3 web-service parameter array from the current input row:
/// one string parameter per input column, keyed by the column's ordinal position.
/// </summary>
/// <param name="inputBuffer">Buffer positioned on the row to read.</param>
/// <returns>One key/value pair per input column.</returns>
protected CAdxParamKeyValue[] GetWebServiceParameters(PipelineBuffer inputBuffer)
{
    var parameters = new List<CAdxParamKeyValue>();

    for (var position = 0; position < InputColumns.Count; position++)
    {
        var column = InputColumns[position];
        var bufferIndex = GetInputCollumnIndex(column.Name);
        var columnValue = inputBuffer.GetString(bufferIndex);

        parameters.Add(new CAdxParamKeyValue
        {
            key = position.ToString(),
            value = columnValue
        });
    }

    return parameters.ToArray();
}
/// <summary>
/// Processes each input row through <see cref="transform"/>, routing failures
/// to the error output or failing the component according to the input
/// column's error-row disposition.
/// </summary>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    if (buffer.EndOfRowset)
        return;

    // Resolve the error output, then take the other output as the default.
    int errorOutputID = -1;
    int errorOutputIndex = -1;
    GetErrorOutputInfo(ref errorOutputID, ref errorOutputIndex);
    int defaultOutputID = errorOutputIndex == 0
        ? ComponentMetaData.OutputCollection[1].ID
        : ComponentMetaData.OutputCollection[0].ID;

    while (buffer.NextRow())
    {
        try
        {
            // Skip record if input column is null.
            if (!buffer.IsNull(this.inputColumnBufferIndex))
            {
                this.transform(ref buffer, defaultOutputID, this.inputColumnBufferIndex, this.outputColumnBufferIndex);
            }
        }
        catch (System.Exception ex)
        {
            IDTSInput100 input = ComponentMetaData.InputCollection.GetObjectByID(inputID);
            IDTSInputColumn100 inputColumn = input.InputColumnCollection[0];

            if (inputColumn.ErrorRowDisposition == DTSRowDisposition.RD_RedirectRow)
            {
                // Redirect the failing row to the error output with the exception's HRESULT.
                int errorCode = System.Runtime.InteropServices.Marshal.GetHRForException(ex);
                buffer.DirectErrorRow(errorOutputID, errorCode, inputColumn.LineageID);
            }
            else if (inputColumn.ErrorRowDisposition == DTSRowDisposition.RD_FailComponent ||
                     inputColumn.ErrorRowDisposition == DTSRowDisposition.RD_NotUsed)
            {
                ComponentMetaData.FireError(0, ComponentMetaData.Name, ex.Message, string.Empty, 0, out cancel);
                // Fix: keep the original exception as InnerException — the
                // original threw a bare Exception with only the message,
                // discarding the real failure's type and stack trace.
                throw new Exception(ex.Message, ex);
            }
        }
    }
}
/// <summary>
/// Uploads, for every input row, the local file named in the configured source
/// column to the configured HDFS directory (keeping the original file name).
/// </summary>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    int columnIndex = ComponentMetaData.CustomPropertyCollection[Constants.SOURCE_COLUMN_INDEX].Value;
    string remotePath = ComponentMetaData.CustomPropertyCollection[Constants.HDFS_PATH_PROPERTY].Value;

    while (buffer.NextRow())
    {
        string localFileName = buffer.GetString(columnIndex);
        string fileName = Path.GetFileName(localFileName);

        // HDFS paths always use forward slashes, regardless of local platform.
        string remoteFileName = remotePath + "/" + fileName;

        // ProcessInput is synchronous, so the async upload must be awaited
        // synchronously. Fix: GetAwaiter().GetResult() (unlike the original
        // .Wait()) surfaces the real exception instead of wrapping it in an
        // AggregateException.
        client.CreateFile(localFileName, remoteFileName).GetAwaiter().GetResult();
    }
}
/// <summary>
/// Simplifies the WKB geometry in the input blob column (topology-preserving,
/// using the configured tolerance), writes the simplified WKB back over the
/// same column, and directs the row to the default output.
/// </summary>
public override void transform(ref PipelineBuffer buffer, int defaultOutputId, int inputColumnBufferIndex, int outputColumnBufferIndex)
{
    // Fix: GetBlobData allocates and returns its own array; the original also
    // pre-allocated a same-sized array that was immediately discarded.
    int blobLength = (int)buffer.GetBlobLength(inputColumnBufferIndex);
    byte[] geomBytes = buffer.GetBlobData(inputColumnBufferIndex, 0, blobLength);

    Geometry geom = Geometry.CreateFromWkb(geomBytes);
    geom = geom.SimplifyPreserveTopology(this.tolerance);

    // Replace the input blob with the simplified geometry's WKB.
    byte[] simplifiedBytes = new byte[geom.WkbSize()];
    geom.ExportToWkb(simplifiedBytes);
    buffer.ResetBlobData(inputColumnBufferIndex);
    buffer.AddBlobData(inputColumnBufferIndex, simplifiedBytes);

    // Direct row to default output.
    buffer.DirectRow(defaultOutputId);
}
/// <summary>
/// Streams every document of the configured MongoDB collection into the first
/// output buffer, mapping document fields to output columns by name. Missing
/// fields and BSON nulls become NULL columns.
/// </summary>
public override void PrimeOutput(int outputs, int[] outputIDs, PipelineBuffer[] buffers)
{
    IDTSOutput100 output = ComponentMetaData.OutputCollection[0];
    PipelineBuffer buffer = buffers[0];
    IDTSCustomProperty100 collectionNameProp = ComponentMetaData.CustomPropertyCollection[COLLECTION_NAME_PROP_NAME];

    // Lazily connect if AcquireConnections has not run yet.
    if (database == null)
    {
        AcquireConnections(null);
    }

    if (string.IsNullOrEmpty(collectionNameProp.Value))
    {
        throw new Exception("The collection name is null or empty!");
    }

    ComponentMetaData.FireInformation(0, "MongoDataSource", "processing collection " + collectionNameProp.Value, String.Empty, 0, false);

    var collection = database.GetCollection(collectionNameProp.Value);
    var cursor = GetCollectionCursor(collection);

    foreach (BsonDocument document in cursor)
    {
        buffer.AddRow();

        for (int index = 0; index < columnInformation.Count; index++)
        {
            ColumnInfo columnInfo = (ColumnInfo)columnInformation[index];
            try
            {
                bool present = document.Contains(columnInfo.ColumnName) && document[columnInfo.ColumnName] != null;
                if (present && !document.GetValue(columnInfo.ColumnName).IsBsonNull)
                {
                    buffer[columnInfo.BufferColumnIndex] = GetValue(document, columnInfo);
                }
                else
                {
                    // Missing field or BSON null both map to a NULL column.
                    buffer.SetNull(columnInfo.BufferColumnIndex);
                }
            }
            catch (Exception e)
            {
                throw new Exception("There was an issue with column '" + columnInfo.ColumnName + "'", e);
            }
        }
    }

    buffer.SetEndOfRowset();
}
/// <summary>
/// Emits one row per file name supplied by the connection manager: the file's
/// path in the name column and its full contents in the blob column.
/// </summary>
public override void PrimeOutput(int outputs, int[] outputIDs, PipelineBuffer[] buffers)
{
    // The connection manager yields one file name per AcquireConnection call,
    // returning null when the list is exhausted.
    string fileName = (string)m_ConnMgr.AcquireConnection(null);
    while (fileName != null)
    {
        buffers[0].AddRow();
        buffers[0].SetString(m_FileNameColumnIndex, fileName);

        // Fix: the original opened a FileStream without disposing it (handle
        // leak) and ignored Stream.Read's return value (possible short read).
        // File.ReadAllBytes closes the file and guarantees a complete read,
        // and opens with FileShare.Read like the original.
        byte[] fileData = File.ReadAllBytes(fileName);
        buffers[0].AddBlobData(m_FileBlobColumnIndex, fileData);

        fileName = (string)m_ConnMgr.AcquireConnection(null);
    }

    buffers[0].SetEndOfRowset();
}
/// <summary>
/// Builds an Order from each input row's string, XML-serializes it, and emits
/// the UTF-8 bytes as a blob row on the output.
/// </summary>
public override void ProcessInput(int inputID, PipelineBuffer inputBuffer)
{
    base.ProcessInput(inputID, inputBuffer);

    while (inputBuffer.NextRow())
    {
        var orderCode = inputBuffer.GetString(0);

        // The product name is derived from the same input string.
        var order = new Order
        {
            OrderCode = orderCode,
            Product = new Product { Name = orderCode + "Product" }
        };

        var serialized = XmlSerializer.XmlSerialize(order);
        var payload = Encoding.UTF8.GetBytes(serialized);

        outputBuffer.AddRow();
        outputBuffer.AddBlobData(0, payload);
    }

    if (inputBuffer.EndOfRowset)
        outputBuffer.SetEndOfRowset();
}
/// <summary>
/// Deserializes each row's XML blob into an Order, re-serializes it as JSON,
/// and writes the (length-capped) JSON string to the output.
/// </summary>
public override void ProcessInput(int inputID, PipelineBuffer inputBuffer)
{
    base.ProcessInput(inputID, inputBuffer);

    var jsSerializer = new JavaScriptSerializer();
    while (inputBuffer.NextRow())
    {
        // Decode the blob back into the XML text it carries.
        var blobLength = (int)inputBuffer.GetBlobLength(0);
        var inputBytes = inputBuffer.GetBlobData(0, 0, blobLength);
        var inputXml = Encoding.UTF8.GetString(inputBytes);

        // XML -> Order -> JSON.
        var order = XmlSerializer.XmlDeserialize<Order>(inputXml);
        var json = jsSerializer.Serialize(order);

        // Truncate to fit the 4000-character output column, reserving room for
        // the " ..." continuation marker.
        if (json.Length > 4000 - 4)
            json = json.Substring(0, 4000 - 4) + " ...";

        outputBuffer.AddRow();
        outputBuffer.SetString(0, json);
    }

    if (inputBuffer.EndOfRowset)
        outputBuffer.SetEndOfRowset();
}
/// <summary>
/// Proper-cases the configured input columns of every row, writing results to
/// the matching output columns. Per-column failures are reported as warnings
/// and processing continues (best-effort, as in the original).
/// </summary>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    IDTSInput100 input = ComponentMetaData.InputCollection.GetObjectByID(inputID);
    if (!buffer.EndOfRowset)
    {
        while (buffer.NextRow())
        {
            for (int x = 0; x < inputBufferColindex.Length; x++)
            {
                try
                {
                    string properCaseData = InitCap_ProcessText(buffer.GetString(inputBufferColindex[x]));
                    buffer.SetString(outputBufferColindex[x], properCaseData);
                }
                catch (Exception ex)
                {
                    // Fix: the original swallowed the exception silently, hiding
                    // data errors. Surface it as a warning while keeping the
                    // row-continuing, best-effort behavior.
                    ComponentMetaData.FireWarning(0, ComponentMetaData.Name,
                        "Failed to proper-case column at buffer index " + inputBufferColindex[x] + ": " + ex.Message,
                        string.Empty, 0);
                    //buffer.DirectErrorRow(outputBufferColindex[x], -1, inputBufferColindex[x]);
                }
            }
        }
    }
}
/// <summary>
/// Drains the RabbitMQ consumer queue into the output buffer, one row per
/// message: body text in column 0, routing key in column 1. Stops on timeout,
/// dequeue error, or when the consumer shuts down.
/// </summary>
public override void PrimeOutput(int outputs, int[] outputIDs, PipelineBuffer[] buffers)
{
    IDTSOutput100 output = ComponentMetaData.OutputCollection[0];
    PipelineBuffer buffer = buffers[0];

    while (queueConsumer.IsRunning)
    {
        object message;
        bool dequeued;
        try
        {
            // Wait up to 100 ms for the next message.
            dequeued = queueConsumer.Queue.Dequeue(100, out message);
        }
        catch (Exception)
        {
            break;
        }

        if (!dequeued)
        {
            break;
        }

        var deliveryArgs = (BasicDeliverEventArgs)message;
        buffer.AddRow();
        buffer[0] = System.Text.Encoding.UTF8.GetString(deliveryArgs.Body);
        buffer[1] = deliveryArgs.RoutingKey;
    }

    buffer.SetEndOfRowset();
}
/// <summary>
/// Writes a string value into the buffer column at <paramref name="index"/>,
/// converting it according to the column's SSIS data type. Data types outside
/// the handled set are silently ignored.
/// </summary>
/// <param name="buffer">Target pipeline buffer.</param>
/// <param name="index">Buffer column index to write.</param>
/// <param name="value">Raw string value to convert and store.</param>
/// <param name="dataType">SSIS data type of the target column.</param>
protected void SetColumnData(PipelineBuffer buffer, int index, string value, DataType dataType)
{
    switch (dataType)
    {
        case DataType.DT_WSTR:
            buffer.SetString(index, value);
            break;
        case DataType.DT_I4:
            buffer.SetInt32(index, Int32.Parse(value));
            break;
        case DataType.DT_DECIMAL:
            buffer.SetDecimal(index, Decimal.Parse(value));
            break;
        case DataType.DT_DATE:
            // Dates arrive in compact yyyyMMdd form.
            buffer.SetDateTime(index, DateTime.ParseExact(value, "yyyyMMdd", CultureInfo.InvariantCulture));
            break;
        default:
            // Unsupported types: leave the column untouched.
            break;
    }
}
/// <summary>
/// Reads the value of the given column from the buffer as a CLR object,
/// dispatching on the column's SSIS data type. Returns null for NULL values
/// and for unsupported data types.
/// </summary>
/// <param name="buffer">Pipeline buffer positioned on the current row.</param>
/// <param name="col">Column descriptor (buffer index and SSIS data type).</param>
private static object GetBufferColumnValue(PipelineBuffer buffer, ColumnInfo col)
{
    int index = col.BufferIndex;

    if (buffer.IsNull(index))
    {
        return null;
    }

    switch (col.ColumnDataType)
    {
        case DataType.DT_BOOL:
            return buffer.GetBoolean(index);
        case DataType.DT_BYTES:
            return buffer.GetBytes(index);

        // Decimal-backed numeric types.
        case DataType.DT_CY:
        case DataType.DT_DECIMAL:
        case DataType.DT_NUMERIC:
            return buffer.GetDecimal(index);

        // DateTime-backed temporal types.
        case DataType.DT_DATE:
        case DataType.DT_DBTIMESTAMP:
        case DataType.DT_DBTIMESTAMP2:
        case DataType.DT_FILETIME:
            return buffer.GetDateTime(index);

        case DataType.DT_DBDATE:
            return buffer.GetDate(index);

        case DataType.DT_DBTIME:
        case DataType.DT_DBTIME2:
            return buffer.GetTime(index);

        case DataType.DT_DBTIMESTAMPOFFSET:
            return buffer.GetDateTimeOffset(index);

        case DataType.DT_GUID:
            return buffer.GetGuid(index);

        // Signed integers.
        case DataType.DT_I1:
            return buffer.GetSByte(index);
        case DataType.DT_I2:
            return buffer.GetInt16(index);
        case DataType.DT_I4:
            return buffer.GetInt32(index);
        case DataType.DT_I8:
            return buffer.GetInt64(index);

        // Unsigned integers.
        case DataType.DT_UI1:
            return buffer.GetByte(index);
        case DataType.DT_UI2:
            return buffer.GetUInt16(index);
        case DataType.DT_UI4:
            return buffer.GetUInt32(index);
        case DataType.DT_UI8:
            return buffer.GetUInt64(index);

        // Blob-backed types: read the entire blob.
        case DataType.DT_IMAGE:
        case DataType.DT_NTEXT:
        case DataType.DT_TEXT:
            return buffer.GetBlobData(index, 0, (int)buffer.GetBlobLength(index));

        case DataType.DT_R4:
            return buffer.GetSingle(index);
        case DataType.DT_R8:
            return buffer.GetDouble(index);

        case DataType.DT_STR:
        case DataType.DT_WSTR:
            return buffer.GetString(index);

        default:
            // Unsupported data types read as null.
            return null;
    }
}
/// <summary>
/// Consumes rows from the supplied pipeline buffer and materializes them into a
/// destination object. Implementations define what the destination is and how
/// rows map onto it.
/// </summary>
/// <param name="buffer">The pipeline buffer to read rows from.</param>
/// <returns>The destination built from the buffer's rows.</returns>
public abstract TDestination ProcessInput(PipelineBuffer buffer);
/// <summary>
/// This is where the data is read from the input buffer.
/// Converts every input row into a field-name -> FieldValue dictionary
/// (stringifying each value according to its SSIS data type), queues all rows,
/// optionally clears the destination, and then pushes the batch via
/// CreateDataset/AddClassRows.
/// </summary>
/// <param name="inputID">ID of the input being processed.</param>
/// <param name="buffer">Pipeline buffer holding the input rows.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    // NOTE(review): these five locals are never used below — presumably
    // leftovers from an earlier implementation; confirm before removing.
    string sharepointUrl = "";
    string sharepointList = "";
    string sharepointListView = "";
    short batchSize = (short)2;
    Enums.BatchType batchType = Enums.BatchType.Deletion;

    if (!buffer.EndOfRowset)
    {
        // Queue the data up for batching by the sharepoint accessor object.
        var dataQueue = new List<Dictionary<string, FieldValue>>();
        while (buffer.NextRow())
        {
            var rowData = new Dictionary<string, FieldValue>();
            foreach (var fieldName in _bufferLookup.Keys)
            {
                if (buffer.IsNull(_bufferLookup[fieldName]))
                {
                    // Do nothing, can ignore this field.
                }
                else
                {
                    FieldValue filedObj = new FieldValue();
                    // Stringify the column value per its pipeline data type;
                    // 'type' carries the destination-side type name.
                    switch (_bufferLookupDataType[fieldName])
                    {
                        case DataType.DT_STR:
                        case DataType.DT_WSTR:
                            filedObj.value = buffer.GetString(_bufferLookup[fieldName]);
                            filedObj.type = "string";
                            rowData.Add(fieldName, filedObj);
                            break;
                        case DataType.DT_NTEXT:
                            // NTEXT arrives as a blob of UTF-16 bytes.
                            int colDataLength = (int)buffer.GetBlobLength(_bufferLookup[fieldName]);
                            byte[] stringData = buffer.GetBlobData(_bufferLookup[fieldName], 0, colDataLength);
                            filedObj.value = Encoding.Unicode.GetString(stringData);
                            filedObj.type = "string";
                            rowData.Add(fieldName, filedObj);
                            break;
                        case DataType.DT_R4:
                            filedObj.value = buffer.GetSingle(_bufferLookup[fieldName]).ToString(_culture);
                            filedObj.type = "Double";
                            rowData.Add(fieldName, filedObj);
                            break;
                        case DataType.DT_CY:
                            filedObj.value = buffer.GetDecimal(_bufferLookup[fieldName]).ToString(_culture);
                            filedObj.type = "Double";
                            rowData.Add(fieldName, filedObj);
                            break;
                        case DataType.DT_R8:
                            filedObj.value = buffer.GetDouble(_bufferLookup[fieldName]).ToString(_culture);
                            filedObj.type = "Double";
                            rowData.Add(fieldName, filedObj);
                            break;
                        case DataType.DT_UI1:
                        case DataType.DT_I1:
                        case DataType.DT_BOOL:
                            // NOTE(review): single-byte integer types are read
                            // via GetBoolean here — confirm they are always
                            // boolean-backed in this component.
                            filedObj.value = buffer.GetBoolean(_bufferLookup[fieldName]).ToString(_culture);
                            filedObj.type = "Boolean";
                            rowData.Add(fieldName, filedObj);
                            break;
                        case DataType.DT_UI2:
                        case DataType.DT_I2:
                            filedObj.value = buffer.GetInt16(_bufferLookup[fieldName]).ToString(_culture);
                            filedObj.type = "Int64";
                            rowData.Add(fieldName, filedObj);
                            break;
                        case DataType.DT_UI4:
                        case DataType.DT_I4:
                            filedObj.value = buffer.GetInt32(_bufferLookup[fieldName]).ToString(_culture);
                            filedObj.type = "Int64";
                            rowData.Add(fieldName, filedObj);
                            break;
                        case DataType.DT_UI8:
                        case DataType.DT_I8:
                            filedObj.value = buffer.GetInt64(_bufferLookup[fieldName]).ToString(_culture);
                            filedObj.type = "Int64";
                            rowData.Add(fieldName, filedObj);
                            break;
                        case DataType.DT_GUID:
                            filedObj.value = buffer.GetGuid(_bufferLookup[fieldName]).ToString();
                            filedObj.type = "String";
                            rowData.Add(fieldName, filedObj);
                            break;
                        case DataType.DT_DBTIMESTAMP:
                            // "u" format plus the space->T replacement yields an
                            // ISO-8601-style timestamp.
                            filedObj.value = buffer.GetDateTime(_bufferLookup[fieldName]).ToString("u").Replace(" ", "T");
                            filedObj.type = "Datetime";
                            rowData.Add(fieldName, filedObj);
                            break;
                        case DataType.DT_DATE:
                            filedObj.value = buffer.GetDateTime(_bufferLookup[fieldName]).ToString("yyyy-MM-dd");
                            filedObj.type = "Datetime";
                            rowData.Add(fieldName, filedObj);
                            break;
                    }
                }
            }
            dataQueue.Add(rowData);
        }

        bool fireAgain = false;
        // Optionally wipe the destination before loading the new batch.
        Enums.TrueFalseValue removeRecords = (Enums.TrueFalseValue)ComponentMetaData.CustomPropertyCollection[C_REMOVERECORDS].Value;
        if (removeRecords == Enums.TrueFalseValue.True)
        {
            ClearRows();
        }

        if (dataQueue.Count() > 0)
        {
            System.Diagnostics.Stopwatch timer = new System.Diagnostics.Stopwatch();
            timer.Start();
            // NOTE(review): resultData is declared but never assigned or used;
            // timer results are likewise never reported.
            System.Xml.Linq.XElement resultData;
            CreateDataset(dataQueue);
            AddClassRows(dataQueue);
            timer.Stop();
        }
        else
        {
            ComponentMetaData.FireInformation(0, ComponentMetaData.Name, "No rows found to update in destination.", "", 0, ref fireAgain);
        }
    }
}
/// <summary>
/// Process the rows from the datasource.
/// Maps each input row onto a CRM Entity, wraps it in the OrganizationRequest
/// matching the configured operation, and sends the accumulated requests to
/// CRM in batches; failing rows are routed per the input's error-row
/// disposition.
/// </summary>
/// <param name="inputID">ID of the input being processed.</param>
/// <param name="buffer">Pipeline buffer holding the input rows.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    EntityCollection newEntityCollection = new EntityCollection();
    List<OrganizationRequest> Rqs = new List<OrganizationRequest>();
    Mapping.MappingItem mappedColumn;
    IDTSInputColumn100 inputcolumn;
    IDTSInput100 input = ComponentMetaData.InputCollection.GetObjectByID(inputID);
    Entity newEntity;

    while (buffer.NextRow())
    {
        try
        {
            newEntity = new Entity(EntityName);
            bchCnt++; // running count of rows in the current batch

            // Adds the row to the output buffer for further processing:
            // copy every mapped, non-null input column into the entity;
            // null columns fall back to the mapping's default value.
            foreach (int col in mapInputColsToBufferCols)
            {
                inputcolumn = ComponentMetaData.InputCollection[0].InputColumnCollection[col];
                mappedColumn = mapping.ColumnList.Find(x => x.ExternalColumnName == inputcolumn.Name && x.Map == true);
                if (mappedColumn != null)
                {
                    if (buffer.IsNull(col) == false)
                    {
                        AttributesBuilder(mappedColumn, buffer[col], ref newEntity);
                    }
                    else
                    {
                        AttributesBuilder(mappedColumn, mappedColumn.DefaultValue, ref newEntity);
                    }
                }
            }

            // Wrap the entity in the request type for the configured operation.
            switch ((Operations)operation)
            {
                //Create
                case Operations.Create:
                    Rqs.Add(new CreateRequest { Target = newEntity });
                    newEntity.Attributes["ownerid"] = new EntityReference("systemuser", currentUserId);
                    break;
                //Update
                case Operations.Update:
                    Rqs.Add(new UpdateRequest { Target = newEntity });
                    newEntity.Attributes["ownerid"] = new EntityReference("systemuser", currentUserId);
                    break;
                //Delete
                case Operations.Delete:
                    Rqs.Add(new DeleteRequest { Target = newEntity.ToEntityReference() });
                    break;
                //status
                case Operations.Status:
                    Rqs.Add(new SetStateRequest { EntityMoniker = newEntity.ToEntityReference(), State = new OptionSetValue((int)newEntity.Attributes["statecode"]), Status = new OptionSetValue((int)newEntity.Attributes["statuscode"]) });
                    break;
                case Operations.Upsert:
                    Rqs.Add(new UpsertRequest { Target = newEntity });
                    newEntity.Attributes["ownerid"] = new EntityReference("systemuser", currentUserId);
                    break;
                case Operations.Workflow:
                    newEntity.Attributes["ownerid"] = new EntityReference("systemuser", currentUserId);
                    Rqs.Add(new ExecuteWorkflowRequest { EntityId = newEntity.Id, WorkflowId = Guid.Parse(WorkflowId) });
                    break;
            }

            newEntityCollection.Entities.Add(newEntity);
            rowIndexList.Add(ir);

            // Flush when a full batch (batchSize * 2 rows) has accumulated, or
            // on the last row(s) of the buffer; the odd-row-count clause
            // triggers the flush one row early.
            // NOTE(review): the flush condition's arithmetic is order-sensitive;
            // confirm against batchSize semantics before changing.
            if (bchCnt == batchSize * 2 || (buffer.CurrentRow == buffer.RowCount || (buffer.RowCount % 2 != 0 && buffer.CurrentRow == buffer.RowCount - 1)))
            {
                int startBuffIndex = buffer.CurrentRow - (bchCnt - 1);
                CRMIntegrate[] IntegrationRows = SendRowsToCRM(newEntityCollection, EntityName, Rqs);
                sendOutputResults(IntegrationRows, buffer, startBuffIndex);
            }
            ir++; // absolute row index across the whole input
        }
        catch (Exception ex)
        {
            // Route the failing row per the input's configured disposition.
            switch (input.ErrorRowDisposition)
            {
                case DTSRowDisposition.RD_RedirectRow:
                    buffer.DirectErrorRow(errorOutputId, 0, buffer.CurrentRow);
                    break;
                case DTSRowDisposition.RD_IgnoreFailure:
                    buffer.DirectRow(defaultOuputId);
                    break;
                case DTSRowDisposition.RD_FailComponent:
                    throw new Exception("There was and error processing rows. " + ex.Message);
            }
        }
    }
}
/// <summary>
/// Called at run time for source components and transformation components with
/// asynchronous outputs to let these components add rows to the output buffers.
/// Streams the configured MongoDB collection into the default output, honoring
/// per-column truncation/error dispositions; failures are delegated to
/// HandleProcessingError.
/// </summary>
/// <param name="outputs">The number of elements in the outputIDs and buffers arrays.</param>
/// <param name="outputIDs">An array of Microsoft.SqlServer.Dts.Pipeline.Wrapper.IDTSOutput100 ID's.</param>
/// <param name="buffers">An array of Microsoft.SqlServer.Dts.Pipeline.PipelineBuffer objects.</param>
public override void PrimeOutput(int outputs, int[] outputIDs, PipelineBuffer[] buffers)
{
    // Determine which buffer is for regular output, and which is for error output.
    PipelineBuffer errorBuffer = null;
    PipelineBuffer defaultBuffer = null;
    for (int outputIndex = 0; outputIndex < outputs; outputIndex++)
    {
        if (outputIDs[outputIndex] == errorOutputID)
        {
            errorBuffer = buffers[outputIndex];
        }
        else
        {
            defaultBuffer = buffers[outputIndex];
        }
    }

    var cursor = GetCollectionCursor(ComponentMetaData.CustomPropertyCollection[COLLECTION_NAME_PROP_NAME].Value);
    var defaultOutputColumns = GetDefaultOutputColumns().ToArray();

    foreach (BsonDocument document in cursor)
    {
        // Track the column being processed so the outer catch can report it.
        ColumnInfo failingColumnInfo = null;
        try
        {
            defaultBuffer.AddRow();
            foreach (ColumnInfo columnInfo in this.columnInformata)
            {
                failingColumnInfo = columnInfo;
                if (document.Contains(columnInfo.ColumnName) && document[columnInfo.ColumnName] != null)
                {
                    if (document.GetValue(columnInfo.ColumnName).IsBsonNull)
                    {
                        defaultBuffer.SetNull(columnInfo.OuputBufferColumnIndex);
                    }
                    else
                    {
                        var value = GetValue(document, columnInfo);
                        try
                        {
                            defaultBuffer[columnInfo.OuputBufferColumnIndex] = value;
                        }
                        catch (DoesNotFitBufferException)
                        {
                            if (failingColumnInfo.OutputColumn.TruncationRowDisposition == DTSRowDisposition.RD_IgnoreFailure)
                            {
                                if (value is string)
                                {
                                    // Truncate the string to the declared column length.
                                    defaultBuffer[columnInfo.OuputBufferColumnIndex] = value.ToString().Substring(0, columnInfo.OutputColumn.Length);
                                }
                                else
                                {
                                    ComponentMetaData.FireWarning(0, "MongoDataSource", string.Format("Truncation of column {0} failed, as truncation of type {1} currently unsupported.", columnInfo.OutputColumn.Name, value.GetType().FullName), String.Empty, 0);
                                }
                            }
                            else
                            {
                                // Fix: bare rethrow instead of the original
                                // "throw ex;", which reset the stack trace.
                                throw;
                            }
                        }
                    }
                }
                else
                {
                    // Missing field maps to a NULL column.
                    defaultBuffer.SetNull(columnInfo.OuputBufferColumnIndex);
                }
            }
        }
        catch (Exception ex)
        {
            // Pick the disposition matching the failure type (truncation vs. error).
            DTSRowDisposition disposition = ex is DoesNotFitBufferException
                ? failingColumnInfo.OutputColumn.TruncationRowDisposition
                : failingColumnInfo.OutputColumn.ErrorRowDisposition;
            HandleProcessingError(disposition, defaultBuffer, errorBuffer, failingColumnInfo, ex);
        }
    }

    if (defaultBuffer != null)
    {
        defaultBuffer.SetEndOfRowset();
    }
    if (errorBuffer != null)
    {
        errorBuffer.SetEndOfRowset();
    }
}
/// <summary>
/// Writes one input column value to the hash stream as delimited Unicode text.
/// In the "safe" implementations each field is prefixed with a null indicator
/// (and, for string data, a length) so distinct rows cannot collide by
/// shifting content across field boundaries.
/// </summary>
/// <param name="columnPosition">Zero-based position of the column within the hash field list.</param>
/// <param name="hCol">Hash column descriptor (implementation type, delimiter, null replacement).</param>
/// <param name="bci">Input buffer column descriptor.</param>
/// <param name="buffer">Pipeline buffer positioned on the current row.</param>
/// <param name="mb">Memory buffers holding the target StreamWriter.</param>
private static void WriteColumnToStreamUnicodeDelimited(int columnPosition, HashColumnsTransformation.HashColumnInfo hCol, HashColumnsTransformation.InputBufferColumnInfo bci, PipelineBuffer buffer, HashMemoryBuffers mb)
{
    int ci = bci.Index;
    byte[] bdata = null;
    string strData = null;
    bool writeLen = false;
    bool isNull;
    bool trim = false;
    StreamWriter sw = mb.StreamWriter;
    BufferColumn col = buffer.GetColumnInfo(bci.Index);
    isNull = buffer.IsNull(ci);

    //When not first field, write field delimiter
    if (columnPosition > 0)
    {
        sw.Write(hCol.HashFieldsDelimiter);
    }

    if (isNull)
    {
        if (hCol.HashImplmentationType == HashColumnsTransformation.HashImplementationType.UnicodeStringDelimitedNullSafe || hCol.HashImplmentationType == HashColumnsTransformation.HashImplementationType.UnicodeStringDelmitedSafe)
        {
            //Safe handling: write 1 indicating null and field delimiter.
            sw.Write(1);
            sw.Write(hCol.HashFieldsDelimiter);
        }
        else //Non Safe handling = write replacement value
        {
            sw.Write(hCol.NullReplacement);
        }
        sw.Flush();
        return; //return. no need for other processing as null value is stored in the field.
    }
    else if (hCol.HashImplmentationType == HashColumnsTransformation.HashImplementationType.UnicodeStringDelimitedNullSafe || hCol.HashImplmentationType == HashColumnsTransformation.HashImplementationType.UnicodeStringDelmitedSafe)
    {
        //Safe handling: write 0 indicating non null value and field delimiter. Field value will be after the delimiter.
        sw.Write(0);
        sw.Write(hCol.HashFieldsDelimiter);
    }

    //Get buffer data
    // NOTE(review): the lock on the shared column-info object suggests this can
    // be called from multiple threads — confirm before removing.
    lock (bci)
    {
        switch (col.DataType)
        {
            case DataType.DT_BOOL:
                sw.Write(buffer.GetBoolean(ci) ? 1 : 0);
                break;
            case DataType.DT_BYTES:
                bdata = buffer.GetBytes(ci);
                break;
            case DataType.DT_IMAGE:
                bdata = buffer.GetBlobData(ci, 0, (int)buffer.GetBlobLength(ci));
                break;
            case DataType.DT_CY:
            case DataType.DT_DECIMAL:
            case DataType.DT_NUMERIC:
                strData = buffer.GetDecimal(ci).ToString(CultureInfo.InvariantCulture);
                break;
            case DataType.DT_DATE:
                // NOTE(review): DT_DATE is formatted as "yyyy-MM-dd HH" (hour
                // only, no minutes/seconds) — confirm this is intentional.
                strData = buffer.GetDateTime(ci).ToString("yyyy-MM-dd HH");
                break;
            case DataType.DT_DBTIMESTAMP:
                strData = buffer.GetDateTime(ci).ToString("yyyy-MM-dd HH:mm:ss.fff");
                break;
            case DataType.DT_DBTIMESTAMP2:
                strData = buffer.GetDateTime(ci).ToString("yyyy-MM-dd HH:mm:ss.fffffff");
                break;
            case DataType.DT_FILETIME:
                sw.Write(buffer.GetInt64(ci));
                break;
            case DataType.DT_DBDATE:
                strData = buffer.GetDate(ci).ToString("yyyy-MM-dd");
                break;
#if NET35
            // On .NET 3.5 GetTime's return has no direct custom formatting;
            // convert via DateTime ticks for formatting.
            case DataType.DT_DBTIME:
                strData = new DateTime(buffer.GetTime(ci).Ticks).ToString("HH:mm:ss");
                break;
            case DataType.DT_DBTIME2:
                strData = new DateTime(buffer.GetTime(ci).Ticks).ToString("HH:mm:ss.fffffff");
                break;
#else
            case DataType.DT_DBTIME:
                strData = buffer.GetTime(ci).ToString("HH:mm:ss");
                break;
            case DataType.DT_DBTIME2:
                strData = buffer.GetTime(ci).ToString("HH:mm:ss.fffffff");
                break;
#endif
            case DataType.DT_DBTIMESTAMPOFFSET:
                strData = buffer.GetDateTimeOffset(ci).ToString("yyyy-MM-dd HH:mm:ss.fffffff zzz");
                break;
            case DataType.DT_EMPTY:
            case DataType.DT_NULL:
                bdata = new byte[0];
                break;
            case DataType.DT_GUID:
                strData = buffer.GetGuid(ci).ToString();
                break;
            case DataType.DT_I1:
                sw.Write(buffer.GetSByte(ci));
                break;
            case DataType.DT_I2:
                sw.Write(buffer.GetInt16(ci));
                break;
            case DataType.DT_I4:
                sw.Write(buffer.GetInt32(ci));
                break;
            case DataType.DT_I8:
                sw.Write(buffer.GetInt64(ci));
                break;
            case DataType.DT_R4:
                sw.Write(buffer.GetSingle(ci));
                break;
            case DataType.DT_R8:
                sw.Write(buffer.GetDouble(ci));
                break;
            case DataType.DT_UI1:
                sw.Write(buffer.GetByte(ci));
                break;
            case DataType.DT_UI2:
                sw.Write(buffer.GetUInt16(ci));
                break;
            case DataType.DT_UI4:
                sw.Write(buffer.GetUInt32(ci));
                break;
            case DataType.DT_UI8:
                sw.Write(buffer.GetUInt64(ci));
                break;
            case DataType.DT_NTEXT:
            case DataType.DT_TEXT:
            case DataType.DT_STR:
            case DataType.DT_WSTR:
                trim = true;
                strData = buffer.GetString(ci);
                if (hCol.HashImplmentationType == HashColumnsTransformation.HashImplementationType.UnicodeStringDelmitedSafe)
                {
                    // Safe string handling also writes the string length before the value.
                    writeLen = true;
                }
                break;
            default:
                bdata = new byte[0];
                break;
        }
    }

    // Binary data is written via its hex representation.
    if (bdata != null)
    {
        strData = BitConverter.ToString(bdata).Replace("-", "");
    }

    if (strData != null)
    {
        if (trim)
        {
            strData = TrimString(hCol, strData);
        }
        if (writeLen)
        {
            sw.Write(strData.Length);
            sw.Write(hCol.HashFieldsDelimiter);
        }
        sw.Write(strData);
    }
    sw.Flush();
}
/// <summary>
/// Pages through all entities of the cloud database (pages of 500) and adds one
/// output row per entity; fires an error and rethrows on failure, always
/// signalling end-of-rowset.
/// </summary>
public override void PrimeOutput(int outputs, int[] outputIDs, PipelineBuffer[] buffers)
{
    IDTSOutput100 output = ComponentMetaData.OutputCollection[0];
    PipelineBuffer buffer = buffers[0];

    try
    {
        string lastId = string.Empty;
        while (true)
        {
            int pageCount = 0;
            Entity[] entities = GetEntities(false, lastId);

            // Walk the entities of this page and add them to the output buffer.
            foreach (Entity entity in entities)
            {
                pageCount++;
                lastId = entity.Id;

                buffer.AddRow();
                for (int i = 0; i < this._columnInfo.Count; i++)
                {
                    ColumnInfo columnInfo = this._columnInfo[i];
                    if (columnInfo.ColumnName == DefaultIdColumnName)
                    {
                        buffer[columnInfo.BufferColumnIndex] = entity.Id;
                    }
                    else
                    {
                        object value = null;
                        if (entity.Properties.TryGetValue(columnInfo.ColumnName, out value))
                        {
                            buffer[columnInfo.BufferColumnIndex] = value;
                        }
                        else
                        {
                            // Missing property maps to a NULL column.
                            buffer.SetNull(columnInfo.BufferColumnIndex);
                        }
                    }
                }
            }

            // The cloud DB pages at 500 entities; a short page means we are done.
            if (pageCount < 500)
            {
                break;
            }
        }
    }
    catch (Exception e)
    {
        ComponentMetaData.FireError(0, ComponentMetaData.Name, e.Message, string.Empty, 0, out this._cancel);
        throw;
    }
    finally
    {
        // Notify the data flow that we are finished adding rows to the output.
        buffer.SetEndOfRowset();
    }
}
/// <summary>
/// Builds Memory Stream data for Hashing from Input Columns: writes each input
/// column of the hash column using the writer matching the configured hash
/// implementation type.
/// </summary>
/// <param name="hCol">HashColumnInfo</param>
/// <param name="inputBufferColumns">InputBuferColumnInfo</param>
/// <param name="buffer">input Buffer</param>
/// <param name="mb">Memory Buffers</param>
public static void BuildHashMemoryStream(HashColumnsTransformation.HashColumnInfo hCol, List<HashColumnsTransformation.InputBufferColumnInfo> inputBufferColumns, PipelineBuffer buffer, HashMemoryBuffers mb)
{
    MemoryStream ms = mb.MemoryStream;
    StreamWriter sw = mb.StreamWriter;

    for (int position = 0; position < hCol.HashInputColumns.Count; position++)
    {
        HashColumnsTransformation.InputBufferColumnInfo columnInfo = inputBufferColumns[hCol.HashInputColumns[position]];

        switch (hCol.HashImplmentationType)
        {
            case HashColumnsTransformation.HashImplementationType.BinarySafe:
                WriteColumnToStreamBinary(hCol, columnInfo, buffer, mb, sw);
                break;
            case HashColumnsTransformation.HashImplementationType.UnicodeStringDelmited:
                WriteColumnToStreamUnicodeDelimited(position, hCol, columnInfo, buffer, mb);
                break;
            default:
                WriteColumnToStreamOriginal(columnInfo, buffer, mb);
                break;
        }
    }
}
/// <summary>
/// Writes one column's value to the hash input stream in a binary-safe layout:
/// a 0 start-of-field byte, a null marker byte (1 = NULL, 0 = value present),
/// then — for types buffered via <c>bdata</c>/<c>asciiStr</c> — a length prefix
/// followed by the bytes. Date/time, GUID and offset types are written directly
/// through the BinaryWriter without a length prefix (fixed-size encodings).
/// </summary>
/// <param name="hCol">Hash column settings (used for string trimming).</param>
/// <param name="bci">Input buffer column being serialized.</param>
/// <param name="buffer">input Buffer</param>
/// <param name="mb">Memory Buffers providing the BinaryWriter.</param>
/// <param name="sw">StreamWriter — NOTE(review): unused in this method; confirm
/// whether it is vestigial before removing it from the signature.</param>
private static void WriteColumnToStreamBinary(HashColumnsTransformation.HashColumnInfo hCol, HashColumnsTransformation.InputBufferColumnInfo bci, PipelineBuffer buffer, HashMemoryBuffers mb, StreamWriter sw)
{
    int ci = bci.Index;
    byte[] bdata = null;
    byte[] decimalArray = new byte[16]; //Array for storing decimal numbers
    string asciiStr = null;
    BinaryWriter bw = mb.BinaryWriter;
    BufferColumn col = buffer.GetColumnInfo(bci.Index);
    bw.Write((byte)0);//Write byte (0) as start of field;
    if (buffer.IsNull(bci.Index))
    {
        bw.Write((byte)1); //Write 1 representing NULL
        bw.Write(0); //write length of 0 for NULL
    }
    else
    {
        bw.Write((byte)0); //write 0 representing NOT NULL
        //Get buffer data
        // NOTE(review): locking on the parameter 'bci' relies on all callers
        // sharing the same InputBufferColumnInfo instance per column — confirm
        // the intended synchronization scope.
        lock (bci)
        {
            switch (col.DataType)
            {
                case DataType.DT_BOOL:
                    bdata = BitConverter.GetBytes(buffer.GetBoolean(ci));
                    break;
                case DataType.DT_BYTES:
                    bdata = buffer.GetBytes(ci);
                    break;
                case DataType.DT_IMAGE:
                    bdata = buffer.GetBlobData(ci, 0, (int)buffer.GetBlobLength(ci));
                    break;
                case DataType.DT_CY:
                case DataType.DT_DECIMAL:
                case DataType.DT_NUMERIC:
                    // Decimals are hashed via their invariant-culture string form.
                    bdata = Encoding.ASCII.GetBytes(buffer.GetDecimal(ci).ToString(CultureInfo.InvariantCulture));
                    break;
                case DataType.DT_DATE:
                case DataType.DT_DBTIMESTAMP:
                case DataType.DT_DBTIMESTAMP2:
                    bw.Write(buffer.GetDateTime(ci).ToBinary());
                    break;
                case DataType.DT_FILETIME:
                    bw.Write(buffer.GetInt64(ci));
                    break;
                case DataType.DT_DBDATE:
                    bw.Write(buffer.GetDate(ci).ToBinary());
                    break;
                case DataType.DT_DBTIME:
                case DataType.DT_DBTIME2:
                    bw.Write(buffer.GetTime(ci).Ticks);
                    break;
                case DataType.DT_DBTIMESTAMPOFFSET:
                    // 16 bytes: 8 for the DateTime, 8 for the offset ticks.
                    var dtoffset = buffer.GetDateTimeOffset(ci);
                    BitConverter.GetBytes(dtoffset.DateTime.ToBinary()).CopyTo(decimalArray, 0);
                    BitConverter.GetBytes(dtoffset.Offset.Ticks).CopyTo(decimalArray, 8);
                    bw.Write(decimalArray);
                    break;
                case DataType.DT_EMPTY:
                case DataType.DT_NULL:
                    bdata = new byte[0];
                    break;
                case DataType.DT_GUID:
                    bw.Write(Encoding.ASCII.GetBytes(buffer.GetGuid(ci).ToString()));
                    break;
                case DataType.DT_I1:
                    asciiStr = buffer.GetSByte(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_I2:
                    asciiStr = buffer.GetInt16(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_I4:
                    asciiStr = buffer.GetInt32(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_I8:
                    asciiStr = buffer.GetInt64(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_R4:
                    asciiStr = buffer.GetSingle(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_R8:
                    asciiStr = buffer.GetDouble(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_UI1:
                    asciiStr = buffer.GetByte(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_UI2:
                    asciiStr = buffer.GetUInt16(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_UI4:
                    asciiStr = buffer.GetUInt32(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_UI8:
                    asciiStr = buffer.GetUInt64(ci).ToString(CultureInfo.InvariantCulture);
                    break;
                case DataType.DT_NTEXT:
                case DataType.DT_TEXT:
                case DataType.DT_STR:
                case DataType.DT_WSTR:
                    // Strings are trimmed per the hash column's settings, then UTF-16 encoded.
                    bdata = Encoding.Unicode.GetBytes(TrimString(hCol, buffer.GetString(ci)));
                    break;
                default:
                    bdata = new byte[0];
                    break;
            }
        }
        // Numeric types routed through asciiStr become ASCII bytes here.
        if (asciiStr != null)
        {
            bdata = Encoding.ASCII.GetBytes(asciiStr);
        }
        if (bdata != null)
        {
            bw.Write(bdata.Length); //write length of buffer
            bw.Write(bdata); //write bufferdata;
        }
    }
}
/// <summary>
/// Maps one parsed JSON object to a single output row: adds the row, then fills
/// every mapped output column by evaluating its JSONPath against the object.
/// Raw-json columns receive the unparsed token text; multi-match paths are
/// emitted as a JSON array string with a one-time warning per column.
/// </summary>
/// <param name="obj">The JSON object to extract values from.</param>
/// <param name="inputbuffer">Buffer forwarded to AddOutputRow.</param>
/// <returns>The number of column values successfully written.</returns>
private int ProcessObject(JObject obj, ref PipelineBuffer inputbuffer)
{
    // Each object corresponds to an output row.
    int res = 0;
    AddOutputRow(ref inputbuffer);
    // For each column requested from metadata, look for data in the object we parsed.
    // Fix: the delegate runs in parallel, so 'res' is now incremented with
    // Interlocked (plain res++ raced). All rethrows use 'throw;' to preserve
    // the original stack trace (was 'throw ex;').
    // NOTE(review): concurrent writes to _outputBuffer[...] for distinct column
    // indexes are assumed safe — confirm against PipelineBuffer semantics.
    Parallel.ForEach <IOMapEntry>(_iomap, _opt, delegate(IOMapEntry e)
    {
        int colIndex = _outColsMaps[e.OutputColName];
        // If the user wants raw json, we should parse nothing: simply return all the json as a string
        if (e.OutputJsonColumnType == JsonTypes.RawJson)
        {
            string val = null;
            var vals = obj.SelectTokens(e.InputFieldPath);
            if (vals.Count() == 0)
            {
                val = null;
            }
            else if (vals.Count() == 1)
            {
                val = vals.ElementAt(0).ToString();
            }
            else
            {
                JArray arr = new JArray();
                foreach (var t in vals)
                {
                    arr.Add(t);
                }
                val = arr.ToString();
            }
            try
            {
                _outputBuffer[colIndex] = val;
                System.Threading.Interlocked.Increment(ref res);
            }
            catch (DoesNotFitBufferException)
            {
                bool fireAgain = false;
                ComponentMetaData.FireError(ComponentConstants.ERROR_INVALID_BUFFER_SIZE, ComponentMetaData.Name, String.Format("Maximum size of column {0} is smaller than provided data. Please increase buffer size.", e.OutputColName), null, 0, out fireAgain);
                throw;
            }
        }
        else
        {
            // If it's not a json raw type, parse the value.
            try
            {
                IEnumerable <JToken> tokens = obj.SelectTokens(e.InputFieldPath);
                int count = tokens.Count();
                if (count == 0)
                {
                    // Warn only once per column about paths that match nothing.
                    if (!_warnNotified.Contains(colIndex))
                    {
                        _warnNotified.Add(colIndex);
                        ComponentMetaData.FireWarning(ComponentConstants.RUNTIME_GENERIC_ERROR, ComponentMetaData.Name, String.Format("No value has been found when parsing jsonpath {0} on column {1}. Is the jsonpath correct?", e.InputFieldPath, e.OutputColName), null, 0);
                    }
                }
                else if (count == 1)
                {
                    try
                    {
                        System.Threading.Interlocked.Increment(ref res);
                        _outputBuffer[colIndex] = tokens.ElementAt(0);
                    }
                    catch (DoesNotFitBufferException)
                    {
                        bool fireAgain = false;
                        ComponentMetaData.FireError(ComponentConstants.ERROR_INVALID_BUFFER_SIZE, ComponentMetaData.Name, String.Format("Maximum size of column {0} is smaller than provided data. Please increase buffer size.", e.OutputColName), null, 0, out fireAgain);
                        throw;
                    }
                }
                else
                {
                    if (!_warnNotified.Contains(colIndex))
                    {
                        _warnNotified.Add(colIndex);
                        ComponentMetaData.FireWarning(ComponentConstants.RUNTIME_GENERIC_ERROR, ComponentMetaData.Name, String.Format("Multiple values have been found when parsing jsonpath {0} on column {1}. This will led to line explosion, so I won't explode this here to save memory. Put a filter in pipeline to explode the lines, if needed.", e.InputFieldPath, e.OutputColName), null, 0);
                    }
                    // This case requires explosions. We cannot perform it here, so we output raw json
                    JArray arr = new JArray();
                    foreach (var t in tokens)
                    {
                        arr.Add(t);
                    }
                    try
                    {
                        _outputBuffer[colIndex] = arr.ToString();
                    }
                    catch (DoesNotFitBufferException)
                    {
                        bool fireAgain = false;
                        ComponentMetaData.FireError(ComponentConstants.ERROR_INVALID_BUFFER_SIZE, ComponentMetaData.Name, String.Format("Maximum size of column {0} is smaller than provided data. Please increase buffer size.", e.OutputColName), null, 0, out fireAgain);
                        throw;
                    }
                }
            }
            catch (Newtonsoft.Json.JsonException ex)
            {
                bool fireAgain = false;
                ComponentMetaData.FireError(ComponentConstants.ERROR_SELECT_TOKEN, ComponentMetaData.Name, "SelectToken failed. This may be due to an invalid Xpath syntax / member name. However this error still happens if multiple tokens are returned and the value expected is single. Specific error was: " + ex.Message, null, 0, out fireAgain);
                throw;
            }
        }
    });
    return(res);
}
/// <summary>
/// Audits incoming rows: captures every selected input column's value into
/// _auditRowDataCollection, framed by BeginMessage/EndMessage, and enforces an
/// optional hard limit (_limitRowsToLog) on the total number of rows logged.
/// </summary>
/// <param name="inputID">ID of the input the buffer belongs to.</param>
/// <param name="buffer">The pipeline buffer containing the rows to audit.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    if (buffer.EndOfRowset)
    {
        return;
    }
    BeginMessage();
    while (buffer.NextRow())
    {
        // Close the current message once 1000 rows have been collected.
        // NOTE(review): _rowCount is not reset here and BeginMessage is not
        // re-invoked — confirm EndMessage/BeginMessage handle batching state
        // internally, otherwise only the first 1000-row boundary is flushed.
        if (_rowCount == 1000)
        {
            EndMessage();
        }
        _rowCount++;
        _allRowCount++;
        if ((_limitRowsToLog > 0) && (_allRowCount > _limitRowsToLog))
        {
            // Row cap exceeded: stop collecting; a hard error is fired below.
            // NOTE(review): the error text mentions a negative limit but this
            // condition triggers for positive limits — confirm intended wording.
            _isHardError = true;
        }
        else
        {
            foreach (var info in _inputColumnInfos)
            {
                // NULL column values are logged as empty strings.
                _auditRowDataCollection.Add(buffer.IsNull(info.BufferColumnIndex) ? new AuditRowData(_rowCount, info.Name, "") : new AuditRowData(_rowCount, info.Name, buffer[info.BufferColumnIndex].ToString()));
                //switch (info.Name)
                //{
                //    case "ErrorCode":
                //        var errorDescription = ComponentMetaData.GetErrorDescription(buffer.GetInt32(info.BufferColumnIndex));
                //        if (string.IsNullOrEmpty(errorDescription))
                //        {
                //            _auditRowDataCollection.Add(new AuditRowData(_rowCount, "ErrorDescription",
                //                errorDescription));
                //        }
                //        break;
                //    case "ErrorColumn":
                //        var errorColumnId = buffer[info.BufferColumnIndex].ToString();
                //        if (string.IsNullOrEmpty(errorColumnId))
                //        {
                //            //var errorColumn = input.InputColumnCollection.GetInputColumnByLineageID(info.LineageID);
                //            _auditRowDataCollection.Add(new AuditRowData(_rowCount, "ErrorColumn", errorColumnId));
                //        }
                //        break;
                //    default:
                //        _auditRowDataCollection.Add(buffer.IsNull(info.BufferColumnIndex)
                //            ? new AuditRowData(_rowCount, info.Name, "")
                //            : new AuditRowData(_rowCount, info.Name, buffer[info.BufferColumnIndex].ToString()));
                //        break;
                //}
            }
        }
    }
    // Flush any rows collected since the last EndMessage.
    if (_auditRowDataCollection.Count > 0)
    {
        EndMessage();
    }
    if (!_isHardError)
    {
        return;
    }
    bool isTrue;
    ComponentMetaData.FireError(0, ComponentMetaData.Name, "Biml Row Audit: Too many rows logged, LimitNumberOfRowsToLog is negative and exceeded.", "", 0, out isTrue);
}
/// <summary>
/// Caches the component's single output buffer in a field so rows can be added
/// to it later.
/// </summary>
/// <param name="outputs">Number of elements in outputIDs/buffers.</param>
/// <param name="outputIDs">Output IDs corresponding to each buffer.</param>
/// <param name="buffers">The output buffers; only the first is used.</param>
public override void PrimeOutput(int outputs, int[] outputIDs, PipelineBuffer[] buffers)
{
    // Presumably rows are added during ProcessInput, as in the other
    // asynchronous transforms in this file — TODO confirm against the owner.
    outputBuffer = buffers[0];
}
/// <summary>
/// This is where the data is read from the input buffer. Each row is converted
/// to a string dictionary keyed by SharePoint field name, then the whole batch
/// is sent as either an update (BatchType.Modification) or a delete (by ID).
/// NULL input columns are skipped entirely (the field is simply omitted from
/// the row dictionary).
/// </summary>
/// <param name="inputID"></param>
/// <param name="buffer"></param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    string sharepointUrl = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTSITEURL].Value;
    string sharepointList = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTLISTNAME].Value;
    string sharepointListView = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTLISTVIEWNAME].Value;
    short batchSize = (short)ComponentMetaData.CustomPropertyCollection[C_BATCHSIZE].Value;
    Enums.BatchType batchType = (Enums.BatchType)ComponentMetaData.CustomPropertyCollection[C_BATCHTYPE].Value;
    if (!buffer.EndOfRowset)
    {
        // Queue the data up for batching by the sharepoint accessor object
        var dataQueue = new List <Dictionary <string, string> >();
        while (buffer.NextRow())
        {
            var rowData = new Dictionary <string, string>();
            foreach (var fieldName in _bufferLookup.Keys)
            {
                if (buffer.IsNull(_bufferLookup[fieldName]))
                {
                    // Do nothing, can ignore this field
                }
                else
                {
                    // Convert each SSIS type to its SharePoint string form.
                    switch (_bufferLookupDataType[fieldName])
                    {
                        case DataType.DT_STR:
                        case DataType.DT_WSTR:
                            rowData.Add(fieldName, buffer.GetString(_bufferLookup[fieldName]));
                            break;
                        case DataType.DT_NTEXT:
                            // Blob columns are read fully and decoded as UTF-16.
                            int colDataLength = (int)buffer.GetBlobLength(_bufferLookup[fieldName]);
                            byte[] stringData = buffer.GetBlobData(_bufferLookup[fieldName], 0, colDataLength);
                            rowData.Add(fieldName, Encoding.Unicode.GetString(stringData));
                            break;
                        case DataType.DT_R4:
                            rowData.Add(fieldName, buffer.GetSingle(_bufferLookup[fieldName]).ToString(_culture));
                            break;
                        case DataType.DT_CY:
                            rowData.Add(fieldName, buffer.GetDecimal(_bufferLookup[fieldName]).ToString(_culture));
                            break;
                        case DataType.DT_R8:
                            rowData.Add(fieldName, buffer.GetDouble(_bufferLookup[fieldName]).ToString(_culture));
                            break;
                        case DataType.DT_UI1:
                        case DataType.DT_I1:
                        case DataType.DT_BOOL:
                            // NOTE(review): GetBoolean is used for DT_UI1/DT_I1 too —
                            // assumes those columns actually hold boolean flags; confirm.
                            rowData.Add(fieldName, buffer.GetBoolean(_bufferLookup[fieldName]).ToString(_culture));
                            break;
                        case DataType.DT_UI2:
                        case DataType.DT_I2:
                            rowData.Add(fieldName, buffer.GetInt16(_bufferLookup[fieldName]).ToString(_culture));
                            break;
                        case DataType.DT_UI4:
                        case DataType.DT_I4:
                            rowData.Add(fieldName, buffer.GetInt32(_bufferLookup[fieldName]).ToString(_culture));
                            break;
                        case DataType.DT_UI8:
                        case DataType.DT_I8:
                            rowData.Add(fieldName, buffer.GetInt64(_bufferLookup[fieldName]).ToString(_culture));
                            break;
                        case DataType.DT_GUID:
                            rowData.Add(fieldName, buffer.GetGuid(_bufferLookup[fieldName]).ToString());
                            break;
                        case DataType.DT_DBTIMESTAMP:
                            // ISO-8601 style timestamp ("u" format with 'T' separator).
                            rowData.Add(fieldName, buffer.GetDateTime(_bufferLookup[fieldName]).ToString("u").Replace(" ", "T"));
                            break;
                    }
                }
            }
            dataQueue.Add(rowData);
        }
        bool fireAgain = false;
        if (dataQueue.Count() > 0)
        {
            System.Diagnostics.Stopwatch timer = new System.Diagnostics.Stopwatch();
            timer.Start();
            System.Xml.Linq.XElement resultData;
            if (batchType == Enums.BatchType.Modification)
            {
                // Perform the update
                resultData = SharePointUtility.ListServiceUtility.UpdateListItems(
                    new Uri(sharepointUrl), _credentials, sharepointList, sharepointListView, dataQueue, batchSize);
            }
            else
            {
                // Get the IDs read from the buffer
                var idList = from data in dataQueue
                             where data["ID"].Trim().Length > 0
                             select data["ID"];
                // Delete the list items with IDs
                resultData = SharePointUtility.ListServiceUtility.DeleteListItems(
                    new Uri(sharepointUrl), _credentials, sharepointList, sharepointListView, idList);
            }
            timer.Stop();
            // Rows whose result element carries an errorCode child are failures.
            var errorRows = from result in resultData.Descendants("errorCode")
                            select result.Parent;
            int successRowsWritten = resultData.Elements().Count() - errorRows.Count();
            string infoMsg = string.Format(CultureInfo.InvariantCulture, "Affected {0} records in list '{1}' at '{2}'. Elapsed time is {3}ms", successRowsWritten, sharepointList, sharepointUrl, timer.ElapsedMilliseconds);
            ComponentMetaData.FireInformation(0, ComponentMetaData.Name, infoMsg, "", 0, ref fireAgain);
            ComponentMetaData.IncrementPipelinePerfCounter(
                DTS_PIPELINE_CTR_ROWSWRITTEN, (uint)successRowsWritten);
            // Shovel any error rows to the error flow
            bool cancel;
            int errorIter = 0;
            foreach (var row in errorRows)
            {
                // Do not flood the error log.
                errorIter++;
                if (errorIter > 10)
                {
                    ComponentMetaData.FireError(0, ComponentMetaData.Name, "Total of " + errorRows.Count().ToString(_culture) + ", only showing first 10.", "", 0, out cancel);
                    return;
                }
                string idString = "";
                XAttribute attrib = row.Element("row").Attribute("ID");
                if (attrib != null)
                {
                    idString = "(SP ID=" + attrib.Value + ")";
                }
                string errorString = string.Format(CultureInfo.InvariantCulture, "Error on row {0}: {1} - {2} {3}", row.Attribute("ID"), row.Element("errorCode").Value, row.Element("errorDescription").Value, idString);
                ComponentMetaData.FireError(0, ComponentMetaData.Name, errorString, "", 0, out cancel);
                // Need to throw an exception, or else this step's box is green (should be red), even though the flow
                // is marked as failure regardless.
                throw new PipelineProcessException("Errors detected in this component - see SSIS Errors");
            }
        }
        else
        {
            ComponentMetaData.FireInformation(0, ComponentMetaData.Name, "No rows found to update in destination.", "", 0, ref fireAgain);
        }
    }
}
/// <summary>
/// Called at run time for source components and transformation components with
/// asynchronous outputs to let these components add rows to the output buffers.
/// Iterates the Mongo collection cursor, copying each document into the default
/// output buffer; truncation/error dispositions are honored per column.
/// </summary>
/// <param name="outputs">The number of elements in the outputIDs and buffers arrays.</param>
/// <param name="outputIDs">An array of Microsoft.SqlServer.Dts.Pipeline.Wrapper.IDTSOutput100 ID's.</param>
/// <param name="buffers">An array of Microsoft.SqlServer.Dts.Pipeline.PipelineBuffer objects.</param>
public override void PrimeOutput(int outputs, int[] outputIDs, PipelineBuffer[] buffers)
{
    // Determine which buffer is for regular output, and which is for error output
    PipelineBuffer errorBuffer = null;
    PipelineBuffer defaultBuffer = null;
    for (int outputIndex = 0; outputIndex < outputs; outputIndex++)
        if (outputIDs[outputIndex] == errorOutputID)
            errorBuffer = buffers[outputIndex];
        else
            defaultBuffer = buffers[outputIndex];
    var cursor = GetCollectionCursor(ComponentMetaData.CustomPropertyCollection[COLLECTION_NAME_PROP_NAME].Value);
    // NOTE(review): 'defaultOutputColumns' appears unused below; the call is kept
    // because GetDefaultOutputColumns() may have side effects — confirm.
    var defaultOutputColumns = GetDefaultOutputColumns().ToArray();
    foreach (BsonDocument document in cursor)
    {
        // Track the column currently being written so the catch block can
        // report/redirect based on the failing column's disposition.
        ColumnInfo failingColumnInfo = null;
        try
        {
            defaultBuffer.AddRow();
            foreach (ColumnInfo columnInfo in this.columnInformata)
            {
                failingColumnInfo = columnInfo;
                if (document.Contains(columnInfo.ColumnName) && document[columnInfo.ColumnName] != null)
                {
                    if (document.GetValue(columnInfo.ColumnName).IsBsonNull)
                    {
                        defaultBuffer.SetNull(columnInfo.OuputBufferColumnIndex);
                    }
                    else
                    {
                        var value = GetValue(document, columnInfo);
                        try
                        {
                            defaultBuffer[columnInfo.OuputBufferColumnIndex] = value;
                        }
                        catch (DoesNotFitBufferException)
                        {
                            // When the column ignores truncation, strings are cut to
                            // the declared length; other types cannot be truncated here.
                            if (failingColumnInfo.OutputColumn.TruncationRowDisposition == DTSRowDisposition.RD_IgnoreFailure)
                                if (value is string)
                                    defaultBuffer[columnInfo.OuputBufferColumnIndex] = value.ToString().Substring(0, columnInfo.OutputColumn.Length);
                                else
                                    ComponentMetaData.FireWarning(0, "MongoDataSource", string.Format("Truncation of column {0} failed, as truncation of type {1} currently unsupported.", columnInfo.OutputColumn.Name, value.GetType().FullName), String.Empty, 0);
                            else
                                throw; // fix: was 'throw ex;', which reset the stack trace
                        }
                    }
                }
                else
                {
                    // Field absent from this document: emit NULL.
                    defaultBuffer.SetNull(columnInfo.OuputBufferColumnIndex);
                }
            }
        }
        catch (Exception ex)
        {
            // Route the row per the failing column's configured disposition.
            DTSRowDisposition disposition = DTSRowDisposition.RD_NotUsed;
            if (ex is DoesNotFitBufferException)
                disposition = failingColumnInfo.OutputColumn.TruncationRowDisposition;
            else
                disposition = failingColumnInfo.OutputColumn.ErrorRowDisposition;
            HandleProcessingError(disposition, defaultBuffer, errorBuffer, failingColumnInfo, ex);
        }
    }
    if (defaultBuffer != null)
        defaultBuffer.SetEndOfRowset();
    if (errorBuffer != null)
        errorBuffer.SetEndOfRowset();
}
/// <summary>
/// Called when a PipelineBuffer is passed to the component. Serializes every
/// selected input column (plus a null/length bookkeeping string) into a byte
/// buffer, computes SHA1 over it and over its byte-reversed form, and writes
/// both hex digests to the two output columns before directing the row to the
/// default output.
/// </summary>
/// <param name="inputID">The ID of the Input that the buffer contains rows for.</param>
/// <param name="buffer">The PipelineBuffer containing the columns defined in the IDTSInput100.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    if (buffer == null)
    {
        throw new ArgumentNullException("buffer");
    }
    if (!buffer.EndOfRowset)
    {
        IDTSInput100 input = ComponentMetaData.InputCollection.GetObjectByID(inputID);
        var errorOutputID = -1;
        var errorOutputIndex = -1;
        var defaultOutputId = -1;
        GetErrorOutputInfo(ref errorOutputID, ref errorOutputIndex);
        defaultOutputId = errorOutputIndex == 0 ? ComponentMetaData.OutputCollection[1].ID : ComponentMetaData.OutputCollection[0].ID;
        while (buffer.NextRow())
        {
            /// If the inputColumnInfos array has zero dimensions, then
            /// no input columns have been selected for the component.
            /// Direct the row to the default output.
            if (inputColumnInfos.Length == 0)
            {
                buffer.DirectRow(defaultOutputId);
            }
            else
            {
                var isError = false;
                var inputByteBuffer = new byte[1000];
                var bufferUsed = 0;
                // nullHandling records N/Y per column (plus lengths for variable-size
                // types) so NULL patterns and boundaries alter the hash.
                var nullHandling = String.Empty;
                foreach (var columnToProcessID in inputColumnInfos.Select(info => info.bufferColumnIndex))
                {
                    if (!buffer.IsNull(columnToProcessID))
                    {
                        nullHandling += "N";
                        switch (buffer.GetColumnInfo(columnToProcessID).DataType)
                        {
                            case DataType.DT_BOOL:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetBoolean(columnToProcessID));
                                break;
                            case DataType.DT_IMAGE:
                                uint blobLength = buffer.GetBlobLength(columnToProcessID);
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetBlobData(columnToProcessID, 0, (int)blobLength));
                                nullHandling += blobLength.ToString(CultureInfo.InvariantCulture);
                                break;
                            case DataType.DT_BYTES:
                                byte[] bytesFromBuffer = buffer.GetBytes(columnToProcessID);
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, bytesFromBuffer);
                                nullHandling += bytesFromBuffer.GetLength(0).ToString(CultureInfo.InvariantCulture);
                                break;
                            case DataType.DT_CY:
                            case DataType.DT_DECIMAL:
                            case DataType.DT_NUMERIC:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDecimal(columnToProcessID));
                                break;
                            //#if SQL2005
                            //#else
                            //case DataType.DT_DBTIMESTAMPOFFSET:
                            //    DateTimeOffset dateTimeOffset = buffer.GetDateTimeOffset(columnToProcessID);
                            //    Utility.Append(ref inputByteBuffer, ref bufferUsed, dateTimeOffset);
                            //    break;
                            case DataType.DT_DBDATE:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDate(columnToProcessID), millisecondHandling);
                                break;
                            //#endif
                            case DataType.DT_DATE:
                            case DataType.DT_DBTIMESTAMP:
#if SQL2005
#else
                            case DataType.DT_DBTIMESTAMP2:
                            case DataType.DT_FILETIME:
#endif
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDateTime(columnToProcessID), millisecondHandling);
                                break;
#if SQL2005
#else
                            case DataType.DT_DBTIME:
                            case DataType.DT_DBTIME2:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetTime(columnToProcessID));
                                break;
#endif
                            case DataType.DT_GUID:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetGuid(columnToProcessID));
                                break;
                            case DataType.DT_I1:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetSByte(columnToProcessID));
                                break;
                            case DataType.DT_I2:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetInt16(columnToProcessID));
                                break;
                            case DataType.DT_I4:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetInt32(columnToProcessID));
                                break;
                            case DataType.DT_I8:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetInt64(columnToProcessID));
                                break;
                            case DataType.DT_NTEXT:
                            case DataType.DT_STR:
                            case DataType.DT_TEXT:
                            case DataType.DT_WSTR:
                                String stringFromBuffer = buffer.GetString(columnToProcessID);
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, stringFromBuffer, Encoding.UTF8);
                                nullHandling += stringFromBuffer.Length.ToString();
                                break;
                            case DataType.DT_R4:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetSingle(columnToProcessID));
                                break;
                            case DataType.DT_R8:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDouble(columnToProcessID));
                                break;
                            case DataType.DT_UI1:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetByte(columnToProcessID));
                                break;
                            case DataType.DT_UI2:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetUInt16(columnToProcessID));
                                break;
                            case DataType.DT_UI4:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetUInt32(columnToProcessID));
                                break;
                            case DataType.DT_UI8:
                                Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetUInt64(columnToProcessID));
                                break;
                            case DataType.DT_EMPTY:
                            case DataType.DT_NULL:
                            default:
                                break;
                        }
                    }
                    else
                    {
                        nullHandling += "Y";
                    }
                }
                Utility.Append(ref inputByteBuffer, ref bufferUsed, nullHandling, Encoding.UTF8);
                // Fix: SHA1CryptoServiceProvider is IDisposable and was previously
                // created per row without disposal; 'using' prevents the leak.
                using (var sha1HashDual = new SHA1CryptoServiceProvider())
                {
                    var fhash = sha1HashDual.ComputeHash(inputByteBuffer);
                    var reverseByteBuffer = inputByteBuffer.Reverse().ToArray();
                    var rhash = sha1HashDual.ComputeHash(reverseByteBuffer);
                    var hash1 = BitConverter.ToString(fhash).Replace("-", ""); // + "~" + BitConverter.ToString(rhash);
                    var hash2 = BitConverter.ToString(rhash).Replace("-", "");
                    buffer.SetString(outputColumnInfos[0].bufferColumnIndex, hash1);
                    buffer.SetString(outputColumnInfos[1].bufferColumnIndex, hash2);
                    //buffer.SetInt16(outputColumnInfos[2].bufferColumnIndex, (Int16)(Math.Abs(BitConverter.ToInt16(fhash, 0)) % noOfPartitions));
                }
                /// Finished processing each of the columns in this row.
                /// If an error occurred and the error output is configured, then the row has already been directed to the error output, if configured.
                /// If not, then direct the row to the default output.
                if (!isError)
                {
                    buffer.DirectRow(defaultOutputId);
                }
            }
        }
    }
}
/// <summary>
/// Reads this column's value from the buffer, converted to a CLR object
/// suitable for the destination: booleans/dates pass through boxed, text via
/// GetString, blobs/bytes as byte arrays (images over 2M are redirected to the
/// error output and yield null), and all numeric types normalized to decimal.
/// Returns null when the buffer value is NULL.
/// </summary>
/// <param name="buffer">The pipeline buffer positioned on the current row.</param>
/// <returns>The converted value, or null for NULL/empty/oversized values.</returns>
public object GetColumnValue(PipelineBuffer buffer)
{
    object value = null;
    if (!buffer.IsNull(Index))
    {
        switch (DataType)
        {
            case DataType.DT_BOOL:
                value = buffer[Index]; // Boolean.Parse(buffer[colIndex].ToString());
                break;
            case DataType.DT_DATE:
            case DataType.DT_DBDATE:
            case DataType.DT_DBTIME:
            case DataType.DT_DBTIME2:
            case DataType.DT_DBTIMESTAMP:
            case DataType.DT_DBTIMESTAMP2:
                value = buffer[Index]; // DateTime.Parse(buffer[colIndex].ToString());
                break;
            case DataType.DT_NTEXT:
            case DataType.DT_TEXT:
                value = buffer.GetString(Index);
                break;
            case DataType.DT_IMAGE:
                uint len;
                if ((len = buffer.GetBlobLength(Index)) >= 2000000)
                {
                    // The Limit of Entity size in CloudDB is 2M, reject current record if the size exceeds the limit
                    buffer.DirectErrorRow(_errorOutputId, HResults.DTS_E_LOBLENGTHLIMITEXCEEDED, ID);
                    break;
                }
                value = buffer.GetBlobData(Index, 0, (int)len);
                break;
            case DataType.DT_BYTES:
                value = buffer.GetBytes(Index);
                break;
            case DataType.DT_I1:
            case DataType.DT_I2:
            case DataType.DT_I4:
            case DataType.DT_I8:
            case DataType.DT_UI1:
            case DataType.DT_UI2:
            case DataType.DT_UI4:
            case DataType.DT_UI8:
            case DataType.DT_R4:
            case DataType.DT_R8:
            case DataType.DT_DECIMAL:
            case DataType.DT_NUMERIC:
            case DataType.DT_CY:
                // Fix: the previous decimal.Parse(buffer[Index].ToString(), InvariantCulture)
                // round-tripped through ToString(), which formats with the CURRENT
                // culture — under comma-decimal cultures the invariant parse would
                // throw or misparse. Convert.ToDecimal converts numeric primitives
                // directly without a string round-trip.
                value = Convert.ToDecimal(buffer[Index], CultureInfo.InvariantCulture);
                break;
            case DataType.DT_NULL:
            case DataType.DT_EMPTY:
                // Deliberately left null.
                break;
            default:
                value = buffer[Index].ToString();
                break;
        }
    }
    return(value);
}
/// <summary>
/// Sends Outputs to files. //TODO: Rewrite this method
/// Walks the CRM ExecuteMultiple responses, directing faulted rows to the error
/// output and successful rows (with their operation result written to the last
/// output column) to the default output, advancing the buffer row in lockstep.
/// NOTE(review): assumes buffer rows from startBuffIndex onward are in the same
/// order as the responses in each batch — confirm against the caller.
/// </summary>
/// <param name="Integ">Batches with their ExecuteMultiple responses.</param>
/// <param name="buffer">The pipeline buffer whose rows correspond to the responses.</param>
/// <param name="startBuffIndex">Row index at which this batch's rows start.</param>
private void sendOutputResults(CRMIntegrate[] Integ, PipelineBuffer buffer, int startBuffIndex)
{
    IEnumerable <ExecuteMultipleResponseItem> FltResp;
    IEnumerable <ExecuteMultipleResponseItem> OkResp;
    // Save and later restore the buffer position; rows are revisited by index.
    int current = buffer.CurrentRow;
    buffer.CurrentRow = startBuffIndex;
    foreach (CRMIntegrate irsp in Integ)
    {
        if (irsp.Resp != null)
        {
            if (irsp.Resp.IsFaulted)
            {
                // Route each faulted response's row to the error output with its CRM error code.
                FltResp = irsp.Resp.Responses.Where(r => r.Fault != null);
                foreach (ExecuteMultipleResponseItem itm in FltResp)
                {
                    buffer.DirectErrorRow(errorOutputId, itm.Fault.ErrorCode, buffer.CurrentRow);
                    if (buffer.CurrentRow < buffer.RowCount)
                    {
                        buffer.NextRow();
                    }
                }
            }
            OkResp = irsp.Resp.Responses.Where(r => r.Fault == null);
            //int ResponseColumn = ComponentMetaData.InputCollection[0].InputColumnCollection.Count + ComponentMetaData.OutputCollection[0].OutputColumnCollection.Count;
            // The response value goes into the last output column.
            int ResponseColumn = ComponentMetaData.OutputCollection[0].OutputColumnCollection.Count - 1;
            foreach (ExecuteMultipleResponseItem itm in OkResp)
            {
                //Add the inserted GUID for Create Operation
                switch ((Operations)operation)
                {
                    case Operations.Create:
                        buffer.SetString(ResponseColumn, ((CreateResponse)itm.Response).id.ToString());
                        break;
                    case Operations.Update:
                        buffer.SetString(ResponseColumn, ((UpdateResponse)itm.Response).Results.FirstOrDefault().Value.ToString());
                        break;
                    case Operations.Delete:
                        buffer.SetString(ResponseColumn, ((DeleteResponse)itm.Response).Results.FirstOrDefault().Value.ToString());
                        break;
                    case Operations.Upsert:
                        buffer.SetString(ResponseColumn, ((UpsertResponse)itm.Response).Results.FirstOrDefault().Value.ToString());
                        break;
                    case Operations.Status:
                        buffer.SetString(ResponseColumn, ((SetStateResponse)itm.Response).Results.FirstOrDefault().Value.ToString());
                        break;
                    case Operations.Workflow:
                        buffer.SetString(ResponseColumn, ((ExecuteWorkflowResponse)itm.Response).Results.FirstOrDefault().Value.ToString());
                        break;
                }
                buffer.DirectRow(defaultOuputId);
                if (buffer.CurrentRow < buffer.RowCount)
                {
                    buffer.NextRow();
                }
            }
        }
        else if (irsp.ExceptionMessage != "")
        {
            // Whole batch failed before a response was produced: error-route the row.
            buffer.DirectErrorRow(errorOutputId, -1, buffer.CurrentRow);
            if (buffer.CurrentRow < buffer.RowCount)
            {
                buffer.NextRow();
            }
        }
    }
    buffer.CurrentRow = current;
}
/// <summary>
/// IDTSRuntimeComponent100 (OK)
/// Replaces each selected column's value in-place with its obfuscated
/// equivalent produced by the OxymoronEngine.
/// </summary>
/// <param name="inputID">ID of the input the buffer belongs to.</param>
/// <param name="buffer">The pipeline buffer whose rows are obfuscated in place.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    if (buffer.EndOfRowset)
    {
        return;
    }

    int processedRows = 0;
    while (buffer.NextRow())
    {
        // Never index past the column metadata we actually hold.
        int boundedColumnCount = Math.Min(buffer.ColumnCount, this.InputColumnInfos.Count);
        for (int colIdx = 0; colIdx < boundedColumnCount; colIdx++)
        {
            ColumnInfo info = this.InputColumnInfos[colIdx];
            object rawValue = buffer[info.bufferColumnIndex];
            // Describe the column to the obfuscation engine.
            IMetaColumn meta = new MetaColumn()
            {
                ColumnIndex = colIdx,
                ColumnName = info.name,
                ColumnType = InferClrTypeForSsisDataType(info.type),
                ColumnIsNullable = null,
                TableIndex = 0,
                TagContext = null
            };
            object maskedValue = this.OxymoronEngine.GetObfuscatedValue(meta, rawValue);
            SetBufferValue(buffer, info.bufferColumnIndex, maskedValue, info.type);
        }
        processedRows++;
    }
}
/// <summary>
/// Will perform the user-specified behaviour when a processing error occurs.
/// </summary>
/// <param name="disposition">How the error should be handled</param>
/// <param name="defaultBuffer">The default output buffer</param>
/// <param name="errorBuffer">The error output buffer</param>
/// <param name="failingColumnInfo">The information for the problematic column</param>
/// <param name="ex">The exception caught from processing (optional)</param>
private void HandleProcessingError(DTSRowDisposition disposition, PipelineBuffer defaultBuffer, PipelineBuffer errorBuffer, ColumnInfo failingColumnInfo, Exception ex)
{
    if (disposition == DTSRowDisposition.RD_FailComponent)
    {
        throw new Exception(String.Format("There was an issue with column: {0}", failingColumnInfo.OutputColumn.Name), ex);
    }

    if (disposition != DTSRowDisposition.RD_RedirectRow)
    {
        // Any other disposition leaves the row in the default buffer untouched.
        return;
    }

    if (errorBuffer == null)
    {
        throw new InvalidOperationException("There must be an error output defined if redirection was specified");
    }

    // Copy the failed row's column values from the default buffer into a fresh error row.
    errorBuffer.AddRow();
    foreach (IDTSOutputColumn100 outputColumn in GetErrorOutputColumns().ToArray())
    {
        ColumnInfo sourceColumnInfo = GetColumnInfo(outputColumn.Name);
        if (sourceColumnInfo != null)
        {
            errorBuffer[sourceColumnInfo.ErrorOuputBufferColumnIndex] = defaultBuffer[sourceColumnInfo.OuputBufferColumnIndex];
        }
    }

    // Record the error code (the exception's HRESULT, if any) and failing column lineage.
    int errorCode = ex == null ? 0 : System.Runtime.InteropServices.Marshal.GetHRForException(ex);
    errorBuffer.SetErrorInfo(errorOutputID, errorCode, failingColumnInfo.OutputColumn.LineageID);

    // The row now lives in the error buffer; drop it from the default one.
    defaultBuffer.RemoveRow();
}
/// <summary>
/// This is where the data is read from the input buffer. Each row is converted
/// to a string dictionary keyed by SharePoint field name (NULL columns become
/// empty strings), then the whole batch is sent as either an update
/// (BatchType.Modification) or a delete (by ID).
/// </summary>
/// <param name="inputID"></param>
/// <param name="buffer"></param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    string sharepointUrl = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTSITEURL].Value;
    string sharepointList = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTLISTNAME].Value;
    string sharepointListView = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTLISTVIEWNAME].Value;
    short batchSize = (short)ComponentMetaData.CustomPropertyCollection[C_BATCHSIZE].Value;
    Enums.BatchType batchType = (Enums.BatchType)ComponentMetaData.CustomPropertyCollection[C_BATCHTYPE].Value;
    if (!buffer.EndOfRowset)
    {
        // Queue the data up for batching by the sharepoint accessor object
        var dataQueue = new List<Dictionary<string, string>>();
        while (buffer.NextRow())
        {
            var rowData = new Dictionary<string, string>();
            foreach (var fieldName in _bufferLookup.Keys)
            {
                // Convert each SSIS type to its SharePoint string form; NULLs map
                // to string.Empty so every field is always present in the row.
                switch (_bufferLookupDataType[fieldName])
                {
                    case DataType.DT_STR:
                    case DataType.DT_WSTR:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, string.Empty);
                        else
                            rowData.Add(fieldName, buffer.GetString(_bufferLookup[fieldName]));
                        break;
                    case DataType.DT_NTEXT:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, string.Empty);
                        else
                        {
                            // Blob columns are read fully and decoded as UTF-16.
                            int colDataLength = (int)buffer.GetBlobLength(_bufferLookup[fieldName]);
                            byte[] stringData = buffer.GetBlobData(_bufferLookup[fieldName], 0, colDataLength);
                            rowData.Add(fieldName, Encoding.Unicode.GetString(stringData));
                        }
                        break;
                    case DataType.DT_R4:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, string.Empty);
                        else
                            rowData.Add(fieldName, buffer.GetSingle(_bufferLookup[fieldName]).ToString(_culture));
                        break;
                    case DataType.DT_R8:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, string.Empty);
                        else
                            rowData.Add(fieldName, buffer.GetDouble(_bufferLookup[fieldName]).ToString(_culture));
                        break;
                    case DataType.DT_UI1:
                    case DataType.DT_I1:
                    case DataType.DT_BOOL:
                        // NOTE(review): GetBoolean is used for DT_UI1/DT_I1 too — assumes
                        // those columns actually hold boolean flags; confirm.
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, string.Empty);
                        else
                            rowData.Add(fieldName, buffer.GetBoolean(_bufferLookup[fieldName]).ToString(_culture));
                        break;
                    case DataType.DT_UI2:
                    case DataType.DT_I2:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, string.Empty);
                        else
                            rowData.Add(fieldName, buffer.GetInt16(_bufferLookup[fieldName]).ToString(_culture));
                        break;
                    case DataType.DT_UI4:
                    case DataType.DT_I4:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, string.Empty);
                        else
                            rowData.Add(fieldName, buffer.GetInt32(_bufferLookup[fieldName]).ToString(_culture));
                        break;
                    case DataType.DT_UI8:
                    case DataType.DT_I8:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, string.Empty);
                        else
                            rowData.Add(fieldName, buffer.GetInt64(_bufferLookup[fieldName]).ToString(_culture));
                        break;
                    case DataType.DT_GUID:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, String.Empty);
                        else
                            rowData.Add(fieldName, buffer.GetGuid(_bufferLookup[fieldName]).ToString());
                        break;
                    case DataType.DT_DBTIMESTAMP:
                        if (buffer.IsNull(_bufferLookup[fieldName]))
                            rowData.Add(fieldName, String.Empty);
                        else
                            // ISO-8601 style timestamp ("u" format with 'T' separator).
                            rowData.Add(fieldName, buffer.GetDateTime(_bufferLookup[fieldName]).ToString("u").Replace(" ","T"));
                        break;
                }
            }
            dataQueue.Add(rowData);
        }
        bool fireAgain = false;
        if (dataQueue.Count() > 0)
        {
            System.Diagnostics.Stopwatch timer = new System.Diagnostics.Stopwatch();
            timer.Start();
            System.Xml.Linq.XElement resultData;
            if (batchType == Enums.BatchType.Modification)
            {
                // Perform the update
                resultData = SharePointUtility.ListServiceUtility.UpdateListItems(
                    new Uri(sharepointUrl), _credentials, sharepointList, sharepointListView, dataQueue, batchSize);
            }
            else
            {
                // Get the IDs read from the buffer
                var idList = from data in dataQueue
                             where data["ID"].Trim().Length > 0
                             select data["ID"];
                // Delete the list items with IDs
                resultData = SharePointUtility.ListServiceUtility.DeleteListItems(
                    new Uri(sharepointUrl), _credentials, sharepointList, sharepointListView, idList);
            }
            timer.Stop();
            // Rows whose result element carries an errorCode child are failures.
            var errorRows = from result in resultData.Descendants("errorCode")
                            select result.Parent;
            int successRowsWritten = resultData.Elements().Count() - errorRows.Count();
            string infoMsg = string.Format(CultureInfo.InvariantCulture, "Affected {0} records in list '{1}' at '{2}'. Elapsed time is {3}ms", successRowsWritten, sharepointList, sharepointUrl, timer.ElapsedMilliseconds);
            ComponentMetaData.FireInformation(0, ComponentMetaData.Name, infoMsg, "", 0, ref fireAgain);
            ComponentMetaData.IncrementPipelinePerfCounter(
                DTS_PIPELINE_CTR_ROWSWRITTEN, (uint)successRowsWritten);
            // Shovel any error rows to the error flow
            bool cancel;
            int errorIter = 0;
            foreach (var row in errorRows)
            {
                // Do not flood the error log.
                errorIter++;
                if (errorIter > 10)
                {
                    ComponentMetaData.FireError(0, ComponentMetaData.Name, "Total of " + errorRows.Count().ToString(_culture) + ", only showing first 10.", "", 0, out cancel);
                    return;
                }
                string idString = "";
                XAttribute attrib = row.Element("row").Attribute("ID");
                if (attrib != null)
                    idString = "(SP ID=" + attrib.Value + ")";
                string errorString = string.Format(CultureInfo.InvariantCulture, "Error on row {0}: {1} - {2} {3}", row.Attribute("ID"), row.Element("errorCode").Value, row.Element("errorDescription").Value, idString);
                ComponentMetaData.FireError(0, ComponentMetaData.Name, errorString, "", 0, out cancel);
                // Need to throw an exception, or else this step's box is green (should be red), even though the flow
                // is marked as failure regardless.
                throw new PipelineProcessException("Errors detected in this component - see SSIS Errors");
            }
        }
        else
        {
            ComponentMetaData.FireInformation(0, ComponentMetaData.Name, "No rows found to update in destination.", "", 0, ref fireAgain);
        }
    }
}
/// <summary>
/// This is where the data is loaded into the output buffer: reads the configured
/// SharePoint list via the list web service and writes one output row per list item.
/// </summary>
/// <param name="outputs">Number of outputs attached to this component.</param>
/// <param name="outputIDs">IDs of the attached outputs.</param>
/// <param name="buffers">Pipeline buffers, one per output; only the first is used.</param>
public override void PrimeOutput(int outputs, int[] outputIDs, PipelineBuffer[] buffers)
{
    // Pull the component configuration out of the custom property collection.
    string sharepointUrl = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTSITEURL].Value;
    string sharepointList = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTLISTNAME].Value;
    string sharepointListView = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTLISTVIEWNAME].Value;
    XElement camlQuery = XElement.Parse((string)ComponentMetaData.CustomPropertyCollection[C_CAMLQUERY].Value);
    short batchSize = (short)ComponentMetaData.CustomPropertyCollection[C_BATCHSIZE].Value;
    Enums.TrueFalseValue isRecursive = (Enums.TrueFalseValue)ComponentMetaData.CustomPropertyCollection[C_ISRECURSIVE].Value;
    Enums.TrueFalseValue includeFolders = (Enums.TrueFalseValue)ComponentMetaData.CustomPropertyCollection[C_INCLUDEFOLDERS].Value;

    PipelineBuffer outputBuffer = buffers[0];

    // Get the field names from the output collection.
    // FIX: materialize with ToArray() so the deferred LINQ query is evaluated exactly
    // once (matches the sibling copy of this method elsewhere in the project).
    var fieldNames = (from col in ComponentMetaData.OutputCollection[0].OutputColumnCollection.Cast<IDTSOutputColumn>()
                      select (string)col.CustomPropertyCollection[0].Value).ToArray();

    // Load the data from SharePoint, timing the call for the information event below.
    System.Diagnostics.Stopwatch timer = new System.Diagnostics.Stopwatch();
    timer.Start();
    var listData = SharePointUtility.ListServiceUtility.GetListItemData(
        new Uri(sharepointUrl), _credentials, sharepointList, sharepointListView,
        fieldNames, camlQuery,
        isRecursive == Enums.TrueFalseValue.True,
        batchSize);
    timer.Stop();

    bool fireAgain = false;
    int actualRowCount = 0;
    foreach (var row in listData)
    {
        // Determine if we should continue based on if this is a folder item or not
        // (filter can be pushed up to CAML if perf becomes an issue).
        bool canContinue = true;
        if ((row.ContainsKey("ContentType")) &&
            (row["ContentType"] == "Folder") &&
            (includeFolders == Enums.TrueFalseValue.False))
        {
            canContinue = false;
        }

        if (canContinue)
        {
            actualRowCount++;
            outputBuffer.AddRow();
            foreach (var fieldName in _bufferLookup.Keys)
            {
                if (row.ContainsKey(fieldName))
                {
                    // Convert the SharePoint string value into the declared buffer column type.
                    switch (_bufferLookupDataType[fieldName])
                    {
                        case DataType.DT_NTEXT:
                            outputBuffer.AddBlobData(_bufferLookup[fieldName],
                                Encoding.Unicode.GetBytes(row[fieldName].ToString()));
                            break;
                        case DataType.DT_WSTR:
                            outputBuffer.SetString(_bufferLookup[fieldName], row[fieldName]);
                            break;
                        case DataType.DT_R8:
                            outputBuffer.SetDouble(_bufferLookup[fieldName], double.Parse(row[fieldName], _culture));
                            break;
                        case DataType.DT_I4:
                            outputBuffer.SetInt32(_bufferLookup[fieldName], int.Parse(row[fieldName], _culture));
                            break;
                        case DataType.DT_BOOL:
                            // SharePoint yes/no fields arrive as "1"/"0".
                            outputBuffer.SetBoolean(_bufferLookup[fieldName], (int.Parse(row[fieldName], _culture) == 1));
                            break;
                        case DataType.DT_GUID:
                            outputBuffer.SetGuid(_bufferLookup[fieldName], new Guid(row[fieldName]));
                            break;
                        case DataType.DT_DBTIMESTAMP:
                            outputBuffer.SetDateTime(_bufferLookup[fieldName], DateTime.Parse(row[fieldName], _culture));
                            break;
                    }
                }
                else
                {
                    // Field missing from this item: strings become empty, booleans false,
                    // everything else is set to NULL.
                    switch (_bufferLookupDataType[fieldName])
                    {
                        case DataType.DT_NTEXT:
                            outputBuffer.AddBlobData(_bufferLookup[fieldName],
                                Encoding.Unicode.GetBytes(String.Empty));
                            break;
                        case DataType.DT_WSTR:
                            outputBuffer.SetString(_bufferLookup[fieldName], String.Empty);
                            break;
                        case DataType.DT_BOOL:
                            outputBuffer.SetBoolean(_bufferLookup[fieldName], false);
                            break;
                        default:
                            outputBuffer.SetNull(_bufferLookup[fieldName]);
                            break;
                    }
                }
            }
        }
    }

    string infoMsg = string.Format(
        CultureInfo.InvariantCulture,
        "Loaded {0} records from list '{1}' at '{2}'. Elapsed time is {3}ms",
        actualRowCount, sharepointList, sharepointUrl, timer.ElapsedMilliseconds);
    ComponentMetaData.FireInformation(0, ComponentMetaData.Name, infoMsg, "", 0, ref fireAgain);
    ComponentMetaData.IncrementPipelinePerfCounter(DTS_PIPELINE_CTR_ROWSREAD, (uint)actualRowCount);
    outputBuffer.SetEndOfRowset();
}
public abstract void transform(ref PipelineBuffer buffer, int defaultOutputId, int inputColumnBufferIndex, int outputColumnBufferIndex);
/// <summary>
/// Reads every feature from the configured OGR layer (or the result of the configured
/// SQL statement) and writes one row per feature to the default output buffer.
/// Per-column conversion failures are redirected or failed according to each column's
/// error disposition.
/// </summary>
public override void PrimeOutput(int outputs, int[] outputIDs, PipelineBuffer[] buffers)
{
    // Identify buffers: match each outgoing buffer to either the error output or the
    // default output by comparing output IDs.
    PipelineBuffer errorBuffer = null;
    PipelineBuffer defaultBuffer = null;
    for (int x = 0; x < outputs; x++)
    {
        if (outputIDs[x] == errorOutputID)
            errorBuffer = buffers[x];
        else
            defaultBuffer = buffers[x];
    }

    // Get the OGR layer: a SQL result layer when a "SQL Statement" property is set,
    // otherwise the plain layer.
    Layer OGRLayer;
    bool isSQLLayer = (!(ComponentMetaData.CustomPropertyCollection["SQL Statement"].Value == null ||
                         ComponentMetaData.CustomPropertyCollection["SQL Statement"].Value.ToString() == string.Empty));
    if (isSQLLayer)
    {
        OGRLayer = getSQLLayer();
    }
    else
    {
        OGRLayer = getLayer();
    }
    Feature OGRFeature;
    FeatureDefn OGRFeatureDef = OGRLayer.GetLayerDefn();

    // Initialize the columnInfo object outside the loop so the catch block below can
    // see which column was being processed when an exception occurred.
    columnInfo ci = new columnInfo();

    // For each feature in the OGR layer, add a row to the output buffer.
    while ((OGRFeature = OGRLayer.GetNextFeature()) != null)
    {
        try
        {
            defaultBuffer.AddRow();
            // Set buffer column values.
            for (int i = 0; i < this.columnInformation.Count; i++)
            {
                ci = (columnInfo)this.columnInformation[i];
                if (ci.geom)
                {
                    // Geometry column: export the feature geometry as WKB blob data.
                    Geometry geom = OGRFeature.GetGeometryRef();
                    if (geom != null)
                    {
                        byte[] geomBytes = new byte[geom.WkbSize()];
                        geom.ExportToWkb(geomBytes);
                        defaultBuffer.AddBlobData(ci.bufferColumnIndex, geomBytes);
                    }
                }
                else
                {
                    // Attribute column: look up the OGR field by name and convert by type.
                    int OGRFieldIndex = OGRFeatureDef.GetFieldIndex(ci.columnName);
                    FieldDefn OGRFieldDef = OGRFeatureDef.GetFieldDefn(OGRFieldIndex);
                    FieldType OGRFieldType = OGRFieldDef.GetFieldType();
                    // Declare date/time scratch variables used by the date, datetime and
                    // time cases below.
                    int pnYear, pnMonth, pnDay, pnHour, pnMinute, pnSecond, pnTZFlag;
                    DateTime dt;
                    TimeSpan ts;
                    switch (OGRFieldType)
                    {
                        //case FieldType.OFTBinary:
                        //    break;
                        case FieldType.OFTDate:
                            OGRFeature.GetFieldAsDateTime(OGRFieldIndex, out pnYear, out pnMonth, out pnDay, out pnHour, out pnMinute, out pnSecond, out pnTZFlag);
                            dt = new DateTime(pnYear,pnMonth,pnDay);
                            defaultBuffer.SetDate(ci.bufferColumnIndex, dt);
                            break;
                        case FieldType.OFTDateTime:
                            OGRFeature.GetFieldAsDateTime(OGRFieldIndex, out pnMonth, out pnDay, out pnHour, out pnMinute, out pnSecond, out pnTZFlag);
                            dt = new DateTime(pnYear,pnMonth,pnDay,pnHour,pnMinute,pnSecond);
                            // NOTE(review): the time-zone flag (pnTZFlag) is currently ignored —
                            // confirm whether offsets should be applied here.
                            defaultBuffer.SetDateTime(ci.bufferColumnIndex, dt);
                            break;
                        case FieldType.OFTInteger:
                            defaultBuffer.SetInt32(ci.bufferColumnIndex, OGRFeature.GetFieldAsInteger(OGRFieldIndex));
                            break;
                        case FieldType.OFTReal:
                            defaultBuffer.SetDouble(ci.bufferColumnIndex, OGRFeature.GetFieldAsDouble(OGRFieldIndex));
                            break;
                        case FieldType.OFTTime:
                            OGRFeature.GetFieldAsDateTime(OGRFieldIndex, out pnYear, out pnMonth, out pnDay, out pnHour, out pnMinute, out pnSecond, out pnTZFlag);
                            ts = new TimeSpan(pnHour,pnMinute,pnSecond);
                            defaultBuffer.SetTime(ci.bufferColumnIndex, ts);
                            break;
                        case FieldType.OFTString:
                        default:
                            // Unknown field types fall back to their string representation.
                            defaultBuffer.SetString(ci.bufferColumnIndex, OGRFeature.GetFieldAsString(OGRFieldIndex));
                            break;
                    }
                }
            }
        }
        catch (Exception ex)
        {
            // Redirect to error buffer when the failing column is configured for redirection.
            // NOTE(review): if no error output is attached, errorBuffer is null here — confirm
            // the designer always wires one up when RD_RedirectRow is selected.
            if (ci.errorDisposition == DTSRowDisposition.RD_RedirectRow)
            {
                // Add a row to the error buffer.
                errorBuffer.AddRow();
                // Set the error information.
                int errorCode = System.Runtime.InteropServices.Marshal.GetHRForException(ex);
                errorBuffer.SetErrorInfo(errorOutputID, errorCode, ci.lineageID);
                // Remove the row that was added to the default buffer.
                defaultBuffer.RemoveRow();
            }
            // Fail the component for RD_FailComponent (and RD_NotUsed, which is treated the same).
            else if (ci.errorDisposition == DTSRowDisposition.RD_FailComponent || ci.errorDisposition == DTSRowDisposition.RD_NotUsed)
            {
                // 'cancel' is presumably a field on this component — TODO confirm.
                ComponentMetaData.FireError(0, "primeoutput failure", ex.ToString(), string.Empty, 0, out cancel);
                throw;
            }
        }
    }

    // Set end of rowset for both buffers.
    if (defaultBuffer != null)
        defaultBuffer.SetEndOfRowset();
    if (errorBuffer != null)
        errorBuffer.SetEndOfRowset();

    // Clean up the layer object: SQL result layers must be released back to the data source.
    if (isSQLLayer)
    {
        this.OGRDataSource.ReleaseResultSet(OGRLayer);
    }
}
public override void ProcessInput(int inputID, PipelineBuffer buffer) { try { if (!performPostgresCopy) { // perform insert while (buffer.NextRow()) { // loop through column data in buffer and assign to parameters object for (int i = 0; i <= colsCount - 1; i++) { //odpCmd.Parameters[":" + columnInfos[i].ExternalColumnName].Value = buffer[i]; // the order of columns in the buffer isn't always as expected. // therefore, obtain the object/column value in the buffer using the column name. object ob = GetBufferColumnValue(buffer, columnInfos[i]); odpCmd.Parameters[":" + columnInfos[i].ExternalColumnName].Value = ob; } odpCmd.Prepare(); odpCmd.ExecuteNonQuery(); } } else { // perform postgres copy using (var writer = odpConnection.BeginBinaryImport(odpCopyCommand)) { while (buffer.NextRow()) { writer.StartRow(); for (int i = 0; i <= colsCount - 1; i++) { // the order of columns in the buffer isn't always as expected. // therefore, obtain the object/column value in the buffer using the column name. object ob = GetBufferColumnValue(buffer, columnInfos[i]); writer.Write(ob, columnInfos[i].ExternalSqlDbType); } // end copy for } //end copy while writer.Complete(); } //end copy using } //end copy if (performAsTransaction == true) { if (!odpTran.IsCompleted) { odpTran.Commit(); } } } catch (Exception e) { FireEvent(EventType.Error, "Error in ProcessInput : " + e.StackTrace); if (performAsTransaction == true) { if (!odpTran.IsCompleted) { odpTran.Rollback(); } } throw (e); } }
/// <summary>
/// Get binary data from the selected column in the buffer, in accordance with the
/// column's data type: blob and byte columns are returned raw, every other type is
/// converted to its string form and encoded as UTF-16 (Unicode) bytes. A NULL column
/// yields an empty byte array.
/// </summary>
/// <param name="dataType">The data type of the column.</param>
/// <param name="columnIndex">The index of the column.</param>
/// <param name="buffer">The reference to the input buffer.</param>
/// <returns>The column's value as a byte array.</returns>
private Byte[] GetBytes(DataType dataType, int columnIndex, ref PipelineBuffer buffer)
{
    // NULLs encode to the same empty array the string path would produce.
    if (buffer.IsNull(columnIndex))
    {
        return Encoding.Unicode.GetBytes(String.Empty);
    }

    // Raw binary types bypass the string conversion entirely.
    switch (dataType)
    {
        case DataType.DT_BYTES:
            return buffer.GetBytes(columnIndex);
        case DataType.DT_IMAGE:
        case DataType.DT_NTEXT:
        case DataType.DT_TEXT:
            return buffer.GetBlobData(columnIndex, 0, (Int32)buffer.GetBlobLength(columnIndex));
    }

    // Everything else: read with the matching typed accessor, stringify, encode once.
    String text;
    switch (dataType)
    {
        case DataType.DT_BOOL:
            text = buffer.GetBoolean(columnIndex).ToString();
            break;
        case DataType.DT_DBDATE:
            text = buffer.GetDate(columnIndex).ToString();
            break;
        case DataType.DT_DBTIME:
        case DataType.DT_DBTIME2:
            text = buffer.GetTime(columnIndex).ToString();
            break;
        case DataType.DT_DATE:
        case DataType.DT_DBTIMESTAMP:
        case DataType.DT_DBTIMESTAMP2:
        case DataType.DT_FILETIME:
            text = buffer.GetDateTime(columnIndex).ToString();
            break;
        case DataType.DT_DBTIMESTAMPOFFSET:
            text = buffer.GetDateTimeOffset(columnIndex).ToString();
            break;
        case DataType.DT_DECIMAL:
        case DataType.DT_NUMERIC:
        case DataType.DT_CY:
            text = buffer.GetDecimal(columnIndex).ToString();
            break;
        case DataType.DT_GUID:
            text = buffer.GetGuid(columnIndex).ToString();
            break;
        case DataType.DT_I1:
            text = buffer.GetSByte(columnIndex).ToString();
            break;
        case DataType.DT_I2:
            text = buffer.GetInt16(columnIndex).ToString();
            break;
        case DataType.DT_I4:
            text = buffer.GetInt32(columnIndex).ToString();
            break;
        case DataType.DT_I8:
            text = buffer.GetInt64(columnIndex).ToString();
            break;
        case DataType.DT_R4:
            text = buffer.GetSingle(columnIndex).ToString();
            break;
        case DataType.DT_R8:
            text = buffer.GetDouble(columnIndex).ToString();
            break;
        case DataType.DT_STR:
        case DataType.DT_WSTR:
            text = buffer.GetString(columnIndex);
            break;
        case DataType.DT_UI1:
            text = buffer.GetByte(columnIndex).ToString();
            break;
        case DataType.DT_UI2:
            text = buffer.GetUInt16(columnIndex).ToString();
            break;
        case DataType.DT_UI4:
            text = buffer.GetUInt32(columnIndex).ToString();
            break;
        case DataType.DT_UI8:
            text = buffer.GetUInt64(columnIndex).ToString();
            break;
        default:
            text = String.Empty;
            break;
    }
    return Encoding.Unicode.GetBytes(text);
}
private string GetFullAddressFromBuffer(PipelineBuffer buffer) { if (_columnIndices.ContainsKey(FullAddressColumnName)) { var fullAddress = buffer.GetString(_columnIndices[FullAddressColumnName]); if (!string.IsNullOrWhiteSpace(fullAddress)) { return(fullAddress); } } else { var addressParts = new List <string>(); if (_columnIndices.ContainsKey(Address1ColumnName)) { var address1 = buffer.GetString(_columnIndices[Address1ColumnName]); if (!string.IsNullOrWhiteSpace(address1)) { addressParts.Add(address1); } } if (_columnIndices.ContainsKey(Address2ColumnName)) { var address2 = buffer.GetString(_columnIndices[Address2ColumnName]); if (!string.IsNullOrWhiteSpace(address2)) { addressParts.Add(address2); } } if (_columnIndices.ContainsKey(CityColumnName)) { var city = buffer.GetString(_columnIndices[CityColumnName]); if (!string.IsNullOrWhiteSpace(city)) { addressParts.Add(city); } } if (_columnIndices.ContainsKey(StateColumnName)) { var state = buffer.GetString(_columnIndices[StateColumnName]); if (!string.IsNullOrWhiteSpace(state)) { addressParts.Add(state); } } if (_columnIndices.ContainsKey(ZipColumnName)) { var zip = buffer.GetString(_columnIndices[ZipColumnName]); if (!string.IsNullOrWhiteSpace(zip)) { addressParts.Add(zip); } } var result = string.Join(", ", addressParts); if (!string.IsNullOrWhiteSpace(result)) { return(result); } } return(null); }
/// <summary>
/// This is where the data is loaded into the output buffer: reads the configured
/// SharePoint list via the list web service and writes one output row per list item
/// (optionally skipping folder items).
/// </summary>
/// <param name="outputs">Number of outputs attached to this component.</param>
/// <param name="outputIDs">IDs of the attached outputs.</param>
/// <param name="buffers">Pipeline buffers, one per output; only the first is used.</param>
public override void PrimeOutput(int outputs, int[] outputIDs, PipelineBuffer[] buffers)
{
    // Pull the component configuration out of the custom property collection.
    string sharepointUrl = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTSITEURL].Value;
    string sharepointList = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTLISTNAME].Value;
    string sharepointListView = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTLISTVIEWNAME].Value;
    XElement camlQuery = XElement.Parse((string)ComponentMetaData.CustomPropertyCollection[C_CAMLQUERY].Value);
    short batchSize = (short)ComponentMetaData.CustomPropertyCollection[C_BATCHSIZE].Value;
    Enums.TrueFalseValue isRecursive = (Enums.TrueFalseValue)ComponentMetaData.CustomPropertyCollection[C_ISRECURSIVE].Value;
    Enums.TrueFalseValue includeFolders = (Enums.TrueFalseValue)ComponentMetaData.CustomPropertyCollection[C_INCLUDEFOLDERS].Value;
    PipelineBuffer outputBuffer = buffers[0];

    // Get the field names from the output collection; materialized with ToArray so the
    // deferred LINQ query is evaluated exactly once.
    var fieldNames = (from col in
                      ComponentMetaData.OutputCollection[0].OutputColumnCollection.Cast<IDTSOutputColumn>()
                      select (string)col.CustomPropertyCollection[0].Value).ToArray();

    // Load the data from SharePoint, timing the call for the information event below.
    System.Diagnostics.Stopwatch timer = new System.Diagnostics.Stopwatch();
    timer.Start();
    var listData = SharePointUtility.ListServiceUtility.GetListItemData(
        new Uri(sharepointUrl), _credentials, sharepointList, sharepointListView,
        fieldNames, camlQuery,
        isRecursive == Enums.TrueFalseValue.True ? true : false,
        batchSize);
    timer.Stop();

    bool fireAgain = false;
    int actualRowCount = 0;
    foreach (var row in listData)
    {
        // Determine if we should continue based on if this is a folder item or not
        // (filter can be pushed up to CAML if perf becomes an issue).
        bool canContinue = true;
        if ((row.ContainsKey("ContentType")) &&
            (row["ContentType"] == "Folder") &&
            (includeFolders == Enums.TrueFalseValue.False))
        {
            canContinue = false;
        }
        if (canContinue)
        {
            actualRowCount++;
            outputBuffer.AddRow();
            foreach (var fieldName in _bufferLookup.Keys)
            {
                if (row.ContainsKey(fieldName))
                {
                    // Convert the SharePoint string value into the declared buffer column type.
                    switch (_bufferLookupDataType[fieldName])
                    {
                        case DataType.DT_NTEXT:
                            outputBuffer.AddBlobData(_bufferLookup[fieldName],
                                Encoding.Unicode.GetBytes(row[fieldName].ToString()));
                            break;
                        case DataType.DT_WSTR:
                            outputBuffer.SetString(_bufferLookup[fieldName], row[fieldName]);
                            break;
                        case DataType.DT_R8:
                            outputBuffer.SetDouble(_bufferLookup[fieldName], double.Parse(row[fieldName], _culture));
                            break;
                        case DataType.DT_I4:
                            outputBuffer.SetInt32(_bufferLookup[fieldName], int.Parse(row[fieldName], _culture));
                            break;
                        case DataType.DT_BOOL:
                            // SharePoint yes/no fields arrive as "1"/"0".
                            outputBuffer.SetBoolean(_bufferLookup[fieldName], (int.Parse(row[fieldName], _culture) == 1));
                            break;
                        case DataType.DT_GUID:
                            outputBuffer.SetGuid(_bufferLookup[fieldName], new Guid(row[fieldName]));
                            break;
                        case DataType.DT_DBTIMESTAMP:
                            outputBuffer.SetDateTime(_bufferLookup[fieldName], DateTime.Parse(row[fieldName], _culture));
                            break;
                    }
                }
                else
                {
                    // Field missing from this item: strings become empty, booleans false,
                    // everything else is set to NULL.
                    switch (_bufferLookupDataType[fieldName])
                    {
                        case DataType.DT_NTEXT:
                            outputBuffer.AddBlobData(_bufferLookup[fieldName],
                                Encoding.Unicode.GetBytes(String.Empty));
                            break;
                        case DataType.DT_WSTR:
                            outputBuffer.SetString(_bufferLookup[fieldName], String.Empty);
                            break;
                        case DataType.DT_BOOL:
                            outputBuffer.SetBoolean(_bufferLookup[fieldName], false);
                            break;
                        default:
                            outputBuffer.SetNull(_bufferLookup[fieldName]);
                            break;
                    }
                }
            }
        }
    }

    string infoMsg = string.Format(
        CultureInfo.InvariantCulture,
        "Loaded {0} records from list '{1}' at '{2}'. Elapsed time is {3}ms",
        actualRowCount, sharepointList, sharepointUrl, timer.ElapsedMilliseconds);
    ComponentMetaData.FireInformation(0, ComponentMetaData.Name, infoMsg, "", 0, ref fireAgain);
    ComponentMetaData.IncrementPipelinePerfCounter(DTS_PIPELINE_CTR_ROWSREAD, (uint)actualRowCount);
    outputBuffer.SetEndOfRowset();
}
/// <summary>
/// Retrieves binary data from the given pipeline input column of any type.
/// Blob and byte columns are returned as-is; all other supported types are converted
/// to their string form and encoded as UTF-16 (Unicode) bytes. NULL columns and
/// unsupported types yield a single default byte.
/// </summary>
/// <param name="buffer">Pipeline buffer positioned on the row to read.</param>
/// <param name="inputColumn">Input column metadata (supplies data type and length).</param>
/// <param name="inputColumnIndex">Buffer index of the column.</param>
/// <returns>The column's value as a byte array.</returns>
private Byte[] GetColumnValueBytes(PipelineBuffer buffer, IDTSInputColumn100 inputColumn, Int32 inputColumnIndex)
{
    if (buffer.IsNull(inputColumnIndex))
    {
        return new Byte[] { DEFAULT_BYTE };
    }

    // Raw binary types short-circuit the string conversion.
    switch (inputColumn.DataType)
    {
        case DataType.DT_TEXT:
        case DataType.DT_NTEXT:
        case DataType.DT_IMAGE:
            return buffer.GetBlobData(inputColumnIndex, 0, inputColumn.Length);
        case DataType.DT_BYTES:
            return buffer.GetBytes(inputColumnIndex);
    }

    // Everything else: read with the matching typed accessor, stringify, encode once.
    String text;
    switch (inputColumn.DataType)
    {
        case DataType.DT_STR:
        case DataType.DT_WSTR:
            text = buffer.GetString(inputColumnIndex);
            break;
        case DataType.DT_BOOL:
            text = buffer.GetBoolean(inputColumnIndex).ToString();
            break;
        case DataType.DT_DBDATE:
            text = buffer.GetDate(inputColumnIndex).ToString();
            break;
        case DataType.DT_DBTIMESTAMP:
        case DataType.DT_DBTIMESTAMP2:
        case DataType.DT_FILETIME:
            text = buffer.GetDateTime(inputColumnIndex).ToString();
            break;
        case DataType.DT_DBTIME:
        case DataType.DT_DBTIME2:
            text = buffer.GetTime(inputColumnIndex).ToString();
            break;
        case DataType.DT_DBTIMESTAMPOFFSET:
            text = buffer.GetDateTimeOffset(inputColumnIndex).ToString();
            break;
        case DataType.DT_CY:
        case DataType.DT_DECIMAL:
        case DataType.DT_NUMERIC:
            text = buffer.GetDecimal(inputColumnIndex).ToString();
            break;
        case DataType.DT_I1:
            text = buffer.GetSByte(inputColumnIndex).ToString();
            break;
        case DataType.DT_I2:
            text = buffer.GetInt16(inputColumnIndex).ToString();
            break;
        case DataType.DT_I4:
            text = buffer.GetInt32(inputColumnIndex).ToString();
            break;
        case DataType.DT_I8:
            text = buffer.GetInt64(inputColumnIndex).ToString();
            break;
        case DataType.DT_UI1:
            text = buffer.GetByte(inputColumnIndex).ToString();
            break;
        case DataType.DT_UI2:
            text = buffer.GetUInt16(inputColumnIndex).ToString();
            break;
        case DataType.DT_UI4:
            text = buffer.GetUInt32(inputColumnIndex).ToString();
            break;
        case DataType.DT_UI8:
            text = buffer.GetUInt64(inputColumnIndex).ToString();
            break;
        case DataType.DT_R4:
            text = buffer.GetSingle(inputColumnIndex).ToString();
            break;
        case DataType.DT_R8:
            text = buffer.GetDouble(inputColumnIndex).ToString();
            break;
        case DataType.DT_GUID:
            text = buffer.GetGuid(inputColumnIndex).ToString();
            break;
        default:
            return new Byte[] { DEFAULT_BYTE };
    }
    return Encoding.Unicode.GetBytes(text);
}
/// <summary> /// IDTSRuntimeComponent100 (OK) /// </summary> /// <param name="outputs"> </param> /// <param name="outputIDs"> </param> /// <param name="buffers"> </param> public override void PrimeOutput(int outputs, int[] outputIDs, PipelineBuffer[] buffers) { // do nothing object obj = new object(); base.PrimeOutput(outputs, outputIDs, buffers); }
/// <summary> /// Calculates hash of Memaory Stream ands based ont he HashColumnInformation and stores the calculated hash into the pipeline output column /// </summary> /// <param name="hCol">HashColumnInfo for hash calculation</param> /// <param name="ms">Memory stream to calculate the hash</param> /// <param name="buffer">bufefr to store the calculated hash</param> public static void CalculateHashAndStoreValue(HashColumnsTransformation.HashColumnInfo hCol, HashMemoryBuffers mb, PipelineBuffer buffer) { byte[] hash; MemoryStream ms = mb.MemoryStream; ms.Position = 2; //Set Position to 0 prior computing hash to move right after the unicode characters to not include them in the hash calculation //Caculate Hash hash = hCol.HashAlgorithm.ComputeHash(ms); //Store the Hash into the Output HashColumn if (hCol.OutputDataType == DataType.DT_BYTES) { buffer.SetBytes(hCol.Index, hash); } else { string hashStr = BitConverter.ToString(hash).Replace("-", string.Empty); buffer.SetString(hCol.Index, hashStr); } if (ms.Capacity > MemoryStreamShringTreshod) { ms.Position = 0; ms.SetLength(0); ms.Capacity = MemoryStreamInitialSize; } }
/// <summary>
/// Writes <paramref name="value"/> into the output buffer column at
/// <paramref name="outputBufferIndex"/>, converting it according to the SSIS
/// <paramref name="dataType"/>. A null value sets the column to NULL; blob types
/// (DT_IMAGE/DT_NTEXT/DT_TEXT) expect a <see cref="BlobColumn"/> value.
/// </summary>
/// <param name="outputBuffer">Target pipeline buffer (must not be null).</param>
/// <param name="outputBufferIndex">Buffer index of the column to write.</param>
/// <param name="value">The value to write; null produces a NULL column.</param>
/// <param name="dataType">SSIS data type governing the conversion.</param>
/// <exception cref="ArgumentNullException">When <paramref name="outputBuffer"/> is null.</exception>
/// <exception cref="InvalidOperationException">When <paramref name="dataType"/> is unsupported.</exception>
private static void SetBufferValue(PipelineBuffer outputBuffer, int outputBufferIndex, object value, DataType dataType)
{
    if ((object)outputBuffer == null)
        throw new ArgumentNullException("outputBuffer");
    if ((object)value == null)
        outputBuffer.SetNull(outputBufferIndex);
    else
    {
        switch (dataType)
        {
            case DataType.DT_BOOL:
                outputBuffer.SetBoolean(outputBufferIndex, Convert.ToBoolean(value));
                break;
            case DataType.DT_BYTES:
                outputBuffer.SetBytes(outputBufferIndex, (byte[])value);
                break;
            case DataType.DT_CY:
                outputBuffer.SetDecimal(outputBufferIndex, Convert.ToDecimal(value));
                break;
            case DataType.DT_DATE:
                outputBuffer.SetDateTime(outputBufferIndex, Convert.ToDateTime(value));
                break;
            case DataType.DT_DBDATE:
                // The typed setter is unavailable for DT_DBDATE; use the indexer instead.
                outputBuffer[outputBufferIndex] = (DateTime)value;
                break;
            case DataType.DT_DBTIME:
                // It is not possible to populate DT_DBTIME columns from managed code in
                // SSIS 2005, so the value is silently dropped.
                break;
            case DataType.DT_DBTIMESTAMP:
                outputBuffer.SetDateTime(outputBufferIndex, Convert.ToDateTime(value));
                break;
            case DataType.DT_DECIMAL:
                outputBuffer.SetDecimal(outputBufferIndex, Convert.ToDecimal(value));
                break;
            case DataType.DT_FILETIME:
                outputBuffer[outputBufferIndex] = value;
                break;
            case DataType.DT_GUID:
                outputBuffer.SetGuid(outputBufferIndex, (Guid)value);
                break;
            case DataType.DT_I1:
                outputBuffer.SetSByte(outputBufferIndex, Convert.ToSByte(value));
                break;
            case DataType.DT_I2:
                outputBuffer.SetInt16(outputBufferIndex, Convert.ToInt16(value));
                break;
            case DataType.DT_I4:
                outputBuffer.SetInt32(outputBufferIndex, Convert.ToInt32(value));
                break;
            case DataType.DT_I8:
                outputBuffer.SetInt64(outputBufferIndex, Convert.ToInt64(value));
                break;
            case DataType.DT_IMAGE:
                // Blob columns must be copied chunk-wise from the source BlobColumn.
                BlobColumn colDT_IMAGE = (BlobColumn)value;
                if (colDT_IMAGE.IsNull)
                    outputBuffer.SetNull(outputBufferIndex);
                else
                    outputBuffer.AddBlobData(outputBufferIndex, colDT_IMAGE.GetBlobData(0, (int)colDT_IMAGE.Length));
                break;
            case DataType.DT_NTEXT:
                // Blob columns must be copied chunk-wise from the source BlobColumn.
                BlobColumn colDT_NTEXT = (BlobColumn)value;
                if (colDT_NTEXT.IsNull)
                    outputBuffer.SetNull(outputBufferIndex);
                else
                    outputBuffer.AddBlobData(outputBufferIndex, colDT_NTEXT.GetBlobData(0, (int)colDT_NTEXT.Length));
                break;
            case DataType.DT_NULL:
                outputBuffer.SetNull(outputBufferIndex);
                break;
            case DataType.DT_NUMERIC:
                outputBuffer.SetDecimal(outputBufferIndex, Convert.ToDecimal(value));
                break;
            case DataType.DT_R4:
                outputBuffer.SetSingle(outputBufferIndex, Convert.ToSingle(value));
                break;
            case DataType.DT_R8:
                outputBuffer.SetDouble(outputBufferIndex, Convert.ToDouble(value));
                break;
            case DataType.DT_STR:
                outputBuffer.SetString(outputBufferIndex, value.ToString());
                break;
            case DataType.DT_TEXT:
                // Blob columns must be copied chunk-wise from the source BlobColumn.
                BlobColumn colDT_TEXT = (BlobColumn)value;
                if (colDT_TEXT.IsNull)
                    outputBuffer.SetNull(outputBufferIndex);
                else
                    outputBuffer.AddBlobData(outputBufferIndex, colDT_TEXT.GetBlobData(0, (int)colDT_TEXT.Length));
                break;
            case DataType.DT_UI1:
                outputBuffer.SetByte(outputBufferIndex, Convert.ToByte(value));
                break;
            case DataType.DT_UI2:
                outputBuffer.SetUInt16(outputBufferIndex, Convert.ToUInt16(value));
                break;
            case DataType.DT_UI4:
                outputBuffer.SetUInt32(outputBufferIndex, Convert.ToUInt32(value));
                break;
            case DataType.DT_UI8:
                outputBuffer.SetUInt64(outputBufferIndex, Convert.ToUInt64(value));
                break;
            case DataType.DT_WSTR:
                outputBuffer.SetString(outputBufferIndex, value.ToString());
                break;
            default:
                // FIX: the previous message was an uninformative "Ah snap." wrapped in a
                // no-op string.Format; report which data type is unsupported instead.
                throw new InvalidOperationException("Unsupported SSIS data type: " + dataType + ".");
        }
    }
}
public HashThreadState(HashColumnsTransformation.HashColumnInfo hCol, List <HashColumnsTransformation.InputBufferColumnInfo> inputBufferColumns, PipelineBuffer buffer, HashMemoryBuffers mb, ManualResetEvent resetEvent) { this.HashColumnInfo = hCol; this.InputBufferColumns = inputBufferColumns; this.PipelineBuffer = buffer; this.MemoryBuffers = mb; this.ResetEvent = resetEvent; }
/// <summary>
/// Builds the hash input for one output column by appending each contributing input
/// column's value (in configured order) to a byte buffer, computes the configured hash
/// over it, and writes the result to the output column in the requested format.
/// NOTE: the byte ordering, the per-type Append calls, and the nullHandling string are
/// all part of the hash contract — changing any of them changes existing hash values.
/// </summary>
/// <param name="columnToProcess">The output column and its list of contributing input columns.</param>
/// <param name="buffer">Pipeline buffer positioned on the row being hashed.</param>
/// <param name="safeNullHandling">When true, a per-column null/length marker string is appended to the hash input so NULL vs empty (and adjacent-value ambiguity) is disambiguated.</param>
/// <param name="millisecondHandling">Forwarded to the date/time Append overloads to control millisecond precision.</param>
public static void CalculateHash(OutputColumn columnToProcess, PipelineBuffer buffer, bool safeNullHandling, bool millisecondHandling)
{
    byte[] inputByteBuffer = new byte[1000];
    Int32 bufferUsed = 0;
    // Accumulates "N"/"Y" per column (plus lengths for variable-size types) for safe null handling.
    string nullHandling = String.Empty;
    uint blobLength = 0;
    Int32 columnToProcessID = 0;
    DataType columnDataType = DataType.DT_NULL;
    // Step through each input column for that output column.
    for (int j = 0; j < columnToProcess.Count; j++)
    {
        columnToProcessID = columnToProcess[j].ColumnId;
        // Only call this once, as it appears to be "slow".
        columnDataType = columnToProcess[j].ColumnDataType;
        // Skip NULL values, as they "don't" exist...
        if (!buffer.IsNull(columnToProcessID))
        {
            nullHandling += "N";
            switch (columnDataType)
            {
                case DataType.DT_BOOL:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetBoolean(columnToProcessID));
                    break;
                case DataType.DT_IMAGE:
                    // Variable-length: also record the blob length in the null-handling string.
                    blobLength = buffer.GetBlobLength(columnToProcessID);
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetBlobData(columnToProcessID, 0, (int)blobLength));
                    nullHandling += blobLength.ToString();
                    break;
                case DataType.DT_BYTES:
                    // Variable-length: also record the byte count in the null-handling string.
                    byte[] bytesFromBuffer = buffer.GetBytes(columnToProcessID);
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, bytesFromBuffer);
                    nullHandling += bytesFromBuffer.GetLength(0).ToString();
                    break;
                case DataType.DT_CY:
                case DataType.DT_DECIMAL:
                case DataType.DT_NUMERIC:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDecimal(columnToProcessID));
                    break;
                case DataType.DT_DBTIMESTAMPOFFSET:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDateTimeOffset(columnToProcessID), millisecondHandling);
                    break;
                case DataType.DT_DBDATE:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDate(columnToProcessID), millisecondHandling);
                    break;
                case DataType.DT_DATE:
                case DataType.DT_DBTIMESTAMP:
                case DataType.DT_DBTIMESTAMP2:
                case DataType.DT_FILETIME:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDateTime(columnToProcessID), millisecondHandling);
                    break;
                case DataType.DT_DBTIME:
                case DataType.DT_DBTIME2:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetTime(columnToProcessID));
                    break;
                case DataType.DT_GUID:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetGuid(columnToProcessID));
                    break;
                case DataType.DT_I1:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetSByte(columnToProcessID));
                    break;
                case DataType.DT_I2:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetInt16(columnToProcessID));
                    break;
                case DataType.DT_I4:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetInt32(columnToProcessID));
                    break;
                case DataType.DT_I8:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetInt64(columnToProcessID));
                    break;
                case DataType.DT_NTEXT:
                case DataType.DT_STR:
                case DataType.DT_TEXT:
                case DataType.DT_WSTR:
                    // Variable-length: UTF-8 encode and record the character count.
                    String stringFromBuffer = buffer.GetString(columnToProcessID);
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, stringFromBuffer, Encoding.UTF8);
                    nullHandling += stringFromBuffer.Length.ToString();
                    break;
                case DataType.DT_R4:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetSingle(columnToProcessID));
                    break;
                case DataType.DT_R8:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetDouble(columnToProcessID));
                    break;
                case DataType.DT_UI1:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetByte(columnToProcessID));
                    break;
                case DataType.DT_UI2:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetUInt16(columnToProcessID));
                    break;
                case DataType.DT_UI4:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetUInt32(columnToProcessID));
                    break;
                case DataType.DT_UI8:
                    Utility.Append(ref inputByteBuffer, ref bufferUsed, buffer.GetUInt64(columnToProcessID));
                    break;
                case DataType.DT_EMPTY:
                case DataType.DT_NULL:
                default:
                    // Unsupported/empty types contribute nothing to the hash input.
                    break;
            }
        }
        else
        {
            nullHandling += "Y";
        }
    }
    // Fold the null/length marker string into the hash input when requested.
    if (safeNullHandling)
    {
        Utility.Append(ref inputByteBuffer, ref bufferUsed, nullHandling, Encoding.UTF8);
    }
    // Ok, we have all the data in a byte buffer — now generate the hash.
    byte[] hash;
    switch (columnToProcess.HashType)
    {
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.None:
            hash = new byte[1];
            break;
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.MD5:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.RipeMD160:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.SHA1:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.SHA256:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.SHA384:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.SHA512:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.CRC32:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.CRC32C:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.FNV1a32:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.FNV1a64:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.MurmurHash3a:
        case Martin.SQLServer.Dts.MultipleHash.HashTypeEnumerator.xxHash:
            hash = columnToProcess.HashObject.ComputeHash(inputByteBuffer, 0, bufferUsed);
            break;
        default:
            hash = new byte[1];
            break;
    }
    // Write the hash to the output column in the configured representation.
    switch (columnToProcess.OutputHashDataType)
    {
        case MultipleHash.OutputTypeEnumerator.Binary:
            buffer.SetBytes(columnToProcess.OutputColumnId, hash);
            break;
        case MultipleHash.OutputTypeEnumerator.Base64String:
            buffer.SetString(columnToProcess.OutputColumnId, System.Convert.ToBase64String(hash, 0, hash.Length));
            break;
        case MultipleHash.OutputTypeEnumerator.HexString:
            buffer.SetString(columnToProcess.OutputColumnId, String.Format("0x{0}", ByteArrayToHexViaLookup32(hash)));
            break;
    }
}
/// <summary> /// ProcessInput performs the bulk of the actual "work" of writing /// XML. It is called once per buffer, per input. /// </summary> public override void ProcessInput(int inputID, PipelineBuffer buffer) { base.ProcessInput(inputID, buffer); // For performance, cache as much as possible now. // Count the number of columns. int columnCount = buffer.ColumnCount; // Find the cached column mapping list for this input. List <XmlColumnInfo> columns = m_bufmap[inputID]; // Find the input object for this ID, so we can pull out the XML mapping properties. // This is pretty slow, but we only do it once per buffer. IDTSInput100 input = this.ComponentMetaData.InputCollection.GetObjectByID(inputID); string elementName = (string)input.CustomPropertyCollection[Constants.ElementNameProperty].Value; string elementNamespace = (string)input.CustomPropertyCollection[Constants.ElementNamespaceProperty].Value; // The fun part: iterate the rows and write XML! while (buffer.NextRow()) { // Write the start for the row element m_xmlFile.WriteStartElement(elementName, elementNamespace); // Write each mapped column foreach (XmlColumnInfo columnInfo in columns) { // just skip null columns if (buffer.IsNull(columnInfo.BufferIndex)) { continue; } // Write the start of the attribute or element, as appropriate. if (columnInfo.IsAttribute) { m_xmlFile.WriteStartAttribute(columnInfo.Name); } else { m_xmlFile.WriteStartElement(columnInfo.Name); } // XmlWriter can't write GUIDs, so we have to do that ourselves. if (columnInfo.Type == DataType.DT_GUID) { m_xmlFile.WriteValue(buffer[columnInfo.BufferIndex].ToString()); } else { // For most types, XmlWriter does a great job of representing them // in the most natural way for XML. m_xmlFile.WriteValue(buffer[columnInfo.BufferIndex]); } // Close it up if (columnInfo.IsAttribute) { m_xmlFile.WriteEndAttribute(); } else { m_xmlFile.WriteEndElement(); } } // Close the row element m_xmlFile.WriteEndElement(); } }
/// <summary>
/// Serializes one column value from the pipeline buffer into the binary stream
/// used for hashing. The layout per column is: data type (int32), null flag
/// (1 byte), then a length-prefixed payload of the value's bytes.
/// </summary>
/// <param name="bci">Column descriptor carrying the buffer index.</param>
/// <param name="buffer">The pipeline buffer holding the current row.</param>
/// <param name="mb">Shared scratch buffers (binary writer, decimal scratch array).</param>
private static void WriteColumnToStreamOriginal(HashColumnsTransformation.InputBufferColumnInfo bci, PipelineBuffer buffer, HashMemoryBuffers mb)
{
    int ci = bci.Index;
    byte[] bdata = null;
    byte[] decimalArray = mb.DecimalArray; // Scratch array for serializing decimals.
    BinaryWriter bw = mb.BinaryWriter;
    // (Removed an unused "sw" StreamWriter local — it was never referenced.)
    BufferColumn col = buffer.GetColumnInfo(bci.Index);

    bw.Write((int)col.DataType); // Write data type.

    if (buffer.IsNull(bci.Index))
    {
        bw.Write((byte)1); // Write 1 representing NULL.
        bw.Write(0);       // Write length of 0 for NULL.
    }
    else
    {
        bw.Write((byte)0); // Write 0 representing NOT NULL.

        // NOTE(review): locking on a parameter object is unusual — presumably this
        // guards concurrent use of the shared scratch buffers; confirm against callers.
        lock (bci)
        {
            switch (col.DataType)
            {
                case DataType.DT_BOOL:
                    bdata = BitConverter.GetBytes(buffer.GetBoolean(ci));
                    break;
                case DataType.DT_BYTES:
                    bdata = buffer.GetBytes(ci);
                    break;
                case DataType.DT_IMAGE:
                case DataType.DT_NTEXT:
                case DataType.DT_TEXT:
                    bdata = buffer.GetBlobData(ci, 0, (int)buffer.GetBlobLength(ci));
                    break;
                case DataType.DT_CY:
                case DataType.DT_DECIMAL:
                case DataType.DT_NUMERIC:
                    // Pack the four int32s of the decimal into 16 little-endian bytes.
                    var ia = decimal.GetBits(buffer.GetDecimal(ci));
                    for (int j = 0; j < 4; j++)
                    {
                        int k = 4 * j;
                        decimalArray[k] = (byte)(ia[j] & 0xFF);
                        decimalArray[k + 1] = (byte)(ia[j] >> 8 & 0xFF);
                        decimalArray[k + 2] = (byte)(ia[j] >> 16 & 0xFF);
                        decimalArray[k + 3] = (byte)(ia[j] >> 24 & 0xFF);
                    }
                    bdata = decimalArray;
                    break;
                case DataType.DT_DATE:
                case DataType.DT_DBTIMESTAMP:
                case DataType.DT_DBTIMESTAMP2:
                    bdata = BitConverter.GetBytes(buffer.GetDateTime(ci).ToBinary());
                    break;
                case DataType.DT_FILETIME:
                    bdata = BitConverter.GetBytes(buffer.GetInt64(ci));
                    break;
                case DataType.DT_DBDATE:
                    // NOTE(review): unlike every other case, this writes the raw value
                    // directly without a length prefix (bdata stays null). Kept as-is
                    // because "fixing" it would change existing hash values; confirm intent.
                    bw.Write(buffer.GetDate(ci).ToBinary());
                    break;
                case DataType.DT_DBTIME:
                case DataType.DT_DBTIME2:
                    bdata = BitConverter.GetBytes(DateTime.MinValue.Add(buffer.GetTime(ci)).ToBinary());
                    break;
                case DataType.DT_DBTIMESTAMPOFFSET:
                    // BUG FIX: bdata was null when CopyTo(bdata, 0) ran, throwing a
                    // NullReferenceException, and was then clobbered with decimalArray
                    // (scratch bytes, not the copied value). Allocate 16 bytes for the
                    // two 8-byte ToBinary() longs (date-time + offset) instead.
                    var dtoffset = buffer.GetDateTimeOffset(ci);
                    bdata = new byte[16];
                    BitConverter.GetBytes(dtoffset.DateTime.ToBinary()).CopyTo(bdata, 0);
                    BitConverter.GetBytes(DateTime.MinValue.Add(dtoffset.Offset).ToBinary()).CopyTo(bdata, 8);
                    break;
                case DataType.DT_EMPTY:
                case DataType.DT_NULL:
                    bdata = new byte[0];
                    break;
                case DataType.DT_GUID:
                    bdata = buffer.GetGuid(ci).ToByteArray();
                    break;
                case DataType.DT_I1:
                    bdata = BitConverter.GetBytes(buffer.GetSByte(ci));
                    break;
                case DataType.DT_I2:
                    bdata = BitConverter.GetBytes(buffer.GetInt16(ci));
                    break;
                case DataType.DT_I4:
                    bdata = BitConverter.GetBytes(buffer.GetInt32(ci));
                    break;
                case DataType.DT_I8:
                    bdata = BitConverter.GetBytes(buffer.GetInt64(ci));
                    break;
                case DataType.DT_R4:
                    bdata = BitConverter.GetBytes(buffer.GetSingle(ci));
                    break;
                case DataType.DT_R8:
                    bdata = BitConverter.GetBytes(buffer.GetDouble(ci));
                    break;
                case DataType.DT_UI1:
                    bdata = BitConverter.GetBytes(buffer.GetByte(ci));
                    break;
                case DataType.DT_UI2:
                    bdata = BitConverter.GetBytes(buffer.GetUInt16(ci));
                    break;
                case DataType.DT_UI4:
                    bdata = BitConverter.GetBytes(buffer.GetUInt32(ci));
                    break;
                case DataType.DT_UI8:
                    bdata = BitConverter.GetBytes(buffer.GetUInt64(ci));
                    break;
                case DataType.DT_STR:
                case DataType.DT_WSTR:
                    bdata = Encoding.Unicode.GetBytes(buffer.GetString(ci));
                    break;
                default:
                    bdata = new byte[0];
                    break;
            }
        }

        if (bdata != null)
        {
            bw.Write(bdata.Length); // Write length of payload.
            bw.Write(bdata);        // Write payload bytes.
        }
    }
}
/// <summary>
/// Initializes the buffer wrapper by forwarding all arguments to the base class;
/// this constructor adds no behavior of its own.
/// </summary>
/// <param name="Buffer">The underlying pipeline buffer being wrapped.</param>
/// <param name="BufferColumnIndexes">Column index mapping passed through to the base.</param>
/// <param name="OutputMap">Output name map passed through to the base.</param>
public Input0ByIndexBuffer(PipelineBuffer Buffer, int[] BufferColumnIndexes, OutputNameMap OutputMap) : base(Buffer, BufferColumnIndexes, OutputMap) { }
/// <summary>
/// Will perform the user-specified behaviour when a processing error occurs
/// </summary>
/// <param name="disposition">How the error should be handled</param>
/// <param name="defaultBuffer">The default output buffer</param>
/// <param name="errorBuffer">The error output buffer</param>
/// <param name="failingColumnInfo">The information for the problematic column</param>
/// <param name="ex">The exception caught from processing (optional)</param>
private void HandleProcessingError(DTSRowDisposition disposition, PipelineBuffer defaultBuffer, PipelineBuffer errorBuffer, ColumnInfo failingColumnInfo, Exception ex)
{
    if (disposition == DTSRowDisposition.RD_RedirectRow)
    {
        // Redirection is impossible without an error output to redirect to.
        if (errorBuffer == null)
            throw new InvalidOperationException("There must be an error output defined if redirection was specified");

        // Start a fresh row in the error buffer, then mirror the default
        // buffer's column values into it.
        errorBuffer.AddRow();
        foreach (IDTSOutputColumn100 column in GetErrorOutputColumns().ToArray())
        {
            ColumnInfo mapped = GetColumnInfo(column.Name);
            if (mapped != null)
                errorBuffer[mapped.ErrorOuputBufferColumnIndex] = defaultBuffer[mapped.OuputBufferColumnIndex];
        }

        // Record the HRESULT (0 when no exception was supplied) and the failing column.
        int hresult = (ex == null ? 0 : System.Runtime.InteropServices.Marshal.GetHRForException(ex));
        errorBuffer.SetErrorInfo(errorOutputID, hresult, failingColumnInfo.OutputColumn.LineageID);

        // The row now lives only in the error output; drop it from the default one.
        defaultBuffer.RemoveRow();
    }
    else if (disposition == DTSRowDisposition.RD_FailComponent)
    {
        throw new Exception(String.Format("There was an issue with column: {0}", failingColumnInfo.OutputColumn.Name), ex);
    }
    // Any other disposition (e.g. ignore failure) leaves the row in the default buffer.
}
/// <summary>
/// Serializes the configured input columns into the shared memory stream and
/// computes the hash value into the output columns for the current row.
/// </summary>
/// <param name="hCol">The hash column being produced.</param>
/// <param name="inputBufferColumns">Descriptors of the input columns feeding the hash.</param>
/// <param name="buffer">The pipeline buffer holding the current row.</param>
/// <param name="mb">Shared scratch buffers (memory stream, writers).</param>
public static void BuildAndCalculateHash(HashColumnsTransformation.HashColumnInfo hCol, List<HashColumnsTransformation.InputBufferColumnInfo> inputBufferColumns, PipelineBuffer buffer, HashMemoryBuffers mb)
{
    // Set length of the Stream to 2.
    // We are setting the length to 2 to keep the two unicode identification bytes
    // at the beginning, as StreamWriter writes those bytes only once.
    mb.MemoryStream.SetLength(2);
    // Write the input columns to the Memory Stream.
    HashColumnsTransformationHelper.BuildHashMemoryStream(hCol, inputBufferColumns, buffer, mb);
    // Calculate the hash and store it into the Output columns.
    HashColumnsTransformationHelper.CalculateHashAndStoreValue(hCol, mb, buffer);
}