/// <summary>
/// Processes each row of the input buffer: applies the transform to the configured
/// input column (skipping NULLs) and routes failing rows according to the first
/// input column's error row disposition.
/// </summary>
/// <param name="inputID">ID of the IDTSInput100 being processed.</param>
/// <param name="buffer">The PipelineBuffer containing the rows to process.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    if (buffer.EndOfRowset == false)
    {
        // Resolve the non-error (default) output: whichever output is not the
        // error output reported by GetErrorOutputInfo.
        int defaultOutputID = -1;
        int errorOutputID = -1;
        int errorOutputIndex = -1;
        GetErrorOutputInfo(ref errorOutputID, ref errorOutputIndex);
        if (errorOutputIndex == 0)
        {
            defaultOutputID = ComponentMetaData.OutputCollection[1].ID;
        }
        else
        {
            defaultOutputID = ComponentMetaData.OutputCollection[0].ID;
        }

        while (buffer.NextRow())
        {
            try
            {
                // Skip record if input column is null
                if (!buffer.IsNull(this.inputColumnBufferIndex))
                {
                    this.transform(ref buffer, defaultOutputID, this.inputColumnBufferIndex, this.outputColumnBufferIndex);
                }
            }
            catch (System.Exception ex)
            {
                // Error disposition is read from the first input column only.
                // NOTE(review): assumes the component has exactly one input column — confirm.
                IDTSInput100 input = ComponentMetaData.InputCollection.GetObjectByID(inputID);
                IDTSInputColumn100 inputColumn = input.InputColumnCollection[0];

                if (inputColumn.ErrorRowDisposition == DTSRowDisposition.RD_RedirectRow)
                {
                    int errorCode = System.Runtime.InteropServices.Marshal.GetHRForException(ex);
                    buffer.DirectErrorRow(errorOutputID, errorCode, inputColumn.LineageID);
                }
                else if (inputColumn.ErrorRowDisposition == DTSRowDisposition.RD_FailComponent
                      || inputColumn.ErrorRowDisposition == DTSRowDisposition.RD_NotUsed)
                {
                    ComponentMetaData.FireError(0, ComponentMetaData.Name, ex.Message, string.Empty, 0, out cancel);
                    // Fixed: pass the original exception as InnerException so the
                    // stack trace is preserved (the original rethrew only the message).
                    throw new Exception(ex.Message, ex);
                }
            }
        }
    }
}
/// <summary>
/// Runs the configured cleaning operations against every row in the buffer.
/// Rows that clean successfully are sent to the default output; a row whose
/// cleaning throws is redirected to the error output with code 100 and the
/// buffer index of the column that was being processed when the failure occurred.
/// </summary>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    int lastColumnIndex = 0;
    while (buffer.NextRow())
    {
        try
        {
            foreach (ColumnInfo column in _columnInfos)
            {
                // Remember which column we are on for error reporting below.
                lastColumnIndex = column.BufferIndex;

                if (Utility.ContainsFlag(column.Operation, CleaningOperation.SetNullDefault))
                {
                    ReplaceNullValueWithDefault(buffer, column);
                }
                if (Utility.ContainsFlag(column.Operation, CleaningOperation.TrimString))
                {
                    TrimString(buffer, column);
                }
                if (Utility.ContainsFlag(column.Operation, CleaningOperation.FormatValue))
                {
                    FormatValue(buffer, column);
                }
                if (Utility.ContainsFlag(column.Operation, CleaningOperation.ValidateRange))
                {
                    ValidateRange(buffer, column);
                }
                if (Utility.ContainsFlag(column.Operation, CleaningOperation.ValidateKnownGood))
                {
                    ValidateKnownGoodValue(buffer, column);
                }
            }

            buffer.DirectRow(_outputId);
        }
        catch (Exception)
        {
            buffer.DirectErrorRow(_errorOutId, 100, lastColumnIndex);
        }
    }
}
//Run Time - Validate Phone Number
/// <summary>
/// For every row, parses the phone number column, writes the parsed result to the
/// output columns, and sends the row to the default output. Rows whose parsing
/// throws are redirected to the error output with error code 100.
/// </summary>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    if (!buffer.EndOfRowset)
    {
        while (buffer.NextRow())
        {
            try
            {
                var phoneNumberColumnIndex = _phneNumberColumnInfo.PhoneNumberBufferIndex;
                var bufferColDataType = buffer.GetColumnInfo(inputBufferColumnIndex[_phneNumberColumnInfo.PhoneNumberBufferIndex]).DataType;
                var parsedPhoneNumberResult = GetParsedPhoneNumberResult(buffer, phoneNumberColumnIndex, bufferColDataType);
                SetPhoneNumberResultValuesToOutput(buffer, phoneNumberColumnIndex, parsedPhoneNumberResult);
                buffer.DirectRow(_outputId);
            }
            // Fixed: the caught exception variable was unused (compiler warning);
            // the exception details are intentionally not reported here.
            catch (Exception)
            {
                buffer.DirectErrorRow(_errorOutId, 100, _phonenumberLinage);
            }
        }
    }
}
/// <summary>
/// Downloads each qualifying file from the configured FTP folder, parses its
/// delimited lines, and adds them as rows to the output buffer. Files older than
/// the configured number of days are skipped; lines that fail to parse are
/// redirected to the error output with the current line number.
/// </summary>
/// <param name="outputs">Number of outputs (passed through to base).</param>
/// <param name="outputIDs">IDs of the outputs; only the first is used.</param>
/// <param name="buffers">Pipeline buffers; only the first is populated.</param>
public override void PrimeOutput(int outputs, int[] outputIDs, PipelineBuffer[] buffers)
{
    base.PrimeOutput(outputs, outputIDs, buffers);

    IDTSOutput100 output = ComponentMetaData.OutputCollection.FindObjectByID(outputIDs[0]);
    PipelineBuffer buffer = buffers[0];

    string path = ComponentMetaData.CustomPropertyCollection["FTP Path"].Value;
    int nrOfDays = ComponentMetaData.CustomPropertyCollection["FTP Files Not Older Then (Days)"].Value;

    // Loop-invariant custom properties — read once instead of per file / per line
    // (the original re-read "Separator" for every single line).
    string usr = ComponentMetaData.CustomPropertyCollection["FTP User"].Value;
    string pwd = ComponentMetaData.CustomPropertyCollection["FTP Password"].Value;
    int rowsToSkip = ComponentMetaData.CustomPropertyCollection["Skip rows"].Value;
    string separator = ComponentMetaData.CustomPropertyCollection["Separator"].Value;
    char separatorChar = separator.ToCharArray()[0];

    // Directory part of the file path, e.g. ftp://ftp.matchit.no/Maintenance/Archive/*.*
    string pathPart = System.IO.Path.GetDirectoryName(path);

    FileStruct[] files = _ftpHelper.GetFiles(path);
    foreach (FileStruct file in files)
    {
        // Skip files older than the configured number of days.
        if (file.CreateTime < DateTime.Now.AddDays(-nrOfDays))
        {
            continue;
        }

        // Build the ftp:// URL for this file; Path.Combine emits '\' but FTP wants '/'.
        string filePath = System.IO.Path.Combine(pathPart, file.Name).Replace(@"\", @"/");
        UriBuilder builder = new UriBuilder();
        builder.Scheme = "ftp";
        builder.Host = _ftpHelper.HostName;
        builder.Path = filePath;
        String url = builder.ToString();

        FtpWebRequest request = (FtpWebRequest)WebRequest.Create(url);
        request.Method = WebRequestMethods.Ftp.DownloadFile;
        request.Credentials = new NetworkCredential(usr, pwd);

        // Fixed: dispose the response and its streams — the original leaked one
        // FtpWebResponse/Stream/StreamReader per downloaded file.
        using (FtpWebResponse response = (FtpWebResponse)request.GetResponse())
        using (Stream responseStream = response.GetResponseStream())
        using (StreamReader reader = new StreamReader(responseStream))
        {
            int currentLineNumber = 0;
            while (!reader.EndOfStream)
            {
                try
                {
                    string line = reader.ReadLine();

                    // Skip the configured number of header rows.
                    if (rowsToSkip > currentLineNumber)
                    {
                        currentLineNumber++;
                        continue;
                    }

                    string[] values = line.Split(separatorChar);

                    // Create the output row and convert each field by its declared type.
                    buffer.AddRow();
                    int i = 0;
                    foreach (CustomDataColumn column in _columnsList)
                    {
                        switch (column.Type)
                        {
                            case "I4":
                                buffer[i] = int.Parse(values[i]);
                                break;
                            case "WSTR":
                                buffer[i] = values[i];
                                break;
                            case "DBTIMESTAMP":
                                // For DateTime the "Length" property carries the format string.
                                string dateFormatString = column.Length;
                                buffer[i] = DateTime.ParseExact(values[i], dateFormatString, CultureInfo.InvariantCulture);
                                break;
                            default:
                                throw new NotImplementedException("Error, " + column.Type + " is not implemented");
                        }
                        i++;
                    }
                    currentLineNumber++;
                }
                catch (Exception)
                {
                    // Line failed to parse: redirect it with its line number.
                    // NOTE(review): currentLineNumber is not incremented on failure,
                    // so subsequent errors report the same number — preserved as-is.
                    buffer.DirectErrorRow(0, 1, currentLineNumber);
                }
            }
        }
    }
    buffer.SetEndOfRowset();
}
/// <summary>
/// Reverses, in place, the characters of every selected string column of each row.
/// Rows flow to the "RSout" output; failures are handled per the input's
/// ErrorRowDisposition (redirect to "RSErrors", fail the component, or ignore).
/// </summary>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    int errorOutputID = -1;
    int errorOutputIndex = -1;
    int GoodOutputId = -1;
    IDTSInput100 inp = ComponentMetaData.InputCollection.GetObjectByID(inputID);

    #region Output IDs
    GetErrorOutputInfo(ref errorOutputID, ref errorOutputIndex);
    // There is an error output defined. Note the ID from GetErrorOutputInfo is
    // immediately overwritten by the name-based lookup below (kept as original).
    errorOutputID = ComponentMetaData.OutputCollection["RSErrors"].ID;
    GoodOutputId = ComponentMetaData.OutputCollection["RSout"].ID;
    #endregion

    while (buffer.NextRow())
    {
        // Check if we have columns to process
        if (_inputColumnInfos.Length == 0)
        {
            // We do not have to have columns. This is a Sync component so the
            // rows will flow through regardless. Could expand Validate to check
            // for columns in the InputColumnCollection
            buffer.DirectRow(GoodOutputId);
        }
        else
        {
            try
            {
                for (int x = 0; x < _inputColumnInfos.Length; x++)
                {
                    ColumnInfo columnInfo = _inputColumnInfos[x];
                    if (!buffer.IsNull(columnInfo.bufferColumnIndex))
                    {
                        // Reverse the column value in place. Fixed: GetString already
                        // returns a string — the original's extra ToString() was redundant.
                        char[] chars = buffer.GetString(columnInfo.bufferColumnIndex).ToCharArray();
                        Array.Reverse(chars);
                        string s = new string(chars);
                        buffer.SetString(columnInfo.bufferColumnIndex, s);
                    }
                }
                buffer.DirectRow(GoodOutputId);
            }
            catch (Exception ex)
            {
                switch (inp.ErrorRowDisposition)
                {
                    case DTSRowDisposition.RD_RedirectRow:
                        buffer.DirectErrorRow(errorOutputID, 0, buffer.CurrentRow);
                        break;
                    case DTSRowDisposition.RD_FailComponent:
                        // Fixed: keep the original exception as InnerException so
                        // the stack trace is not lost.
                        throw new Exception("Error processing " + ex.Message, ex);
                    case DTSRowDisposition.RD_IgnoreFailure:
                        buffer.DirectRow(GoodOutputId);
                        break;
                }
            }
        }
    }
}
/// <summary>
/// Processes each row of the input buffer: applies the transform to the configured
/// input column (skipping NULLs) and routes failing rows according to the first
/// input column's error row disposition.
/// </summary>
/// <param name="inputID">ID of the IDTSInput100 being processed.</param>
/// <param name="buffer">The PipelineBuffer containing the rows to process.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    if (buffer.EndOfRowset == false)
    {
        // Resolve the non-error (default) output: whichever output is not the
        // error output reported by GetErrorOutputInfo.
        int defaultOutputID = -1;
        int errorOutputID = -1;
        int errorOutputIndex = -1;
        GetErrorOutputInfo(ref errorOutputID, ref errorOutputIndex);
        if (errorOutputIndex == 0)
            defaultOutputID = ComponentMetaData.OutputCollection[1].ID;
        else
            defaultOutputID = ComponentMetaData.OutputCollection[0].ID;

        while (buffer.NextRow())
        {
            try
            {
                //Skip record if input column is null
                if (!buffer.IsNull(this.inputColumnBufferIndex))
                {
                    this.transform(ref buffer, defaultOutputID, this.inputColumnBufferIndex, this.outputColumnBufferIndex);
                }
            }
            catch (System.Exception ex)
            {
                // Error disposition is read from the first input column only.
                // NOTE(review): assumes the component has exactly one input column — confirm.
                IDTSInput100 input = ComponentMetaData.InputCollection.GetObjectByID(inputID);
                IDTSInputColumn100 inputColumn = input.InputColumnCollection[0];

                if (inputColumn.ErrorRowDisposition == DTSRowDisposition.RD_RedirectRow)
                {
                    int errorCode = System.Runtime.InteropServices.Marshal.GetHRForException(ex);
                    buffer.DirectErrorRow(errorOutputID, errorCode, inputColumn.LineageID);
                }
                else if (inputColumn.ErrorRowDisposition == DTSRowDisposition.RD_FailComponent
                      || inputColumn.ErrorRowDisposition == DTSRowDisposition.RD_NotUsed)
                {
                    ComponentMetaData.FireError(0, ComponentMetaData.Name, ex.Message, string.Empty, 0, out cancel);
                    // Fixed: pass the original exception as InnerException so the
                    // stack trace is preserved (the original rethrew only the message).
                    throw new Exception(ex.Message, ex);
                }
            }
        }
    }
}
// This method is used to redirect error rows when the error happens
// during MERGING. Such an error would typically occur due to
// violation of table constraints, an incorrect MERGE statement, etc.
// Redirects one buffer row per row in m_table, starting at startBuffIndex.
private void RedirectMergeErrors(PipelineBuffer buffer, int startBuffIndex, int errorOutputID)
{
    int rowCount = m_table.Rows.Count;
    // Fixed off-by-one: the original looped with `index <= rowCount`, which
    // redirected rowCount + 1 rows (one past the batch).
    // NOTE(review): the argument order here (row first, then errorOutputID) should
    // be verified against the DirectErrorRow overload actually being called.
    for (int index = 0; index < rowCount; index++)
    {
        buffer.DirectErrorRow(startBuffIndex + index, errorOutputID, HResults.DTS_E_ADODESTERRORUPDATEROW, 0);
    }
}
/// <summary>
/// Sends Outputs to files. //TODO: Rewrite this method
/// Maps a batch of CRM ExecuteMultiple responses back onto their buffer rows:
/// rows whose response carries a Fault are redirected to the error output, while
/// successful rows get the operation's result id written to the last output
/// column and are sent to the default output. The buffer cursor is rewound to
/// startBuffIndex for the walk and restored before returning.
/// </summary>
/// <param name="Integ">Per-batch CRM integration results to map onto buffer rows.</param>
/// <param name="buffer">The pipeline buffer whose rows correspond to the responses.</param>
/// <param name="startBuffIndex">Buffer row index where this batch started.</param>
private void sendOutputResults(CRMIntegrate[] Integ, PipelineBuffer buffer, int startBuffIndex)
{
    IEnumerable<ExecuteMultipleResponseItem> FltResp;
    IEnumerable<ExecuteMultipleResponseItem> OkResp;

    // Remember the caller's cursor position so it can be restored at the end.
    int current = buffer.CurrentRow;
    buffer.CurrentRow = startBuffIndex;

    foreach (CRMIntegrate irsp in Integ)
    {
        if (irsp.Resp != null)
        {
            if (irsp.Resp.IsFaulted)
            {
                // Redirect each faulted response's row to the error output,
                // advancing the cursor one row per response.
                // NOTE(review): if faulted and successful responses interleave
                // within one batch, this row-by-row cursor advance may not align
                // rows with their responses — verify against SendRowsToCRM.
                FltResp = irsp.Resp.Responses.Where(r => r.Fault != null);
                foreach (ExecuteMultipleResponseItem itm in FltResp)
                {
                    buffer.DirectErrorRow(errorOutputId, itm.Fault.ErrorCode, buffer.CurrentRow);
                    if (buffer.CurrentRow < buffer.RowCount)
                    {
                        buffer.NextRow();
                    }
                }
            }

            OkResp = irsp.Resp.Responses.Where(r => r.Fault == null);
            //int ResponseColumn = ComponentMetaData.InputCollection[0].InputColumnCollection.Count + ComponentMetaData.OutputCollection[0].OutputColumnCollection.Count;
            // The result id is written into the last column of the default output.
            int ResponseColumn = ComponentMetaData.OutputCollection[0].OutputColumnCollection.Count - 1;
            foreach (ExecuteMultipleResponseItem itm in OkResp)
            {
                //Add the inserted GUID for Create Operation
                switch ((Operations)operation)
                {
                    case Operations.Create:
                        // Create returns the new record id directly.
                        buffer.SetString(ResponseColumn, ((CreateResponse)itm.Response).id.ToString());
                        break;
                    case Operations.Update:
                        buffer.SetString(ResponseColumn, ((UpdateResponse)itm.Response).Results.FirstOrDefault().Value.ToString());
                        break;
                    case Operations.Delete:
                        buffer.SetString(ResponseColumn, ((DeleteResponse)itm.Response).Results.FirstOrDefault().Value.ToString());
                        break;
                    case Operations.Upsert:
                        buffer.SetString(ResponseColumn, ((UpsertResponse)itm.Response).Results.FirstOrDefault().Value.ToString());
                        break;
                    case Operations.Status:
                        buffer.SetString(ResponseColumn, ((SetStateResponse)itm.Response).Results.FirstOrDefault().Value.ToString());
                        break;
                    case Operations.Workflow:
                        buffer.SetString(ResponseColumn, ((ExecuteWorkflowResponse)itm.Response).Results.FirstOrDefault().Value.ToString());
                        break;
                }
                buffer.DirectRow(defaultOuputId);
                if (buffer.CurrentRow < buffer.RowCount)
                {
                    buffer.NextRow();
                }
            }
        }
        else if (irsp.ExceptionMessage != "")
        {
            // No response at all but an exception message: redirect with code -1.
            buffer.DirectErrorRow(errorOutputId, -1, buffer.CurrentRow);
            if (buffer.CurrentRow < buffer.RowCount)
            {
                buffer.NextRow();
            }
        }
    }

    // Restore the caller's cursor position.
    buffer.CurrentRow = current;
}
/// <summary>
/// Process the rows from the datasource: builds one CRM Entity per buffer row
/// from the configured column mapping, wraps it in the request type matching the
/// configured operation, and sends accumulated requests to CRM in batches via
/// SendRowsToCRM / sendOutputResults.
/// </summary>
/// <param name="inputID">ID of the IDTSInput100 being processed.</param>
/// <param name="buffer">The PipelineBuffer containing the rows to process.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    EntityCollection newEntityCollection = new EntityCollection();
    List<OrganizationRequest> Rqs = new List<OrganizationRequest>();
    Mapping.MappingItem mappedColumn;
    IDTSInputColumn100 inputcolumn;
    IDTSInput100 input = ComponentMetaData.InputCollection.GetObjectByID(inputID);
    Entity newEntity;

    while (buffer.NextRow())
    {
        try
        {
            newEntity = new Entity(EntityName);
            // bchCnt counts rows accumulated for the current batch.
            // NOTE(review): no reset of bchCnt is visible in this block after a
            // batch is sent — presumably reset elsewhere (e.g. SendRowsToCRM); verify.
            bchCnt++;

            //adds the row to output buffer for further processing.
            // Copy each mapped input column into the entity; NULL cells fall back
            // to the mapping's configured default value.
            foreach (int col in mapInputColsToBufferCols)
            {
                inputcolumn = ComponentMetaData.InputCollection[0].InputColumnCollection[col];
                mappedColumn = mapping.ColumnList.Find(x => x.ExternalColumnName == inputcolumn.Name && x.Map == true);
                if (mappedColumn != null)
                {
                    if (buffer.IsNull(col) == false)
                    {
                        AttributesBuilder(mappedColumn, buffer[col], ref newEntity);
                    }
                    else
                    {
                        AttributesBuilder(mappedColumn, mappedColumn.DefaultValue, ref newEntity);
                    }
                }
            }

            // Wrap the entity in the request matching the configured operation.
            // Note: ownerid is assigned after Rqs.Add — this still affects the
            // request because the request holds a reference to the same entity.
            switch ((Operations)operation)
            {
                //Create
                case Operations.Create:
                    Rqs.Add(new CreateRequest { Target = newEntity });
                    newEntity.Attributes["ownerid"] = new EntityReference("systemuser", currentUserId);
                    break;
                //Update
                case Operations.Update:
                    Rqs.Add(new UpdateRequest { Target = newEntity });
                    newEntity.Attributes["ownerid"] = new EntityReference("systemuser", currentUserId);
                    break;
                //Delete
                case Operations.Delete:
                    Rqs.Add(new DeleteRequest { Target = newEntity.ToEntityReference() });
                    break;
                //status
                case Operations.Status:
                    Rqs.Add(new SetStateRequest { EntityMoniker = newEntity.ToEntityReference(), State = new OptionSetValue((int)newEntity.Attributes["statecode"]), Status = new OptionSetValue((int)newEntity.Attributes["statuscode"]) });
                    break;
                case Operations.Upsert:
                    Rqs.Add(new UpsertRequest { Target = newEntity });
                    newEntity.Attributes["ownerid"] = new EntityReference("systemuser", currentUserId);
                    break;
                case Operations.Workflow:
                    newEntity.Attributes["ownerid"] = new EntityReference("systemuser", currentUserId);
                    Rqs.Add(new ExecuteWorkflowRequest { EntityId = newEntity.Id, WorkflowId = Guid.Parse(WorkflowId) });
                    break;
            }

            newEntityCollection.Entities.Add(newEntity);
            rowIndexList.Add(ir);

            // Flush the batch when it reaches batchSize * 2 rows or the buffer is
            // (nearly) exhausted.
            // NOTE(review): the RowCount % 2 / RowCount - 1 condition looks like a
            // workaround for an off-by-one on odd row counts — confirm intent.
            if (bchCnt == batchSize * 2 || (buffer.CurrentRow == buffer.RowCount || (buffer.RowCount % 2 != 0 && buffer.CurrentRow == buffer.RowCount - 1)))
            {
                int startBuffIndex = buffer.CurrentRow - (bchCnt - 1);
                CRMIntegrate[] IntegrationRows = SendRowsToCRM(newEntityCollection, EntityName, Rqs);
                sendOutputResults(IntegrationRows, buffer, startBuffIndex);
            }
            // ir tracks the absolute row index across all buffers.
            ir++;
        }
        catch (Exception ex)
        {
            // Route the failing row per the input's error disposition.
            switch (input.ErrorRowDisposition)
            {
                case DTSRowDisposition.RD_RedirectRow:
                    buffer.DirectErrorRow(errorOutputId, 0, buffer.CurrentRow);
                    break;
                case DTSRowDisposition.RD_IgnoreFailure:
                    buffer.DirectRow(defaultOuputId);
                    break;
                case DTSRowDisposition.RD_FailComponent:
                    throw new Exception("There was and error processing rows. " + ex.Message);
            }
        }
    }
}
/// <summary>
/// Reads this column's value from the buffer, converted according to its DT_* type.
/// Returns null for NULL cells, for DT_NULL/DT_EMPTY, and for image blobs at or
/// over the 2 MB CloudDB entity limit (those are redirected to the error output).
/// </summary>
public object GetColumnValue(PipelineBuffer buffer)
{
    // NULL cells produce null without touching the buffer value.
    if (buffer.IsNull(Index))
    {
        return null;
    }

    switch (DataType)
    {
        // Booleans and all date/time variants come straight from the buffer.
        case DataType.DT_BOOL:
        case DataType.DT_DATE:
        case DataType.DT_DBDATE:
        case DataType.DT_DBTIME:
        case DataType.DT_DBTIME2:
        case DataType.DT_DBTIMESTAMP:
        case DataType.DT_DBTIMESTAMP2:
            return buffer[Index];

        case DataType.DT_NTEXT:
        case DataType.DT_TEXT:
            return buffer.GetString(Index);

        case DataType.DT_IMAGE:
        {
            uint blobLength = buffer.GetBlobLength(Index);
            if (blobLength >= 2000000)
            {
                // The Limit of Entity size in CloudDB is 2M; reject the record
                // if the size exceeds the limit.
                buffer.DirectErrorRow(_errorOutputId, HResults.DTS_E_LOBLENGTHLIMITEXCEEDED, ID);
                return null;
            }
            return buffer.GetBlobData(Index, 0, (int)blobLength);
        }

        case DataType.DT_BYTES:
            return buffer.GetBytes(Index);

        // Every numeric type is normalized to decimal via invariant-culture parsing.
        case DataType.DT_I1:
        case DataType.DT_I2:
        case DataType.DT_I4:
        case DataType.DT_I8:
        case DataType.DT_UI1:
        case DataType.DT_UI2:
        case DataType.DT_UI4:
        case DataType.DT_UI8:
        case DataType.DT_R4:
        case DataType.DT_R8:
        case DataType.DT_DECIMAL:
        case DataType.DT_NUMERIC:
        case DataType.DT_CY:
            return decimal.Parse(buffer[Index].ToString(), CultureInfo.InvariantCulture);

        case DataType.DT_NULL:
        case DataType.DT_EMPTY:
            return null;

        default:
            return buffer[Index].ToString();
    }
}
/// <summary>
/// Add the rows from the input buffer to the Container. Each row's entity ID is
/// either a fresh GUID or the configured ID column's value; rows with a NULL or
/// empty ID are redirected to the error output. Inserts run inline or via the
/// worker thread pool depending on the multithread setting.
/// </summary>
/// <param name="inputID">The ID of the IDTSInput100</param>
/// <param name="buffer">The PipelineBuffer containing the records to process</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    while (buffer.NextRow())
    {
        // Resolve the entity ID for this row.
        string id = string.Empty;
        if (_CreateNewID)
        {
            id = Guid.NewGuid().ToString();
        }
        else if (!buffer.IsNull(_idColumnIndex))
        {
            id = buffer[_idColumnIndex].ToString();
        }
        else
        {
            // NULL ID column: report, redirect, and move on to the next row.
            ComponentMetaData.FireError(0, ComponentMetaData.Name, string.Format("The ID column '{0}' has a NULL value", _idColumnName), string.Empty, 0, out this._cancel);
            buffer.DirectErrorRow(_errorOutputId, 1, _idColumnId);
            continue;
        }

        if (string.IsNullOrEmpty(id))
        {
            ComponentMetaData.FireError(0, ComponentMetaData.Name, string.Format("Null or empty value from ID Column '{0}'", _idColumnName), string.Empty, 0, out this._cancel);
            buffer.DirectErrorRow(_errorOutputId, 1, _idColumnId);
            continue;
        }

        // Build the entity and copy all other mapped column values onto it.
        Entity entity = new Entity(id, buffer.CurrentRow);
        entity.Kind = _EntityKind;
        foreach (InputColumnInfo col in _inputColumnInfo)
        {
            object value = col.GetColumnValue(buffer);
            if (value != null)
            {
                entity.Properties[col.Name] = value;
            }
        }

        // Perform the insert — queued to the pool or executed inline.
        if (_multithread)
        {
            DataPerThread threadData = new DataPerThread(entity, buffer);
            _threadPool.QueueUserWorkItem(new WaitCallback(InsertEntity), threadData);
        }
        else
        {
            _Container.InsertEntity(entity);
        }
        // this.ComponentMetaData.FireInformation(0, "SSDS Destination", "Sent Entity with ID:" + obj.Id, string.Empty, 0, ref _cancel);
    }

    if (_multithread)
    {
        // Wait for all of our work items to finish processing
        _threadPool.WaitOne();
    }
}
/// <summary>
/// Writes each buffer row to the target OGR layer as a feature, committing in
/// batches of batchSize inside layer transactions. Failing rows are redirected or
/// fail the component according to the current column's error disposition.
/// </summary>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    Layer OGRLayer = this.getLayer();
    FeatureDefn OGRFeatureDef = OGRLayer.GetLayerDefn();
    int batchCount = 0;
    OGRLayer.StartTransaction();

    // ci tracks the column currently being processed so the catch block can
    // report its lineage ID and honor its error disposition.
    columnInfo ci = new columnInfo();

    while (buffer.NextRow())
    {
        try
        {
            // Commit the current batch and open a new transaction.
            // NOTE(review): on the very first row (batchCount == 0) this commits
            // an empty transaction — preserved from the original behavior.
            if (this.batchSize != 0 && batchCount % this.batchSize == 0)
            {
                OGRLayer.CommitTransaction();
                OGRLayer.StartTransaction();
                batchCount = 0;
            }

            Feature OGRFeature = new Feature(OGRFeatureDef);
            for (int i = 0; i < this.columnInformation.Count; i++)
            {
                ci = this.columnInformation[i];
                if (!buffer.IsNull(ci.bufferColumnIndex))
                {
                    if (ci.geom)
                    {
                        // Fixed: read the WKB blob directly — the original
                        // pre-allocated a byte[] that GetBlobData's return value
                        // immediately replaced (wasted allocation per row).
                        byte[] geomBytes = buffer.GetBlobData(ci.bufferColumnIndex, 0, (int)buffer.GetBlobLength(ci.bufferColumnIndex));
                        Geometry geom = Geometry.CreateFromWkb(geomBytes);
                        OGRFeature.SetGeometry(geom);
                    }
                    else
                    {
                        int OGRFieldIndex = OGRFeatureDef.GetFieldIndex(ci.columnName);
                        FieldDefn OGRFieldDef = OGRFeatureDef.GetFieldDefn(OGRFieldIndex);
                        FieldType OGRFieldType = OGRFieldDef.GetFieldType();
                        //declare datetime variables
                        DateTime dt;
                        TimeSpan ts;
                        switch (OGRFieldType)
                        {
                            //case FieldType.OFTBinary:
                            //    break;
                            case FieldType.OFTDate:
                                dt = buffer.GetDate(ci.bufferColumnIndex);
                                OGRFeature.SetField(OGRFieldIndex, dt.Year, dt.Month, dt.Day, 0, 0, 0, 0);
                                break;
                            case FieldType.OFTDateTime:
                                dt = buffer.GetDateTime(ci.bufferColumnIndex);
                                //get timezone?
                                OGRFeature.SetField(OGRFieldIndex, dt.Year, dt.Month, dt.Day, dt.Hour, dt.Minute, dt.Second, 0);
                                break;
                            case FieldType.OFTInteger:
                                OGRFeature.SetField(OGRFieldIndex, buffer.GetInt32(ci.bufferColumnIndex));
                                break;
                            case FieldType.OFTInteger64:
                                OGRFeature.SetField(OGRFieldIndex, buffer.GetInt64(ci.bufferColumnIndex));
                                break;
                            case FieldType.OFTReal:
                                OGRFeature.SetField(OGRFieldIndex, buffer.GetDouble(ci.bufferColumnIndex));
                                break;
                            case FieldType.OFTTime:
                                ts = buffer.GetTime(ci.bufferColumnIndex);
                                OGRFeature.SetField(OGRFieldIndex, 0, 0, 0, ts.Hours, ts.Minutes, ts.Seconds, 0);
                                break;
                            case FieldType.OFTString:
                            default:
                                OGRFeature.SetField(OGRFieldIndex, buffer.GetString(ci.bufferColumnIndex));
                                break;
                        }
                    }
                }
            }
            OGRLayer.CreateFeature(OGRFeature);
            batchCount++;
            //increment incrementPipelinePerfCounters to display correct # of rows written
            ComponentMetaData.IncrementPipelinePerfCounter(103, 1);
        }
        catch (Exception ex)
        {
            // ci holds the last column examined — assumed to be the failing one.
            IDTSInputColumn100 inputColumn = ComponentMetaData.InputCollection[0].InputColumnCollection.GetInputColumnByLineageID(ci.lineageID);
            IDTSOutput100 output = ComponentMetaData.OutputCollection[0];
            if (ci.errorDisposition == DTSRowDisposition.RD_RedirectRow)
            {
                int errorCode = System.Runtime.InteropServices.Marshal.GetHRForException(ex);
                buffer.DirectErrorRow(output.ID, errorCode, inputColumn.LineageID);
            }
            else if (ci.errorDisposition == DTSRowDisposition.RD_FailComponent || ci.errorDisposition == DTSRowDisposition.RD_NotUsed)
            {
                OGRLayer.RollbackTransaction();
                ComponentMetaData.FireError(0, ComponentMetaData.Name, ex.Message, string.Empty, 0, out cancel);
                // Fixed: keep the original exception as InnerException so the
                // stack trace is not lost.
                throw new Exception(ex.Message, ex);
            }
        }
    }
    OGRLayer.CommitTransaction();
}
/// <summary>
/// Reads this column's value from the buffer, converted according to its DT_* type.
/// Returns null for NULL cells, for DT_NULL/DT_EMPTY, and for image blobs at or
/// over the 2 MB CloudDB entity limit (those are redirected to the error output).
/// </summary>
public object GetColumnValue(PipelineBuffer buffer)
{
    // A NULL cell short-circuits: nothing is read from the buffer.
    if (buffer.IsNull(Index))
    {
        return null;
    }

    switch (DataType)
    {
        // Booleans and all date/time variants are returned as stored.
        case DataType.DT_BOOL:
        case DataType.DT_DATE:
        case DataType.DT_DBDATE:
        case DataType.DT_DBTIME:
        case DataType.DT_DBTIME2:
        case DataType.DT_DBTIMESTAMP:
        case DataType.DT_DBTIMESTAMP2:
            return buffer[Index];

        case DataType.DT_NTEXT:
        case DataType.DT_TEXT:
            return buffer.GetString(Index);

        case DataType.DT_IMAGE:
        {
            uint blobLength = buffer.GetBlobLength(Index);
            if (blobLength >= 2000000)
            {
                // The Limit of Entity size in CloudDB is 2M; reject the record
                // if the size exceeds the limit.
                buffer.DirectErrorRow(_errorOutputId, HResults.DTS_E_LOBLENGTHLIMITEXCEEDED, ID);
                return null;
            }
            return buffer.GetBlobData(Index, 0, (int)blobLength);
        }

        case DataType.DT_BYTES:
            return buffer.GetBytes(Index);

        // All numeric types are normalized to decimal via invariant-culture parsing.
        case DataType.DT_I1:
        case DataType.DT_I2:
        case DataType.DT_I4:
        case DataType.DT_I8:
        case DataType.DT_UI1:
        case DataType.DT_UI2:
        case DataType.DT_UI4:
        case DataType.DT_UI8:
        case DataType.DT_R4:
        case DataType.DT_R8:
        case DataType.DT_DECIMAL:
        case DataType.DT_NUMERIC:
        case DataType.DT_CY:
            return decimal.Parse(buffer[Index].ToString(), CultureInfo.InvariantCulture);

        case DataType.DT_NULL:
        case DataType.DT_EMPTY:
            return null;

        default:
            return buffer[Index].ToString();
    }
}