/// <summary>
/// This is where the data is read from the input buffer. Rows are converted to
/// string dictionaries and sent to SharePoint in batches, either as an update
/// (BatchType.Modification) or as a delete-by-ID operation. Error rows returned
/// by SharePoint are logged (capped at 10) and a PipelineProcessException is
/// thrown so the component's box is marked failed.
/// </summary>
/// <param name="inputID">ID of the input whose buffer is being processed.</param>
/// <param name="buffer">The pipeline buffer containing the incoming rows.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    string sharepointUrl = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTSITEURL].Value;
    string sharepointList = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTLISTNAME].Value;
    string sharepointListView = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTLISTVIEWNAME].Value;
    short batchSize = (short)ComponentMetaData.CustomPropertyCollection[C_BATCHSIZE].Value;
    Enums.BatchType batchType = (Enums.BatchType)ComponentMetaData.CustomPropertyCollection[C_BATCHTYPE].Value;

    if (buffer.EndOfRowset)
    {
        return;
    }

    // Queue the data up for batching by the SharePoint accessor object.
    var dataQueue = new List<Dictionary<string, string>>();
    while (buffer.NextRow())
    {
        var rowData = new Dictionary<string, string>();
        foreach (var fieldName in _bufferLookup.Keys)
        {
            string fieldValue;
            if (TryFormatFieldValue(buffer, _bufferLookup[fieldName], _bufferLookupDataType[fieldName], out fieldValue))
            {
                rowData.Add(fieldName, fieldValue);
            }
        }
        dataQueue.Add(rowData);
    }

    bool fireAgain = false;
    if (dataQueue.Count == 0)
    {
        ComponentMetaData.FireInformation(0, ComponentMetaData.Name,
            "No rows found to update in destination.", "", 0, ref fireAgain);
        return;
    }

    var timer = System.Diagnostics.Stopwatch.StartNew();
    System.Xml.Linq.XElement resultData;
    if (batchType == Enums.BatchType.Modification)
    {
        // Perform the update.
        resultData = SharePointUtility.ListServiceUtility.UpdateListItems(
            new Uri(sharepointUrl), _credentials, sharepointList, sharepointListView,
            dataQueue, batchSize);
    }
    else
    {
        // Get the IDs read from the buffer and delete the matching list items.
        var idList = from data in dataQueue
                     where data["ID"].Trim().Length > 0
                     select data["ID"];
        resultData = SharePointUtility.ListServiceUtility.DeleteListItems(
            new Uri(sharepointUrl), _credentials, sharepointList, sharepointListView, idList);
    }
    timer.Stop();

    // Materialize once: this was a deferred query that the original code
    // re-evaluated on every Count() call and again in the foreach below.
    var errorRows = (from result in resultData.Descendants("errorCode")
                     select result.Parent).ToList();
    int successRowsWritten = resultData.Elements().Count() - errorRows.Count;

    string infoMsg = string.Format(CultureInfo.InvariantCulture,
        "Affected {0} records in list '{1}' at '{2}'. Elapsed time is {3}ms",
        successRowsWritten, sharepointList, sharepointUrl, timer.ElapsedMilliseconds);
    ComponentMetaData.FireInformation(0, ComponentMetaData.Name, infoMsg, "", 0, ref fireAgain);
    ComponentMetaData.IncrementPipelinePerfCounter(DTS_PIPELINE_CTR_ROWSWRITTEN, (uint)successRowsWritten);

    // Shovel any error rows to the error flow.
    // BUG FIX: the original threw inside the loop's first iteration, so only a
    // single error was ever logged and the "first 10" cap was unreachable dead
    // code; its >10 branch also returned WITHOUT throwing. Now up to 10 errors
    // are logged, then the exception is raised once, after the loop.
    bool cancel;
    int errorIter = 0;
    foreach (var row in errorRows)
    {
        errorIter++;
        if (errorIter > 10)
        {
            // Do not flood the error log.
            ComponentMetaData.FireError(0, ComponentMetaData.Name,
                "Total of " + errorRows.Count.ToString(_culture) + ", only showing first 10.", "", 0, out cancel);
            break;
        }

        string idString = "";
        XAttribute attrib = row.Element("row").Attribute("ID");
        if (attrib != null)
        {
            idString = "(SP ID=" + attrib.Value + ")";
        }
        string errorString = string.Format(CultureInfo.InvariantCulture,
            "Error on row {0}: {1} - {2} {3}",
            row.Attribute("ID"), row.Element("errorCode").Value,
            row.Element("errorDescription").Value, idString);
        ComponentMetaData.FireError(0, ComponentMetaData.Name, errorString, "", 0, out cancel);
    }

    if (errorRows.Count > 0)
    {
        // Need to throw an exception, or else this step's box is green (should
        // be red), even though the flow is marked as failure regardless.
        throw new PipelineProcessException("Errors detected in this component - see SSIS Errors");
    }
}

/// <summary>
/// Converts a single buffer column into the string representation sent to
/// SharePoint. NULL columns of a supported type map to the empty string;
/// unsupported data types produce no value (the field is omitted from the row,
/// matching the original switch's fall-through behavior).
/// </summary>
/// <param name="buffer">The pipeline buffer positioned on the current row.</param>
/// <param name="colIndex">Buffer column index of the field.</param>
/// <param name="dataType">SSIS data type of the column.</param>
/// <param name="result">The formatted value, or null when the type is unsupported.</param>
/// <returns>True when the data type is supported and a value was produced.</returns>
private bool TryFormatFieldValue(PipelineBuffer buffer, int colIndex, DataType dataType, out string result)
{
    switch (dataType)
    {
        case DataType.DT_STR:
        case DataType.DT_WSTR:
            result = buffer.IsNull(colIndex) ? string.Empty : buffer.GetString(colIndex);
            return true;
        case DataType.DT_NTEXT:
            if (buffer.IsNull(colIndex))
            {
                result = string.Empty;
            }
            else
            {
                // NTEXT arrives as a UTF-16 blob; decode it back into a string.
                int colDataLength = (int)buffer.GetBlobLength(colIndex);
                byte[] stringData = buffer.GetBlobData(colIndex, 0, colDataLength);
                result = Encoding.Unicode.GetString(stringData);
            }
            return true;
        case DataType.DT_R4:
            result = buffer.IsNull(colIndex) ? string.Empty : buffer.GetSingle(colIndex).ToString(_culture);
            return true;
        case DataType.DT_R8:
            result = buffer.IsNull(colIndex) ? string.Empty : buffer.GetDouble(colIndex).ToString(_culture);
            return true;
        case DataType.DT_UI1:
        case DataType.DT_I1:
        case DataType.DT_BOOL:
            // Single-byte integer columns are read as booleans (original mapping).
            result = buffer.IsNull(colIndex) ? string.Empty : buffer.GetBoolean(colIndex).ToString(_culture);
            return true;
        case DataType.DT_UI2:
        case DataType.DT_I2:
            result = buffer.IsNull(colIndex) ? string.Empty : buffer.GetInt16(colIndex).ToString(_culture);
            return true;
        case DataType.DT_UI4:
        case DataType.DT_I4:
            result = buffer.IsNull(colIndex) ? string.Empty : buffer.GetInt32(colIndex).ToString(_culture);
            return true;
        case DataType.DT_UI8:
        case DataType.DT_I8:
            result = buffer.IsNull(colIndex) ? string.Empty : buffer.GetInt64(colIndex).ToString(_culture);
            return true;
        case DataType.DT_GUID:
            result = buffer.IsNull(colIndex) ? string.Empty : buffer.GetGuid(colIndex).ToString();
            return true;
        case DataType.DT_DBTIMESTAMP:
            // "u" yields "yyyy-MM-dd HH:mm:ssZ"; swap the space for 'T' to get ISO 8601.
            result = buffer.IsNull(colIndex) ? string.Empty : buffer.GetDateTime(colIndex).ToString("u").Replace(" ", "T");
            return true;
        default:
            result = null;
            return false;
    }
}
/// <summary>
/// This is where the data is read from the input buffer. Each row is converted
/// into a dictionary of FieldValue entries (a string value plus a type tag) and
/// loaded into the component's dataset via CreateDataset/AddClassRows. NULL
/// columns are omitted from the row entirely. When the C_REMOVERECORDS custom
/// property is True, existing rows are cleared first.
/// </summary>
/// <param name="inputID">ID of the input whose buffer is being processed.</param>
/// <param name="buffer">The pipeline buffer containing the incoming rows.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    // NOTE: the original declared several never-read locals (SharePoint URL /
    // list / view placeholders, a batch size, a batch type, an unassigned
    // XElement and a Stopwatch whose elapsed time was never reported). They
    // had no effect on behavior and have been removed.
    if (buffer.EndOfRowset)
    {
        return;
    }

    // Queue the data up for batching by the sharepoint accessor object.
    var dataQueue = new List<Dictionary<string, FieldValue>>();
    while (buffer.NextRow())
    {
        var rowData = new Dictionary<string, FieldValue>();
        foreach (var fieldName in _bufferLookup.Keys)
        {
            int colIndex = _bufferLookup[fieldName];
            if (buffer.IsNull(colIndex))
            {
                // NULL columns are ignored: the field is simply not added.
                continue;
            }

            var fieldObj = new FieldValue();
            // The type tags below are consumed downstream as literal strings;
            // their inconsistent casing ("string" vs "String") is preserved
            // deliberately to avoid a behavior change.
            switch (_bufferLookupDataType[fieldName])
            {
                case DataType.DT_STR:
                case DataType.DT_WSTR:
                    fieldObj.value = buffer.GetString(colIndex);
                    fieldObj.type = "string";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_NTEXT:
                    // NTEXT arrives as a UTF-16 blob; decode it back into a string.
                    int colDataLength = (int)buffer.GetBlobLength(colIndex);
                    byte[] stringData = buffer.GetBlobData(colIndex, 0, colDataLength);
                    fieldObj.value = Encoding.Unicode.GetString(stringData);
                    fieldObj.type = "string";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_R4:
                    fieldObj.value = buffer.GetSingle(colIndex).ToString(_culture);
                    fieldObj.type = "Double";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_CY:
                    // Currency is read as decimal but tagged as Double downstream.
                    fieldObj.value = buffer.GetDecimal(colIndex).ToString(_culture);
                    fieldObj.type = "Double";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_R8:
                    fieldObj.value = buffer.GetDouble(colIndex).ToString(_culture);
                    fieldObj.type = "Double";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_UI1:
                case DataType.DT_I1:
                case DataType.DT_BOOL:
                    // Single-byte integer columns are read as booleans (original mapping).
                    fieldObj.value = buffer.GetBoolean(colIndex).ToString(_culture);
                    fieldObj.type = "Boolean";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_UI2:
                case DataType.DT_I2:
                    fieldObj.value = buffer.GetInt16(colIndex).ToString(_culture);
                    fieldObj.type = "Int64";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_UI4:
                case DataType.DT_I4:
                    fieldObj.value = buffer.GetInt32(colIndex).ToString(_culture);
                    fieldObj.type = "Int64";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_UI8:
                case DataType.DT_I8:
                    fieldObj.value = buffer.GetInt64(colIndex).ToString(_culture);
                    fieldObj.type = "Int64";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_GUID:
                    fieldObj.value = buffer.GetGuid(colIndex).ToString();
                    fieldObj.type = "String";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_DBTIMESTAMP:
                    // "u" yields "yyyy-MM-dd HH:mm:ssZ"; swap the space for 'T' to get ISO 8601.
                    fieldObj.value = buffer.GetDateTime(colIndex).ToString("u").Replace(" ", "T");
                    fieldObj.type = "Datetime";
                    rowData.Add(fieldName, fieldObj);
                    break;
                case DataType.DT_DATE:
                    fieldObj.value = buffer.GetDateTime(colIndex).ToString("yyyy-MM-dd");
                    fieldObj.type = "Datetime";
                    rowData.Add(fieldName, fieldObj);
                    break;
            }
        }
        dataQueue.Add(rowData);
    }

    bool fireAgain = false;
    var removeRecords = (Enums.TrueFalseValue)ComponentMetaData.CustomPropertyCollection[C_REMOVERECORDS].Value;
    if (removeRecords == Enums.TrueFalseValue.True)
    {
        // Clear any existing rows before loading the new batch.
        ClearRows();
    }

    if (dataQueue.Count > 0)
    {
        CreateDataset(dataQueue);
        AddClassRows(dataQueue);
    }
    else
    {
        ComponentMetaData.FireInformation(0, ComponentMetaData.Name,
            "No rows found to update in destination.", "", 0, ref fireAgain);
    }
}