/// <summary>
/// Creates the Power BI dataset (via a POST request) if a dataset with the
/// configured name does not already exist.
/// </summary>
/// <param name="data">Rows whose fields define the schema of the new dataset.</param>
private void CreateDataset(List<Dictionary<string, FieldValue>> data)
{
    // In a production application, use more specific exception handling.
    try
    {
        // NOTE(review): the C_USERFIFO property was previously read into a local
        // misleadingly named "isRecursive"; renamed to reflect its actual meaning.
        Enums.TrueFalseValue useFifo =
            (Enums.TrueFalseValue)ComponentMetaData.CustomPropertyCollection[C_USERFIFO].Value;

        // Opt into the basicFIFO retention policy when requested.
        var useFifoQuery = useFifo == Enums.TrueFalseValue.True
            ? "?defaultRetentionPolicy=basicFIFO"
            : "";

        // Create a POST web request against the datasets endpoint.
        HttpWebRequest request = DatasetRequest(this.PowerBIDataSets + useFifoQuery, "POST", AccessToken());

        // Only create the dataset when none with the configured name exists yet.
        var datasets = GetAllDatasets()
            .Datasets((string)ComponentMetaData.CustomPropertyCollection[C_DATASET].Value);
        if (!datasets.Any())
        {
            // POST request using the json schema derived from the incoming rows.
            PostRequest(request, CreateNewDataSet(data));
        }
    }
    catch (Exception ex)
    {
        // The original code swallowed every exception silently, hiding dataset
        // creation failures. Surface the error to the SSIS pipeline instead.
        bool cancel = false;
        ComponentMetaData.FireError(0, ComponentMetaData.Name, ex.Message, "", 0, out cancel);
    }
}
/// <summary>
/// This is where the data is loaded into the output buffer. Reads list items
/// from SharePoint using the configured site/list/view and CAML query, then
/// copies each row's fields into the pipeline output buffer, converting to the
/// declared SSIS data types. Missing fields are written as empty/false/null.
/// </summary>
/// <param name="outputs">Number of outputs (this component uses the first).</param>
/// <param name="outputIDs">IDs of the outputs.</param>
/// <param name="buffers">Pipeline buffers; buffers[0] receives the rows.</param>
public override void PrimeOutput(int outputs, int[] outputIDs, PipelineBuffer[] buffers)
{
    // Read the component's configuration from its custom properties.
    string sharepointUrl = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTSITEURL].Value;
    string sharepointList = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTLISTNAME].Value;
    string sharepointListView = (string)ComponentMetaData.CustomPropertyCollection[C_SHAREPOINTLISTVIEWNAME].Value;
    XElement camlQuery = XElement.Parse((string)ComponentMetaData.CustomPropertyCollection[C_CAMLQUERY].Value);
    short batchSize = (short)ComponentMetaData.CustomPropertyCollection[C_BATCHSIZE].Value;
    Enums.TrueFalseValue isRecursive =
        (Enums.TrueFalseValue)ComponentMetaData.CustomPropertyCollection[C_ISRECURSIVE].Value;
    Enums.TrueFalseValue includeFolders =
        (Enums.TrueFalseValue)ComponentMetaData.CustomPropertyCollection[C_INCLUDEFOLDERS].Value;

    PipelineBuffer outputBuffer = buffers[0];

    // Get the field names from the output collection (stored in each column's
    // first custom property).
    var fieldNames = (from col in ComponentMetaData.OutputCollection[0].OutputColumnCollection.Cast<IDTSOutputColumn>()
                      select (string)col.CustomPropertyCollection[0].Value);

    // Load the data from SharePoint, timing the call for the info message below.
    System.Diagnostics.Stopwatch timer = new System.Diagnostics.Stopwatch();
    timer.Start();
    var listData = SharePointUtility.ListServiceUtility.GetListItemData(
        new Uri(sharepointUrl),
        _credentials,
        sharepointList,
        sharepointListView,
        fieldNames,
        camlQuery,
        // Fixed: was the redundant "cond ? true : false".
        isRecursive == Enums.TrueFalseValue.True,
        batchSize);
    timer.Stop();

    bool fireAgain = false;
    int actualRowCount = 0;
    foreach (var row in listData)
    {
        // Determine if we should continue based on if this is a folder item or
        // not (filter can be pushed up to CAML if perf becomes an issue).
        bool canContinue = true;
        if ((row.ContainsKey("ContentType")) &&
            (row["ContentType"] == "Folder") &&
            (includeFolders == Enums.TrueFalseValue.False))
        {
            canContinue = false;
        }

        if (canContinue)
        {
            actualRowCount++;
            outputBuffer.AddRow();
            foreach (var fieldName in _bufferLookup.Keys)
            {
                if (row.ContainsKey(fieldName))
                {
                    // Convert the SharePoint string value to the column's SSIS type.
                    switch (_bufferLookupDataType[fieldName])
                    {
                        case DataType.DT_NTEXT:
                            outputBuffer.AddBlobData(_bufferLookup[fieldName],
                                Encoding.Unicode.GetBytes(row[fieldName].ToString()));
                            break;
                        case DataType.DT_WSTR:
                            outputBuffer.SetString(_bufferLookup[fieldName], row[fieldName]);
                            break;
                        case DataType.DT_R8:
                            outputBuffer.SetDouble(_bufferLookup[fieldName], double.Parse(row[fieldName], _culture));
                            break;
                        case DataType.DT_I4:
                            outputBuffer.SetInt32(_bufferLookup[fieldName], int.Parse(row[fieldName], _culture));
                            break;
                        case DataType.DT_BOOL:
                            // SharePoint booleans arrive as "0"/"1".
                            outputBuffer.SetBoolean(_bufferLookup[fieldName],
                                (int.Parse(row[fieldName], _culture) == 1));
                            break;
                        case DataType.DT_GUID:
                            outputBuffer.SetGuid(_bufferLookup[fieldName], new Guid(row[fieldName]));
                            break;
                        case DataType.DT_DBTIMESTAMP:
                            outputBuffer.SetDateTime(_bufferLookup[fieldName], DateTime.Parse(row[fieldName], _culture));
                            break;
                    }
                }
                else
                {
                    // Field absent from this row: write a sensible empty value
                    // (blobs/strings get empty, booleans false, everything else null).
                    switch (_bufferLookupDataType[fieldName])
                    {
                        case DataType.DT_NTEXT:
                            outputBuffer.AddBlobData(_bufferLookup[fieldName],
                                Encoding.Unicode.GetBytes(String.Empty));
                            break;
                        case DataType.DT_WSTR:
                            outputBuffer.SetString(_bufferLookup[fieldName], String.Empty);
                            break;
                        case DataType.DT_BOOL:
                            outputBuffer.SetBoolean(_bufferLookup[fieldName], false);
                            break;
                        default:
                            outputBuffer.SetNull(_bufferLookup[fieldName]);
                            break;
                    }
                }
            }
        }
    }

    // Report what happened to the pipeline log and perf counters.
    string infoMsg = string.Format(CultureInfo.InvariantCulture,
        "Loaded {0} records from list '{1}' at '{2}'. Elapsed time is {3}ms",
        actualRowCount, sharepointList, sharepointUrl, timer.ElapsedMilliseconds);
    ComponentMetaData.FireInformation(0, ComponentMetaData.Name, infoMsg, "", 0, ref fireAgain);
    ComponentMetaData.IncrementPipelinePerfCounter(DTS_PIPELINE_CTR_ROWSREAD, (uint)actualRowCount);
    outputBuffer.SetEndOfRowset();
}
/// <summary>
/// This is where the data is read from the input buffer. Each row is converted
/// into a dictionary of field name -> <see cref="FieldValue"/> (value rendered
/// as a culture-formatted string plus a type tag), queued, and then pushed to
/// the destination via <c>CreateDataset</c>/<c>AddClassRows</c>. Null input
/// columns are skipped. Optionally clears existing rows first (C_REMOVERECORDS).
/// </summary>
/// <param name="inputID">ID of the input being processed.</param>
/// <param name="buffer">Pipeline buffer containing the incoming rows.</param>
public override void ProcessInput(int inputID, PipelineBuffer buffer)
{
    // NOTE(review): removed locals that were assigned but never read
    // (sharepointUrl/List/ListView, batchSize, batchType, resultData) and a
    // Stopwatch whose elapsed time was never reported.
    if (!buffer.EndOfRowset)
    {
        // Queue the data up for batching by the sharepoint accessor object.
        var dataQueue = new List<Dictionary<string, FieldValue>>();
        while (buffer.NextRow())
        {
            var rowData = new Dictionary<string, FieldValue>();
            foreach (var fieldName in _bufferLookup.Keys)
            {
                if (buffer.IsNull(_bufferLookup[fieldName]))
                {
                    // Do nothing, can ignore this field.
                }
                else
                {
                    FieldValue fieldObj = new FieldValue();
                    switch (_bufferLookupDataType[fieldName])
                    {
                        case DataType.DT_STR:
                        case DataType.DT_WSTR:
                            fieldObj.value = buffer.GetString(_bufferLookup[fieldName]);
                            fieldObj.type = "string";
                            rowData.Add(fieldName, fieldObj);
                            break;
                        case DataType.DT_NTEXT:
                            // Blob columns: read the raw bytes and decode as UTF-16.
                            int colDataLength = (int)buffer.GetBlobLength(_bufferLookup[fieldName]);
                            byte[] stringData = buffer.GetBlobData(_bufferLookup[fieldName], 0, colDataLength);
                            fieldObj.value = Encoding.Unicode.GetString(stringData);
                            fieldObj.type = "string";
                            rowData.Add(fieldName, fieldObj);
                            break;
                        case DataType.DT_R4:
                            fieldObj.value = buffer.GetSingle(_bufferLookup[fieldName]).ToString(_culture);
                            fieldObj.type = "Double";
                            rowData.Add(fieldName, fieldObj);
                            break;
                        case DataType.DT_CY:
                            fieldObj.value = buffer.GetDecimal(_bufferLookup[fieldName]).ToString(_culture);
                            fieldObj.type = "Double";
                            rowData.Add(fieldName, fieldObj);
                            break;
                        case DataType.DT_R8:
                            fieldObj.value = buffer.GetDouble(_bufferLookup[fieldName]).ToString(_culture);
                            fieldObj.type = "Double";
                            rowData.Add(fieldName, fieldObj);
                            break;
                        case DataType.DT_UI1:
                        case DataType.DT_I1:
                        case DataType.DT_BOOL:
                            fieldObj.value = buffer.GetBoolean(_bufferLookup[fieldName]).ToString(_culture);
                            fieldObj.type = "Boolean";
                            rowData.Add(fieldName, fieldObj);
                            break;
                        case DataType.DT_UI2:
                        case DataType.DT_I2:
                            fieldObj.value = buffer.GetInt16(_bufferLookup[fieldName]).ToString(_culture);
                            fieldObj.type = "Int64";
                            rowData.Add(fieldName, fieldObj);
                            break;
                        case DataType.DT_UI4:
                        case DataType.DT_I4:
                            fieldObj.value = buffer.GetInt32(_bufferLookup[fieldName]).ToString(_culture);
                            fieldObj.type = "Int64";
                            rowData.Add(fieldName, fieldObj);
                            break;
                        case DataType.DT_UI8:
                        case DataType.DT_I8:
                            fieldObj.value = buffer.GetInt64(_bufferLookup[fieldName]).ToString(_culture);
                            fieldObj.type = "Int64";
                            rowData.Add(fieldName, fieldObj);
                            break;
                        case DataType.DT_GUID:
                            fieldObj.value = buffer.GetGuid(_bufferLookup[fieldName]).ToString();
                            fieldObj.type = "String";
                            rowData.Add(fieldName, fieldObj);
                            break;
                        case DataType.DT_DBTIMESTAMP:
                            // "u" format with the space replaced gives ISO 8601 ("...T...").
                            fieldObj.value = buffer.GetDateTime(_bufferLookup[fieldName]).ToString("u").Replace(" ", "T");
                            fieldObj.type = "Datetime";
                            rowData.Add(fieldName, fieldObj);
                            break;
                        case DataType.DT_DATE:
                            fieldObj.value = buffer.GetDateTime(_bufferLookup[fieldName]).ToString("yyyy-MM-dd");
                            fieldObj.type = "Datetime";
                            rowData.Add(fieldName, fieldObj);
                            break;
                    }
                }
            }
            dataQueue.Add(rowData);
        }

        bool fireAgain = false;

        // Optionally clear the destination before loading the new batch.
        Enums.TrueFalseValue removeRecords =
            (Enums.TrueFalseValue)ComponentMetaData.CustomPropertyCollection[C_REMOVERECORDS].Value;
        if (removeRecords == Enums.TrueFalseValue.True)
        {
            ClearRows();
        }

        if (dataQueue.Count > 0)
        {
            // Ensure the target dataset exists, then push the queued rows.
            CreateDataset(dataQueue);
            AddClassRows(dataQueue);
        }
        else
        {
            ComponentMetaData.FireInformation(0, ComponentMetaData.Name,
                "No rows found to update in destination.", "", 0, ref fireAgain);
        }
    }
}