/// <summary>
/// Extract tags from page descriptors.
/// </summary>
/// <param name="pageDescriptors">
/// XML document describing pages in the OneNote hierarchy or search result.
/// </param>
/// <param name="selectedPagesOnly">true to process only pages selected by user</param>
/// <param name="omitUntaggedPages">true to drop pages that carry no tags</param>
internal void ExtractTags(XDocument pageDescriptors, bool selectedPagesOnly, bool omitUntaggedPages = false)
{
    // rebuild the tag and page indexes from scratch
    _tags.Clear();
    _pages.Clear();
    try
    {
        XNamespace one = pageDescriptors.Root.GetNamespaceOfPrefix("one");
        var knownTags = new Dictionary<string, TagPageSet>();
        foreach (XElement pageElement in pageDescriptors.Descendants(one.GetName("Page")))
        {
            var taggedPage = new TaggedPage(pageElement);
            if (selectedPagesOnly && !taggedPage.IsSelected)
            {
                continue;
            }
            // link the page with its tags, creating tag records on first sight
            int tagsOnPage = 0;
            foreach (string tagname in taggedPage.TagNames)
            {
                tagsOnPage++;
                TagPageSet tag;
                if (!knownTags.TryGetValue(tagname, out tag))
                {
                    tag = new TagPageSet(tagname);
                    knownTags.Add(tagname, tag);
                }
                tag.AddPage(taggedPage);
                taggedPage.Tags.Add(tag);
            }
            if (tagsOnPage > 0 || !omitUntaggedPages)
            {
                _pages.Add(taggedPage.Key, taggedPage);
            }
        }
        // bulk update for performance reasons
        _tags.UnionWith(knownTags.Values);
        TraceLogger.Log(TraceCategory.Info(), "Extracted {0} tags from {1} pages.", _tags.Count, _pages.Count);
    }
    catch (Exception ex)
    {
        TraceLogger.Log(TraceCategory.Error(), "Parsing Hierarchy data failed: {0}", ex);
        TraceLogger.Flush();
    }
}
/// <summary>
/// Remove a broker from the shared broker store.
/// </summary>
/// <param name="key">store key of the broker to remove; null/whitespace keys are ignored</param>
/// <returns>always true (kept for caller compatibility)</returns>
public static bool Delete(string key)
{
    bool returnVal = true;
    // Capture the count inside the lock: the original read
    // brokerStore.Count outside the lock, racing concurrent writers.
    int remaining;
    lock (brokerStore)
    {
        if (!string.IsNullOrWhiteSpace(key))
        {
            // Remove() is a no-op on a missing key; avoids the
            // ContainsKey + Remove double lookup.
            brokerStore.Remove(key);
        }
        remaining = brokerStore.Count;
    }
    TraceLogger.Log(string.Format("BrokerStore.Delete - Broker deleted from broker store [Count: {0}]", remaining));
    return returnVal;
}
/// <summary>
/// Re-run the current search when the user changes the search scope.
/// </summary>
/// <param name="sender">scope selector control</param>
/// <param name="e">scope change details</param>
private async void ScopeSelector_ScopeChanged(object sender, ScopeChangedEventArgs e)
{
    // Must be set before the first await — after an await the routed event
    // has already finished bubbling and Handled has no effect.
    e.Handled = true;
    string query = searchComboBox.Text;
    try
    {
        pBar.Visibility = System.Windows.Visibility.Visible;
        // async/await instead of ContinueWith: the old continuation never
        // observed a faulted task, so search failures were silently swallowed.
        await _model.FindPagesAsync(query, scopeSelect.SelectedScope);
        searchComboBox.SelectedValue = query;
    }
    catch (System.Exception ex)
    {
        TraceLogger.Log(TraceCategory.Error(), "Changing search scope failed: {0}", ex);
        TraceLogger.ShowGenericErrorBox(Properties.Resources.TagSearch_Error_ScopeChange, ex);
    }
    finally
    {
        // always hide the progress bar, even when the search failed
        pBar.Visibility = System.Windows.Visibility.Hidden;
    }
}
/// <summary>
/// Run the search entered in the combo box when the search button is clicked.
/// </summary>
/// <param name="sender">search button</param>
/// <param name="e">click event details</param>
private async void SearchButton_Click(object sender, RoutedEventArgs e)
{
    // Set before the first await — after an await the routed event has
    // already finished bubbling and Handled has no effect.
    e.Handled = true;
    string query = searchComboBox.Text;
    try
    {
        pBar.Visibility = System.Windows.Visibility.Visible;
        await _model.FindPagesAsync(query, scopeSelect.SelectedScope);
        searchComboBox.SelectedValue = query;
    }
    catch (System.Exception ex)
    {
        TraceLogger.Log(TraceCategory.Error(), "search for '{0}' failed: {1}", query, ex);
        TraceLogger.ShowGenericErrorBox(Properties.Resources.TagSearch_Error_Find, ex);
    }
    finally
    {
        // BUGFIX: previously hidden only on success, so a failed search
        // left the progress bar spinning forever.
        pBar.Visibility = System.Windows.Visibility.Hidden;
    }
}
/// <summary>
/// Run the background tagger.
/// </summary>
/// <returns>long-running task processing the tagging job queue until it is completed or canceled</returns>
public Task Run()
{
    // LongRunning hints the scheduler to use a dedicated thread, since the
    // loop below blocks on _jobs.Take().
    TaskFactory tf = new TaskFactory(TaskCreationOptions.LongRunning, TaskContinuationOptions.None);
    CancellationToken cancel = _cancel.Token;
    return (tf.StartNew(() =>
    {
        TraceLogger.Log(TraceCategory.Info(), "Background tagging service started");
        try
        {
            OneNotePageProxy lastPage = null;
            while (!_jobs.IsCompleted)
            {
                // NOTE(review): Take() is called without the cancellation token,
                // so cancellation is only observed after a job arrives — presumably
                // shutdown also completes the queue; confirm.
                TaggingJob j = _jobs.Take();
                cancel.ThrowIfCancellationRequested();
                try
                {
                    // the page may be carried forward between consecutive jobs
                    lastPage = j.Execute(_onenote, lastPage);
                    if (lastPage != null && _jobs.Count == 0)
                    {
                        // no more pending pages - must update the last one and stop carrying forward
                        lastPage.Update();
                        lastPage = null;
                    }
                }
                catch (Exception e)
                {
                    // drop the carried page so one failed job cannot poison later ones
                    lastPage = null;
                    TraceLogger.ShowGenericErrorBox("page tagging failed", e);
                }
            }
        }
        catch (InvalidOperationException)
        {
            // Take() throws InvalidOperationException once the queue is marked
            // complete and drained — this is the normal shutdown path.
            TraceLogger.Log(TraceCategory.Warning(), "Background tagging job queue depleted");
            TraceLogger.Flush();
        }
        catch (OperationCanceledException)
        {
            TraceLogger.Log(TraceCategory.Warning(), "Background tagging canceled");
            TraceLogger.Flush();
        }
    }, cancel));
}
/// <summary>
/// Enqueue background tagging jobs for all pages in the given scope.
/// </summary>
/// <param name="op">tagging operation to apply to each page</param>
/// <param name="scope">which pages to tag</param>
/// <returns>number of pages enqueued for tagging</returns>
internal int EnqueuePagesForTagging(TagOperation op, TaggingScope scope)
{
    // bring suggestions up-to-date with new tags that may have been entered
    TagSuggestions.AddAll(_pageTags
        .Where(t => !TagSuggestions.ContainsKey(t.Key))
        .Select(t => new HitHighlightedTagButtonModel() { TagName = t.TagName }));
    TagSuggestions.Save();

    // map the UI scope onto a tag collection context
    TagContext ctx;
    switch (scope)
    {
        case TaggingScope.SelectedNotes:
            ctx = TagContext.SelectedNotes;
            break;
        case TaggingScope.CurrentSection:
            ctx = TagContext.CurrentSection;
            break;
        default:
            ctx = TagContext.CurrentNote;
            break;
    }

    TagsAndPages collection = new TagsAndPages(OneNoteApp);
    collection.LoadPageTags(ctx);

    string[] pageTags = _pageTags.Values.Select(t => t.TagName).ToArray();
    int enqueuedPages = 0;
    foreach (var page in collection.Pages)
    {
        OneNoteApp.TaggingService.Add(new TaggingJob(page.Key, pageTags, op));
        enqueuedPages++;
    }
    TraceLogger.Log(TraceCategory.Info(), "{0} page(s) enqueued for tagging with '{1}' using {2}", enqueuedPages, string.Join(";", pageTags), op);
    TraceLogger.Flush();
    return enqueuedPages;
}
/// <summary>
/// Apply the entered tags to pages in the selected tagging scope.
/// </summary>
/// <param name="op">tagging operation to schedule</param>
private void ApplyPageTags(TagOperation op)
{
    tagInput.FocusInput();
    try
    {
        var scopeDescriptor = (TaggingScopeDescriptor)taggingScope.SelectedItem;
        int taggedCount = _model.EnqueuePagesForTagging(op, scopeDescriptor.Scope);
        // reset the input controls for the next tagging round
        taggingScope.SelectedIndex = 0;
        tagInput.Clear();
        suggestedTags.Highlighter = new TextSplitter();
        if (taggedCount == 0)
        {
            suggestedTags.Notification = Properties.Resources.TagEditor_Popup_NothingTagged;
        }
        else
        {
            suggestedTags.Notification = string.Format(Properties.Resources.TagEditor_Popup_TaggingInProgress, taggedCount);
        }
    }
    catch (Exception xe)
    {
        TraceLogger.Log(TraceCategory.Error(), "Applying tags to page failed: {0}", xe);
        TraceLogger.ShowGenericErrorBox(Properties.Resources.TagEditor_TagUpdate_Error, xe);
    }
}
/// <summary>
/// Navigate to a OneNote page when its link is clicked, and maintain the
/// list selection (Ctrl-click toggles membership, plain click selects).
/// </summary>
/// <param name="sender">the clicked page link control</param>
/// <param name="e">click event details</param>
private void Hyperlink_Click(object sender, RoutedEventArgs e)
{
    try
    {
        // BUGFIX: the DataContext cast was unchecked — a null model caused a
        // NullReferenceException that was only masked by the catch-all below.
        if (sender is HitHighlightedPageLink link
            && link.DataContext is HitHighlightedPageLinkModel model)
        {
            _model.NavigateTo(model.PageID);
            if (Keyboard.IsKeyDown(Key.LeftCtrl) || Keyboard.IsKeyDown(Key.RightCtrl))
            {
                // Ctrl-click toggles this page in the multi-selection
                int ndx = foundPagesList.SelectedItems.IndexOf(model);
                if (ndx >= 0)
                {
                    foundPagesList.SelectedItems.RemoveAt(ndx);
                }
                else
                {
                    foundPagesList.SelectedItems.Add(model);
                }
            }
            else
            {
                // select the link
                foundPagesList.SelectedItem = model;
            }
            e.Handled = true;
        }
    }
    catch (System.Exception ex)
    {
        TraceLogger.Log(TraceCategory.Error(), "Navigation to OneNote page failed: {0}", ex);
        TraceLogger.ShowGenericErrorBox(Properties.Resources.TagSearch_Error_PageNavigation, ex);
    }
}
/// <summary>
/// Log a message to the screen, adding the current managed thread ID
/// </summary>
/// <param name="methodName">name of the calling method, included in the log line</param>
/// <param name="message">message text to log</param>
private void LogMessage(string methodName, string message)
{
    // thread id padded to width 2 so log columns line up
    int threadId = Thread.CurrentThread.ManagedThreadId;
    TL?.Log(LogLevel.Information, $"AlpacaDiscovery - {methodName} - {threadId,2} {message}");
}
/// <summary>
/// Execute the RPC call: log the arguments, invoke the broker, then process
/// the response unless it reports failure.
/// </summary>
/// <returns>the broker's response to this RPC call</returns>
public virtual RpcResponse Execute()
{
    TraceLogger.Log(BuildArgumentLog());
    this.Response = this.broker.CallRpc(this.Context, this.RpcName, this.Version, this.CommandArgs);
    // TODO: Remove this tracing...?
    TraceLogger.Log(string.Format("Result from '{0}':\n\r{1}", this.RpcName, this.Response.Data));
    if (this.Response.Status != RpcResponseStatus.Fail)
    {
        this.ProcessResponse();
    }
    LogIt();
    return (this.Response);
}

/// <summary>
/// Build the human-readable trace of this call's arguments, one numbered
/// line per argument (string arrays get one sub-indexed line per element).
/// </summary>
private string BuildArgumentLog()
{
    StringBuilder sb = new StringBuilder();
    sb.AppendLine(string.Format("Calling RPC, {0}, with Arguments:", this.RpcName));
    if (this.CommandArgs == null)
    {
        sb.AppendLine("None");
        return sb.ToString();
    }
    int count = 1;
    foreach (var arg in this.CommandArgs)
    {
        if (arg == null)
        {
            // null argument: log the slot with an empty value
            sb.AppendLine(string.Format("{0}: {1}", count++, ""));
        }
        else if (arg is string[] stringArgs)
        {
            if (stringArgs.Length == 0)
            {
                sb.AppendLine(string.Format("{0}:", count));
            }
            else
            {
                int subCount = 0;
                foreach (string subArg in stringArgs)
                {
                    // blank/whitespace elements are logged as empty
                    string text = string.IsNullOrWhiteSpace(subArg) ? "" : subArg;
                    sb.AppendLine(string.Format("{0}[{1}]: {2}", count, subCount++, text));
                }
            }
            count++;
        }
        else
        {
            sb.AppendLine(string.Format("{0}: {1}", count++, arg.ToString()));
        }
    }
    return sb.ToString();
}
/// <summary>
/// Window-enumeration callback: collect handles whose caption matches the
/// configured pattern into the supplied ArrayList.
/// </summary>
/// <param name="windowHandle">handle of the window being inspected</param>
/// <param name="windowHandlesObj">an ArrayList receiving matching handles</param>
/// <returns>true to continue enumeration; false if an error was logged</returns>
protected virtual bool GetWindowHandle(IntPtr windowHandle, Object windowHandlesObj)
{
    try
    {
        ArrayList matches = windowHandlesObj as ArrayList;
        string caption = Win32API.GetWindowText(windowHandle);
        Regex captionPattern = new Regex(windowCaptionToLocate);
        if (captionPattern.IsMatch(caption))
        {
            matches.Add(windowHandle);
        }
        return (true);
    }
    catch (Exception ex)
    {
        LogWriter.Log(ex);
        return (false);
    }
}
/// <summary>
/// Raised when an action is sent to this control
/// </summary>
/// <param name="args">args for the action</param>
protected override void DoAction(Microsoft.Uii.Csr.RequestActionEventArgs args)
{
    // Log process.
    LogWriter.Log(string.Format(CultureInfo.CurrentCulture, "{0} -- DoAction called for action: {1}", this.ApplicationName, args.Action), System.Diagnostics.TraceEventType.Information);

    // Process Actions.
    if (args.Action.Equals("SetExpanderState", StringComparison.OrdinalIgnoreCase))
    {
        List<KeyValuePair<string, string>> actionDataList = Utility.SplitLines(args.Data, CurrentContext, localSession);
        // camelCase locals (were PascalCase)
        string expanderName = Utility.GetAndRemoveParameter(actionDataList, "ExpanderName");
        string requestedState = Utility.GetAndRemoveParameter(actionDataList, "State");

        // Call back to process the event to make sure we are on the same thread.
        Dispatcher.BeginInvoke(DispatcherPriority.Normal, (Action)(() =>
        {
            // Only act if we have the right data and the panel is in the right state.
            // Single pattern match replaces the repeated unchecked casts.
            if (this.FindName(expanderName) is Expander expander)
            {
                if (requestedState.Equals("expanded", StringComparison.OrdinalIgnoreCase) && !expander.IsExpanded)
                {
                    expander.IsExpanded = true;
                }
                if (requestedState.Equals("collapsed", StringComparison.OrdinalIgnoreCase) && expander.IsExpanded)
                {
                    expander.IsExpanded = false;
                }
            }
        }));
    }

    #region Example process action
    //if (args.Action.Equals("your action name", StringComparison.OrdinalIgnoreCase))
    //{
    //    // Do some work
    //    // Access CRM and fetch a Record
    //    Microsoft.Xrm.Sdk.Messages.RetrieveRequest req = new Microsoft.Xrm.Sdk.Messages.RetrieveRequest();
    //    req.Target = new Microsoft.Xrm.Sdk.EntityReference( "account" , Guid.Parse("0EF05F4F-0D39-4219-A3F5-07A0A5E46FD5"));
    //    req.ColumnSet = new Microsoft.Xrm.Sdk.Query.ColumnSet("accountid" , "name" );
    //    Microsoft.Xrm.Sdk.Messages.RetrieveResponse response = (Microsoft.Xrm.Sdk.Messages.RetrieveResponse)this._client.CrmInterface.ExecuteCrmOrganizationRequest(req, "Requesting Account");

    //    // Example of pulling some data out of the passed in data array
    //    List<KeyValuePair<string, string>> actionDataList = Utility.SplitLines(args.Data, CurrentContext, localSession);
    //    string valueIwant = Utility.GetAndRemoveParameter(actionDataList, "mykey"); // assume there is a myKey=<value> in the data.

    //    // Example of pushing data to USD
    //    string global = Utility.GetAndRemoveParameter(actionDataList, "global"); // Assume there is a global=true/false in the data
    //    bool saveInGlobalSession = false;
    //    if (!String.IsNullOrEmpty(global))
    //        saveInGlobalSession = bool.Parse(global);

    //    Dictionary<string, CRMApplicationData> myDataToSet = new Dictionary<string, CRMApplicationData>();
    //    // add a string:
    //    myDataToSet.Add("myNewKey", new CRMApplicationData() { name = "myNewKey", type = "string", value = "TEST" });
    //    // add a entity lookup:
    //    myDataToSet.Add("myNewKey", new CRMApplicationData() { name = "myAccount", type = "lookup", value = "account,0EF05F4F-0D39-4219-A3F5-07A0A5E46FD5,MyAccount" });

    //    if (saveInGlobalSession)
    //    {
    //        // add context item to the global session
    //        ((DynamicsCustomerRecord)((AgentDesktopSession)localSessionManager.GlobalSession).Customer.DesktopCustomer).MergeReplacementParameter(this.ApplicationName, myDataToSet, true);
    //    }
    //    else
    //    {
    //        // Add context item to the current session.
    //        ((DynamicsCustomerRecord)((AgentDesktopSession)localSessionManager.ActiveSession).Customer.DesktopCustomer).MergeReplacementParameter(this.ApplicationName, myDataToSet, true);
    //    }
    //}
    #endregion

    base.DoAction(args);
}
/// <summary>
/// Add items to the sorted collection in batches.
/// </summary>
/// <remarks>
/// Groups the given items into contiguous ranges of batches and adds each batch at
/// once, firing one change notification per batch.
/// </remarks>
/// <param name="items">items to add</param>
internal void AddAll(IEnumerable<TValue> items)
{
    // pending insertions as (insertion index, item) pairs
    List<KeyValuePair<int, TValue>> toAdd = new List<KeyValuePair<int, TValue>>();
    foreach (TValue item in items)
    {
        if (!_dictionary.ContainsKey(item.Key))
        {
            // lookup insertion point
            int insertionPoint = _sortedList.BinarySearch(new KeyValuePair<TSort, TValue>(item.SortKey, item), _comparer);
#if DEBUG
            Debug.Assert(insertionPoint < 0, string.Format("Item with key {0} already present in list at index {1}", item.Key, insertionPoint));
#endif
            if (insertionPoint < 0)
            {
                // negative result is the bitwise complement of the insertion
                // index (standard BinarySearch contract)
                _dictionary.Add(item.Key, item);
                toAdd.Add(new KeyValuePair<int, TValue>(~insertionPoint, item));
            }
            else
            {
                // sorted list and dictionary disagree; keep the dictionary
                // authoritative and skip the list insert
                TraceLogger.Log(TraceCategory.Error(), "List is inconsistency! Attempting to recover");
                TraceLogger.Flush();
                _dictionary.Add(item.Key, item);
            }
        }
    }
    // order pending items by insertion index so equal indexes are adjacent
    toAdd.Sort(_indexComparer);
    // process the sorted list of items to add in reverse order so that we do not
    // have to correct indices
    while (toAdd.Count > 0)
    {
        List<KeyValuePair<TSort, TValue>> batch = new List<KeyValuePair<TSort, TValue>>();
        int lastItemIndex = toAdd.Count - 1;
        KeyValuePair<int, TValue> itemToAdd = toAdd[lastItemIndex];
        // add the first item to the batch
        int insertionPoint = itemToAdd.Key;
        batch.Add(new KeyValuePair<TSort, TValue>(itemToAdd.Value.SortKey, itemToAdd.Value));
        toAdd.RemoveAt(lastItemIndex);
        lastItemIndex = toAdd.Count - 1;
        // pull every remaining item sharing this insertion point into the batch
        while (lastItemIndex >= 0 && toAdd[lastItemIndex].Key == insertionPoint)
        {
            itemToAdd = toAdd[lastItemIndex];
            batch.Add(new KeyValuePair<TSort, TValue>(itemToAdd.Value.SortKey, itemToAdd.Value));
            toAdd.RemoveAt(lastItemIndex);
            lastItemIndex = toAdd.Count - 1;
        }
        // restore sort order within the batch before the range insert
        batch.Sort(_comparer);
        _sortedList.InsertRange(insertionPoint, batch);
        if (CollectionChanged != null)
        {
            // one change notification per contiguous batch
            NotifyCollectionChangedEventArgs args = new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Add, (from b in batch select b.Value).ToList(), insertionPoint);
            CollectionChanged(this, args);
        }
    }
}
/// <summary>
/// Remove or rename tag from suggestions when user control is tapped.
/// </summary>
/// <param name="sender">user control emitting this event</param>
/// <param name="e"> event details</param>
private void Tag_Action(object sender, RoutedEventArgs e)
{
    var rt = sender as RemovableTag;
    var rt_mdl = rt.DataContext as RemovableTagModel;
    // the tag being deleted/renamed, as a single-element array for the job API
    string[] toRemove = new string[] { rt_mdl.Key };
    // the control's Tag property carries the requested action name
    if ("DeleteTag".Equals(rt.Tag))
    {
        _model.SuggestedTags.RemoveAll(toRemove);
        // schedule all pages with this tag for tag removal
        if (rt_mdl.Tag != null)
        {
            foreach (var tp in rt_mdl.Tag.Pages)
            {
                _model.OneNoteApp.TaggingService.Add(new TaggingJob(tp.ID, toRemove, TagOperation.SUBTRACT));
            }
            suggestedTags.Notification = rt_mdl.Tag.Pages.Count == 0 ? Properties.Resources.TagEditor_Popup_NothingTagged : string.Format(Properties.Resources.TagEditor_Popup_TaggingInProgress, rt_mdl.Tag.Pages.Count);
            TraceLogger.Log(TraceCategory.Info(), "{0} page(s) enqueued for background tagging; Operation SUBTRACT {1}", rt_mdl.Tag.Pages.Count, toRemove[0]);
        }
        else
        {
            // tag is not on any page - nothing to schedule
            suggestedTags.Notification = Properties.Resources.TagEditor_Popup_NothingTagged;
        }
    }
    else if ("RenameTag".Equals(rt.Tag))
    {
        _model.SuggestedTags.RemoveAll(toRemove);
        // NOTE(review): ParseTags may yield several names from the entered
        // text (presumably a separated list) — a rename can fan out into
        // multiple new tags; confirm against OneNotePageProxy.ParseTags.
        string[] newTagNames = (from tn in OneNotePageProxy.ParseTags(rt_mdl.LocalName) select TagFormatter.Format(tn)).ToArray();
        // create new tag models unless they already exist
        List<RemovableTagModel> newTagModels = new List<RemovableTagModel>();
        foreach (var newName in newTagNames)
        {
            RemovableTagModel tagmodel;
            if (!_model.SuggestedTags.TryGetValue(newName, out tagmodel))
            {
                tagmodel = new RemovableTagModel() { Tag = new TagPageSet(newName) };
                newTagModels.Add(tagmodel);
            }
            else if (tagmodel.Tag == null && rt_mdl.Tag != null)
            {
                // existing suggestion without page info - attach a page set
                tagmodel.Tag = new TagPageSet(newName);
            }
            if (rt_mdl.Tag != null)
            {
                // copy the pages into the new tag and update the tag count
                foreach (var pg in rt_mdl.Tag.Pages)
                {
                    tagmodel.Tag.Pages.Add(pg);
                }
                tagmodel.UseCount = tagmodel.Tag.Pages.Count;
            }
        }
        _model.SuggestedTags.AddAll(newTagModels);
        if (rt_mdl.Tag != null)
        {
            // remove the old tag and add new tag to the pages
            foreach (var tp in rt_mdl.Tag.Pages)
            {
                _model.OneNoteApp.TaggingService.Add(new TaggingJob(tp.ID, toRemove, TagOperation.SUBTRACT));
                _model.OneNoteApp.TaggingService.Add(new TaggingJob(tp.ID, newTagNames, TagOperation.UNITE));
            }
            suggestedTags.Notification = rt_mdl.Tag.Pages.Count == 0 ? Properties.Resources.TagEditor_Popup_NothingTagged : string.Format(Properties.Resources.TagEditor_Popup_TaggingInProgress, rt_mdl.Tag.Pages.Count);
            TraceLogger.Log(TraceCategory.Info(), "{0} page(s) enqueued for background tagging; Operation UNITE {1} SUBTRACT {2}", rt_mdl.Tag.Pages.Count, string.Join(",", newTagNames), toRemove[0]);
        }
        else
        {
            suggestedTags.Notification = Properties.Resources.TagEditor_Popup_NothingTagged;
        }
    }
    TraceLogger.Flush();
    _model.SaveChanges();
}
/// <summary>
/// Dispatch the context-menu actions of the found-pages list: refresh the
/// search, change the selection, mark selected pages with a tag, or copy
/// links to the selected pages to the clipboard as HTML.
/// </summary>
/// <param name="sender">the clicked menu item; its Tag names the action</param>
/// <param name="e">click event details</param>
private async void Page_MenuItem_Click(object sender, RoutedEventArgs e)
{
    MenuItem item = sender as MenuItem;
    if (item != null)
    {
        switch (item.Tag.ToString())
        {
            case "Refresh":
                // re-run the current query (same flow as the search button)
                string query = searchComboBox.Text;
                try
                {
                    pBar.Visibility = System.Windows.Visibility.Visible;
                    await _model.FindPagesAsync(query, scopeSelect.SelectedScope);
                    searchComboBox.SelectedValue = query;
                    pBar.Visibility = System.Windows.Visibility.Hidden;
                }
                catch (System.Exception ex)
                {
                    // NOTE(review): on failure pBar stays visible — same
                    // issue as the search button handler; confirm intent.
                    TraceLogger.Log(TraceCategory.Error(), "search for '{0}' failed: {1}", query, ex);
                    TraceLogger.ShowGenericErrorBox(Properties.Resources.TagSearch_Error_Find, ex);
                }
                break;
            case "ClearSelection":
                foundPagesList.UnselectAll();
                break;
            case "SelectAll":
                foundPagesList.SelectAll();
                break;
            case "MarkSelection":
                // enqueue a background job adding the marker tag to each selected page
                string[] marker = new string[] { "-✩-" };
                int pagesTagged = 0;
                foreach (var mdl in _model.Pages.Where((p) => p.IsSelected))
                {
                    _model.OneNoteApp.TaggingService.Add(new Tagger.TaggingJob(mdl.PageID, marker, Tagger.TagOperation.UNITE));
                    pagesTagged++;
                }
                tagsPanel.Notification = pagesTagged == 0 ? Properties.Resources.TagEditor_Popup_NothingTagged : string.Format(Properties.Resources.TagEditor_Popup_TaggingInProgress, pagesTagged);
                break;
            case "CopyLinks":
                // CF_HTML clipboard header; the {n:D6} placeholders are patched
                // below with byte offsets of the fragment boundaries.
                // NOTE(review): the CF_HTML spec separates these fields with line
                // breaks — confirm the literal's whitespace survived editing.
                string header = @"Version:0.9 StartHTML:{0:D6} EndHTML:{1:D6} StartFragment:{2:D6} EndFragment:{3:D6} StartSelection:{4:D6} EndSelection:{5:D6}";
                string htmlpre = @"<HTML> <BODY> <!--StartFragment-->";
                StringBuilder links = new StringBuilder();
                foreach (var mdl in _model.Pages.Where((p) => p.IsSelected))
                {
                    string pageTitle = mdl.LinkTitle;
                    try
                    {
                        if (links.Length > 0)
                        {
                            links.Append("<br />");
                        }
                        links.Append(@"<a href=""");
                        links.Append(mdl.PageLink);
                        links.Append(@""">");
                        links.Append(mdl.LinkTitle);
                        links.Append("</a>");
                    }
                    catch (Exception ex)
                    {
                        TraceLogger.Log(TraceCategory.Error(), "Link to page '{0}' could not be created: {1}", pageTitle, ex);
                        TraceLogger.ShowGenericErrorBox(Properties.Resources.TagSearch_Error_CopyLink, ex);
                    }
                }
                string htmlpost = @"<!--EndFragment--> </BODY> </HTML>";
                string strLinks = links.ToString();
                // compute the CF_HTML offsets: fragment starts after header+prefix
                // and ends before the closing markup
                string clip = string.Format(header, header.Length, header.Length + htmlpre.Length + strLinks.Length + htmlpost.Length, header.Length + htmlpre.Length, header.Length + htmlpre.Length + strLinks.Length, header.Length + htmlpre.Length, header.Length + htmlpre.Length + strLinks.Length) + htmlpre + strLinks + htmlpost;
                Clipboard.SetText(clip, TextDataFormat.Html);
                break;
        }
        e.Handled = true;
    }
}
/// <summary>
/// Write an error-level entry to the underlying log writer.
/// </summary>
/// <param name="message">text to record</param>
public void LogError(string message) => _logWriter.Log(message, TraceEventType.Error);
/// <summary>
/// Read queueitems.json and enqueue each listed record on its destination
/// queue, executing the requests in batches of QUEUEITEMBATCHSIZE.
/// </summary>
/// <param name="logger">package-deployer trace logger</param>
/// <param name="service">CRM connection used to execute the requests</param>
/// <param name="filepath">path to queueitems.json</param>
internal static void AddToQueue(TraceLogger logger, CrmServiceClient service, string filepath)
{
    const string SEPERATOR = "------------------------------------------------------";
    logger.Log(SEPERATOR, TraceEventType.Information);
    logger.Log("Starting to queue items... this may take a while... ", TraceEventType.Information);
    if (!File.Exists(filepath))
    {
        // BUGFIX: the message previously contained the literal text "$filepath"
        // instead of the interpolated path.
        logger.Log(new FileNotFoundException($"Unable to locate queueitems.json at {filepath}"));
        logger.Log(SEPERATOR, TraceEventType.Information);
        return;
    }
    var json = File.ReadAllText(filepath);
    if (string.IsNullOrWhiteSpace(json))
    {
        logger.Log(new FileLoadException("Queueitems.json was empty."));
        logger.Log(SEPERATOR, TraceEventType.Information);
        return;
    }
    try
    {
        dynamic data = JsonConvert.DeserializeObject(json);
        var requests = new OrganizationRequestCollection();
        if (data == null)
        {
            logger.Log("Json data was null", TraceEventType.Information);
            logger.Log(SEPERATOR, TraceEventType.Information);
            return;
        }
        // i counts items seen; j is the batch number used for logging only
        int i = 0, j = 0;
        foreach (var queueitem in data)
        {
            i++;
            if (i >= QUEUEITEMBATCHSIZE && i % QUEUEITEMBATCHSIZE == 0)
            {
                j++;
            }
            var request = new AddToQueueRequest();
            if (Guid.TryParse(queueitem.queueid.ToString(), out Guid queueid))
            {
                if (Guid.TryParse(queueitem.targetid.ToString(), out Guid targetid))
                {
                    if (!String.IsNullOrEmpty(queueitem.targetentity.ToString()))
                    {
                        try
                        {
                            request.Target = new EntityReference(queueitem.targetentity.ToString(), targetid);
                            request.DestinationQueueId = queueid;
                            request.QueueItemProperties = new Entity("queueitem");
                            requests.Add(request);
                            if (requests.Count >= QUEUEITEMBATCHSIZE)
                            {
                                // NOTE(review): assumes ExecuteMultipleRequests drains
                                // the collection — confirm, otherwise batches grow unbounded.
                                ExecuteMultipleRequests(logger, service, requests, j);
                            }
                        }
                        catch (Exception ex)
                        {
                            logger.Log("Unable to queue current item.");
                            logger.Log(ex);
                        }
                    }
                    else
                    {
                        logger.Log($"Item {i} - There was an empty target entity in the queueitems.json", TraceEventType.Information);
                    }
                }
                else
                {
                    logger.Log($"Item {i} - Unable to parse targetid guid: {queueitem.targetid.ToString()}", TraceEventType.Information);
                }
            }
            else
            {
                logger.Log($"Item {i} - Unable to parse queueid guid: {queueitem.queueid.ToString()}", TraceEventType.Information);
            }
        }
        // flush the final partial batch
        if (requests.Count > 0)
        {
            ExecuteMultipleRequests(logger, service, requests, j);
        }
    }
    catch (Exception ex)
    {
        logger.Log("Unable to queue items.");
        logger.Log(ex);
    }
    finally
    {
        logger.Log(SEPERATOR, TraceEventType.Information);
    }
}
/// <summary>
/// Updates annotation and choice element if presents in xsd schema
/// </summary>
/// <param name="schemaObject">Single schema object to update annotation</param>
private void HandleSchemaObject(XmlSchemaObject schemaObject)
{
    // Schemaobject type is Element
    XmlSchemaElement element = schemaObject as XmlSchemaElement;
    if (element != null)
    {
        //target.Log(TraceLevel.Verbose, "Updates documentation for Simple Type : " + simpleType.Name);
        //UpdateAnnotation(simpleType);
        GetIntuitEntities(element);
    }
    // Schemaobject type is Simple Type
    XmlSchemaSimpleType simpleType = schemaObject as XmlSchemaSimpleType;
    if (simpleType != null)
    {
        target.Log(TraceLevel.Verbose, "Updates documentation for Simple Type : " + simpleType.Name);
        UpdateAnnotation(simpleType);
    }
    // Schemaobject type is Complex Type
    XmlSchemaComplexType complexType = schemaObject as XmlSchemaComplexType;
    if (complexType != null)
    {
        // Handle properties within schema object (complex type)
        XmlSchemaObjectCollection properties = complexType.Attributes;
        foreach (XmlSchemaObject property in properties)
        {
            target.Log(TraceLevel.Verbose, "Updates documentation for Property Type under " + complexType.Name + " Complex type");
            UpdateAnnotation(property, complexType.Name);
        }
        UpdateAnnotation(complexType);
        target.Log(TraceLevel.Verbose, complexType.Name);
    }
    // The sequence to walk comes either from the complex type's content model
    // or from the schema object itself (when called recursively on a sequence).
    XmlSchemaSequence sequence = null;
    if (complexType != null)
    {
        sequence = complexType.ContentTypeParticle as XmlSchemaSequence;
    }
    else
    {
        sequence = schemaObject as XmlSchemaSequence;
    }
    if (sequence != null)
    {
        // label used in log output; "Generic Nested" when recursing on a bare sequence
        string complexTypeName = "";
        if (complexType != null)
        {
            complexTypeName = complexType.Name;
        }
        else
        {
            complexTypeName = "Generic Nested";
        }
        // Handle Sequence within schema object (complex type)
        // (inner null check is redundant — sequence was tested above)
        if (sequence != null)
        {
            target.Log(TraceLevel.Verbose, "Sequence elements found under :" + complexTypeName + " Complex type and counts are : " + sequence.Items.Count.ToString());
            foreach (XmlSchemaObject childElementInSequence in sequence.Items)
            {
                XmlSchemaElement elementInClass = childElementInSequence as XmlSchemaElement;
                if (elementInClass != null)
                {
                    target.Log(TraceLevel.Verbose, "Updates documentation for Sequence Type under " + complexTypeName + " Complex type");
                    UpdateAnnotation(childElementInSequence, complexTypeName);
                }
                // Handle choice element within sequence element (complex type)
                XmlSchemaChoice choiceElement = childElementInSequence as XmlSchemaChoice;
                if (choiceElement != null)
                {
                    target.Log(TraceLevel.Verbose, "Choice elements found under :" + complexTypeName + " Complex type and under Sequence");
                    XmlSchemaObjectCollection choiceObjects = choiceElement.Items;
                    foreach (XmlSchemaObject choiceObject in choiceObjects)
                    {
                        XmlSchemaSequence seqInsideChoice = choiceObject as XmlSchemaSequence;
                        // Handle sequence element within choice element (complex type)
                        if (seqInsideChoice != null)
                        {
                            // recurse into the nested sequence
                            HandleSchemaObject(seqInsideChoice);
                        }
                        else if (choiceElement.Parent.Parent == complexType && choiceElement.MaxOccurs == 1)
                        {
                            // single-occurrence choice directly under the complex type:
                            // annotate each alternative and split it out
                            foreach (XmlSchemaObject choiceItem in choiceElement.Items)
                            {
                                XmlSchemaElement choiceChild = choiceItem as XmlSchemaElement;
                                if (choiceChild != null)
                                {
                                    UpdateAnnotation(choiceChild, complexTypeName);
                                    if (choiceChild.MaxOccurs == 1)
                                    {
                                        target.Log(TraceLevel.Verbose, "Called SeperateItem method for :" + choiceChild.Name + " Choice element under :" + complexTypeName + " Complex type and under Sequence");
                                        SeperateItem(choiceChild, complexTypeName);
                                        HandleSchemaObject(choiceChild);
                                    }
                                }
                                else
                                {
                                    XmlSchemaSequence seqInsideChoiceItem = choiceItem as XmlSchemaSequence;
                                    if (seqInsideChoiceItem != null)
                                    {
                                        HandleSchemaObject(seqInsideChoiceItem);
                                    }
                                    else
                                    {
                                        // anything but an element or sequence under a choice is unexpected
                                        throw new System.Exception("Unknown XML schema choice type");
                                    }
                                }
                            }
                        } // closing sequence element within choice element
                    }
                } // closing choice element within sequence element
            }
        }
    }
    // Handle Attribute element within complex type
    XmlSchemaAttribute attribute = schemaObject as XmlSchemaAttribute;
    if (attribute != null)
    {
        UpdateAnnotation(attribute);
    }
}
/// <summary>
/// Deploy Power BI report(s) described by pbiconfig.json: ensure the target
/// workspace exists, import each .pbix, point its datasources at the right
/// instance, and patch Dynamics dashboards to reference the imported report.
/// </summary>
/// <param name="logger">package-deployer trace logger</param>
/// <param name="service">CRM connection used to update dashboards</param>
/// <param name="pbiConfigPath">path to pbiconfig.json</param>
/// <param name="pkgFolder">folder containing the .pbix files</param>
internal static void Deploy(TraceLogger logger, CrmServiceClient service, string pbiConfigPath, string pkgFolder)
{
    // TODO: Improve logging code
    // ReSharper disable once InconsistentNaming
    const string SEPERATOR = "------------------------------------------------------";
    logger.Log(SEPERATOR, TraceEventType.Information);
    logger.Log("Starting to deploy Power BI report(s)... ", TraceEventType.Information);
    if (!File.Exists(pbiConfigPath))
    {
        logger.Log(new FileNotFoundException($"Unable to locate pbiconfig.json at {pbiConfigPath}"));
        logger.Log(SEPERATOR, TraceEventType.Information);
        return;
    }
    var json = File.ReadAllText(pbiConfigPath);
    if (string.IsNullOrWhiteSpace(json))
    {
        logger.Log(new FileLoadException("pbiconfig.json was empty."));
        logger.Log(SEPERATOR, TraceEventType.Information);
        return;
    }
    try
    {
        dynamic data = JsonConvert.DeserializeObject(json);
        if (data == null)
        {
            logger.Log("Json data was null", TraceEventType.Information);
            logger.Log(SEPERATOR, TraceEventType.Information);
            return;
        }
        var clientId = (string)data.clientId;
        var clientSecret = (string)data.clientSecret;
        var username = (string)data.username;
        var password = (string)data.password;
        foreach (var pbiUpdate in data.pbiUpdates)
        {
            // token is refreshed per report so long uploads don't hit expiry
            var token = AuthenticationHelper.GetAccessToken(clientId, clientSecret, username, password);
            // Originally wrote the code using Power BI REST .net client: https://www.nuget.org/packages/Microsoft.PowerBIDeployer.Api/
            // See https://github.com/devkeydet/pbi-vsts-helper for an example. However, the .net client was causing errors with PackageDeployer.exe.
            // So I dropped to straight REST calls.
            var pbiBaseUri = "https://api.powerbi.com/v1.0/myorg";
            using (var httpClient = new HttpClient())
            {
                httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", token);
                // Get groups
                logger.Log("Getting groups...");
                var groupEndpoint = $"{pbiBaseUri}/groups";
                var response = httpClient.GetAsync(groupEndpoint).Result;
                logger.Log($"Get groups status code: {response.StatusCode}");
                dynamic responseObject = JsonConvert.DeserializeObject(response.Content.ReadAsStringAsync().Result);
                var groups = responseObject.value;
                var groupFound = false;
                var groupId = "";
                // Find out if group exists
                foreach (var group in groups)
                {
                    if (group.name == pbiUpdate.groupName)
                    {
                        groupFound = true;
                        groupId = group.id;
                        logger.Log($"Group id found: {groupId}");
                        break;
                    }
                }
                // If it doesn't, create it
                if (!groupFound)
                {
                    logger.Log("Group not found. Creating group...");
                    dynamic addGroupBody = new ExpandoObject();
                    addGroupBody.name = pbiUpdate.groupName;
                    var stringContent = new StringContent(JsonConvert.SerializeObject(addGroupBody), Encoding.UTF8, "application/json");
                    response = httpClient.PostAsync(groupEndpoint, stringContent).Result;
                    logger.Log($"Create group status code: {response.StatusCode}");
                    responseObject = JsonConvert.DeserializeObject(response.Content.ReadAsStringAsync().Result);
                    groupId = responseObject.id;
                }
                logger.Log($"Starting import process for {pbiUpdate.reportFileName}...");
                var pbiFilePath = $"{pkgFolder}\\{pbiUpdate.reportFileName}";
                logger.Log($"Report file path: {pbiFilePath}");
                var bytes = File.ReadAllBytes(pbiFilePath);
                var importsEndpoint = $"{groupEndpoint}/{groupId}/imports";
                var multiPartContent = new MultipartFormDataContent();
                var byteArrayContent = new ByteArrayContent(bytes);
                multiPartContent.Add(byteArrayContent);
                var reportFileName = pbiUpdate.reportFileName.ToString();
                var importPostUrl = $"{importsEndpoint}?datasetDisplayName={HttpUtility.UrlEncode(reportFileName)}&nameConflict=Overwrite";
                logger.Log($"Import post url (first attempt): {importPostUrl}");
                response = httpClient.PostAsync(importPostUrl, multiPartContent).Result;
                logger.Log($"Import report status code (first attempt): {response.StatusCode}");
                if (!response.IsSuccessStatusCode)
                {
                    // nameConflict=Overwrite fails for brand-new datasets; retry without it
                    multiPartContent = new MultipartFormDataContent();
                    byteArrayContent = new ByteArrayContent(bytes);
                    multiPartContent.Add(byteArrayContent);
                    importPostUrl = $"{importsEndpoint}?datasetDisplayName={HttpUtility.UrlEncode(reportFileName)}";
                    logger.Log($"Import post url (second attempt): {importPostUrl}");
                    response = httpClient.PostAsync(importPostUrl, multiPartContent).Result;
                    logger.Log($"Import report status code (second attempt): {response.StatusCode}");
                }
                responseObject = JsonConvert.DeserializeObject(response.Content.ReadAsStringAsync().Result);
                var importId = responseObject.id;
                string reportId;
                string datasetId;
                // Check for Succeeded
                logger.Log("Checking import status...");
                while (true)
                {
                    response = httpClient.GetAsync($"{importsEndpoint}/{importId}").Result;
                    responseObject = JsonConvert.DeserializeObject(response.Content.ReadAsStringAsync().Result);
                    logger.Log($"Import status: {responseObject.importState}...");
                    if (responseObject.importState == "Succeeded")
                    {
                        reportId = responseObject.reports[0].id;
                        datasetId = responseObject.datasets[0].id;
                        break;
                    }
                    // BUGFIX: a failed import previously kept this loop polling
                    // forever; fail fast and let the outer catch log it.
                    if (responseObject.importState == "Failed")
                    {
                        throw new InvalidOperationException($"Import of {reportFileName} failed.");
                    }
                    Thread.Sleep(TimeSpan.FromSeconds(5));
                }
                var datasetsEndpoint = $"{groupEndpoint}/{groupId}/datasets";
                // Update the data sources so that they point to the right Dynamics instance.
                logger.Log("Updating datasources...");
                var content = new StringContent(pbiUpdate.dataSourceUpdates.ToString(), Encoding.UTF8, "application/json");
                var updatedatasourcesEndpoint = $"{datasetsEndpoint}/{datasetId}/updatedatasources";
                var result = httpClient.PostAsync(updatedatasourcesEndpoint, content).Result;
                if (!result.IsSuccessStatusCode)
                {
                    logger.Log(new Exception("ERROR: Failed to update the datasource"));
                }
                // TODO: would be great if we could update the data source credentials and then refresh it.
                // Don't see how to do that with the current API: https://msdn.microsoft.com/en-us/library/mt784652.aspx
                logger.Log("Updating dashboards...");
                // Update dashboard(s) using the new report id and group id
                foreach (var dashboardUpdate in pbiUpdate.dashboardUpdates)
                {
                    var dashboard = service.Retrieve("systemform", new Guid(dashboardUpdate.dashboardId.ToString()), new ColumnSet("formxml"));
                    var formXml = dashboard["formxml"].ToString();
                    formXml = formXml.Replace(dashboardUpdate.reportIdToFind.ToString(), reportId);
                    formXml = formXml.Replace(dashboardUpdate.groupIdToFind.ToString(), groupId);
                    dashboard["formxml"] = formXml;
                    service.Update(dashboard);
                }
            }
        }
        logger.Log("Publishing all customizations");
        var publishAllCustomizationsRequest = new PublishAllXmlRequest();
        service.Execute(publishAllCustomizationsRequest);
    }
    catch (Exception ex)
    {
        logger.Log("Unexpected error deploying to Power BI.");
        logger.Log(ex);
    }
    finally
    {
        logger.Log("Finished deploying Power BI report(s)... ", TraceEventType.Information);
        logger.Log(SEPERATOR, TraceEventType.Information);
    }
}
/// <summary>
/// ASP.NET session-end hook. Releases the RPC broker bound to this session and
/// removes temporary CDA artifacts (tracked documents and expired export files).
/// All cleanup is best-effort: failures are logged and do not abort the remaining steps.
/// </summary>
protected void Session_OnEnd()
{
    CloseSessionBroker();
    DeleteOutgoingCdaDocuments();
    PurgeExpiredCdaExportFiles();
}

/// <summary>
/// Closes and removes the broker registered under this session's broker key, if any,
/// then clears the key from session state.
/// </summary>
private void CloseSessionBroker()
{
    string brokerKey = (string)this.Session[RpcBrokerUtility.BrokerKeyName];
    if (string.IsNullOrWhiteSpace(brokerKey))
    {
        return;
    }

    IRpcBroker broker = BrokerStore.Get(brokerKey);
    if (broker != null)
    {
        RpcBrokerUtility.CloseBroker(broker);
        BrokerStore.Delete(brokerKey);
    }

    Session[RpcBrokerUtility.BrokerKeyName] = "";
    TraceLogger.Log(string.Format(" -- Broker connection with key [{0}] has been closed -- ", brokerKey));
}

/// <summary>
/// Deletes the temporary CDA files tracked in the session's "OutgoingCdaDocs" list.
/// Each file is attempted independently; a failed delete is logged and skipped.
/// </summary>
private void DeleteOutgoingCdaDocuments()
{
    // 'as' instead of a hard cast: a stale or mistyped session entry should not throw here.
    List<string> docList = Session["OutgoingCdaDocs"] as List<string>;
    if (docList == null)
    {
        return;
    }

    foreach (string doc in docList)
    {
        try
        {
            TraceLogger.Log(string.Format("Deleting CDA temporary file, {0}.", doc));
            System.IO.File.Delete(doc);
        }
        catch (Exception genericException)
        {
            string message = string.Format("Could not delete temporary CDA file {0}", doc);
            ErrorLogger.Log(genericException, message);
        }
    }
}

/// <summary>
/// Removes files older than one day from the session's CDA export folder,
/// preserving the stylesheet ("CDA.XSL") and the warning readme.
/// </summary>
private void PurgeExpiredCdaExportFiles()
{
    object exportFolderValue = Session["CdaExportFolder"];
    if (exportFolderValue == null)
    {
        return;
    }

    string exportFolder = exportFolderValue.ToString();
    if (!Directory.Exists(exportFolder))
    {
        return;
    }

    // Hoisted out of the loop: one consistent cutoff for the whole sweep
    // (the original recomputed DateTime.Now.AddDays(-1) per file).
    DateTime expiredDateTime = DateTime.Now.AddDays(-1);

    foreach (string file in System.IO.Directory.GetFiles(exportFolder))
    {
        // Invariant uppercasing: file-name comparison must not vary with the server culture.
        string tempUpper = System.IO.Path.GetFileName(file).ToUpperInvariant();
        if ((tempUpper == "CDA.XSL") || (tempUpper == "WARNING READ THIS.TXT"))
        {
            continue; // never delete the stylesheet or the warning note
        }

        DateTime fileDateTime = System.IO.File.GetLastWriteTime(file);
        if (expiredDateTime > fileDateTime)
        {
            try
            {
                System.IO.File.Delete(file);
            }
            catch (Exception ex)
            {
                ErrorLogger.Log(ex, "Could not delete temporary CDA file");
            }
        }
    }
}
/// <summary>
/// Raised when an action is sent to this control.
/// Handles the "SetView" action by switching the scheduler to the requested view style;
/// all other actions fall through to the base handler.
/// </summary>
/// <param name="args">args for the action</param>
protected override void DoAction(Microsoft.Uii.Csr.RequestActionEventArgs args)
{
    // Log process.
    LogWriter.Log(string.Format(CultureInfo.CurrentCulture, "{0} -- DoAction called for action: {1}", this.ApplicationName, args.Action), System.Diagnostics.TraceEventType.Information);

    if (args.Action.Equals("SetView", StringComparison.OrdinalIgnoreCase))
    {
        List<KeyValuePair<string, string>> actionDataList = Utility.SplitLines(args.Data, CurrentContext, localSession);

        // Assume there is a View=<value> pair in the data. GetAndRemoveParameter may
        // return null when the key is absent, so the switch must be null-safe.
        string value = Utility.GetAndRemoveParameter(actionDataList, "View");

        // Invariant lowering: the view keyword is a machine token, not user-facing text,
        // and a null value simply matches no case instead of throwing.
        switch (value?.ToLowerInvariant())
        {
            case "oneday":
                SetStyle(scheduler.OneDayStyle);
                break;
            case "workingweek":
                SetStyle(scheduler.WorkingWeekStyle);
                break;
            case "week":
                SetStyle(scheduler.WeekStyle);
                break;
            case "month":
                SetStyle(scheduler.MonthStyle);
                break;
            case "timeline":
                SetStyle(scheduler.TimeLineStyle);
                break;
        }

        #region Example process action
        // // Access CRM and fetch a Record
        // Microsoft.Xrm.Sdk.Messages.RetrieveRequest req = new Microsoft.Xrm.Sdk.Messages.RetrieveRequest();
        // req.Target = new Microsoft.Xrm.Sdk.EntityReference( "account" , Guid.Parse("0EF05F4F-0D39-4219-A3F5-07A0A5E46FD5"));
        // req.ColumnSet = new Microsoft.Xrm.Sdk.Query.ColumnSet("accountid" , "name" );
        // Microsoft.Xrm.Sdk.Messages.RetrieveResponse response = (Microsoft.Xrm.Sdk.Messages.RetrieveResponse)this._client.CrmInterface.ExecuteCrmOrganizationRequest(req, "Requesting Account");

        // // Example of pulling some data out of the passed in data array
        // List<KeyValuePair<string, string>> actionDataList = Utility.SplitLines(args.Data, CurrentContext, localSession);
        // string valueIwant = Utility.GetAndRemoveParameter(actionDataList, "mykey"); // assume there is a myKey=<value> in the data.

        // // Example of pushing data to USD
        // string global = Utility.GetAndRemoveParameter(actionDataList, "global"); // Assume there is a global=true/false in the data
        // bool saveInGlobalSession = false;
        // if (!String.IsNullOrEmpty(global))
        //     saveInGlobalSession = bool.Parse(global);

        // Dictionary<string, CRMApplicationData> myDataToSet = new Dictionary<string, CRMApplicationData>();
        // // add a string:
        // myDataToSet.Add("myNewKey", new CRMApplicationData() { name = "myNewKey", type = "string", value = "TEST" });
        // // add a entity lookup:
        // myDataToSet.Add("myNewKey", new CRMApplicationData() { name = "myAccount", type = "lookup", value = "account,0EF05F4F-0D39-4219-A3F5-07A0A5E46FD5,MyAccount" });

        // if (saveInGlobalSession)
        // {
        //     // add context item to the global session
        //     ((DynamicsCustomerRecord)((AgentDesktopSession)localSessionManager.GlobalSession).Customer.DesktopCustomer).MergeReplacementParameter(this.ApplicationName, myDataToSet, true);
        // }
        // else
        // {
        //     // Add context item to the current session.
        //     ((DynamicsCustomerRecord)((AgentDesktopSession)localSessionManager.ActiveSession).Customer.DesktopCustomer).MergeReplacementParameter(this.ApplicationName, myDataToSet, true);
        // }
        #endregion
    }

    base.DoAction(args);
}