/// <summary>
/// Exports the full Google Docs list (title + resource id) for the given account into
/// "doclist.txt" and, with parent-folder details, "doclistdetails.txt" inside
/// <paramref name="outFolder"/>.
/// </summary>
/// <param name="outFolder">Destination directory for the two output files.</param>
/// <param name="username">Google account user name.</param>
/// <param name="password">Google account password (ClientLogin).</param>
public static void ExportDocList(string outFolder, string username, string password)
{
    GDataCredentials credentials = new GDataCredentials(username, password);
    RequestSettings settings = new RequestSettings("GDocBackup", credentials);
    settings.AutoPaging = true;
    settings.PageSize = 100;

    DocumentsRequest request = new DocumentsRequest(settings);

    // AutoPaging makes GetEverything walk all result pages.
    Feed<Document> feed = request.GetEverything();
    List<Document> docs = new List<Document>();
    foreach (Document entry in feed.Entries)
    {
        docs.Add(entry);
    }

    // Dispose via using flushes and closes both writers even on exception;
    // the original's explicit Close() calls inside the using were redundant.
    using (StreamWriter outFile = new StreamWriter(Path.Combine(outFolder, "doclist.txt"), false),
                        outFile2 = new StreamWriter(Path.Combine(outFolder, "doclistdetails.txt"), false))
    {
        foreach (Document doc in docs)
        {
            string s = doc.Title + "\t" + doc.ResourceId;
            outFile.WriteLine(s);
            outFile2.WriteLine(s);
            foreach (string pf in doc.ParentFolders)
            {
                outFile2.WriteLine("\t\t\t" + pf);
            }
        }
    }
}
/// <summary>
/// Deletes every Google Docs entry titled "AN_OUTLOOK_TEST_NOTE" that earlier test runs
/// may have left behind.
/// </summary>
/// <param name="service">Authenticated Documents request used to query and delete.</param>
private static void DeleteTestNote(DocumentsRequest service)
{
    //ToDo: Doesn'T work always, frequently throwing 401, Precondition failed, maybe Google API bug
    //service.Delete(createdEntry);

    //Todo: Workaround to load document again
    DocumentQuery query = new DocumentQuery(service.BaseUri);
    // NOTE(review): assumes the account holds fewer than 500 documents — TODO confirm.
    query.NumberToRetrieve = 500;
    Feed <Document> feed = service.Get <Document>(query);
    Logger.Log("Loaded Google notes", EventType.Information);
    foreach (Document entry in feed.Entries)
    {
        if (entry.Title == "AN_OUTLOOK_TEST_NOTE")
        {
            //service.Delete(entry);
            // Delete via an explicitly built resource URI plus ETag instead of the entry
            // object itself — workaround for the 401/precondition failures noted above.
            service.Delete(new Uri(Google.GData.Documents.DocumentsListQuery.documentsBaseUri + "/" + entry.ResourceId), entry.ETag);
            Logger.Log("Deleted Google note", EventType.Information);
            //break;
        }
    }
}
/// <summary>
/// Exports the full Google Docs list (title + resource id) for the given account into
/// "doclist.txt" and, with parent-folder details, "doclistdetails.txt" inside
/// <paramref name="outFolder"/>.
/// </summary>
/// <param name="outFolder">Destination directory for the two output files.</param>
/// <param name="username">Google account user name.</param>
/// <param name="password">Google account password (ClientLogin).</param>
public static void ExportDocList(string outFolder, string username, string password)
{
    GDataCredentials credentials = new GDataCredentials(username, password);
    RequestSettings settings = new RequestSettings("GDocBackup", credentials);
    settings.AutoPaging = true;
    settings.PageSize = 100;

    DocumentsRequest request = new DocumentsRequest(settings);

    // AutoPaging makes GetEverything walk all result pages.
    Feed<Document> feed = request.GetEverything();
    List<Document> docs = new List<Document>();
    foreach (Document entry in feed.Entries)
    {
        docs.Add(entry);
    }

    // Dispose via using flushes and closes both writers even on exception;
    // the original's explicit Close() calls inside the using were redundant.
    using (StreamWriter outFile = new StreamWriter(Path.Combine(outFolder, "doclist.txt"), false),
                        outFile2 = new StreamWriter(Path.Combine(outFolder, "doclistdetails.txt"), false))
    {
        foreach (Document doc in docs)
        {
            string s = doc.Title + "\t" + doc.ResourceId;
            outFile.WriteLine(s);
            outFile2.WriteLine(s);
            foreach (string pf in doc.ParentFolders)
            {
                outFile2.WriteLine("\t\t\t" + pf);
            }
        }
    }
}
/// <summary>
/// Smoke-tests 2-legged OAuth against the Documents API: fetches all documents,
/// round-trips an Update on each one, then does the same for every ACL entry.
/// </summary>
public void OAuth2LeggedDocumentsTest()
{
    Tracing.TraceMsg("Entering OAuth2LeggedDocumentsTest");

    RequestSettings rs = new RequestSettings(this.ApplicationName, this.oAuthConsumerKey, this.oAuthConsumerSecret,
                                             this.oAuthUser, this.oAuthDomain);
    DocumentsRequest dr = new DocumentsRequest(rs);

    Feed <Document> f = dr.GetDocuments();
    // modify one
    foreach (Document d in f.Entries)
    {
        // Percent-encode the '@' in the edit URI before updating; presumably the
        // 2-legged OAuth request fails with the raw '@' embedded — TODO confirm.
        string s = d.AtomEntry.EditUri.ToString();
        d.AtomEntry.EditUri = new AtomUri(s.Replace("@", "%40"));

        dr.Update(d);

        // Fetch this document's ACL feed and round-trip each ACL entry too.
        AclQuery q = new AclQuery();
        q.Uri = d.AccessControlList;
        Feed <Google.AccessControl.Acl> facl = dr.Get <Google.AccessControl.Acl>(q);

        foreach (Google.AccessControl.Acl a in facl.Entries)
        {
            // Same '@' escaping workaround for each ACL entry's edit URI.
            s = a.AclEntry.EditUri.ToString();
            a.AclEntry.EditUri = new AtomUri(s.Replace("@", "%40"));
            dr.Update(a);
        }
    }
}
/// <summary>
/// Verifies ETag handling on a single entry: re-retrieving an unchanged document
/// must surface a not-modified response.
/// </summary>
[Test]
public void ModelTestEntryETagRefresh()
{
    RequestSettings requestSettings = new RequestSettings(this.ApplicationName, this.userName, this.passWord);
    DocumentsRequest documentsRequest = new DocumentsRequest(requestSettings);

    // Pull the server-default document feed and remember the last entry seen.
    Feed<Document> documentFeed = documentsRequest.GetDocuments();
    Document lastSeen = null;
    foreach (Document current in documentFeed.Entries)
    {
        Assert.IsTrue(current != null, "We should have something");
        lastSeen = current;
    }
    Assert.IsTrue(lastSeen != null, "We should have something");

    // Retrieving the same (unchanged) entry should raise GDataNotModifiedException.
    try
    {
        Document refreshed = documentsRequest.Retrieve(lastSeen);
        Assert.IsTrue(refreshed == null, "we should not be here");
    }
    catch (GDataNotModifiedException notModified)
    {
        Assert.IsTrue(notModified != null);
    }
}
/// <summary>
/// Console entry point: dumps the full Google Docs list for the account given in
/// args[1]/args[2] into "doclist.txt" and "doclistdetails.txt".
/// </summary>
/// <param name="args">args[1] = user name, args[2] = password.</param>
public static void Exec(string[] args)
{
    Console.WriteLine("*** GetDocList ***");
    Console.WriteLine("--- START ---");

    string username = args[1];
    string password = args[2];

    GDataCredentials credentials = new GDataCredentials(username, password);
    RequestSettings settings = new RequestSettings("GDocBackup", credentials);
    settings.AutoPaging = true;
    settings.PageSize = 100;

    DocumentsRequest request = new DocumentsRequest(settings);
    Feed<Document> feed = request.GetEverything();
    List<Document> docs = new List<Document>();
    foreach (Document entry in feed.Entries)
    {
        docs.Add(entry);
    }

    // using guarantees the files are flushed and closed even if writing throws
    // (the original leaked both writers on an exception).
    using (StreamWriter outFile = new StreamWriter("doclist.txt", false))
    using (StreamWriter outFile2 = new StreamWriter("doclistdetails.txt", false))
    {
        foreach (Document doc in docs)
        {
            string s = doc.Title + "\t" + doc.ResourceId;
            Console.WriteLine(s);
            outFile.WriteLine(s);
            outFile2.WriteLine(s);
            foreach (string pf in doc.ParentFolders)
            {
                outFile2.WriteLine("\t\t\t" + pf);
            }
        }
    }

    Console.WriteLine("--- END ---");
}
/// <summary>
/// Verifies ETag handling at feed level: refreshing an unchanged feed must surface
/// a not-modified response.
/// </summary>
[Test]
public void ModelTestFeedETagRefresh()
{
    RequestSettings requestSettings = new RequestSettings(this.ApplicationName, this.userName, this.passWord);
    DocumentsRequest documentsRequest = new DocumentsRequest(requestSettings);

    // First fetch: the server's default answer.
    Feed<Document> initialFeed = documentsRequest.GetDocuments();
    foreach (Document entry in initialFeed.Entries)
    {
        Assert.IsTrue(entry != null, "We should have something");
    }

    // Re-requesting the same feed as a refresh should raise not-modified
    // as soon as we try to enumerate it.
    Feed<Document> refreshedFeed = documentsRequest.Get(initialFeed, FeedRequestType.Refresh);
    try
    {
        foreach (Document entry in refreshedFeed.Entries)
        {
            Assert.IsTrue(entry == null, "We should not get here");
        }
    }
    catch (GDataNotModifiedException notModified)
    {
        Assert.IsTrue(notModified != null);
    }
}
/// <summary>
/// Returns a stream with the document exported in the requested format, built by
/// appending exportFormat/format query parameters to the entry's content URI.
/// </summary>
/// <param name="request">Authenticated Documents request whose service issues the query.</param>
/// <param name="doc">Document to export.</param>
/// <param name="downloadtype">Export format; its ToString() value is used verbatim.</param>
public static Stream GetDocExportStream(DocumentsRequest request, Document doc, Document.DownloadType downloadtype)
{
    string exportFormat = downloadtype.ToString();
    string exportUrl = doc.DocumentEntry.Content.AbsoluteUri + "&exportFormat=" + exportFormat + "&format=" + exportFormat;
    Uri exportUri = new Uri(exportUrl);
    return request.Service.Query(exportUri);
}
/// <summary>
/// Returns a stream with the document exported in the requested format, built by
/// appending exportFormat/format query parameters to the entry's content URI.
/// </summary>
/// <param name="request">Authenticated Documents request whose service issues the query.</param>
/// <param name="doc">Document to export.</param>
/// <param name="downloadtype">Export format; its ToString() value is used verbatim.</param>
public static Stream GetDocExportStream(DocumentsRequest request, Document doc, Document.DownloadType downloadtype)
{
    string exportFormat = downloadtype.ToString();
    string exportUrl = doc.DocumentEntry.Content.AbsoluteUri + "&exportFormat=" + exportFormat + "&format=" + exportFormat;
    Uri exportUri = new Uri(exportUrl);
    return request.Service.Query(exportUri);
}
/// <summary>
/// Exports the document list for the account entered in the UI into "doclist.txt"
/// and "doclistdetails.txt", reporting progress via WriteMessage.
/// </summary>
private void GetDocListExec()
{
    WriteMessage("*** GetDocList ***");
    WriteMessage("--- START ---");
    try
    {
        string username = tbUserName.Text; // removed stray empty statement (";;")
        string password = tbPassword.Text;

        GDataCredentials credentials = new GDataCredentials(username, password);
        RequestSettings settings = new RequestSettings("GDocBackup", credentials);
        settings.AutoPaging = true;
        settings.PageSize = 100;

        DocumentsRequest request = new DocumentsRequest(settings);
        Feed<Document> feed = request.GetEverything();
        List<Document> docs = new List<Document>();
        foreach (Document entry in feed.Entries)
        {
            docs.Add(entry);
        }

        // using guarantees both files are closed even if an export line throws
        // (the original leaked the writers on exception).
        using (StreamWriter outFile = new StreamWriter("doclist.txt", false))
        using (StreamWriter outFile2 = new StreamWriter("doclistdetails.txt", false))
        {
            WriteMessage("Exporting document list. Please wait...");
            foreach (Document doc in docs)
            {
                string s = doc.Title + "\t" + doc.ResourceId;
                //WriteMessage(s);
                outFile.WriteLine(s);
                outFile2.WriteLine(s);
                foreach (string pf in doc.ParentFolders)
                {
                    outFile2.WriteLine("\t\t\t" + pf);
                }
            }
            WriteMessage("Created file: doclist.txt");
            WriteMessage("Created file: doclistdetails.txt");
        }
    }
    catch (Exception ex)
    {
        WriteMessage("EXCEPTION: " + ex.ToString());
    }
    WriteMessage("--- END ---");
}
/// <summary>
/// Fetches factoring documents changed since <paramref name="fromDate"/> for the given
/// organization, paging via afterKey/count.
/// </summary>
/// <param name="token">Bearer token placed in the Authorization header.</param>
/// <param name="fromDate">Lower bound for document changes.</param>
/// <param name="organizationId">Organization whose documents are requested.</param>
/// <param name="afterKey">Paging cursor: fetch documents after this key.</param>
/// <param name="count">Maximum number of documents to return.</param>
/// <returns>The response list on success, or the error response.</returns>
public async Task <(List <DocumentsResponse>, ErrorResponse)> GetDocumentsAsync(string token, DateTime fromDate, int organizationId, long afterKey, int count)
{
    // HttpRequestMessage is IDisposable; dispose it once the call has completed
    // (the original never disposed it).
    using (var request = new HttpRequestMessage(HttpMethod.Post, KONTUR_FACTORING_URL + "/v2/documents"))
    {
        request.Headers.Add("Authorization", $"Bearer {token}");
        var requestBody = new DocumentsRequest(afterKey, count, fromDate, new List <int> { organizationId });
        return await MakeRequest <List <DocumentsResponse>, ErrorResponse>(request, requestBody);
    }
}
/// <summary>
/// Creates a repository bound to the Google account behind the given OAuth code,
/// resolving the account's e-mail address only when it is not already cached.
/// </summary>
/// <param name="code">OAuth authorization code exchanged for a session token.</param>
public GoogleDocsRepository(string code)
{
    m_token = GetSessionToken(code);
    if (string.IsNullOrEmpty(m_email))
    {
        using (MiniProfiler.Current.Step("Get gmail address"))
        {
            // The folders feed's first author carries the account's e-mail address.
            DocumentsRequest request = new DocumentsRequest(new RequestSettings(MvcApplication.APPNAME, m_token.access_token));
            m_email = request.GetFolders().AtomFeed.Authors[0].Email;
        }
    }
}
/// <summary>
/// Exports the document list for the account entered in the UI into "doclist.txt"
/// and "doclistdetails.txt", reporting progress via WriteMessage.
/// </summary>
private void GetDocListExec()
{
    WriteMessage("*** GetDocList ***");
    WriteMessage("--- START ---");
    try
    {
        string username = tbUserName.Text; // removed stray empty statement ("; ;")
        string password = tbPassword.Text;

        GDataCredentials credentials = new GDataCredentials(username, password);
        RequestSettings settings = new RequestSettings("GDocBackup", credentials);
        settings.AutoPaging = true;
        settings.PageSize = 100;

        DocumentsRequest request = new DocumentsRequest(settings);
        Feed<Document> feed = request.GetEverything();
        List<Document> docs = new List<Document>();
        foreach (Document entry in feed.Entries)
        {
            docs.Add(entry);
        }

        // using guarantees both files are closed even if an export line throws
        // (the original leaked the writers on exception).
        using (StreamWriter outFile = new StreamWriter("doclist.txt", false))
        using (StreamWriter outFile2 = new StreamWriter("doclistdetails.txt", false))
        {
            WriteMessage("Exporting document list. Please wait...");
            foreach (Document doc in docs)
            {
                string s = doc.Title + "\t" + doc.ResourceId;
                //WriteMessage(s);
                outFile.WriteLine(s);
                outFile2.WriteLine(s);
                foreach (string pf in doc.ParentFolders)
                {
                    outFile2.WriteLine("\t\t\t" + pf);
                }
            }
            WriteMessage("Created file: doclist.txt");
            WriteMessage("Created file: doclistdetails.txt");
        }
    }
    catch (Exception ex)
    {
        WriteMessage("EXCEPTION: " + ex.ToString());
    }
    WriteMessage("--- END ---");
}
/// <summary>
/// Logs the user in via the ClientLogin dialog and, on success, loads every document
/// and builds the folder tree view.
/// </summary>
public Form1()
{
    InitializeComponent();
    GoogleClientLogin loginDialog = new GoogleClientLogin(new DocumentsService("GoogleDocumentsSample"), "*****@*****.**");
    if (loginDialog.ShowDialog() == DialogResult.OK)
    {
        RequestSettings settings = new RequestSettings("GoogleDocumentsSample", loginDialog.Credentials);
        settings.AutoPaging = true;
        settings.PageSize = 100;

        // (Removed the original's "settings != null" guard: a freshly constructed
        // object can never be null, so the branch was always taken.)
        this.request = new DocumentsRequest(settings);
        this.Text = "Successfully logged in";

        Feed<Document> feed = this.request.GetEverything();
        // this takes care of paging the results in
        foreach (Document entry in feed.Entries)
        {
            all.Add(entry);
        }

        // Top-level bucket for items without any parent folder.
        TreeNode noFolder = new TreeNode("Items with no folder");
        this.documentsView.Nodes.Add(noFolder);
        noFolder.SelectedImageIndex = 0;
        noFolder.ImageIndex = 0;

        foreach (Document entry in all)
        {
            // let's add those with no parents for the toplevel
            if (entry.ParentFolders.Count == 0)
            {
                if (entry.Type != Document.DocumentType.Folder)
                {
                    AddToTreeView(noFolder.Nodes, entry);
                }
                else
                {
                    TreeNode n = AddToTreeView(this.documentsView.Nodes, entry);
                    AddAllChildren(n.Nodes, entry);
                }
            }
        }
    }
}
/// <summary>
/// Logs the user in via the ClientLogin dialog and, on success, loads every document
/// and builds the folder tree view.
/// </summary>
public Form1()
{
    InitializeComponent();
    GoogleClientLogin loginDialog = new GoogleClientLogin(new DocumentsService("GoogleDocumentsSample"), "*****@*****.**");
    if (loginDialog.ShowDialog() == DialogResult.OK)
    {
        RequestSettings settings = new RequestSettings("GoogleDocumentsSample", loginDialog.Credentials);
        settings.AutoPaging = true;
        settings.PageSize = 100;

        // (Removed the original's "settings != null" guard: a freshly constructed
        // object can never be null, so the branch was always taken.)
        this.request = new DocumentsRequest(settings);
        this.Text = "Successfully logged in";

        Feed<Document> feed = this.request.GetEverything();
        // this takes care of paging the results in
        foreach (Document entry in feed.Entries)
        {
            all.Add(entry);
        }

        // Top-level bucket for items without any parent folder.
        TreeNode noFolder = new TreeNode("Items with no folder");
        this.documentsView.Nodes.Add(noFolder);
        noFolder.SelectedImageIndex = 0;
        noFolder.ImageIndex = 0;

        foreach (Document entry in all)
        {
            // let's add those with no parents for the toplevel
            if (entry.ParentFolders.Count == 0)
            {
                if (entry.Type != Document.DocumentType.Folder)
                {
                    AddToTreeView(noFolder.Nodes, entry);
                }
                else
                {
                    TreeNode n = AddToTreeView(this.documentsView.Nodes, entry);
                    AddAllChildren(n.Nodes, entry);
                }
            }
        }
    }
}
/// <summary>
/// Creates a folder and a document, moves the document into the folder, and verifies
/// it shows up in the folder's content feed (matched by ResourceId).
/// </summary>
[Test]
public void ModelTestMoveDocuments()
{
    const string folderTitle = "That is a new & weird folder";
    const string docTitle = "that's the doc";

    RequestSettings requestSettings = new RequestSettings(this.ApplicationName, this.userName, this.passWord);
    DocumentsRequest documentsRequest = new DocumentsRequest(requestSettings);

    // Create the target folder first.
    Document folder = new Document();
    folder.Type = Document.DocumentType.Folder;
    folder.Title = folderTitle;
    folder = documentsRequest.CreateDocument(folder);
    Assert.IsTrue(folder.Title == folderTitle);

    // Then the document that will be moved into it.
    Document doc = new Document();
    doc.Type = Document.DocumentType.Document;
    doc.Title = docTitle;
    doc = documentsRequest.CreateDocument(doc);

    documentsRequest.MoveDocumentTo(folder, doc);

    // The folder's content feed must now contain the moved document.
    bool found = false;
    Feed<Document> folderContent = documentsRequest.GetFolderContent(folder);
    foreach (Document child in folderContent.Entries)
    {
        if (child.ResourceId == doc.ResourceId)
        {
            found = true;
            break;
        }
    }
    Assert.IsTrue(found, "should have found the document in the folder");
}
/// <summary>
/// Verifies that ACLs are included on each entry when querying through the
/// ACL-expanded documents URI.
/// </summary>
[Test]
public void ModelTestIncludeACLs()
{
    RequestSettings requestSettings = new RequestSettings(this.ApplicationName, this.userName, this.passWord);
    DocumentsRequest documentsRequest = new DocumentsRequest(requestSettings);
    // Point the request at the feed that embeds access control lists.
    documentsRequest.BaseUri = DocumentsListQuery.documentsAclUri;

    Feed<Document> documentFeed = documentsRequest.GetDocuments();
    foreach (Document entry in documentFeed.Entries)
    {
        Assert.IsTrue(entry != null, "We should have something");
        Assert.IsNotNull(entry.AccessControlList);
    }
}
/// <summary>
/// MSBuild task entry point: downloads the Google Docs folder feed and materializes
/// every folder whose title path matches Pattern under TargetDirectory, publishing
/// the built items through Folders.
/// </summary>
/// <returns>true on success; false when the task was cancelled mid-run.</returns>
public override bool Execute()
{
    GDataCredentials credentials = GetDataCredentials();
    RequestSettings settings = new RequestSettings("code.google.com/p/exult/", credentials);
    settings.AutoPaging = true;
    settings.PageSize = 100;
    DocumentsRequest request = new DocumentsRequest(settings);
    Feed <Document> feed = request.GetFolders();
    List <ITaskItem> outputs = new List <ITaskItem>();

    // this takes care of paging the results in
    List <Document> entries = feed.Entries.ToList();
    // Index folders by their self URI so GetPaths can resolve parent chains locally.
    IDictionary <string, Document> documentDictionary = entries.ToDictionary(item => item.Self);

    RequireDirectory(TargetDirectory);
    foreach (Document entry in entries)
    {
        // Bail out promptly if the build was cancelled.
        if (_Cancelled)
        {
            return(false);
        }
        List <PathMapping> paths = GetPaths(entry, documentDictionary).ToList();
        //handle each path, as we may allow multiple locations for a collection
        foreach (PathMapping path in paths)
        {
            // No Pattern means "match everything".
            if (Pattern == null || PatternExpression.IsMatch(path.TitlePath))
            {
                Log.LogMessage(MessageImportance.High, "Matched \"{0}\"", path.TitlePath);
                outputs.Add(BuildFolder(entry, path));
            }
            else
            {
                Log.LogMessage(MessageImportance.Low, "Skipped \"{0}\"", path.TitlePath);
            }
        }
    }
    Folders = outputs.ToArray();
    return(true);
}
/// <summary>
/// MSBuild task entry point: downloads the Google Docs folder feed and materializes
/// every folder whose title path matches Pattern under TargetDirectory, publishing
/// the built items through Folders.
/// </summary>
/// <returns>true on success; false when the task was cancelled mid-run.</returns>
public override bool Execute()
{
    GDataCredentials credentials = GetDataCredentials();
    RequestSettings settings = new RequestSettings("code.google.com/p/exult/", credentials);
    settings.AutoPaging = true;
    settings.PageSize = 100;
    DocumentsRequest request = new DocumentsRequest(settings);
    Feed<Document> feed = request.GetFolders();
    List<ITaskItem> outputs = new List<ITaskItem>();

    // this takes care of paging the results in
    List<Document> entries = feed.Entries.ToList();
    // Index folders by their self URI so GetPaths can resolve parent chains locally.
    IDictionary<string, Document> documentDictionary = entries.ToDictionary(item => item.Self);

    RequireDirectory(TargetDirectory);
    foreach (Document entry in entries)
    {
        // Bail out promptly if the build was cancelled.
        if (_Cancelled)
        {
            return false;
        }
        List<PathMapping> paths = GetPaths(entry, documentDictionary).ToList();
        //handle each path, as we may allow multiple locations for a collection
        foreach (PathMapping path in paths)
        {
            // No Pattern means "match everything".
            if (Pattern == null || PatternExpression.IsMatch(path.TitlePath))
            {
                Log.LogMessage(MessageImportance.High, "Matched \"{0}\"", path.TitlePath);
                outputs.Add(BuildFolder(entry, path));
            }
            else
            {
                Log.LogMessage(MessageImportance.Low, "Skipped \"{0}\"", path.TitlePath);
            }
        }
    }
    Folders = outputs.ToArray();
    return true;
}
/// <summary>
/// Moves an entry (document or folder) into a folder.
/// </summary>
/// <param name="folder">A Document object representing the folder where the given entry will be moved.</param>
/// <param name="entryToBeMoved">A Document object representing the entry (document or folder) that will be moved.</param>
/// <exception cref="GDataRequestException">On request failure (including a friendlier message when the network looks down).</exception>
public static void MoveEntry(Document folder, Document entryToBeMoved)
{
    DocumentsRequest request;
    Document movedEntry;
    try
    {
        // Protocol major version 3 is set on the service for this operation.
        request = new DocumentsRequest(_settings)
        {
            Service = { ProtocolMajor = 3 }
        };
        // Clear custom headers and re-apply the proxy before issuing the move.
        var reqFactory = (GDataRequestFactory)request.Service.RequestFactory;
        reqFactory.CustomHeaders.Clear();
        reqFactory.Proxy = GetProxy();
        movedEntry = request.MoveDocumentTo(folder, entryToBeMoved);
    }
    catch (GDataRequestException exRequest)
    {
        var error = GetErrorMessage(exRequest);
        // A null response plus this message is treated as "no connectivity".
        if (exRequest.ResponseString == null && error.ToLowerInvariant().Contains("execution of request failed"))
        {
            throw new GDataRequestException("Couldn't move entry - internet down?");
        }
        Trace.WriteLine(DateTime.Now + " - NocsService - couldn't move entry: " + error);
        throw new GDataRequestException(string.Format("Couldn't move entry: {0} - {1}", entryToBeMoved.DocumentEntry.Title.Text, Tools.TrimErrorMessage(error)));
    }
    catch (Exception ex)
    {
        var error = GetErrorMessage(ex);
        throw new Exception(string.Format("Couldn't move entry: {0} - {1}", entryToBeMoved.DocumentEntry.Title.Text, error));
    }

    // let's update dictionaries
    // Keep the cached lookups in sync: store the moved entry under the old resource id.
    if (movedEntry.Type == Document.DocumentType.Folder)
    {
        AllFolders[entryToBeMoved.ResourceId] = movedEntry;
    }
    else
    {
        AllDocuments[entryToBeMoved.ResourceId] = movedEntry;
    }
}
/// <summary>
/// Ad-hoc smoke test: authenticates, enumerates the document feed, and creates an
/// empty document.
/// </summary>
private void ss()
{
    // SECURITY: credentials are hard-coded here; move them to secure configuration.
    GDataCredentials credentials = new GDataCredentials("*****@*****.**", "198ytdblbvrfpa$$w0rd");
    RequestSettings settings = new RequestSettings("Testing", credentials);
    settings.AutoPaging = true;
    settings.PageSize = 100;

    DocumentsRequest documentsRequest = new DocumentsRequest(settings);

    // Enumerate the feed; with AutoPaging on this walks every page (network side
    // effect), even though the entries themselves are not used.
    Feed<Document> documentFeed = documentsRequest.GetDocuments();
    foreach (Document document in documentFeed.Entries)
    {
    }

    // (Removed an unused local Document that was assigned but never read.)
    documentsRequest.CreateDocument(new Document());
}
/// <summary>
/// Downloads every document in the account as PDF and checks a stream comes back.
/// </summary>
[Test]
public void ModelTestDocumentDownload()
{
    RequestSettings requestSettings = new RequestSettings(this.ApplicationName, this.userName, this.passWord);
    requestSettings.AutoPaging = true;
    DocumentsRequest documentsRequest = new DocumentsRequest(requestSettings);

    // Server-default feed; AutoPaging walks all pages.
    Feed<Document> documentFeed = documentsRequest.GetDocuments();
    foreach (Document entry in documentFeed.Entries)
    {
        Assert.IsTrue(entry != null, "We should have something");
        Stream downloaded = documentsRequest.Download(entry, Document.DownloadType.pdf);
        downloaded.Close();
    }
}
/// <summary>
/// Downloads every entry twice — once with no explicit export format, once as "pdf" —
/// and checks a stream is returned each time.
/// </summary>
[Test]
public void ModelTestArbitraryDownload()
{
    RequestSettings requestSettings = new RequestSettings(this.ApplicationName, this.userName, this.passWord);
    DocumentsRequest documentsRequest = new DocumentsRequest(requestSettings);
    Feed<Document> everything = documentsRequest.GetEverything();

    // Default (null) export format first...
    foreach (Document entry in everything.Entries)
    {
        Stream stream = documentsRequest.Download(entry, null);
        Assert.IsNotNull(stream, "The download stream should not be null");
    }
    // ...then an explicit pdf export.
    foreach (Document entry in everything.Entries)
    {
        Stream stream = documentsRequest.Download(entry, "pdf");
        Assert.IsNotNull(stream, "The download stream should not be null");
    }
}
/// <summary>
/// Console entry point: dumps the full Google Docs list for the account given in
/// args[1]/args[2] into "doclist.txt" and "doclistdetails.txt".
/// </summary>
/// <param name="args">args[1] = user name, args[2] = password.</param>
public static void Exec(string[] args)
{
    Console.WriteLine("*** GetDocList ***");
    Console.WriteLine("--- START ---");

    string username = args[1];
    string password = args[2];

    GDataCredentials credentials = new GDataCredentials(username, password);
    RequestSettings settings = new RequestSettings("GDocBackup", credentials);
    settings.AutoPaging = true;
    settings.PageSize = 100;

    DocumentsRequest request = new DocumentsRequest(settings);
    Feed<Document> feed = request.GetEverything();
    List<Document> docs = new List<Document>();
    foreach (Document entry in feed.Entries)
    {
        docs.Add(entry);
    }

    // using guarantees the files are flushed and closed even if writing throws
    // (the original leaked both writers on an exception).
    using (StreamWriter outFile = new StreamWriter("doclist.txt", false))
    using (StreamWriter outFile2 = new StreamWriter("doclistdetails.txt", false))
    {
        foreach (Document doc in docs)
        {
            string s = doc.Title + "\t" + doc.ResourceId;
            Console.WriteLine(s);
            outFile.WriteLine(s);
            outFile2.WriteLine(s);
            foreach (string pf in doc.ParentFolders)
            {
                outFile2.WriteLine("\t\t\t" + pf);
            }
        }
    }

    Console.WriteLine("--- END ---");
}
/// <summary>
/// Returns every Google Docs folder as a CategoryValue, wiring up the parent
/// category id from the first parent folder where one exists.
/// </summary>
public IList <CategoryValue> SelectAll()
{
    RequestSettings settings = new RequestSettings("GoogleDocumentsSample", Service.Credentials);
    var request = new DocumentsRequest(settings);

    var categories = new List <CategoryValue>();
    var folderFeed = request.GetFolders();
    foreach (var folder in folderFeed.Entries)
    {
        var category = new CategoryValue
        {
            Name = folder.Title,
            CategoryID = GetInternalId(folder.ResourceId)
        };
        // Only the first parent folder is mapped, matching the original behavior.
        if (folder.ParentFolders.Count > 0)
        {
            category.ParentCategoryID = GetInternalId(GetGlobaId(folder.ParentFolders[0]));
        }
        categories.Add(category);
    }
    return categories;
}
/// <summary>
/// Creates a new folder in Google Docs.
/// </summary>
/// <param name="folderName">Name for the new folder.</param>
/// <exception cref="GDataRequestException">On request failure (including a friendlier message when the network looks down).</exception>
public static void CreateNewFolder(string folderName)
{
    try
    {
        var request = new DocumentsRequest(_settings);
        // Re-apply the proxy on the request factory before issuing the call.
        var reqFactory = (GDataRequestFactory)request.Service.RequestFactory;
        reqFactory.Proxy = GetProxy();
        var newFolder = new Document
        {
            Type = Document.DocumentType.Folder,
            Title = folderName
        };
        newFolder = request.CreateDocument(newFolder);

        // let's add the new directory to our folder dictionary
        if (newFolder != null)
        {
            AllFolders.Add(newFolder.ResourceId, newFolder);
        }
    }
    catch (GDataRequestException exRequest)
    {
        // Prefer the server's response body over the exception message when present.
        var error = !string.IsNullOrEmpty(exRequest.ResponseString) ? exRequest.ResponseString : exRequest.Message;
        // A null response plus this message is treated as "no connectivity".
        if (exRequest.ResponseString == null && error.ToLowerInvariant().Contains("execution of request failed"))
        {
            throw new GDataRequestException("Couldn't create folder - internet down?");
        }
        Trace.WriteLine(DateTime.Now + " - NocsService - couldn't create folder: " + error);
        throw new GDataRequestException(string.Format("Couldn't create folder: {0} - {1}", folderName, Tools.TrimErrorMessage(error)));
    }
    catch (Exception ex)
    {
        var error = GetErrorMessage(ex);
        Trace.WriteLine(DateTime.Now + " - NocsService - couldn't create folder: " + error);
        throw new Exception(string.Format("Couldn't create folder: {0} - {1}", folderName, error));
    }
}
/// <summary>
/// Walks every document's ACL feed and checks that role and scope are populated.
/// </summary>
public void ModelTestACLs()
{
    RequestSettings requestSettings = new RequestSettings(this.ApplicationName, this.userName, this.passWord);
    DocumentsRequest documentsRequest = new DocumentsRequest(requestSettings);

    Feed<Document> documentFeed = documentsRequest.GetDocuments();
    foreach (Document entry in documentFeed.Entries)
    {
        Assert.IsTrue(entry != null, "We should have something");
        Assert.IsNotNull(entry.AccessControlList);

        // Fetch the per-document ACL feed and sanity-check each of its entries.
        Feed<Acl> aclFeed = documentsRequest.Get<Acl>(entry.AccessControlList);
        foreach (Acl aclEntry in aclFeed.Entries)
        {
            Assert.IsNotNull(aclEntry.Role);
            Assert.IsNotNull(aclEntry.Scope);
            Assert.IsNotNull(aclEntry.Scope.Type);
            Assert.IsNotNull(aclEntry.Scope.Value);
        }
    }
}
/// <summary>
/// Creates a new document in Google Docs.
/// </summary>
/// <param name="folderId">An entry id for any given folder in which the new document is to be saved.</param>
/// <param name="title">Title for the new document.</param>
/// <param name="content">HTML content for the new document.</param>
/// <param name="createDefaultDirectory">
/// true = create a default directory ('Nocs')
/// fales = don't create a default directory
/// </param>
/// <returns>A newly created Document, or null when the known "could not convert document" error is hit.</returns>
/// <exception cref="GDataRequestException">On request failure (including a friendlier message when the network looks down).</exception>
public static Document CreateNewDocument(string folderId, string title, string content, bool createDefaultDirectory)
{
    DocumentEntry newEntry;
    Document newDocument;
    content = Tools.FormatEditorContentToHtml(title, content);
    try
    {
        var request = new DocumentsRequest(_settings);
        var reqFactory = (GDataRequestFactory)request.Service.RequestFactory;
        reqFactory.Proxy = GetProxy();

        // we'll first create a default 'Nocs'-folder if one isn't already created
        if (createDefaultDirectory)
        {
            var defaultFolder = new Document
            {
                Type = Document.DocumentType.Folder,
                Title = "Nocs"
            };
            defaultFolder = request.CreateDocument(defaultFolder);

            // if we created our default directory, let's add it to our folder dictionary
            if (defaultFolder != null)
            {
                AllFolders.Add(defaultFolder.ResourceId, defaultFolder);
                // The new document will be created inside the freshly made default folder.
                folderId = defaultFolder.ResourceId;
            }
        }

        SetupService(null, null, 3, null, null);
        var textStream = new MemoryStream(Encoding.UTF8.GetBytes(content));

        // we might be creating this document inside a particular folder
        var postUri = !string.IsNullOrEmpty(folderId)
            ? new Uri(string.Format(DocumentsListQuery.foldersUriTemplate, folderId))
            : new Uri(DocumentsListQuery.documentsBaseUri);

        newEntry = _documentService.Insert(postUri, textStream, DocumentContentType, title) as DocumentEntry;
    }
    catch (GDataRequestException exRequest)
    {
        // Prefer the server's response body over the exception message when present.
        var error = !string.IsNullOrEmpty(exRequest.ResponseString) ? exRequest.ResponseString : exRequest.Message;
        if (exRequest.ResponseString == null && error.ToLowerInvariant().Contains("execution of request failed"))
        {
            throw new GDataRequestException("Couldn't create document - internet down?");
        }

        // we'll also check for InvalidEntryException: Could not convert document
        // - assuming it's a known problem in GData API related to sharing, we will just ignore it
        if (error.ToLowerInvariant().Contains("could not convert document"))
        {
            Debug.WriteLine(string.Format("Couldn't convert document while creating a document: {0}", title));
            return(null);
        }

        Trace.WriteLine(string.Format("{0} - Couldn't create a new document: {1} - {2}", DateTime.Now, title, error));
        throw new GDataRequestException(string.Format("Couldn't create a new document: {0} - {1}", title, Tools.TrimErrorMessage(error)));
    }
    catch (Exception ex)
    {
        var error = GetErrorMessage(ex);
        Trace.WriteLine(DateTime.Now + " - NocsService - couldn't create a new document: " + error);
        throw new Exception(string.Format("Couldn't create document: {0} - {1}", title, error));
    }

    // let's create a new Document
    if (newEntry != null)
    {
        newEntry.IsDraft = false;
        newDocument = new Document
        {
            AtomEntry = newEntry,
            Title = title,
            Content = Tools.ParseContent(content)
        };

        // let's add the new document to our document dictionary and return it
        AllDocuments.Add(newDocument.ResourceId, newDocument);
        return(newDocument);
    }

    // we should never get here
    throw new Exception((string.Format("Couldn't create document: {0} - internet down?", title)));
}
/// <summary>
/// End-to-end test: authorizes against Google via OAuth2, creates a test note
/// ("AN_OUTLOOK_TEST_NOTE") through the Documents API, and cleans it up again —
/// first via the metadata-only flow, then via the ResumableUploader flow.
/// </summary>
public void CreateNewNote()
{
    string gmailUsername;
    string syncProfile;
    GoogleAPITests.LoadSettings(out gmailUsername, out syncProfile);

    DocumentsRequest service;

    // OAuth scopes required by the test.
    var scopes = new List <string>();
    //Contacts-Scope
    scopes.Add("https://www.google.com/m8/feeds");
    //Notes-Scope
    scopes.Add("https://docs.google.com/feeds/");
    //scopes.Add("https://docs.googleusercontent.com/");
    //scopes.Add("https://spreadsheets.google.com/feeds/");
    //Calendar-Scope
    //scopes.Add("https://www.googleapis.com/auth/calendar");
    scopes.Add(CalendarService.Scope.Calendar);

    UserCredential credential;
    byte[] jsonSecrets = Properties.Resources.client_secrets;
    using (var stream = new MemoryStream(jsonSecrets))
    {
        FileDataStore fDS = new FileDataStore(Logger.AuthFolder, true);
        GoogleClientSecrets clientSecrets = GoogleClientSecrets.Load(stream);

        credential = GCSMOAuth2WebAuthorizationBroker.AuthorizeAsync(
            clientSecrets.Secrets,
            scopes.ToArray(),
            gmailUsername,
            CancellationToken.None,
            fDS).Result;

        // Bridge the new OAuth2 credential into the legacy GData request stack.
        OAuth2Parameters parameters = new OAuth2Parameters
        {
            ClientId = clientSecrets.Secrets.ClientId,
            ClientSecret = clientSecrets.Secrets.ClientSecret,
            // Note: AccessToken is valid only for 60 minutes
            AccessToken = credential.Token.AccessToken,
            RefreshToken = credential.Token.RefreshToken
        };

        RequestSettings settings = new RequestSettings("GoContactSyncMod", parameters);
        service = new DocumentsRequest(settings);

        //Instantiate an Authenticator object according to your authentication, to use ResumableUploader
        _authenticator = new OAuth2Authenticator("GCSM Unit Tests", parameters);
    }

    //Delete previously created test note.
    DeleteTestNote(service);

    Document newEntry = new Document();
    newEntry.Type = Document.DocumentType.Document;
    newEntry.Title = "AN_OUTLOOK_TEST_NOTE";
    string file = NotePropertiesUtils.CreateNoteFile("AN_OUTLOOK_TEST_NOTE", "This is just a test note to test GoContactSyncMod", null);
    newEntry.MediaSource = new MediaFileSource(file, MediaFileSource.GetContentTypeForFileName(file));

    #region normal flow, only working to create documents without content (metadata only), e.g. for Notes folder
    Document createdEntry = Synchronizer.SaveGoogleNote(null, newEntry, service);
    Assert.IsNotNull(createdEntry.DocumentEntry.Id.Uri);
    Logger.Log("Created Google note", EventType.Information);

    //Wait 5 seconds to give the testcase the chance to finish
    System.Threading.Thread.Sleep(5000);

    //delete test note
    DeleteTestNote(service);
    #endregion

    #region workaround flow to use UploadDocument, not needed anymore because of new approach to use ResumableUploader
    //Google.GData.Documents.DocumentEntry createdEntry2 = service.Service.UploadDocument(file, newEntry.Title);
    //Assert.IsNotNull(createdEntry2.Id.Uri);

    ////delete test note
    //DeleteTestNote(service);
    #endregion

    #region New approach how to update an existing document: https://developers.google.com/google-apps/documents-list/#updatingchanging_documents_and_files
    //Instantiate the ResumableUploader component.
    ResumableUploader uploader = new ResumableUploader();
    uploader.AsyncOperationCompleted += new AsyncOperationCompletedEventHandler(OnGoogleNoteCreated);
    Synchronizer.CreateGoogleNote(newEntry, file, service, uploader, _authenticator);
    #endregion

    //Wait 5 seconds to give the testcase the chance to finish the Async events
    System.Threading.Thread.Sleep(5000);

    DeleteTestNote(service);
}
/// <summary>
/// Wraps an already-authorized OAuth session for Google Docs access.
/// </summary>
/// <param name="token">Authorized OAuth session whose access token is used for requests.</param>
/// <param name="email">E-mail address of the account the session belongs to.</param>
public GoogleDocs(GoogleOAuthSession token, string email)
{
    m_token = token;
    m_email = email;

    // Build the Documents request from the session's access token.
    var settings = new RequestSettings(MvcApplication.APPNAME, m_token.access_token);
    m_request = new DocumentsRequest(settings);
}
/// <summary>
/// Creates a folder and a document, moves the document into the folder, and verifies
/// it shows up in the folder's content feed (matched by DocumentId).
/// </summary>
[Test]
public void ModelTestMoveDocuments()
{
    const string folderTitle = "That is a new & weird folder";
    const string docTitle = "that's the doc";

    RequestSettings requestSettings = new RequestSettings(this.ApplicationName, this.userName, this.passWord);
    DocumentsRequest documentsRequest = new DocumentsRequest(requestSettings);

    // Create the target folder first.
    Document folder = new Document();
    folder.Type = Document.DocumentType.Folder;
    folder.Title = folderTitle;
    folder = documentsRequest.CreateDocument(folder);
    Assert.IsTrue(folder.Title == folderTitle);

    // Then the document that will be moved into it.
    Document doc = new Document();
    doc.Type = Document.DocumentType.Document;
    doc.Title = docTitle;
    doc = documentsRequest.CreateDocument(doc);

    documentsRequest.MoveDocumentTo(folder, doc);

    // The folder's content feed must now contain the moved document.
    bool found = false;
    Feed<Document> folderContent = documentsRequest.GetFolderContent(folder);
    foreach (Document child in folderContent.Entries)
    {
        if (child.DocumentId == doc.DocumentId)
        {
            found = true;
            break;
        }
    }
    Assert.IsTrue(found, "should have found the document in the folder");
}
/// <summary>
/// tests etag refresh on an entry level
/// </summary>
[Test]
public void ModelTestEntryETagRefresh()
{
    var settings = new RequestSettings(this.ApplicationName, this.userName, this.passWord);
    // settings.PageSize = 15;
    var request = new DocumentsRequest(settings);

    // this returns the server default answer
    Feed<Document> feed = request.GetDocuments();
    Document last = null;
    foreach (Document entry in feed.Entries)
    {
        Assert.IsTrue(entry != null, "We should have something");
        last = entry;
    }
    Assert.IsTrue(last != null, "We should have something");

    // retrieving the same (unchanged) entry again should yield a not-modified answer
    try
    {
        Document refreshed = request.Retrieve(last);
        Assert.IsTrue(refreshed == null, "we should not be here");
    }
    catch (GDataNotModifiedException g)
    {
        Assert.IsTrue(g != null);
    }
}
/// <summary>
/// Verifies that refreshing an unchanged feed results in a not-modified
/// response rather than re-delivered entries.
/// </summary>
[Test]
public void ModelTestFeedETagRefresh()
{
    var settings = new RequestSettings(this.ApplicationName, this.userName, this.passWord);
    // settings.PageSize = 15;
    var request = new DocumentsRequest(settings);

    // this returns the server default answer
    Feed<Document> feed = request.GetDocuments();
    foreach (Document entry in feed.Entries)
    {
        Assert.IsTrue(entry != null, "We should have something");
    }

    // refreshing the same feed should come back as not-modified
    Feed<Document> reload = request.Get(feed, FeedRequestType.Refresh);
    try
    {
        foreach (Document entry in reload.Entries)
        {
            Assert.IsTrue(entry == null, "We should not get here");
        }
    }
    catch (GDataNotModifiedException g)
    {
        Assert.IsTrue(g != null);
    }
}
/// <summary>
/// Tests folder handling: creates and deletes a folder, then builds a
/// parent/child folder hierarchy and verifies both folders appear in the
/// folder feed and that the child links back to the parent.
/// (The original summary said "etag refresh" - that looks like a copy/paste leftover.)
/// </summary>
[Test]
public void ModelTestFolders()
{
    const string testTitle = "That is a new & weird subfolder";
    const string parentTitle = "Granddaddy folder";
    string parentID;
    string folderID;

    RequestSettings settings = new RequestSettings(this.ApplicationName, this.userName, this.passWord);
    // settings.PageSize = 15;
    DocumentsRequest r = new DocumentsRequest(settings);

    Document folder = new Document();
    folder.Type = Document.DocumentType.Folder;
    folder.Title = testTitle;

    /// first create the folder
    folder = r.CreateDocument(folder);
    Assert.IsTrue(folder.Title == testTitle);
    r.Delete(folder);

    // let's create a hierarchy
    Document parent = new Document();
    parent.Type = Document.DocumentType.Folder;
    parent.Title = parentTitle;
    parent = r.CreateDocument(parent);
    parentID = parent.Id;

    // create the child
    folder = new Document();
    folder.Type = Document.DocumentType.Folder;
    folder.Title = testTitle;

    /// first create the folder
    folder = r.CreateDocument(folder);
    folderID = folder.Id;

    // now move the folder into the parent
    r.MoveDocumentTo(parent, folder);

    // now get the folder list
    Feed<Document> folders = r.GetFolders();
    // iVerify is decremented once when the parent is seen and once for the
    // child; it must reach 0 when both showed up in the feed.
    int iVerify = 2;
    List<Document> list = new List<Document>();
    foreach (Document f in folders.Entries)
    {
        list.Add(f);
    }
    bool found = false;
    foreach (Document f in list)
    {
        Assert.IsTrue(f.Type == Document.DocumentType.Folder, "this should be a folder");
        if (f.Id == parentID)
        {
            iVerify--;
        }
        if (f.Id == folderID)
        {
            iVerify--;
            // let's find the guy again.
            // the child's ParentFolders must contain some listed folder's Self URI
            foreach (Document d in list)
            {
                if (f.ParentFolders.Contains(d.Self))
                {
                    found = true;
                    break;
                }
            }
        }
    }
    Assert.IsTrue(found, "we did not find the parent folder");
    Assert.IsTrue(iVerify == 0, "We should have found both folders");
}
/// <summary>
/// Downloads the contents of a single document.
/// </summary>
/// <param name="doc">A Document object whose content is to be downloaded.</param>
/// <returns>The same Document with its Content property populated from the exported html.</returns>
/// <exception cref="GDataRequestException">When the download request fails.</exception>
/// <exception cref="Exception">When the returned html cannot be parsed.</exception>
public static Document GetDocumentContent(Document doc)
{
    string html;
    var newStyleDocument = false;

    try
    {
        var request = new DocumentsRequest(_settings);
        var reqFactory = (GDataRequestFactory)request.Service.RequestFactory;
        reqFactory.Proxy = GetProxy();

        var contentAbsoluteUri = doc.DocumentEntry.Content.AbsoluteUri;
        // new-style documents are served from the export/Export endpoint and are parsed differently below
        newStyleDocument = contentAbsoluteUri.Contains("export/Export");

        var requestUri = string.Format("{0}&exportFormat={1}&format={1}", contentAbsoluteUri, "html");
        var stream = request.Service.Query(new Uri(requestUri));
        //var stream = request.Download(doc, Document.DownloadType.html);

        // FIX: dispose the reader (and the underlying response stream); it was previously leaked
        using (var reader = new StreamReader(stream))
        {
            // let's read the stream to end to retrieve the entire html
            html = reader.ReadToEnd();
        }
    }
    catch (GDataRequestException exRequest)
    {
        var error = GetErrorMessage(exRequest);
        if (exRequest.ResponseString == null && error.ToLowerInvariant().Contains("execution of request failed"))
        {
            throw new GDataRequestException("Couldn't download content - internet down?");
        }

        var knownIssues = ConsecutiveKnownIssuesOccurred(GetContentLock, "GetDocumentContent", doc, error, ref _getContentAttempts, 1);
        if (knownIssues == KnownIssuesResult.Retry)
        {
            // known transient issue: retry recursively, then reset the attempt counter
            doc = GetDocumentContent(doc);
            doc.Summary = null;
            _getContentAttempts = 0;
            return doc;
        }
        if (knownIssues == KnownIssuesResult.LimitReached)
        {
            return doc;
        }

        Trace.WriteLine(DateTime.Now + " - NocsService - couldn't download content: " + error);
        throw new GDataRequestException(string.Format("Couldn't download document: {0} - {1}", doc.DocumentEntry.Title.Text, Tools.TrimErrorMessage(error)));
    }
    catch (Exception ex)
    {
        var error = GetErrorMessage(ex);
        throw new Exception(string.Format("Couldn't download document: {0} - {1}", doc.DocumentEntry.Title.Text, error));
    }

    // let's first parse the Google Docs -specific html content
    var match = Tools.GetMatchForDocumentContent(html);
    if (match.Success)
    {
        // body found, let's now tweak the content before returning it
        var content = match.Groups[1].Value;
        doc.Content = newStyleDocument ? Tools.ParseNewStyleContent(content) : Tools.ParseContent(content);
        return doc;
    }

    // if we get here, something went wrong - document content doesn't match
    throw new Exception("Invalid html content for document: " + doc.DocumentEntry.Title.Text);
}
/// <summary>
/// Updates a given DocumentEntry with new content.
/// </summary>
/// <param name="doc">Document object to be updated.</param>
/// <returns>
/// The saved document; on an ETag mismatch (edited elsewhere) the document is
/// returned unchanged with Summary set to "unchanged".
/// </returns>
/// <exception cref="GDataRequestException">When the save request fails.</exception>
public static Document SaveDocument(Document doc)
{
    Working = true;
    var entryToUpdate = doc.DocumentEntry;
    var updatedContent = Tools.FormatEditorContentToHtml(doc.Title, doc.Content);

    try
    {
        // the media feed is used to update a document's content body:
        // http://docs.google.com/feeds/default/media/ResourceId
        var mediaUri = new Uri(string.Format(DocumentsListQuery.mediaUriTemplate, doc.ResourceId));

        // FIX: dispose the MemoryStream instead of leaking it
        using (var textStream = new MemoryStream(Encoding.UTF8.GetBytes(updatedContent)))
        {
            var request = new DocumentsRequest(_settings);
            var reqFactory = (GDataRequestFactory)request.Service.RequestFactory;
            reqFactory.Proxy = GetProxy();

            // let's set ETag because we're making an update
            reqFactory.CustomHeaders.Add(string.Format("{0}: {1}", GDataRequestFactory.IfMatch, entryToUpdate.Etag));
            var oldEtag = entryToUpdate.Etag;

            doc.AtomEntry = request.Service.Update(mediaUri, textStream, DocumentContentType, entryToUpdate.Title.Text) as DocumentEntry;
            Debug.WriteLine(string.Format("ETag changed while saving {0}: {1} -> {2}", entryToUpdate.Title.Text, oldEtag, doc.ETag));
        }
    }
    catch (GDataRequestException exRequest)
    {
        var response = exRequest.Response as HttpWebResponse;
        if (response != null && response.StatusCode == HttpStatusCode.PreconditionFailed && exRequest.ResponseString.ToLowerInvariant().Contains("etagsmismatch"))
        {
            // ETags don't match -> this document has been updated outside this instance of Nocs
            // therefore instead of saving this file we will just wait for an update
            // TODO: implement some faster way for updating?
            Debug.WriteLine(string.Format("ETags don't match for {0} - document updated outside Nocs - returning an unchanged document", doc.ETag));
            doc.Summary = "unchanged";
            return doc;
        }

        var error = GetErrorMessage(exRequest);
        if (exRequest.ResponseString == null && error.ToLowerInvariant().Contains("execution of request failed"))
        {
            throw new GDataRequestException("Couldn't download content, connection timed out");
        }

        // we'll also check for InvalidEntryException: Could not convert document
        // - assuming it's a known problem in GData API related to sharing, we will update the document and return it
        if (error.ToLowerInvariant().Contains("could not convert document"))
        {
            Debug.WriteLine(string.Format("Couldn't convert document: {0} -> updating it..", doc.DocumentEntry.Title.Text));
            var updated = GetUpdatedDocument(doc);
            if (updated != null)
            {
                doc = updated;
            }
        }
        else
        {
            Trace.WriteLine(string.Format("{0} - Couldn't save document: {1} - {2}", DateTime.Now, doc.DocumentEntry.Title.Text, error));
            throw new GDataRequestException(string.Format("Couldn't save document: {0} - {1}", doc.DocumentEntry.Title.Text, Tools.TrimErrorMessage(error)));
        }
    }
    catch (Exception ex)
    {
        var error = GetErrorMessage(ex);
        throw new Exception(string.Format("Couldn't save document: {0} - {1}", doc.DocumentEntry.Title.Text, error));
    }
    finally
    {
        Working = false;
    }

    // let's update internal directory to avoid ETag-mismatch
    AllDocuments[doc.ResourceId] = doc;
    return doc;
}
/// <summary>
/// Fetches/updates all DocumentEntries in a Dictionary and wraps them in a Noc class.
/// Rebuilds the AllDocuments and AllFolders dictionaries (keyed by ResourceId)
/// from a fresh GetEverything() feed.
/// </summary>
/// <exception cref="Exception">When the user hasn't been authenticated, or on an unknown error.</exception>
/// <exception cref="GDataRequestException">When the feed request fails.</exception>
public static void UpdateAllEntries()
{
    if (AllDocuments == null)
    {
        AllDocuments = new Dictionary<string, Document>();
    }
    if (AllFolders == null)
    {
        AllFolders = new Dictionary<string, Document>();
    }

    // let's first make sure the user is authenticated
    if (_settings == null)
    {
        throw new Exception("User hasn't been authenticated - internet down?");
    }

    try
    {
        var request = new DocumentsRequest(_settings)
        {
            Service = { ProtocolMajor = 3 }
            //BaseUri = DocumentsListQuery.documentsAclUri
        };
        var reqFactory = (GDataRequestFactory)request.Service.RequestFactory;
        reqFactory.Proxy = GetProxy();

        // we'll fetch all entries
        AllEntriesFeed = request.GetEverything();

        // if we've already retrieved items, let's clear the dictionaries before updating them
        if (AllDocuments.Count > 0)
        {
            AllDocuments.Clear();
        }
        if (AllFolders.Count > 0)
        {
            AllFolders.Clear();
        }

        foreach (var entry in AllEntriesFeed.Entries)
        {
            // let's only add documents and folders
            // NOTE(review): Add() throws on a duplicate ResourceId - presumably the
            // feed never repeats one; confirm, or use the indexer instead.
            if (entry.Type == Document.DocumentType.Document)
            {
                AllDocuments.Add(entry.ResourceId, entry);
            }
            else if (entry.Type == Document.DocumentType.Folder)
            {
                AllFolders.Add(entry.ResourceId, entry);
            }
        }
    }
    catch (GDataNotModifiedException)
    {
        // since doclist updates timestamps on feeds based on access,
        // etags are useless here and we shouldn't get here
        return;
    }
    catch (GDataRequestException exRequest)
    {
        var error = GetErrorMessage(exRequest);
        if (exRequest.ResponseString == null && error.ToLowerInvariant().Contains("execution of request failed"))
        {
            throw new GDataRequestException("Couldn't fetch all entries - internet down?");
        }
        Trace.WriteLine(string.Format("\n{0} - NocsService: couldn't fetch all entries: {1}\n", DateTime.Now, error));
        throw new GDataRequestException(Tools.TrimErrorMessage(error));
    }
    catch (Exception ex)
    {
        var error = GetErrorMessage(ex);
        Trace.WriteLine(string.Format("\n{0} - NocsService: couldn't fetch all entries: {1}\n", DateTime.Now, error));
        throw new Exception(error);
    }
}
/// <summary>
/// Scratch/test method: lists all documents of a hard-coded account
/// (enumerating the auto-paged feed) and then creates one empty document.
/// </summary>
private void ss()
{
    // NOTE(review): hard-coded (scrubbed) credentials - move to configuration or a
    // secret store; never keep passwords in source.
    GDataCredentials credentials = new GDataCredentials("*****@*****.**", "198ytdblbvrfpa$$w0rd");
    RequestSettings settings = new RequestSettings("Testing", credentials);
    settings.AutoPaging = true;
    settings.PageSize = 100;

    DocumentsRequest documentsRequest = new DocumentsRequest(settings);
    Feed<Document> documentFeed = documentsRequest.GetDocuments();

    // enumerating the entries forces the auto-paged request to actually execute
    foreach (Document document in documentFeed.Entries)
    {
    }

    // FIX: removed an unused local ("Document doc = new Document();") that was never referenced
    documentsRequest.CreateDocument(new Document());
}
/// <summary>
/// Timer-triggered entry point (every 5 minutes) that scans GlobalX for newly
/// completed PDF document versions and starts one durable orchestration per
/// version to copy it into Actionstep. Per-org and per-version failures are
/// collected and rethrown as a single AggregateException at the end.
/// </summary>
/// <param name="timerInfo">Timer trigger metadata (not used in the body).</param>
/// <param name="starter">Durable client used to query and start orchestrations.</param>
/// <exception cref="ArgumentNullException">When <paramref name="starter"/> is null.</exception>
/// <exception cref="AggregateException">When any failure occurred during processing.</exception>
public async Task Run(
    [TimerTrigger("0 */5 * * * *")] TimerInfo timerInfo,
    [DurableClient] IDurableOrchestrationClient starter)
{
    if (starter is null)
    {
        throw new ArgumentNullException(nameof(starter));
    }

    // only orgs that have document sync enabled
    var allGlobalXSettings = await _mediator.Send(new GlobalXOrgSettingsQuery() { DocumentSyncEnabled = true });

    // failures are collected (not rethrown immediately) so one org cannot block the others
    var allExceptions = new List<Exception>();
    foreach (var globalXSettings in allGlobalXSettings)
    {
        var lastDocumentSyncInstant = Instant.FromDateTimeUtc(globalXSettings.LastDocumentSyncUtc);
        try
        {
            var validator = new GlobalXOrgSettings.Validator();
            validator.ValidateAndThrow(globalXSettings);
        }
        catch (ValidationException vex)
        {
            // NOTE(review): invalid settings are only logged - processing still continues
            // into the next try block for this org; confirm that is intended.
            allExceptions.Add(vex);
            _logger.LogError(vex, $"Error encountered processing documents for org key'{globalXSettings?.ActionstepOrgKey}'. Settings are invalid.");
        }
        try
        {
            var documentsQuery = new DocumentsRequest()
            {
                UserId = globalXSettings.GlobalXAdminId,
                After = lastDocumentSyncInstant.WithOffset(Offset.Zero),
                Statuses = { DocumentStatus.Complete }
            };
            // captured before streaming so the watermark covers the whole scan window
            var thisSyncTime = _clock.GetCurrentInstant();

            await foreach (var documentWithoutVersions in _globalXService.GetDocuments(documentsQuery))
            {
                if (!documentWithoutVersions.DocumentId.HasValue)
                {
                    _logger.LogError("Error encountered processing document. Response was missing DocumentId for" +
                        " org '{ActionstepOrgKey}', GlobalX Admin ID: '{GlobalXAdminId}'",
                        globalXSettings?.ActionstepOrgKey, globalXSettings?.GlobalXAdminId);
                    continue;
                }

                Document documentWithVersions;
                try
                {
                    documentWithVersions = await _globalXService.GetDocument(documentWithoutVersions.DocumentId.Value, globalXSettings.GlobalXAdminId);
                }
                catch (Exception ex)
                {
                    _logger.LogError(ex, "Error encountered retrieving document version information for DocumentId '{DocumentId}'" +
                        " org '{ActionstepOrgKey}', GlobalX Admin ID: '{GlobalXAdminId}'",
                        documentWithoutVersions.DocumentId.Value, globalXSettings?.ActionstepOrgKey, globalXSettings?.GlobalXAdminId);
                    continue;
                }

                foreach (var documentVersion in documentWithVersions.DocumentVersions)
                {
                    try
                    {
                        var documentVersionId = documentVersion.DocumentVersionId.ToString();

                        // Only process "Complete" documents
                        if (documentVersion.StatusDescription != DocumentStatus.Complete.ToString())
                        {
                            _logger.LogInformation("Skipping document version '{DocumentVersionId}' and statys '{Status}', because it's status is not {RequiredDocumentStatus}.",
                                documentVersionId, documentVersion.StatusDescription, DocumentStatus.Complete.ToString());
                            continue;
                        }

                        // Only process PDFs
                        if (documentVersion.MimeType != _mimeTypePdf)
                        {
                            _logger.LogInformation("Skipping document version '{DocumentVersionId}' and Mime type '{DocumentMimeType}', as it does not have the mime type {RequiredMimeType}.",
                                documentVersionId, documentVersion.MimeType, _mimeTypePdf);
                            continue;
                        }

                        // Only process document versions updated since the last sync
                        if (documentVersion.Timestamp.Value.ToInstant() < lastDocumentSyncInstant)
                        {
                            _logger.LogInformation("Skipping document version '{DocumentVersionId}' with Timestamp '{Timestamp}', because it's timestamp is before the last sync job time of 'LastDocumentSync' which means that this document version should already have been processed.",
                                documentVersionId, documentVersion.Timestamp.Value, lastDocumentSyncInstant);
                            continue;
                        }

                        // deterministic instance id makes starting the orchestration idempotent
                        var instanceId = GlobalXDocumentSyncOrchestrator.InstancePrefix + documentVersionId;
                        var existingInstance = await starter.GetStatusAsync(instanceId);
                        if (existingInstance is null)
                        {
                            _logger.LogDebug("About to start '{OrchestratorName}' for document version '{DocumentVersionId}'.",
                                nameof(GlobalXDocumentSyncOrchestrator), documentVersionId, documentVersion.Timestamp.Value, lastDocumentSyncInstant);
                            await starter.StartNewAsync(
                                orchestratorFunctionName: nameof(GlobalXDocumentSyncOrchestrator),
                                instanceId: instanceId,
                                input: new CopyDocumentVersionToActionstepCommand()
                                {
                                    GlobalXUserId = globalXSettings.GlobalXAdminId,
                                    ActionstepUserId = globalXSettings.ActionstepSyncUserId,
                                    ActionstepOrgKey = globalXSettings.ActionstepOrgKey,
                                    MinimumMatterIdToSync = globalXSettings.MinimumMatterIdToSync,

                                    // Use document without versions because we only care about the single version being
                                    // processed by this orchestrator. Any remaining versions are unnecessary.
                                    Document = documentWithoutVersions,
                                    DocumentVersion = documentVersion
                                });
                        }
                        else
                        {
                            _logger.LogInformation("Orchestration '{OrchestratorName}' for document version '{DocumentVersionId}' is already running, so does not need to be started",
                                nameof(GlobalXDocumentSyncOrchestrator), documentVersionId);
                        }
                    }
                    catch (Exception ex)
                    {
                        allExceptions.Add(ex);
                        _logger.LogError(ex, "Error encountered processing document version '{DocumentVersionId}' for" +
                            " org '{ActionstepOrgKey}', GlobalX Admin ID: '{GlobalXAdminId}'",
                            documentVersion?.DocumentVersionId, globalXSettings?.ActionstepOrgKey, globalXSettings?.GlobalXAdminId);
                    }
                }
            }

            // persist the watermark only after the whole org was processed
            await _mediator.Send(new SetLastDocumentSyncTimeCommand(globalXSettings.ActionstepOrgKey, thisSyncTime));
        }
        catch (Exception ex)
        {
            allExceptions.Add(ex);
            _logger.LogError(ex, "Error encountered while retrieving document versions for" +
                " org '{ActionstepOrgKey}', GlobalX Admin ID: '{GlobalXAdminId}'",
                globalXSettings?.ActionstepOrgKey, globalXSettings?.GlobalXAdminId);
        }
    }

    if (allExceptions.Count > 0)
    {
        // If there were any failures, throwing ensures that the TimerJob shows up as failed.
        throw new AggregateException("One or more failures encountered while processing GlobalX documents.", allExceptions);
    }
}
/// <summary>
/// Exec backup (internal): downloads every exportable document of a single user
/// to the local output directory, skipping files that are already up to date.
/// </summary>
/// <param name="username">Account to back up (used for apps-domain OAuth and per-user output paths).</param>
/// <returns>Number of documents that failed to download after all retries.</returns>
private int ExecBackupSingleUser(string username)
{
    DoFeedback(new string('-', 80));
    // FIX: restored the username concatenation; the previous line contained a
    // scrubbed, non-compiling literal ("...username="******" ---").
    DoFeedback("--- ExecBackupSingleUser - username=" + username + " ---");
    DoFeedback(new string('-', 80));

    _lastException = null;
    _duplicatedDocNames = new List<string>();

    // Setup credentials and connection
    DoFeedback("Setup connection & get doc list");
    RequestSettings settings;
    if (_config.appsMode == false)
    {
        GDataCredentials credentials = new GDataCredentials(_config.userName, _config.password);
        settings = new RequestSettings("GDocBackup", credentials);
        settings.AutoPaging = true;
        settings.PageSize = 100;
    }
    else
    {
        settings = new RequestSettings("GDocBackup", _config.appsDomain, _config.appsOAuthSecret, username, _config.appsDomain);
        settings.AutoPaging = true;
        settings.PageSize = 100;
        //settings.Maximum = 10000;
    }
    DocumentsRequest request = new DocumentsRequest(settings);
    if (_config.iwebproxy != null)
    {
        request.Proxy = _config.iwebproxy;
    }

    // Get doc list from GDocs
    Feed<Document> feed = request.GetEverything();
    List<Document> docs = new List<Document>();
    foreach (Document entry in feed.Entries)
    {
        docs.Add(entry);
    }

    // Search for duplicated doc names in the same folder
    _duplicatedDocNames = this.FindDuplicatedNames(docs);
    DoFeedback("Duplicated Doc Names [" + _duplicatedDocNames.Count + "]");
    _duplicatedDocNames.ForEach(delegate(string s) { DoFeedback(" - " + s); });

    // Builds/updates local folder structure
    if (_config.appsMode)
    {
        this.BuildFolders(null, docs, Path.Combine(_config.outDir, username));
    }
    else
    {
        this.BuildFolders(null, docs, _config.outDir);
    }
    foreach (String k in _folderDict.Keys)
    {
        DoFeedbackDebug("FolderDict: " + k + " --> " + _folderDict[k]);
    }
    this.DumpAllDocInfo(docs);

    // Main Docs loop!
    int errorCount = 0;
    for (int i = 0; i < docs.Count; i++)
    {
        Document doc = docs[i];
        DoFeedback("ITEM: " + doc.Title + " (" + doc.Type + ") [" + (i + 1).ToString() + "/" + docs.Count + "]", ((double)i) / docs.Count);

        // Pick export formats per document type (null -> nothing to download)
        Document.DownloadType[] downloadTypes = null;
        switch (doc.Type)
        {
            case Document.DocumentType.Document:
                downloadTypes = _config.docExpType;
                break;
            case Document.DocumentType.Presentation:
                downloadTypes = _config.presExpType;
                break;
            case Document.DocumentType.Spreadsheet:
                downloadTypes = _config.sprdExpType;
                break;
            case Document.DocumentType.PDF:
                downloadTypes = new Document.DownloadType[] { Document.DownloadType.pdf };
                break;
            case Document.DocumentType.Drawing:
                downloadTypes = _config.drawExpType;
                break;
            case Document.DocumentType.Unknown:
                // download format not used! It's only a "place-holder".
                downloadTypes = new Document.DownloadType[] { Document.DownloadType.zip };
                break;
            default:
                break;
        }

        if (downloadTypes != null)
        {
            // retry each document up to maxTentativi times
            int maxTentativi = 2;
            for (int tentativi = 0; tentativi < maxTentativi; tentativi++)
            {
                try
                {
                    foreach (Document.DownloadType downloadtype in downloadTypes)
                    {
                        // Build local file path
                        string outFolderPath;
                        if (doc.ParentFolders.Count == 0)
                        {
                            outFolderPath = _config.appsMode ? Path.Combine(_config.outDir, username) : _config.outDir;
                        }
                        else
                        {
                            DoFeedback("Try to get folder from dict using key=[" + doc.ParentFolders[0] + "]");
                            outFolderPath = _folderDict[doc.ParentFolders[0]];
                        }
                        string outFileFP = (doc.Type == Document.DocumentType.Unknown)
                            ? Path.Combine(outFolderPath, this.RemoveInvalidChars(doc.Title, true))
                            : Path.Combine(outFolderPath, this.RemoveInvalidChars(doc.Title, false) + "." + ConvertDownloadTypeToFileExtension(downloadtype));

                        // Get current local file infos
                        FileInfo fi = new FileInfo(outFileFP);
                        DateTime locFileDateTime = fi.LastWriteTime;
                        DateTime gdocFileDateTime = doc.Updated;
                        // Mono and/or Ubuntu (...linux) does not support milliseconds info when saving
                        // DataTime to FileInfo.LastWriteTime. So... I remove it! :)
                        locFileDateTime = this.RemoveMilliseconds(locFileDateTime);
                        gdocFileDateTime = this.RemoveMilliseconds(gdocFileDateTime);

                        // Decide whether to download: missing file, forced, or timestamp mismatch
                        bool downloadDoc = (!fi.Exists || _config.downloadAll);
                        if (_config.dateDiff.HasValue)
                        {
                            if (Math.Abs(locFileDateTime.Subtract(gdocFileDateTime).TotalSeconds) > _config.dateDiff.Value)
                            {
                                downloadDoc = true;
                            }
                        }
                        else
                        {
                            if (locFileDateTime != gdocFileDateTime)
                            {
                                downloadDoc = true;
                            }
                        }

                        if (downloadDoc)
                        {
                            DoFeedback("Start exporting " + doc.Title + "(Type=" + doc.Type + ") --> " + downloadtype.ToString());
                            Stream gdocStream = null;
                            try
                            {
                                if (doc.Type == Document.DocumentType.Unknown)
                                {
                                    String downloadUrl = doc.DocumentEntry.Content.Src.ToString();
                                    Uri downloadUri = new Uri(downloadUrl);
                                    if (_config.appsMode)
                                    {
                                        // add xoauth_requestor_id to the doc url if not present
                                        if (!downloadUrl.Contains("xoauth_requestor_id="))
                                        {
                                            downloadUri = new Uri(downloadUrl + "&xoauth_requestor_id=" + this.BuildDomainUserFullName(username));
                                        }
                                    }
                                    gdocStream = request.Service.Query(downloadUri);
                                }
                                else if (doc.Type == Document.DocumentType.Document)
                                {
                                    gdocStream = request.Download(doc, downloadtype.ToString());
                                }
                                else if (doc.Type == Document.DocumentType.Spreadsheet)
                                {
                                    gdocStream = request.Download(doc, downloadtype.ToString());
                                }
                                else if (doc.Type == Document.DocumentType.Presentation)
                                {
                                    gdocStream = request.Download(doc, downloadtype.ToString());
                                }
                                else if (doc.Type == Document.DocumentType.Drawing)
                                {
                                    gdocStream = request.Download(doc, downloadtype.ToString());
                                }
                                else if (doc.Type != Document.DocumentType.PDF)
                                {
                                    // fallback: note this overload takes the enum, not its string form
                                    gdocStream = request.Download(doc, downloadtype);
                                }
                                else
                                {
                                    // *** PDF ***
                                    if (_config.appsMode)
                                    {
                                        // add xoauth_requestor_id to the doc url if not present
                                        string url = doc.DocumentEntry.Content.Src.ToString();
                                        if (!url.Contains("xoauth_requestor_id="))
                                        {
                                            doc.DocumentEntry.Content.Src = new AtomUri(url + "&xoauth_requestor_id=" + this.BuildDomainUserFullName(username));
                                        }
                                    }
                                    gdocStream = request.Download(doc, null);
                                }

                                using (FileStream outFile = new FileStream(outFileFP, FileMode.Create, FileAccess.Write))
                                {
                                    byte[] buffer = new byte[8192];
                                    int bytesRead;
                                    while ((bytesRead = gdocStream.Read(buffer, 0, buffer.Length)) > 0)
                                    {
                                        outFile.Write(buffer, 0, bytesRead);
                                    }
                                    outFile.Close();
                                }
                                gdocStream.Close();
                            }
                            finally
                            {
                                if (gdocStream != null)
                                {
                                    gdocStream.Dispose();
                                }
                            }

                            // mirror the remote timestamp so the next run can skip unchanged docs
                            new FileInfo(outFileFP).LastWriteTime = doc.Updated;
                            DoFeedback("End exporting " + doc.Title + "(Type=" + doc.Type + ") --> " + downloadtype.ToString());

                            // ------------------------------------------------------------------------------------------------------------------------
                            // Workaround for Issue 100 - http://code.google.com/p/gdocbackup/issues/detail?id=100
                            if (doc.Type == Document.DocumentType.Presentation)
                            {
                                bool isPPTX = false;
                                using (FileStream presentationFile = new FileStream(outFileFP, FileMode.Open, FileAccess.Read))
                                {
                                    int byte1 = presentationFile.ReadByte();
                                    int byte2 = presentationFile.ReadByte();
                                    isPPTX = (byte1 == 80 && byte2 == 75); // 80 75 = "PK" (pptx is a zip. Every zip starts with "PK")
                                    presentationFile.Close();
                                }
                                if (!isPPTX)
                                {
                                    string newName = outFileFP.Remove(outFileFP.Length - 1);
                                    File.Delete(newName);
                                    File.Move(outFileFP, newName);
                                    DoFeedback("Presentation API bug: renaming output file [" + newName + "]");
                                }
                            }
                            // ------------------------------------------------------------------------------------------------------------------------
                        }
                        else
                        {
                            DoFeedback("Skipped doc: " + doc.Title);
                        }

                        // Send Feedback
                        DoFeedback(new FeedbackObject(
                            (_config.appsMode ? username + "#" + doc.Title : doc.Title),
                            doc.Type.ToString(),
                            (doc.Type == Document.DocumentType.Unknown) ? "BIN" : downloadtype.ToString(),
                            downloadDoc ? "BCKUP" : "SKIP",
                            "",
                            locFileDateTime,
                            gdocFileDateTime));

                        // success: break out of the retry loop
                        tentativi = maxTentativi;
                    }
                }
                catch (Exception ex)
                {
                    if (tentativi == maxTentativi - 1)
                    {
                        // last attempt failed: count the error and report it
                        errorCount++;
                        DoFeedback("DOC-ERROR: " + ex.ToString());
                        DoFeedback(new FeedbackObject(
                            (_config.appsMode ? username + "#" + doc.Title : doc.Title),
                            doc.Type.ToString(), "", "ERROR", "", null, null));
                    }
                    else
                    {
                        DoFeedback("DOC-ERROR: (attempt " + tentativi + ") " + ex.ToString());
                    }
                }
            }
        }
        else
        {
            if (doc.Type != Document.DocumentType.Folder)
            {
                DoFeedback(new FeedbackObject(doc.Title, doc.Type.ToString(), "", "NONE", "", null, null));
            }
        }
    }
    return errorCount;
}
/// <summary>
/// Authenticates the user against Google via OAuth2 and initializes the
/// Contacts, Documents (Notes) and Calendar request objects as enabled by the
/// SyncContacts/SyncNotes/SyncAppointments flags. Also derives the Outlook
/// user-property prefix for this account.
/// </summary>
/// <param name="username">Google account name used for authorization and the Outlook property prefix.</param>
/// <exception cref="Exception">When appointment sync is enabled but no primary calendar can be found.</exception>
public void LoginToGoogle(string username)
{
    Logger.Log("Connecting to Google...", EventType.Information);

    // FIX: the last clause used bitwise '&' instead of '&&' (inconsistent with the
    // other clauses and no short-circuiting); parentheses added for clarity.
    if ((ContactsRequest == null && SyncContacts) || (DocumentsRequest == null && SyncNotes) || (EventRequest == null && SyncAppointments))
    {
        //OAuth2 for all services
        List<String> scopes = new List<string>();
        //Contacts-Scope
        scopes.Add("https://www.google.com/m8/feeds");
        //Notes-Scope
        scopes.Add("https://docs.google.com/feeds/");
        //scopes.Add("https://docs.googleusercontent.com/");
        //scopes.Add("https://spreadsheets.google.com/feeds/");
        //Calendar-Scope
        //scopes.Add("https://www.googleapis.com/auth/calendar");
        scopes.Add(CalendarService.Scope.Calendar);

        //take user credentials
        UserCredential credential;

        //load client secret from resources
        byte[] jsonSecrets = Properties.Resources.client_secrets;
        using (var stream = new MemoryStream(jsonSecrets))
        {
            FileDataStore fDS = new FileDataStore(Logger.AuthFolder, true);
            GoogleClientSecrets clientSecrets = GoogleClientSecrets.Load(stream);
            credential = GCSMOAuth2WebAuthorizationBroker.AuthorizeAsync(
                clientSecrets.Secrets,
                scopes.ToArray(),
                username,
                CancellationToken.None,
                fDS).Result;

            var initializer = new Google.Apis.Services.BaseClientService.Initializer();
            initializer.HttpClientInitializer = credential;

            OAuth2Parameters parameters = new OAuth2Parameters
            {
                ClientId = clientSecrets.Secrets.ClientId,
                ClientSecret = clientSecrets.Secrets.ClientSecret,
                // Note: AccessToken is valid only for 60 minutes
                AccessToken = credential.Token.AccessToken,
                RefreshToken = credential.Token.RefreshToken
            };
            Logger.Log(Application.ProductName, EventType.Information);
            RequestSettings settings = new RequestSettings(Application.ProductName, parameters);

            if (SyncContacts)
            {
                ContactsRequest = new ContactsRequest(settings);
            }
            if (SyncNotes)
            {
                DocumentsRequest = new DocumentsRequest(settings);
                //Instantiate an Authenticator object according to your authentication, to use ResumableUploader
                authenticator = new OAuth2Authenticator(Application.ProductName, parameters);
            }
            if (SyncAppointments)
            {
                var CalendarRequest = new CalendarService(initializer);
                calendarList = CalendarRequest.CalendarList.List().Execute().Items;

                //Get Primary Calendar, if not set from outside
                if (string.IsNullOrEmpty(SyncAppointmentsGoogleFolder))
                {
                    foreach (var calendar in calendarList)
                    {
                        if (calendar.Primary != null && calendar.Primary.Value)
                        {
                            SyncAppointmentsGoogleFolder = calendar.Id;
                            break;
                        }
                    }
                }

                if (SyncAppointmentsGoogleFolder == null)
                {
                    throw new Exception("Google Calendar not defined (primary not found)");
                }

                EventRequest = CalendarRequest.Events;
            }
        }
    }

    Synchronizer.UserName = username;

    //-3 = to remove {0}, +2 = to add length for "id" or "up"
    int maxUserIdLength = Synchronizer.OutlookUserPropertyMaxLength - (Synchronizer.OutlookUserPropertyTemplate.Length - 3 + 2);
    string userId = username;
    //if a user id would overflow UserProperty name, then use that user id hash code as id.
    if (userId.Length > maxUserIdLength)
    {
        userId = userId.GetHashCode().ToString("X");
    }

    //Remove characters not allowed for Outlook user property names: []_#
    userId = userId.Replace("#", "").Replace("[", "").Replace("]", "").Replace("_", "");

    OutlookPropertyPrefix = string.Format(Synchronizer.OutlookUserPropertyTemplate, userId);
}
/// <summary>
/// Deletes every Google document titled "AN_OUTLOOK_TEST_NOTE" left behind by
/// earlier test runs. Deleting the originally created entry directly is
/// unreliable (401 / precondition failed, possibly a Google API bug), so the
/// documents are re-queried and removed by resource id instead.
/// </summary>
private static void DeleteTestNote(DocumentsRequest service)
{
    var noteQuery = new DocumentQuery(service.BaseUri) { NumberToRetrieve = 500 };
    Feed<Document> notes = service.Get<Document>(noteQuery);
    Logger.Log("Loaded Google notes", EventType.Information);

    foreach (Document note in notes.Entries)
    {
        if (note.Title != "AN_OUTLOOK_TEST_NOTE")
        {
            continue;
        }

        var deleteUri = new Uri(Google.GData.Documents.DocumentsListQuery.documentsBaseUri + "/" + note.ResourceId);
        service.Delete(deleteUri, note.ETag);
        Logger.Log("Deleted Google note", EventType.Information);
    }
}
/// <summary>
/// Verifies that every document exposes an access control list whose entries
/// all carry a role and a fully populated scope.
/// </summary>
public void ModelTestACLs()
{
    var settings = new RequestSettings(this.ApplicationName, this.userName, this.passWord);
    // settings.PageSize = 15;
    var request = new DocumentsRequest(settings);

    // this returns the server default answer
    Feed<Document> documents = request.GetDocuments();
    foreach (Document document in documents.Entries)
    {
        Assert.IsTrue(document != null, "We should have something");
        Assert.IsNotNull(document.AccessControlList);

        Feed<Acl> aclFeed = request.Get<Acl>(document.AccessControlList);
        foreach (Acl acl in aclFeed.Entries)
        {
            Assert.IsNotNull(acl.Role);
            Assert.IsNotNull(acl.Scope);
            Assert.IsNotNull(acl.Scope.Type);
            Assert.IsNotNull(acl.Scope.Value);
        }
    }
}
/// <summary>
/// End-to-end test: authorizes against Google via OAuth2, deletes any
/// leftover test note, then creates a test note twice — once through the
/// metadata-only SaveGoogleNote path and once through the ResumableUploader
/// path — deleting the note again after each creation.
/// </summary>
public void CreateNewNote()
{
    string gmailUsername;
    string syncProfile;
    GoogleAPITests.LoadSettings(out gmailUsername, out syncProfile);

    DocumentsRequest service;

    // Scopes requested for the OAuth2 consent: contacts, docs and calendar.
    var scopes = new List<string>();
    //Contacts-Scope
    scopes.Add("https://www.google.com/m8/feeds");
    //Notes-Scope
    scopes.Add("https://docs.google.com/feeds/");
    //scopes.Add("https://docs.googleusercontent.com/");
    //scopes.Add("https://spreadsheets.google.com/feeds/");
    //Calendar-Scope
    //scopes.Add("https://www.googleapis.com/auth/calendar");
    scopes.Add(CalendarService.Scope.Calendar);

    UserCredential credential;
    // Client secrets are embedded as a resource; read them from memory.
    byte[] jsonSecrets = Properties.Resources.client_secrets;
    using (var stream = new MemoryStream(jsonSecrets))
    {
        FileDataStore fDS = new FileDataStore(Logger.AuthFolder, true);

        GoogleClientSecrets clientSecrets = GoogleClientSecrets.Load(stream);

        // NOTE: blocking on .Result — acceptable in this synchronous test entry point.
        credential = GCSMOAuth2WebAuthorizationBroker.AuthorizeAsync(
            clientSecrets.Secrets,
            scopes.ToArray(),
            gmailUsername,
            CancellationToken.None,
            fDS).
            Result;

        OAuth2Parameters parameters = new OAuth2Parameters
        {
            ClientId = clientSecrets.Secrets.ClientId,
            ClientSecret = clientSecrets.Secrets.ClientSecret,
            // Note: AccessToken is valid only for 60 minutes
            AccessToken = credential.Token.AccessToken,
            RefreshToken = credential.Token.RefreshToken
        };

        RequestSettings settings = new RequestSettings("GoContactSyncMod", parameters);

        service = new DocumentsRequest(settings);

        //Instantiate an Authenticator object according to your authentication, to use ResumableUploader
        _authenticator = new OAuth2Authenticator("GCSM Unit Tests", parameters);
    }

    //Delete previously created test note.
    DeleteTestNote(service);

    // Build the test note with a temp file as its media content.
    Document newEntry = new Document();
    newEntry.Type = Document.DocumentType.Document;
    newEntry.Title = "AN_OUTLOOK_TEST_NOTE";

    string file = NotePropertiesUtils.CreateNoteFile("AN_OUTLOOK_TEST_NOTE", "This is just a test note to test GoContactSyncMod", null);
    newEntry.MediaSource = new MediaFileSource(file, MediaFileSource.GetContentTypeForFileName(file));

    #region normal flow, only working to create documents without content (metadata only), e.g. for Notes folder

    Document createdEntry = Synchronizer.SaveGoogleNote(null, newEntry, service);
    Assert.IsNotNull(createdEntry.DocumentEntry.Id.Uri);
    Logger.Log("Created Google note", EventType.Information);

    //Wait 5 seconds to give the testcase the chance to finish
    System.Threading.Thread.Sleep(5000);

    //delete test note
    DeleteTestNote(service);

    #endregion

    #region workaround flow to use UploadDocument, not needed anymore because of new approach to use ResumableUploader
    //Google.GData.Documents.DocumentEntry createdEntry2 = service.Service.UploadDocument(file, newEntry.Title);
    //Assert.IsNotNull(createdEntry2.Id.Uri);

    ////delete test note
    //DeleteTestNote(service);
    #endregion

    #region New approach how to update an existing document: https://developers.google.com/google-apps/documents-list/#updatingchanging_documents_and_files

    //Instantiate the ResumableUploader component.
    ResumableUploader uploader = new ResumableUploader();

    // Creation completes asynchronously; OnGoogleNoteCreated observes the result.
    uploader.AsyncOperationCompleted += new AsyncOperationCompletedEventHandler(OnGoogleNoteCreated);

    Synchronizer.CreateGoogleNote(newEntry, file, service, uploader, _authenticator);
    #endregion

    //Wait 5 seconds to give the testcase the chance to finish the Async events
    System.Threading.Thread.Sleep(5000);

    DeleteTestNote(service);
}
/// <summary>
/// tests that ACL information is included inline when the feed is fetched
/// through the documents-with-ACL base URI
/// </summary>
[Test]
public void ModelTestIncludeACLs()
{
    // Server default page size is used (no explicit PageSize).
    RequestSettings settings = new RequestSettings(this.ApplicationName, this.userName, this.passWord);
    DocumentsRequest request = new DocumentsRequest(settings);
    request.BaseUri = DocumentsListQuery.documentsAclUri;

    Feed<Document> documents = request.GetDocuments();
    foreach (Document document in documents.Entries)
    {
        Assert.IsTrue(document != null, "We should have something");
        Assert.IsNotNull(document.AccessControlList);
    }
}
/// <summary>
/// tests document download: every entry in the default document feed
/// should be downloadable as PDF
/// </summary>
[Test]
public void ModelTestDocumentDownload()
{
    RequestSettings settings = new RequestSettings(this.ApplicationName, this.userName, this.passWord);
    settings.AutoPaging = true;
    DocumentsRequest r = new DocumentsRequest(settings);

    // this returns the server default answer
    Feed<Document> feed = r.GetDocuments();

    foreach (Document x in feed.Entries)
    {
        Assert.IsTrue(x != null, "We should have something");

        // Wrap the download stream in using so it is disposed even when the
        // download throws mid-iteration (the original only Close()-d on the
        // success path). The unused local "Document d = null;" was removed.
        using (Stream ret = r.Download(x, Document.DownloadType.pdf))
        {
        }
    }
}
/// <summary>
/// tests arbitrary downloads: every entry must be downloadable both in its
/// native format (null export type) and as pdf
/// (previous summary wrongly described folder moves)
/// </summary>
[Test]
public void ModelTestArbitraryDownload()
{
    RequestSettings settings = new RequestSettings(this.ApplicationName, this.userName, this.passWord);
    // settings.PageSize = 15;
    DocumentsRequest r = new DocumentsRequest(settings);

    Feed<Document> feed = r.GetEverything();

    // Native-format download (no export conversion). Streams are disposed
    // via using — the original leaked every stream it opened.
    foreach (Document d in feed.Entries)
    {
        using (Stream res = r.Download(d, null))
        {
            Assert.IsNotNull(res, "The download stream should not be null");
        }
    }

    // Same documents exported as PDF.
    foreach (Document d in feed.Entries)
    {
        using (Stream res = r.Download(d, "pdf"))
        {
            Assert.IsNotNull(res, "The download stream should not be null");
        }
    }
}
/// <summary>
/// tests folder creation and hierarchy: creates a parent and a child
/// folder, moves the child into the parent, then verifies both appear in
/// the folder feed and that the child links back to the parent
/// (previous summary — "etag refresh on an entry level" — did not match)
/// </summary>
[Test]
public void ModelTestFolders()
{
    const string testTitle = "That is a new & weird subfolder";
    const string parentTitle = "Granddaddy folder";

    string parentID;
    string folderID;

    RequestSettings settings = new RequestSettings(this.ApplicationName, this.userName, this.passWord);
    // settings.PageSize = 15;
    DocumentsRequest r = new DocumentsRequest(settings);

    Document folder = new Document();
    folder.Type = Document.DocumentType.Folder;
    folder.Title = testTitle;

    // first create the folder stand-alone, verify the title round-trips,
    // then delete it again
    folder = r.CreateDocument(folder);
    Assert.IsTrue(folder.Title == testTitle);
    r.Delete(folder);

    // let's create a hierarchy
    Document parent = new Document();
    parent.Type = Document.DocumentType.Folder;
    parent.Title = parentTitle;
    parent = r.CreateDocument(parent);
    parentID = parent.Id;

    // create the child
    folder = new Document();
    folder.Type = Document.DocumentType.Folder;
    folder.Title = testTitle;

    // first create the folder
    folder = r.CreateDocument(folder);
    folderID = folder.Id;

    // now move the folder into the parent
    r.MoveDocumentTo(parent, folder);

    // now get the folder list
    Feed<Document> folders = r.GetFolders();

    // expect to see exactly both created folders; decrement for each match
    int iVerify = 2;

    List<Document> list = new List<Document>();
    foreach (Document f in folders.Entries)
    {
        list.Add(f);
    }

    bool found = false;
    foreach (Document f in list)
    {
        Assert.IsTrue(f.Type == Document.DocumentType.Folder, "this should be a folder");
        if (f.Id == parentID)
        {
            iVerify--;
        }
        if (f.Id == folderID)
        {
            iVerify--;
            // let's find the guy again: the child's ParentFolders should
            // contain the self-URI of some folder in the list (the parent)
            foreach (Document d in list)
            {
                if (f.ParentFolders.Contains(d.Self))
                {
                    found = true;
                    break;
                }
            }
        }
    }
    Assert.IsTrue(found, "we did not find the parent folder");
    Assert.IsTrue(iVerify==0, "We should have found both folders");
}
/// <summary>
/// Exercises the Documents API with 2-legged OAuth: fetches the document
/// feed, then round-trips an Update() on every document and on each of its
/// ACL entries.
/// </summary>
public void OAuth2LeggedDocumentsTest()
{
    Tracing.TraceMsg("Entering OAuth2LeggedDocumentsTest");

    RequestSettings rs = new RequestSettings(this.ApplicationName, this.oAuthConsumerKey, this.oAuthConsumerSecret,
        this.oAuthUser, this.oAuthDomain);

    DocumentsRequest dr = new DocumentsRequest(rs);

    Feed<Document> f = dr.GetDocuments();
    // modify one
    foreach (Document d in f.Entries)
    {
        // NOTE(review): the edit URIs are rewritten with "@" escaped as
        // "%40" before updating — presumably a workaround for 2-legged OAuth
        // URIs containing the user's email address; confirm against the
        // GData client library's URI handling.
        string s = d.AtomEntry.EditUri.ToString();
        d.AtomEntry.EditUri = new AtomUri(s.Replace("@", "%40"));
        dr.Update(d);

        AclQuery q = new AclQuery();
        q.Uri = d.AccessControlList;

        Feed<Google.AccessControl.Acl> facl = dr.Get<Google.AccessControl.Acl>(q);
        foreach (Google.AccessControl.Acl a in facl.Entries)
        {
            // same escaping workaround for the ACL entry's edit URI
            s = a.AclEntry.EditUri.ToString();
            a.AclEntry.EditUri = new AtomUri(s.Replace("@", "%40"));
            dr.Update(a);
        }
    }
}
/// <summary>
/// Drops the cached request reference so it becomes eligible for
/// garbage collection.
/// </summary>
public void Dispose()
{
    m_request = null;
}
/// <summary>
/// Fetches a potential updated Document for syncing purposes.
/// If the document hasn't been updated in Google Docs, will return null.
/// </summary>
/// <param name="document">Document to be updated.</param>
/// <returns>
/// Document if an updated entry is found.
/// null if no updated item is found.
/// </returns>
/// <exception cref="GDataRequestException">
/// Thrown when the check fails for any reason other than "not modified"
/// or "resource not found".
/// </exception>
public static Document GetUpdatedDocument(Document document)
{
    var originalEtag = document.DocumentEntry.Etag;

    Document refreshed;
    try
    {
        // Retrieve() sends the stored ETag, so the server answers 304 when
        // the document is unchanged (handled below).
        var request = new DocumentsRequest(_settings);
        var reqFactory = (GDataRequestFactory)request.Service.RequestFactory;
        reqFactory.Proxy = GetProxy();
        refreshed = request.Retrieve(document);
    }
    catch (GDataNotModifiedException)
    {
        // if response is 304 (NotModified) -> document hasn't changed
        Debug.WriteLine(string.Format("Document hasn't changed: {0} - {1} -> {2}", document.Title, originalEtag, document.ETag));
        return(null);
    }
    catch (GDataRequestException exRequest)
    {
        var error = GetErrorMessage(exRequest);

        // if we encounter a ResourceNotFoundException, there's no need to add an error job,
        // the AutoFetchAll-worker will handle removing the tab
        if (error.ToLowerInvariant().Contains("resourcenotfoundexception"))
        {
            return(null);
        }

        // no response body plus this generic message indicates a transport
        // failure rather than an API error
        if (exRequest.ResponseString == null && error.ToLowerInvariant().Contains("execution of request failed"))
        {
            throw new GDataRequestException("Couldn't sync document - internet down?");
        }

        Trace.WriteLine(DateTime.Now + " - NocsService - couldn't check if doc updated: " + error);
        throw new GDataRequestException(string.Format("Couldn't check if document was updated: {0} - {1}", document.DocumentEntry.Title.Text, Tools.TrimErrorMessage(error)));
    }
    catch (Exception ex)
    {
        // any other failure is wrapped so callers only have to handle
        // GDataRequestException
        var error = GetErrorMessage(ex);
        Trace.WriteLine(DateTime.Now + " - NocsService - couldn't check if doc updated: " + ex.Message);
        throw new GDataRequestException(string.Format("Couldn't check if document was updated: {0} - {1}", document.DocumentEntry.Title.Text, error));
    }

    if (refreshed != null)
    {
        Debug.WriteLine(string.Format("Found updated document: {0} - {1} -> {2}", refreshed.Title, originalEtag, refreshed.ETag));

        // let's update our internal dictionary
        if (refreshed.Type == Document.DocumentType.Folder)
        {
            AllFolders[document.ResourceId] = refreshed;
        }
        else
        {
            AllDocuments[document.ResourceId] = refreshed;
        }

        return(refreshed);
    }

    // if we get here, the document hasn't updated
    return(null);
}
/// <summary>
/// Saves a Google note document: inserts it when it has no id yet
/// (never saved on Google), otherwise updates the existing entry.
/// </summary>
/// <param name="parentFolder">Optional folder whose content feed is the insert target; may be null.</param>
/// <param name="googleNote">The note document to insert or update.</param>
/// <param name="documentsRequest">Request object used to talk to the Documents API.</param>
/// <returns>The created or updated document as returned by the server.</returns>
/// <exception cref="ApplicationException">Wraps any failure with the server response and the note's XML.</exception>
public static Document SaveGoogleNote(Document parentFolder, Document googleNote, DocumentsRequest documentsRequest)
{
    //check if this contact was not yet inserted on google.
    if (googleNote.DocumentEntry.Id.Uri == null)
    {
        //insert contact.
        Uri feedUri = ResolveNoteInsertUri(parentFolder, documentsRequest);
        try
        {
            Document createdEntry = documentsRequest.Insert(feedUri, googleNote);
            //ToDo: Workaround also doesn't help: Utilities.SaveGoogleNoteContent(this, createdEntry, googleNote);
            // NOTE(review): message says "folder" although this saves a note;
            // kept as-is to preserve the existing log output.
            Logger.Log("Created new Google folder: " + createdEntry.Title, EventType.Information);
            return createdEntry;
        }
        catch (Exception ex)
        {
            throw new ApplicationException(BuildNoteSaveError("NEW", googleNote, ex), ex);
        }
    }
    else
    {
        try
        {
            //note already present in google. just update
            Document updated = documentsRequest.Update(googleNote);
            //ToDo: Workaround also doesn't help: Utilities.SaveGoogleNoteContent(this, updated, googleNote);
            return updated;
        }
        catch (Exception ex)
        {
            throw new ApplicationException(BuildNoteSaveError("EXISTING", googleNote, ex), ex);
        }
    }
}

/// <summary>
/// Picks the feed URI for inserting a new note: the parent folder's content
/// feed when available, otherwise the request's base URI.
/// </summary>
private static Uri ResolveNoteInsertUri(Document parentFolder, DocumentsRequest documentsRequest)
{
    if (parentFolder != null)
    {
        try
        {
            //In case of Notes folder creation, the GoogleNotesFolder.DocumentEntry.Content.AbsoluteUri throws a NullReferenceException
            return new Uri(parentFolder.DocumentEntry.Content.AbsoluteUri);
        }
        catch (Exception)
        {
            // best-effort: fall through to the base URI below
        }
    }
    return new Uri(documentsRequest.BaseUri);
}

/// <summary>
/// Builds the detailed error text for a failed note save, including the
/// escaped server response (when available) and the note's XML.
/// Shared by both the insert and update failure paths.
/// </summary>
private static string BuildNoteSaveError(string kind, Document googleNote, Exception ex)
{
    string responseString = "";
    if (ex is GDataRequestException)
        responseString = EscapeXml(((GDataRequestException)ex).ResponseString);

    string xml = GetXml(googleNote);
    return String.Format("Error saving {0} Google note: {1}. \n{2}\n{3}", kind, responseString, ex.Message, xml);
}
public static void CreateGoogleNote(/*Document parentFolder, */Document googleNote, object UserData, DocumentsRequest documentsRequest, ResumableUploader uploader, OAuth2Authenticator authenticator)
{
    // Attach the resumable-upload session link that the uploader looks for
    // on the entry.
    Uri sessionUri = new Uri("https://docs.google.com/feeds/upload/create-session/default/private/full");
    AtomLink uploadLink = new AtomLink(sessionUri.AbsoluteUri);
    uploadLink.Rel = ResumableUploader.CreateMediaRelation;
    googleNote.DocumentEntry.Links.Add(uploadLink);

    // The entry needs a service so the returned entry can be parsed back.
    googleNote.DocumentEntry.Service = documentsRequest.Service;

    // Kick off the asynchronous upload; completion is reported through the
    // uploader's events together with the supplied user data.
    uploader.InsertAsync(authenticator, googleNote.DocumentEntry, UserData);
}