/// <summary>
/// Loads the Azure service XML document for the given file type, returning its
/// XML namespace, resolved file path, and the qualified role element names to search.
/// </summary>
/// <param name="fileType">Which Azure file to load (Definition, Local, or Cloud).</param>
/// <param name="directoryPath">Directory containing the Azure service files.</param>
/// <param name="xNamespace">The XML namespace of the loaded document.</param>
/// <param name="filePath">Full path of the file that was loaded.</param>
/// <param name="roleQualifiedNames">Role element names (WebRole/WorkerRole or Role) qualified by the namespace.</param>
/// <returns>The loaded document, with line info enabled for diagnostics.</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown for an unknown <paramref name="fileType"/>.</exception>
/// <exception cref="FileNotFoundException">Thrown when the expected file does not exist.</exception>
private static XDocument GetDocument(AzureFile fileType, string directoryPath, out XNamespace xNamespace, out string filePath, out List<XName> roleQualifiedNames)
{
    roleQualifiedNames = new List<XName>();
    switch (fileType)
    {
        case AzureFile.Definition:
            filePath = Path.Combine(directoryPath, "ServiceDefinition.csdef");
            xNamespace = XNamespace.Get("http://schemas.microsoft.com/ServiceHosting/2008/10/ServiceDefinition");
            // Definition files can declare either kind of role.
            roleQualifiedNames.Add(XName.Get("WebRole", xNamespace.NamespaceName));
            roleQualifiedNames.Add(XName.Get("WorkerRole", xNamespace.NamespaceName));
            break;
        case AzureFile.Local:
        case AzureFile.Cloud:
            // Local and Cloud configurations share the same schema and role element name.
            filePath = Path.Combine(directoryPath, fileType == AzureFile.Local ? "ServiceConfiguration.Local.cscfg" : "ServiceConfiguration.Cloud.cscfg");
            xNamespace = XNamespace.Get("http://schemas.microsoft.com/ServiceHosting/2008/10/ServiceConfiguration");
            roleQualifiedNames.Add(XName.Get("Role", xNamespace.NamespaceName));
            break;
        default:
            // Use the (paramName, actualValue, message) overload instead of
            // passing the message text where the parameter name belongs.
            throw new ArgumentOutOfRangeException(nameof(fileType), fileType, "Unknown enum value");
    }
    if (!File.Exists(filePath))
    {
        throw new FileNotFoundException(string.Format("Azure file was not found at \"{0}\"", Path.GetFullPath(filePath)));
    }
    return XDocument.Load(filePath, LoadOptions.SetLineInfo);
}
/// <summary>
/// Replaces the project's image: uploads the new file and persists the resulting URL.
/// </summary>
/// <param name="projectId">Id of the project to update.</param>
/// <param name="image">Image file to upload; forwarded to the upload helper.</param>
public async Task UpdateImageAsync(long projectId, AzureFile image)
{
    var project = await _projectRepository.GetByIdAsync(projectId);
    var imageUrl = await UploadImageAndGetUrlAsync(projectId, image);
    project.ImageUrl = imageUrl;
    await _projectRepository.SaveChangesAsync();
}
/// <summary>
/// Uploads the image under a unique per-project blob name and returns its URI,
/// or a completed task with a null result when no image was provided.
/// </summary>
/// <param name="projectId">Id used in the blob name prefix.</param>
/// <param name="image">Image to upload; may be null.</param>
/// <returns>Task producing the uploaded image URI, or null when <paramref name="image"/> is null.</returns>
private Task<string> UploadImageAndGetUrlAsync(long projectId, AzureFile image)
{
    if (image == null)
    {
        // BUG FIX: the original returned a null Task<string>. The caller awaits
        // this method's result directly, and awaiting a null task throws
        // NullReferenceException — return a completed task with a null value instead.
        return Task.FromResult<string>(null);
    }
    var imageName = $"project-{projectId}/image-{Guid.NewGuid()}{image.Extension}";
    return _projectStorageProvider.UploadAndGetUriAsync(imageName, image);
}
/// <summary>
/// Deduplicates the incoming transactions against the database (keyed by
/// <c>Transaction.Hash</c>), bulk-saves the unified set, stores the duplicates as a
/// CSV in Azure storage, and records an Import row summarizing the run.
/// Progress is pushed to clients throughout via the SignalR hub.
/// </summary>
/// <param name="transactions">Newly imported transactions.</param>
/// <param name="_hubContext">SignalR hub used to report progress and errors to clients.</param>
/// <param name="urlFileImported">URL of the originally imported file, stored on the Import row.</param>
public void UnifyTransactions(List<Transaction> transactions, IHubContext<NotifyHub, ITypedHubClient> _hubContext, string urlFileImported)
{
    _hubContext.Clients.All.UpdatePercent("Unifying transactions");
    var dc = getNewDataContext();
    var duplication = new List<Transaction>();
    try
    {
        var repository = new AzureFile();
        var allTransactions = new ConcurrentDictionary<string, Transaction>();
        var transactionDB = dc.Transaction.ToList();
        var total = transactions.Count + transactionDB.Count;
        var count = 1;
        // DB rows are inserted first so an incoming transaction with the same
        // hash is the one that gets counted as a duplicate.
        transactionDB.ForEach(t =>
        {
            if (!allTransactions.TryAdd(t.Hash, t))
            {
                duplication.Add(t);
            }
            SendImportProgress(count++, total, "Unifying transactions", _hubContext);
        });
        transactions.ForEach(t =>
        {
            if (!allTransactions.TryAdd(t.Hash, t))
            {
                duplication.Add(t);
            }
            SendImportProgress(count++, total, "Unifying transactions", _hubContext);
        });
        var transactionsTreated = allTransactions.Select(t => t.Value).ToList();
        var urlDuplicates = repository.SaveCSV(duplication.ConvertAll(d => new TransactionViewModel(d)));
        var import = new Import()
        {
            Date = DateTime.Now,
            FileImported = urlFileImported,
            FileDuplicate = urlDuplicates,
            TotalTransactions = transactions.Count,
            TotalTransactionsDuplicates = duplication.Count,
            TotalTransactionsSaves = transactions.Count - duplication.Count
        };
        dc.BulkInsertOrUpdate(transactionsTreated, progress: delegate (decimal s) { sendProgress(s, "Save transactions", _hubContext); });
        dc.Add(import);
        dc.SaveChanges();
        _hubContext.Clients.All.Sucess(new ImportViewModel(import));
    }
    catch (Exception)
    {
        _hubContext.Clients.All.Error("Error unity transactions");
        // BUG FIX: was "throw ex;", which resets the stack trace; bare "throw;"
        // rethrows preserving the original trace.
        throw;
    }
}
/// <summary>
/// Validate the object.
/// </summary>
/// <exception cref="ValidationException">
/// Thrown if validation fails
/// </exception>
public virtual void Validate()
{
    // Name is mandatory; everything else is validated only when present.
    if (Name == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "Name");
    }
    AzureFile?.Validate();
}
/// <summary>
/// Uploads a submitted document to Azure blob storage, records it in the database,
/// and redirects back to the Documents page with a success flag and failure message.
/// </summary>
/// <param name="submittedVM">View model carrying the new-document metadata.</param>
/// <param name="file">The posted file to upload.</param>
/// <returns>A redirect to the Documents action with the outcome in the route values.</returns>
public async Task<ActionResult> UploadDocument([Bind(Include = "NewDocument")] DocumentsViewModel submittedVM, HttpPostedFileBase file)
{
    // Azure's limit is 30 MB; this app caps uploads at 10 MB.
    const int MaxFileSizeBytes = 10000000;
    bool documentUploadSucceeded = false;
    string failMsg = null;
    try
    {
        if (file == null || file.ContentLength <= 0)
        {
            failMsg = "A valid file is required to submit the form.";
        }
        else if (file.ContentLength > MaxFileSizeBytes)
        {
            failMsg = "The file is too large to be uploaded to Azure. Max file size is 10 MB.";
        }
        else
        {
            // Upload to Azure File Storage.
            AzureBlobStorage azureBlob = new AzureBlobStorage();
            AzureFile azureFile = await azureBlob.UploadDocumentAsync(file, submittedVM.NewDocument);

            // Save Document information to the database.
            Document doc = new Document();
            doc.Name = Path.GetFileName(file.FileName);
            doc.Url = azureFile.Document.Url;
            doc.Uploaded = DateTime.Now;
            doc.Extension = Path.GetExtension(file.FileName);
            if (submittedVM.NewDocument != null && submittedVM.NewDocument.AudienceId != null)
            {
                doc.AudienceId = submittedVM.NewDocument.AudienceId;
            }
            long saved = Repository.Documents.Save(doc);
            // Save is assumed to return < 1 on failure — TODO confirm its contract.
            documentUploadSucceeded = saved >= 1;
            failMsg = "Document could not be uploaded due to an internal error. Please contact your system administrator.";
        }
    }
    catch (Exception)
    {
        // TODO: wire this up on the front end - show a modal when the form reloads.
        documentUploadSucceeded = false;
        failMsg = "Document could not be uploaded due to an internal error. Please contact your system administrator.";
    }
    return RedirectToAction("Documents", new { documentUploadSucceeded = documentUploadSucceeded, failMsg = failMsg });
}
/// <summary>
/// Downloads every file in the given SharePoint document library and uploads each
/// to Azure blob storage, returning the document records that were built.
/// </summary>
/// <param name="docLib">Document library describing the SharePoint site and audience.</param>
/// <returns>The saved document records, or null when any step fails.</returns>
public async Task<List<Models.Document>> DownloadFilesToAzure(Models.DocumentLibrary docLib)
{
    List<Models.Document> documentsSaved = new List<Models.Document>();
    try
    {
        using (ClientContext context = new ClientContext(docLib.SiteUrl))
        {
            ListItemCollection items = GetFilesRecursively(context, docLib);
            foreach (var item in items)
            {
                // Pull file information from SharePoint.
                string fileRef = (string)item["FileRef"];
                var fileName = System.IO.Path.GetFileName(fileRef);
                var fileInfo = Microsoft.SharePoint.Client.File.OpenBinaryDirect(context, fileRef);

                // Construct the document object to save to the db.
                Models.Document newDocument = new Models.Document();
                newDocument.Name = fileName;
                newDocument.Extension = Path.GetExtension(fileName);
                newDocument.DocumentLibraryId = docLib.DocumentLibraryId;
                newDocument.AudienceId = docLib.AudienceId;

                // Stream the SharePoint file straight into Azure storage.
                AzureBlobStorage azureStorage = new AzureBlobStorage();
                AzureFile azureFile = await azureStorage.UploadDocumentAsync(fileInfo.Stream, newDocument, docLib, fileName, MIMEAssistant.GetMIMEType(fileName));

                newDocument.Url = azureFile.Document.Url;
                newDocument.Uploaded = DateTime.UtcNow;
                documentsSaved.Add(newDocument);
            }
        }
        return documentsSaved;
    }
    catch (Exception e)
    {
        // TODO: add error handling here...
        // NOTE(review): returning null on failure forces every caller to null-check;
        // consider rethrowing or returning the partial list instead.
        Console.WriteLine("SharePoint.DownloadFilesToAzure debug, " + e.Message);
        return null;
    }
}
/// <summary>
/// Streams a previously saved duplicates CSV back to the client as a file download.
/// </summary>
/// <param name="fileName">Name of the CSV stored in the "duplicates" container.</param>
/// <returns>A text/csv file result, or a 404 when no file name was supplied.</returns>
public IActionResult DownloadFileDuplicate(string fileName)
{
    if (fileName.IsNullOrEmptyOrBlank())
    {
        return StatusCode(404, "Erro downloading file");
    }
    // The original wrapped this in try/catch { throw ex; }, which only served to
    // reset the stack trace — letting the exception propagate is equivalent and correct.
    var repository = new AzureFile();
    var csv = repository.GetFile(fileName, "duplicates");
    return File(new UTF8Encoding().GetBytes(csv), "text/csv", fileName);
}
/// <summary>
/// Locate a file at the given subpath by directly mapping path segments to Azure File Storage directories.
/// </summary>
/// <param name="path">A path under the root file share.</param>
/// <param name="cancellationToken">Token to observe for cancellation.</param>
/// <returns>The file information. Callers must check <see cref="IFile.Exists"/>.</returns>
public async Task<IFile> GetFileInfoAsync(string path, CancellationToken cancellationToken = default(CancellationToken))
{
    var file = _share.GetFileReference(path);
    try
    {
        // NOTE(review): cancellationToken is accepted but not forwarded to
        // FetchAttributesAsync — confirm whether an overload taking a token exists.
        await file.FetchAttributesAsync();
        return new AzureFile(file);
    }
    catch (StorageException ex) when (StorageExceptionHelper.IsNotFoundStorageException(ex))
    {
        // A "not found" storage error maps to the NotFoundFile sentinel rather than throwing.
        return new NotFoundFile(path);
    }
}
/// <summary>
/// Validate the object.
/// </summary>
/// <exception cref="ValidationException">
/// Thrown if validation fails
/// </exception>
public virtual void Validate()
{
    // Name is the only required member; every volume source is optional and
    // validated only when present (null-conditional call skips null members).
    if (Name == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "Name");
    }
    AwsElasticBlockStore?.Validate();
    AzureDisk?.Validate();
    AzureFile?.Validate();
    Cephfs?.Validate();
    Cinder?.Validate();
    FlexVolume?.Validate();
    GcePersistentDisk?.Validate();
    GitRepo?.Validate();
    Glusterfs?.Validate();
    HostPath?.Validate();
    Iscsi?.Validate();
    Nfs?.Validate();
    PersistentVolumeClaim?.Validate();
    PhotonPersistentDisk?.Validate();
    PortworxVolume?.Validate();
    Projected?.Validate();
    Quobyte?.Validate();
    Rbd?.Validate();
    ScaleIO?.Validate();
    VsphereVolume?.Validate();
}
/// <summary>
/// Validate the object.
/// </summary>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown if validation fails
/// </exception>
public virtual void Validate()
{
    // All members are optional volume sources; validate each one only when set
    // (the null-conditional call is a no-op for null members).
    AwsElasticBlockStore?.Validate();
    AzureDisk?.Validate();
    AzureFile?.Validate();
    Cephfs?.Validate();
    Cinder?.Validate();
    FlexVolume?.Validate();
    GcePersistentDisk?.Validate();
    Glusterfs?.Validate();
    HostPath?.Validate();
    Iscsi?.Validate();
    Local?.Validate();
    Nfs?.Validate();
    PhotonPersistentDisk?.Validate();
    PortworxVolume?.Validate();
    Quobyte?.Validate();
    Rbd?.Validate();
    ScaleIO?.Validate();
    VsphereVolume?.Validate();
}
/// <summary>
/// Rewrites the ConfigurationSettings of the role matching <paramref name="project"/>
/// in the Azure definition/configuration file of the given type, using the supplied
/// app settings and (for Local/Cloud) the matching mapping.
/// </summary>
/// <param name="appSettings">App settings to write into the file.</param>
/// <param name="project">Project whose role element is updated (matched by name, case-insensitive).</param>
/// <param name="mappings">Mappings providing the setting-name source for Local/Cloud targets.</param>
/// <param name="fileType">Which Azure file to update.</param>
/// <param name="directoryPath">Directory containing the Azure service files.</param>
private static void CreateAzureFile(IEnumerable<XmlAppSetting> appSettings, Project project, IEnumerable<AzureMapping> mappings, AzureFile fileType, string directoryPath)
{
    string filePath;
    XNamespace xNamespace;
    List<XName> roleQualifiedNames;
    XDocument document = GetDocument(fileType, directoryPath, out xNamespace, out filePath, out roleQualifiedNames);

    // Find the role element (WebRole/WorkerRole/Role) whose name matches this project.
    XElement projectRole = null;
    List<XElement> allRoles = new List<XElement>();
    foreach (var roleQualifiedName in roleQualifiedNames)
    {
        allRoles.AddRange(document.Descendants(roleQualifiedName));
    }
    foreach (var xElement in allRoles)
    {
        var nameAttribute = GetAttribute(xElement, "name", true);
        if (nameAttribute.Equals(project.ProjectName, StringComparison.OrdinalIgnoreCase))
        {
            projectRole = xElement;
            break;
        }
    }
    if (projectRole == null)
    {
        // This document doesn't have a definition for this project.
        return;
    }

    XName configSettingsQualifiedName = XName.Get("ConfigurationSettings", xNamespace.NamespaceName);
    IEnumerable<XElement> configSettingsNodes = projectRole.Descendants(configSettingsQualifiedName);

    // Remove existing settings for this project before re-writing them.
    XName settingQualifiedName = XName.Get("Setting", xNamespace.NamespaceName);
    configSettingsNodes.Descendants(settingQualifiedName).Remove();

    switch (fileType)
    {
        case AzureFile.Definition:
            SaveDefinitionSettings(document, appSettings, configSettingsNodes, filePath, settingQualifiedName);
            break;
        case AzureFile.Local:
            var mapping = mappings.FirstOrDefault(m => m.Target.Equals("Local", StringComparison.OrdinalIgnoreCase));
            if (mapping == null)
            {
                // BUG FIX: the original dereferenced FirstOrDefault's result without a
                // null check and would throw NullReferenceException — fail with a clear message.
                throw new InvalidOperationException("No mapping with target \"Local\" was found.");
            }
            SaveActualSettings(document, appSettings, configSettingsNodes, filePath, settingQualifiedName, project, mapping.Name);
            break;
        case AzureFile.Cloud:
            var cloudMapping = mappings.FirstOrDefault(m => m.Target.Equals("Cloud", StringComparison.OrdinalIgnoreCase));
            if (cloudMapping == null)
            {
                throw new InvalidOperationException("No mapping with target \"Cloud\" was found.");
            }
            SaveActualSettings(document, appSettings, configSettingsNodes, filePath, settingQualifiedName, project, cloudMapping.Name);
            break;
        default:
            // (paramName, actualValue, message) overload: the original abused the
            // message as the parameter name.
            throw new ArgumentOutOfRangeException(nameof(fileType), fileType, "Unknown enum value");
    }
}
/// <summary>
/// Creates an expert application from the request, then uploads the applicant's
/// scan, CV, and photo concurrently and stores their URLs on the application.
/// </summary>
/// <param name="request">Application data submitted by the applicant.</param>
/// <param name="userId">Id of the applying user.</param>
/// <param name="cv">CV document to upload.</param>
/// <param name="scan">Identity document scan to upload.</param>
/// <param name="photo">Applicant photo to upload.</param>
public async Task CreateApplicationAsync(CreateExpertApplicationRequest request, long userId, AzureFile cv, AzureFile scan, AzureFile photo)
{
    var country = await GetCountryAsync(request.CountryIsoCode);

    // Persist the application first so it gets an id to use in the blob names.
    var application = new ExpertApplication
    {
        FirstName = request.FirstName,
        LastName = request.LastName,
        Sex = request.Sex,
        ApplicantId = userId,
        BirthDate = request.BirthDate,
        CountryId = country.Id,
        City = request.City,
        Description = request.Description,
        Why = request.Why,
        FacebookLink = request.FacebookLink,
        LinkedInLink = request.LinkedInLink,
        BitcointalkLink = request.BitcointalkLink,
        DocumentNumber = request.DocumentNumber,
        DocumentType = request.DocumentType.ToDomain(),
        ApplyDate = _clock.UtcNow
    };
    application.SetAreas(request.Areas);
    _expertApplicationRepository.Add(application);
    await _expertApplicationRepository.SaveChangesAsync();

    var applicationId = application.Id.ToString();
    // Upload all three documents in parallel; result order matches argument order.
    var uploadedUris = await Task.WhenAll(
        _expertApplicationsStorageProvider.UploadAndGetUriAsync($"application-{applicationId}/scan-{Guid.NewGuid()}{scan.Extension}", scan),
        _expertApplicationsStorageProvider.UploadAndGetUriAsync($"application-{applicationId}/cv-{Guid.NewGuid()}{cv.Extension}", cv),
        _expertApplicationsStorageProvider.UploadAndGetUriAsync($"application-{applicationId}/photo-{Guid.NewGuid()}{photo.Extension}", photo));
    application.ScanUrl = uploadedUris[0];
    application.CvUrl = uploadedUris[1];
    application.PhotoUrl = uploadedUris[2];
    await _expertApplicationRepository.SaveChangesAsync();
}
/// <summary>
/// Uploads a new photo for a project team member and stores the resulting link on the project.
/// </summary>
/// <param name="projectId">Id of the project owning the team member.</param>
/// <param name="projectTeamMemberId">Id of the team member whose photo is replaced.</param>
/// <param name="photo">Photo file to upload.</param>
public async Task UpdateTeamMemberPhotoAsync(long projectId, long projectTeamMemberId, AzureFile photo)
{
    // NOTE(review): the blob prefix uses projectTeamMemberId, while similar code
    // elsewhere uses "project-{projectId}" — confirm this is intentional.
    var blobName = $"project-{projectTeamMemberId}/photo-{Guid.NewGuid()}{photo.Extension}";
    var photoUrl = await _projectTeamMembersStorageProvider.UploadAndGetUriAsync(blobName, photo);
    var project = await GetByIdAsync(projectId);
    project.UpdateTeamMemberPhotoLink(projectTeamMemberId, photoUrl);
    await _projectRepository.SaveChangesAsync();
}
/// <summary>
/// Returns a new <c>AzureFile</c> as the opened file handle.
/// </summary>
/// <param name="sPath">Requested file path.</param>
/// <param name="fWrite">Whether write access was requested.</param>
/// <returns>A freshly constructed <c>AzureFile</c>.</returns>
public IFile OpenFile(string sPath, bool fWrite)
{
    // NOTE(review): sPath and fWrite are ignored — the returned AzureFile is not
    // bound to the requested path or access mode; confirm this stub is intentional.
    return new AzureFile();
}
/// <summary>
/// Reads transactions from an uploaded OFX file, saving the raw file to Azure
/// storage first and reporting parse progress via the SignalR hub.
/// </summary>
/// <param name="file">Uploaded OFX file.</param>
/// <param name="_hubContext">SignalR hub used to report import progress and errors.</param>
/// <param name="urlPath">URL of the stored copy; when null/empty the save failed and no parsing occurs.</param>
/// <returns>List of parsed transactions (empty when the file could not be stored).</returns>
public List<Transaction> ReadTransactions(IFormFile file, IHubContext<NotifyHub, ITypedHubClient> _hubContext, out string urlPath)
{
    var transactions = new List<Transaction>();
    _hubContext.Clients.All.UpdatePercent("Reading OFX file");
    try
    {
        // OFX timestamps look like 20210131235959[-3:BRT]; the bracketed zone is stripped below.
        var format = "yyyyMMddHHmmss";
        CultureInfo provider = CultureInfo.InvariantCulture;
        var repository = new AzureFile();
        urlPath = repository.SaveFile(file, "importedofx", new List<string> { ".OFX" });
        if (string.IsNullOrWhiteSpace(urlPath))
        {
            return transactions;
        }
        using (Stream stream = file.OpenReadStream())
        {
            using (StreamReader read = new StreamReader(stream))
            {
                // First pass just counts lines for progress reporting, then rewind.
                var lines = read.ReadToEnd().Split(new char[] { '\n' });
                var total = lines.Length;
                stream.Position = 0;
                var line = "";
                int count = 1;
                while ((line = read.ReadLine()) != null)
                {
                    if (line.Contains("<TRNTYPE>"))
                    {
                        // A transaction block: <TRNTYPE>, <DTPOSTED>, <TRNAMT>, <MEMO> on consecutive lines.
                        var TRNTYPE = line.Split("<TRNTYPE>")[1].Trim();
                        SendImportProgress(count++, total, "Reading OFX file", _hubContext);
                        var DTPOSTED = read.ReadLine().Split("<DTPOSTED>")[1].Split('[')[0].Trim();
                        SendImportProgress(count++, total, "Reading OFX file", _hubContext);
                        var TRNAMT = read.ReadLine().Split("<TRNAMT>")[1].Trim();
                        SendImportProgress(count++, total, "Reading OFX file", _hubContext);
                        var MEMO = read.ReadLine().Split("<MEMO>")[1].Trim();
                        SendImportProgress(count++, total, "Reading OFX file", _hubContext);
                        var transaction = new Transaction
                        {
                            Date = DateTime.ParseExact(DTPOSTED, format, provider),
                            Memo = MEMO,
                            Type = TRNTYPE,
                            // BUG FIX: OFX amounts always use '.' as the decimal separator.
                            // The original replaced '.' with ',' and parsed with the current
                            // culture, which breaks on dot-decimal server locales — parse
                            // with the invariant culture instead.
                            Value = decimal.Parse(TRNAMT, NumberStyles.Number, CultureInfo.InvariantCulture),
                        };
                        transaction.Hash = GetHashTransactionImport(transaction);
                        transactions.Add(transaction);
                    }
                    else
                    {
                        SendImportProgress(count++, total, "Reading OFX file", _hubContext);
                    }
                }
                read.Close();
            }
            stream.Close();
        }
    }
    catch (Exception)
    {
        _hubContext.Clients.All.Error("Error ao ler arquivo");
        // BUG FIX: was "throw ex;", which resets the stack trace; bare "throw;"
        // rethrows preserving the original trace.
        throw;
    }
    return transactions;
}