/// <summary>
/// Completes an asynchronous file fetch, materializing a <see cref="BinaryFile"/> from the
/// sequential column set returned by the spCore_BinaryFileGet stored procedure.
/// </summary>
/// <param name="asyncResult">The pending asynchronous result from the earlier BeginExecuteReader call.</param>
/// <param name="context">The HTTP context; the <see cref="SqlCommand"/> is restored from context.Items["cmd"].</param>
/// <param name="requiresViewSecurity">Set to <c>true</c> when the caller must check view security before serving the file.</param>
/// <returns>The populated <see cref="BinaryFile"/>, or <c>null</c> when the query returned no row.</returns>
public BinaryFile EndGet( IAsyncResult asyncResult, HttpContext context, out bool requiresViewSecurity )
{
    // The command was stashed on the context by the Begin call; restore it to finish the reader.
    SqlCommand command = (SqlCommand)context.Items["cmd"];
    using ( SqlDataReader dataReader = command.EndExecuteReader( asyncResult ) )
    {
        // [NP] Check if reader has data (Part of https://github.com/SparkDevNetwork/Rock/pull/3050)
        if ( !dataReader.Read() )
        {
            requiresViewSecurity = false;
            return null;
        }

        var file = new BinaryFile();

        // Columns must be read in Sequential Order (see stored procedure spCore_BinaryFileGet)
        file.Id = dataReader["Id"] as int? ?? 0;
        file.IsTemporary = (bool)dataReader["IsTemporary"];
        file.IsSystem = (bool)dataReader["IsSystem"];
        file.BinaryFileTypeId = dataReader["BinaryFileTypeId"] as int?;

        // Tell the caller whether security needs to be checked on this file before viewing.
        requiresViewSecurity = (bool)dataReader["RequiresViewSecurity"];

        file.FileName = dataReader["FileName"] as string;
        file.MimeType = dataReader["MimeType"] as string;
        file.ModifiedDateTime = dataReader["ModifiedDateTime"] as DateTime?;
        file.Description = dataReader["Description"] as string;
        file.SetStorageEntityTypeId( dataReader["StorageEntityTypeId"] as int? );

        var guidValue = dataReader["Guid"];
        if ( guidValue is Guid )
        {
            file.Guid = (Guid)guidValue;
        }

        file.StorageEntitySettings = dataReader["StorageEntitySettings"] as string;
        file.Path = dataReader["Path"] as string;
        file.FileSize = dataReader["FileSize"] as long?;
        file.DatabaseData = new BinaryFileData();

        // Read the file content from the database in case it's stored there; otherwise the Provider will get it.
        // TODO do as a stream instead
        var fileContent = dataReader["Content"] as byte[];
        if ( fileContent != null )
        {
            file.DatabaseData.Content = fileContent;
        }

        return file;
    }
}
/// <summary>
/// Completes an asynchronous file fetch, reading the columns produced by the spBinaryFileGet
/// stored procedure and then loading the content through the configured storage provider.
/// </summary>
/// <param name="asyncResult">The pending asynchronous result from the earlier BeginExecuteReader call.</param>
/// <param name="context">The HTTP context; the <see cref="SqlCommand"/> is restored from context.Items["cmd"].</param>
/// <returns>The populated <see cref="BinaryFile"/>.</returns>
public BinaryFile EndGet( IAsyncResult asyncResult, HttpContext context )
{
    // The command was stashed on the context by the Begin call; restore it to finish the reader.
    SqlCommand command = (SqlCommand)context.Items["cmd"];
    using ( SqlDataReader dataReader = command.EndExecuteReader( asyncResult ) )
    {
        var result = new BinaryFile();

        // Columns must be read in Sequential Order (see stored procedure spBinaryFileGet)
        // NOTE(review): the Read() return value is not checked — an empty result set would
        // surface as an exception on the first column read; confirm callers guarantee a row.
        dataReader.Read();
        result.Id = dataReader["Id"] as int? ?? 0;
        result.IsTemporary = ( dataReader["IsTemporary"] as int? ) == 1;
        result.IsSystem = ( dataReader["IsSystem"] as int? ) == 1;
        result.BinaryFileTypeId = dataReader["BinaryFileTypeId"] as int?;
        result.Url = dataReader["Url"] as string;
        result.FileName = dataReader["FileName"] as string;
        result.MimeType = dataReader["MimeType"] as string;
        result.LastModifiedDateTime = dataReader["LastModifiedDateTime"] as DateTime?;
        result.Description = dataReader["Description"] as string;
        result.SetStorageEntityTypeId( dataReader["StorageEntityTypeId"] as int? );

        var guidObj = dataReader["Guid"];
        if ( guidObj is Guid )
        {
            result.Guid = (Guid)guidObj;
        }

        string storageTypeName = dataReader["StorageEntityTypeName"] as string;
        result.Data = new BinaryFileData();

        // Read the file content from the database just in case it's stored there; otherwise the Provider will get it.
        var rawContent = dataReader["Content"];
        if ( rawContent != null )
        {
            result.Data.Content = rawContent as byte[];
        }

        // NOTE(review): the provider result unconditionally overwrites any content read above —
        // confirm the database provider returns the same bytes, otherwise the read is dead work.
        Rock.Storage.ProviderComponent provider = Rock.Storage.ProviderContainer.GetComponent( storageTypeName );
        result.Data.Content = provider.GetFileContent( result, context );

        return result;
    }
}
/// <summary>
/// Saves the specified <see cref="Rock.Model.BinaryFile"/>, migrating its stored content when
/// the file's storage provider has changed since it was last saved.
/// </summary>
/// <param name="item">A <see cref="Rock.Model.BinaryFile"/> to save.</param>
/// <param name="personId">A <see cref="System.Int32"/> representing the PersonId of the <see cref="Rock.Model.Person"/> who is saving the BinaryFile.</param>
/// <returns><c>true</c> when the underlying save succeeds.</returns>
public override bool Save( BinaryFile item, int? personId )
{
    item.LastModifiedDateTime = DateTime.Now;

    Rock.Storage.ProviderComponent storageProvider = DetermineBinaryFileStorageProvider( item );
    if ( storageProvider != null )
    {
        // If this file is getting replaced and we can determine the current StorageProvider,
        // pull the content out of (and remove it from) the provider's external storage medium
        // before saving again — important when the provider for this file type has changed
        // since the last save.
        item.Data = item.Data ?? new BinaryFileData();
        item.Data.Content = storageProvider.GetFileContent( item, HttpContext.Current );
        storageProvider.RemoveFile( item, HttpContext.Current );
    }

    // On save (unless being deleted), the file should use the storage entity type configured
    // on its BinaryFileType; re-resolve the provider when the storage type changes.
    if ( item.BinaryFileType != null && item.StorageEntityTypeId != item.BinaryFileType.StorageEntityTypeId )
    {
        item.SetStorageEntityTypeId( item.BinaryFileType.StorageEntityTypeId );
        storageProvider = DetermineBinaryFileStorageProvider( item );
    }

    if ( storageProvider != null )
    {
        // Persist the content to the (possibly new) provider's storage medium.
        storageProvider.SaveFile( item, HttpContext.Current );
    }

    return base.Save( item, personId );
}
/// <summary>
/// Imports ministry document files from the given archive, attaching each file to the matching
/// imported person as a file-field person attribute value.
/// </summary>
/// <param name="folder">The zip archive containing the document files.</param>
/// <param name="ministryFileType">Type of the ministry file.</param>
/// <param name="storageProvider">The storage provider used to persist file content.</param>
public void Map(ZipArchive folder, BinaryFileType ministryFileType, ProviderComponent storageProvider)
{
    var lookupContext = new RockContext();
    var personEntityTypeId = EntityTypeCache.GetId<Person>();
    var fileFieldTypeId = FieldTypeCache.Read(Rock.SystemGuid.FieldType.FILE.AsGuid(), lookupContext).Id;

    // Existing person file attributes, keyed so imports can reuse them instead of duplicating.
    var existingAttributes = new AttributeService(lookupContext).GetByFieldTypeId(fileFieldTypeId)
        .Where(a => a.EntityTypeId == personEntityTypeId)
        .ToDictionary(a => a.Key, a => a.Id);

    var emptyJsonObject = "{}";
    var newFileList = new List<DocumentKeys>();

    int completed = 0;
    int totalRows = folder.Entries.Count;
    int percentage = (totalRows - 1) / 100 + 1;
    // FIX: progress message was missing its closing parenthesis.
    ReportProgress(0, string.Format("Verifying files import ({0:N0} found).", totalRows));

    foreach (var file in folder.Entries)
    {
        var fileExtension = Path.GetExtension(file.Name);
        var fileMimeType = Extensions.GetMIMEType(file.Name);
        if (BinaryFileComponent.FileTypeBlackList.Contains(fileExtension))
        {
            LogException("Binary File Import", string.Format("{0} filetype not allowed ({1})", fileExtension, file.Name));
            continue;
        }
        else if (fileMimeType == null)
        {
            LogException("Binary File Import", string.Format("{0} filetype not recognized ({1})", fileExtension, file.Name));
            continue;
        }

        string[] parsedFileName = file.Name.Split('_');
        // Ministry docs should follow this pattern:
        // 0. Firstname
        // 1. Lastname
        // 2. ForeignId
        // 3. Filename
        if (parsedFileName.Length < 4)
        {
            // FIX: previously parsedFileName[2]/[3] threw IndexOutOfRangeException on
            // file names that don't match the expected underscore-delimited pattern.
            LogException("Binary File Import", string.Format("Unexpected file name format ({0})", file.Name));
            continue;
        }

        var personForeignId = parsedFileName[2].AsType<int?>();
        var personKeys = BinaryFileComponent.ImportedPeople.FirstOrDefault(p => p.IndividualId == personForeignId);
        if (personKeys != null)
        {
            var rockFile = new Rock.Model.BinaryFile();
            rockFile.IsSystem = false;
            rockFile.IsTemporary = false;
            rockFile.FileName = file.Name;
            rockFile.MimeType = fileMimeType;
            rockFile.BinaryFileTypeId = ministryFileType.Id;
            rockFile.CreatedDateTime = file.LastWriteTime.DateTime;
            rockFile.ModifiedDateTime = ImportDateTime;
            rockFile.Description = string.Format("Imported as {0}", file.Name);
            rockFile.SetStorageEntityTypeId(ministryFileType.StorageEntityTypeId);
            rockFile.StorageEntitySettings = emptyJsonObject;

            if (ministryFileType.AttributeValues.Any())
            {
                rockFile.StorageEntitySettings = ministryFileType.AttributeValues
                    .ToDictionary(a => a.Key, v => v.Value.Value).ToJson();
            }

            // use base stream instead of file stream to keep the byte[]
            // NOTE: if byte[] converts to a string it will corrupt the stream
            using (var fileContent = new StreamReader(file.Open()))
            {
                rockFile.ContentStream = new MemoryStream(fileContent.BaseStream.ReadBytesToEnd());
            }

            var attributePattern = "[A-Za-z0-9-]+";
            var attributeName = Regex.Match(parsedFileName[3].RemoveWhitespace(), attributePattern);
            var attributeKey = attributeName.Value.RemoveWhitespace();

            // change key to default key for Background Check Documents
            if (attributeKey == "BackgroundCheck")
            {
                attributeKey = "BackgroundCheckDocument";
            }

            if (!existingAttributes.ContainsKey(attributeKey))
            {
                // Create the person attribute for this document type on first use.
                var newAttribute = new Attribute();
                newAttribute.FieldTypeId = fileFieldTypeId;
                newAttribute.EntityTypeId = personEntityTypeId;
                newAttribute.EntityTypeQualifierColumn = string.Empty;
                newAttribute.EntityTypeQualifierValue = string.Empty;
                newAttribute.Key = attributeKey;
                newAttribute.Name = attributeName.Value;
                newAttribute.Description = attributeName.Value + " created by binary file import";
                newAttribute.CreatedDateTime = ImportDateTime;
                newAttribute.ModifiedDateTime = ImportDateTime;
                newAttribute.IsGridColumn = false;
                newAttribute.IsMultiValue = false;
                newAttribute.IsRequired = false;
                newAttribute.AllowSearch = false;
                newAttribute.IsSystem = false;
                newAttribute.Order = 0;
                newAttribute.AttributeQualifiers.Add(new AttributeQualifier()
                {
                    Key = "binaryFileType",
                    Value = ministryFileType.Guid.ToString()
                });

                lookupContext.Attributes.Add(newAttribute);
                lookupContext.SaveChanges();
                existingAttributes.Add(newAttribute.Key, newAttribute.Id);
            }

            newFileList.Add(new DocumentKeys()
            {
                PersonId = personKeys.PersonId,
                AttributeId = existingAttributes[attributeKey],
                File = rockFile
            });

            completed++;
            if (completed % percentage < 1)
            {
                int percentComplete = completed / percentage;
                ReportProgress(percentComplete, string.Format("{0:N0} files imported ({1}% complete).", completed, percentComplete));
            }
            else if (completed % ReportingNumber < 1)
            {
                SaveFiles(newFileList, storageProvider);

                // Reset list
                newFileList.Clear();
                ReportPartialProgress();
            }
        }
    }

    if (newFileList.Any())
    {
        SaveFiles(newFileList, storageProvider);
    }

    // FIX: message said "addresses imported" — copy/paste from the address mapper.
    ReportProgress(100, string.Format("Finished files import: {0:N0} files imported.", completed));
}
/// <summary>
/// Imports ministry document files from the given archive, attaching each file to the matching
/// imported person as a file-field person attribute value.
/// </summary>
/// <param name="folder">The zip archive containing the document files.</param>
/// <param name="ministryFileType">Type of the ministry file.</param>
/// <param name="storageProvider">The storage provider used to persist file content.</param>
public void Map( ZipArchive folder, BinaryFileType ministryFileType, ProviderComponent storageProvider )
{
    var lookupContext = new RockContext();
    var personEntityTypeId = EntityTypeCache.GetId<Person>();
    var fileFieldTypeId = FieldTypeCache.Read( Rock.SystemGuid.FieldType.FILE.AsGuid(), lookupContext ).Id;

    // Existing person file attributes, keyed so imports can reuse them instead of duplicating.
    var existingAttributes = new AttributeService( lookupContext ).GetByFieldTypeId( fileFieldTypeId )
        .Where( a => a.EntityTypeId == personEntityTypeId )
        .ToDictionary( a => a.Key, a => a.Id );

    var emptyJsonObject = "{}";
    var newFileList = new List<DocumentKeys>();

    int completed = 0;
    int totalRows = folder.Entries.Count;
    int percentage = ( totalRows - 1 ) / 100 + 1;
    // FIX: progress message was missing its closing parenthesis.
    ReportProgress( 0, string.Format( "Verifying files import ({0:N0} found).", totalRows ) );

    foreach ( var file in folder.Entries )
    {
        var fileExtension = Path.GetExtension( file.Name );
        var fileMimeType = Extensions.GetMIMEType( file.Name );
        if ( BinaryFileComponent.FileTypeBlackList.Contains( fileExtension ) )
        {
            LogException( "Binary File Import", string.Format( "{0} filetype not allowed ({1})", fileExtension, file.Name ) );
            continue;
        }
        else if ( fileMimeType == null )
        {
            LogException( "Binary File Import", string.Format( "{0} filetype not recognized ({1})", fileExtension, file.Name ) );
            continue;
        }

        string[] parsedFileName = file.Name.Split( '_' );
        // Ministry docs should follow this pattern:
        // 0. Firstname
        // 1. Lastname
        // 2. ForeignId
        // 3. Filename
        if ( parsedFileName.Length < 4 )
        {
            // FIX: previously parsedFileName[2]/[3] threw IndexOutOfRangeException on
            // file names that don't match the expected underscore-delimited pattern.
            LogException( "Binary File Import", string.Format( "Unexpected file name format ({0})", file.Name ) );
            continue;
        }

        var personForeignId = parsedFileName[2].AsType<int?>();
        var personKeys = BinaryFileComponent.ImportedPeople.FirstOrDefault( p => p.IndividualId == personForeignId );
        if ( personKeys != null )
        {
            var rockFile = new Rock.Model.BinaryFile();
            rockFile.IsSystem = false;
            rockFile.IsTemporary = false;
            rockFile.FileName = file.Name;
            rockFile.MimeType = fileMimeType;
            rockFile.BinaryFileTypeId = ministryFileType.Id;
            rockFile.CreatedDateTime = file.LastWriteTime.DateTime;
            rockFile.ModifiedDateTime = ImportDateTime;
            rockFile.Description = string.Format( "Imported as {0}", file.Name );
            rockFile.SetStorageEntityTypeId( ministryFileType.StorageEntityTypeId );
            rockFile.StorageEntitySettings = emptyJsonObject;

            if ( ministryFileType.AttributeValues.Any() )
            {
                rockFile.StorageEntitySettings = ministryFileType.AttributeValues
                    .ToDictionary( a => a.Key, v => v.Value.Value ).ToJson();
            }

            // use base stream instead of file stream to keep the byte[]
            // NOTE: if byte[] converts to a string it will corrupt the stream
            using ( var fileContent = new StreamReader( file.Open() ) )
            {
                rockFile.ContentStream = new MemoryStream( fileContent.BaseStream.ReadBytesToEnd() );
            }

            var attributePattern = "[A-Za-z0-9-]+";
            var attributeName = Regex.Match( parsedFileName[3].RemoveWhitespace(), attributePattern );
            var attributeKey = attributeName.Value.RemoveWhitespace();

            // change key to default key for Background Check Documents
            if ( attributeKey == "BackgroundCheck" )
            {
                attributeKey = "BackgroundCheckDocument";
            }

            if ( !existingAttributes.ContainsKey( attributeKey ) )
            {
                // Create the person attribute for this document type on first use.
                var newAttribute = new Attribute();
                newAttribute.FieldTypeId = fileFieldTypeId;
                newAttribute.EntityTypeId = personEntityTypeId;
                newAttribute.EntityTypeQualifierColumn = string.Empty;
                newAttribute.EntityTypeQualifierValue = string.Empty;
                newAttribute.Key = attributeKey;
                newAttribute.Name = attributeName.Value;
                newAttribute.Description = attributeName.Value + " created by binary file import";
                newAttribute.CreatedDateTime = ImportDateTime;
                newAttribute.ModifiedDateTime = ImportDateTime;
                newAttribute.IsGridColumn = false;
                newAttribute.IsMultiValue = false;
                newAttribute.IsRequired = false;
                newAttribute.AllowSearch = false;
                newAttribute.IsSystem = false;
                newAttribute.Order = 0;
                newAttribute.AttributeQualifiers.Add( new AttributeQualifier()
                {
                    Key = "binaryFileType",
                    Value = ministryFileType.Guid.ToString()
                } );

                lookupContext.Attributes.Add( newAttribute );
                lookupContext.SaveChanges();
                existingAttributes.Add( newAttribute.Key, newAttribute.Id );
            }

            newFileList.Add( new DocumentKeys()
            {
                PersonId = personKeys.PersonId,
                AttributeId = existingAttributes[attributeKey],
                File = rockFile
            } );

            completed++;
            if ( completed % percentage < 1 )
            {
                int percentComplete = completed / percentage;
                ReportProgress( percentComplete, string.Format( "{0:N0} files imported ({1}% complete).", completed, percentComplete ) );
            }
            else if ( completed % ReportingNumber < 1 )
            {
                SaveFiles( newFileList, storageProvider );

                // Reset list
                newFileList.Clear();
                ReportPartialProgress();
            }
        }
    }

    if ( newFileList.Any() )
    {
        SaveFiles( newFileList, storageProvider );
    }

    // FIX: message said "addresses imported" — copy/paste from the address mapper.
    ReportProgress( 100, string.Format( "Finished files import: {0:N0} files imported.", completed ) );
}
/// <summary>
/// Imports person profile images from the given archive, keeping only the most recent
/// photo per person.
/// </summary>
/// <param name="folder">The zip archive containing the image files.</param>
/// <param name="personImageType">Type of the person image file.</param>
/// <param name="storageProvider">The storage provider used to persist file content.</param>
public void Map( ZipArchive folder, BinaryFileType personImageType, ProviderComponent storageProvider )
{
    // check for existing images
    var lookupContext = new RockContext();
    var existingImageList = new PersonService( lookupContext ).Queryable().AsNoTracking()
        .Where( p => p.Photo != null )
        .ToDictionary( p => p.Id, p => p.Photo.CreatedDateTime );

    var emptyJsonObject = "{}";
    var newFileList = new Dictionary<int, Rock.Model.BinaryFile>();

    int completed = 0;
    int totalRows = folder.Entries.Count;
    int percentage = ( totalRows - 1 ) / 100 + 1;
    // FIX: progress message was missing its closing parenthesis.
    ReportProgress( 0, string.Format( "Verifying files import ({0:N0} found).", totalRows ) );

    foreach ( var file in folder.Entries )
    {
        var fileExtension = Path.GetExtension( file.Name );
        if ( BinaryFileComponent.FileTypeBlackList.Contains( fileExtension ) )
        {
            LogException( "Binary File Import", string.Format( "{0} filetype not allowed ({1})", fileExtension, file.Name ) );
            continue;
        }

        var personForeignId = Path.GetFileNameWithoutExtension( file.Name ).AsType<int?>();
        var personKeys = BinaryFileComponent.ImportedPeople.FirstOrDefault( p => p.IndividualId == personForeignId );
        if ( personKeys != null )
        {
            // only import the most recent profile photo
            // FIX: the existing photo's CreatedDateTime can be null; calling .Value on it threw
            // InvalidOperationException. A null date is now treated as replaceable.
            DateTime? existingPhotoDate;
            bool hasExistingPhoto = existingImageList.TryGetValue( personKeys.PersonId, out existingPhotoDate );
            if ( !hasExistingPhoto || existingPhotoDate == null || existingPhotoDate.Value < file.LastWriteTime.DateTime )
            {
                var rockFile = new Rock.Model.BinaryFile();
                rockFile.IsSystem = false;
                rockFile.IsTemporary = false;
                rockFile.FileName = file.Name;
                rockFile.BinaryFileTypeId = personImageType.Id;
                rockFile.MimeType = Extensions.GetMIMEType( file.Name );
                rockFile.CreatedDateTime = file.LastWriteTime.DateTime;
                rockFile.ModifiedDateTime = ImportDateTime;
                rockFile.Description = string.Format( "Imported as {0}", file.Name );
                rockFile.SetStorageEntityTypeId( personImageType.StorageEntityTypeId );
                rockFile.StorageEntitySettings = emptyJsonObject;

                if ( personImageType.AttributeValues.Any() )
                {
                    rockFile.StorageEntitySettings = personImageType.AttributeValues
                        .ToDictionary( a => a.Key, v => v.Value.Value ).ToJson();
                }

                // use base stream instead of file stream to keep the byte[]
                // NOTE: if byte[] converts to a string it will corrupt the stream
                using ( var fileContent = new StreamReader( file.Open() ) )
                {
                    rockFile.ContentStream = new MemoryStream( fileContent.BaseStream.ReadBytesToEnd() );
                }

                newFileList.Add( personKeys.PersonId, rockFile );
            }

            completed++;
            if ( completed % percentage < 1 )
            {
                int percentComplete = completed / percentage;
                ReportProgress( percentComplete, string.Format( "{0:N0} files imported ({1}% complete).", completed, percentComplete ) );
            }
            else if ( completed % ReportingNumber < 1 )
            {
                SaveFiles( newFileList, storageProvider );

                // add image keys to master list
                foreach ( var newFile in newFileList )
                {
                    existingImageList.AddOrReplace( newFile.Key, newFile.Value.CreatedDateTime );
                }

                // Reset batch list
                newFileList.Clear();
                ReportPartialProgress();
            }
        }
    }

    if ( newFileList.Any() )
    {
        SaveFiles( newFileList, storageProvider );
    }

    // FIX: message said "addresses imported" — copy/paste from the address mapper.
    ReportProgress( 100, string.Format( "Finished files import: {0:N0} person images imported.", completed ) );
}
/// <summary>
/// Imports financial transaction images from the given archive, matching each image file to a
/// previously imported transaction by the transaction's foreign id in the file name.
/// </summary>
/// <param name="folder">The zip archive containing the image files.</param>
/// <param name="transactionImageType">Type of the transaction image file.</param>
public void Map(ZipArchive folder, BinaryFileType transactionImageType)
{
    var lookupContext = new RockContext();
    var emptyJsonObject = "{}";
    var newFileList = new Dictionary<int, Rock.Model.BinaryFile>();

    // Map of transaction ForeignId -> Rock transaction Id for lookup by file name.
    var transactionIdList = new FinancialTransactionService(lookupContext)
        .Queryable().AsNoTracking().Where(t => t.ForeignId != null)
        .ToDictionary(t => (int)t.ForeignId, t => t.Id);

    var storageProvider = transactionImageType.StorageEntityTypeId == DatabaseProvider.EntityType.Id
        ? (ProviderComponent)DatabaseProvider
        : (ProviderComponent)FileSystemProvider;

    int completed = 0;
    int totalRows = folder.Entries.Count;
    int percentage = (totalRows - 1) / 100 + 1;
    // FIX: progress message was missing its closing parenthesis.
    ReportProgress(0, string.Format("Verifying files import ({0:N0} found).", totalRows));

    foreach (var file in folder.Entries)
    {
        var fileExtension = Path.GetExtension(file.Name);
        if (BinaryFileComponent.FileTypeBlackList.Contains(fileExtension))
        {
            LogException("Binary File Import", string.Format("{0} filetype not allowed ({1})", fileExtension, file.Name));
            continue;
        }

        int? transactionId = Path.GetFileNameWithoutExtension(file.Name).AsType<int?>();
        if (transactionId != null && transactionIdList.ContainsKey((int)transactionId))
        {
            var rockFile = new Rock.Model.BinaryFile();
            rockFile.IsSystem = false;
            rockFile.IsTemporary = false;
            rockFile.FileName = file.Name;
            rockFile.BinaryFileTypeId = transactionImageType.Id;
            rockFile.CreatedDateTime = file.LastWriteTime.DateTime;
            rockFile.MimeType = Extensions.GetMIMEType(file.Name);
            rockFile.Description = string.Format("Imported as {0}", file.Name);
            rockFile.SetStorageEntityTypeId(transactionImageType.StorageEntityTypeId);
            rockFile.StorageEntitySettings = emptyJsonObject;

            if (transactionImageType.AttributeValues.Any())
            {
                rockFile.StorageEntitySettings = transactionImageType.AttributeValues
                    .ToDictionary(a => a.Key, v => v.Value.Value).ToJson();
            }

            // use base stream instead of file stream to keep the byte[]
            // NOTE: if byte[] converts to a string it will corrupt the stream
            using (var fileContent = new StreamReader(file.Open()))
            {
                rockFile.ContentStream = new MemoryStream(fileContent.BaseStream.ReadBytesToEnd());
            }

            // NOTE(review): Dictionary.Add throws if two image files resolve to the same
            // transaction — confirm the source guarantees one image per transaction.
            newFileList.Add(transactionIdList[(int)transactionId], rockFile);

            completed++;
            if (completed % percentage < 1)
            {
                int percentComplete = completed / percentage;
                ReportProgress(percentComplete, string.Format("{0:N0} files imported ({1}% complete).", completed, percentComplete));
            }
            else if (completed % ReportingNumber < 1)
            {
                SaveFiles(newFileList, storageProvider);

                // Reset list
                newFileList.Clear();
                ReportPartialProgress();
            }
        }
    }

    if (newFileList.Any())
    {
        SaveFiles(newFileList, storageProvider);
    }

    // FIX: message said "addresses imported" — copy/paste from the address mapper.
    ReportProgress(100, string.Format("Finished files import: {0:N0} files imported.", completed));
}
/// <summary>
/// Imports person profile images from the given archive, keeping only the most recent
/// photo per person and choosing the storage provider from the file type's configuration.
/// </summary>
/// <param name="folder">The zip archive containing the image files.</param>
/// <param name="personImageType">Type of the person image file.</param>
public void Map(ZipArchive folder, BinaryFileType personImageType)
{
    // check for existing images
    var lookupContext = new RockContext();
    var existingImageList = new PersonService(lookupContext).Queryable().AsNoTracking()
        .Where(p => p.Photo != null)
        .ToDictionary(p => p.Id, p => p.Photo.CreatedDateTime);

    var emptyJsonObject = "{}";
    var newFileList = new Dictionary<int, Rock.Model.BinaryFile>();

    var storageProvider = personImageType.StorageEntityTypeId == DatabaseProvider.EntityType.Id
        ? (ProviderComponent)DatabaseProvider
        : (ProviderComponent)FileSystemProvider;

    var completedItems = 0;
    var totalEntries = folder.Entries.Count;
    var percentage = (totalEntries - 1) / 100 + 1;
    // FIX: progress message was missing its closing parenthesis.
    ReportProgress(0, string.Format("Verifying person images import ({0:N0} found).", totalEntries));

    foreach (var file in folder.Entries)
    {
        var fileExtension = Path.GetExtension(file.Name);
        if (FileTypeBlackList.Contains(fileExtension))
        {
            LogException("Binary File Import", string.Format("{0} filetype not allowed ({1})", fileExtension, file.Name));
            continue;
        }

        var personForeignId = Path.GetFileNameWithoutExtension(file.Name).AsType<int?>();
        var personKeys = ImportedPeople.FirstOrDefault(p => p.PersonForeignId == personForeignId);
        if (personKeys != null)
        {
            // only import the most recent profile photo
            // FIX: the existing photo's CreatedDateTime can be null; calling .Value on it threw
            // InvalidOperationException. A null date is now treated as replaceable.
            DateTime? existingPhotoDate;
            bool hasExistingPhoto = existingImageList.TryGetValue(personKeys.PersonId, out existingPhotoDate);
            if (!hasExistingPhoto || existingPhotoDate == null || existingPhotoDate.Value < file.LastWriteTime.DateTime)
            {
                var rockFile = new Rock.Model.BinaryFile
                {
                    IsSystem = false,
                    IsTemporary = false,
                    FileName = file.Name,
                    BinaryFileTypeId = personImageType.Id,
                    MimeType = GetMIMEType(file.Name),
                    CreatedDateTime = file.LastWriteTime.DateTime,
                    Description = string.Format("Imported as {0}", file.Name)
                };
                rockFile.SetStorageEntityTypeId(personImageType.StorageEntityTypeId);
                rockFile.StorageEntitySettings = emptyJsonObject;

                if (personImageType.AttributeValues.Any())
                {
                    rockFile.StorageEntitySettings = personImageType.AttributeValues
                        .ToDictionary(a => a.Key, v => v.Value.Value).ToJson();
                }

                // use base stream instead of file stream to keep the byte[]
                // NOTE: if byte[] converts to a string it will corrupt the stream
                using (var fileContent = new StreamReader(file.Open()))
                {
                    rockFile.ContentStream = new MemoryStream(fileContent.BaseStream.ReadBytesToEnd());
                }

                newFileList.Add(personKeys.PersonId, rockFile);
            }

            completedItems++;
            if (completedItems % percentage < 1)
            {
                var percentComplete = completedItems / percentage;
                ReportProgress(percentComplete, string.Format("{0:N0} person image files imported ({1}% complete).", completedItems, percentComplete));
            }
            else if (completedItems % ReportingNumber < 1)
            {
                SaveFiles(newFileList, storageProvider);

                // add image keys to master list
                foreach (var newFile in newFileList)
                {
                    existingImageList.AddOrReplace(newFile.Key, newFile.Value.CreatedDateTime);
                }

                // Reset batch list
                newFileList.Clear();
                ReportPartialProgress();
            }
        }
    }

    if (newFileList.Any())
    {
        SaveFiles(newFileList, storageProvider);
    }

    lookupContext.Dispose();
    ReportProgress(100, string.Format("Finished files import: {0:N0} person images imported.", completedItems));
}
/// <summary>
/// Imports ministry document files from the given archive, attaching each document to the matching
/// imported person via a file-field (or background-check) person attribute.
/// </summary>
/// <param name="folder">The zip archive containing the document files.</param>
/// <param name="ministryFileType">Type of the ministry file.</param>
/// <returns>The number of documents imported.</returns>
public int Map(ZipArchive folder, BinaryFileType ministryFileType)
{
    var lookupContext = new RockContext();
    var personEntityTypeId = EntityTypeCache.GetId<Person>();
    var binaryFileTypeService = new BinaryFileTypeService(lookupContext);
    var fileFieldTypeId = FieldTypeCache.Get(Rock.SystemGuid.FieldType.FILE.AsGuid(), lookupContext).Id;
    var backgroundFieldTypeId = FieldTypeCache.Get(Rock.SystemGuid.FieldType.BACKGROUNDCHECK.AsGuid(), lookupContext).Id;

    // Existing person attributes of both the file and background-check field types, merged so
    // imports reuse either kind instead of creating duplicates.
    var existingAttributes = new AttributeService(lookupContext).GetByFieldTypeId(fileFieldTypeId)
        .Where(a => a.EntityTypeId == personEntityTypeId)
        .ToDictionary(a => a.Key, a => a);
    var backgroundCheckFileAttributes = new AttributeService(lookupContext).GetByFieldTypeId(backgroundFieldTypeId)
        .Where(a => a.EntityTypeId == personEntityTypeId)
        .ToDictionary(a => a.Key, a => a);

    foreach (var backgroundCheckFileAttribute in backgroundCheckFileAttributes)
    {
        if (!existingAttributes.ContainsKey(backgroundCheckFileAttribute.Key))
        {
            existingAttributes.Add(backgroundCheckFileAttribute.Key, backgroundCheckFileAttribute.Value);
        }
    }

    var emptyJsonObject = "{}";
    var newFileList = new List<DocumentKeys>();

    var completedItems = 0;
    var totalRows = folder.Entries.Count;
    var percentage = (totalRows - 1) / 100 + 1;
    ReportProgress(0, string.Format("Verifying ministry document import ({0:N0} found)", totalRows));

    foreach (var file in folder.Entries.OrderBy(f => f.Name))
    {
        var fileExtension = Path.GetExtension(file.Name);
        if (FileTypeBlackList.Contains(fileExtension))
        {
            LogException("Binary File Import", string.Format("{0} filetype not allowed ({1})", fileExtension, file.Name));
            continue;
        }

        var nameWithoutExtension = file.Name.ReplaceLastOccurrence(fileExtension, string.Empty);
        var parsedFileName = nameWithoutExtension.Split('_');
        // Ministry docs should follow this pattern:
        // 0. Firstname
        // 1. Lastname
        // 2. ForeignId
        // 3. Filename
        // 4. Doc Id
        if (parsedFileName.Length < 4)
        {
            // FIX: the guard checked Length < 3, but both branches below index parsedFileName[3],
            // so a 3-part name threw IndexOutOfRangeException. Also changed `break` to `continue`:
            // `break` silently aborted the entire import at the first malformed file name.
            LogException("Binary File Import", string.Format("Unexpected file name format ({0})", file.Name));
            continue;
        }

        var personForeignId = parsedFileName[2].AsType<int?>();
        var personKeys = ImportedPeople.FirstOrDefault(p => p.PersonForeignId == personForeignId);
        if (personKeys != null)
        {
            var attributeName = string.Empty;
            var documentForeignId = string.Empty;
            if (parsedFileName.Count() > 4)
            {
                attributeName = parsedFileName[3];
                documentForeignId = parsedFileName[4];
            }
            else
            {
                // Doc Id wasn't a separate segment; strip it off the end of the file name segment.
                var filename = parsedFileName[3].ReplaceLastOccurrence(fileExtension, string.Empty);
                attributeName = Regex.Replace(filename, "\\d{4,}[.\\w]+$", string.Empty);
                documentForeignId = Regex.Match(filename, "\\d+$").Value;
            }

            // append "Document" to attribute name to create unique attributes
            // this matches core attribute "Background Check Document"
            attributeName = !attributeName.EndsWith("Document", StringComparison.OrdinalIgnoreCase)
                ? string.Format("{0} Document", attributeName)
                : attributeName;
            var attributeKey = attributeName.RemoveSpecialCharacters();

            Attribute fileAttribute = null;
            var attributeBinaryFileType = ministryFileType;
            if (!existingAttributes.ContainsKey(attributeKey))
            {
                // Create the person attribute for this document type on first use.
                fileAttribute = new Attribute
                {
                    FieldTypeId = fileFieldTypeId,
                    EntityTypeId = personEntityTypeId,
                    EntityTypeQualifierColumn = string.Empty,
                    EntityTypeQualifierValue = string.Empty,
                    Key = attributeKey,
                    Name = attributeName,
                    Description = string.Format("{0} created by binary file import", attributeName),
                    IsGridColumn = false,
                    IsMultiValue = false,
                    IsRequired = false,
                    AllowSearch = false,
                    IsSystem = false,
                    Order = 0
                };
                fileAttribute.AttributeQualifiers.Add(new AttributeQualifier()
                {
                    Key = "binaryFileType",
                    Value = ministryFileType.Guid.ToString()
                });

                lookupContext.Attributes.Add(fileAttribute);
                lookupContext.SaveChanges();
                existingAttributes.Add(fileAttribute.Key, fileAttribute);
            }
            else
            {
                // if attribute already exists in Rock, override default file type with the Rock-specified file type
                fileAttribute = existingAttributes[attributeKey];
                var attributeBinaryFileTypeGuid = fileAttribute.AttributeQualifiers.FirstOrDefault(q => q.Key.Equals("binaryFileType"));
                if (attributeBinaryFileTypeGuid != null)
                {
                    attributeBinaryFileType = binaryFileTypeService.Get(attributeBinaryFileTypeGuid.Value.AsGuid());
                }
            }

            var rockFile = new Rock.Model.BinaryFile
            {
                IsSystem = false,
                IsTemporary = false,
                MimeType = GetMIMEType(file.Name),
                BinaryFileTypeId = attributeBinaryFileType.Id,
                FileName = file.Name,
                Description = string.Format("Imported as {0}", file.Name),
                CreatedDateTime = file.LastWriteTime.DateTime,
                ModifiedDateTime = file.LastWriteTime.DateTime,
                CreatedByPersonAliasId = ImportPersonAliasId,
                ForeignKey = documentForeignId,
                ForeignId = documentForeignId.AsIntegerOrNull()
            };
            rockFile.SetStorageEntityTypeId(attributeBinaryFileType.StorageEntityTypeId);
            rockFile.StorageEntitySettings = emptyJsonObject;

            if (attributeBinaryFileType.AttributeValues != null)
            {
                rockFile.StorageEntitySettings = attributeBinaryFileType.AttributeValues
                    .ToDictionary(a => a.Key, v => v.Value.Value).ToJson();
            }

            // use base stream instead of file stream to keep the byte[]
            // NOTE: if byte[] converts to a string it will corrupt the stream
            using (var fileContent = new StreamReader(file.Open()))
            {
                rockFile.ContentStream = new MemoryStream(fileContent.BaseStream.ReadBytesToEnd());
            }

            newFileList.Add(new DocumentKeys()
            {
                PersonId = personKeys.PersonId,
                AttributeId = fileAttribute.Id,
                File = rockFile
            });

            completedItems++;
            if (completedItems % percentage < 1)
            {
                var percentComplete = completedItems / percentage;
                ReportProgress(percentComplete, string.Format("{0:N0} ministry document files imported ({1}% complete).", completedItems, percentComplete));
            }

            if (completedItems % ReportingNumber < 1)
            {
                SaveFiles(newFileList);

                // Reset list
                newFileList.Clear();
                ReportPartialProgress();
            }
        }
    }

    if (newFileList.Any())
    {
        SaveFiles(newFileList);
    }

    ReportProgress(100, string.Format("Finished documents import: {0:N0} ministry documents imported.", completedItems));
    return completedItems;
}
/// <summary>
/// Maps the specified folder of benevolence request documents into Rock binary files.
/// File names are expected to follow the pattern: {RequestForeignId}_{FileName}_{DocId}.
/// </summary>
/// <param name="folder">The zip folder containing the request documents.</param>
/// <param name="requestDocumentType">The benevolence request document file type.</param>
/// <returns>The number of documents imported.</returns>
public int Map(ZipArchive folder, BinaryFileType requestDocumentType)
{
    var lookupContext = new RockContext();
    var emptyJsonObject = "{}";
    // Key = (BenevolenceRequestId, document ForeignId), Value = the file to persist.
    var newFileList = new Dictionary<KeyValuePair<int, int>, Rock.Model.BinaryFile>();

    var benevolenceRequestService = new BenevolenceRequestService(lookupContext);
    var importedRequests = benevolenceRequestService
        .Queryable().AsNoTracking().Where(t => t.ForeignId != null)
        .ToDictionary(t => ( int )t.ForeignId, t => t.Id);
    var importedRequestDocuments = new BenevolenceRequestDocumentService(lookupContext)
        .Queryable().AsNoTracking().Where(t => t.ForeignId != null)
        .ToDictionary(t => ( int )t.ForeignId, t => t.Id);

    // Choose the provider that matches the file type's configured storage entity.
    var storageProvider = requestDocumentType.StorageEntityTypeId == DatabaseProvider.EntityType.Id
        ? ( ProviderComponent )DatabaseProvider
        : ( ProviderComponent )FileSystemProvider;

    var completedItems = 0;
    var totalEntries = folder.Entries.Count;
    var percentage = (totalEntries - 1) / 100 + 1;
    // Fixed unbalanced parenthesis in the progress message.
    ReportProgress(0, string.Format("Verifying benevolence request documents import ({0:N0} found).", totalEntries));

    foreach (var file in folder.Entries)
    {
        var fileExtension = Path.GetExtension(file.Name);
        if (FileTypeBlackList.Contains(fileExtension))
        {
            LogException("Binary File Import", string.Format("{0} filetype not allowed ({1})", fileExtension, file.Name));
            continue;
        }

        var nameWithoutExtension = file.Name.ReplaceLastOccurrence(fileExtension, string.Empty);
        var parsedFileName = nameWithoutExtension.Split('_').ToList();
        //
        // Benevolence Request docs should follow this pattern:
        //   0. Request ForeignId
        //   1. FileName
        //   2. Doc Id
        var foreignBenevolenceRequestId = parsedFileName[0].AsType<int?>();

        // Make sure the Benevolence Request exists
        if (foreignBenevolenceRequestId != null && importedRequests.ContainsKey(( int )foreignBenevolenceRequestId))
        {
            var documentForeignId = -1;
            var fileName = string.Empty;
            if (parsedFileName.Count >= 3)
            {
                documentForeignId = parsedFileName.LastOrDefault().AsInteger();

                // If document foreignId is provided, make sure it doesn't already exist
                if (documentForeignId > 0 && importedRequestDocuments.ContainsKey(documentForeignId))
                {
                    continue;
                }

                // Extract filename
                parsedFileName.RemoveAt(parsedFileName.Count - 1); // Remove Doc Id from end
                parsedFileName.RemoveAt(0);                        // Remove Request ForeignId from beginning
                fileName = string.Join("_", parsedFileName);
            }
            else
            {
                // BUGFIX: the original declared a new, unused local ("var filename = ...")
                // here, so files that don't match the 3-part pattern were imported with an
                // empty FileName. Assign the stripped name to the outer variable instead.
                fileName = nameWithoutExtension;
            }

            // Create the binary file
            var rockFile = new Rock.Model.BinaryFile
            {
                IsSystem = false,
                IsTemporary = false,
                MimeType = GetMIMEType(file.Name),
                BinaryFileTypeId = requestDocumentType.Id,
                FileName = fileName,
                CreatedDateTime = file.LastWriteTime.DateTime,
                ModifiedDateTime = file.LastWriteTime.DateTime,
                CreatedByPersonAliasId = ImportPersonAliasId
            };

            rockFile.SetStorageEntityTypeId(requestDocumentType.StorageEntityTypeId);
            rockFile.StorageEntitySettings = emptyJsonObject;

            if (requestDocumentType.AttributeValues.Any())
            {
                rockFile.StorageEntitySettings = requestDocumentType.AttributeValues
                    .ToDictionary(a => a.Key, v => v.Value.Value).ToJson();
            }

            // use base stream instead of file stream to keep the byte[]
            // NOTE: if byte[] converts to a string it will corrupt the stream
            using (var fileContent = new StreamReader(file.Open()))
            {
                rockFile.ContentStream = new MemoryStream(fileContent.BaseStream.ReadBytesToEnd());
            }

            // add this document file to the Rock transaction
            newFileList.Add(new KeyValuePair<int, int>(importedRequests[( int )foreignBenevolenceRequestId], documentForeignId), rockFile);

            completedItems++;
            if (completedItems % percentage < 1)
            {
                var percentComplete = completedItems / percentage;
                ReportProgress(percentComplete, string.Format("{0:N0} benevolence document files imported ({1}% complete).", completedItems, percentComplete));
            }

            if (completedItems % ReportingNumber < 1)
            {
                SaveFiles(newFileList, storageProvider);

                // Reset list
                newFileList.Clear();
                ReportPartialProgress();
            }
        }
    }

    // Flush any remaining files that didn't hit a ReportingNumber boundary.
    if (newFileList.Any())
    {
        SaveFiles(newFileList, storageProvider);
    }

    ReportProgress(100, string.Format("Finished document import: {0:N0} benevolence documents imported.", completedItems));
    return completedItems;
}
/// <summary>
/// Maps the specified folder of transaction images into Rock binary files.
/// Each file name (without extension) is expected to be a transaction ForeignId.
/// </summary>
/// <param name="folder">The folder.</param>
/// <param name="transactionImageType">Type of the transaction image file.</param>
/// <param name="storageProvider">The storage provider.</param>
public void Map( ZipArchive folder, BinaryFileType transactionImageType, ProviderComponent storageProvider )
{
    var lookupContext = new RockContext();
    var emptyJsonObject = "{}";
    // Key = FinancialTransaction Id, Value = the image file to persist.
    var newFileList = new Dictionary<int, Rock.Model.BinaryFile>();

    var transactionIdList = new FinancialTransactionService( lookupContext )
        .Queryable().AsNoTracking().Where( t => t.ForeignId != null )
        .ToDictionary( t => (int)t.ForeignId, t => t.Id );

    int completed = 0;
    int totalRows = folder.Entries.Count;
    int percentage = ( totalRows - 1 ) / 100 + 1;
    // Fixed unbalanced parenthesis in the progress message.
    ReportProgress( 0, string.Format( "Verifying files import ({0:N0} found).", totalRows ) );

    foreach ( var file in folder.Entries )
    {
        var fileExtension = Path.GetExtension( file.Name );
        var fileMimeType = Extensions.GetMIMEType( file.Name );
        if ( BinaryFileComponent.FileTypeBlackList.Contains( fileExtension ) )
        {
            LogException( "Binary File Import", string.Format( "{0} filetype not allowed ({1})", fileExtension, file.Name ) );
            continue;
        }
        else if ( fileMimeType == null )
        {
            LogException( "Binary File Import", string.Format( "{0} filetype not recognized ({1})", fileExtension, file.Name ) );
            continue;
        }

        int? transactionId = Path.GetFileNameWithoutExtension( file.Name ).AsType<int?>();
        if ( transactionId != null && transactionIdList.ContainsKey( (int)transactionId ) )
        {
            var rockFile = new Rock.Model.BinaryFile();
            rockFile.IsSystem = false;
            rockFile.IsTemporary = false;
            rockFile.FileName = file.Name;
            rockFile.MimeType = fileMimeType;
            rockFile.BinaryFileTypeId = transactionImageType.Id;
            rockFile.CreatedDateTime = file.LastWriteTime.DateTime;
            // NOTE(review): sibling Map methods use file.LastWriteTime here; confirm
            // ImportDateTime is intentional for transaction images.
            rockFile.ModifiedDateTime = ImportDateTime;
            rockFile.Description = string.Format( "Imported as {0}", file.Name );
            rockFile.SetStorageEntityTypeId( transactionImageType.StorageEntityTypeId );
            rockFile.StorageEntitySettings = emptyJsonObject;

            if ( transactionImageType.AttributeValues.Any() )
            {
                rockFile.StorageEntitySettings = transactionImageType.AttributeValues
                    .ToDictionary( a => a.Key, v => v.Value.Value ).ToJson();
            }

            // use base stream instead of file stream to keep the byte[]
            // NOTE: if byte[] converts to a string it will corrupt the stream
            using ( var fileContent = new StreamReader( file.Open() ) )
            {
                rockFile.ContentStream = new MemoryStream( fileContent.BaseStream.ReadBytesToEnd() );
            }

            newFileList.Add( transactionIdList[(int)transactionId], rockFile );

            completed++;
            if ( completed % percentage < 1 )
            {
                int percentComplete = completed / percentage;
                ReportProgress( percentComplete, string.Format( "{0:N0} files imported ({1}% complete).", completed, percentComplete ) );
            }

            // BUGFIX: this was chained with "else if", so the periodic batch save was
            // skipped whenever the percentage-report branch fired on the same item.
            // The sibling Map methods use two independent checks; do the same here.
            if ( completed % ReportingNumber < 1 )
            {
                SaveFiles( newFileList, storageProvider );

                // Reset list
                newFileList.Clear();
                ReportPartialProgress();
            }
        }
    }

    // Flush any remaining files that didn't hit a ReportingNumber boundary.
    if ( newFileList.Any() )
    {
        SaveFiles( newFileList, storageProvider );
    }

    // BUGFIX: message said "addresses imported" — copy-paste from an address importer.
    ReportProgress( 100, string.Format( "Finished files import: {0:N0} files imported.", completed ) );
}