// Returns every image created by the given user (newest first) together with
// a parallel list of the image file contents encoded as Base64 strings.
public (List <Image>, List <String>) GetUserImages(string CreatorUserId)
{
    // SortByDescending here is the MongoDB driver's IFindFluent sort (server-side).
    // The local was previously named "Image", shadowing the type name.
    List <Image> images = _images.Find(img => img.CreatorUserId == CreatorUserId)
                                 .SortByDescending(e => e.CreatedOn)
                                 .ToList();

    List <String> b64Files = new List <String>(images.Count);
    foreach (var image in images)
    {
        // 'using' fixes the original's leaked MemoryStream; the leftover
        // Debug.WriteLine(i) from development has been removed.
        using (var fs = new MemoryStream())
        {
            _bucket.DownloadToStream(image.ImageId, fs);
            b64Files.Add(Convert.ToBase64String(fs.ToArray()));
        }
    }

    return (images, b64Files);
}
// Downloads the GridFS file with the given id into a fresh in-memory stream.
// Fix: the stream is rewound before being returned — the original handed it
// back positioned at the end, so callers reading it sequentially got no data.
private static Stream DownloadFile(GridFSBucket fs, ObjectId id)
{
    var stream = new MemoryStream();
    fs.DownloadToStream(id, stream);
    stream.Position = 0; // rewind so the caller can read from the start
    return stream;
}
// Downloads every GridFS file whose name contains "csv" into
// %APPDATA%/Analysis, then reports completion on the UI label.
private void button5_Click(object sender, EventArgs e) //download
{
    // SECURITY(review): credentials are embedded in the connection string;
    // they belong in configuration/secret storage, not source control.
    //var client = new MongoClient("mongodb+srv://admin:[email protected]/group1db?retryWrites=true&w=majority");
    var client = new MongoClient("mongodb+srv://admin:[email protected]/group1db?retryWrites=true&w=majority"); //personal mongoDB account made to test after issues with provided one
    var database = client.GetDatabase("group1db");
    var fs = new GridFSBucket(database);

    // The original wrapped this single regex in a redundant Filter.And(...).
    var filter = Builders <GridFSFileInfo> .Filter.Regex(x => x.Filename, "csv");
    var list = fs.Find(filter).ToList();

    // Path.Combine instead of hand-built "/" separators.
    var userpath = Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData);
    var endpath = Path.Combine(userpath, "Analysis");
    Directory.CreateDirectory(endpath);

    foreach (GridFSFileInfo doc in list)
    {
        string path = Path.Combine(endpath, doc.Filename);
        using (var stream = File.OpenWrite(path))
        {
            fs.DownloadToStream(doc.Id, stream);
        }
    }

    debugLabel.Text = "Download Complete";
}
/// <summary>
/// Downloads the GridFS file identified by the given id into the supplied stream.
/// </summary>
/// <param name="objectID">Hex string form of the file's ObjectId.</param>
/// <param name="stream">Destination stream the file content is written into.</param>
public void FileGet(string objectID, Stream stream)
{
    var bucket = new GridFSBucket(_db);
    bucket.DownloadToStream(new ObjectId(objectID), stream);
}
/// <summary>
/// Downloads a GridFS file into a new in-memory stream.
/// </summary>
/// <param name="id">MongoDB primary key of the file.</param>
/// <returns>
/// A stream rewound to position 0 so it is immediately readable — the
/// original returned it positioned at the end of the downloaded data.
/// </returns>
public Stream DownloadToStream(ObjectId id)
{
    Stream destination = new MemoryStream();
    bucket.DownloadToStream(id, destination);
    destination.Position = 0; // rewind for the caller
    return destination;
}
// Handles a transfer request for a repository: resolves the repo record,
// refreshes the local zip from GitHub when the HEAD commit changed (or falls
// back to the copy stored in GridFS), then starts a data-transfer operation
// whose temp file is tracked until completion.
private void RequestRepository(TransferRepository repository, Reporter reporter)
{
    var repozipFile = RepoEnv.TempFiles.CreateFile();
    // Upgradeable read lock: UpdateRepository may escalate to a write.
    UpdateLock.EnterUpgradeableReadLock();
    try
    {
        Log.Info("Incomming Transfer Request for Repository {Name}", repository.RepoName);
        reporter.Send(RepositoryMessages.GetRepo);
        var data = _repos.AsQueryable().FirstOrDefault(r => r.RepoName == repository.RepoName);
        if (data == null)
        {
            // Unknown repository — report failure and bail out early.
            reporter.Compled(OperationResult.Failure(RepoErrorCodes.DatabaseNoRepoFound));
            return;
        }
        // NOTE(review): .Result blocks the calling thread on the GitHub call —
        // acceptable here only if this runs outside a sync-context; confirm.
        var commitInfo = _gitHubClient.Repository.Commit.GetSha1(data.RepoId, "HEAD").Result;
        var repozip = repozipFile.Stream;
        // If HEAD is unchanged, or the fresh update failed, fall back to the
        // last-known-good zip stored in GridFS.
        if (!(commitInfo != data.LastUpdate && UpdateRepository(data, reporter, repository, commitInfo, repozip)))
        {
            reporter.Send(RepositoryMessages.GetRepositoryFromDatabase);
            Log.Info("Downloading Repository {Name} From Server", repository.RepoName);
            repozip.SetLength(0); // discard any partial update output first
            _bucket.DownloadToStream(ObjectId.Parse(data.FileName), repozip);
        }
        //_reporter = reporter;
        //repozip.Seek(0, SeekOrigin.Begin);
        //Timers.StartSingleTimer(_reporter, new TransferFailed(string.Empty, FailReason.Timeout, data.RepoName), TimeSpan.FromMinutes(10));
        var request = DataTransferRequest.FromStream(repository.OperationId, repozip, repository.Manager ?? throw new ArgumentNullException("FileManager"), commitInfo);
        request.SendCompletionBack = true;
        _dataTransfer.Request(request);
        // Keep the temp file alive until the transfer operation finishes.
        _currentTransfers[request.OperationId] = repozipFile;
        reporter.Compled(OperationResult.Success(new FileTransactionId(request.OperationId)));
    }
    finally
    {
        UpdateLock.ExitUpgradeableReadLock();
    }
}
/// <summary>
/// Downloads a GridFS file into the given destination stream.
/// </summary>
/// <param name="id">Note: this is the files_id value, not the chunk _id value.</param>
/// <param name="destinationStream">A file stream or memory stream to write into.</param>
/// <param name="options">Optional GridFS download options.</param>
public void DownloadToStream(ObjectId id, Stream destinationStream, GridFSDownloadOptions options = null)
{
    var database = GetClient(m_connectionStr).GetDatabase(DatabaseName);
    var gridBucket = new GridFSBucket(database, BucksOptions);
    gridBucket.DownloadToStream(id, destinationStream, options);
}
// **************************************************************************
//************************** DOWNLOAD **************************************
// Downloads the GridFS file with the given id to the local path 'dir'.
// Fix: the destination stream is wrapped in 'using' so the file handle is
// released even when the download throws — the original only closed it on
// the success path.
private void DownloadFile(ObjectId id, string dir)
{
    IGridFSBucket bucket = new GridFSBucket(CloudDB.database);
    using (Stream destination = File.OpenWrite(dir))
    {
        bucket.DownloadToStream(id, destination);
    }
}
/// <summary>
/// Fetches a GridFS file's content as an in-memory stream.
/// </summary>
/// <param name="bucketName">Name of the GridFS bucket to read from.</param>
/// <param name="fileId">Hex string form of the file's ObjectId.</param>
/// <returns>
/// A stream rewound to position 0 — the original returned it positioned at
/// the end, so sequential reads yielded nothing.
/// </returns>
public static Stream GetFileStream(string bucketName, string fileId)
{
    MemoryStream stream = new MemoryStream();
    // Object initializer replaces the original's property-by-property setup.
    var bucket = new GridFSBucket(mongoContext, new GridFSBucketOptions
    {
        BucketName = bucketName
    });
    bucket.DownloadToStream(new ObjectId(fileId), stream);
    stream.Position = 0; // rewind for the caller
    return stream;
}
/// <summary>
/// Downloads a GridFS file, identified by ObjectId, into a local file.
/// </summary>
/// <param name="filepath">Destination path on disk.</param>
/// <param name="objectid">Hex string form of the file's ObjectId.</param>
/// <param name="collectionName">GridFS bucket name to read from.</param>
public void DownLoadFile(string filepath, string objectid, string collectionName)
{
    var bucket = new GridFSBucket(this.MongoDatabase, new GridFSBucketOptions
    {
        BucketName = collectionName,
        ChunkSizeBytes = 358400,
        WriteConcern = WriteConcern.WMajority,
        ReadPreference = ReadPreference.Secondary
    });
    var fileid = new ObjectId(objectid);

    // NOTE(review): FileMode.Append preserves the original behavior, but
    // re-downloading the same file will concatenate content — confirm intent.
    // Fix: 'using' releases the handle even when the download throws; the
    // original leaked it on the failure path.
    using (var fileStream = new FileStream(filepath, FileMode.Append))
    {
        bucket.DownloadToStream(fileid, fileStream);
    }
}
// Downloads the first GridFS file matching 'fileName' to the directory 'ruta'.
// NOTE(review): despite the "Async" suffix this method is fully synchronous;
// the name is kept because callers depend on it.
// Fix: the original dereferenced fileEntry.Id without a null check and threw
// a NullReferenceException when no file matched; now a descriptive
// FileNotFoundException is thrown instead.
public void DownloadFileAsync(String fileName, String ruta)
{
    var laBaseDeDatos = ConectarConBaseDeDatos();
    IGridFSBucket bucket = new GridFSBucket(laBaseDeDatos);

    var filter = Builders <GridFSFileInfo <ObjectId> > .Filter.Eq(x => x.Filename, fileName);
    var fileEntry = bucket.Find(filter).FirstOrDefault();
    if (fileEntry == null)
    {
        throw new FileNotFoundException($"GridFS file '{fileName}' was not found.");
    }

    var file = ruta + fileName;
    // 'using' disposes the stream; the original's explicit Close() was redundant.
    using (Stream fs = new FileStream(file, FileMode.CreateNew, FileAccess.Write))
    {
        bucket.DownloadToStream(fileEntry.Id, fs);
    }
}
/// <summary>
/// Downloads the GridFS file with the given id into the destination stream.
/// </summary>
/// <param name="fileId">Hex string form of the file's ObjectId.</param>
/// <param name="destination">Stream the file content is written into.</param>
/// <exception cref="Exception">Thrown when no file with that id exists.</exception>
public void Download(string fileId, Stream destination)
{
    GridFSBucket gridFS = new GridFSBucket(this.GetDB());

    // Parse once instead of twice as the original did.
    var id = new ObjectId(fileId);
    GridFSFileInfo gridFSFile =
        gridFS.Find(Builders <GridFSFileInfo> .Filter.Eq("_id", id)).FirstOrDefault();

    if (gridFSFile == null)
    {
        // Message and exception type kept for existing callers.
        throw new Exception($"cannot find {fileId} file.");
    }

    gridFS.DownloadToStream(id, destination);
    // The original wrapped the body in `try { ... } catch { throw; }`,
    // a no-op that has been removed.
}
// Returns the virtual path of a cached PNG for the given id, downloading it
// from the "imagen" GridFS database into ~/images on first access.
public String GetImagePath(string id)
{
    string fileName = Path.Combine(HttpContext.Current.Server.MapPath("~/images"), id + ".png");
    if (!File.Exists(fileName))
    {
        MongoContext mc = new MongoContext();
        var database = mc.MongoCliente.GetDatabase("imagen");
        var bucket = new GridFSBucket(database);
        // NOTE(review): assigning the ID property appears to populate the
        // _id field used below as a side effect — confirm that setter's
        // behavior; the coupling is not visible in this file.
        ID = id;
        using (var fs = new FileStream(fileName, FileMode.Create))
        {
            bucket.DownloadToStream(_id, fs);
            fs.Close(); // redundant inside 'using', kept as-is
        }
    }
    return("~/images/" + id + ".png");
}
// Serves the GridFS file with the given id as a PNG response.
// Fix: the original wrapped the body in `try { ... } catch (Exception ex) { throw; }`
// with an unused 'ex' — a no-op rethrow that only added noise; removed along
// with a line of commented-out code.
public IActionResult Download(string id)
{
    var client = new MongoClient("mongodb://localhost:27017");
    IMongoDatabase db = client.GetDatabase("TestCol");
    GridFSBucket bucket = new GridFSBucket(db);

    // Download into memory, then rewind so the result reads from the start.
    Stream stream = new MemoryStream();
    bucket.DownloadToStream(ObjectId.Parse(id), stream);
    stream.Position = 0;

    // FileStreamResult disposes the stream after writing the response body.
    return File(stream, "image/png");
}
/// <summary>
/// Loads an attachment from the "attachments" GridFS bucket, located via the
/// "attachment-id" metadata entry ("{id}-{attachmentName}").
/// </summary>
/// <param name="id">Owning document id.</param>
/// <param name="attachmentName">Name of the attachment to load.</param>
/// <returns>An in-memory stream rewound to position 0.</returns>
/// <exception cref="AttachmentNotFoundNoSQLException">No matching attachment exists.</exception>
public override Stream GetAttachment(string id, string attachmentName)
{
    var bucket = new GridFSBucket(database, new GridFSBucketOptions() { BucketName = "attachments" });

    var metadataKey = id + "-" + attachmentName;
    var filter = Builders <GridFSFileInfo> .Filter.Eq(e => e.Metadata["attachment-id"], metadataKey);
    var fileInfo = bucket.Find(filter).FirstOrDefault();

    // Guard clause instead of the original if/else nesting.
    if (fileInfo == null)
    {
        throw new AttachmentNotFoundNoSQLException();
    }

    Stream content = new MemoryStream();
    bucket.DownloadToStream(fileInfo.Id, content);
    content.Position = 0;
    return content;
}
// Background download: pulls every GridFS file whose name contains "csv"
// into <userpath>\Analysis.
private void backgroundWorker2_DoWork_1(object sender, DoWorkEventArgs e)
{
    // download function
    // SECURITY(review): credentials are embedded in the connection string;
    // they belong in configuration/secret storage, not source control.
    var client = new MongoClient("mongodb+srv://admin:[email protected]/group1db?retryWrites=true&w=majority");
    var database = client.GetDatabase("group1db");
    var fs = new GridFSBucket(database);

    // The original wrapped this single regex in a redundant Filter.And(...).
    var filter = Builders <GridFSFileInfo> .Filter.Regex(x => x.Filename, "csv");
    var list = fs.Find(filter).ToList();

    // Path.Combine instead of hand-built "\\" separators.
    var endpath = Path.Combine(this.userpath, "Analysis");
    Directory.CreateDirectory(endpath);

    foreach (GridFSFileInfo doc in list)
    {
        string path = Path.Combine(endpath, doc.Filename);
        using (var stream = File.OpenWrite(path))
        {
            fs.DownloadToStream(doc.Id, stream);
        }
    }
}
/// <summary>
/// these information come from: rm.Stream
/// - FormatInfo.BlobId
/// - Filename.name
/// - Filename.ext
/// Copies one GridFS blob from the configured source bucket to the
/// destination bucket, skipping blobs that already exist or are too recent.
/// </summary>
/// <param name="fileId">String id of the blob to sync (GridFS string-keyed bucket).</param>
/// <param name="config">Per-tenant sync configuration.</param>
/// <param name="dateTimeUploadFilter">I do not want to sync blob that are more recent than this
/// value, to avoid syncing blob while they are handled by DS</param>
/// <returns>True when the blob is (or already was) synced; false when the
/// source is missing or filtered out by the upload-date cutoff.</returns>
public Boolean Sync(
    string fileId,
    ArtifactSyncJobConfig config,
    DateTime? dateTimeUploadFilter = null)
{
    var cfg = _configurator.GetConfiguration(fileId, config);

    // initialize Mongo Databases
    IGridFSBucket <string> sourceBucket;
    MongoUrl sourceMongoUrl;
    IGridFSBucket <string> destinationBucket;
    MongoUrl destinationMongoUrl;
    IMongoDatabase sourceDatabase;
    sourceMongoUrl = new MongoUrl(cfg.SourceConnectionString);
    sourceDatabase = new MongoClient(sourceMongoUrl).GetDatabase(sourceMongoUrl.DatabaseName);
    sourceBucket = new GridFSBucket <string>(sourceDatabase, new GridFSBucketOptions
    {
        BucketName = cfg.Bucket,
        ChunkSizeBytes = 1048576, // 1MB
    });
    IMongoDatabase destinationDatabase;
    destinationMongoUrl = new MongoUrl(cfg.DestConnectionString);
    destinationDatabase = new MongoClient(destinationMongoUrl).GetDatabase(destinationMongoUrl.DatabaseName);
    // Raw .files collection handle, used below to patch uploadDate after copy.
    IMongoCollection <BsonDocument> destinationCollection = destinationDatabase.GetCollection <BsonDocument>(cfg.Bucket + ".files");
    destinationBucket = new GridFSBucket <string>(destinationDatabase, new GridFSBucketOptions
    {
        BucketName = cfg.Bucket,
        ChunkSizeBytes = 1048576, // 1MB
    });

    // before uploading the new element check if it's already in the destination database (maybe it's an alias)
    var findIdFilter = Builders <GridFSFileInfo <string> > .Filter.Eq(x => x.Id, fileId);
    using (var cursor = destinationBucket.Find(findIdFilter))
    {
        var exists = cursor.FirstOrDefault();
        if (exists != null)
        {
            return(true); //Already synced, true
        }
    }

    var source = sourceBucket.Find(findIdFilter).FirstOrDefault();
    if (source == null)
    {
        return(false); //source stream does not exists
    }
    // Skip blobs uploaded after the cutoff — they may still be in use by DS.
    if (dateTimeUploadFilter.HasValue && source.UploadDateTime > dateTimeUploadFilter)
    {
        return(false); //Consider this as not existing.
    }

    var sw = Stopwatch.StartNew();
    Console.WriteLine("Sync needed Tenant {0}/{1}: ", config.Tenant, fileId);
    // Mirror the source's chunking and content type on the destination copy.
    GridFSUploadOptions options = new GridFSUploadOptions();
    options.ChunkSizeBytes = source.ChunkSizeBytes;
    options.ContentType = source.ContentType;
    using (var destinationStream = destinationBucket.OpenUploadStream(fileId, source.Filename, options))
    {
        sourceBucket.DownloadToStream(fileId, destinationStream);
    }
    // Preserve the original upload timestamp instead of the copy time.
    destinationCollection.UpdateOne(
        Builders <BsonDocument> .Filter.Eq("_id", fileId),
        Builders <BsonDocument> .Update.Set("uploadDate", source.UploadDateTime)
        );
    Console.WriteLine("DONE {0}/{1} ({2} ms)", config.Tenant, fileId, sw.ElapsedMilliseconds);
    sw.Stop();
    return(true);
}