/// <summary>
/// Writes the local file at <c>this.Path</c> into the deduplicated block store:
/// ensures an <c>entities</c> row exists for the file, then reads the file in
/// 4 KiB chunks, deduplicating each chunk by (hash1, hash2) against the
/// <c>blocks</c> table and recording the file-to-block mapping in
/// <c>fileblocks</c>. New blocks are encrypted and appended to every
/// destination file system in <c>fsdst</c>.
/// </summary>
public void Flush()
{
    string fileId = EnsureFileRecord();

    byte[] buffer = new byte[4096];
    int blockCount = 0;

    using (FileStream f = System.IO.File.Open(this.Path, FileMode.Open, FileAccess.Read))
    {
        long lengthOfFile = f.Length;
        long totalRead = 0;

        using (ProgressBar pb = new ProgressBar(disableProgress))
        {
            int bytesRead;
            while ((bytesRead = f.Read(buffer, 0, buffer.Length)) > 0)
            {
                // Progress is reported at the start of each chunk (pre-increment),
                // preserving the original behaviour.
                pb.Report(((double)totalRead) / lengthOfFile);
                totalRead += bytesRead;

                // BUGFIX: hash only the bytes actually read. Previously hash1 was
                // computed over the whole 4096-byte buffer, so a trailing partial
                // chunk included stale bytes left over from the previous read,
                // making hash1 non-deterministic for identical file content and
                // defeating dedupe of final blocks (hash2 already used bytesRead).
                // NOTE: this changes the hash1 stored for trailing partial blocks
                // relative to databases written by the old code.
                byte[] chunk;
                if (bytesRead == buffer.Length)
                {
                    chunk = buffer;
                }
                else
                {
                    chunk = new byte[bytesRead];
                    Array.Copy(buffer, chunk, bytesRead);
                }

                string hash1 = chunk.JenkinsOneAtATime();
                string hash2 = chunk.GetSHA512(0, bytesRead);

                // Empty string from ExecuteScalar means "no such block yet".
                string id = db.ExecuteScalar(
                    "SELECT id FROM blocks WHERE hash2 = @hash2 and hash1 = @hash1",
                    new Dictionary<string, object>() { { "@hash2", hash2 }, { "@hash1", hash1 } });

                if (id == "")
                {
                    StoreNewBlock(chunk, bytesRead, hash1, hash2, fileId, blockCount);
                }
                else
                {
                    // Block content already stored: just record the mapping.
                    db.ExecuteNonQuery(
                        "INSERT INTO fileblocks (file_id, block_id, block_order) VALUES (@fileId, @blockId, @blockOrder)",
                        new Dictionary<string, object>() { { "@fileId", fileId }, { "@blockId", id }, { "@blockOrder", blockCount } });
                }

                blockCount++;
            }
        }
    }
}

/// <summary>
/// Looks up the <c>entities</c> row for this file (by name within its parent
/// directory), creating it with size, timestamps and a whole-file SHA-512 if
/// it does not exist yet. Returns the entity id as a string.
/// </summary>
private string EnsureFileRecord()
{
    string directoryID = db.ExecuteScalar(
        "SELECT id FROM directories WHERE fullpath = @path",
        new Dictionary<string, object>() { { "@path", vsrc.ParentPath.Path } });

    DataTable fileInfo = db.GetDataTable(
        "SELECT * from entities where isdir = @isdir and fname = @name and dir = @dir",
        new Dictionary<string, object>() { { "@name", vsrc.EntityName }, { "@isdir", 0 }, { "@dir", directoryID } });

    if (fileInfo.Rows.Count == 0)
    {
        // NOTE(review): DateTime.Now is local time; if UnixTimeStamp() does not
        // normalize to UTC these timestamps are timezone-dependent — confirm.
        double ctime = DateTime.Now.UnixTimeStamp();

        string fileHash;
        using (FileStream fs = new FileStream(this.Path, FileMode.Open, FileAccess.Read))
        {
            fileHash = fs.GetSHA512();
        }

        db.ExecuteNonQuery(
            "INSERT INTO entities (fname, dir, size, cdate, mdate, isdir, accessdate, filehash) VALUES (@fname, @dir, @size, @cdate, @mdate, @isdir, @access, @filehash)",
            new Dictionary<string, object>()
            {
                { "@fname", vsrc.EntityName },
                { "@dir", directoryID },
                { "@size", new FileInfo(Path).Length },
                { "@cdate", ctime },
                { "@mdate", ctime },
                { "@access", ctime },
                { "@isdir", 0 },
                { "@filehash", fileHash }
            });

        // Re-select to obtain the id the database assigned to the new row.
        fileInfo = db.GetDataTable(
            "SELECT * from entities where isdir = @isdir and fname = @name and dir = @dir",
            new Dictionary<string, object>() { { "@name", vsrc.EntityName }, { "@isdir", 0 }, { "@dir", directoryID } });
    }

    return fileInfo.Rows[0]["id"].ToString();
}

/// <summary>
/// Encrypts a not-yet-stored block and appends it to every destination file
/// system, inserting one <c>blocks</c> row per destination and recording the
/// file-to-block mapping in <c>fileblocks</c>.
/// </summary>
/// <param name="chunk">Block content; exactly <paramref name="bytesRead"/> bytes are meaningful.</param>
/// <param name="bytesRead">Number of valid bytes in <paramref name="chunk"/>.</param>
/// <param name="hash1">Fast (Jenkins) hash of the block, part of the dedupe key.</param>
/// <param name="hash2">SHA-512 of the block, part of the dedupe key.</param>
/// <param name="fileId">Entity id of the file being flushed.</param>
/// <param name="blockCount">Zero-based position of this block within the file.</param>
private void StoreNewBlock(byte[] chunk, int bytesRead, string hash1, string hash2, string fileId, int blockCount)
{
    // A random name, encrypted so the stored block name reveals nothing.
    Guid g = Guid.NewGuid();
    string encName = AESWrapper.EncryptToString(g.ToString(), key);
    // '/' would act as a path separator in the virtual file system: flatten it,
    // then root the name. (The final Replace is retained from the original; it
    // can no longer match once all slashes are replaced above.)
    encName = encName.Replace("/", "_");
    encName = "/" + encName;
    encName = encName.Replace("//", "__");

    string blockInsertSQL =
        "INSERT INTO blocks (hash1, size, name, location, hash2) VALUES (@hash1, @size, @name, @location, @hash2)";
    string fileBlockInsertSQL =
        "INSERT INTO fileblocks (file_id, block_id, block_order) VALUES (@fileId, @blockId, @blockOrder)";

    foreach (IFileSystem fs in fsdst)
    {
        using (var fstream = fs.OpenOrCreate(FileSystemPath.Parse($"/{DATAFILE}"), FileAccess.ReadWrite))
        {
            DeDupeStorage storage = new DeDupeStorage(fstream, db);
            byte[] cipher = AESWrapper.EncryptToByte(chunk, key, 0, bytesRead);
            storage.AddFile(encName, cipher);
        }

        db.ExecuteNonQuery(blockInsertSQL, new Dictionary<string, object>()
        {
            { "@hash1", hash1 },
            { "@size", bytesRead },
            { "@name", encName },
            { "@location", fs.ToString() },
            { "@hash2", hash2 }
        });

        // Re-select to obtain the id the database assigned to the new block.
        string id = db.ExecuteScalar(
            "SELECT id FROM blocks WHERE hash2 = @hash2 and hash1 = @hash1 and location = @location",
            new Dictionary<string, object>() { { "@hash2", hash2 }, { "@hash1", hash1 }, { "@location", fs.ToString() } });

        // NOTE(review): with multiple destinations this inserts one fileblocks
        // row per destination, all sharing the same block_order — presumably
        // intentional replication; verify against the restore logic.
        db.ExecuteNonQuery(fileBlockInsertSQL, new Dictionary<string, object>()
        {
            { "@fileId", fileId },
            { "@blockId", id },
            { "@blockOrder", blockCount }
        });
    }
}