/**
 * <summary>Removes all entries identical between the <paramref name="unoptimizedFile"/> and the <paramref name="referenceFiles"/> from the <paramref name="unoptimizedFile"/>.</summary>
 * <remarks>Intended for a <see cref="PackedFile"/> containing DB tables; files without DB tables are not handled correctly.</remarks>
 * <param name="unoptimizedFile">The <see cref="PackedFile"/> to optimize; must contain a DB table.</param>
 * <param name="referenceFiles">The <see cref="DBFile">DBFiles</see> to check for rows identical to those in <paramref name="unoptimizedFile"/>.</param>
 * <returns>The <see cref="PackedFile"/> with duplicate rows removed, or null if no rows remain.</returns>
 */
public PackedFile OptimizePackedDBFile(PackedFile unoptimizedFile, List <DBFile> referenceFiles) {
    DBFile decoded = FromPacked(unoptimizedFile);
    if (decoded == null) {
        // Not decodable as a DB file: hand back the input untouched.
        return unoptimizedFile;
    }
    // Strip every row that also appears in a type-compatible reference table.
    foreach (DBFile reference in referenceFiles) {
        if (TypesCompatible(decoded, reference)) {
            decoded.Entries.RemoveAll(reference.ContainsRow);
        }
    }
    if (decoded.Entries.Count == 0) {
        // Everything was redundant; signal the caller to drop the file.
        return null;
    }
    PackedFile optimized = unoptimizedFile;
    optimized.Data = PackedFileDbCodec.GetCodec(unoptimizedFile).Encode(decoded);
    return optimized;
}
/// <summary>On page load, wipes every record from the configured file database.</summary>
protected void Page_Load(object sender, EventArgs e) {
    DBConfig config = new DBConfig();
    DBFile fileDb = config.db();
    fileDb.Clear();
}
/// <summary>
/// Registers a folder path on the server: derives a unique server-side path
/// from the request parameters, stores the record, fires the creation event,
/// and returns the generated path as JSON.
/// </summary>
void mkpath() {
    string id = this.reqString("id");
    string pathLoc = this.reqStringDecode("pathLoc");

    FileInf info = new FileInf();
    info.id = id;
    info.nameLoc = Path.GetFileName(pathLoc);
    info.nameSvr = info.nameLoc;
    info.pathLoc = pathLoc;

    // Generate a uuid-based server path and normalize separators to forward slashes.
    PathBuilderUuid pathBuilder = new PathBuilderUuid();
    info.pathSvr = pathBuilder.genFile(id, info.nameLoc).Replace("\\", "/");

    // Persist the record. NOTE(review): the original created an unused
    // FileInf for a same-name check that was never implemented.
    DBFile db = new DBFile();
    db.Add(ref info);

    // Notify listeners that the file record was created.
    up6_biz_event.file_create(info);

    JObject payload = new JObject();
    payload["pathSvr"] = info.pathSvr;
    this.toContent(payload);
}
/*
 * With no arguments, print the names of all db tables in the pack.
 * With arguments, print each requested table's fields and types; reference
 * fields are annotated with the table:field they point at, and primary key
 * fields are marked with "*".
 */
public override void Execute() {
    if (AllTables) {
        // Dump every db table name found in the pack.
        foreach (PackedFile packed in PackedFiles) {
            if (packed.FullPath.StartsWith("db")) {
                Console.WriteLine(DBFile.Typename(packed.FullPath));
            }
        }
        return;
    }
    foreach (DBFile dbFile in DbFiles) {
        Console.WriteLine("{0}:", dbFile.CurrentType.Name);
        foreach (FieldInfo info in dbFile.CurrentType.Fields) {
            string reference = (info.FieldReference == null)
                ? ""
                : string.Format(" -> {0}:{1}", info.ReferencedTable, info.ReferencedField);
            Console.WriteLine("{0} : {1}{2}{3}", info.Name, info.TypeName, info.PrimaryKey ? "*" : "", reference);
        }
    }
}
/// <summary>
/// Exports the recordings listed in the tree view to a .bd journal file
/// chosen by the user.
/// </summary>
private void сохранитьВЖурналToolStripMenuItem1_Click(object sender, EventArgs e) {
    SaveFileDialog svf = new SaveFileDialog();
    svf.Filter = "BD Data|*.bd";
    // Bug fix: the export previously ran even when the user cancelled the
    // dialog, saving to an unset FileDB path.
    if (svf.ShowDialog() != DialogResult.OK) {
        return;
    }
    DBFile File = new DBFile();
    File.FileDB = svf.FileName;
    foreach (var item in treeView1.Nodes) {
        var o = item as TreeNode;
        // Node text carries the date as 6 digits starting at position 1.
        // NOTE(review): parsed as (yy, mm, dd) with a two-digit year — confirm
        // the intended century handling.
        var d = o.Text.Substring(1, 6);
        DBNode n = new DBNode() {
            FilePath = o.Name,
            Camera = "Created by OtherCoder",
            Date = new DateTime(int.Parse(d.Substring(0, 2)), int.Parse(d.Substring(2, 2)), int.Parse(d.Substring(4))),
            RecordTime = FFprobe.FFprobe.GetDuration(o.Name)
        };
        File.AddNode(n);
    }
    File.Save();
    MessageBox.Show("Успешно");
}
/// <summary>
/// Integration test: creates a ~20KB temp file and verifies it can be added
/// as material to the first course of the current semester.
/// </summary>
public async Task AddCourse() {
    var semesters = await courseHandler.GetCurrentSemester();
    Assert.IsTrue(semesters.Count > 0);
    var course = semesters.SelectMany(x => x.Courses).FirstOrDefault();

    var file = Path.GetTempFileName();
    var data = "This is a test File\n";
    // Perf fix: the original called File.AppendAllText 1000 times, reopening
    // the file on every iteration; build the content once and write it once.
    File.WriteAllText(file, string.Concat(Enumerable.Repeat(data, 1000)));

    var db = new DBFile() { FilePath = file, FileName = Path.GetFileName(file) };
    var res = await courseHandler.AddMaterial(course.Id, new System.Collections.Generic.List <DBFile> { db });
    Assert.IsTrue(res.Actionstatus);
}
/// <summary>
/// JSONP endpoint: marks an uploaded file as complete and, when requested,
/// replaces the previously stored file of the same name.
/// </summary>
protected void Page_Load(object sender, EventArgs e) {
    var md5 = this.reqString("md5");
    var uid = this.reqToInt("uid");
    var id = this.reqString("id");
    var pid = this.reqString("pid");
    var cbk = this.reqStringSafe("callback");
    var cover = this.reqToInt("cover");            // 1 = overwrite same-named file
    var nameLoc = this.reqStringDecode("nameLoc"); // local file name

    int ret = 0; // 1 on success
    if (!string.IsNullOrEmpty(id)) {
        DBConfig cfg = new DBConfig();
        DBFile db = cfg.db();
        db.complete(id);
        if (cover == 1) {
            // Overwrite: drop the previously stored file with the same name.
            db.delete(pid, nameLoc, uid, id);
        }
        up6_biz_event.file_post_complete(id);
        ret = 1;
    }
    // Response must be JSONP.
    this.toContentJson(cbk + "(" + ret + ")");
}
/*
 * This doesn't really belong here...
 * Walks all db tables in the given pack and reports, per table with a GUID,
 * whether a decodable type definition is already known for it. (The actual
 * string conversion / TryDecode call remains disabled, as in the original.)
 */
public void ConvertAllStringsToAscii(string packFile) {
    PackFile file = new PackFileCodec().Open(packFile);
    foreach (PackedFile packed in file) {
        if (!packed.FullPath.StartsWith("db")) {
            continue;
        }
        string typename = DBFile.Typename(packed.FullPath);
        DBFileHeader header = PackedFileDbCodec.readHeader(packed);
        if (string.IsNullOrEmpty(header.GUID)) {
            continue;
        }
        if (CanDecode(packed)) {
            Console.WriteLine("already have info for {0}", header.GUID);
            continue;
        }
        // No entry for this GUID yet; see whether any known definition applies.
        List <TypeInfo> allInfos = DBTypeMap.Instance.GetAllInfos(typename);
        if (allInfos.Count > 0) {
            // TryDecode(packed, header, allInfos); // left disabled, as in the original
        } else {
            Console.WriteLine("no info at all for {0}", typename);
        }
    }
}
/// <summary>
/// Lets the user pick a destination folder and a .db index file, then queues
/// a download for every entry listed in that file.
/// </summary>
private void button4_Click(object sender, EventArgs e) {
    FolderBrowserDialog folderDialog = new FolderBrowserDialog();
    folderDialog.Description = "Папка для загрузки файлов";
    if (folderDialog.ShowDialog() != DialogResult.OK) {
        return;
    }
    OpenFileDialog fileDialog = new OpenFileDialog();
    fileDialog.Filter = "DB File|*.db";
    if (fileDialog.ShowDialog() != DialogResult.OK) {
        return;
    }
    var db = DBFile.Read(fileDialog.FileName);
    progressBar1.Maximum = 0;
    progressBar1.Value = 0;
    progressBar1.Style = ProgressBarStyle.Blocks;
    Sizefiles.Clear();
    foreach (var item in db) {
        // Each queued download contributes 100 units to the progress bar.
        progressBar1.Maximum += 100;
        // First path segment containing a dot is used as the local file name.
        string localName = item.HTTPPath.Split('/').Where(x => x.Contains(".")).ToArray()[0];
        BtnDownload(DownloadingPaths.ToPath(setting.URLToHTTPPort) + DownloadingPaths.SD + item.HTTPPath,
                    folderDialog.SelectedPath + "\\" + localName);
    }
}
/*
 * Build an optimized copy of the given packed file. For db files, only the
 * rows that differ from the corresponding game-pack table are kept; a db file
 * with no differing rows yields null. Non-db files are returned unchanged.
 */
PackedFile CreateOptimizedFile(PackedFile toOptimize) {
    if (!toOptimize.FullPath.StartsWith("db")) {
        // Only db files get special handling.
        return toOptimize;
    }
    PackedFile result = toOptimize;
    try {
        DBFile modDbFile = FromPacked(toOptimize);
        if (modDbFile != null) {
            DBFile gameDbFile = FindInGamePacks(toOptimize);
            if (TypesCompatible(modDbFile, gameDbFile)) {
                DBFileHeader header = new DBFileHeader(modDbFile.Header);
                DBFile optimized = new DBFile(header, modDbFile.CurrentType);
                optimized.Entries.AddRange(GetDifferingRows(modDbFile, gameDbFile));
                if (optimized.Entries.Count == 0) {
                    // Nothing differs from the game data; drop the file.
                    result = null;
                } else {
                    result.Data = PackedFileDbCodec.GetCodec(toOptimize).Encode(optimized);
                }
            }
        }
    } catch (Exception e) {
        Console.Error.WriteLine(e);
    }
    return result;
}
/// <summary>
/// Returns the names of all fields in <paramref name="file"/> whose column
/// values equal <paramref name="toMatch"/>; also accepts columns whose xml
/// representation stores booleans as "1"/"0".
/// </summary>
List <string> GetMatchingFieldNames(DBFile file, List <string> toMatch) {
    List <string> result = new List <string>();
    for (int i = 0; i < file.CurrentType.Fields.Count; i++) {
        List <string> values = GetFieldValues(file.Entries, i);
        if (Enumerable.SequenceEqual <string>(values, toMatch)) {
            result.Add(file.CurrentType.Fields[i].Name);
        } else if (file.CurrentType.Name.Equals("boolean") && values.Count == toMatch.Count) {
            // check for booleans stored as ints in the xml
            // NOTE(review): this compares the TABLE type name to "boolean" —
            // confirm it shouldn't be the field's type name instead.
            bool match = true;
            for (int valIndex = 0; valIndex < toMatch.Count; valIndex++) {
                // Bug fix: the original indexed with the outer loop variable
                // (toMatch[i] / values[i]) instead of valIndex, so it compared
                // the same element every iteration and could go out of range.
                bool matchValue = Boolean.Parse(toMatch[valIndex]);
                bool checkValue = "1".Equals(values[valIndex]);
                if (matchValue != checkValue) {
                    match = false;
                    break;
                }
            }
            if (match) {
                result.Add(file.CurrentType.Fields[i].Name);
            }
        }
    }
    return(result);
}
/// <summary>
/// Looks up the file attached to a work item by filename. WorkFile rows with
/// an empty filename fall back to matching the underlying File's filename.
/// </summary>
/// <param name="throw_on_multiple">When true, a second matching row raises an exception.</param>
/// <returns>The matching DBFile, or null when no row matches.</returns>
public static DBFile GetFile (DB db, int work_id, string filename, bool throw_on_multiple)
{
    DBFile result = null;

    using (IDbCommand cmd = db.CreateCommand ()) {
        // hidden is the OR of the file's and the work-file's flags; the final
        // CASE column resolves the effective filename.
        cmd.CommandText = @" SELECT File.id, File.md5, File.file_id, File.mime, File.compressed_mime, File.size, File.file_id, File.hidden OR WorkFile.hidden AS hidden, CASE WHEN WorkFile.filename = '' THEN File.filename ELSE WorkFile.filename END FROM WorkFile INNER JOIN File ON WorkFile.file_id = File.id WHERE WorkFile.work_id = @work_id AND (WorkFile.filename = @filename OR (WorkFile.filename = '' AND File.filename = @filename)); ";
        DB.CreateParameter (cmd, "work_id", work_id);
        DB.CreateParameter (cmd, "filename", filename);
        using (IDataReader reader = cmd.ExecuteReader ()) {
            if (!reader.Read ())
                return null;
            result = new DBFile (reader);
            // A second row is either ignored or, on request, treated as an error.
            if (throw_on_multiple && reader.Read ())
                throw new Exception (string.Format ("Found more than one file in work with id {0} whose filename is '{1}'", work_id, filename));
        }
    }
    return result;
}
/// <summary>
/// Builds mapped data tables by decoding every db file in the given pack and
/// pairing its values with those parsed from the corresponding XML in xmlDir.
/// </summary>
// NOTE(review): this definition appears truncated in this view (no #endif or
// closing braces visible); only comments were added.
public FieldCorrespondencyFinder(string packFile, string xmlDir)
{
    xmlDirectory = xmlDir;
    DBTypeMap.Instance.InitializeTypeMap(Directory.GetCurrentDirectory());
    // initialize patchFileValues from pack file
    PackFile pack = new PackFileCodec().Open(packFile);
    foreach (PackedFile contained in pack.Files)
    {
        if (contained.FullPath.StartsWith("db"))
        {
            // no need to resolve if it's done already...
            string tableName = DBFile.Typename(contained.FullPath).Replace("_tables", "");
            try
            {
                PackedFileDbCodec codec = PackedFileDbCodec.GetCodec(contained);
                // keep the original GUID so pack and xml line up
                codec.AutoadjustGuid = false;
                DBFile dbFile = codec.Decode(contained.Data);
                MappedDataTable table = new MappedDataTable(tableName);
                ValuesFromPack(table, dbFile);
                ValuesFromXml(table);
                mappedTables[tableName] = table;
#if DEBUG
            } catch (Exception e) {
                Console.Error.WriteLine(e.Message);
            }
#else
            } catch { }
/// <summary>
/// Downloads a work file into <paramref name="dir"/> and, when the stored
/// record is gzip-compressed, uncompresses the written file in place.
/// </summary>
public static void WriteToDisk(this DBWorkFile wf, DB db, string dir) {
    string filename = Path.Combine(dir, wf.filename);
    DBFile file = DBFile_Extensions.Create(db, wf.file_id);

    if (!Directory.Exists(dir)) {
        Directory.CreateDirectory(dir);
    }

    using (Stream stream = db.Download(wf))
    using (FileStream fs = new FileStream(filename, FileMode.Create, FileAccess.Write, FileShare.Read)) {
        // Copy the whole download to disk (replaces the manual 1KB read loop).
        stream.CopyTo(fs);
    }

    if (file.compressed_mime == "application/x-gzip") {
        FileUtilities.GZUncompress(filename);
    }
}
/// <summary>
/// JSONP endpoint: marks a folder upload as complete, scans its contents,
/// flags the merge as finished, and fires the completion event.
/// </summary>
protected void Page_Load(object sender, EventArgs e) {
    string id = Request.QueryString["id"];
    string uid = Request.QueryString["uid"];
    string cak = Request.QueryString["callback"];
    int ret = 0; // 1 = success
    if (!string.IsNullOrEmpty(id)) {
        DBFile.complete(id);

        FileInf folder = new FileInf();
        folder.id = id;
        DBFile.read(ref folder);

        // Scan the folder tree now that all parts are present.
        fd_scan sc = new fd_scan();
        sc.scan(folder);

        // Merge finished; notify listeners.
        DBFile.merged(id);
        ret = 1;
        up7_biz_event.folder_post_complete(id);
    }
    Response.Write(cak + "(" + ret + ")");
}
/// <summary>
/// JSONP endpoint: marks a file upload as complete and merged, then fires
/// the completion event. Requires both uid and id parameters.
/// </summary>
protected void Page_Load(object sender, EventArgs e) {
    string uid = Request.QueryString["uid"];
    string id = Request.QueryString["id"];
    string cbk = Request.QueryString["callback"];
    int ret = 0; // 1 = success
    if (!string.IsNullOrEmpty(uid) && !string.IsNullOrEmpty(id)) {
        DBFile.complete(id); // mark upload finished
        DBFile.merged(id);   // mark merge finished
        ret = 1;
        up7_biz_event.file_post_complete(id);
    }
    // Response must be JSONP.
    Response.Write(cbk + "(" + ret + ")");
}
/*
 * Insert the previously given values into the db table.
 * Rows whose value count doesn't match the table's field count are skipped
 * with a warning instead of being inserted.
 */
public override void Execute() {
    foreach (PackedFile packed in PackedFiles) {
        // Read from the source pack; get or create the table in the target pack.
        DBFile targetFile = GetTargetFile(packed);
        foreach (RowValues insertValues in Source.Values) {
            int expected = targetFile.CurrentType.Fields.Count;
            if (expected != insertValues.Count) {
                Console.WriteLine("Cannot insert: was given {0} values, expecting {1} in {2}",
                                  insertValues.Count, expected, packed.FullPath);
                Console.WriteLine("Values: {0}", string.Join(",", insertValues));
                continue;
            }
            DBRow row = targetFile.GetNewEntry();
            for (int i = 0; i < row.Count; i++) {
                row[i].Value = insertValues[i];
            }
            targetFile.Entries.Add(row);
        }
        // Encode the merged table and store it in the target pack.
        PackedFile newPacked = new PackedFile(packed.FullPath, false);
        newPacked.Data = PackedFileDbCodec.GetCodec(newPacked).Encode(targetFile);
        SaveTo.Add(newPacked, true);
    }
}
/// <summary>
/// Loads the stored contents of the file with the given id.
/// </summary>
/// <param name="file">Primary key of the FileStorage row to read.</param>
/// <returns>The stored bytes, or null when the row does not exist or has no contents.</returns>
public byte[] ReadFromFile(string file) {
    var con = GetDBConnection();
    // Parameterized query: "?" is bound to <file>, so no injection risk here.
    DBFile f = con.FindWithQuery <DBFile>("SELECT * FROM FileStorage WHERE ID=?", file);
    // "?? null" was redundant: f?.Contents already yields null when f is null.
    return f?.Contents;
}
/// <summary>
/// True when the packed db file's header version is older than the newest
/// version the current game supports for that table type (and the type is
/// supported at all).
/// </summary>
public static bool HeaderVersionObsolete(PackedFile packedFile) {
    DBFileHeader header = PackedFileDbCodec.readHeader(packedFile);
    string type = DBFile.Typename(packedFile.FullPath);
    int maxVersion = GameManager.Instance.GetMaxDbVersion(type);
    if (!DBTypeMap.Instance.IsSupported(type) || maxVersion == 0) {
        return false;
    }
    return header.Version < maxVersion;
}
/// <summary>
/// A packed file is testable when it is a db table, excluding the two model
/// tables this tester does not handle.
/// </summary>
public override bool CanTest(PackedFile packed) {
    if (!packed.FullPath.StartsWith("db")) {
        return false;
    }
    string typename = DBFile.Typename(packed.FullPath);
    return !typename.Equals("models_building_tables")
        && !typename.Equals("models_naval_tables");
}
/// <summary>
/// Collects a completed folder's metadata and its completed files into a JSON string.
/// </summary>
/// <param name="fid">Server-side folder id.</param>
/// <param name="root">Receives the folder's metadata read from the database.</param>
/// <returns>JSON object carrying the folder info, its completed files, and their ids.</returns>
public static string GetFolderData(int fid, ref FolderInf root) {
    StringBuilder sb = new StringBuilder();
    sb.Append("select ");
    sb.Append("xf.fd_name");
    sb.Append(",xf.fd_length");
    sb.Append(",xf.fd_size");
    sb.Append(",xf.fd_pid");
    sb.Append(",xf.fd_pathLoc");
    sb.Append(",xf.fd_pathSvr");
    sb.Append(",xf.fd_folders");
    sb.Append(",xf.fd_files");
    sb.Append(",xf.fd_filesComplete");
    sb.Append(" from down_folders as df");
    sb.Append(" left join xdb_files as xf");
    sb.Append(" on xf.fd_id = df.fd_id");
    sb.Append(" where df.fd_id=@fd_id and xf.fd_complete=1;");
    DbHelper db = new DbHelper();
    DbCommand cmd = db.GetCommand(sb.ToString());
    db.AddInInt32(cmd, "@fd_id", fid);
    // Fix: wrap the reader in using so it is closed even when a Get* call
    // throws (the original only called r.Close() on the success path).
    using (DbDataReader r = db.ExecuteReader(cmd)) {
        if (r.Read()) {
            root.m_name = r.GetString(0);
            root.m_lenLoc = r.GetInt64(1);
            root.m_size = r.GetString(2);
            root.m_pidSvr = r.GetInt32(3);
            root.m_idSvr = fid;
            root.m_pathLoc = r.GetString(4);
            root.m_pathSvr = r.GetString(5);
            root.foldersCount = r.GetInt32(6);
            root.filesCount = r.GetInt32(7);
            root.filesComplete = r.GetInt32(8);
        }
    }
    // Uploaded length is tracked separately.
    root.lenSvr = DBFolder.GetLenPosted(fid);
    // Gather completed files and their ids.
    JArray files = new JArray();
    List <string> ids = new List <string>();
    DBFile.GetCompletes(fid, ref files, ref ids);
    JObject obj = (JObject)JToken.FromObject(root);
    obj["files"] = files;
    obj["length"] = root.m_lenLoc;
    obj["ids"] = string.Join(",", ids.ToArray());
    return obj.ToString();
}
/// <summary>Writes the stored file with the given id to <paramref name="stream"/>.</summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="stream"/> is null.</exception>
public static void WriteToStream(int fileID, Stream stream) {
    if (null == stream) {
        // Bug fix: the message was previously passed as the paramName
        // argument of ArgumentNullException(string paramName).
        throw new ArgumentNullException(nameof(stream), "Stream cannot be null.");
    }
    DBFile.WriteToStream(fileID, stream);
}
/// <summary>
/// Reads file data from <paramref name="stream"/> into the record with the
/// given id, inside a committed transaction.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="stream"/> is null.</exception>
public static void ReadFromStream(int fileID, Stream stream) {
    // Guard added for consistency with WriteToStream's null check, failing
    // fast instead of inside the transaction.
    if (null == stream) {
        throw new ArgumentNullException(nameof(stream), "Stream cannot be null.");
    }
    using (DbTransaction tran = DbTransaction.Begin()) {
        DBFile.ReadFromStream(fileID, stream);
        tran.Commit();
    }
}
/// <summary>
/// View model for a single file's detail pane.
/// </summary>
/// <param name="dbFile">Data-access object for the displayed file.</param>
/// <param name="messenger">Messenger used for cross-view-model notifications.</param>
public FileDetailVM(DBFile dbFile, IMessenger messenger)
{
    fileDBAccess = dbFile;
    this.messenger = messenger;
    // Delete action is exposed to the view as a relay command.
    DeleteCommand = new RelayCommand(DeleteFile);
    // React when another component reports a file change.
    messenger.Register <FileChangedMessage>(FileChanged);
}
/// <summary>
/// Removes the association between this work item and <paramref name="file"/>
/// by deleting the WorkFile row; the File row itself is left intact.
/// </summary>
public static void RemoveFile (this DBWork me, DB db, DBFile file)
{
    using (IDbCommand cmd = db.CreateCommand ()) {
        cmd.CommandText = "DELETE FROM WorkFile WHERE work_id = @work_id AND file_id = @file_id;";
        DB.CreateParameter (cmd, "work_id", me.id);
        DB.CreateParameter (cmd, "file_id", file.id);
        cmd.ExecuteNonQuery ();
    }
}
/********************************************************************************************
 * Builds the System.Data.DataTable that both stores this db table's data and               *
 * serves as the visual's binding source.                                                   *
 ********************************************************************************************/
private DataTable CreateTable(PackedFile currentPackedFile, DBFile table) {
    DataTable constructionTable = new DataTable(currentPackedFile.Name);
    List <DataColumn> keyList = new List <DataColumn>();
    constructionTable.BeginLoadData();

    foreach (FieldInfo columnInfo in table.CurrentType.Fields) {
        // Columns default to string so the WPF DataGrid's built-in validation
        // stays out of the way (we validate ourselves); numeric field types
        // get a double column instead.
        bool numeric = columnInfo.TypeCode == TypeCode.Int16
                    || columnInfo.TypeCode == TypeCode.Int32
                    || columnInfo.TypeCode == TypeCode.Single;
        DataColumn column = new DataColumn(columnInfo.Name, numeric ? typeof(double) : typeof(string));
        column.AllowDBNull = true;
        column.Unique = false;
        column.ReadOnly = true;

        // Remember the foreign-key reference, if any.
        if (!String.IsNullOrEmpty(columnInfo.ForeignReference)) {
            column.ExtendedProperties.Add("FKey", columnInfo.ForeignReference);
        }
        // Primary keys are collected and applied once all columns exist.
        if (columnInfo.PrimaryKey) {
            keyList.Add(column);
        }
        constructionTable.Columns.Add(column);
    }

    if (keyList.Count > 0) {
        constructionTable.PrimaryKey = keyList.ToArray();
    }

    // Schema is complete; pour in the row data.
    foreach (List <FieldInstance> rowentry in table.Entries) {
        constructionTable.Rows.Add(rowentry.Select(n => n.Value).ToArray <object>());
    }
    constructionTable.EndLoadData();
    constructionTable.AcceptChanges();
    return constructionTable;
}
/// <summary>
/// Decodes a packed db file and re-encodes its contents with the text codec,
/// returning the encoded bytes.
/// </summary>
public byte[] Process(PackedFile file) {
    using (MemoryStream stream = new MemoryStream()) {
        DBFile dbFile = PackedFileDbCodec.Decode(file);
        TextDbCodec.Instance.Encode(stream, dbFile);
        return stream.ToArray();
    }
}
/// <summary>
/// Inserts a file record, generating a fresh Guid when the caller left the id empty.
/// </summary>
/// <param name="file">Record to insert; its Id may be set in place.</param>
/// <returns>The id under which the file was stored.</returns>
public async Task <Guid> AddFile(DBFile file)
{
    if (file.Id == Guid.Empty)
    {
        file.Id = Guid.NewGuid();
    }
    // NOTE(review): the meaning of Insert's second (false) argument is not
    // visible from this file — confirm against TblFiles' API.
    await TblFiles.Insert(file, false).ExecuteAsync().ConfigureAwait(false);
    return file.Id;
}
/// <summary>
/// Opens a connection to the database selected by <paramref name="maindb"/>,
/// taking the schema read lock. If used before Initialize(), logs a warning
/// with a stack trace and initializes the matching connection type first.
/// On any failure, partially acquired resources are released and the
/// exception is rethrown.
/// </summary>
public SQLiteConnectionED(EDDSqlDbSelection? maindb = null, bool utctimeindicator = false, bool initializing = false, bool shortlived = true) : base(initializing)
{
    bool locktaken = false;
    try
    {
        if (!initializing && !_initialized)
        {
            // Used before Initialize(): trace the offender, then initialize
            // the correct connection type so this call can still proceed.
            System.Diagnostics.Trace.WriteLine($"Database {typeof(TConn).Name} initialized before Initialize()");
            System.Diagnostics.Trace.WriteLine(new System.Diagnostics.StackTrace(2, true).ToString());
            if (typeof(TConn) == typeof(SQLiteConnectionUser))
            {
                SQLiteConnectionUser.Initialize();
            }
            else if (typeof(TConn) == typeof(SQLiteConnectionSystem))
            {
                SQLiteConnectionSystem.Initialize();
            }
        }
        // Taken as a read lock: many connections may coexist, but not with a
        // schema change in progress.
        _schemaLock.EnterReadLock();
        locktaken = true;
        // System.Threading.Monitor.Enter(monitor);
        //Console.WriteLine("Connection open " + System.Threading.Thread.CurrentThread.Name);
        DBFile = GetSQLiteDBFile(maindb ?? EDDSqlDbSelection.EDDUser);
        _cn = DbFactory.CreateConnection();
        // Use the database selected by maindb as the 'main' database
        _cn.ConnectionString = "Data Source=" + DBFile.Replace("\\", "\\\\") + ";Pooling=true;";
        if (utctimeindicator)   // indicate treat dates as UTC.
        {
            _cn.ConnectionString += "DateTimeKind=Utc;";
        }
        _transactionLock = new SQLiteTxnLockED <TConn>();
        _cn.Open();
    }
    catch
    {
        // Roll back partial setup: dispose the txn lock, release the schema
        // lock if we took it, then rethrow to the caller.
        if (_transactionLock != null)
        {
            _transactionLock.Dispose();
        }
        if (locktaken)
        {
            _schemaLock.ExitReadLock();
        }
        throw;
    }
}
/// <summary>
/// Opens a stream over the file's contents: from the database when a file_id
/// is present, otherwise decompressing the on-disk gzip store keyed by md5.
/// </summary>
public Stream Download(DBFile file) {
    if (!file.file_id.HasValue) {
        var diskStream = new FileStream(DBFile_Extensions.GetFullPath(file.md5), FileMode.Open, FileAccess.Read);
        return new System.IO.Compression.GZipStream(diskStream, System.IO.Compression.CompressionMode.Decompress);
    }
    return new DBFileStream(file, this);
}
/// <summary>
/// True when the dialog's values differ from the stored item: the name
/// changed, or (for images) the description changed.
/// </summary>
private bool hasChanges(PropertiesDialog dialog, DBFile item) {
    if (!dialog.ItemName.Equals(item.Name))
        return true;
    // Pattern match replaces the original "is + cast" pair; the original's
    // trailing "item != null" was redundant ("is" already implies non-null).
    if (item is PhotoAlbumDB.Entities.Image img && !dialog.ItemDescription.Equals(img.Description)) {
        return true;
    }
    return false;
}
/// <summary>
/// Deletes a file's on-disk copy and database row. The returned DBFile's
/// Status reports the outcome (deleted / not in database / id not unique /
/// undefined on error).
/// </summary>
// SECURITY NOTE(review): dbfile.ID is concatenated directly into the SQL
// below (as elsewhere in this class); parameterized queries would prevent
// injection if the ID can be attacker-influenced.
public DBFile DeleteFile(IUser user, DBFile dbfile)
{
    Debug.WriteLine("Server: Try to delete. "+dbfile.ID);
    try{
        DataTable dt = _db.Select("SELECT md5 FROM files " +
                "WHERE id = '"+dbfile.ID+"'");
        if(dt.Rows.Count <= 0){
            dbfile.Status = FileStatus.FileNotInDatabase;
            return dbfile;
        }else if(dt.Rows.Count > 1){
            dbfile.Status = FileStatus.FileIDNotUnique;
            return dbfile;
        }else if(dt.Rows.Count == 1){
            // On-disk copies are stored under their md5 hash.
            string hash = dt.Rows[0].ItemArray[0].ToString();
            if(File.Exists(_serverStoragePath+hash))
                File.Delete(_serverStoragePath+hash);
            else
                Debug.WriteLine("Server: Try to delete. Not in Filesystem");
            // Remove the database row. NOTE(review): executed through the
            // _db.Insert helper — presumably a generic "execute" method.
            string temp = "DELETE FROM files " +
                "WHERE id = '"+dbfile.ID+"'";
            _db.Insert(temp);
            dbfile.Status = FileStatus.FileSuccessfullyDeleted;
            return dbfile;
        }
    }catch(Exception e){ Debug.WriteLine("Server: "+e.Message);}
    // Reached only when an exception occurred above.
    dbfile.Status = FileStatus.Undefined;
    return dbfile;
}
/// <summary>
/// Opens a shared read-only stream over the stored file identified by
/// dbfile.ID and attaches it to the returned DBFile; on failure only the
/// Status field is set.
/// </summary>
// SECURITY NOTE(review): dbfile.ID is concatenated into the SQL; use
// parameters if the ID can be attacker-influenced.
public DBFile GetFile(IUser user, DBFile dbfile)
{
    try{
        DataTable dt = _db.Select("SELECT md5 FROM files " +
                "WHERE id = '"+dbfile.ID+"'");
        if(dt.Rows.Count <= 0){
            dbfile.Status = FileStatus.FileNotInDatabase;
        }else if(dt.Rows.Count > 1){
            dbfile.Status = FileStatus.FileIDNotUnique;
        }else{
            // On-disk copies are stored under their md5 hash.
            string hash = dt.Rows[0].ItemArray[0].ToString();
            if(!File.Exists(_serverStoragePath+hash)){
                dbfile.Status = FileStatus.FileNotInFilesystem;
            }else{
                // NOTE(review): the caller appears responsible for closing
                // this stream — confirm the consumer disposes it.
                FileStream fs = File.Open(_serverStoragePath+hash,
                        FileMode.Open,FileAccess.Read,FileShare.Read);
                dbfile.Stream = fs;
            }
        }
    }catch(Exception e){ Debug.WriteLine("Server: "+e.Message);}
    return dbfile;
}
// GUI kicks off the delete asynchronously; ServerFileDeleted runs on completion.
public void DeleteFile(IUser user, DBFile dbfile) {
    DeletingFile process = _ro.DeleteFile;
    process.BeginInvoke(user, dbfile, new AsyncCallback(ServerFileDeleted), null);
}
/// <summary>
/// Lists the files stored directly in the given directory. Returns an empty
/// list when the directory is null or a database error occurs.
/// </summary>
public List<DBFile> GetFiles(IUser user, DBDirectory dir)
{
    List<DBFile> list = new List<DBFile>();
    if(dir == null)
        return list;
    try{
        // Cleanup: dir is known non-null after the early return above, so
        // the original's re-check and dead "dirID = 0" fallback are removed.
        // dirID is an int, so interpolating it into the SQL cannot inject.
        int dirID = dir.ID;
        DataTable dt = _db.Select("SELECT id,filename,user " +
                "FROM files " +
                "WHERE directory = '"+dirID+"'");
        foreach(DataRow r in dt.Rows)
        {
            DBFile file = new DBFile();
            file.ID = int.Parse(r.ItemArray[0].ToString());
            file.Name = r.ItemArray[1].ToString();
            file.User = int.Parse(r.ItemArray[2].ToString());
            list.Add(file);
        }
    }catch(Exception e){
        // Best effort: log and return whatever was collected so far.
        Debug.WriteLine("Server: "+e.Message);
    }
    return list;
}
/// <summary>
/// Renames a file record after validating that the new name is non-empty and
/// not already used in the file's directory; the returned DBFile's Status
/// reports the outcome.
/// </summary>
// SECURITY NOTE(review): newName and ids are concatenated into the SQL (as
// elsewhere in this class); parameterized queries would prevent injection.
public DBFile RenameFile(IUser user, DBFile file, string newName)
{
    if(newName == null || newName.Length == 0){
        file.Status = FileStatus.FileNameEmpty;
        return file;}
    if(file.Name == newName){
        return file;} // nothing to do
    try{
        DataTable dt = _db.Select("SELECT id,filename,user " +
                "FROM files " +
                "WHERE directory = '"+file.Directory+"' " +
                "AND filename = '"+newName+"'");
        // Bug fix: was "> 1", which allowed renaming onto a single existing
        // file with the same name; any match means the name is taken.
        if(dt.Rows.Count > 0){
            file.Status = FileStatus.FileNameAllreadyGiven;
            return file;
        }
        dt = _db.Select("SELECT id,filename,user " +
                "FROM files " +
                "WHERE id = '"+file.ID+"'");
        if(dt.Rows.Count <= 0){
            file.Status = FileStatus.FileNotInDatabase;
            return file;
        }else if(dt.Rows.Count > 1){
            file.Status = FileStatus.FileIDNotUnique;
            return file;
        }else{
            _db.Insert("UPDATE files SET filename = '"+newName+"' " +
                    "WHERE id ='"+file.ID+"'");
            file.Name = newName;
            file.Status = FileStatus.FileSuccessfullyRenamed;
            return file;
        }
    }catch(Exception e){
        Debug.WriteLine("Server: "+e.Message);
        file.Status = FileStatus.Undefined;
        return file;
    }
}
/// <summary>
/// Stores an uploaded file: writes the stream to a hash-named path in the
/// server store, verifies the MD5, and records it in the files table. The
/// returned DBFile's Status reports the outcome of each step.
/// </summary>
/// <param name="user">Owner of the file.</param>
/// <param name="fs">Open stream over the uploaded data; closed on success.</param>
/// <param name="clientFileHash">MD5 computed by the client; doubles as the storage file name.</param>
/// <param name="dir">Target directory record (ID 0 appears to mean root — confirm).</param>
public DBFile SaveFile(IUser user, FileStream fs, string clientFileHash, DBDirectory dir)
{
    //if not allready done create neccessary dirs
    CreatePath();
    DBFile dbf = new DBFile();
    //skip the file if its larger than 5 mbyte
    if(fs.Length > 1024*1024*5){
        dbf.Status = FileStatus.FileTooBig;
        return dbf;}
    //check wether the file exists or not
    if(File.Exists(_serverStoragePath+clientFileHash)){
        dbf.Status = FileStatus.FileAllreadyExists;
        return dbf;}
    //check if directory exists or not
    try{
        DataTable dt = _db.Select("SELECT id FROM directories " +
                "WHERE id = '"+dir.ID+"'");
        if(dt.Rows.Count < 1 && dir.ID != 0){
            dbf.Status = FileStatus.DirectoryNotExists;
            return dbf;
        }
    }catch(Exception e){
        Debug.WriteLine("Server: "+e.Message);
        dbf.Status = FileStatus.DatabaseProblem;
        return dbf;
    }
    //try to upload the file and write it locally
    // NOTE(review): reads up to 5MB per chunk; bounded by the size check above.
    try{
        FileStream lfs = File.Open(_serverStoragePath+clientFileHash, FileMode.CreateNew);
        BinaryReader br = new BinaryReader(fs);
        BinaryWriter bw = new BinaryWriter(lfs);
        while(br.BaseStream.Length != br.BaseStream.Position){
            bw.Write(br.ReadBytes(1024*1024*5));}
        bw.Close();
        br.Close();
        lfs.Close();
        fs.Close();
    }catch(Exception e){
        Debug.WriteLine("Server: "+e.Message);
        dbf.Status = FileStatus.FileUploadFailed;
        return dbf;
    }
    //check crc
    // Server recomputes the hash; on mismatch the partial file is removed.
    MD5Class md5 = new MD5Class(_serverStoragePath+clientFileHash);
    string hash = md5.GetHash();
    if(!clientFileHash.Equals(hash))
    {
        try{
            File.Delete(_serverStoragePath+clientFileHash);
        }catch{}
        dbf.Status = FileStatus.FileCRCFailed;
        return dbf;
    }
    //if everthing went just fine write it in the db
    // SECURITY NOTE(review): values are concatenated into the SQL string;
    // fi.Name is user-influenced — parameterized statements would be safer.
    try{
        FileInfo fi = new FileInfo(fs.Name);
        FileInfo local = new FileInfo(_serverStoragePath+hash);
        _db.Insert("INSERT INTO files (md5,filename,user,size,directory) " +
                "VALUES ('"+hash+"','"+fi.Name+"','"+user.ID+"','"+local.Length+"','"+dir.ID+"')");
        dbf.Status = FileStatus.FileUploaded;
        dbf.Name = fi.Name;
        dbf.User = user.ID;
        // Read back the generated row id by hash.
        DataTable dt = _db.Select("SELECT id FROM files " +
                "WHERE md5 = '"+hash+"'");
        if(dt != null && dt.Rows.Count == 1 && dt.Rows[0].ItemArray.Length == 1)
            dbf.ID = int.Parse(dt.Rows[0].ItemArray[0].ToString());
        else
            throw new Exception("File inserted, but hash not found?");
        return dbf;
    }catch(Exception e){
        Debug.WriteLine("Server: "+e.Message);
        dbf.Status = FileStatus.DatabaseProblem;
        return dbf;}
}
// GUI kicks off the file-stream fetch asynchronously; ServerGotFile runs on completion.
public void GetFile(DBFile dbfile) {
    GettingFile process = _ro.GetFile;
    process.BeginInvoke(_user, dbfile, new AsyncCallback(ServerGotFile), null);
}
/// <summary>
/// Opens the locally cached copy of the file, stored as "{id}_{name}" in the
/// client storage path.
/// </summary>
public void StartProcess(DBFile dbfile) {
    string localPath = _clientStoragePath + dbfile.ID + "_" + dbfile.Name;
    StartProcess(localPath);
}
//not a really good search engine
/// <summary>
/// Runs the experimental filename search over all files (results discarded),
/// then the legacy exact-match interest search whose hits are returned.
/// Returns null for invalid users or search strings outside 3..50 chars.
/// </summary>
public List<SearchItem> Search(string searchString, IUser user)
{
    #region new filesearch
    // NOTE(review): the results of this file search are never collected; it
    // only exercises FileSearcher.
    try{
        DataTable dt = _db.Select("SELECT filename,md5 " +
                "FROM files");
        FileSearcher fs = new FileSearcher();
        foreach(DataRow dr in dt.Rows)
        {
            DBFile dbfile = new DBFile();
            dbfile.Name = dr.ItemArray[0].ToString();
            dbfile.MD5 = dr.ItemArray[1].ToString();
            fs.Search(dbfile, searchString);
        }
    }catch(Exception e)
    {
        Debug.WriteLine("Server: New Search. "+e.Message);
    }
    #endregion
    #region thestandard be replaced soon
    if(!CheckUserIntegrity(user))
        return null;
    // Only search strings between 3 and 50 characters are accepted.
    if(searchString.Length < 3 || searchString.Length > 50)
        return null;
    // search algo gone easy: one hit gets 100%
    // SECURITY NOTE(review): searchString is concatenated into the SQL —
    // parameterized queries would prevent injection.
    List<SearchItem> si = new List<SearchItem>();
    try{
        DataTable dt = _db.Select("SELECT user " +
                "FROM interests " +
                "WHERE content = '"+searchString+"'");
        foreach(DataRow dr in dt.Rows)
        {
            SearchItem s = new SearchItem();
            s.PercentageHit = 100;
            s.User = int.Parse(dr.ItemArray[0].ToString());
            s.UserLastSeen = DateTime.Now.Ticks;
            si.Add(s);
        }
    }catch(Exception e)
    {
        Console.Write(e);
    }
    return si;
    #endregion
}