/// <summary>
/// Synchronizes the in-memory marked records into <paramref name="cfg"/> and saves it.
/// Entries already present in cfg.markedInfo are refreshed in place (order preserved);
/// records the cfg does not know about yet are appended at the end.
/// </summary>
/// <param name="cfg">Config whose markedInfo list ("key|marked" strings) is rewritten.</param>
/// <exception cref="Exception">Thrown when an entry is not of the form "key|marked".</exception>
public void save(myChapterPgCfg cfg)
{
    Debug.Assert(m_bLoaded);
    //m_dbfile.save();

    // Records that matched an existing cfg entry; removed from m_dict so the
    // append pass below only sees records the cfg has never stored, then restored.
    List<chapterRec> tmpLst = new List<chapterRec>();
    for (int i = 0; i < cfg.markedInfo.Count; i++)
    {
        string str = cfg.markedInfo[i];
        string[] arr = str.Split(new char[] { '|' });
        if (arr.Length != 2) // Length, not LINQ Count(), on an array
        {
            throw new Exception("invalid data");
        }
        string key = arr[0];
        chapterRec rec;
        // TryGetValue: single lookup instead of ContainsKey + indexer.
        if (m_dict.TryGetValue(key, out rec))
        {
            cfg.markedInfo[i] = rec.key + '|' + rec.marked;
            m_dict.Remove(key);
            tmpLst.Add(rec);
        }
    }
    // Append records the cfg did not already contain.
    foreach (chapterRec rec in m_dict.Values)
    {
        cfg.markedInfo.Add(rec.key + '|' + rec.marked);
    }
    // Restore the records temporarily removed above.
    foreach (chapterRec rec in tmpLst)
    {
        m_dict.Add(rec.key, rec);
    }
    cfg.save();
}
/// <summary>
/// Rewrites the "marked" payload of an existing record in the db file.
/// If the new payload still fits inside the record's page-aligned slot it is
/// updated in place; otherwise the old slot is tombstoned (delete flag set in
/// the header) and the record is re-appended via addMarked.
/// </summary>
private void updateMarked(chapterRec rec)
{
    // Worst-case UTF-8 size. The previous "Length * 2" estimate undercounts
    // 3-byte characters (e.g. most CJK), which would make GetBytes throw on a
    // too-small buffer; GetMaxByteCount is the safe upper bound.
    int estimateSize = Encoding.UTF8.GetMaxByteCount(rec.marked.Length);
    resizeTmpBuff(estimateSize);
    rec.markedLen = (UInt32)Encoding.UTF8.GetBytes(rec.marked, 0, rec.marked.Length, t_buff, 0);

    // hdr(4) + keyLen field(4) + key bytes + markedLen field(4) + marked bytes
    UInt32 newSize = 4 + 4 + rec.keyLen + 4 + rec.markedLen;
    if (rec.size < newSize)
    {
        // New payload no longer fits: mark the old slot deleted, remember it
        // for reuse, and write the record elsewhere.
        m_file.seekWriteCursor((int)rec.offset);
        UInt32 hdr = (UInt32)(rec.size / c_pageSize) | c_fDeleted;
        m_file.writeInt32((int)hdr);
        m_deltedItem.Add(new chapterRec { size = rec.size, offset = rec.offset });
        addMarked(rec);
    }
    else
    {
        // In-place update: seek past hdr + keyLen field + key bytes.
        m_file.seekWriteCursor((int)rec.offset + 4 + 4 + (int)rec.keyLen);
        m_file.writeInt32((int)rec.markedLen);
        m_file.writeData(t_buff, (int)rec.markedLen);
    }
}
//myDbFileCfg m_dbfile;

/// <summary>
/// One-time initialization of the marked-chapter store. Idempotent: returns
/// immediately after the first call. The backing store is selected at compile
/// time: under use_xml the records are parsed from cfg.markedInfo ("key|marked"
/// strings); otherwise they are scanned out of the binary db file.
/// </summary>
/// <exception cref="Exception">Thrown (xml path) when an entry is not "key|marked".</exception>
public void load(myChapterPgCfg cfg)
{
    if (m_bLoaded)
    {
        return;
    }
    m_bLoaded = true;
#if use_xml
    //m_dbfile = myDbFileCfg.getInstance();
    //foreach(var ch in m_dbfile.chapters)
    //{
    //    m_dict.Add(ch.key, ch);
    //}
    // Parse each "key|marked" entry from the cfg into the in-memory dictionary.
    foreach (string str in cfg.markedInfo)
    {
        string[] arr = str.Split(new char[] { '|' });
        if (arr.Count() != 2)
        {
            throw new Exception("invalid data");
        }
        chapterRec rec = new chapterRec();
        rec.key = arr[0];
        rec.marked = arr[1];
        m_dict.Add(arr[0], rec);
    }
#endif
#if !use_xml
    //open file db
    m_file.load();
    //check file db malform
    //load marked info
    // Sequentially scan every record. Deleted records are collected for slot
    // reuse; live records go into the lookup dictionary. On the first sign of
    // corruption the whole db file is dropped and state is reset.
    chapterRec ch;
    var error = firstChapter(out ch);
    while (error != readBdError.eof)
    {
        if (error == readBdError.dbMalform)
        {
            m_file.drop();
            m_dict.Clear();
            m_deltedItem.Clear();
            // Cache must be empty here because load runs before any lookups.
            Debug.Assert(m_cache.Count == 0);
            break;
        }
        if (ch.isDeleted)
        {
            m_deltedItem.Add(ch);
        }
        else
        {
            m_dict.Add(ch.key, ch);
        }
        error = nextChapter(out ch);
    }
#endif
}
// XML-backed variant of updateMarked. The lookup against m_dbfile.chapters is
// commented out, so the try body is empty and the catch (with its assert) is
// currently unreachable dead code — this method is effectively a no-op stub.
// NOTE(review): same signature as the file-db updateMarked; presumably these
// two are separated by a use_xml #if not visible here — confirm.
private void updateMarked(chapterRec rec)
{
    try
    {
        //var foundRec = m_dbfile.chapters.Find((t) => { return t.key == rec.key; });
    }
    catch
    {
        Debug.Assert(false, "not found rec!");
    }
}
/// <summary>
/// Reads the record at the current read cursor and advances past it.
/// On-disk layout: hdr(4: page count, high bit = delete flag), keyLen(4),
/// key bytes, markedLen(4), marked bytes; records are padded to a multiple
/// of c_pageSize.
/// </summary>
/// <returns>success, deletedRec, eof, or dbMalform.</returns>
private readBdError nextChapter(out chapterRec rec)
{
    rec = new chapterRec();
    rec.offset = (UInt32)m_file.readCursor;

    // The first page carries the header.
    bool ret = m_file.readData(t_page, (int)c_pageSize);
    if (!ret)
    {
        return (readBdError.eof);
    }
    UInt32 hdr = BitConverter.ToUInt32(t_page, 0);
    rec.size = (hdr & c_nPageMark) * c_pageSize;
    // A record occupies at least its header page; a zero size or a size that
    // runs past the end of the file means the db is corrupt.
    if (rec.size == 0 || (rec.size + rec.offset) > m_file.Size)
    {
        return (readBdError.dbMalform);
    }
    if ((hdr & c_fDeleted) != 0)
    {
        rec.isDeleted = true;
        // BUGFIX: skip the remaining pages of a multi-page deleted record.
        // Previously only one page was consumed, so the next call started
        // mid-record and misparsed everything after a large deleted slot.
        if (rec.size > c_pageSize)
        {
            m_file.seekReadCursor((int)(rec.offset + rec.size));
        }
        return (readBdError.deletedRec);
    }
    if (rec.size > t_buff.Length)
    {
        Array.Resize<byte>(ref t_buff, (int)rec.size);
    }
    t_page.CopyTo(t_buff, 0);
    // Pull in the remaining pages of the record.
    for (UInt32 nRead = c_pageSize; nRead < rec.size; nRead += c_pageSize)
    {
        ret = m_file.readData(t_page, (int)c_pageSize);
        Debug.Assert(ret);
        if (!ret)
        {
            // Truncated despite the size check above — treat as corruption
            // rather than parsing garbage in release builds.
            return (readBdError.dbMalform);
        }
        t_page.CopyTo(t_buff, (int)nRead);
    }

    UInt32 iCursor = 4; // skip hdr
    rec.keyLen = BitConverter.ToUInt32(t_buff, (int)iCursor);
    iCursor += 4;
    // Bounds-check the stored lengths before decoding so a corrupt record
    // cannot cause an out-of-range read.
    if (4 + 4 + rec.keyLen + 4 > rec.size)
    {
        return (readBdError.dbMalform);
    }
    rec.key = Encoding.UTF8.GetString(t_buff, (int)iCursor, (int)rec.keyLen);
    iCursor += rec.keyLen;
    rec.markedLen = BitConverter.ToUInt32(t_buff, (int)iCursor);
    iCursor += 4;
    if (iCursor + rec.markedLen > rec.size)
    {
        return (readBdError.dbMalform);
    }
    rec.marked = Encoding.UTF8.GetString(t_buff, (int)iCursor, (int)rec.markedLen);
    return (readBdError.success);
}
/// <summary>
/// Looks up a record by key: first in the hot cache, then in the main
/// dictionary (promoting a main-dictionary hit into the cache).
/// </summary>
/// <returns>The record, or null when the key is unknown.</returns>
chapterRec findRec(string key)
{
    chapterRec rec;
    // TryGetValue avoids the double lookup of ContainsKey + indexer.
    if (m_cache.TryGetValue(key, out rec))
    {
        return (rec);
    }
    if (m_dict.TryGetValue(key, out rec))
    {
        m_cache.Add(key, rec);
        return (rec);
    }
    return (null);
}
/// <summary>
/// Serializes a record and writes it to the db file, reusing a tombstoned
/// slot when one is large enough, otherwise appending at the end of file.
/// Layout: hdr(4: slot page count), keyLen(4), key, markedLen(4), marked,
/// padded up to a multiple of c_pageSize. Updates rec's offset/size/len fields.
/// </summary>
private void addMarked(chapterRec rec)
{
    // Worst-case UTF-8 expansion; GetMaxByteCount covers 3-byte characters
    // that the previous "* 2" estimate would miss (buffer overrun in GetBytes).
    UInt32 size = 4 + 8 + (UInt32)(Encoding.UTF8.GetMaxByteCount(rec.key.Length)
                                 + Encoding.UTF8.GetMaxByteCount(rec.marked.Length));
    size = (size + c_pageSize - 1) & ~(c_pageSize - 1);
    resizeTmpBuff((int)size);

    UInt32 offset = 4; // hdr is written last, once the real size is known
    rec.keyLen = (UInt32)Encoding.UTF8.GetBytes(rec.key, 0, rec.key.Length, t_buff, (int)offset + 4);
    BitConverter.GetBytes(rec.keyLen).CopyTo(t_buff, (int)offset);
    offset += rec.keyLen + 4;
    rec.markedLen = (UInt32)Encoding.UTF8.GetBytes(rec.marked, 0, rec.marked.Length, t_buff, (int)offset + 4);
    // BUGFIX: this length field must hold markedLen — it previously wrote
    // keyLen again, corrupting the payload length read back by nextChapter.
    BitConverter.GetBytes(rec.markedLen).CopyTo(t_buff, (int)offset);

    // Actual page-aligned size of the serialized record.
    size = (rec.markedLen + offset + 4 + c_pageSize - 1) & ~(c_pageSize - 1);

    //find in delete list
    var reuse = delListFind(size);
    if (reuse != null)
    {
        // BUGFIX: the header must describe the FULL reused slot, not just the
        // payload pages — otherwise the slot's tail pages would be parsed as
        // a bogus record on the next load.
        BitConverter.GetBytes(reuse.size / c_pageSize).CopyTo(t_buff, 0);
        m_deltedItem.Remove(reuse);
        rec.offset = reuse.offset;
        rec.isDeleted = false;
        rec.size = reuse.size;
        Debug.Assert(size <= reuse.size);
        m_file.seekWriteCursor((int)rec.offset);
        m_file.writeData(t_buff, (int)size);
    }
    else
    {
        BitConverter.GetBytes(size / c_pageSize).CopyTo(t_buff, 0);
        //seek to end of file
        m_file.seekWriteCursor(-1);
        rec.offset = (UInt32)m_file.writeCursor;
        rec.isDeleted = false;
        m_file.writeData(t_buff, (int)size);
        rec.size = size;
    }
}
/// <summary>
/// Stores the chapter's marked indexes (joined with ';'), updating the
/// existing record when one exists or creating, persisting and caching a
/// new record otherwise.
/// </summary>
public void saveMarked(chapterInfo c)
{
    string key = c.path;
    string marked = string.Join(";", c.markedIndexs);

    chapterRec existing = findRec(key);
    if (existing == null)
    {
        // Unknown chapter: persist a fresh record and register it everywhere.
        var created = new chapterRec() { key = c.path, marked = marked };
        addMarked(created);
        m_cache.Add(key, created);
        m_dict.Add(key, created);
        return;
    }

    existing.marked = marked;
    updateMarked(existing);
}
/// <summary>
/// Fills c.markedIndexs with the integer indexes stored for the chapter,
/// leaving it empty when the chapter has no record.
/// </summary>
public void getMarked(chapterInfo c)
{
    c.markedIndexs.Clear();

    chapterRec rec = findRec(c.path);
    if (rec == null)
    {
        return;
    }

    // Stored form is ";"-separated; non-numeric fragments are skipped silently.
    foreach (var token in rec.marked.Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries))
    {
        int idx;
        if (int.TryParse(token, out idx))
        {
            c.markedIndexs.Add(idx);
        }
    }
}
// XML-backed variant of addMarked; the insert into m_dbfile.chapters is
// commented out, so this is currently a no-op stub.
// NOTE(review): same signature as the file-db addMarked; presumably compiled
// only under a use_xml #if not visible here — confirm.
private void addMarked(chapterRec rec)
{
    //m_dbfile.chapters.Add(rec);
}
/// <summary>
/// Rewinds the db file to the beginning and reads its first record;
/// see nextChapter for the possible return codes.
/// </summary>
readBdError firstChapter(out chapterRec rec)
{
    rec = null;
    // Start scanning from the very start of the file.
    m_file.seekReadCursor(0);
    return nextChapter(out rec);
}