//UINT32 openflags() const { return m_openflags; }

//-------------------------------------------------
//  hash - returns the hash for a file
//-------------------------------------------------
// Computes any hash types requested in 'types' that are not already present
// in m_hashes, then returns the (shared) collection.  Returns the existing
// collection unchanged when nothing new is needed, when the compressed file
// is not ready, or when no file/data is available to hash.
public util.hash_collection hashes(string types)
{
    // determine the hashes we already have
    string already_have = m_hashes.hash_types();

    // determine which hashes we need
    string needed = "";
    foreach (char scan in types)
    {
        // NOTE(review): find_first_of/npos are unconverted C++ string idioms -
        // confirm these members actually exist on the string type in this port
        if (already_have.find_first_of(scan) == npos)
        {
            needed += scan;
        }
    }

    // if we need nothing, skip it
    if (string.IsNullOrEmpty(needed))
    {
        return(m_hashes);
    }

    // load the ZIP file if needed
    if (compressed_file_ready())
    {
        return(m_hashes);
    }

    if (m_file == null)
    {
        return(m_hashes);
    }

    // if we have ZIP data, just hash that directly
    if (!m_zipdata.empty())
    {
        m_hashes.compute(new Pointer <u8>(m_zipdata), (uint32_t)m_zipdata.size(), needed);  //m_hashes.compute(&m_zipdata[0], m_zipdata.size(), needed.c_str());
        return(m_hashes);
    }

    // read the data if we can
    MemoryU8 filedata = m_file.buffer();  //const u8 *filedata = (const u8 *)m_file->buffer();
    if (filedata == null)
    {
        return(m_hashes);
    }

    // compute the hash
    uint64_t length;
    // NOTE(review): the hash is only computed when length() returns false; if
    // length() returns true on success this condition is inverted - verify
    // against the m_file.length() return convention used elsewhere in the port
    if (!m_file.length(out length))
    {
        m_hashes.compute(new PointerU8(filedata), (uint32_t)length, needed);
    }

    return(m_hashes);
}
//-------------------------------------------------
//  actual_height - report the total laid-out
//  height: the bottom edge of the last line, or
//  zero when no lines exist yet
//-------------------------------------------------
public float actual_height()
{
    // nothing laid out -> no height
    if (m_lines.empty())
    {
        return(0.0f);
    }

    // bottom edge of the final line
    var last_line = m_lines.back();
    return(last_line.yoffset() + last_line.height());
}
//UINT32 openflags() const { return m_openflags; }

//-------------------------------------------------
//  hash - returns the hash for a file
//-------------------------------------------------
// Computes any hash types requested in 'types' that are not already present
// in m_hashes and returns the collection.  Bails out early (returning the
// existing hashes unchanged) when nothing is needed, when the compressed file
// is not ready, or when no file data is available.
public util.hash_collection hashes(string types)
{
    // determine the hashes we already have
    string already_have = m_hashes.hash_types();

    // determine which hashes we need
    string needed = "";
    for (int scanIdx = 0; scanIdx < types.Length; scanIdx++)
    {
        // collect every requested hash type not yet computed
        if (already_have.IndexOf(types[scanIdx], 0) == -1)
        {
            needed += types[scanIdx];
        }
    }

    // if we need nothing, skip it
    if (string.IsNullOrEmpty(needed))
    {
        return(m_hashes);
    }

    // load the ZIP file if needed
    if (compressed_file_ready())
    {
        return(m_hashes);
    }

    if (m_file == null)
    {
        return(m_hashes);
    }

    // if we have ZIP data, just hash that directly
    if (!m_zipdata.empty())
    {
        // NOTE(review): this call passes needed.c_str() while the call below
        // passes 'needed' directly - presumably equivalent in this port, but
        // worth normalizing
        m_hashes.compute(new ListBytesPointer(m_zipdata), (UInt32)m_zipdata.size(), needed.c_str());
        return(m_hashes);
    }

    // read the data if we can
    ListBytes filedata = m_file.buffer();
    if (filedata == null)
    {
        return(m_hashes);
    }

    // compute the hash over the entire buffered file contents
    m_hashes.compute(new ListBytesPointer(filedata), (UInt32)m_file.size(), needed);

    return(m_hashes);
}
ioport_charqueue_empty_delegate m_charqueue_empty;  // character queue empty callback

// construction/destruction

//-------------------------------------------------
//  natural_keyboard - constructor
//-------------------------------------------------
// Initializes all state to idle/empty defaults, builds the keycode tables,
// and allocates the typing buffer and timer only if any keyboard devices
// were found.  The in-use flag is seeded from the -natural option setting.
public natural_keyboard(running_machine machine)
{
    m_machine = machine;
    m_have_charkeys = false;
    m_in_use = false;
    m_bufbegin = 0;            // circular buffer read position
    m_bufend = 0;              // circular buffer write position
    m_current_code = null;
    m_fieldnum = 0;
    m_status_keydown = false;
    m_last_cr = false;
    m_timer = null;
    m_current_rate = attotime.zero;
    m_queue_chars = null;
    m_accept_char = null;
    m_charqueue_empty = null;

    // try building a list of keycodes; if none are available, don't bother
    build_codes();
    if (!m_keyboards.empty())
    {
        m_buffer.resize(KEY_BUFFER_SIZE);
        m_timer = machine.scheduler().timer_alloc(timer);
    }

    // retrieve option setting
    set_in_use(machine.options().natural_keyboard());
}
// register for save states

/*-------------------------------------------------
 *  register_save - register for save states
 *-------------------------------------------------*/
// Builds the deterministic (name-sorted) save order for all output items and
// allocates the parallel value array.  Must be called exactly once, before
// any save data exists (enforced by the asserts).
public void register_save()
{
    assert(m_save_order.empty());
    assert(m_save_data == null);

    // make space for the data
    m_save_order.clear();
    m_save_order.reserve(m_itemtable.size());
    m_save_data = new s32 [m_itemtable.size()];  //m_save_data = std::make_unique<s32 []>(m_itemtable.size());

    // sort existing outputs by name and register for save
    foreach (var item in m_itemtable)
    {
        m_save_order.emplace_back(item.second());
    }
    // NOTE(review): string.Compare without StringComparison is culture-
    // sensitive, unlike the C++ operator< here - consider Ordinal so the save
    // order cannot vary with locale
    m_save_order.Sort((l, r) => { return(string.Compare(l.name(), r.name())); });  //std::sort(m_save_order.begin(), m_save_order.end(), [] (auto const &l, auto const &r) { return l.get().name() < r.get().name(); });

    // register the reserved space for saving
    //throw new emu_unimplemented();
#if false
    machine().save().save_pointer(nullptr, "output", nullptr, 0, NAME(m_save_data), m_itemtable.size());
#endif

    if (OUTPUT_VERBOSE)
    {
        osd_printf_verbose("Registered {0} outputs for save states\n", m_itemtable.size());
    }
}
Action <string> m_value_changed_handler;  //std::function<void(const char *)> m_value_changed_handler;

// construction/destruction

// Construct an option entry; only HEADER pseudo-entries may (and must) have
// an empty name list, which the assert below enforces.
protected entry(std.vector <string> names, option_type type = option_type.STRING, string description = null)
{
    m_type = type;
    m_description = description;
    m_priority = OPTION_PRIORITY_DEFAULT;
    m_names = names;

    // headers are nameless; everything else needs at least one name
    assert(m_names.empty() == (m_type == option_type.HEADER));
}
// Raise a parse error, formatting the full include chain from the current
// source stack.  Consumes the stack while unwinding and always throws.
void error(string err)
{
    // innermost frame: where the error actually occurred
    var top = m_stack.back();
    string message = new plib.pfmt("{0}:{1}:0: error: {2}\n").op(top.m_name, top.m_lineno, err);
    m_stack.pop_back();

    // unwind the include stack; the outermost frame gets the long prefix
    string chain = "";
    while (!m_stack.empty())
    {
        string prefix = (m_stack.size() == 1) ? "In file included from " : " from ";
        chain = new plib.pfmt("{0}{1}:{2}:0\n{3}").op(prefix, m_stack.back().m_name, m_stack.back().m_lineno, chain);
        m_stack.pop_back();
    }

    throw new pexception("\n" + chain + message + " " + m_line + "\n");
}
//-------------------------------------------------
//  audit_device - audit the device
//-------------------------------------------------
// Walks all ROM regions of the given device, auditing ROM data and disk
// data entries.  Returns summary.NOTFOUND when required files exist but
// nothing at all was located; otherwise returns the per-file summary.
public summary audit_device(device_t device, string validation = AUDIT_VALIDATE_FULL)
{
    // start fresh
    m_record_list.clear();

    // store validation for later
    m_validation = validation;

    size_t found = 0;
    size_t required = 0;

    // search path is resolved lazily, on the first ROM-data region seen
    std.vector <string> searchpath = new std.vector <string>();
    audit_regions(
        (rom_entry region, Pointer <rom_entry> rom) =>  //[this, &device, &searchpath] (rom_entry const *region, rom_entry const *rom) -> audit_record const *
        {
            if (ROMREGION_ISROMDATA(region))
            {
                if (searchpath.empty())
                {
                    searchpath = device.searchpath();
                }

                return(audit_one_rom(searchpath, rom));
            }
            else if (ROMREGION_ISDISKDATA(region))
            {
                return(audit_one_disk(rom, device));
            }
            else
            {
                // region types with no auditable payload are skipped
                return(null);
            }
        },
        rom_first_region(device).op, out found, out required);

    // if none of the required files were found, report the set as missing
    if (found == 0 && required > 0)
    {
        m_record_list.clear();
        return(summary.NOTFOUND);
    }

    // return a summary
    return(summarize(device.shortname()));
}
//-------------------------------------------------
//  generate_samples - generate the requested
//  number of samples for a stream, making sure
//  all inputs have the appropriate number of
//  samples generated
//-------------------------------------------------
// Updates and resamples every input stream, computes the output write
// pointers, then invokes the stream callback once with the full input and
// output pointer arrays.  Null is passed for inputs/outputs when the stream
// has none.
void generate_samples(int samples)
{
    ListPointer <stream_sample_t> [] inputs = null;  //stream_sample_t **inputs = nullptr;
    ListPointer <stream_sample_t> [] outputs = null;  //stream_sample_t **outputs = nullptr;

    sound_global.VPRINTF("generate_samples({0}, {1})\n", this, samples);
    assert(samples > 0);

    // ensure all inputs are up to date and generate resampled data
    for (int inputnum = 0; inputnum < m_input.size(); inputnum++)
    {
        // update the stream to the current time
        stream_input input = m_input[inputnum];
        if (input.m_source != null)
        {
            input.m_source.m_stream.update();
        }

        // generate the resampled data
        m_input_array[inputnum] = generate_resampled_data(input, (UInt32)samples);
    }

    if (!m_input.empty())
    {
        inputs = m_input_array;
    }

    // loop over all outputs and compute the output pointer
    for (int outputnum = 0; outputnum < m_output.size(); outputnum++)
    {
        stream_output output = m_output[outputnum];
        // point at the first sample of this update within the output buffer
        m_output_array[outputnum] = new ListPointer <stream_sample_t>(output.m_buffer, m_output_sampindex - m_output_base_sampindex);  // m_output_array[outputnum] = &output.m_buffer[m_output_sampindex - m_output_base_sampindex];
    }

    if (!m_output.empty())
    {
        outputs = m_output_array;
    }

    // run the callback
    sound_global.VPRINTF(" callback({0}, {1})\n", this, samples);
    m_callback(this, inputs, outputs, samples);
    sound_global.VPRINTF(" callback done\n");
}
// Report a tokenizer error, formatting the full include chain from the
// current source-location stack.  Consumes the stack while unwinding, then
// delegates to verror().
protected void error(string errs)  //void ptoken_reader::error(const perrmsg &errs)
{
    // innermost frame: the location the error was raised at
    var loc = m_source_location.back();
    string message = new plib.pfmt("{0}:{1}:0: error: {2}\n").op(loc.file_name(), loc.line(), errs);
    m_source_location.pop_back();

    // unwind the include chain; the outermost frame gets the long prefix
    string chain = "";
    while (!m_source_location.empty())
    {
        string prefix = (m_source_location.size() == 1) ? "In file included from " : " from ";
        chain = new plib.pfmt("{0}{1}:{2}:0\n{3}").op(prefix, m_source_location.back().file_name(), m_source_location.back().line(), chain);
        m_source_location.pop_back();
    }

    verror("\n" + chain + message + " " + m_line + "\n");
}
// Returns true when net 'n' needs no further processing: rail nets are
// known variables, and nets already present in the current (last) group are
// done.  When 'n' is found in an earlier group, that group is merged into
// the current group (without duplicates), emptied, and true is returned.
bool check_if_processed_and_join(analog_net_t n)
{
    // rail nets are known variables - nothing to solve
    if (n.is_rail_net())
        return(true);

    // if the net lives in a previous group, merge that group into the
    // current one
    if (groupspre.size() > 1)
    {
        for (size_t i = 0; i < groupspre.size() - 1; i++)
        {
            if (!plib.container.contains(groupspre[i], n))
                continue;

            // move every net of the old group across, skipping duplicates
            foreach (var cn in groupspre[i])
            {
                if (!plib.container.contains(groupspre.back(), cn))
                    groupspre.back().push_back(cn);
            }

            // empty the old group now that its nets were absorbed
            groupspre[i].clear();
            return(true);
        }
    }

    // already a member of the current group - no need to continue
    return(!groupspre.empty() && plib.container.contains(groupspre.back(), n));
}
//-------------------------------------------------
//  populate search list
//-------------------------------------------------
// Scores every system against the current search string using edit distance
// over the available normalized-UCS fields (shortname, descriptions,
// manufacturer combinations), keeping the best (smallest) distance per
// system, then sorts the list by distance.
void populate_search()
{
    // ensure search list is populated
    if (m_searchlist.empty())
    {
        var sorted = m_persistent_data.sorted_list();
        m_searchlist.reserve(sorted.size());
        foreach (ui_system_info info in sorted)
        {
            m_searchlist.emplace_back(new std.pair <double, ui_system_info>(1.0, info));
        }
    }

    // keep track of what we matched against
    string ucs_search = ustr_from_utf8(normalize_unicode(m_search, unicode_normalization_form.D, true));  //const std::u32string ucs_search(ustr_from_utf8(normalize_unicode(m_search, unicode_normalization_form::D, true)));

    // check available search data
    if (m_persistent_data.is_available(system_list.available.AVAIL_UCS_SHORTNAME))
    {
        m_searched_fields |= (unsigned)system_list.available.AVAIL_UCS_SHORTNAME;
    }
    if (m_persistent_data.is_available(system_list.available.AVAIL_UCS_DESCRIPTION))
    {
        m_searched_fields |= (unsigned)system_list.available.AVAIL_UCS_DESCRIPTION;
    }
    if (m_persistent_data.is_available(system_list.available.AVAIL_UCS_MANUF_DESC))
    {
        m_searched_fields |= (unsigned)system_list.available.AVAIL_UCS_MANUF_DESC;
    }
    if (m_persistent_data.is_available(system_list.available.AVAIL_UCS_DFLT_DESC))
    {
        m_searched_fields |= (unsigned)system_list.available.AVAIL_UCS_DFLT_DESC;
    }
    if (m_persistent_data.is_available(system_list.available.AVAIL_UCS_MANUF_DFLT_DESC))
    {
        m_searched_fields |= (unsigned)system_list.available.AVAIL_UCS_MANUF_DFLT_DESC;
    }

    for (int i = 0; i < m_searchlist.Count; i++)  //for (std::pair<double, std::reference_wrapper<ui_system_info const> > &info : m_searchlist)
    {
        var info = m_searchlist[i];
        // NOTE(review): 'info' retains the pair captured before the reset
        // below, so every 'info.first' read in this iteration sees the
        // distance from the PREVIOUS search, not the value just written to
        // m_searchlist[i].  The C++ original updated info.first in place -
        // verify whether this stale-read behavior is intentional.
        m_searchlist[i] = std.make_pair(1.0, info.second);
        ui_system_info sys = info.second;

        // match shortnames
        if ((m_searched_fields & (unsigned)system_list.available.AVAIL_UCS_SHORTNAME) != 0)
        {
            m_searchlist[i] = std.make_pair(util.edit_distance(ucs_search, sys.ucs_shortname), info.second);
        }

        // match reading
        if (info.first != 0 && !sys.ucs_reading_description.empty())
        {
            m_searchlist[i] = std.make_pair(std.min(util.edit_distance(ucs_search, sys.ucs_reading_description), info.first), info.second);

            // match "<manufacturer> <reading>"
            if (info.first != 0)
            {
                m_searchlist[i] = std.make_pair(std.min(util.edit_distance(ucs_search, sys.ucs_manufacturer_reading_description), info.first), info.second);
            }
        }

        // match descriptions
        if (info.first != 0 && (m_searched_fields & (unsigned)system_list.available.AVAIL_UCS_DESCRIPTION) != 0)
        {
            m_searchlist[i] = std.make_pair(std.min(util.edit_distance(ucs_search, sys.ucs_description), info.first), info.second);
        }

        // match "<manufacturer> <description>"
        if (info.first != 0 && (m_searched_fields & (unsigned)system_list.available.AVAIL_UCS_MANUF_DESC) != 0)
        {
            m_searchlist[i] = std.make_pair(std.min(util.edit_distance(ucs_search, sys.ucs_manufacturer_description), info.first), info.second);
        }

        // match default description
        if (info.first != 0 && (m_searched_fields & (unsigned)system_list.available.AVAIL_UCS_DFLT_DESC) != 0 && !sys.ucs_default_description.empty())
        {
            m_searchlist[i] = std.make_pair(std.min(util.edit_distance(ucs_search, sys.ucs_default_description), info.first), info.second);

            // match "<manufacturer> <default description>"
            if (info.first != 0 && (m_searched_fields & (unsigned)system_list.available.AVAIL_UCS_MANUF_DFLT_DESC) != 0)
            {
                m_searchlist[i] = std.make_pair(std.min(util.edit_distance(ucs_search, sys.ucs_manufacturer_default_description), info.first), info.second);
            }
        }
    }

    // sort according to edit distance
    //std::stable_sort(
    //        m_searchlist.begin(),
    //        m_searchlist.end());
    //        [] (auto const &lhs, auto const &rhs) { return lhs.first < rhs.first; });
    // NOTE(review): List<T>.Sort is unstable, unlike the std::stable_sort in
    // the original - systems with equal distances may reorder between runs
    m_searchlist.Sort((lhs, rhs) => { return(lhs.first.CompareTo(rhs.first)); });
}
// last argument only needed by nltool
// Registers a device instance of factory class 'classname' under 'name',
// expanding macro-library entries, then walks the factory's parameter
// description to consume 'params_and_connections': "+X" entries link the
// next token to terminal X, "@X" entries link terminal X to the global net
// of the same name, and plain entries set the parameter to the next token.
// Throws nl_exception on duplicate devices or token-count mismatches.
void register_dev(string classname, string name, std.vector <string> params_and_connections, factory.element_t felem = null)
{
    var f = factory_().factory_by_name(classname);

    // make sure we parse macro library entries
    // FIXME: this could be done here if e.g. f
    // would have an indication that this is macro element.
    if (f.type() == factory.element_type.MACRO)
    {
        namespace_push(name);
        include(f.name());
        namespace_pop();
    }

    string key = build_fqn(name);
    if (device_exists(key))
    {
        log().fatal.op(MF_DEVICE_ALREADY_EXISTS_1(key));
        throw new nl_exception(MF_DEVICE_ALREADY_EXISTS_1(key));
    }

    m_abstract.m_device_factory.push_back(new std.pair <string, factory.element_t>(key, f));  //m_abstract.m_device_factory.insert(m_abstract.m_device_factory.end(), {key, f});

    var paramlist = plib.pg.psplit(f.param_desc(), ",");

    if (!params_and_connections.empty())
    {
        // indices emulate the C++ iterators over params_and_connections
        var ptokIdx = 0;  //auto ptok(params_and_connections.begin());
        var ptok_endIdx = params_and_connections.Count;  //auto ptok_end(params_and_connections.end());

        foreach (string tp in paramlist)
        {
            // "+NAME": implicit connection consuming the next caller token
            if (plib.pg.startsWith(tp, "+"))
            {
                if (ptokIdx == ptok_endIdx)  //if (ptok == ptok_end)
                {
                    var err = MF_PARAM_COUNT_MISMATCH_2(name, params_and_connections.size());
                    log().fatal.op(err);
                    throw new nl_exception(err);
                    //break;
                }

                string output_name = params_and_connections[ptokIdx];  //pstring output_name = *ptok;
                log().debug.op("Link: {0} {1}", tp, output_name);

                register_link(name + "." + tp.substr(1), output_name);
                ++ptokIdx;  //++ptok;
            }
            // "@NAME": link the terminal to the global net of the same name
            else if (plib.pg.startsWith(tp, "@"))
            {
                string term = tp.substr(1);
                log().debug.op("Link: {0} {1}", tp, term);

                register_link(name + "." + term, term);
            }
            else
            {
                // plain parameter: the next caller token is its value
                if (ptokIdx == params_and_connections.Count)  //if (ptok == params_and_connections.end())
                {
                    var err = MF_PARAM_COUNT_MISMATCH_2(name, params_and_connections.size());
                    log().fatal.op(err);
                    throw new nl_exception(err);
                }

                string paramfq = name + "." + tp;
                log().debug.op("Defparam: {0}\n", paramfq);

                register_param(paramfq, params_and_connections[ptokIdx]);  //register_param(paramfq, *ptok);
                ++ptokIdx;  //++ptok;
            }
        }

        // leftover tokens mean the caller supplied too many values
        if (ptokIdx != params_and_connections.Count)  //if (ptok != params_and_connections.end())
        {
            var err = MF_PARAM_COUNT_EXCEEDED_2(name, params_and_connections.size());
            log().fatal.op(err);
            throw new nl_exception(err);
        }
    }

    if (felem != null)
    {
        throw new emu_unimplemented();
#if false
        *felem = f;
#endif
    }
}
// True while at least one movie recording is in progress.
public bool is_recording()
{
    bool idle = m_movie_recordings.empty();
    return(!idle);
}
// Adjusts the horizontal offsets of this line's characters so that its
// left-, center- and right-justified runs land correctly within the layout
// width, and sets the anchor position/target accordingly.  Characters are
// partitioned by m_center_justify_start and m_right_justify_start (with
// m_right_justify_start >= m_center_justify_start, asserted below).
// positioned_char appears to be an immutable value, so each shifted
// character is rebuilt with an adjusted xoffset rather than mutated.
public void align_text(text_layout layout)
{
    assert(m_right_justify_start >= m_center_justify_start);

    if (m_characters.empty() || m_center_justify_start != 0)
    {
        // at least some of the text is left-justified - anchor to left
        m_anchor_pos = 0.0f;
        m_anchor_target = 0.0f;
        if ((layout.width() > m_width) && (m_characters.size() > m_center_justify_start))
        {
            // at least some text is not left-justified
            if (m_right_justify_start == m_center_justify_start)
            {
                // all text that isn't left-justified is right-justified
                float right_offset = layout.width() - m_width;
                for (size_t i = m_right_justify_start; m_characters.size() > i; ++i)
                {
                    m_characters[i] = new positioned_char() { character = m_characters[i].character, style = m_characters[i].style, source = m_characters[i].source, xoffset = m_characters[i].xoffset + right_offset, xwidth = m_characters[i].xwidth };  //m_characters[i].xoffset += right_offset;
                }
                m_width = layout.width();
            }
            else if (m_characters.size() <= m_right_justify_start)
            {
                // all text that isn't left-justified is center-justified
                float center_width = m_width - m_characters[m_center_justify_start].xoffset;
                float center_offset = ((layout.width() - center_width) * 0.5f) - m_characters[m_center_justify_start].xoffset;
                // only shift right - centered text must not overlap the left part
                if (0.0f < center_offset)
                {
                    for (size_t i = m_center_justify_start; m_characters.size() > i; ++i)
                    {
                        m_characters[i] = new positioned_char() { character = m_characters[i].character, style = m_characters[i].style, source = m_characters[i].source, xoffset = m_characters[i].xoffset + center_offset, xwidth = m_characters[i].xwidth };  //m_characters[i].xoffset += center_offset;
                    }
                    m_width += center_offset;
                }
            }
            else
            {
                // left, right and center-justified parts
                float center_width = m_characters[m_right_justify_start].xoffset - m_characters[m_center_justify_start].xoffset;
                float center_offset = ((layout.width() - center_width) * 0.5f) - m_characters[m_center_justify_start].xoffset;
                float right_offset = layout.width() - m_width;
                if (center_offset > right_offset)
                {
                    // right-justified text pushes centre-justified text to the left
                    for (size_t i = m_center_justify_start; m_right_justify_start > i; ++i)
                    {
                        m_characters[i] = new positioned_char() { character = m_characters[i].character, style = m_characters[i].style, source = m_characters[i].source, xoffset = m_characters[i].xoffset + right_offset, xwidth = m_characters[i].xwidth };  //m_characters[i].xoffset += right_offset;
                    }
                }
                else if (0.0f < center_offset)
                {
                    // left-justified text doesn't push centre-justified text to the right
                    for (size_t i = m_center_justify_start; m_right_justify_start > i; ++i)
                    {
                        m_characters[i] = new positioned_char() { character = m_characters[i].character, style = m_characters[i].style, source = m_characters[i].source, xoffset = m_characters[i].xoffset + center_offset, xwidth = m_characters[i].xwidth };  //m_characters[i].xoffset += center_offset;
                    }
                }
                // the right-justified run always moves to the right edge
                for (size_t i = m_right_justify_start; m_characters.size() > i; ++i)
                {
                    m_characters[i] = new positioned_char() { character = m_characters[i].character, style = m_characters[i].style, source = m_characters[i].source, xoffset = m_characters[i].xoffset + right_offset, xwidth = m_characters[i].xwidth };  //m_characters[i].xoffset += right_offset;
                }
                m_width = layout.width();
            }
        }
    }
    else if (m_characters.size() <= m_right_justify_start)
    {
        // all text is center-justified - anchor to center
        m_anchor_pos = 0.5f;
        m_anchor_target = 0.5f;
    }
    else
    {
        // at least some text is right-justified - anchor to right
        m_anchor_pos = 1.0f;
        m_anchor_target = 1.0f;
        if ((layout.width() > m_width) && (m_right_justify_start > m_center_justify_start))
        {
            // mixed center-justified and right-justified text
            float center_width = m_characters[m_right_justify_start].xoffset;
            float center_offset = (layout.width() - m_width + (center_width * 0.5f)) - (layout.width() * 0.5f);
            if (0.0f < center_offset)
            {
                for (size_t i = m_right_justify_start; m_characters.size() > i; ++i)
                {
                    m_characters[i] = new positioned_char() { character = m_characters[i].character, style = m_characters[i].style, source = m_characters[i].source, xoffset = m_characters[i].xoffset + center_offset, xwidth = m_characters[i].xwidth };  //m_characters[i].xoffset += center_offset;
                }
                m_width += center_offset;
            }
        }
    }
}
// True when pen usage information has been collected.
bool has_pen_usage()
{
    bool no_usage = m_pen_usage.empty();
    return(!no_usage);
}
//-------------------------------------------------
//  load_cached - load a font in cached format
//-------------------------------------------------
// Loads a BDC cached font from 'file'.  When 'length' is nonzero, the header's
// recorded original length/hash must match the given BDF 'length'/'hash'.
// Returns true on success; false on any header, validation, I/O or
// allocation error (leaving m_rawdata cleared on data errors).
bool load_cached(emu_file file, u64 length, u32 hash)
{
    // get the file size, read the header, and check that it looks good
    u64 filesize = file.size();
    bdc_header header = new bdc_header();
    if (!header.read(file))
    {
        osd_printf_warning("render_font::load_cached: error reading BDC header\n");
        return(false);
    }
    else if (!header.check_magic() || (bdc_header.MAJVERSION != header.get_major_version()) || (bdc_header.MINVERSION != header.get_minor_version()))
    {
        LOG("render_font::load_cached: incompatible BDC file\n");
        return(false);
    }
    else if (length != 0 && ((header.get_original_length() != length) || (header.get_original_hash() != hash)))
    {
        LOG("render_font::load_cached: BDC file does not match original BDF file\n");
        return(false);
    }

    // get global properties from the header
    m_height = header.get_height();
    m_scale = 1.0f / (float)(m_height);
    m_yoffs = header.get_y_offset();
    m_defchar = header.get_default_character();
    u32 numchars = header.get_glyph_count();
    if (file.tell() + ((u64)numchars * bdc_table_entry.size()) > filesize)
    {
        LOG("render_font::load_cached: BDC file is too small to hold glyph table\n");
        return(false);
    }

    // now read the rest of the data
    u64 remaining = filesize - file.tell();
    try
    {
        m_rawdata.resize(remaining);
    }
    catch (Exception)
    {
        // FIX: previously this only logged and fell through, continuing with
        // an unallocated buffer; abort the load like the error paths above
        osd_printf_error("render_font::load_cached: allocation error\n");
        m_rawdata.clear();
        return(false);
    }

    // pull the payload in (at most u32.MaxValue bytes per read call)
    for (u64 bytes_read = 0; remaining > bytes_read;)
    {
        u32 chunk = (u32)std.min(u32.MaxValue, remaining);
        if (file.read(new Pointer <u8>(m_rawdata, (int)bytes_read), chunk) != chunk)  //if (file.read(&m_rawdata[bytes_read], chunk) != chunk)
        {
            osd_printf_error("render_font::load_cached: error reading BDC data\n");
            m_rawdata.clear();
            return(false);
        }
        bytes_read += chunk;
    }

    // extract the data from the data
    size_t offset = (size_t)numchars * bdc_table_entry.size();
    bdc_table_entry entry = new bdc_table_entry(m_rawdata.empty() ? null : new Pointer <u8>(m_rawdata));
    for (unsigned chindex = 0; chindex < numchars; chindex++, entry = entry.get_next())
    {
        // if we don't have a subtable yet, make one
        int chnum = (int)entry.get_encoding();
        LOG("render_font::load_cached: loading character {0}\n", chnum);
        if (m_glyphs[chnum / 256] == null || m_glyphs[chnum / 256].Count == 0)
        {
            //try
            {
                m_glyphs[chnum / 256] = new List <glyph>(256);  // new glyph[256];
                for (int i = 0; i < 256; i++)
                {
                    m_glyphs[chnum / 256].Add(new glyph());
                }
            }
            //catch (Exception )
            //{
            //    global.osd_printf_error("render_font::load_cached: allocation error\n");
            //    m_rawdata.clear();
            //    return false;
            //}
        }

        // fill in the entry
        glyph gl = m_glyphs[chnum / 256][chnum % 256];
        gl.width = entry.get_x_advance();
        gl.xoffs = entry.get_bb_x_offset();
        gl.yoffs = entry.get_bb_y_offset();
        gl.bmwidth = entry.get_bb_width();
        gl.bmheight = entry.get_bb_height();
        gl.rawdata = new Pointer <u8>(m_rawdata, (int)offset);

        // advance the offset past the character
        offset += (size_t)((gl.bmwidth * gl.bmheight + 7) / 8);
        if (m_rawdata.size() < offset)
        {
            osd_printf_verbose("render_font::load_cached: BDC file too small to hold all glyphs\n");
            m_rawdata.clear();
            return(false);
        }
    }

    // got everything
    m_format = format.CACHED;
    return(true);
}
// Loads the built-in command glyph font (font_uicmd14) from an in-memory BDC
// image into m_glyphs_cmd/m_rawdata_cmd.  Mirrors the BDC loading logic in
// load_cached; on any error it logs, clears m_rawdata_cmd where data was
// partially read, closes the file, and returns without glyphs.
void render_font_command_glyph()
{
    // FIXME: this is copy/pasta from the BDC loading, and it shouldn't be injected into every font
    emu_file file = new emu_file(OPEN_FLAG_READ);
    if (!file.open_ram(new MemoryU8(font_uicmd14), (u32)font_uicmd14.Length))
    {
        // get the file size, read the header, and check that it looks good
        u64 filesize = file.size();
        bdc_header header = new bdc_header();
        if (!header.read(file))
        {
            osd_printf_warning("render_font::render_font_command_glyph: error reading BDC header\n");
            file.close();
            return;
        }
        else if (!header.check_magic() || (bdc_header.MAJVERSION != header.get_major_version()) || (bdc_header.MINVERSION != header.get_minor_version()))
        {
            LOG("render_font::render_font_command_glyph: incompatible BDC file\n");
            file.close();
            return;
        }

        // get global properties from the header
        m_height_cmd = header.get_height();
        m_yoffs_cmd = header.get_y_offset();
        u32 numchars = header.get_glyph_count();
        if ((file.tell() + ((u64)numchars * bdc_table_entry.size())) > filesize)
        {
            LOG("render_font::render_font_command_glyph: BDC file is too small to hold glyph table\n");
            file.close();
            return;
        }

        // now read the rest of the data
        u64 remaining = filesize - file.tell();
        // NOTE(review): the original try/catch around this resize is commented
        // out, so an allocation failure here would propagate as an exception
        //try
        {
            m_rawdata_cmd.resize(remaining);
        }
        //catch (...)
        //{
        //    global.osd_printf_error("render_font::render_font_command_glyph: allocation error\n");
        //}
        for (u64 bytes_read = 0; remaining > bytes_read;)
        {
            // read in chunks no larger than u32.MaxValue bytes
            u32 chunk = (u32)std.min(u32.MaxValue, remaining);
            if (file.read(new Pointer <u8>(m_rawdata_cmd, (int)bytes_read), chunk) != chunk)
            {
                osd_printf_error("render_font::render_font_command_glyph: error reading BDC data\n");
                m_rawdata_cmd.clear();
                file.close();
                return;
            }
            bytes_read += chunk;
        }
        file.close();

        // extract the data from the data
        size_t offset = (size_t)numchars * bdc_table_entry.size();
        bdc_table_entry entry = new bdc_table_entry(m_rawdata_cmd.empty() ? null : new Pointer <u8>(m_rawdata_cmd));
        for (unsigned chindex = 0; chindex < numchars; chindex++, entry = entry.get_next())
        {
            // if we don't have a subtable yet, make one
            int chnum = (int)entry.get_encoding();
            LOG("render_font::render_font_command_glyph: loading character {0}\n", chnum);
            if (m_glyphs_cmd[chnum / 256] == null)
            {
                //try
                {
                    m_glyphs_cmd[chnum / 256] = new List <glyph>(256);  // new glyph[256];
                    for (int i = 0; i < 256; i++)
                    {
                        m_glyphs_cmd[chnum / 256].Add(new glyph());
                    }
                }
                //catch (...)
                //{
                //    osd_printf_error("render_font::render_font_command_glyph: allocation error\n");
                //    m_rawdata_cmd.clear();
                //    return;
                //}
            }

            // fill in the entry
            glyph gl = m_glyphs_cmd[chnum / 256][chnum % 256];
            gl.width = entry.get_x_advance();
            gl.xoffs = entry.get_bb_x_offset();
            gl.yoffs = entry.get_bb_y_offset();
            gl.bmwidth = entry.get_bb_width();
            gl.bmheight = entry.get_bb_height();
            gl.rawdata = new Pointer <u8>(m_rawdata_cmd, (int)offset);

            // advance the offset past the character
            offset += (size_t)((gl.bmwidth * gl.bmheight + 7) / 8);
            if (m_rawdata_cmd.size() < offset)
            {
                osd_printf_verbose("render_font::render_font_command_glyph: BDC file too small to hold all glyphs\n");
                m_rawdata_cmd.clear();
                return;
            }
        }
    }
}
// audit operations

//-------------------------------------------------
//  audit_media - audit the media described by the
//  currently-enumerated driver
//-------------------------------------------------
// Walks the parent-driver chain to collect ROMs shared with parents, then
// audits every ROM/disk of every device in the configuration, tracking how
// many required files were found and how many of those are shared with (or
// actually belong to) a parent set.  Returns summary.NOTFOUND when the only
// files present belong to the parent and the set itself is effectively
// absent; otherwise returns the per-file summary.
public summary audit_media(string validation = AUDIT_VALIDATE_FULL)
{
    // start fresh
    m_record_list.clear();

    // store validation for later
    m_validation = validation;

    // first walk the parent chain for required ROMs
    parent_rom_vector parentroms = new parent_rom_vector();
    for (var drvindex = driver_list.find(m_enumerator.driver().parent); 0 <= drvindex; drvindex = driver_list.find(driver_list.driver((size_t)drvindex).parent))  //for (auto drvindex = m_enumerator.find(m_enumerator.driver().parent); 0 <= drvindex; drvindex = m_enumerator.find(m_enumerator.driver(drvindex).parent))
    {
        game_driver parent = driver_list.driver((size_t)drvindex);
        LOG(null, "Checking parent {0} for ROM files\n", parent.type.shortname());
        std.vector <rom_entry> roms = rom_build_entries(parent.rom);
        for (Pointer <rom_entry> region = rom_first_region(new Pointer <rom_entry>(roms)); region != null; region = rom_next_region(region))  //for (rom_entry const *region = rom_first_region(&roms.front()); region; region = rom_next_region(region))
        {
            for (Pointer <rom_entry> rom = rom_first_file(region); rom != null; rom = rom_next_file(rom))  //for (rom_entry const *rom = rom_first_file(region); rom; rom = rom_next_file(rom))
            {
                LOG(null, "Adding parent ROM {0}\n", rom.op.name());
                parentroms.emplace_back(new parent_rom(parent.type, rom));
            }
        }
    }
    parentroms.remove_redundant_parents();

    // count ROMs required/found
    size_t found = 0;
    size_t required = 0;
    size_t shared_found = 0;     // found files also present in a parent
    size_t shared_required = 0;  // required files also present in a parent
    size_t parent_found = 0;     // found files whose content matches a parent's

    // iterate over devices and regions
    std.vector <string> searchpath = new std.vector <string>();
    foreach (device_t device in new device_enumerator(m_enumerator.config().root_device()))
    {
        searchpath.clear();

        // now iterate over regions and ROMs within
        for (Pointer <rom_entry> region = rom_first_region(device); region != null; region = rom_next_region(region))
        {
            for (Pointer <rom_entry> rom = rom_first_file(region); rom != null; rom = rom_next_file(rom))
            {
                // resolve the device search path lazily, on the first file
                if (searchpath.empty())
                {
                    LOG(null, "Audit media for device {0}({1})\n", device.shortname(), device.tag());
                    searchpath = device.searchpath();
                }

                // look for a matching parent or device ROM
                string name = rom.op.name();
                util.hash_collection hashes = new util.hash_collection(rom.op.hashdata());
                bool dumped = !hashes.flag(util.hash_collection.FLAG_NO_DUMP);
                device_type shared_device = parentroms.find_shared_device(device, name, hashes, rom_file_size(rom));
                if (shared_device != null)
                {
                    LOG(null, "File '{0}' {1}{2}dumped shared with {3}\n", name, ROM_ISOPTIONAL(rom.op) ? "optional " : "", dumped ? "" : "un", shared_device.shortname());
                }
                else
                {
                    LOG(null, "File '{0}' {1}{2}dumped\n", name, ROM_ISOPTIONAL(rom.op) ? "optional " : "", dumped ? "" : "un");
                }

                // count the number of files with hashes
                if (dumped && !ROM_ISOPTIONAL(rom.op))
                {
                    required++;
                    if (shared_device != null)
                    {
                        shared_required++;
                    }
                }

                // audit a file
                audit_record record = null;
                if (ROMREGION_ISROMDATA(region.op))
                {
                    record = audit_one_rom(searchpath, rom);
                }
                // audit a disk
                else if (ROMREGION_ISDISKDATA(region.op))
                {
                    record = audit_one_disk(rom, device);
                }

                if (record != null)
                {
                    // see if the actual content found belongs to a parent
                    var matchesshared = parentroms.actual_matches_shared(device, record);
                    if (matchesshared.first != null)
                    {
                        LOG(null, "Actual ROM file shared with {0}parent {1}\n", matchesshared.second ? "immediate " : "", matchesshared.first.shortname());
                    }

                    // count the number of files that are found.
                    // a FOUND_INVALID file only counts when it doesn't
                    // actually belong to a parent set
                    if ((record.status() == audit_status.GOOD) || ((record.status() == audit_status.FOUND_INVALID) && (matchesshared.first == null)))
                    {
                        found++;
                        if (shared_device != null)
                        {
                            shared_found++;
                        }
                        if (matchesshared.second)
                        {
                            parent_found++;
                        }
                    }

                    record.set_shared_device(shared_device);
                }
            }
        }
    }

    // NOTE(review): this only logs the totals when the LAST enumerated device
    // had ROM files (searchpath non-empty) - confirm the guard is intentional
    if (!searchpath.empty())
    {
        LOG(null, "Total required={0} (shared={1}) found={2} (shared={3} parent={4})\n", required, shared_required, found, shared_found, parent_found);
    }

    // if we only find files that are in the parent & either the set has no unique files or the parent is not found, then assume we don't have the set at all
    if ((found == shared_found) && required != 0 && ((required != shared_required) || parent_found == 0))
    {
        m_record_list.clear();
        return(summary.NOTFOUND);
    }

    // return a summary
    return(summarize(m_enumerator.driver().name));
}