//PCOPYASSIGNMOVE(ptokenizer, delete)
//virtual ~ptokenizer() = default;

//using token_type = detail::token_type;
//using token_id_t = detail::token_id_t;
//using token_t = detail::token_t;
//using token_store = detail::token_store;

// tokenizer stuff follows ...

public ptokenizer_token_id_t register_token(string token)
{
    ptokenizer_token_id_t ret = new ptokenizer_token_id_t(m_tokens.size(), token);
    m_tokens.emplace(token, ret);
    return(ret);
}
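// Illustrative sketch, not part of the original source: a tokenizer derived from
// this class typically registers its fixed tokens once (e.g. in its constructor)
// and keeps the returned ids for later comparison via token_t.is_(). The m_tok_*
// field names mirror those used by the netlist parser further below; the enclosing
// class and field declarations are assumed.
void register_fixed_tokens_sketch()
{
    m_tok_paren_left    = register_token("(");
    m_tok_paren_right   = register_token(")");
    m_tok_NETLIST_START = register_token("NETLIST_START");
    m_tok_NETLIST_END   = register_token("NETLIST_END");
}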
// internal helpers

//-------------------------------------------------
//  add_to_entry_map - adds an entry to the entry
//  map
//-------------------------------------------------
void add_to_entry_map(string name, entry entry)
{
    // it is illegal to call this method for something that already exists
    assert(m_entrymap.find(name) == null);

    // append the entry
    m_entrymap.emplace(name, entry);
}
/*-------------------------------------------------
 * create_new_item - create a new item
 * -------------------------------------------------*/
output_item create_new_item(string outname, int value)
{
    var output_item = new output_item(this, outname, m_uniqueid++, value);

    var ins = m_itemtable.emplace(
            //std::piecewise_construct,
            outname,  //std::forward_as_tuple(outname),
            output_item);  // std::forward_as_tuple(this, outname, m_uniqueid++, value));
    assert(ins);  //ins.second);

    return(output_item);  //ins.first.second;
}
/*-------------------------------------------------
 * create_new_item - create a new item
 * -------------------------------------------------*/
output_item create_new_item(string outname, int value)
{
    if (OUTPUT_VERBOSE)
    {
        osd_printf_verbose("Creating output {0} = {1}{2}\n", outname, value, m_save_data != null ? " (will not be saved)" : "");
    }

    var output_item = new output_item(this, outname, m_uniqueid++, value);

    var ins = m_itemtable.emplace(
            //std::piecewise_construct,
            outname,  //std::forward_as_tuple(outname),
            output_item);  // std::forward_as_tuple(this, outname, m_uniqueid++, value));
    assert(ins);  //ins.second);

    return(output_item);  //ins.first.second;
}
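// Illustrative sketch, not part of the original source: in the C++ original,
// create_new_item() is reached through a set_value()-style path that first looks
// the output name up and only creates a fresh item on a miss. find_item() and
// item.set() are assumptions here and may be named differently in this port.
void set_value_sketch(string outname, int value)
{
    var item = find_item(outname);  // assumed lookup into m_itemtable
    if (item != null)
        item.set(value);
    else
        create_new_item(outname, value);
}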
public static void load_translation(emu_options m_options)
{
    g_translation.Clear();

    emu_file file = new emu_file(m_options.language_path(), global_object.OPEN_FLAG_READ);
    var name = m_options.language();
    name = name.Replace(" ", "_");
    name = name.Replace("(", "");
    name = name.Replace(")", "");
    if (file.open(name, global_object.PATH_SEPARATOR + "strings.mo") == osd_file.error.NONE)
    {
        uint64_t size = file.size();
        // allocate (size / 4 + 1) 32-bit words, expressed in bytes
        RawBuffer buffer = new RawBuffer(4 * ((int)size / 4 + 1));  //uint32_t *buffer = global_alloc_array(uint32_t, size / 4 + 1);
        file.read(new ListBytesPointer(buffer), (UInt32)size);
        file.close();

        if (buffer.get_uint32(0) != MO_MAGIC && buffer.get_uint32(0) != MO_MAGIC_REVERSED)
        {
            buffer = null;  //global_free_array(buffer);
            return;
        }

        if (buffer.get_uint32(0) == MO_MAGIC_REVERSED)
        {
            // swap each 32-bit word when the file was written with the opposite endianness
            for (var i = 0; i < ((int)size / 4) + 1; ++i)
            {
                buffer.set_uint32(i, endianchange(buffer.get_uint32(i)));
            }
        }

        uint32_t number_of_strings = buffer.get_uint32(2);
        uint32_t original_table_offset = buffer.get_uint32(3) >> 2;
        uint32_t translation_table_offset = buffer.get_uint32(4) >> 2;

        RawBuffer data = buffer;  //const char *data = reinterpret_cast<const char*>(buffer);

        for (var i = 1; i < number_of_strings; ++i)
        {
            string original = "TODO original";  //(const char *)data + buffer[original_table_offset + 2 * i + 1];
            string translation = "TODO translation";  //(const char *)data + buffer[translation_table_offset + 2 * i + 1];
            g_translation.emplace(original, translation);
        }

        buffer = null;  //global_free_array(buffer);
    }
}
public static void load_translation(random_read file)
{
    MemoryU8 translation_data_buffer;  //std::unique_ptr<std::uint32_t []> translation_data;
    PointerU32 translation_data;  //std::unique_ptr<std::uint32_t []> translation_data;
    std.unordered_map<string, std.pair<string, uint32_t>> translation_map = new std.unordered_map<string, std.pair<string, uint32_t>>();  //std::unordered_map<std::string_view, std::pair<char const *, std::uint32_t> > translation_map;

    uint64_t size = 0;
    if (file.length(out size) || (20 > size))
    {
        osd_printf_error("Error reading translation file: {0}-byte file is too small to contain translation data\n", size);
        return;
    }

    translation_data_buffer = new MemoryU8((int)size + 3, true);  //translation_data.reset(new (std::nothrow) std::uint32_t [(size + 3) / 4]);
    translation_data = new PointerU32(translation_data_buffer);  // 32-bit word view of the byte buffer just allocated
    if (translation_data == null)  // mirrors the C++ new (std::nothrow) check
    {
        osd_printf_error("Failed to allocate {0} bytes to load translation data file\n", size);
        return;
    }

    size_t read;
    file.read(translation_data, size, out read);
    if (read != size)
    {
        osd_printf_error("Error reading translation file: requested {0} bytes but got {1} bytes\n", size, read);
        translation_data = null;  //translation_data.reset();
        return;
    }

    if ((translation_data[0] != MO_MAGIC) && (translation_data[0] != MO_MAGIC_REVERSED))
    {
        osd_printf_error("Error reading translation file: unrecognized magic number {0}\n", translation_data[0]);  //0x%08X
        translation_data = null;  //translation_data.reset();
        return;
    }

    var fetch_word = new Func<size_t, uint32_t>(
            (size_t offset) =>  //[reversed = translation_data[0] == MO_MAGIC_REVERSED, words = translation_data.get()] (size_t offset)
            {
                var reversed = translation_data[0] == MO_MAGIC_REVERSED;
                var words = translation_data;
                return(reversed ? swapendian_int32(words[offset]) : words[offset]);
            });

    // FIXME: check major/minor version number

    if ((fetch_word(3) % 4) != 0 || (fetch_word(4) % 4) != 0)
    {
        osd_printf_error("Error reading translation file: table offsets {0} and {1} are not word-aligned\n", fetch_word(3), fetch_word(4));
        translation_data = null;  //translation_data.reset();
        return;
    }

    uint32_t number_of_strings = fetch_word(2);
    uint32_t original_table_offset = fetch_word(3) >> 2;
    uint32_t translation_table_offset = fetch_word(4) >> 2;
    if ((4 * (original_table_offset + ((uint64_t)number_of_strings * 2))) > size)
    {
        osd_printf_error("Error reading translation file: {0}-entry original string table at offset {1} extends past end of {2}-byte file\n", number_of_strings, fetch_word(3), size);
        translation_data = null;  //translation_data.reset();
        return;
    }
    if ((4 * (translation_table_offset + ((uint64_t)number_of_strings * 2))) > size)
    {
        osd_printf_error("Error reading translation file: {0}-entry translated string table at offset {1} extends past end of {2}-byte file\n", number_of_strings, fetch_word(4), size);
        translation_data = null;  //translation_data.reset();
        return;
    }

    osd_printf_verbose("Reading translation file: {0} strings, original table at word offset {1}, translated table at word offset {2}\n", number_of_strings, original_table_offset, translation_table_offset);

    PointerU8 data = new PointerU8(translation_data);  //char const *const data = reinterpret_cast<char const *>(translation_data.get());
    for (uint32_t i = 1; number_of_strings > i; ++i)
    {
        uint32_t original_length = fetch_word(original_table_offset + (2 * i));
        uint32_t original_offset = fetch_word(original_table_offset + (2 * i) + 1);
        if ((original_length + original_offset) >= size)
        {
            osd_printf_error("Error reading translation file: {0}-byte original string {1} at offset {2} extends past end of {3}-byte file\n", original_length, i, original_offset, size);
            continue;
        }
        if (data[original_length + original_offset] != 0)
        {
            osd_printf_error("Error reading translation file: {0}-byte original string {1} at offset {2} is not correctly NUL-terminated\n", original_length, i, original_offset);
            continue;
        }

        uint32_t translation_length = fetch_word(translation_table_offset + (2 * i));
        uint32_t translation_offset = fetch_word(translation_table_offset + (2 * i) + 1);
        if ((translation_length + translation_offset) >= size)
        {
            osd_printf_error("Error reading translation file: {0}-byte translated string {1} at offset {2} extends past end of {3}-byte file\n", translation_length, i, translation_offset, size);
            continue;
        }
        if (data[translation_length + translation_offset] != 0)
        {
            osd_printf_error("Error reading translation file: {0}-byte translated string {1} at offset {2} is not correctly NUL-terminated\n", translation_length, i, translation_offset);
            continue;
        }

        string original = data.ToString((int)original_offset, int.MaxValue);  //std::string_view const original(&data[original_offset], original_length);
        string translation = data.ToString((int)translation_offset, int.MaxValue);  //char const *const translation(&data[translation_offset]);
        var ins = translation_map.emplace(original, std.make_pair(translation, translation_length));  //auto const ins = translation_map.emplace(original, std::make_pair(translation, translation_length));
        if (!ins)
        {
            osd_printf_warning(
                    "Loading translation file: translation {0} '{1}'='{2}' conflicts with previous translation '{3}'='{4}'\n",
                    i,
                    original,
                    translation,
                    null,   //ins.first->first,
                    null);  //ins.first->second.first);
        }
    }

    osd_printf_verbose("Loaded {0} translated string from file\n", translation_map.size());

    f_translation_data = translation_data;
    f_translation_map = translation_map;
}
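// Note (added, not part of the original source): both load_translation() variants
// above assume the GNU gettext .mo binary layout, read as 32-bit words:
//   word 0 - magic number (0x950412de; byte-swapped when the file was written with
//            the opposite endianness, which MO_MAGIC / MO_MAGIC_REVERSED distinguish)
//   word 1 - file format revision
//   word 2 - number of strings
//   word 3 - byte offset of the original-string descriptor table
//   word 4 - byte offset of the translated-string descriptor table
// Each descriptor is a (length, offset) pair of words and every string is
// NUL-terminated, which is what the bounds and termination checks verify.
//
// Illustrative lookup sketch against the map built above; the fallback-to-original
// behaviour mirrors the C++ side, but the helper name and the dictionary-style
// lookup used here are assumptions about this port.
static string lang_translate_sketch(string message)
{
    std.pair<string, uint32_t> found;
    if (f_translation_map.TryGetValue(message, out found))  // assumed lookup on the map
        return found.first;  // mapped value is (translated text, length)
    return message;  // fall back to the untranslated text
}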
//bool parse(plib::istream_uptr &&strm, const pstring &nlname);

public bool parse(parser_t_token_store tokstor, string nlname)
{
    set_token_source(tokstor);

    bool in_nl = false;

    while (true)
    {
        // FIXME: line numbers in cached local netlists are wrong
        //        need to process raw tokens here.
        parser_t_token_t token = get_token_raw();
        if (token.is_type(parser_t_token_type.ENDOFFILE))
        {
            return(false);
        }

        if (token.is_(m_tok_NETLIST_END) || token.is_(m_tok_TRUTHTABLE_END))
        {
            if (!in_nl)
            {
                error(MF_PARSER_UNEXPECTED_1(token.str()));
            }
            else
            {
                in_nl = false;
            }

            require_token(m_tok_paren_left);
            require_token(m_tok_paren_right);

            m_cur_local.push_back(token);
            m_cur_local.push_back(new parser_t_token_t(m_tok_paren_left));
            m_cur_local.push_back(new parser_t_token_t(m_tok_paren_right));
        }
        else if (token.is_(m_tok_NETLIST_START) || token.is_(m_tok_TRUTHTABLE_START))
        {
            if (in_nl)
            {
                error(MF_PARSER_UNEXPECTED_1(token.str()));
            }

            require_token(m_tok_paren_left);
            parser_t_token_t name = get_token();
            if (token.is_(m_tok_NETLIST_START) && (name.str() == nlname || nlname.empty()))
            {
                require_token(m_tok_paren_right);
                parse_netlist();
                return(true);
            }
            if (token.is_(m_tok_TRUTHTABLE_START) && name.str() == nlname)
            {
                net_truthtable_start(nlname);
                return(true);
            }

            // create a new cached local store
            m_local.emplace(name.str(), new parser_t_token_store());
            m_cur_local = m_local[name.str()];
            var sl = sourceloc();
            var li = new plib.pfmt("# {0} \"{1}\"").op(sl.line(), sl.file_name());
            m_cur_local.push_back(new parser_t_token_t(parser_t_token_type.LINEMARKER, li));
            m_cur_local.push_back(token);
            m_cur_local.push_back(new parser_t_token_t(m_tok_paren_left));
            m_cur_local.push_back(name);
            //m_cur_local->push_back(token_t(m_tok_paren_right));
            in_nl = true;
        }
        // FIXME: do we really need this going forward ? there should be no need
        //        for NETLIST_EXTERNAL in netlist files
        else if (token.is_(m_tok_NETLIST_EXTERNAL))
        {
            if (in_nl)
            {
                error(MF_UNEXPECTED_NETLIST_EXTERNAL());
            }

            require_token(m_tok_paren_left);
            parser_t_token_t name = get_token();
            require_token(m_tok_paren_right);
        }
        else if (!in_nl)
        {
            if (!token.is_(m_tok_static) && !token.is_type(parser_t_token_type.SOURCELINE) && !token.is_type(parser_t_token_type.LINEMARKER))
            {
                error(MF_EXPECTED_NETLIST_START_1(token.str()));
            }
        }
        else
        {
            m_cur_local.push_back(token);
        }
    }
}
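// Illustrative sketch, not part of the original source: a caller hands parse() a
// pre-tokenized store and the name of the netlist it wants. parse() returns true
// once that netlist (or truth table) has been parsed, or, when nlname is empty,
// once the first netlist is parsed; it returns false if the token stream ends
// first. Any other named netlists seen along the way are cached into m_local.
// The parser_t type name is an assumption about this port.
bool parse_named_netlist_sketch(parser_t parser, parser_t_token_store tokens, string nlname)
{
    // false means ENDOFFILE was reached without a matching NETLIST_START(nlname)
    return parser.parse(tokens, nlname);
}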