py/compile: De-duplicate constant objects in module's constant table.

The recent rework of bytecode made all constants global with respect to the
module (previously, each function had its own constant table).  That means
the constant table for a module is shared among all functions/methods/etc
within the module.

This commit adds support to the compiler to de-duplicate constants in the
module's constant table.  So if a constant is used more than once -- eg 1.0
or (None, None) -- then the same object is reused for all instances.

For example, if there is code like `print(1.0, 1.0)` then the parser will
create two independent constants 1.0 and 1.0.  The compiler will then (with
this commit) notice they are the same and only put one of them in the
constant table.  The bytecode will then reuse that constant twice in the
print expression.  That allows the second 1.0 to be reclaimed by the GC, and
it also means the constant table has one fewer entry, so it saves a word.

Signed-off-by: Damien George <damien@micropython.org>
This commit is contained in:
Damien George 2022-05-07 15:51:41 +10:00
parent b3d0f5f67c
commit 8588525868
5 changed files with 76 additions and 52 deletions

View File

@ -185,8 +185,6 @@ typedef struct _compiler_t {
scope_t *scope_head;
scope_t *scope_cur;
mp_emit_common_t emit_common;
emit_t *emit; // current emitter
#if NEED_METHOD_TABLE
const emit_method_table_t *emit_method_table; // current emit method table
@ -196,6 +194,8 @@ typedef struct _compiler_t {
emit_inline_asm_t *emit_inline_asm; // current emitter for inline asm
const emit_inline_asm_method_table_t *emit_inline_asm_method_table; // current emit method table for inline asm
#endif
mp_emit_common_t emit_common;
} compiler_t;
/******************************************************************************/
@ -210,15 +210,11 @@ STATIC void mp_emit_common_init(mp_emit_common_t *emit, qstr source_file) {
mp_map_elem_t *elem = mp_map_lookup(&emit->qstr_map, MP_OBJ_NEW_QSTR(source_file), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND);
elem->value = MP_OBJ_NEW_SMALL_INT(0);
#endif
mp_obj_list_init(&emit->const_obj_list, 0);
}
STATIC void mp_emit_common_start_pass(mp_emit_common_t *emit, pass_kind_t pass) {
emit->pass = pass;
if (pass == MP_PASS_STACK_SIZE) {
emit->ct_cur_obj_base = emit->ct_cur_obj;
} else if (pass > MP_PASS_STACK_SIZE) {
emit->ct_cur_obj = emit->ct_cur_obj_base;
}
if (pass == MP_PASS_CODE_SIZE) {
if (emit->ct_cur_child == 0) {
emit->children = NULL;
@ -229,22 +225,10 @@ STATIC void mp_emit_common_start_pass(mp_emit_common_t *emit, pass_kind_t pass)
emit->ct_cur_child = 0;
}
STATIC void mp_emit_common_finalise(mp_emit_common_t *emit, bool has_native_code) {
emit->ct_cur_obj += has_native_code; // allocate an additional slot for &mp_fun_table
emit->const_table = m_new0(mp_uint_t, emit->ct_cur_obj);
emit->ct_cur_obj = has_native_code; // reserve slot 0 for &mp_fun_table
#if MICROPY_EMIT_NATIVE
if (has_native_code) {
// store mp_fun_table pointer at the start of the constant table
emit->const_table[0] = (mp_uint_t)(uintptr_t)&mp_fun_table;
}
#endif
}
STATIC void mp_emit_common_populate_module_context(mp_emit_common_t *emit, qstr source_file, mp_module_context_t *context) {
#if MICROPY_EMIT_BYTECODE_USES_QSTR_TABLE
size_t qstr_map_used = emit->qstr_map.used;
mp_module_context_alloc_tables(context, qstr_map_used, emit->ct_cur_obj);
mp_module_context_alloc_tables(context, qstr_map_used, emit->const_obj_list.len);
for (size_t i = 0; i < emit->qstr_map.alloc; ++i) {
if (mp_map_slot_is_filled(&emit->qstr_map, i)) {
size_t idx = MP_OBJ_SMALL_INT_VALUE(emit->qstr_map.table[i].value);
@ -253,12 +237,12 @@ STATIC void mp_emit_common_populate_module_context(mp_emit_common_t *emit, qstr
}
}
#else
mp_module_context_alloc_tables(context, 0, emit->ct_cur_obj);
mp_module_context_alloc_tables(context, 0, emit->const_obj_list.len);
context->constants.source_file = source_file;
#endif
if (emit->ct_cur_obj > 0) {
memcpy(context->constants.obj_table, emit->const_table, emit->ct_cur_obj * sizeof(mp_uint_t));
for (size_t i = 0; i < emit->const_obj_list.len; ++i) {
context->constants.obj_table[i] = emit->const_obj_list.items[i];
}
}
@ -3501,23 +3485,13 @@ mp_compiled_module_t mp_compile_to_raw_code(mp_parse_tree_t *parse_tree, qstr so
}
// compute some things related to scope and identifiers
bool has_native_code = false;
for (scope_t *s = comp->scope_head; s != NULL && comp->compile_error == MP_OBJ_NULL; s = s->next) {
#if MICROPY_EMIT_NATIVE
if (s->emit_options == MP_EMIT_OPT_NATIVE_PYTHON || s->emit_options == MP_EMIT_OPT_VIPER) {
has_native_code = true;
}
#endif
scope_compute_things(s);
}
// set max number of labels now that it's calculated
emit_bc_set_max_num_labels(emit_bc, max_num_labels);
// finalise and allocate the constant table
mp_emit_common_finalise(&comp->emit_common, has_native_code);
// compile MP_PASS_STACK_SIZE, MP_PASS_CODE_SIZE, MP_PASS_EMIT
#if MICROPY_EMIT_NATIVE
emit_t *emit_native = NULL;
@ -3604,9 +3578,19 @@ mp_compiled_module_t mp_compile_to_raw_code(mp_parse_tree_t *parse_tree, qstr so
cm.rc = module_scope->raw_code;
cm.context = context;
#if MICROPY_PERSISTENT_CODE_SAVE
cm.has_native = has_native_code;
cm.has_native = false;
#if MICROPY_EMIT_NATIVE
if (emit_native != NULL) {
cm.has_native = true;
}
#endif
#if MICROPY_EMIT_INLINE_ASM
if (comp->emit_inline_asm != NULL) {
cm.has_native = true;
}
#endif
cm.n_qstr = comp->emit_common.qstr_map.used;
cm.n_obj = comp->emit_common.ct_cur_obj;
cm.n_obj = comp->emit_common.const_obj_list.len;
#endif
if (comp->compile_error == MP_OBJ_NULL) {
mp_emit_common_populate_module_context(&comp->emit_common, source_file, context);

View File

@ -94,14 +94,12 @@ typedef struct _emit_t emit_t;
typedef struct _mp_emit_common_t {
pass_kind_t pass;
uint16_t ct_cur_obj_base;
uint16_t ct_cur_obj;
uint16_t ct_cur_child;
mp_uint_t *const_table;
mp_raw_code_t **children;
#if MICROPY_EMIT_BYTECODE_USES_QSTR_TABLE
mp_map_t qstr_map;
#endif
mp_obj_list_t const_obj_list;
} mp_emit_common_t;
typedef struct _mp_emit_method_table_id_ops_t {
@ -181,12 +179,7 @@ static inline qstr_short_t mp_emit_common_use_qstr(mp_emit_common_t *emit, qstr
}
#endif
static inline size_t mp_emit_common_alloc_const_obj(mp_emit_common_t *emit, mp_obj_t obj) {
if (emit->pass == MP_PASS_EMIT) {
emit->const_table[emit->ct_cur_obj] = (mp_uint_t)obj;
}
return emit->ct_cur_obj++;
}
size_t mp_emit_common_use_const_obj(mp_emit_common_t *emit, mp_obj_t const_obj);
static inline size_t mp_emit_common_alloc_const_child(mp_emit_common_t *emit, mp_raw_code_t *rc) {
if (emit->pass == MP_PASS_EMIT) {

View File

@ -204,8 +204,7 @@ STATIC void emit_write_bytecode_byte_qstr(emit_t *emit, int stack_adj, byte b, q
}
STATIC void emit_write_bytecode_byte_obj(emit_t *emit, int stack_adj, byte b, mp_obj_t obj) {
emit_write_bytecode_byte_const(emit, stack_adj, b,
mp_emit_common_alloc_const_obj(emit->emit_common, obj));
emit_write_bytecode_byte_const(emit, stack_adj, b, mp_emit_common_use_const_obj(emit->emit_common, obj));
}
STATIC void emit_write_bytecode_byte_child(emit_t *emit, int stack_adj, byte b, mp_raw_code_t *rc) {

View File

@ -27,6 +27,7 @@
#include <assert.h>
#include "py/emit.h"
#include "py/nativeglue.h"
#if MICROPY_ENABLE_COMPILER
@ -40,6 +41,51 @@ qstr_short_t mp_emit_common_use_qstr(mp_emit_common_t *emit, qstr qst) {
}
#endif
// Determine whether two constant objects are interchangeable in the constant
// table: they must have identical type as well as equal value.  This is
// stricter than mp_obj_equal which, eg, treats (True,) and (1.0,) as equal.
static bool strictly_equal(mp_obj_t a, mp_obj_t b) {
    // Identical references are trivially equal.
    if (a == b) {
        return true;
    }
    #if MICROPY_EMIT_NATIVE
    // mp_fun_table is not a real Python object, so only pointer identity
    // (handled above) can make it equal to anything.
    if (a == MP_OBJ_FROM_PTR(&mp_fun_table) || b == MP_OBJ_FROM_PTR(&mp_fun_table)) {
        return false;
    }
    #endif
    // Differing concrete types can never be strictly equal.
    const mp_obj_type_t *type = mp_obj_get_type(a);
    if (type != mp_obj_get_type(b)) {
        return false;
    }
    if (type != &mp_type_tuple) {
        // Non-tuples: same type, so value equality suffices.
        return mp_obj_equal(a, b);
    }
    // Tuples: compare element-wise, recursing with strict equality.
    mp_obj_tuple_t *tuple_a = MP_OBJ_TO_PTR(a);
    mp_obj_tuple_t *tuple_b = MP_OBJ_TO_PTR(b);
    if (tuple_a->len != tuple_b->len) {
        return false;
    }
    for (size_t i = 0; i < tuple_a->len; ++i) {
        if (!strictly_equal(tuple_a->items[i], tuple_b->items[i])) {
            return false;
        }
    }
    return true;
}
// Return the constant-table index for const_obj, de-duplicating: if an object
// strictly equal to const_obj is already in the table its index is reused,
// otherwise const_obj is appended and the new index returned.
size_t mp_emit_common_use_const_obj(mp_emit_common_t *emit, mp_obj_t const_obj) {
    size_t num_objs = emit->const_obj_list.len;
    // Linear scan for an existing, strictly-equal constant.
    for (size_t idx = 0; idx < num_objs; ++idx) {
        if (strictly_equal(emit->const_obj_list.items[idx], const_obj)) {
            return idx;
        }
    }
    // Not found: append it; its index is the length before the append.
    mp_obj_list_append(MP_OBJ_FROM_PTR(&emit->const_obj_list), const_obj);
    return num_objs;
}
void mp_emit_common_get_id_for_modification(scope_t *scope, qstr qst) {
// name adding/lookup
id_info_t *id = scope_find_or_add_id(scope, qst, ID_INFO_KIND_GLOBAL_IMPLICIT);

View File

@ -99,7 +99,6 @@
#define OFFSETOF_OBJ_FUN_BC_BYTECODE (offsetof(mp_obj_fun_bc_t, bytecode) / sizeof(uintptr_t))
#define OFFSETOF_MODULE_CONTEXT_OBJ_TABLE (offsetof(mp_module_context_t, constants.obj_table) / sizeof(uintptr_t))
#define OFFSETOF_MODULE_CONTEXT_GLOBALS (offsetof(mp_module_context_t, module.globals) / sizeof(uintptr_t))
#define INDEX_OF_MP_FUN_TABLE_IN_CONST_TABLE (0)
// If not already defined, set parent args to same as child call registers
#ifndef REG_PARENT_RET
@ -406,6 +405,8 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
emit->code_state_start = SIZEOF_NLR_BUF;
}
size_t fun_table_off = mp_emit_common_use_const_obj(emit->emit_common, MP_OBJ_FROM_PTR(&mp_fun_table));
if (emit->do_viper_types) {
// Work out size of state (locals plus stack)
// n_state counts all stack and locals, even those in registers
@ -443,7 +444,7 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
// Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, INDEX_OF_MP_FUN_TABLE_IN_CONST_TABLE);
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, fun_table_off);
// Store function object (passed as first arg) to stack if needed
if (NEED_FUN_OBJ(emit)) {
@ -520,7 +521,7 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, LOCAL_IDX_FUN_OBJ(emit));
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONTEXT);
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_TEMP0, INDEX_OF_MP_FUN_TABLE_IN_CONST_TABLE);
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_TEMP0, fun_table_off);
} else {
// The locals and stack start after the code_state structure
emit->stack_start = emit->code_state_start + SIZEOF_CODE_STATE;
@ -540,7 +541,7 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
// Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, INDEX_OF_MP_FUN_TABLE_IN_CONST_TABLE);
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, fun_table_off);
// Set code_state.fun_bc
ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_PARENT_ARG_1);
@ -1105,7 +1106,7 @@ STATIC exc_stack_entry_t *emit_native_pop_exc_stack(emit_t *emit) {
}
STATIC void emit_load_reg_with_object(emit_t *emit, int reg, mp_obj_t obj) {
size_t table_off = mp_emit_common_alloc_const_obj(emit->emit_common, obj);
size_t table_off = mp_emit_common_use_const_obj(emit->emit_common, obj);
emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_FUN_OBJ(emit));
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONTEXT);
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
@ -1211,10 +1212,11 @@ STATIC void emit_native_global_exc_entry(emit_t *emit) {
emit_native_label_assign(emit, global_except_label);
#if N_NLR_SETJMP
// Reload REG_FUN_TABLE, since it may be clobbered by longjmp
size_t fun_table_off = mp_emit_common_use_const_obj(emit->emit_common, MP_OBJ_FROM_PTR(&mp_fun_table));
emit_native_mov_reg_state(emit, REG_LOCAL_1, LOCAL_IDX_FUN_OBJ(emit));
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_1, REG_LOCAL_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_1, REG_LOCAL_1, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_LOCAL_1, emit->scope->num_pos_args + emit->scope->num_kwonly_args);
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_LOCAL_1, fun_table_off);
#endif
ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_1, LOCAL_IDX_EXC_HANDLER_PC(emit));
ASM_JUMP_IF_REG_NONZERO(emit->as, REG_LOCAL_1, nlr_label, false);