py: Add option to cache map lookup results in bytecode.

This is a simple optimisation inspired by JITing technology: we cache in
the bytecode (using 1 byte) the offset of the last successful lookup in
a map.  This allows us, the next time round, to check that location in the
hash table (mp_map_t) for the desired entry, and if it's there, use that
entry straight away.  Otherwise we fall back to a normal map lookup.

Works for LOAD_NAME, LOAD_GLOBAL, LOAD_ATTR and STORE_ATTR opcodes.

On a few tests it gives a >90% cache hit rate and greatly improves the
speed of the code.

Disabled by default.  Enabled for unix and stmhal ports.
pull/1049/head
Damien George 2015-01-06 12:51:39 +00:00
parent b4b10fd350
commit 7ee91cf861
7 changed files with 142 additions and 4 deletions
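The idea can be seen in miniature before reading the diffs: a 1-byte cache sits next to the opcode, remembers which slot of the map the name was last found in, and is re-verified on every use, so a stale or zero-initialised value costs nothing but a fallback. The sketch below is a self-contained, illustrative C program, not the MicroPython API: table_t, entry_t and cached_lookup are made-up stand-ins for mp_map_t, mp_map_elem_t and the fast path implemented in the py/vm.c hunks, and the uint8_t cache plays the role of the extra byte that py/emitbc.c now emits after LOAD_NAME, LOAD_GLOBAL, LOAD_ATTR and STORE_ATTR.

// Self-contained sketch only: entry_t/table_t/cached_lookup are illustrative
// stand-ins for mp_map_elem_t/mp_map_t/the VM fast path, and the uint8_t
// cache stands in for the byte stored after the opcode in the bytecode.
#include <stdint.h>
#include <stdio.h>
#include <string.h>

typedef struct { const char *key; int value; } entry_t;
typedef struct { entry_t table[8]; size_t alloc; } table_t;

// Slow path: a plain scan (the real code calls mp_map_lookup on a hash table).
static entry_t *table_lookup(table_t *t, const char *key) {
    for (size_t i = 0; i < t->alloc; ++i) {
        if (t->table[i].key != NULL && strcmp(t->table[i].key, key) == 0) {
            return &t->table[i];
        }
    }
    return NULL;
}

// Fast path: try the slot remembered in *cache first; on a miss, fall back to
// a normal lookup and remember where the entry was found.  A stale or
// zero-initialised cache is harmless because the key is checked before use.
static entry_t *cached_lookup(table_t *t, const char *key, uint8_t *cache) {
    uint8_t x = *cache;
    if (x < t->alloc && t->table[x].key != NULL && strcmp(t->table[x].key, key) == 0) {
        return &t->table[x]; // cache hit: no hashing, no probing
    }
    entry_t *elem = table_lookup(t, key);
    if (elem != NULL) {
        *cache = (uint8_t)((elem - &t->table[0]) & 0xff); // remember the slot
    }
    return elem;
}

int main(void) {
    table_t t = { .table = { [3] = { "foo", 42 } }, .alloc = 8 };
    uint8_t cache = 0; // the emitter initialises the cache byte to 0
    for (int i = 0; i < 3; ++i) {
        entry_t *e = cached_lookup(&t, "foo", &cache);
        printf("foo = %d (cached slot %u)\n", e->value, (unsigned)cache);
    }
    return 0;
}

On the first call the lookup falls back to the slow path and records slot 3 in the cache byte; the next two calls hit that slot directly after a single key comparison, which is the behaviour behind the >90% hit rates quoted above.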

py/emitbc.c

@@ -510,16 +510,25 @@ STATIC void emit_bc_load_deref(emit_t *emit, qstr qst, mp_uint_t local_num) {
STATIC void emit_bc_load_name(emit_t *emit, qstr qst) {
    emit_bc_pre(emit, 1);
    emit_write_bytecode_byte_qstr(emit, MP_BC_LOAD_NAME, qst);
    if (MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE) {
        emit_write_bytecode_byte(emit, 0);
    }
}
STATIC void emit_bc_load_global(emit_t *emit, qstr qst) {
    emit_bc_pre(emit, 1);
    emit_write_bytecode_byte_qstr(emit, MP_BC_LOAD_GLOBAL, qst);
    if (MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE) {
        emit_write_bytecode_byte(emit, 0);
    }
}
STATIC void emit_bc_load_attr(emit_t *emit, qstr qst) {
    emit_bc_pre(emit, 0);
    emit_write_bytecode_byte_qstr(emit, MP_BC_LOAD_ATTR, qst);
    if (MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE) {
        emit_write_bytecode_byte(emit, 0);
    }
}
STATIC void emit_bc_load_method(emit_t *emit, qstr qst) {
@@ -565,6 +574,9 @@ STATIC void emit_bc_store_global(emit_t *emit, qstr qst) {
STATIC void emit_bc_store_attr(emit_t *emit, qstr qst) {
    emit_bc_pre(emit, -2);
    emit_write_bytecode_byte_qstr(emit, MP_BC_STORE_ATTR, qst);
    if (MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE) {
        emit_write_bytecode_byte(emit, 0);
    }
}
STATIC void emit_bc_store_subscr(emit_t *emit) {

py/mpconfig.h

@@ -182,6 +182,13 @@
#define MICROPY_OPT_COMPUTED_GOTO (0)
#endif
// Whether to cache result of map lookups in LOAD_NAME, LOAD_GLOBAL, LOAD_ATTR,
// STORE_ATTR bytecodes. Uses 1 byte extra RAM for each of these opcodes and
// uses a bit of extra code ROM, but greatly improves lookup speed.
#ifndef MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE
#define MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE (0)
#endif
/*****************************************************************************/
/* Python internal features */

py/objtype.c

@@ -456,7 +456,7 @@ STATIC mp_obj_t instance_binary_op(mp_uint_t op, mp_obj_t lhs_in, mp_obj_t rhs_i
    }
}
STATIC void instance_load_attr(mp_obj_t self_in, qstr attr, mp_obj_t *dest) {
void mp_obj_instance_load_attr(mp_obj_t self_in, qstr attr, mp_obj_t *dest) {
    // logic: look in obj members then class locals (TODO check this against CPython)
    assert(is_instance_type(mp_obj_get_type(self_in)));
    mp_obj_instance_t *self = self_in;
@@ -510,7 +510,7 @@ STATIC void instance_load_attr(mp_obj_t self_in, qstr attr, mp_obj_t *dest) {
    }
}
STATIC bool instance_store_attr(mp_obj_t self_in, qstr attr, mp_obj_t value) {
bool mp_obj_instance_store_attr(mp_obj_t self_in, qstr attr, mp_obj_t value) {
    mp_obj_instance_t *self = self_in;
#if MICROPY_PY_BUILTINS_PROPERTY
@@ -817,8 +817,8 @@ mp_obj_t mp_obj_new_type(qstr name, mp_obj_t bases_tuple, mp_obj_t locals_dict)
    o->make_new = instance_make_new;
    o->unary_op = instance_unary_op;
    o->binary_op = instance_binary_op;
    o->load_attr = instance_load_attr;
    o->store_attr = instance_store_attr;
    o->load_attr = mp_obj_instance_load_attr;
    o->store_attr = mp_obj_instance_store_attr;
    o->subscr = instance_subscr;
    o->call = mp_obj_instance_call;
    o->getiter = instance_getiter;

py/objtype.h

@@ -37,6 +37,10 @@ typedef struct _mp_obj_instance_t {
    // TODO maybe cache __getattr__ and __setattr__ for efficient lookup of them
} mp_obj_instance_t;
// these need to be exposed for MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE to work
void mp_obj_instance_load_attr(mp_obj_t self_in, qstr attr, mp_obj_t *dest);
bool mp_obj_instance_store_attr(mp_obj_t self_in, qstr attr, mp_obj_t value);
// these need to be exposed so mp_obj_is_callable can work correctly
bool mp_obj_instance_is_callable(mp_obj_t self_in);
mp_obj_t mp_obj_instance_call(mp_obj_t self_in, mp_uint_t n_args, mp_uint_t n_kw, const mp_obj_t *args);
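The two prototypes above lose their STATIC linkage because py/vm.c (below) decides whether the cached fast path applies by comparing a type's load_attr/store_attr slots against these exact function pointers. What follows is a self-contained sketch of that identity-comparison dispatch pattern with purely illustrative names (obj_t, store_attr_fn, store_with_fast_path are not MicroPython APIs):

#include <stdio.h>

// Illustrative stand-ins (not the MicroPython API): a "type" slot holds a
// store_attr function pointer, and the fast path is taken only when that
// pointer is the one well-known implementation we know how to shortcut.
typedef struct obj obj_t;
typedef void (*store_attr_fn)(obj_t *self, const char *attr, int value);
struct obj {
    store_attr_fn store_attr;
    int x;
};

static void instance_store_attr(obj_t *self, const char *attr, int value) {
    (void)attr;      // the "generic instance" behaviour: just set the field
    self->x = value;
}

static void store_with_fast_path(obj_t *o, const char *attr, int value) {
    if (o->store_attr == instance_store_attr) {
        // Fast path: we know exactly what instance_store_attr would do, so it
        // is safe to specialise (in the VM: to use the cached map slot).
        o->x = value;
    } else {
        // Anything else (custom attribute handling): full dynamic dispatch.
        o->store_attr(o, attr, value);
    }
}

int main(void) {
    obj_t o = { instance_store_attr, 0 };
    store_with_fast_path(&o, "x", 42);
    printf("o.x = %d\n", o.x);
    return 0;
}

Comparing the function pointer is cheaper than a full type check and guarantees the shortcut is only taken for objects whose attribute behaviour the VM fully understands.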

py/vm.c

@@ -32,6 +32,7 @@
#include "py/mpstate.h"
#include "py/nlr.h"
#include "py/emitglue.h"
#include "py/objtype.h"
#include "py/runtime.h"
#include "py/bc0.h"
#include "py/bc.h"
@@ -248,26 +249,101 @@ dispatch_loop:
    goto load_check;
}
#if !MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE
ENTRY(MP_BC_LOAD_NAME): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_QSTR;
    PUSH(mp_load_name(qst));
    DISPATCH();
}
#else
ENTRY(MP_BC_LOAD_NAME): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_QSTR;
    mp_obj_t key = MP_OBJ_NEW_QSTR(qst);
    mp_uint_t x = *ip;
    if (x < MP_STATE_CTX(dict_locals)->map.alloc && MP_STATE_CTX(dict_locals)->map.table[x].key == key) {
        PUSH(MP_STATE_CTX(dict_locals)->map.table[x].value);
    } else {
        mp_map_elem_t *elem = mp_map_lookup(&MP_STATE_CTX(dict_locals)->map, MP_OBJ_NEW_QSTR(qst), MP_MAP_LOOKUP);
        if (elem != NULL) {
            *(byte*)ip = (elem - &MP_STATE_CTX(dict_locals)->map.table[0]) & 0xff;
            PUSH(elem->value);
        } else {
            PUSH(mp_load_name(MP_OBJ_QSTR_VALUE(key)));
        }
    }
    ip++;
    DISPATCH();
}
#endif
#if !MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE
ENTRY(MP_BC_LOAD_GLOBAL): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_QSTR;
    PUSH(mp_load_global(qst));
    DISPATCH();
}
#else
ENTRY(MP_BC_LOAD_GLOBAL): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_QSTR;
    mp_obj_t key = MP_OBJ_NEW_QSTR(qst);
    mp_uint_t x = *ip;
    if (x < MP_STATE_CTX(dict_globals)->map.alloc && MP_STATE_CTX(dict_globals)->map.table[x].key == key) {
        PUSH(MP_STATE_CTX(dict_globals)->map.table[x].value);
    } else {
        mp_map_elem_t *elem = mp_map_lookup(&MP_STATE_CTX(dict_globals)->map, MP_OBJ_NEW_QSTR(qst), MP_MAP_LOOKUP);
        if (elem != NULL) {
            *(byte*)ip = (elem - &MP_STATE_CTX(dict_globals)->map.table[0]) & 0xff;
            PUSH(elem->value);
        } else {
            PUSH(mp_load_global(MP_OBJ_QSTR_VALUE(key)));
        }
    }
    ip++;
    DISPATCH();
}
#endif
#if !MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE
ENTRY(MP_BC_LOAD_ATTR): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_QSTR;
    SET_TOP(mp_load_attr(TOP(), qst));
    DISPATCH();
}
#else
ENTRY(MP_BC_LOAD_ATTR): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_QSTR;
    mp_obj_t top = TOP();
    if (mp_obj_get_type(top)->load_attr == mp_obj_instance_load_attr) {
        mp_obj_instance_t *self = top;
        mp_uint_t x = *ip;
        mp_obj_t key = MP_OBJ_NEW_QSTR(qst);
        mp_map_elem_t *elem;
        if (x < self->members.alloc && self->members.table[x].key == key) {
            elem = &self->members.table[x];
        } else {
            elem = mp_map_lookup(&self->members, key, MP_MAP_LOOKUP);
            if (elem != NULL) {
                *(byte*)ip = elem - &self->members.table[0];
            } else {
                goto load_attr_cache_fail;
            }
        }
        SET_TOP(elem->value);
        ip++;
        DISPATCH();
    }
load_attr_cache_fail:
    SET_TOP(mp_load_attr(top, qst));
    ip++;
    DISPATCH();
}
#endif
ENTRY(MP_BC_LOAD_METHOD): {
    MARK_EXC_IP_SELECTIVE();
@@ -315,6 +391,7 @@ dispatch_loop:
    DISPATCH();
}
#if !MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE
ENTRY(MP_BC_STORE_ATTR): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_QSTR;
@@ -322,6 +399,42 @@ dispatch_loop:
    sp -= 2;
    DISPATCH();
}
#else
// This caching code works with MICROPY_PY_BUILTINS_PROPERTY enabled because
// if the attr exists in self->members then it can't be a property.  A
// consequence of this is that we can't use MP_MAP_LOOKUP_ADD_IF_NOT_FOUND
// in the fast-path below, because that store could override a property.
ENTRY(MP_BC_STORE_ATTR): {
    MARK_EXC_IP_SELECTIVE();
    DECODE_QSTR;
    mp_obj_t top = TOP();
    if (mp_obj_get_type(top)->store_attr == mp_obj_instance_store_attr && sp[-1] != MP_OBJ_NULL) {
        mp_obj_instance_t *self = top;
        mp_uint_t x = *ip;
        mp_obj_t key = MP_OBJ_NEW_QSTR(qst);
        mp_map_elem_t *elem;
        if (x < self->members.alloc && self->members.table[x].key == key) {
            elem = &self->members.table[x];
        } else {
            elem = mp_map_lookup(&self->members, key, MP_MAP_LOOKUP);
            if (elem != NULL) {
                *(byte*)ip = elem - &self->members.table[0];
            } else {
                goto store_attr_cache_fail;
            }
        }
        elem->value = sp[-1];
        sp -= 2;
        ip++;
        DISPATCH();
    }
store_attr_cache_fail:
    mp_store_attr(sp[0], qst, sp[-1]);
    sp -= 2;
    ip++;
    DISPATCH();
}
#endif
ENTRY(MP_BC_STORE_SUBSCR):
    MARK_EXC_IP_SELECTIVE();

stmhal/mpconfigport.h

@@ -40,6 +40,7 @@
#define MICROPY_LONGINT_IMPL (MICROPY_LONGINT_IMPL_MPZ)
#define MICROPY_FLOAT_IMPL (MICROPY_FLOAT_IMPL_FLOAT)
#define MICROPY_OPT_COMPUTED_GOTO (1)
#define MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE (1)
/* Enable FatFS LFNs
0: Disable LFN feature.
1: Enable LFN with static working buffer on the BSS. Always NOT reentrant.

unix/mpconfigport.h

@@ -50,6 +50,7 @@
#define MICROPY_LONGINT_IMPL (MICROPY_LONGINT_IMPL_MPZ)
#define MICROPY_STREAMS_NON_BLOCK (1)
#define MICROPY_OPT_COMPUTED_GOTO (1)
#define MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE (1)
#define MICROPY_CAN_OVERRIDE_BUILTINS (1)
#define MICROPY_PY_BUILTINS_STR_UNICODE (1)
#define MICROPY_PY_BUILTINS_MEMORYVIEW (1)