Mirror of https://github.com/github/ruby.git
Eagerly allocate instance variable tables along with object
This allows us to allocate the right size for the object in advance, meaning that we don't have to pay the cost of ivar table extension later. The idea is that if an object type ever became "extended" at some point, then it is very likely it will become extended again. So we may as well allocate the ivar table up front.
This commit is contained in:
Parent
a6ff1dc6f9
Commit
9a6226c61e
|
@ -0,0 +1,23 @@
|
|||
prelude: |
|
||||
class Embedded
|
||||
def initialize
|
||||
@a = 1
|
||||
@b = 1
|
||||
@c = 1
|
||||
end
|
||||
end
|
||||
|
||||
class Extended
|
||||
def initialize
|
||||
@a = 1
|
||||
@b = 1
|
||||
@c = 1
|
||||
@d = 1
|
||||
@e = 1
|
||||
@f = 1
|
||||
end
|
||||
end
|
||||
benchmark:
|
||||
embedded: Embedded.new
|
||||
extended: Extended.new
|
||||
loop_count: 20_000_000
|
20
gc.c
20
gc.c
|
@ -2390,7 +2390,14 @@ VALUE
|
|||
rb_newobj_of(VALUE klass, VALUE flags)
|
||||
{
|
||||
if ((flags & RUBY_T_MASK) == T_OBJECT) {
|
||||
return newobj_of(klass, (flags | ROBJECT_EMBED) & ~FL_WB_PROTECTED , Qundef, Qundef, Qundef, flags & FL_WB_PROTECTED);
|
||||
st_table *index_tbl = RCLASS_IV_INDEX_TBL(klass);
|
||||
|
||||
VALUE obj = newobj_of(klass, (flags | ROBJECT_EMBED) & ~FL_WB_PROTECTED , Qundef, Qundef, Qundef, flags & FL_WB_PROTECTED);
|
||||
|
||||
if (index_tbl && index_tbl->num_entries > ROBJECT_EMBED_LEN_MAX) {
|
||||
rb_init_iv_list(obj);
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
else {
|
||||
return newobj_of(klass, flags & ~FL_WB_PROTECTED, 0, 0, 0, flags & FL_WB_PROTECTED);
|
||||
|
@ -2501,8 +2508,17 @@ rb_imemo_new_debug(enum imemo_type type, VALUE v1, VALUE v2, VALUE v3, VALUE v0,
|
|||
VALUE
|
||||
rb_class_allocate_instance(VALUE klass)
|
||||
{
|
||||
st_table *index_tbl = RCLASS_IV_INDEX_TBL(klass);
|
||||
|
||||
VALUE flags = T_OBJECT | ROBJECT_EMBED;
|
||||
return newobj_of(klass, flags, Qundef, Qundef, Qundef, RGENGC_WB_PROTECTED_OBJECT);
|
||||
|
||||
VALUE obj = newobj_of(klass, flags, Qundef, Qundef, Qundef, RGENGC_WB_PROTECTED_OBJECT);
|
||||
|
||||
if (index_tbl && index_tbl->num_entries > ROBJECT_EMBED_LEN_MAX) {
|
||||
rb_init_iv_list(obj);
|
||||
}
|
||||
|
||||
return obj;
|
||||
}
|
||||
|
||||
VALUE
|
||||
|
|
35
object.c
35
object.c
|
@ -324,24 +324,33 @@ rb_obj_singleton_class(VALUE obj)
|
|||
MJIT_FUNC_EXPORTED void
|
||||
rb_obj_copy_ivar(VALUE dest, VALUE obj)
|
||||
{
|
||||
RUBY_ASSERT(RBASIC(dest)->flags & ROBJECT_EMBED);
|
||||
VALUE *dst_buf = 0;
|
||||
VALUE *src_buf = 0;
|
||||
uint32_t len = ROBJECT_EMBED_LEN_MAX;
|
||||
|
||||
if (RBASIC(obj)->flags & ROBJECT_EMBED) {
|
||||
MEMCPY(ROBJECT(dest)->as.ary, ROBJECT(obj)->as.ary, VALUE, ROBJECT_EMBED_LEN_MAX);
|
||||
RBASIC(dest)->flags |= ROBJECT_EMBED;
|
||||
src_buf = ROBJECT(obj)->as.ary;
|
||||
|
||||
// embedded -> embedded
|
||||
if (RBASIC(dest)->flags & ROBJECT_EMBED) {
|
||||
dst_buf = ROBJECT(dest)->as.ary;
|
||||
}
|
||||
// embedded -> extended
|
||||
else {
|
||||
dst_buf = ROBJECT(dest)->as.heap.ivptr;
|
||||
}
|
||||
}
|
||||
// extended -> extended
|
||||
else {
|
||||
uint32_t len = ROBJECT(obj)->as.heap.numiv;
|
||||
VALUE *ptr = 0;
|
||||
if (len > 0) {
|
||||
ptr = ALLOC_N(VALUE, len);
|
||||
MEMCPY(ptr, ROBJECT(obj)->as.heap.ivptr, VALUE, len);
|
||||
}
|
||||
ROBJECT(dest)->as.heap.ivptr = ptr;
|
||||
ROBJECT(dest)->as.heap.numiv = len;
|
||||
ROBJECT(dest)->as.heap.iv_index_tbl = ROBJECT(obj)->as.heap.iv_index_tbl;
|
||||
RBASIC(dest)->flags &= ~ROBJECT_EMBED;
|
||||
uint32_t src_len = ROBJECT(obj)->as.heap.numiv;
|
||||
uint32_t dst_len = ROBJECT(dest)->as.heap.numiv;
|
||||
|
||||
len = src_len < dst_len ? src_len : dst_len;
|
||||
dst_buf = ROBJECT(dest)->as.heap.ivptr;
|
||||
src_buf = ROBJECT(obj)->as.heap.ivptr;
|
||||
}
|
||||
|
||||
MEMCPY(dst_buf, src_buf, VALUE, len);
|
||||
}
|
||||
|
||||
static void
|
||||
|
|
Loading…
Reference in new issue