2022-10-03 18:14:32 +03:00
|
|
|
#ifndef RUBY_SHAPE_H
|
|
|
|
#define RUBY_SHAPE_H
|
2022-12-15 21:54:07 +03:00
|
|
|
|
|
|
|
#include "internal/gc.h"
|
|
|
|
|
2023-03-24 12:07:02 +03:00
|
|
|
/* Width of shape IDs and attribute indexes depends on the platform word
 * size: 32 bits where a uint64_t fits in a VALUE, 16 bits otherwise.
 * On the 32-bit layout the shape ID also lives directly in the RBasic
 * flags word (SHAPE_IN_BASIC_FLAGS). */
#if (SIZEOF_UINT64_T <= SIZEOF_VALUE)

#define SIZEOF_SHAPE_T 4
#define SHAPE_IN_BASIC_FLAGS 1
typedef uint32_t attr_index_t;
typedef uint32_t shape_id_t;
# define SHAPE_ID_NUM_BITS 32

#else

#define SIZEOF_SHAPE_T 2
#define SHAPE_IN_BASIC_FLAGS 0
typedef uint16_t attr_index_t;
typedef uint16_t shape_id_t;
# define SHAPE_ID_NUM_BITS 16

#endif
|
|
|
|
|
2023-12-04 17:26:26 +03:00
|
|
|
typedef uint32_t redblack_id_t;

// Maximum representable ivar index: attr_index_t is unsigned, so -1 wraps
// to its maximum value.  Fully parenthesized so the cast cannot bind to
// adjacent tokens at the expansion site.
#define MAX_IVARS ((attr_index_t)(-1))

// Mask selecting the shape-ID bits of a flags word (after shifting).
# define SHAPE_MASK (((uintptr_t)1 << SHAPE_ID_NUM_BITS) - 1)
// Mask selecting everything in the flags word EXCEPT the shape ID.
# define SHAPE_FLAG_MASK (((VALUE)-1) >> SHAPE_ID_NUM_BITS)

// The shape ID is stored in the top SHAPE_ID_NUM_BITS of RBasic flags.
# define SHAPE_FLAG_SHIFT ((SIZEOF_VALUE * 8) - SHAPE_ID_NUM_BITS)

// Cap on shape variations — NOTE(review): exact semantics enforced in
// shape.c; value chosen upstream, do not change casually.
# define SHAPE_MAX_VARIATIONS 8

// Reserved shape IDs.  INVALID_SHAPE_ID is the all-ones pattern; the
// low IDs are pre-assigned in this fixed order.
# define INVALID_SHAPE_ID SHAPE_MASK
# define ROOT_SHAPE_ID 0x0
# define SPECIAL_CONST_SHAPE_ID (ROOT_SHAPE_ID + 1)
# define OBJ_TOO_COMPLEX_SHAPE_ID (SPECIAL_CONST_SHAPE_ID + 1)
# define FIRST_T_OBJECT_SHAPE_ID (OBJ_TOO_COMPLEX_SHAPE_ID + 1)

typedef struct redblack_node redblack_node_t;
|
|
|
|
|
2022-10-03 18:14:32 +03:00
|
|
|
struct rb_shape {
|
|
|
|
struct rb_id_table * edges; // id_table from ID (ivar) to next shape
|
|
|
|
ID edge_name; // ID (ivar) for transition from parent to rb_shape
|
2022-10-21 23:24:29 +03:00
|
|
|
attr_index_t next_iv_index;
|
2023-10-19 00:05:48 +03:00
|
|
|
uint32_t capacity; // Total capacity of the object with this shape
|
2022-10-03 18:14:32 +03:00
|
|
|
uint8_t type;
|
Rename size_pool -> heap
Now that we've inlined the eden_heap into the size_pool, we should
rename the size_pool to heap. So that Ruby contains multiple heaps, with
different sized objects.
The term heap as a collection of memory pages is more in memory
management nomenclature, whereas size_pool was a name chosen out of
necessity during the development of the Variable Width Allocation
features of Ruby.
The concept of size pools was introduced in order to facilitate
different sized objects (other than the default 40 bytes). They wrapped
the eden heap and the tomb heap, and some related state, and provided a
reasonably simple way of duplicating all related concerns, to provide
multiple pools that all shared the same structure but held different
objects.
Since then various changes have happend in Ruby's memory layout:
* The concept of tomb heaps has been replaced by a global free pages list,
with each page having it's slot size reconfigured at the point when it
is resurrected
* the eden heap has been inlined into the size pool itself, so that now
the size pool directly controls the free_pages list, the sweeping
page, the compaction cursor and the other state that was previously
being managed by the eden heap.
Now that there is no need for a heap wrapper, we should refer to the
collection of pages containing Ruby objects as a heap again rather than
a size pool
2024-10-03 15:53:49 +03:00
|
|
|
uint8_t heap_index;
|
2022-10-03 20:52:40 +03:00
|
|
|
shape_id_t parent_id;
|
2023-02-08 04:46:42 +03:00
|
|
|
redblack_node_t * ancestor_index;
|
2022-10-03 18:14:32 +03:00
|
|
|
};
|
|
|
|
|
|
|
|
typedef struct rb_shape rb_shape_t;
|
|
|
|
|
2023-02-08 04:46:42 +03:00
|
|
|
struct redblack_node {
|
|
|
|
ID key;
|
|
|
|
rb_shape_t * value;
|
|
|
|
redblack_id_t l;
|
|
|
|
redblack_id_t r;
|
|
|
|
};
|
|
|
|
|
2022-10-03 18:14:32 +03:00
|
|
|
// Discriminator stored in rb_shape.type.  Order is significant: these
// values are persisted in shape nodes, so do not reorder.
enum shape_type {
    SHAPE_ROOT,             // the root of the shape tree
    SHAPE_IVAR,             // transition created by adding an ivar
    SHAPE_FROZEN,           // transition created by freezing the object
    SHAPE_T_OBJECT,         // initial shape for T_OBJECT instances
    SHAPE_OBJ_TOO_COMPLEX,  // object fell back to an st_table for ivars
};
|
|
|
|
|
2023-02-17 16:32:51 +03:00
|
|
|
typedef struct {
|
|
|
|
/* object shapes */
|
|
|
|
rb_shape_t *shape_list;
|
|
|
|
rb_shape_t *root_shape;
|
|
|
|
shape_id_t next_shape_id;
|
2023-02-08 04:46:42 +03:00
|
|
|
|
|
|
|
redblack_node_t *shape_cache;
|
|
|
|
unsigned int cache_size;
|
2023-02-17 16:32:51 +03:00
|
|
|
} rb_shape_tree_t;
|
|
|
|
RUBY_EXTERN rb_shape_tree_t *rb_shape_tree_ptr;
|
|
|
|
|
|
|
|
static inline rb_shape_tree_t *
|
|
|
|
rb_current_shape_tree(void)
|
|
|
|
{
|
|
|
|
return rb_shape_tree_ptr;
|
|
|
|
}
|
|
|
|
#define GET_SHAPE_TREE() rb_current_shape_tree()
|
|
|
|
|
2022-10-03 18:14:32 +03:00
|
|
|
static inline shape_id_t
|
2023-04-14 23:27:37 +03:00
|
|
|
get_shape_id_from_flags(VALUE obj)
|
2022-10-03 18:14:32 +03:00
|
|
|
{
|
|
|
|
RUBY_ASSERT(!RB_SPECIAL_CONST_P(obj));
|
|
|
|
return (shape_id_t)(SHAPE_MASK & ((RBASIC(obj)->flags) >> SHAPE_FLAG_SHIFT));
|
|
|
|
}
|
|
|
|
|
|
|
|
static inline void
|
2023-04-14 23:27:37 +03:00
|
|
|
set_shape_id_in_flags(VALUE obj, shape_id_t shape_id)
|
2022-10-03 18:14:32 +03:00
|
|
|
{
|
|
|
|
// Ractors are occupying the upper 32 bits of flags, but only in debug mode
|
|
|
|
// Object shapes are occupying top bits
|
|
|
|
RBASIC(obj)->flags &= SHAPE_FLAG_MASK;
|
|
|
|
RBASIC(obj)->flags |= ((VALUE)(shape_id) << SHAPE_FLAG_SHIFT);
|
|
|
|
}
|
|
|
|
|
2023-04-14 23:27:37 +03:00
|
|
|
|
|
|
|
#if SHAPE_IN_BASIC_FLAGS
|
2022-10-03 18:14:32 +03:00
|
|
|
static inline shape_id_t
|
2023-04-14 23:27:37 +03:00
|
|
|
RBASIC_SHAPE_ID(VALUE obj)
|
2022-10-03 18:14:32 +03:00
|
|
|
{
|
2023-04-14 23:27:37 +03:00
|
|
|
return get_shape_id_from_flags(obj);
|
2022-10-03 18:14:32 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
static inline void
|
2023-04-14 23:27:37 +03:00
|
|
|
RBASIC_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
|
2022-11-01 00:05:37 +03:00
|
|
|
{
|
2023-04-14 23:27:37 +03:00
|
|
|
set_shape_id_in_flags(obj, shape_id);
|
2022-11-01 00:05:37 +03:00
|
|
|
}
|
2023-04-14 23:27:37 +03:00
|
|
|
#endif
|
2022-10-03 18:14:32 +03:00
|
|
|
|
|
|
|
static inline shape_id_t
|
|
|
|
ROBJECT_SHAPE_ID(VALUE obj)
|
|
|
|
{
|
|
|
|
RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
|
2023-04-14 23:27:37 +03:00
|
|
|
return get_shape_id_from_flags(obj);
|
2022-10-03 18:14:32 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
static inline void
|
|
|
|
ROBJECT_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
|
|
|
|
{
|
2023-04-14 23:27:37 +03:00
|
|
|
RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
|
|
|
|
set_shape_id_in_flags(obj, shape_id);
|
2022-10-03 18:14:32 +03:00
|
|
|
}
|
2022-11-01 00:05:37 +03:00
|
|
|
|
2023-04-14 23:27:37 +03:00
|
|
|
static inline shape_id_t
|
|
|
|
RCLASS_SHAPE_ID(VALUE obj)
|
2022-12-02 17:43:53 +03:00
|
|
|
{
|
2023-04-14 23:27:37 +03:00
|
|
|
RUBY_ASSERT(RB_TYPE_P(obj, T_CLASS) || RB_TYPE_P(obj, T_MODULE));
|
|
|
|
return get_shape_id_from_flags(obj);
|
2022-11-01 00:05:37 +03:00
|
|
|
}
|
|
|
|
|
2023-04-14 23:27:37 +03:00
|
|
|
static inline void
|
|
|
|
RCLASS_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
|
|
|
|
{
|
|
|
|
RUBY_ASSERT(RB_TYPE_P(obj, T_CLASS) || RB_TYPE_P(obj, T_MODULE));
|
|
|
|
set_shape_id_in_flags(obj, shape_id);
|
|
|
|
}
|
2022-10-03 18:14:32 +03:00
|
|
|
|
2022-11-08 23:35:31 +03:00
|
|
|
rb_shape_t * rb_shape_get_root_shape(void);
|
2022-12-02 20:33:20 +03:00
|
|
|
int32_t rb_shape_id_offset(void);
|
2022-10-03 18:14:32 +03:00
|
|
|
|
2022-11-10 19:36:24 +03:00
|
|
|
rb_shape_t * rb_shape_get_parent(rb_shape_t * shape);
|
2022-10-03 18:14:32 +03:00
|
|
|
|
2024-02-23 00:02:10 +03:00
|
|
|
RUBY_FUNC_EXPORTED rb_shape_t *rb_shape_get_shape_by_id(shape_id_t shape_id);
|
|
|
|
RUBY_FUNC_EXPORTED shape_id_t rb_shape_get_shape_id(VALUE obj);
|
2022-12-23 21:18:57 +03:00
|
|
|
rb_shape_t * rb_shape_get_next_iv_shape(rb_shape_t * shape, ID id);
|
|
|
|
bool rb_shape_get_iv_index(rb_shape_t * shape, ID id, attr_index_t * value);
|
2023-10-26 12:08:05 +03:00
|
|
|
bool rb_shape_get_iv_index_with_hint(shape_id_t shape_id, ID id, attr_index_t * value, shape_id_t *shape_id_hint);
|
2024-02-23 00:02:10 +03:00
|
|
|
RUBY_FUNC_EXPORTED bool rb_shape_obj_too_complex(VALUE obj);
|
2022-12-23 21:18:57 +03:00
|
|
|
|
|
|
|
void rb_shape_set_shape(VALUE obj, rb_shape_t* shape);
|
2022-10-03 18:14:32 +03:00
|
|
|
rb_shape_t* rb_shape_get_shape(VALUE obj);
|
|
|
|
int rb_shape_frozen_shape_p(rb_shape_t* shape);
|
2023-10-20 02:01:35 +03:00
|
|
|
rb_shape_t* rb_shape_transition_shape_frozen(VALUE obj);
|
2023-11-01 14:15:12 +03:00
|
|
|
bool rb_shape_transition_shape_remove_ivar(VALUE obj, ID id, rb_shape_t *shape, VALUE * removed);
|
2022-10-03 18:14:32 +03:00
|
|
|
rb_shape_t* rb_shape_get_next(rb_shape_t* shape, VALUE obj, ID id);
|
2024-06-04 18:27:29 +03:00
|
|
|
rb_shape_t* rb_shape_get_next_no_warnings(rb_shape_t* shape, VALUE obj, ID id);
|
2022-10-03 18:14:32 +03:00
|
|
|
|
2022-11-08 23:35:31 +03:00
|
|
|
rb_shape_t * rb_shape_rebuild_shape(rb_shape_t * initial_shape, rb_shape_t * dest_shape);
|
|
|
|
|
|
|
|
static inline uint32_t
|
|
|
|
ROBJECT_IV_CAPACITY(VALUE obj)
|
|
|
|
{
|
|
|
|
RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
|
2022-12-09 01:16:52 +03:00
|
|
|
// Asking for capacity doesn't make sense when the object is using
|
|
|
|
// a hash table for storing instance variables
|
2023-10-24 22:37:27 +03:00
|
|
|
RUBY_ASSERT(!rb_shape_obj_too_complex(obj));
|
2022-11-08 23:35:31 +03:00
|
|
|
return rb_shape_get_shape_by_id(ROBJECT_SHAPE_ID(obj))->capacity;
|
|
|
|
}
|
|
|
|
|
2023-03-17 21:29:04 +03:00
|
|
|
static inline st_table *
|
2022-12-09 01:16:52 +03:00
|
|
|
ROBJECT_IV_HASH(VALUE obj)
|
|
|
|
{
|
|
|
|
RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
|
2023-10-24 22:37:27 +03:00
|
|
|
RUBY_ASSERT(rb_shape_obj_too_complex(obj));
|
2023-03-17 21:29:04 +03:00
|
|
|
return (st_table *)ROBJECT(obj)->as.heap.ivptr;
|
2022-12-09 01:16:52 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
static inline void
|
2023-10-19 21:00:54 +03:00
|
|
|
ROBJECT_SET_IV_HASH(VALUE obj, const st_table *tbl)
|
2022-12-09 01:16:52 +03:00
|
|
|
{
|
|
|
|
RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
|
2023-10-24 22:37:27 +03:00
|
|
|
RUBY_ASSERT(rb_shape_obj_too_complex(obj));
|
2022-12-09 01:16:52 +03:00
|
|
|
ROBJECT(obj)->as.heap.ivptr = (VALUE *)tbl;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Number of entries currently stored in `tbl`.
size_t rb_id_table_size(const struct rb_id_table *tbl);
|
|
|
|
|
2022-10-15 19:37:44 +03:00
|
|
|
static inline uint32_t
|
|
|
|
ROBJECT_IV_COUNT(VALUE obj)
|
|
|
|
{
|
2023-10-24 22:37:27 +03:00
|
|
|
if (rb_shape_obj_too_complex(obj)) {
|
2023-03-17 21:29:04 +03:00
|
|
|
return (uint32_t)rb_st_table_size(ROBJECT_IV_HASH(obj));
|
2022-12-09 01:16:52 +03:00
|
|
|
}
|
|
|
|
else {
|
|
|
|
RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
|
2023-10-24 22:37:27 +03:00
|
|
|
RUBY_ASSERT(!rb_shape_obj_too_complex(obj));
|
2022-12-09 01:16:52 +03:00
|
|
|
return rb_shape_get_shape_by_id(ROBJECT_SHAPE_ID(obj))->next_iv_index;
|
|
|
|
}
|
2022-10-15 19:37:44 +03:00
|
|
|
}
|
|
|
|
|
2022-11-08 23:35:31 +03:00
|
|
|
static inline uint32_t
|
|
|
|
RBASIC_IV_COUNT(VALUE obj)
|
|
|
|
{
|
|
|
|
return rb_shape_get_shape_by_id(rb_shape_get_shape_id(obj))->next_iv_index;
|
|
|
|
}
|
|
|
|
|
2022-12-13 18:11:57 +03:00
|
|
|
rb_shape_t *rb_shape_traverse_from_new_root(rb_shape_t *initial_shape, rb_shape_t *orig_shape);
|
|
|
|
|
2022-10-03 18:14:32 +03:00
|
|
|
bool rb_shape_set_shape_id(VALUE obj, shape_id_t shape_id);
|
|
|
|
|
|
|
|
VALUE rb_obj_debug_shape(VALUE self, VALUE obj);
|
|
|
|
|
2022-12-23 21:18:57 +03:00
|
|
|
// For ext/objspace
|
2022-12-06 14:56:51 +03:00
|
|
|
RUBY_SYMBOL_EXPORT_BEGIN
|
|
|
|
typedef void each_shape_callback(rb_shape_t * shape, void *data);
|
|
|
|
void rb_shape_each_shape(each_shape_callback callback, void *data);
|
|
|
|
size_t rb_shape_memsize(rb_shape_t *shape);
|
|
|
|
size_t rb_shape_edges_count(rb_shape_t *shape);
|
|
|
|
size_t rb_shape_depth(rb_shape_t *shape);
|
2022-12-23 21:18:57 +03:00
|
|
|
shape_id_t rb_shape_id(rb_shape_t * shape);
|
2022-12-06 14:56:51 +03:00
|
|
|
RUBY_SYMBOL_EXPORT_END
|
|
|
|
|
2022-10-03 18:14:32 +03:00
|
|
|
#endif
|