Ruby 3.3.2p78 (2024-05-30 revision e5a195edf62fe1bf7146a191da13fa1c4fecbd71)
shape.c
1#include "vm_core.h"
2#include "vm_sync.h"
3#include "shape.h"
4#include "symbol.h"
5#include "id_table.h"
6#include "internal/class.h"
7#include "internal/error.h"
8#include "internal/gc.h"
9#include "internal/object.h"
10#include "internal/symbol.h"
11#include "internal/variable.h"
12#include "variable.h"
13#include <stdbool.h>
14
15#ifndef _WIN32
16#include <sys/mman.h>
17#endif
18
19#ifndef SHAPE_DEBUG
20#define SHAPE_DEBUG (VM_CHECK_MODE > 0)
21#endif
22
23#if SIZEOF_SHAPE_T == 4
24#if RUBY_DEBUG
25#define SHAPE_BUFFER_SIZE 0x8000
26#else
27#define SHAPE_BUFFER_SIZE 0x80000
28#endif
29#else
30#define SHAPE_BUFFER_SIZE 0x8000
31#endif
32
33#define REDBLACK_CACHE_SIZE (SHAPE_BUFFER_SIZE * 32)
34
35#define SINGLE_CHILD_TAG 0x1
36#define TAG_SINGLE_CHILD(x) (struct rb_id_table *)((uintptr_t)x | SINGLE_CHILD_TAG)
37#define SINGLE_CHILD_MASK (~((uintptr_t)SINGLE_CHILD_TAG))
38#define SINGLE_CHILD_P(x) (((uintptr_t)x) & SINGLE_CHILD_TAG)
39#define SINGLE_CHILD(x) (rb_shape_t *)((uintptr_t)x & SINGLE_CHILD_MASK)
40#define ANCESTOR_CACHE_THRESHOLD 10
41#define MAX_SHAPE_ID (SHAPE_BUFFER_SIZE - 1)
42#define ANCESTOR_SEARCH_MAX_DEPTH 2
43
44static ID id_frozen;
45static ID id_t_object;
46static ID size_pool_edge_names[SIZE_POOL_COUNT];
47
48#define LEAF 0
49#define BLACK 0x0
50#define RED 0x1
51
// Returns the left child of `node`, or LEAF when there is none.
// Child links are stored as 1-based indexes into the shape cache
// (0 == LEAF), hence the `- 1` when converting to an array slot.
static redblack_node_t *
redblack_left(redblack_node_t * node)
{
    if (node->l == LEAF) {
        return LEAF;
    }
    else {
        RUBY_ASSERT(node->l < GET_SHAPE_TREE()->cache_size);
        redblack_node_t * left = &GET_SHAPE_TREE()->shape_cache[node->l - 1];
        return left;
    }
}
64
// Returns the right child of `node`, or LEAF when there is none.
// Mirrors redblack_left: `r` is a 1-based cache index (0 == LEAF).
static redblack_node_t *
redblack_right(redblack_node_t * node)
{
    if (node->r == LEAF) {
        return LEAF;
    }
    else {
        RUBY_ASSERT(node->r < GET_SHAPE_TREE()->cache_size);
        redblack_node_t * right = &GET_SHAPE_TREE()->shape_cache[node->r - 1];
        return right;
    }
}
77
78static redblack_node_t *
79redblack_find(redblack_node_t * tree, ID key)
80{
81 if (tree == LEAF) {
82 return LEAF;
83 }
84 else {
85 RUBY_ASSERT(redblack_left(tree) == LEAF || redblack_left(tree)->key < tree->key);
86 RUBY_ASSERT(redblack_right(tree) == LEAF || redblack_right(tree)->key > tree->key);
87
88 if (tree->key == key) {
89 return tree;
90 }
91 else {
92 if (key < tree->key) {
93 return redblack_find(redblack_left(tree), key);
94 }
95 else {
96 return redblack_find(redblack_right(tree), key);
97 }
98 }
99 }
100}
101
// Returns the color of `node` (RED or BLACK). The color bit is the
// least significant bit of the `value` pointer; LEAF (NULL) reads as BLACK.
static inline char
redblack_color(redblack_node_t * node)
{
    return node && ((uintptr_t)node->value & RED);
}
107
// True when `node` is colored RED (LEAF nodes are always BLACK).
static inline bool
redblack_red_p(redblack_node_t * node)
{
    return redblack_color(node) == RED;
}
113
static inline rb_shape_t *
redblack_value(redblack_node_t * node)
{
    // Color is stored in the bottom bit of the shape pointer
    // Mask away the bit so we get the actual pointer back
    // ((uintptr_t)-1) - 1 is ~(uintptr_t)1: all bits set except the lowest.
    return (rb_shape_t *)((uintptr_t)node->value & (((uintptr_t)-1) - 1));
}
121
// Converts a node pointer back into its 1-based index within the cache
// array; LEAF maps to 0. Inverse of the `- 1` in redblack_left/right.
static redblack_id_t
redblack_id_for(redblack_node_t * node)
{
    RUBY_ASSERT(node || node == LEAF);
    if (node == LEAF) {
        return 0;
    }
    else {
        redblack_node_t * redblack_nodes = GET_SHAPE_TREE()->shape_cache;
        redblack_id_t id = (redblack_id_t)(node - redblack_nodes);
        return id + 1;
    }
}
135
// Bump-allocates a node from the preallocated cache and initializes it
// with the given color, key, value and children. Returns LEAF when the
// cache is exhausted; callers treat that as "caching unavailable", not
// as a fatal error.
static redblack_node_t *
redblack_new(char color, ID key, rb_shape_t * value, redblack_node_t * left, redblack_node_t * right)
{
    if (GET_SHAPE_TREE()->cache_size + 1 >= REDBLACK_CACHE_SIZE) {
        // We're out of cache, just quit
        return LEAF;
    }

    RUBY_ASSERT(left == LEAF || left->key < key);
    RUBY_ASSERT(right == LEAF || right->key > key);

    redblack_node_t * redblack_nodes = GET_SHAPE_TREE()->shape_cache;
    redblack_node_t * node = &redblack_nodes[(GET_SHAPE_TREE()->cache_size)++];
    node->key = key;
    // Color is tagged into the low bit of the value pointer.
    node->value = (rb_shape_t *)((uintptr_t)value | color);
    node->l = redblack_id_for(left);
    node->r = redblack_id_for(right);
    return node;
}
155
// Rebalances a subtree after insertion, following Okasaki's functional
// red-black balance: each of the four branches recognizes one red
// parent/red child configuration and rewrites it as a RED grandparent
// with two BLACK children. When no configuration matches (or the node
// is RED), the subtree is rebuilt unchanged via redblack_new.
static redblack_node_t *
redblack_balance(char color, ID key, rb_shape_t * value, redblack_node_t * left, redblack_node_t * right)
{
    if (color == BLACK) {
        ID new_key, new_left_key, new_right_key;
        rb_shape_t *new_value, *new_left_value, *new_right_value;
        redblack_node_t *new_left_left, *new_left_right, *new_right_left, *new_right_right;

        // Case 1: left child and left-left grandchild are both red.
        if (redblack_red_p(left) && redblack_red_p(redblack_left(left))) {
            new_right_key = key;
            new_right_value = value;
            new_right_right = right;

            new_key = left->key;
            new_value = redblack_value(left);
            new_right_left = redblack_right(left);

            new_left_key = redblack_left(left)->key;
            new_left_value = redblack_value(redblack_left(left));

            new_left_left = redblack_left(redblack_left(left));
            new_left_right = redblack_right(redblack_left(left));
        }
        // Case 2: left child and left-right grandchild are both red.
        else if (redblack_red_p(left) && redblack_red_p(redblack_right(left))) {
            new_right_key = key;
            new_right_value = value;
            new_right_right = right;

            new_left_key = left->key;
            new_left_value = redblack_value(left);
            new_left_left = redblack_left(left);

            new_key = redblack_right(left)->key;
            new_value = redblack_value(redblack_right(left));
            new_left_right = redblack_left(redblack_right(left));
            new_right_left = redblack_right(redblack_right(left));
        }
        // Case 3: right child and right-left grandchild are both red.
        else if (redblack_red_p(right) && redblack_red_p(redblack_left(right))) {
            new_left_key = key;
            new_left_value = value;
            new_left_left = left;

            new_right_key = right->key;
            new_right_value = redblack_value(right);
            new_right_right = redblack_right(right);

            new_key = redblack_left(right)->key;
            new_value = redblack_value(redblack_left(right));
            new_left_right = redblack_left(redblack_left(right));
            new_right_left = redblack_right(redblack_left(right));
        }
        // Case 4: right child and right-right grandchild are both red.
        else if (redblack_red_p(right) && redblack_red_p(redblack_right(right))) {
            new_left_key = key;
            new_left_value = value;
            new_left_left = left;

            new_key = right->key;
            new_value = redblack_value(right);
            new_left_right = redblack_left(right);

            new_right_key = redblack_right(right)->key;
            new_right_value = redblack_value(redblack_right(right));
            new_right_left = redblack_left(redblack_right(right));
            new_right_right = redblack_right(redblack_right(right));
        }
        else {
            // No red-red violation: rebuild as-is.
            return redblack_new(color, key, value, left, right);
        }

        // BST ordering must survive the rotation.
        RUBY_ASSERT(new_left_key < new_key);
        RUBY_ASSERT(new_right_key > new_key);
        RUBY_ASSERT(new_left_left == LEAF || new_left_left->key < new_left_key);
        RUBY_ASSERT(new_left_right == LEAF || new_left_right->key > new_left_key);
        RUBY_ASSERT(new_left_right == LEAF || new_left_right->key < new_key);
        RUBY_ASSERT(new_right_left == LEAF || new_right_left->key < new_right_key);
        RUBY_ASSERT(new_right_left == LEAF || new_right_left->key > new_key);
        RUBY_ASSERT(new_right_right == LEAF || new_right_right->key > new_right_key);

        return redblack_new(
                RED, new_key, new_value,
                redblack_new(BLACK, new_left_key, new_left_value, new_left_left, new_left_right),
                redblack_new(BLACK, new_right_key, new_right_value, new_right_left, new_right_right));
    }

    return redblack_new(color, key, value, left, right);
}
242
// Recursive insertion helper: descends by key, inserts a RED node at
// the bottom, and rebalances on the way back up. If the key already
// exists the existing subtree is returned unchanged.
static redblack_node_t *
redblack_insert_aux(redblack_node_t * tree, ID key, rb_shape_t * value)
{
    if (tree == LEAF) {
        return redblack_new(RED, key, value, LEAF, LEAF);
    }
    else {
        redblack_node_t *left, *right;
        if (key < tree->key) {
            left = redblack_insert_aux(redblack_left(tree), key, value);
            RUBY_ASSERT(left != LEAF);
            right = redblack_right(tree);
            RUBY_ASSERT(right == LEAF || right->key > tree->key);
        }
        else if (key > tree->key) {
            left = redblack_left(tree);
            RUBY_ASSERT(left == LEAF || left->key < tree->key);
            right = redblack_insert_aux(redblack_right(tree), key, value);
            RUBY_ASSERT(right != LEAF);
        }
        else {
            // Key already present; no update, keep the existing node.
            return tree;
        }

        return redblack_balance(
            redblack_color(tree),
            tree->key,
            redblack_value(tree),
            left,
            right
        );
    }
}
276
// Clears the color bit on `node`, making it BLACK (redblack_value
// returns the untagged pointer). Used to blacken the root after insert.
static redblack_node_t *
redblack_force_black(redblack_node_t * node)
{
    node->value = redblack_value(node);
    return node;
}
283
// Inserts (key, value) into `tree`, returning the new root. The root
// is forced BLACK per the red-black invariant.
static redblack_node_t *
redblack_insert(redblack_node_t * tree, ID key, rb_shape_t * value)
{
    redblack_node_t * root = redblack_insert_aux(tree, key, value);

    if (redblack_red_p(root)) {
        return redblack_force_black(root);
    }
    else {
        return root;
    }
}
296
// Process-global shape tree singleton; allocated in Init_default_shapes().
rb_shape_tree_t *rb_shape_tree_ptr = NULL;
298
299/*
300 * Shape getters
301 */
303rb_shape_get_root_shape(void)
304{
305 return GET_SHAPE_TREE()->root_shape;
306}
307
// Returns the id of `shape`: its index within the contiguous
// shape_list array, derived by pointer subtraction.
shape_id_t
rb_shape_id(rb_shape_t * shape)
{
    return (shape_id_t)(shape - GET_SHAPE_TREE()->shape_list);
}
313
// Invokes `callback(shape, data)` for every shape allocated so far,
// in allocation (id) order, by walking the contiguous shape_list.
void
rb_shape_each_shape(each_shape_callback callback, void *data)
{
    rb_shape_t *cursor = rb_shape_get_root_shape();
    // next_shape_id is one past the last allocated shape.
    rb_shape_t *end = rb_shape_get_shape_by_id(GET_SHAPE_TREE()->next_shape_id);
    while (cursor < end) {
        callback(cursor, data);
        cursor += 1;
    }
}
324
// Resolves a shape id to its rb_shape_t in the global shape list.
// `shape_id` must be valid (not INVALID_SHAPE_ID).
RUBY_FUNC_EXPORTED rb_shape_t*
rb_shape_get_shape_by_id(shape_id_t shape_id)
{
    RUBY_ASSERT(shape_id != INVALID_SHAPE_ID);

    rb_shape_t *shape = &GET_SHAPE_TREE()->shape_list[shape_id];
    return shape;
}
333
335rb_shape_get_parent(rb_shape_t * shape)
336{
337 return rb_shape_get_shape_by_id(shape->parent_id);
338}
339
340#if !SHAPE_IN_BASIC_FLAGS
341shape_id_t rb_generic_shape_id(VALUE obj);
342#endif
343
// Returns the shape id of `obj`. Special constants share a dedicated
// shape; otherwise the id is read from the object's flags (when
// SHAPE_IN_BASIC_FLAGS) or from type-specific / generic storage.
RUBY_FUNC_EXPORTED shape_id_t
rb_shape_get_shape_id(VALUE obj)
{
    if (RB_SPECIAL_CONST_P(obj)) {
        return SPECIAL_CONST_SHAPE_ID;
    }

#if SHAPE_IN_BASIC_FLAGS
    return RBASIC_SHAPE_ID(obj);
#else
    switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
        return ROBJECT_SHAPE_ID(obj);
        break;
      case T_CLASS:
      case T_MODULE:
        return RCLASS_SHAPE_ID(obj);
      default:
        // Other types keep their shape id in the generic ivar table.
        return rb_generic_shape_id(obj);
    }
#endif
}
366
// Returns the number of shapes on the path from `shape` up to (and
// including) the root; a root shape has depth 1.
size_t
rb_shape_depth(rb_shape_t * shape)
{
    size_t depth = 1;

    while (shape->parent_id != INVALID_SHAPE_ID) {
        depth++;
        shape = rb_shape_get_parent(shape);
    }

    return depth;
}
379
381rb_shape_get_shape(VALUE obj)
382{
383 return rb_shape_get_shape_by_id(rb_shape_get_shape_id(obj));
384}
385
// Bump-allocates the next slot from the preallocated shape list.
// Aborts the process when the shape budget is exhausted; callers that
// can degrade gracefully check MAX_SHAPE_ID before calling.
static rb_shape_t *
shape_alloc(void)
{
    shape_id_t shape_id = GET_SHAPE_TREE()->next_shape_id;
    GET_SHAPE_TREE()->next_shape_id++;

    if (shape_id == (MAX_SHAPE_ID + 1)) {
        // TODO: Make an OutOfShapesError ??
        rb_bug("Out of shapes");
    }

    return &GET_SHAPE_TREE()->shape_list[shape_id];
}
399
// Allocates a fresh shape with the given edge name and parent id.
// Other fields (type, capacity, size_pool_index) are left for callers
// such as rb_shape_alloc to fill in.
static rb_shape_t *
rb_shape_alloc_with_parent_id(ID edge_name, shape_id_t parent_id)
{
    rb_shape_t * shape = shape_alloc();

    shape->edge_name = edge_name;
    shape->next_iv_index = 0;
    shape->parent_id = parent_id;
    shape->edges = NULL;

    return shape;
}
412
413static rb_shape_t *
414rb_shape_alloc(ID edge_name, rb_shape_t * parent, enum shape_type type)
415{
416 rb_shape_t * shape = rb_shape_alloc_with_parent_id(edge_name, rb_shape_id(parent));
417 shape->type = (uint8_t)type;
418 shape->size_pool_index = parent->size_pool_index;
419 shape->capacity = parent->capacity;
420 shape->edges = 0;
421 return shape;
422}
423
#ifdef HAVE_MMAP
// Builds (memoizing along the way) the red-black index of `shape`'s
// SHAPE_IVAR ancestors, keyed by edge name. Non-IVAR transitions share
// their parent's index. Returns the index for `shape` (LEAF when the
// node cache is exhausted).
static redblack_node_t *
redblack_cache_ancestors(rb_shape_t * shape)
{
    if (!(shape->ancestor_index || shape->parent_id == INVALID_SHAPE_ID)) {
        redblack_node_t * parent_index;

        parent_index = redblack_cache_ancestors(rb_shape_get_parent(shape));

        if (shape->type == SHAPE_IVAR) {
            shape->ancestor_index = redblack_insert(parent_index, shape->edge_name, shape);

#if RUBY_DEBUG
            if (shape->ancestor_index) {
                redblack_node_t *inserted_node = redblack_find(shape->ancestor_index, shape->edge_name);
                RUBY_ASSERT(inserted_node);
                RUBY_ASSERT(redblack_value(inserted_node) == shape);
            }
#endif
        }
        else {
            // Frozen / T_OBJECT transitions add no ivar: reuse parent's index.
            shape->ancestor_index = parent_index;
        }
    }

    return shape->ancestor_index;
}
#else
// Without mmap there is no preallocated node cache, so ancestor
// caching is disabled and lookups always fall back to the tree walk.
static redblack_node_t *
redblack_cache_ancestors(rb_shape_t * shape)
{
    return LEAF;
}
#endif
458
// Allocates and initializes a child shape of `shape` for the given
// transition type. For SHAPE_IVAR this bumps next_iv_index, grows
// capacity when full, and builds the ancestor cache past the threshold.
static rb_shape_t *
rb_shape_alloc_new_child(ID id, rb_shape_t * shape, enum shape_type shape_type)
{
    rb_shape_t * new_shape = rb_shape_alloc(id, shape, shape_type);

    switch (shape_type) {
      case SHAPE_IVAR:
        if (UNLIKELY(shape->next_iv_index >= shape->capacity)) {
            RUBY_ASSERT(shape->next_iv_index == shape->capacity);
            new_shape->capacity = (uint32_t)rb_malloc_grow_capa(shape->capacity, sizeof(VALUE));
        }
        RUBY_ASSERT(new_shape->capacity > shape->next_iv_index);
        new_shape->next_iv_index = shape->next_iv_index + 1;
        if (new_shape->next_iv_index > ANCESTOR_CACHE_THRESHOLD) {
            redblack_cache_ancestors(new_shape);
        }
        break;
      case SHAPE_FROZEN:
      case SHAPE_T_OBJECT:
        // These transitions add no ivar; the count carries over.
        new_shape->next_iv_index = shape->next_iv_index;
        break;
      case SHAPE_OBJ_TOO_COMPLEX:
      case SHAPE_ROOT:
        rb_bug("Unreachable");
        break;
    }

    return new_shape;
}
488
// Finds (or, when allowed, creates) the child of `shape` reached by the
// (id, shape_type) transition, under the VM lock. Sets *variation_created
// when a new sibling edge was added to an existing edge table. Returns
// the TOO_COMPLEX shape when creation is disallowed or shapes ran out.
static rb_shape_t*
get_next_shape_internal(rb_shape_t * shape, ID id, enum shape_type shape_type, bool * variation_created, bool new_variations_allowed)
{
    rb_shape_t *res = NULL;

    // There should never be outgoing edges from "too complex"
    RUBY_ASSERT(rb_shape_id(shape) != OBJ_TOO_COMPLEX_SHAPE_ID);

    *variation_created = false;

    RB_VM_LOCK_ENTER();
    {
        // If the current shape has children
        if (shape->edges) {
            // Check if it only has one child
            if (SINGLE_CHILD_P(shape->edges)) {
                rb_shape_t * child = SINGLE_CHILD(shape->edges);
                // If the one child has a matching edge name, then great,
                // we found what we want.
                if (child->edge_name == id) {
                    res = child;
                }
            }
            else {
                // If it has more than one child, do a hash lookup to find it.
                VALUE lookup_result;
                if (rb_id_table_lookup(shape->edges, id, &lookup_result)) {
                    res = (rb_shape_t *)lookup_result;
                }
            }
        }

        // If we didn't find the shape we're looking for we create it.
        if (!res) {
            // If we're not allowed to create a new variation, or if we're out of shapes
            // we return TOO_COMPLEX_SHAPE.
            if (!new_variations_allowed || GET_SHAPE_TREE()->next_shape_id > MAX_SHAPE_ID) {
                res = rb_shape_get_shape_by_id(OBJ_TOO_COMPLEX_SHAPE_ID);
            }
            else {
                rb_shape_t * new_shape = rb_shape_alloc_new_child(id, shape, shape_type);

                if (!shape->edges) {
                    // If the shape had no edge yet, we can directly set the new child
                    shape->edges = TAG_SINGLE_CHILD(new_shape);
                }
                else {
                    // If the edge was single child we need to allocate a table.
                    if (SINGLE_CHILD_P(shape->edges)) {
                        rb_shape_t * old_child = SINGLE_CHILD(shape->edges);
                        shape->edges = rb_id_table_create(2);
                        rb_id_table_insert(shape->edges, old_child->edge_name, (VALUE)old_child);
                    }

                    rb_id_table_insert(shape->edges, new_shape->edge_name, (VALUE)new_shape);
                    *variation_created = true;
                }

                res = new_shape;
            }
        }
    }
    RB_VM_LOCK_LEAVE();

    return res;
}
555
// True when `shape` is a SHAPE_FROZEN transition shape.
int
rb_shape_frozen_shape_p(rb_shape_t* shape)
{
    return SHAPE_FROZEN == (enum shape_type)shape->type;
}
561
// Walks up from `shape` looking for the SHAPE_IVAR edge named `id`.
// On success, stores the removed shape in *removed_shape and returns a
// shape equivalent to `shape` minus that ivar, rebuilding the chain of
// descendants below the removal point. Returns NULL when `id` is not
// found; may return the TOO_COMPLEX shape if rebuilding overflows.
static rb_shape_t *
remove_shape_recursive(rb_shape_t *shape, ID id, rb_shape_t **removed_shape)
{
    if (shape->parent_id == INVALID_SHAPE_ID) {
        // We've hit the top of the shape tree and couldn't find the
        // IV we wanted to remove, so return NULL
        return NULL;
    }
    else {
        if (shape->type == SHAPE_IVAR && shape->edge_name == id) {
            *removed_shape = shape;

            return rb_shape_get_parent(shape);
        }
        else {
            // This isn't the IV we want to remove, keep walking up.
            rb_shape_t *new_parent = remove_shape_recursive(rb_shape_get_parent(shape), id, removed_shape);

            // We found a new parent. Create a child of the new parent that
            // has the same attributes as this shape.
            if (new_parent) {
                if (UNLIKELY(new_parent->type == SHAPE_OBJ_TOO_COMPLEX)) {
                    return new_parent;
                }

                bool dont_care;
                rb_shape_t *new_child = get_next_shape_internal(new_parent, shape->edge_name, shape->type, &dont_care, true);
                if (UNLIKELY(new_child->type == SHAPE_OBJ_TOO_COMPLEX)) {
                    return new_child;
                }

                RUBY_ASSERT(new_child->capacity <= shape->capacity);

                return new_child;
            }
            else {
                // We went all the way to the top of the shape tree and couldn't
                // find an IV to remove, so return NULL
                return NULL;
            }
        }
    }
}
605
// Removes ivar `id` from `obj`: transitions the object's shape and
// compacts its ivar buffer, storing the removed value in *removed.
// Returns false when the operation must fall back to the "too complex"
// (hash-backed) representation; true otherwise (including when the
// ivar was not present).
bool
rb_shape_transition_shape_remove_ivar(VALUE obj, ID id, rb_shape_t *shape, VALUE *removed)
{
    if (UNLIKELY(shape->type == SHAPE_OBJ_TOO_COMPLEX)) {
        return false;
    }

    rb_shape_t *removed_shape = NULL;
    rb_shape_t *new_shape = remove_shape_recursive(shape, id, &removed_shape);
    if (new_shape) {
        RUBY_ASSERT(removed_shape != NULL);

        if (UNLIKELY(new_shape->type == SHAPE_OBJ_TOO_COMPLEX)) {
            return false;
        }

        RUBY_ASSERT(new_shape->next_iv_index == shape->next_iv_index - 1);

        // Locate the object's ivar buffer; its layout depends on the type.
        VALUE *ivptr;
        switch(BUILTIN_TYPE(obj)) {
          case T_CLASS:
          case T_MODULE:
            ivptr = RCLASS_IVPTR(obj);
            break;
          case T_OBJECT:
            ivptr = ROBJECT_IVPTR(obj);
            break;
          default: {
            struct gen_ivtbl *ivtbl;
            rb_gen_ivtbl_get(obj, id, &ivtbl);
            ivptr = ivtbl->as.shape.ivptr;
            break;
          }
        }

        *removed = ivptr[removed_shape->next_iv_index - 1];

        // Shift the trailing ivars down over the removed slot.
        memmove(&ivptr[removed_shape->next_iv_index - 1], &ivptr[removed_shape->next_iv_index],
                ((new_shape->next_iv_index + 1) - removed_shape->next_iv_index) * sizeof(VALUE));

        // Re-embed objects when instances become small enough
        // This is necessary because YJIT assumes that objects with the same shape
        // have the same embeddedness for efficiency (avoid extra checks)
        if (BUILTIN_TYPE(obj) == T_OBJECT &&
                !RB_FL_TEST_RAW(obj, ROBJECT_EMBED) &&
                rb_obj_embedded_size(new_shape->next_iv_index) <= rb_gc_obj_slot_size(obj)) {
            RB_FL_SET_RAW(obj, ROBJECT_EMBED);
            memcpy(ROBJECT_IVPTR(obj), ivptr, new_shape->next_iv_index * sizeof(VALUE));
            xfree(ivptr);
        }

        rb_shape_set_shape(obj, new_shape);
    }
    return true;
}
661
663rb_shape_transition_shape_frozen(VALUE obj)
664{
665 rb_shape_t* shape = rb_shape_get_shape(obj);
666 RUBY_ASSERT(shape);
667 RUBY_ASSERT(RB_OBJ_FROZEN(obj));
668
669 if (rb_shape_frozen_shape_p(shape) || rb_shape_obj_too_complex(obj)) {
670 return shape;
671 }
672
673 rb_shape_t* next_shape;
674
675 if (shape == rb_shape_get_root_shape()) {
676 return rb_shape_get_shape_by_id(SPECIAL_CONST_SHAPE_ID);
677 }
678
679 bool dont_care;
680 next_shape = get_next_shape_internal(shape, (ID)id_frozen, SHAPE_FROZEN, &dont_care, true);
681
682 RUBY_ASSERT(next_shape);
683 return next_shape;
684}
685
686/*
687 * This function is used for assertions where we don't want to increment
688 * max_iv_count
689 */
691rb_shape_get_next_iv_shape(rb_shape_t* shape, ID id)
692{
693 RUBY_ASSERT(!is_instance_id(id) || RTEST(rb_sym2str(ID2SYM(id))));
694 bool dont_care;
695 return get_next_shape_internal(shape, id, SHAPE_IVAR, &dont_care, true);
696}
697
699rb_shape_get_next(rb_shape_t *shape, VALUE obj, ID id)
700{
701 RUBY_ASSERT(!is_instance_id(id) || RTEST(rb_sym2str(ID2SYM(id))));
702 if (UNLIKELY(shape->type == SHAPE_OBJ_TOO_COMPLEX)) {
703 return shape;
704 }
705
706#if RUBY_DEBUG
707 attr_index_t index;
708 if (rb_shape_get_iv_index(shape, id, &index)) {
709 rb_bug("rb_shape_get_next: trying to create ivar that already exists at index %u", index);
710 }
711#endif
712
713 bool allow_new_shape = true;
714
715 if (BUILTIN_TYPE(obj) == T_OBJECT) {
716 VALUE klass = rb_obj_class(obj);
717 allow_new_shape = RCLASS_EXT(klass)->variation_count < SHAPE_MAX_VARIATIONS;
718 }
719
720 bool variation_created = false;
721 rb_shape_t *new_shape = get_next_shape_internal(shape, id, SHAPE_IVAR, &variation_created, allow_new_shape);
722
723 // Check if we should update max_iv_count on the object's class
724 if (BUILTIN_TYPE(obj) == T_OBJECT) {
725 VALUE klass = rb_obj_class(obj);
726 if (new_shape->next_iv_index > RCLASS_EXT(klass)->max_iv_count) {
727 RCLASS_EXT(klass)->max_iv_count = new_shape->next_iv_index;
728 }
729
730 if (variation_created) {
731 RCLASS_EXT(klass)->variation_count++;
732 if (rb_warning_category_enabled_p(RB_WARN_CATEGORY_PERFORMANCE)) {
733 if (RCLASS_EXT(klass)->variation_count >= SHAPE_MAX_VARIATIONS) {
736 "The class %"PRIsVALUE" reached %d shape variations, instance variables accesses will be slower and memory usage increased.\n"
737 "It is recommended to define instance variables in a consistent order, for instance by eagerly defining them all in the #initialize method.",
738 rb_class_path(klass),
739 SHAPE_MAX_VARIATIONS
740 );
741 }
742 }
743 }
744 }
745
746 return new_shape;
747}
748
// Same as rb_shape_get_iv_index, but uses a provided valid shape id and index
// to return a result faster if branches of the shape tree are closely related.
bool
rb_shape_get_iv_index_with_hint(shape_id_t shape_id, ID id, attr_index_t *value, shape_id_t *shape_id_hint)
{
    attr_index_t index_hint = *value;
    rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
    rb_shape_t *initial_shape = shape;

    if (*shape_id_hint == INVALID_SHAPE_ID) {
        *shape_id_hint = shape_id;
        return rb_shape_get_iv_index(shape, id, value);
    }

    rb_shape_t * shape_hint = rb_shape_get_shape_by_id(*shape_id_hint);

    // We assume it's likely shape_id_hint and shape_id have a close common
    // ancestor, so we check up to ANCESTOR_SEARCH_MAX_DEPTH ancestors before
    // eventually using the index, as in case of a match it will be faster.
    // However if the shape doesn't have an index, we walk the entire tree.
    int depth = INT_MAX;
    if (shape->ancestor_index && shape->next_iv_index >= ANCESTOR_CACHE_THRESHOLD) {
        depth = ANCESTOR_SEARCH_MAX_DEPTH;
    }

    while (depth > 0 && shape->next_iv_index > index_hint) {
        // Bring the hinted shape down to the same ivar count before comparing.
        while (shape_hint->next_iv_index > shape->next_iv_index) {
            shape_hint = rb_shape_get_parent(shape_hint);
        }

        if (shape_hint == shape) {
            // We've found a common ancestor so use the index hint
            *value = index_hint;
            *shape_id_hint = rb_shape_id(shape);
            return true;
        }
        if (shape->edge_name == id) {
            // We found the matching id before a common ancestor
            *value = shape->next_iv_index - 1;
            *shape_id_hint = rb_shape_id(shape);
            return true;
        }

        shape = rb_shape_get_parent(shape);
        depth--;
    }

    // If the original shape had an index but its ancestor doesn't
    // we switch back to the original one as it will be faster.
    if (!shape->ancestor_index && initial_shape->ancestor_index) {
        shape = initial_shape;
    }
    *shape_id_hint = shape_id;
    return rb_shape_get_iv_index(shape, id, value);
}
804
// Walks from `shape` toward the root looking for the SHAPE_IVAR edge
// named `id`. On a hit, stores the ivar's buffer index in *value and
// returns true; returns false if the root is reached without a match.
static bool
shape_get_iv_index(rb_shape_t *shape, ID id, attr_index_t *value)
{
    while (shape->parent_id != INVALID_SHAPE_ID) {
        if (shape->edge_name == id) {
            enum shape_type shape_type;
            shape_type = (enum shape_type)shape->type;

            switch (shape_type) {
              case SHAPE_IVAR:
                RUBY_ASSERT(shape->next_iv_index > 0);
                // The ivar occupies the slot just below next_iv_index.
                *value = shape->next_iv_index - 1;
                return true;
              case SHAPE_ROOT:
              case SHAPE_T_OBJECT:
                return false;
              case SHAPE_OBJ_TOO_COMPLEX:
              case SHAPE_FROZEN:
                rb_bug("Ivar should not exist on transition");
            }
        }

        shape = rb_shape_get_parent(shape);
    }

    return false;
}
832
// Attempts the red-black ancestor-cache lookup for ivar `id`. Returns
// true and fills *value on a cache hit; false means "not cached" and
// the caller falls back to the linear tree walk. Only shapes past the
// caching threshold carry an index.
static bool
shape_cache_get_iv_index(rb_shape_t *shape, ID id, attr_index_t *value)
{
    if (shape->ancestor_index && shape->next_iv_index >= ANCESTOR_CACHE_THRESHOLD) {
        redblack_node_t *node = redblack_find(shape->ancestor_index, id);
        if (node) {
            rb_shape_t *shape = redblack_value(node);
            *value = shape->next_iv_index - 1;

#if RUBY_DEBUG
            // Cross-check the cache against the authoritative tree walk.
            attr_index_t shape_tree_index;
            RUBY_ASSERT(shape_get_iv_index(shape, id, &shape_tree_index));
            RUBY_ASSERT(shape_tree_index == *value);
#endif

            return true;
        }

        /* Verify the cache is correct by checking that this instance variable
         * does not exist in the shape tree either. */
        RUBY_ASSERT(!shape_get_iv_index(shape, id, value));
    }

    return false;
}
858
// Public lookup: resolves ivar `id` to its buffer index for `shape`,
// consulting the ancestor cache first and falling back to the tree walk.
bool
rb_shape_get_iv_index(rb_shape_t *shape, ID id, attr_index_t *value)
{
    // It doesn't make sense to ask for the index of an IV that's stored
    // on an object that is "too complex" as it uses a hash for storing IVs
    RUBY_ASSERT(rb_shape_id(shape) != OBJ_TOO_COMPLEX_SHAPE_ID);

    if (!shape_cache_get_iv_index(shape, id, value)) {
        return shape_get_iv_index(shape, id, value);
    }

    return true;
}
872
// Stores `shape`'s id on `obj`.
void
rb_shape_set_shape(VALUE obj, rb_shape_t* shape)
{
    rb_shape_set_shape_id(obj, rb_shape_id(shape));
}
878
// Byte offset of the shape id within an object's flags word, used by
// the JITs to load the shape id directly.
// NOTE(review): the expression mixes units — SHAPE_ID_NUM_BITS (bits)
// divided by sizeof(uintptr_t) (bytes); presumably it yields the intended
// offset on supported configurations. Confirm against shape.h before
// touching.
int32_t
rb_shape_id_offset(void)
{
    return sizeof(uintptr_t) - SHAPE_ID_NUM_BITS / sizeof(uintptr_t);
}
884
886rb_shape_traverse_from_new_root(rb_shape_t *initial_shape, rb_shape_t *dest_shape)
887{
888 RUBY_ASSERT(initial_shape->type == SHAPE_T_OBJECT);
889 rb_shape_t *next_shape = initial_shape;
890
891 if (dest_shape->type != initial_shape->type) {
892 next_shape = rb_shape_traverse_from_new_root(initial_shape, rb_shape_get_parent(dest_shape));
893 if (!next_shape) {
894 return NULL;
895 }
896 }
897
898 switch ((enum shape_type)dest_shape->type) {
899 case SHAPE_IVAR:
900 case SHAPE_FROZEN:
901 if (!next_shape->edges) {
902 return NULL;
903 }
904
905 VALUE lookup_result;
906 if (SINGLE_CHILD_P(next_shape->edges)) {
907 rb_shape_t * child = SINGLE_CHILD(next_shape->edges);
908 if (child->edge_name == dest_shape->edge_name) {
909 return child;
910 }
911 else {
912 return NULL;
913 }
914 }
915 else {
916 if (rb_id_table_lookup(next_shape->edges, dest_shape->edge_name, &lookup_result)) {
917 next_shape = (rb_shape_t *)lookup_result;
918 }
919 else {
920 return NULL;
921 }
922 }
923 break;
924 case SHAPE_ROOT:
925 case SHAPE_T_OBJECT:
926 break;
927 case SHAPE_OBJ_TOO_COMPLEX:
928 rb_bug("Unreachable");
929 break;
930 }
931
932 return next_shape;
933}
934
936rb_shape_rebuild_shape(rb_shape_t * initial_shape, rb_shape_t * dest_shape)
937{
938 RUBY_ASSERT(rb_shape_id(initial_shape) != OBJ_TOO_COMPLEX_SHAPE_ID);
939 RUBY_ASSERT(rb_shape_id(dest_shape) != OBJ_TOO_COMPLEX_SHAPE_ID);
940
941 rb_shape_t * midway_shape;
942
943 RUBY_ASSERT(initial_shape->type == SHAPE_T_OBJECT);
944
945 if (dest_shape->type != initial_shape->type) {
946 midway_shape = rb_shape_rebuild_shape(initial_shape, rb_shape_get_parent(dest_shape));
947 if (UNLIKELY(rb_shape_id(midway_shape) == OBJ_TOO_COMPLEX_SHAPE_ID)) {
948 return midway_shape;
949 }
950 }
951 else {
952 midway_shape = initial_shape;
953 }
954
955 switch ((enum shape_type)dest_shape->type) {
956 case SHAPE_IVAR:
957 midway_shape = rb_shape_get_next_iv_shape(midway_shape, dest_shape->edge_name);
958 break;
959 case SHAPE_ROOT:
960 case SHAPE_FROZEN:
961 case SHAPE_T_OBJECT:
962 break;
963 case SHAPE_OBJ_TOO_COMPLEX:
964 rb_bug("Unreachable");
965 break;
966 }
967
968 return midway_shape;
969}
970
// True when `obj` uses the hash-backed "too complex" ivar representation.
RUBY_FUNC_EXPORTED bool
rb_shape_obj_too_complex(VALUE obj)
{
    return rb_shape_get_shape_id(obj) == OBJ_TOO_COMPLEX_SHAPE_ID;
}
976
977size_t
978rb_shape_edges_count(rb_shape_t *shape)
979{
980 if (shape->edges) {
981 if (SINGLE_CHILD_P(shape->edges)) {
982 return 1;
983 }
984 else {
985 return rb_id_table_size(shape->edges);
986 }
987 }
988 return 0;
989}
990
// Memory footprint of `shape` in bytes, including its edge table when
// one exists (the tagged single-child encoding allocates nothing).
size_t
rb_shape_memsize(rb_shape_t *shape)
{
    size_t memsize = sizeof(rb_shape_t);
    if (shape->edges && !SINGLE_CHILD_P(shape->edges)) {
        memsize += rb_id_table_memsize(shape->edges);
    }
    return memsize;
}
1000
1001#if SHAPE_DEBUG
1002/*
1003 * Exposing Shape to Ruby via RubyVM.debug_shape
1004 */
1005
1006/* :nodoc: */
1007static VALUE
1008rb_shape_too_complex(VALUE self)
1009{
1010 rb_shape_t * shape;
1011 shape = rb_shape_get_shape_by_id(NUM2INT(rb_struct_getmember(self, rb_intern("id"))));
1012 if (rb_shape_id(shape) == OBJ_TOO_COMPLEX_SHAPE_ID) {
1013 return Qtrue;
1014 }
1015 else {
1016 return Qfalse;
1017 }
1018}
1019
1020static VALUE
1021parse_key(ID key)
1022{
1023 if (is_instance_id(key)) {
1024 return ID2SYM(key);
1025 }
1026 return LONG2NUM(key);
1027}
1028
1029static VALUE rb_shape_edge_name(rb_shape_t * shape);
1030
// Wraps `shape` in a frozen RubyVM::Shape struct instance for the
// debug API.
static VALUE
rb_shape_t_to_rb_cShape(rb_shape_t *shape)
{
    VALUE rb_cShape = rb_const_get(rb_cRubyVM, rb_intern("Shape"));

    VALUE obj = rb_struct_new(rb_cShape,
            INT2NUM(rb_shape_id(shape)),
            INT2NUM(shape->parent_id),
            rb_shape_edge_name(shape),
            INT2NUM(shape->next_iv_index),
            INT2NUM(shape->size_pool_index),
            INT2NUM(shape->type),
            INT2NUM(shape->capacity));
    rb_obj_freeze(obj);
    return obj;
}
1047
// id_table iterator: adds key => RubyVM::Shape entries to the hash
// passed through `ref`.
static enum rb_id_table_iterator_result
rb_edges_to_hash(ID key, VALUE value, void *ref)
{
    rb_hash_aset(*(VALUE *)ref, parse_key(key), rb_shape_t_to_rb_cShape((rb_shape_t*)value));
    return ID_TABLE_CONTINUE;
}
1054
/* :nodoc: */
// Debug API: returns a Hash of edge name => RubyVM::Shape for the
// children of the shape denoted by struct `self`.
static VALUE
rb_shape_edges(VALUE self)
{
    rb_shape_t* shape;

    shape = rb_shape_get_shape_by_id(NUM2INT(rb_struct_getmember(self, rb_intern("id"))));

    VALUE hash = rb_hash_new();

    if (shape->edges) {
        // Handle the tagged single-child encoding separately.
        if (SINGLE_CHILD_P(shape->edges)) {
            rb_shape_t * child = SINGLE_CHILD(shape->edges);
            rb_edges_to_hash(child->edge_name, (VALUE)child, &hash);
        }
        else {
            rb_id_table_foreach(shape->edges, rb_edges_to_hash, &hash);
        }
    }

    return hash;
}
1077
// Presents a shape's incoming edge name for the debug struct: a Symbol
// for instance-variable edges, Qnil when there is no edge.
// NOTE(review): the non-ivar branch returns the shape's *capacity*
// rather than anything derived from edge_name — matches upstream, but
// looks surprising; confirm intent before relying on it.
static VALUE
rb_shape_edge_name(rb_shape_t * shape)
{
    if (shape->edge_name) {
        if (is_instance_id(shape->edge_name)) {
            return ID2SYM(shape->edge_name);
        }
        return INT2NUM(shape->capacity);
    }
    return Qnil;
}
1089
/* :nodoc: */
// Debug API: depth (ancestor count, self-inclusive) of the shape
// denoted by struct `self`.
static VALUE
rb_shape_export_depth(VALUE self)
{
    rb_shape_t* shape;
    shape = rb_shape_get_shape_by_id(NUM2INT(rb_struct_getmember(self, rb_intern("id"))));
    return SIZET2NUM(rb_shape_depth(shape));
}
1098
/* :nodoc: */
// Debug API: the parent shape as a RubyVM::Shape struct, or nil for
// the root.
static VALUE
rb_shape_parent(VALUE self)
{
    rb_shape_t * shape;
    shape = rb_shape_get_shape_by_id(NUM2INT(rb_struct_getmember(self, rb_intern("id"))));
    if (shape->parent_id != INVALID_SHAPE_ID) {
        return rb_shape_t_to_rb_cShape(rb_shape_get_parent(shape));
    }
    else {
        return Qnil;
    }
}
1112
/* :nodoc: */
// Debug API: returns `obj`'s shape as a RubyVM::Shape struct.
static VALUE
rb_shape_debug_shape(VALUE self, VALUE obj)
{
    return rb_shape_t_to_rb_cShape(rb_shape_get_shape(obj));
}
1119
/* :nodoc: */
// Debug API: the root shape as a RubyVM::Shape struct.
static VALUE
rb_shape_root_shape(VALUE self)
{
    return rb_shape_t_to_rb_cShape(rb_shape_get_root_shape());
}
1126
/* :nodoc: */
// Debug API: how many shape slots remain before the tree is full.
static VALUE
rb_shape_shapes_available(VALUE self)
{
    return INT2NUM(MAX_SHAPE_ID - (GET_SHAPE_TREE()->next_shape_id - 1));
}
1133
/* :nodoc: */
// Debug/test API: artificially exhausts the shape budget, leaving
// `offset` slots (default 0) available.
static VALUE
rb_shape_exhaust(int argc, VALUE *argv, VALUE self)
{
    rb_check_arity(argc, 0, 1);
    int offset = argc == 1 ? NUM2INT(argv[0]) : 0;
    GET_SHAPE_TREE()->next_shape_id = MAX_SHAPE_ID - offset + 1;
    return Qnil;
}
1143
1144VALUE rb_obj_shape(rb_shape_t* shape);
1145
// id_table iterator: adds key => rb_obj_shape(child) Hash entries to
// the hash passed through `ref`.
static enum rb_id_table_iterator_result collect_keys_and_values(ID key, VALUE value, void *ref)
{
    rb_hash_aset(*(VALUE *)ref, parse_key(key), rb_obj_shape((rb_shape_t*)value));
    return ID_TABLE_CONTINUE;
}
1151
// Converts an edge table (or tagged single child) into a Ruby Hash of
// edge name => recursive shape Hash, for shape_transition_tree.
static VALUE edges(struct rb_id_table* edges)
{
    VALUE hash = rb_hash_new();
    if (SINGLE_CHILD_P(edges)) {
        rb_shape_t * child = SINGLE_CHILD(edges);
        collect_keys_and_values(child->edge_name, (VALUE)child, &hash);
    }
    else {
        rb_id_table_foreach(edges, collect_keys_and_values, &hash);
    }
    return hash;
}
1164
1165/* :nodoc: */
1166VALUE
1167rb_obj_shape(rb_shape_t* shape)
1168{
1169 VALUE rb_shape = rb_hash_new();
1170
1171 rb_hash_aset(rb_shape, ID2SYM(rb_intern("id")), INT2NUM(rb_shape_id(shape)));
1172 rb_hash_aset(rb_shape, ID2SYM(rb_intern("edges")), edges(shape->edges));
1173
1174 if (shape == rb_shape_get_root_shape()) {
1175 rb_hash_aset(rb_shape, ID2SYM(rb_intern("parent_id")), INT2NUM(ROOT_SHAPE_ID));
1176 }
1177 else {
1178 rb_hash_aset(rb_shape, ID2SYM(rb_intern("parent_id")), INT2NUM(shape->parent_id));
1179 }
1180
1181 rb_hash_aset(rb_shape, ID2SYM(rb_intern("edge_name")), rb_id2str(shape->edge_name));
1182 return rb_shape;
1183}
1184
1185/* :nodoc: */
1186static VALUE
1187shape_transition_tree(VALUE self)
1188{
1189 return rb_obj_shape(rb_shape_get_root_shape());
1190}
1191
1192/* :nodoc: */
1193static VALUE
1194rb_shape_find_by_id(VALUE mod, VALUE id)
1195{
1196 shape_id_t shape_id = NUM2UINT(id);
1197 if (shape_id >= GET_SHAPE_TREE()->next_shape_id) {
1198 rb_raise(rb_eArgError, "Shape ID %d is out of bounds\n", shape_id);
1199 }
1200 return rb_shape_t_to_rb_cShape(rb_shape_get_shape_by_id(shape_id));
1201}
1202#endif
1203
1204#ifdef HAVE_MMAP
1205#include <sys/mman.h>
1206#endif
1207
void
Init_default_shapes(void)
{
    // Boot-time construction of the shape tree.  Shape IDs are handed out in
    // allocation order, so the sequence of allocations below is load-bearing:
    // the RUBY_ASSERTs verify that each well-known ID (ROOT_SHAPE_ID,
    // SPECIAL_CONST_SHAPE_ID, OBJ_TOO_COMPLEX_SHAPE_ID) lands where expected.
    rb_shape_tree_t *st = ruby_mimmalloc(sizeof(rb_shape_tree_t));
    // NOTE(review): st is not NULL-checked before memset — presumably
    // ruby_mimmalloc cannot fail this early in boot; confirm.
    memset(st, 0, sizeof(rb_shape_tree_t));
    rb_shape_tree_ptr = st;

#ifdef HAVE_MMAP
    // Reserve the entire fixed-capacity shape buffer in a single mapping.
    rb_shape_tree_ptr->shape_list = (rb_shape_t *)mmap(NULL, rb_size_mul_or_raise(SHAPE_BUFFER_SIZE, sizeof(rb_shape_t), rb_eRuntimeError),
                                                       PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (GET_SHAPE_TREE()->shape_list == MAP_FAILED) {
        // Normalize mmap's MAP_FAILED sentinel to NULL so the check below fires.
        GET_SHAPE_TREE()->shape_list = 0;
    }
#else
    GET_SHAPE_TREE()->shape_list = xcalloc(SHAPE_BUFFER_SIZE, sizeof(rb_shape_t));
#endif

    if (!GET_SHAPE_TREE()->shape_list) {
        rb_memerror();
    }

    // Internal IDs used as edge names for the frozen and T_OBJECT transitions.
    id_frozen = rb_make_internal_id();
    id_t_object = rb_make_internal_id();

#ifdef HAVE_MMAP
    rb_shape_tree_ptr->shape_cache = (redblack_node_t *)mmap(NULL, rb_size_mul_or_raise(REDBLACK_CACHE_SIZE, sizeof(redblack_node_t), rb_eRuntimeError),
                                                             PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    rb_shape_tree_ptr->cache_size = 0;

    // If mmap fails, then give up on the redblack tree cache.
    // We set the cache size such that the redblack node allocators think
    // the cache is full.
    if (GET_SHAPE_TREE()->shape_cache == MAP_FAILED) {
        GET_SHAPE_TREE()->shape_cache = 0;
        GET_SHAPE_TREE()->cache_size = REDBLACK_CACHE_SIZE;
    }
#endif

    // Shapes by size pool
    for (int i = 0; i < SIZE_POOL_COUNT; i++) {
        size_pool_edge_names[i] = rb_make_internal_id();
    }

    // Root shape
    // First allocation, so this gets ID 0 == ROOT_SHAPE_ID.
    rb_shape_t *root = rb_shape_alloc_with_parent_id(0, INVALID_SHAPE_ID);
    root->capacity = 0;
    root->type = SHAPE_ROOT;
    root->size_pool_index = 0;
    GET_SHAPE_TREE()->root_shape = root;
    RUBY_ASSERT(rb_shape_id(GET_SHAPE_TREE()->root_shape) == ROOT_SHAPE_ID);

    // Shapes by size pool
    // One root shape per remaining size pool, taking IDs 1..SIZE_POOL_COUNT-1.
    for (int i = 1; i < SIZE_POOL_COUNT; i++) {
        rb_shape_t *new_shape = rb_shape_alloc_with_parent_id(0, INVALID_SHAPE_ID);
        new_shape->type = SHAPE_ROOT;
        new_shape->size_pool_index = i;
        new_shape->ancestor_index = LEAF;
        RUBY_ASSERT(rb_shape_id(new_shape) == (shape_id_t)i);
    }

    // Make shapes for T_OBJECT
    // These take IDs SIZE_POOL_COUNT..2*SIZE_POOL_COUNT-1; capacity is the
    // number of VALUE slots that fit after the RObject header in each pool.
    for (int i = 0; i < SIZE_POOL_COUNT; i++) {
        rb_shape_t * shape = rb_shape_get_shape_by_id(i);
        bool dont_care;
        rb_shape_t * t_object_shape =
            get_next_shape_internal(shape, id_t_object, SHAPE_T_OBJECT, &dont_care, true);
        t_object_shape->capacity = (uint32_t)((rb_size_pool_slot_size(i) - offsetof(struct RObject, as.ary)) / sizeof(VALUE));
        t_object_shape->edges = rb_id_table_create(0);
        t_object_shape->ancestor_index = LEAF;
        RUBY_ASSERT(rb_shape_id(t_object_shape) == (shape_id_t)(i + SIZE_POOL_COUNT));
    }

    bool dont_care;
    // Special const shape
    // The frozen transition off the root; only the debug build keeps the
    // returned pointer (for the assertions below).
#if RUBY_DEBUG
    rb_shape_t * special_const_shape =
#endif
    get_next_shape_internal(root, (ID)id_frozen, SHAPE_FROZEN, &dont_care, true);
    RUBY_ASSERT(rb_shape_id(special_const_shape) == SPECIAL_CONST_SHAPE_ID);
    RUBY_ASSERT(SPECIAL_CONST_SHAPE_ID == (GET_SHAPE_TREE()->next_shape_id - 1));
    RUBY_ASSERT(rb_shape_frozen_shape_p(special_const_shape));

    // Fallback shape used once an object exceeds the shape system's limits
    // and its ivars are stored in a hash instead.
    rb_shape_t * hash_fallback_shape = rb_shape_alloc_with_parent_id(0, ROOT_SHAPE_ID);
    hash_fallback_shape->type = SHAPE_OBJ_TOO_COMPLEX;
    hash_fallback_shape->size_pool_index = 0;
    RUBY_ASSERT(OBJ_TOO_COMPLEX_SHAPE_ID == (GET_SHAPE_TREE()->next_shape_id - 1));
    RUBY_ASSERT(rb_shape_id(hash_fallback_shape) == OBJ_TOO_COMPLEX_SHAPE_ID);
}
1296
void
Init_shape(void)
{
#if SHAPE_DEBUG
    /* RubyVM::Shape: debug-only introspection of the shape tree. */
    VALUE klass = rb_struct_define_under(rb_cRubyVM, "Shape",
            "id",
            "parent_id",
            "edge_name",
            "next_iv_index",
            "size_pool_index",
            "type",
            "capacity",
            NULL);

    rb_define_method(klass, "parent", rb_shape_parent, 0);
    rb_define_method(klass, "edges", rb_shape_edges, 0);
    rb_define_method(klass, "depth", rb_shape_export_depth, 0);
    rb_define_method(klass, "too_complex?", rb_shape_too_complex, 0);

    // All exported constants are compile-time integers; register them from
    // one table instead of a run of rb_define_const calls.
    static const struct {
        const char *name;
        int value;
    } consts[] = {
        { "SHAPE_ROOT",              SHAPE_ROOT },
        { "SHAPE_IVAR",              SHAPE_IVAR },
        { "SHAPE_T_OBJECT",          SHAPE_T_OBJECT },
        { "SHAPE_FROZEN",            SHAPE_FROZEN },
        { "SHAPE_ID_NUM_BITS",       SHAPE_ID_NUM_BITS },
        { "SHAPE_FLAG_SHIFT",        SHAPE_FLAG_SHIFT },
        { "SPECIAL_CONST_SHAPE_ID",  SPECIAL_CONST_SHAPE_ID },
        { "OBJ_TOO_COMPLEX_SHAPE_ID", OBJ_TOO_COMPLEX_SHAPE_ID },
        { "SHAPE_MAX_VARIATIONS",    SHAPE_MAX_VARIATIONS },
        { "SIZEOF_RB_SHAPE_T",       (int)sizeof(rb_shape_t) },
        { "SIZEOF_REDBLACK_NODE_T",  (int)sizeof(redblack_node_t) },
        { "SHAPE_BUFFER_SIZE",       (int)(sizeof(rb_shape_t) * SHAPE_BUFFER_SIZE) },
        { "REDBLACK_CACHE_SIZE",     (int)(sizeof(redblack_node_t) * REDBLACK_CACHE_SIZE) },
    };
    for (size_t i = 0; i < sizeof(consts) / sizeof(consts[0]); i++) {
        rb_define_const(klass, consts[i].name, INT2NUM(consts[i].value));
    }

    rb_define_singleton_method(klass, "transition_tree", shape_transition_tree, 0);
    rb_define_singleton_method(klass, "find_by_id", rb_shape_find_by_id, 1);
    rb_define_singleton_method(klass, "of", rb_shape_debug_shape, 1);
    rb_define_singleton_method(klass, "root_shape", rb_shape_root_shape, 0);
    rb_define_singleton_method(klass, "shapes_available", rb_shape_shapes_available, 0);
    rb_define_singleton_method(klass, "exhaust_shapes", rb_shape_exhaust, -1);
#endif
}
#define RUBY_ASSERT(expr)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
Definition assert.h:177
#define rb_define_method(klass, mid, func, arity)
Defines klass#mid.
#define rb_define_singleton_method(klass, mid, func, arity)
Defines klass.mid.
static VALUE RB_FL_TEST_RAW(VALUE obj, VALUE flags)
This is an implementation detail of RB_FL_TEST().
Definition fl_type.h:469
static void RB_FL_SET_RAW(VALUE obj, VALUE flags)
This is an implementation detail of RB_FL_SET().
Definition fl_type.h:606
#define xfree
Old name of ruby_xfree.
Definition xmalloc.h:58
#define ID2SYM
Old name of RB_ID2SYM.
Definition symbol.h:44
#define SIZET2NUM
Old name of RB_SIZE2NUM.
Definition size_t.h:62
#define T_MODULE
Old name of RUBY_T_MODULE.
Definition value_type.h:70
#define NUM2UINT
Old name of RB_NUM2UINT.
Definition int.h:45
#define LONG2NUM
Old name of RB_LONG2NUM.
Definition long.h:50
#define Qtrue
Old name of RUBY_Qtrue.
#define NUM2INT
Old name of RB_NUM2INT.
Definition int.h:44
#define INT2NUM
Old name of RB_INT2NUM.
Definition int.h:43
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define T_OBJECT
Old name of RUBY_T_OBJECT.
Definition value_type.h:75
#define T_CLASS
Old name of RUBY_T_CLASS.
Definition value_type.h:58
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
Definition value_type.h:85
#define xcalloc
Old name of ruby_xcalloc.
Definition xmalloc.h:55
void rb_category_warn(rb_warning_category_t category, const char *fmt,...)
Identical to rb_category_warning(), except it reports unless $VERBOSE is nil.
Definition error.c:433
VALUE rb_eRuntimeError
RuntimeError exception.
Definition error.c:1342
@ RB_WARN_CATEGORY_PERFORMANCE
Warning is for performance issues (not enabled by -w).
Definition error.h:54
size_t rb_obj_embedded_size(uint32_t numiv)
Internal header for Object.
Definition object.c:96
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
Definition object.c:215
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
Definition error.h:280
VALUE rb_struct_define_under(VALUE space, const char *name,...)
Identical to rb_struct_define(), except it defines the class under the specified namespace instead of...
Definition struct.c:505
VALUE rb_struct_new(VALUE klass,...)
Creates an instance of the given struct.
Definition struct.c:842
VALUE rb_struct_getmember(VALUE self, ID key)
Identical to rb_struct_aref(), except it takes ID instead of VALUE.
Definition struct.c:232
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
Definition variable.c:3141
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
Definition variable.c:283
VALUE rb_sym2str(VALUE id)
Identical to rb_id2str(), except it takes an instance of rb_cSymbol rather than an ID.
Definition symbol.c:953
void rb_define_const(VALUE klass, const char *name, VALUE val)
Defines a Ruby level constant under a namespace.
Definition variable.c:3690
VALUE type(ANYARGS)
ANYARGS-ed function type.
static VALUE * ROBJECT_IVPTR(VALUE obj)
Queries the instance variables.
Definition robject.h:136
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
C99 shim for <stdbool.h>
Ruby's ordinal objects.
Definition robject.h:83
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
Definition value.h:52
uintptr_t VALUE
Type that represents a Ruby object.
Definition value.h:40