/* shape.c — object shapes implementation.
 * From Ruby 3.3.6p108 (2024-11-05 revision 75015d4c1f6965b5e85e96fb309f1f2129f933c0). */
1#include "vm_core.h"
2#include "vm_sync.h"
3#include "shape.h"
4#include "symbol.h"
5#include "id_table.h"
6#include "internal/class.h"
7#include "internal/error.h"
8#include "internal/gc.h"
9#include "internal/object.h"
10#include "internal/symbol.h"
11#include "internal/variable.h"
12#include "variable.h"
13#include <stdbool.h>
14
15#ifndef _WIN32
16#include <sys/mman.h>
17#endif
18
19#ifndef SHAPE_DEBUG
20#define SHAPE_DEBUG (VM_CHECK_MODE > 0)
21#endif
22
23#if SIZEOF_SHAPE_T == 4
24#if RUBY_DEBUG
25#define SHAPE_BUFFER_SIZE 0x8000
26#else
27#define SHAPE_BUFFER_SIZE 0x80000
28#endif
29#else
30#define SHAPE_BUFFER_SIZE 0x8000
31#endif
32
33#define REDBLACK_CACHE_SIZE (SHAPE_BUFFER_SIZE * 32)
34
35#define SINGLE_CHILD_TAG 0x1
36#define TAG_SINGLE_CHILD(x) (struct rb_id_table *)((uintptr_t)x | SINGLE_CHILD_TAG)
37#define SINGLE_CHILD_MASK (~((uintptr_t)SINGLE_CHILD_TAG))
38#define SINGLE_CHILD_P(x) (((uintptr_t)x) & SINGLE_CHILD_TAG)
39#define SINGLE_CHILD(x) (rb_shape_t *)((uintptr_t)x & SINGLE_CHILD_MASK)
40#define ANCESTOR_CACHE_THRESHOLD 10
41#define MAX_SHAPE_ID (SHAPE_BUFFER_SIZE - 1)
42#define ANCESTOR_SEARCH_MAX_DEPTH 2
43
44static ID id_frozen;
45static ID id_t_object;
46static ID size_pool_edge_names[SIZE_POOL_COUNT];
47
48#define LEAF 0
49#define BLACK 0x0
50#define RED 0x1
51
// Resolve the left child of `node` from the global red-black node cache.
// Child links are stored as 1-based indices into shape_cache; 0 means LEAF.
static redblack_node_t *
redblack_left(redblack_node_t * node)
{
    if (node->l == LEAF) {
        return LEAF;
    }
    else {
        RUBY_ASSERT(node->l < GET_SHAPE_TREE()->cache_size);
        // Index is 1-based so that 0 can encode LEAF; subtract 1 to get the slot.
        redblack_node_t * left = &GET_SHAPE_TREE()->shape_cache[node->l - 1];
        return left;
    }
}

// Resolve the right child of `node` (same 1-based index encoding as redblack_left).
static redblack_node_t *
redblack_right(redblack_node_t * node)
{
    if (node->r == LEAF) {
        return LEAF;
    }
    else {
        RUBY_ASSERT(node->r < GET_SHAPE_TREE()->cache_size);
        redblack_node_t * right = &GET_SHAPE_TREE()->shape_cache[node->r - 1];
        return right;
    }
}
77
78static redblack_node_t *
79redblack_find(redblack_node_t * tree, ID key)
80{
81 if (tree == LEAF) {
82 return LEAF;
83 }
84 else {
85 RUBY_ASSERT(redblack_left(tree) == LEAF || redblack_left(tree)->key < tree->key);
86 RUBY_ASSERT(redblack_right(tree) == LEAF || redblack_right(tree)->key > tree->key);
87
88 if (tree->key == key) {
89 return tree;
90 }
91 else {
92 if (key < tree->key) {
93 return redblack_find(redblack_left(tree), key);
94 }
95 else {
96 return redblack_find(redblack_right(tree), key);
97 }
98 }
99 }
100}
101
// Return the node's color (RED or BLACK), stored in the low bit of the value
// pointer. A LEAF (NULL) node reads as BLACK.
static inline char
redblack_color(redblack_node_t * node)
{
    return node && ((uintptr_t)node->value & RED);
}

// True if the node exists and is colored RED.
static inline bool
redblack_red_p(redblack_node_t * node)
{
    return redblack_color(node) == RED;
}

// Return the shape stored in the node, with the color tag stripped.
static inline rb_shape_t *
redblack_value(redblack_node_t * node)
{
    // Color is stored in the bottom bit of the shape pointer
    // Mask away the bit so we get the actual pointer back
    return (rb_shape_t *)((uintptr_t)node->value & (((uintptr_t)-1) - 1));
}

// Convert a node pointer back into its 1-based cache index (0 encodes LEAF).
// Inverse of the lookup done by redblack_left/redblack_right.
static redblack_id_t
redblack_id_for(redblack_node_t * node)
{
    RUBY_ASSERT(node || node == LEAF);
    if (node == LEAF) {
        return 0;
    }
    else {
        redblack_node_t * redblack_nodes = GET_SHAPE_TREE()->shape_cache;
        redblack_id_t id = (redblack_id_t)(node - redblack_nodes);
        return id + 1;
    }
}
135
// Allocate a new red-black node from the preallocated cache and initialize it.
// Returns LEAF when the cache is exhausted — callers treat that as "no cache",
// not as an error, since the ancestor index is purely an optimization.
static redblack_node_t *
redblack_new(char color, ID key, rb_shape_t * value, redblack_node_t * left, redblack_node_t * right)
{
    if (GET_SHAPE_TREE()->cache_size + 1 >= REDBLACK_CACHE_SIZE) {
        // We're out of cache, just quit
        return LEAF;
    }

    RUBY_ASSERT(left == LEAF || left->key < key);
    RUBY_ASSERT(right == LEAF || right->key > key);

    redblack_node_t * redblack_nodes = GET_SHAPE_TREE()->shape_cache;
    redblack_node_t * node = &redblack_nodes[(GET_SHAPE_TREE()->cache_size)++];
    node->key = key;
    // Tag the color into the low bit of the value pointer (see redblack_value).
    node->value = (rb_shape_t *)((uintptr_t)value | color);
    node->l = redblack_id_for(left);
    node->r = redblack_id_for(right);
    return node;
}
155
// Rebalance after insertion, in the style of Okasaki's functional red-black
// trees: a BLACK node with a RED child that itself has a RED child is rewritten
// into a RED parent with two BLACK children. The four branches below cover the
// four possible positions of the red-red violation (LL, LR, RL, RR). Nodes are
// persistent — balancing allocates new nodes rather than mutating.
static redblack_node_t *
redblack_balance(char color, ID key, rb_shape_t * value, redblack_node_t * left, redblack_node_t * right)
{
    if (color == BLACK) {
        ID new_key, new_left_key, new_right_key;
        rb_shape_t *new_value, *new_left_value, *new_right_value;
        redblack_node_t *new_left_left, *new_left_right, *new_right_left, *new_right_right;

        // Case 1: red violation at left->left
        if (redblack_red_p(left) && redblack_red_p(redblack_left(left))) {
            new_right_key = key;
            new_right_value = value;
            new_right_right = right;

            new_key = left->key;
            new_value = redblack_value(left);
            new_right_left = redblack_right(left);

            new_left_key = redblack_left(left)->key;
            new_left_value = redblack_value(redblack_left(left));

            new_left_left = redblack_left(redblack_left(left));
            new_left_right = redblack_right(redblack_left(left));
        }
        // Case 2: red violation at left->right
        else if (redblack_red_p(left) && redblack_red_p(redblack_right(left))) {
            new_right_key = key;
            new_right_value = value;
            new_right_right = right;

            new_left_key = left->key;
            new_left_value = redblack_value(left);
            new_left_left = redblack_left(left);

            new_key = redblack_right(left)->key;
            new_value = redblack_value(redblack_right(left));
            new_left_right = redblack_left(redblack_right(left));
            new_right_left = redblack_right(redblack_right(left));
        }
        // Case 3: red violation at right->left
        else if (redblack_red_p(right) && redblack_red_p(redblack_left(right))) {
            new_left_key = key;
            new_left_value = value;
            new_left_left = left;

            new_right_key = right->key;
            new_right_value = redblack_value(right);
            new_right_right = redblack_right(right);

            new_key = redblack_left(right)->key;
            new_value = redblack_value(redblack_left(right));
            new_left_right = redblack_left(redblack_left(right));
            new_right_left = redblack_right(redblack_left(right));
        }
        // Case 4: red violation at right->right
        else if (redblack_red_p(right) && redblack_red_p(redblack_right(right))) {
            new_left_key = key;
            new_left_value = value;
            new_left_left = left;

            new_key = right->key;
            new_value = redblack_value(right);
            new_left_right = redblack_left(right);

            new_right_key = redblack_right(right)->key;
            new_right_value = redblack_value(redblack_right(right));
            new_right_left = redblack_left(redblack_right(right));
            new_right_right = redblack_right(redblack_right(right));
        }
        else {
            // No violation: rebuild the node unchanged.
            return redblack_new(color, key, value, left, right);
        }

        // All four cases reassemble into the same shape; verify BST ordering.
        RUBY_ASSERT(new_left_key < new_key);
        RUBY_ASSERT(new_right_key > new_key);
        RUBY_ASSERT(new_left_left == LEAF || new_left_left->key < new_left_key);
        RUBY_ASSERT(new_left_right == LEAF || new_left_right->key > new_left_key);
        RUBY_ASSERT(new_left_right == LEAF || new_left_right->key < new_key);
        RUBY_ASSERT(new_right_left == LEAF || new_right_left->key < new_right_key);
        RUBY_ASSERT(new_right_left == LEAF || new_right_left->key > new_key);
        RUBY_ASSERT(new_right_right == LEAF || new_right_right->key > new_right_key);

        return redblack_new(
                RED, new_key, new_value,
                redblack_new(BLACK, new_left_key, new_left_value, new_left_left, new_left_right),
                redblack_new(BLACK, new_right_key, new_right_value, new_right_left, new_right_right));
    }

    return redblack_new(color, key, value, left, right);
}
242
// Recursive insertion helper: returns the (possibly new) subtree root with
// `key`/`value` inserted, rebalancing on the way back up. Inserting an
// existing key returns the subtree unchanged.
static redblack_node_t *
redblack_insert_aux(redblack_node_t * tree, ID key, rb_shape_t * value)
{
    if (tree == LEAF) {
        // New nodes start RED; redblack_insert blackens the root afterwards.
        return redblack_new(RED, key, value, LEAF, LEAF);
    }
    else {
        redblack_node_t *left, *right;
        if (key < tree->key) {
            left = redblack_insert_aux(redblack_left(tree), key, value);
            RUBY_ASSERT(left != LEAF);
            right = redblack_right(tree);
            RUBY_ASSERT(right == LEAF || right->key > tree->key);
        }
        else if (key > tree->key) {
            left = redblack_left(tree);
            RUBY_ASSERT(left == LEAF || left->key < tree->key);
            right = redblack_insert_aux(redblack_right(tree), key, value);
            RUBY_ASSERT(right != LEAF);
        }
        else {
            // Key already present: keep the existing node.
            return tree;
        }

        return redblack_balance(
            redblack_color(tree),
            tree->key,
            redblack_value(tree),
            left,
            right
        );
    }
}
276
// Clear the RED tag bit on a node, making it BLACK (mutates in place).
static redblack_node_t *
redblack_force_black(redblack_node_t * node)
{
    // redblack_value returns the pointer with the color bit masked off.
    node->value = redblack_value(node);
    return node;
}

// Insert key/value into the tree, preserving the red-black invariant that the
// root is always BLACK.
static redblack_node_t *
redblack_insert(redblack_node_t * tree, ID key, rb_shape_t * value)
{
    redblack_node_t * root = redblack_insert_aux(tree, key, value);

    if (redblack_red_p(root)) {
        return redblack_force_black(root);
    }
    else {
        return root;
    }
}
296
297rb_shape_tree_t *rb_shape_tree_ptr = NULL;
298
299/*
300 * Shape getters
301 */
303rb_shape_get_root_shape(void)
304{
305 return GET_SHAPE_TREE()->root_shape;
306}
307
308shape_id_t
309rb_shape_id(rb_shape_t * shape)
310{
311 return (shape_id_t)(shape - GET_SHAPE_TREE()->shape_list);
312}
313
// Invoke `callback` on every shape allocated so far, in allocation (id) order.
// Shapes live in one contiguous array, so this is a simple pointer sweep from
// the root up to (but excluding) next_shape_id.
void
rb_shape_each_shape(each_shape_callback callback, void *data)
{
    rb_shape_t *cursor = rb_shape_get_root_shape();
    rb_shape_t *end = rb_shape_get_shape_by_id(GET_SHAPE_TREE()->next_shape_id);
    while (cursor < end) {
        callback(cursor, data);
        cursor += 1;
    }
}
324
325RUBY_FUNC_EXPORTED rb_shape_t*
326rb_shape_get_shape_by_id(shape_id_t shape_id)
327{
328 RUBY_ASSERT(shape_id != INVALID_SHAPE_ID);
329
330 rb_shape_t *shape = &GET_SHAPE_TREE()->shape_list[shape_id];
331 return shape;
332}
333
335rb_shape_get_parent(rb_shape_t * shape)
336{
337 return rb_shape_get_shape_by_id(shape->parent_id);
338}
339
340#if !SHAPE_IN_BASIC_FLAGS
341shape_id_t rb_generic_shape_id(VALUE obj);
342#endif
343
// Return the shape id of any VALUE. Special constants share one fixed shape.
// Where the id lives depends on the build: either in the object's basic flags
// (SHAPE_IN_BASIC_FLAGS) or in a type-specific slot / generic ivar table.
RUBY_FUNC_EXPORTED shape_id_t
rb_shape_get_shape_id(VALUE obj)
{
    if (RB_SPECIAL_CONST_P(obj)) {
        return SPECIAL_CONST_SHAPE_ID;
    }

#if SHAPE_IN_BASIC_FLAGS
    return RBASIC_SHAPE_ID(obj);
#else
    switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
        return ROBJECT_SHAPE_ID(obj);
        break;
      case T_CLASS:
      case T_MODULE:
        return RCLASS_SHAPE_ID(obj);
      default:
        // All other types keep their shape id in the generic ivar table.
        return rb_generic_shape_id(obj);
    }
#endif
}
366
367size_t
368rb_shape_depth(rb_shape_t * shape)
369{
370 size_t depth = 1;
371
372 while (shape->parent_id != INVALID_SHAPE_ID) {
373 depth++;
374 shape = rb_shape_get_parent(shape);
375 }
376
377 return depth;
378}
379
381rb_shape_get_shape(VALUE obj)
382{
383 return rb_shape_get_shape_by_id(rb_shape_get_shape_id(obj));
384}
385
// Reserve the next slot in the global shape array. Aborts the process when the
// shape space (MAX_SHAPE_ID) is exhausted — callers that can degrade gracefully
// check next_shape_id against MAX_SHAPE_ID before calling.
static rb_shape_t *
shape_alloc(void)
{
    shape_id_t shape_id = GET_SHAPE_TREE()->next_shape_id;
    GET_SHAPE_TREE()->next_shape_id++;

    if (shape_id == (MAX_SHAPE_ID + 1)) {
        // TODO: Make an OutOfShapesError ??
        rb_bug("Out of shapes");
    }

    return &GET_SHAPE_TREE()->shape_list[shape_id];
}

// Allocate a shape with an explicit parent id; used when the parent pointer
// itself is not at hand (e.g. bootstrapping the root shape).
static rb_shape_t *
rb_shape_alloc_with_parent_id(ID edge_name, shape_id_t parent_id)
{
    rb_shape_t * shape = shape_alloc();

    shape->edge_name = edge_name;
    shape->next_iv_index = 0;
    shape->parent_id = parent_id;
    shape->edges = NULL;

    return shape;
}

// Allocate a child shape of `parent`, inheriting its size pool and capacity.
static rb_shape_t *
rb_shape_alloc(ID edge_name, rb_shape_t * parent, enum shape_type type)
{
    rb_shape_t * shape = rb_shape_alloc_with_parent_id(edge_name, rb_shape_id(parent));
    shape->type = (uint8_t)type;
    shape->size_pool_index = parent->size_pool_index;
    shape->capacity = parent->capacity;
    shape->edges = 0;
    return shape;
}
423
#ifdef HAVE_MMAP
// Build (memoized) the red-black index of this shape's SHAPE_IVAR ancestors,
// recursing to the root. Each SHAPE_IVAR shape gets its parent's index plus
// its own edge; transition shapes (frozen, etc.) simply share the parent's
// index. Used to make ivar lookup O(log n) on deep shape chains.
static redblack_node_t *
redblack_cache_ancestors(rb_shape_t * shape)
{
    if (!(shape->ancestor_index || shape->parent_id == INVALID_SHAPE_ID)) {
        redblack_node_t * parent_index;

        parent_index = redblack_cache_ancestors(rb_shape_get_parent(shape));

        if (shape->type == SHAPE_IVAR) {
            shape->ancestor_index = redblack_insert(parent_index, shape->edge_name, shape);

#if RUBY_DEBUG
            if (shape->ancestor_index) {
                redblack_node_t *inserted_node = redblack_find(shape->ancestor_index, shape->edge_name);
                RUBY_ASSERT(inserted_node);
                RUBY_ASSERT(redblack_value(inserted_node) == shape);
            }
#endif
        }
        else {
            // Non-ivar transitions add nothing to the index; reuse the parent's.
            shape->ancestor_index = parent_index;
        }
    }

    return shape->ancestor_index;
}
#else
// Without mmap there is no node cache; the ancestor index is disabled.
static redblack_node_t *
redblack_cache_ancestors(rb_shape_t * shape)
{
    return LEAF;
}
#endif
458
// Allocate and initialize a child shape for the given transition type.
// SHAPE_IVAR children may grow capacity and start maintaining the ancestor
// cache once the chain is deep enough; FROZEN/T_OBJECT transitions carry the
// ivar count through unchanged. ROOT/TOO_COMPLEX are never created this way.
static rb_shape_t *
rb_shape_alloc_new_child(ID id, rb_shape_t * shape, enum shape_type shape_type)
{
    rb_shape_t * new_shape = rb_shape_alloc(id, shape, shape_type);

    switch (shape_type) {
      case SHAPE_IVAR:
        if (UNLIKELY(shape->next_iv_index >= shape->capacity)) {
            RUBY_ASSERT(shape->next_iv_index == shape->capacity);
            // Out of room in the ivar array: grow capacity for the child.
            new_shape->capacity = (uint32_t)rb_malloc_grow_capa(shape->capacity, sizeof(VALUE));
        }
        RUBY_ASSERT(new_shape->capacity > shape->next_iv_index);
        new_shape->next_iv_index = shape->next_iv_index + 1;
        if (new_shape->next_iv_index > ANCESTOR_CACHE_THRESHOLD) {
            redblack_cache_ancestors(new_shape);
        }
        break;
      case SHAPE_FROZEN:
      case SHAPE_T_OBJECT:
        new_shape->next_iv_index = shape->next_iv_index;
        break;
      case SHAPE_OBJ_TOO_COMPLEX:
      case SHAPE_ROOT:
        rb_bug("Unreachable");
        break;
    }

    return new_shape;
}
488
// Core transition function: find or create the child of `shape` reached via
// edge `id` of type `shape_type`. Runs under the VM lock since it may mutate
// the shared edge table. Sets *variation_created when a new sibling edge was
// added to an existing table (used for per-class variation accounting).
// Returns OBJ_TOO_COMPLEX_SHAPE when creation is disallowed or ids run out.
static rb_shape_t*
get_next_shape_internal(rb_shape_t * shape, ID id, enum shape_type shape_type, bool * variation_created, bool new_variations_allowed)
{
    rb_shape_t *res = NULL;

    // There should never be outgoing edges from "too complex"
    RUBY_ASSERT(rb_shape_id(shape) != OBJ_TOO_COMPLEX_SHAPE_ID);

    *variation_created = false;

    RB_VM_LOCK_ENTER();
    {
        // If the current shape has children
        if (shape->edges) {
            // Check if it only has one child
            if (SINGLE_CHILD_P(shape->edges)) {
                rb_shape_t * child = SINGLE_CHILD(shape->edges);
                // If the one child has a matching edge name, then great,
                // we found what we want.
                if (child->edge_name == id) {
                    res = child;
                }
            }
            else {
                // If it has more than one child, do a hash lookup to find it.
                VALUE lookup_result;
                if (rb_id_table_lookup(shape->edges, id, &lookup_result)) {
                    res = (rb_shape_t *)lookup_result;
                }
            }
        }

        // If we didn't find the shape we're looking for we create it.
        if (!res) {
            // If we're not allowed to create a new variation, or if we're out of shapes
            // we return TOO_COMPLEX_SHAPE.
            if (!new_variations_allowed || GET_SHAPE_TREE()->next_shape_id > MAX_SHAPE_ID) {
                res = rb_shape_get_shape_by_id(OBJ_TOO_COMPLEX_SHAPE_ID);
            }
            else {
                rb_shape_t * new_shape = rb_shape_alloc_new_child(id, shape, shape_type);

                if (!shape->edges) {
                    // If the shape had no edge yet, we can directly set the new child
                    // (tagged pointer, avoiding a table allocation for the common case).
                    shape->edges = TAG_SINGLE_CHILD(new_shape);
                }
                else {
                    // If the edge was single child we need to allocate a table.
                    if (SINGLE_CHILD_P(shape->edges)) {
                        rb_shape_t * old_child = SINGLE_CHILD(shape->edges);
                        shape->edges = rb_id_table_create(2);
                        rb_id_table_insert(shape->edges, old_child->edge_name, (VALUE)old_child);
                    }

                    rb_id_table_insert(shape->edges, new_shape->edge_name, (VALUE)new_shape);
                    *variation_created = true;
                }

                res = new_shape;
            }
        }
    }
    RB_VM_LOCK_LEAVE();

    return res;
}
555
556int
557rb_shape_frozen_shape_p(rb_shape_t* shape)
558{
559 return SHAPE_FROZEN == (enum shape_type)shape->type;
560}
561
// Walk up from `shape` looking for the SHAPE_IVAR edge named `id`. When found,
// store it in *removed_shape and rebuild the chain of transitions below it on
// top of its parent, yielding the shape the object should move to. Returns
// NULL when the ivar does not exist anywhere up the chain, or the TOO_COMPLEX
// shape if rebuilding overflows the shape space.
static rb_shape_t *
remove_shape_recursive(rb_shape_t *shape, ID id, rb_shape_t **removed_shape)
{
    if (shape->parent_id == INVALID_SHAPE_ID) {
        // We've hit the top of the shape tree and couldn't find the
        // IV we wanted to remove, so return NULL
        return NULL;
    }
    else {
        if (shape->type == SHAPE_IVAR && shape->edge_name == id) {
            *removed_shape = shape;

            return rb_shape_get_parent(shape);
        }
        else {
            // This isn't the IV we want to remove, keep walking up.
            rb_shape_t *new_parent = remove_shape_recursive(rb_shape_get_parent(shape), id, removed_shape);

            // We found a new parent. Create a child of the new parent that
            // has the same attributes as this shape.
            if (new_parent) {
                if (UNLIKELY(new_parent->type == SHAPE_OBJ_TOO_COMPLEX)) {
                    return new_parent;
                }

                bool dont_care;
                rb_shape_t *new_child = get_next_shape_internal(new_parent, shape->edge_name, shape->type, &dont_care, true);
                if (UNLIKELY(new_child->type == SHAPE_OBJ_TOO_COMPLEX)) {
                    return new_child;
                }

                RUBY_ASSERT(new_child->capacity <= shape->capacity);

                return new_child;
            }
            else {
                // We went all the way to the top of the shape tree and couldn't
                // find an IV to remove, so return NULL
                return NULL;
            }
        }
    }
}
605
// Remove instance variable `id` from `obj`: compute the post-removal shape,
// shift the ivar buffer down over the removed slot, store the removed value in
// *removed, and re-embed the object if it now fits inline. Returns false when
// the object must instead fall back to the too-complex (hash-backed) path.
bool
rb_shape_transition_shape_remove_ivar(VALUE obj, ID id, rb_shape_t *shape, VALUE *removed)
{
    if (UNLIKELY(shape->type == SHAPE_OBJ_TOO_COMPLEX)) {
        return false;
    }

    rb_shape_t *removed_shape = NULL;
    rb_shape_t *new_shape = remove_shape_recursive(shape, id, &removed_shape);
    if (new_shape) {
        RUBY_ASSERT(removed_shape != NULL);

        if (UNLIKELY(new_shape->type == SHAPE_OBJ_TOO_COMPLEX)) {
            return false;
        }

        RUBY_ASSERT(new_shape->next_iv_index == shape->next_iv_index - 1);

        // Locate the object's ivar buffer; layout depends on the object type.
        VALUE *ivptr;
        switch(BUILTIN_TYPE(obj)) {
          case T_CLASS:
          case T_MODULE:
            ivptr = RCLASS_IVPTR(obj);
            break;
          case T_OBJECT:
            ivptr = ROBJECT_IVPTR(obj);
            break;
          default: {
            struct gen_ivtbl *ivtbl;
            rb_gen_ivtbl_get(obj, id, &ivtbl);
            ivptr = ivtbl->as.shape.ivptr;
            break;
          }
        }

        *removed = ivptr[removed_shape->next_iv_index - 1];

        // Close the gap left by the removed ivar.
        memmove(&ivptr[removed_shape->next_iv_index - 1], &ivptr[removed_shape->next_iv_index],
                ((new_shape->next_iv_index + 1) - removed_shape->next_iv_index) * sizeof(VALUE));

        // Re-embed objects when instances become small enough
        // This is necessary because YJIT assumes that objects with the same shape
        // have the same embeddedness for efficiency (avoid extra checks)
        if (BUILTIN_TYPE(obj) == T_OBJECT &&
                !RB_FL_TEST_RAW(obj, ROBJECT_EMBED) &&
                rb_obj_embedded_size(new_shape->next_iv_index) <= rb_gc_obj_slot_size(obj)) {
            RB_FL_SET_RAW(obj, ROBJECT_EMBED);
            memcpy(ROBJECT_IVPTR(obj), ivptr, new_shape->next_iv_index * sizeof(VALUE));
            xfree(ivptr);
        }

        rb_shape_set_shape(obj, new_shape);
    }
    return true;
}
661
663rb_shape_transition_shape_frozen(VALUE obj)
664{
665 rb_shape_t* shape = rb_shape_get_shape(obj);
666 RUBY_ASSERT(shape);
667 RUBY_ASSERT(RB_OBJ_FROZEN(obj));
668
669 if (rb_shape_frozen_shape_p(shape) || rb_shape_obj_too_complex(obj)) {
670 return shape;
671 }
672
673 rb_shape_t* next_shape;
674
675 if (shape == rb_shape_get_root_shape()) {
676 return rb_shape_get_shape_by_id(SPECIAL_CONST_SHAPE_ID);
677 }
678
679 bool dont_care;
680 next_shape = get_next_shape_internal(shape, (ID)id_frozen, SHAPE_FROZEN, &dont_care, true);
681
682 RUBY_ASSERT(next_shape);
683 return next_shape;
684}
685
686/*
687 * This function is used for assertions where we don't want to increment
688 * max_iv_count
689 */
691rb_shape_get_next_iv_shape(rb_shape_t* shape, ID id)
692{
693 RUBY_ASSERT(!is_instance_id(id) || RTEST(rb_sym2str(ID2SYM(id))));
694 bool dont_care;
695 return get_next_shape_internal(shape, id, SHAPE_IVAR, &dont_care, true);
696}
697
698static inline rb_shape_t *
699shape_get_next(rb_shape_t *shape, VALUE obj, ID id, bool emit_warnings)
700{
701 RUBY_ASSERT(!is_instance_id(id) || RTEST(rb_sym2str(ID2SYM(id))));
702 if (UNLIKELY(shape->type == SHAPE_OBJ_TOO_COMPLEX)) {
703 return shape;
704 }
705
706#if RUBY_DEBUG
707 attr_index_t index;
708 if (rb_shape_get_iv_index(shape, id, &index)) {
709 rb_bug("rb_shape_get_next: trying to create ivar that already exists at index %u", index);
710 }
711#endif
712
713 bool allow_new_shape = true;
714
715 if (BUILTIN_TYPE(obj) == T_OBJECT) {
716 VALUE klass = rb_obj_class(obj);
717 allow_new_shape = RCLASS_EXT(klass)->variation_count < SHAPE_MAX_VARIATIONS;
718 }
719
720 bool variation_created = false;
721 rb_shape_t *new_shape = get_next_shape_internal(shape, id, SHAPE_IVAR, &variation_created, allow_new_shape);
722
723 // Check if we should update max_iv_count on the object's class
724 if (BUILTIN_TYPE(obj) == T_OBJECT) {
725 VALUE klass = rb_obj_class(obj);
726 if (new_shape->next_iv_index > RCLASS_EXT(klass)->max_iv_count) {
727 RCLASS_EXT(klass)->max_iv_count = new_shape->next_iv_index;
728 }
729
730 if (variation_created) {
731 RCLASS_EXT(klass)->variation_count++;
732 if (emit_warnings && rb_warning_category_enabled_p(RB_WARN_CATEGORY_PERFORMANCE)) {
733 if (RCLASS_EXT(klass)->variation_count >= SHAPE_MAX_VARIATIONS) {
736 "The class %"PRIsVALUE" reached %d shape variations, instance variables accesses will be slower and memory usage increased.\n"
737 "It is recommended to define instance variables in a consistent order, for instance by eagerly defining them all in the #initialize method.",
738 rb_class_path(klass),
739 SHAPE_MAX_VARIATIONS
740 );
741 }
742 }
743 }
744 }
745
746 return new_shape;
747}
748
750rb_shape_get_next(rb_shape_t *shape, VALUE obj, ID id)
751{
752 return shape_get_next(shape, obj, id, true);
753}
754
756rb_shape_get_next_no_warnings(rb_shape_t *shape, VALUE obj, ID id)
757{
758 return shape_get_next(shape, obj, id, false);
759}
760
// Same as rb_shape_get_iv_index, but uses a provided valid shape id and index
// to return a result faster if branches of the shape tree are closely related.
// On entry *value holds the index hint and *shape_id_hint the hinted shape id;
// both are updated on return.
bool
rb_shape_get_iv_index_with_hint(shape_id_t shape_id, ID id, attr_index_t *value, shape_id_t *shape_id_hint)
{
    attr_index_t index_hint = *value;
    rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
    rb_shape_t *initial_shape = shape;

    if (*shape_id_hint == INVALID_SHAPE_ID) {
        // No usable hint: do a plain lookup and seed the hint for next time.
        *shape_id_hint = shape_id;
        return rb_shape_get_iv_index(shape, id, value);
    }

    rb_shape_t * shape_hint = rb_shape_get_shape_by_id(*shape_id_hint);

    // We assume it's likely shape_id_hint and shape_id have a close common
    // ancestor, so we check up to ANCESTOR_SEARCH_MAX_DEPTH ancestors before
    // eventually using the index, as in case of a match it will be faster.
    // However if the shape doesn't have an index, we walk the entire tree.
    int depth = INT_MAX;
    if (shape->ancestor_index && shape->next_iv_index >= ANCESTOR_CACHE_THRESHOLD) {
        depth = ANCESTOR_SEARCH_MAX_DEPTH;
    }

    while (depth > 0 && shape->next_iv_index > index_hint) {
        // Bring the hint shape down to the same (or lower) ivar count so the
        // two walks can meet at a common ancestor.
        while (shape_hint->next_iv_index > shape->next_iv_index) {
            shape_hint = rb_shape_get_parent(shape_hint);
        }

        if (shape_hint == shape) {
            // We've found a common ancestor so use the index hint
            *value = index_hint;
            *shape_id_hint = rb_shape_id(shape);
            return true;
        }
        if (shape->edge_name == id) {
            // We found the matching id before a common ancestor
            *value = shape->next_iv_index - 1;
            *shape_id_hint = rb_shape_id(shape);
            return true;
        }

        shape = rb_shape_get_parent(shape);
        depth--;
    }

    // If the original shape had an index but its ancestor doesn't
    // we switch back to the original one as it will be faster.
    if (!shape->ancestor_index && initial_shape->ancestor_index) {
        shape = initial_shape;
    }
    *shape_id_hint = shape_id;
    return rb_shape_get_iv_index(shape, id, value);
}
816
// Linear walk up the shape chain looking for the SHAPE_IVAR edge named `id`.
// On success stores the ivar's storage index in *value. Finding the name on a
// non-ivar transition is a hard bug (ivar names never label other edge types).
static bool
shape_get_iv_index(rb_shape_t *shape, ID id, attr_index_t *value)
{
    while (shape->parent_id != INVALID_SHAPE_ID) {
        if (shape->edge_name == id) {
            enum shape_type shape_type;
            shape_type = (enum shape_type)shape->type;

            switch (shape_type) {
              case SHAPE_IVAR:
                RUBY_ASSERT(shape->next_iv_index > 0);
                // The ivar added by this shape lives at next_iv_index - 1.
                *value = shape->next_iv_index - 1;
                return true;
              case SHAPE_ROOT:
              case SHAPE_T_OBJECT:
                return false;
              case SHAPE_OBJ_TOO_COMPLEX:
              case SHAPE_FROZEN:
                rb_bug("Ivar should not exist on transition");
            }
        }

        shape = rb_shape_get_parent(shape);
    }

    return false;
}
844
// O(log n) ivar index lookup via the red-black ancestor cache; only consulted
// on shapes deep enough to have one. Falls through (returns false) when the
// cache is absent or the id is not present, so the caller can do a tree walk.
static bool
shape_cache_get_iv_index(rb_shape_t *shape, ID id, attr_index_t *value)
{
    if (shape->ancestor_index && shape->next_iv_index >= ANCESTOR_CACHE_THRESHOLD) {
        redblack_node_t *node = redblack_find(shape->ancestor_index, id);
        if (node) {
            rb_shape_t *shape = redblack_value(node);
            *value = shape->next_iv_index - 1;

#if RUBY_DEBUG
            // Cross-check the cache against a full tree walk in debug builds.
            // NOTE(review): this RUBY_ASSERT argument has a side effect (the
            // lookup call) — it only runs when assertions are compiled in,
            // which is fine here because the result is discarded.
            attr_index_t shape_tree_index;
            RUBY_ASSERT(shape_get_iv_index(shape, id, &shape_tree_index));
            RUBY_ASSERT(shape_tree_index == *value);
#endif

            return true;
        }

        /* Verify the cache is correct by checking that this instance variable
         * does not exist in the shape tree either. */
        RUBY_ASSERT(!shape_get_iv_index(shape, id, value));
    }

    return false;
}
870
// Look up the storage index of ivar `id` for `shape`: tries the ancestor cache
// first, then falls back to walking the shape chain.
bool
rb_shape_get_iv_index(rb_shape_t *shape, ID id, attr_index_t *value)
{
    // It doesn't make sense to ask for the index of an IV that's stored
    // on an object that is "too complex" as it uses a hash for storing IVs
    RUBY_ASSERT(rb_shape_id(shape) != OBJ_TOO_COMPLEX_SHAPE_ID);

    if (!shape_cache_get_iv_index(shape, id, value)) {
        return shape_get_iv_index(shape, id, value);
    }

    return true;
}

// Store the shape (by id) on the object.
void
rb_shape_set_shape(VALUE obj, rb_shape_t* shape)
{
    rb_shape_set_shape_id(obj, rb_shape_id(shape));
}

// Byte offset of the shape id within the flags word, consumed by JITs.
// NOTE(review): the expression mixes a bit count with byte-sized operands;
// it matches the upstream source verbatim — confirm against shape.h's
// SHAPE_ID_NUM_BITS before "fixing".
int32_t
rb_shape_id_offset(void)
{
    return sizeof(uintptr_t) - SHAPE_ID_NUM_BITS / sizeof(uintptr_t);
}
896
898rb_shape_traverse_from_new_root(rb_shape_t *initial_shape, rb_shape_t *dest_shape)
899{
900 RUBY_ASSERT(initial_shape->type == SHAPE_T_OBJECT);
901 rb_shape_t *next_shape = initial_shape;
902
903 if (dest_shape->type != initial_shape->type) {
904 next_shape = rb_shape_traverse_from_new_root(initial_shape, rb_shape_get_parent(dest_shape));
905 if (!next_shape) {
906 return NULL;
907 }
908 }
909
910 switch ((enum shape_type)dest_shape->type) {
911 case SHAPE_IVAR:
912 case SHAPE_FROZEN:
913 if (!next_shape->edges) {
914 return NULL;
915 }
916
917 VALUE lookup_result;
918 if (SINGLE_CHILD_P(next_shape->edges)) {
919 rb_shape_t * child = SINGLE_CHILD(next_shape->edges);
920 if (child->edge_name == dest_shape->edge_name) {
921 return child;
922 }
923 else {
924 return NULL;
925 }
926 }
927 else {
928 if (rb_id_table_lookup(next_shape->edges, dest_shape->edge_name, &lookup_result)) {
929 next_shape = (rb_shape_t *)lookup_result;
930 }
931 else {
932 return NULL;
933 }
934 }
935 break;
936 case SHAPE_ROOT:
937 case SHAPE_T_OBJECT:
938 break;
939 case SHAPE_OBJ_TOO_COMPLEX:
940 rb_bug("Unreachable");
941 break;
942 }
943
944 return next_shape;
945}
946
948rb_shape_rebuild_shape(rb_shape_t * initial_shape, rb_shape_t * dest_shape)
949{
950 RUBY_ASSERT(rb_shape_id(initial_shape) != OBJ_TOO_COMPLEX_SHAPE_ID);
951 RUBY_ASSERT(rb_shape_id(dest_shape) != OBJ_TOO_COMPLEX_SHAPE_ID);
952
953 rb_shape_t * midway_shape;
954
955 RUBY_ASSERT(initial_shape->type == SHAPE_T_OBJECT);
956
957 if (dest_shape->type != initial_shape->type) {
958 midway_shape = rb_shape_rebuild_shape(initial_shape, rb_shape_get_parent(dest_shape));
959 if (UNLIKELY(rb_shape_id(midway_shape) == OBJ_TOO_COMPLEX_SHAPE_ID)) {
960 return midway_shape;
961 }
962 }
963 else {
964 midway_shape = initial_shape;
965 }
966
967 switch ((enum shape_type)dest_shape->type) {
968 case SHAPE_IVAR:
969 midway_shape = rb_shape_get_next_iv_shape(midway_shape, dest_shape->edge_name);
970 break;
971 case SHAPE_ROOT:
972 case SHAPE_FROZEN:
973 case SHAPE_T_OBJECT:
974 break;
975 case SHAPE_OBJ_TOO_COMPLEX:
976 rb_bug("Unreachable");
977 break;
978 }
979
980 return midway_shape;
981}
982
983RUBY_FUNC_EXPORTED bool
984rb_shape_obj_too_complex(VALUE obj)
985{
986 return rb_shape_get_shape_id(obj) == OBJ_TOO_COMPLEX_SHAPE_ID;
987}
988
989size_t
990rb_shape_edges_count(rb_shape_t *shape)
991{
992 if (shape->edges) {
993 if (SINGLE_CHILD_P(shape->edges)) {
994 return 1;
995 }
996 else {
997 return rb_id_table_size(shape->edges);
998 }
999 }
1000 return 0;
1001}
1002
1003size_t
1004rb_shape_memsize(rb_shape_t *shape)
1005{
1006 size_t memsize = sizeof(rb_shape_t);
1007 if (shape->edges && !SINGLE_CHILD_P(shape->edges)) {
1008 memsize += rb_id_table_memsize(shape->edges);
1009 }
1010 return memsize;
1011}
1012
1013#if SHAPE_DEBUG
1014/*
1015 * Exposing Shape to Ruby via RubyVM.debug_shape
1016 */
1017
/* :nodoc: */
// RubyVM::Shape#too_complex? — true when this debug Shape struct wraps the
// too-complex shape id.
static VALUE
rb_shape_too_complex(VALUE self)
{
    rb_shape_t * shape;
    shape = rb_shape_get_shape_by_id(NUM2INT(rb_struct_getmember(self, rb_intern("id"))));
    if (rb_shape_id(shape) == OBJ_TOO_COMPLEX_SHAPE_ID) {
        return Qtrue;
    }
    else {
        return Qfalse;
    }
}

// Convert an edge key for Ruby-level display: instance-variable ids become
// symbols, anything else is exposed as the raw numeric ID.
static VALUE
parse_key(ID key)
{
    if (is_instance_id(key)) {
        return ID2SYM(key);
    }
    return LONG2NUM(key);
}
1040
static VALUE rb_shape_edge_name(rb_shape_t * shape);

// Wrap a C shape in a frozen RubyVM::Shape struct for the debug API.
static VALUE
rb_shape_t_to_rb_cShape(rb_shape_t *shape)
{
    VALUE rb_cShape = rb_const_get(rb_cRubyVM, rb_intern("Shape"));

    VALUE obj = rb_struct_new(rb_cShape,
            INT2NUM(rb_shape_id(shape)),
            INT2NUM(shape->parent_id),
            rb_shape_edge_name(shape),
            INT2NUM(shape->next_iv_index),
            INT2NUM(shape->size_pool_index),
            INT2NUM(shape->type),
            INT2NUM(shape->capacity));
    rb_obj_freeze(obj);
    return obj;
}

// rb_id_table_foreach callback: add {edge key => RubyVM::Shape} to the hash
// passed via `ref`.
static enum rb_id_table_iterator_result
rb_edges_to_hash(ID key, VALUE value, void *ref)
{
    rb_hash_aset(*(VALUE *)ref, parse_key(key), rb_shape_t_to_rb_cShape((rb_shape_t*)value));
    return ID_TABLE_CONTINUE;
}
1066
/* :nodoc: */
// RubyVM::Shape#edges — hash of this shape's outgoing edges, handling both the
// tagged single-child encoding and the id-table encoding.
static VALUE
rb_shape_edges(VALUE self)
{
    rb_shape_t* shape;

    shape = rb_shape_get_shape_by_id(NUM2INT(rb_struct_getmember(self, rb_intern("id"))));

    VALUE hash = rb_hash_new();

    if (shape->edges) {
        if (SINGLE_CHILD_P(shape->edges)) {
            rb_shape_t * child = SINGLE_CHILD(shape->edges);
            rb_edges_to_hash(child->edge_name, (VALUE)child, &hash);
        }
        else {
            rb_id_table_foreach(shape->edges, rb_edges_to_hash, &hash);
        }
    }

    return hash;
}

// Ruby-level representation of this shape's edge name: a symbol for ivar
// edges, nil when there is no edge name.
// NOTE(review): the non-ivar branch returns shape->capacity, not the edge
// name — matches the upstream source verbatim, but looks suspicious; confirm
// against later Ruby revisions before relying on it.
static VALUE
rb_shape_edge_name(rb_shape_t * shape)
{
    if (shape->edge_name) {
        if (is_instance_id(shape->edge_name)) {
            return ID2SYM(shape->edge_name);
        }
        return INT2NUM(shape->capacity);
    }
    return Qnil;
}
1101
/* :nodoc: */
// RubyVM::Shape#depth — distance of this shape from the root (inclusive).
static VALUE
rb_shape_export_depth(VALUE self)
{
    rb_shape_t* shape;
    shape = rb_shape_get_shape_by_id(NUM2INT(rb_struct_getmember(self, rb_intern("id"))));
    return SIZET2NUM(rb_shape_depth(shape));
}

/* :nodoc: */
// RubyVM::Shape#parent — the parent shape wrapped as a struct, or nil at root.
static VALUE
rb_shape_parent(VALUE self)
{
    rb_shape_t * shape;
    shape = rb_shape_get_shape_by_id(NUM2INT(rb_struct_getmember(self, rb_intern("id"))));
    if (shape->parent_id != INVALID_SHAPE_ID) {
        return rb_shape_t_to_rb_cShape(rb_shape_get_parent(shape));
    }
    else {
        return Qnil;
    }
}

/* :nodoc: */
// RubyVM.debug_shape(obj) — wrap an arbitrary object's shape for inspection.
static VALUE
rb_shape_debug_shape(VALUE self, VALUE obj)
{
    return rb_shape_t_to_rb_cShape(rb_shape_get_shape(obj));
}
1131
/* :nodoc: */
// RubyVM.debug_root_shape — the root shape wrapped as a struct.
static VALUE
rb_shape_root_shape(VALUE self)
{
    return rb_shape_t_to_rb_cShape(rb_shape_get_root_shape());
}

/* :nodoc: */
// Number of shape ids still available before the tree is full.
static VALUE
rb_shape_shapes_available(VALUE self)
{
    return INT2NUM(MAX_SHAPE_ID - (GET_SHAPE_TREE()->next_shape_id - 1));
}

/* :nodoc: */
// Test helper: artificially exhaust the shape space, optionally leaving
// `offset` ids free.
static VALUE
rb_shape_exhaust(int argc, VALUE *argv, VALUE self)
{
    rb_check_arity(argc, 0, 1);
    int offset = argc == 1 ? NUM2INT(argv[0]) : 0;
    GET_SHAPE_TREE()->next_shape_id = MAX_SHAPE_ID - offset + 1;
    return Qnil;
}
1155
1156VALUE rb_obj_shape(rb_shape_t* shape);
1157
1158static enum rb_id_table_iterator_result collect_keys_and_values(ID key, VALUE value, void *ref)
1159{
1160 rb_hash_aset(*(VALUE *)ref, parse_key(key), rb_obj_shape((rb_shape_t*)value));
1161 return ID_TABLE_CONTINUE;
1162}
1163
1164static VALUE edges(struct rb_id_table* edges)
1165{
1166 VALUE hash = rb_hash_new();
1167 if (SINGLE_CHILD_P(edges)) {
1168 rb_shape_t * child = SINGLE_CHILD(edges);
1169 collect_keys_and_values(child->edge_name, (VALUE)child, &hash);
1170 }
1171 else {
1172 rb_id_table_foreach(edges, collect_keys_and_values, &hash);
1173 }
1174 return hash;
1175}
1176
1177/* :nodoc: */
1178VALUE
1179rb_obj_shape(rb_shape_t* shape)
1180{
1181 VALUE rb_shape = rb_hash_new();
1182
1183 rb_hash_aset(rb_shape, ID2SYM(rb_intern("id")), INT2NUM(rb_shape_id(shape)));
1184 rb_hash_aset(rb_shape, ID2SYM(rb_intern("edges")), edges(shape->edges));
1185
1186 if (shape == rb_shape_get_root_shape()) {
1187 rb_hash_aset(rb_shape, ID2SYM(rb_intern("parent_id")), INT2NUM(ROOT_SHAPE_ID));
1188 }
1189 else {
1190 rb_hash_aset(rb_shape, ID2SYM(rb_intern("parent_id")), INT2NUM(shape->parent_id));
1191 }
1192
1193 rb_hash_aset(rb_shape, ID2SYM(rb_intern("edge_name")), rb_id2str(shape->edge_name));
1194 return rb_shape;
1195}
1196
1197/* :nodoc: */
1198static VALUE
1199shape_transition_tree(VALUE self)
1200{
1201 return rb_obj_shape(rb_shape_get_root_shape());
1202}
1203
1204/* :nodoc: */
1205static VALUE
1206rb_shape_find_by_id(VALUE mod, VALUE id)
1207{
1208 shape_id_t shape_id = NUM2UINT(id);
1209 if (shape_id >= GET_SHAPE_TREE()->next_shape_id) {
1210 rb_raise(rb_eArgError, "Shape ID %d is out of bounds\n", shape_id);
1211 }
1212 return rb_shape_t_to_rb_cShape(rb_shape_get_shape_by_id(shape_id));
1213}
1214#endif
1215
1216#ifdef HAVE_MMAP
1217#include <sys/mman.h>
1218#endif
1219
// Bootstraps the global shape tree during VM startup: allocates the shape
// buffer and the red-black node cache, then creates the reserved
// low-numbered shapes (one root shape per size pool, the T_OBJECT shapes,
// the special-const frozen shape, and the too-complex fallback shape).
// The RUBY_ASSERTs pin each reserved shape to its expected SHAPE_ID
// constant, so the allocation ORDER here is load-bearing.
void
Init_default_shapes(void)
{
    // NOTE(review): ruby_mimmalloc can return NULL but the result is used
    // unchecked by memset — presumably tolerated this early in boot; confirm.
    rb_shape_tree_t *st = ruby_mimmalloc(sizeof(rb_shape_tree_t));
    memset(st, 0, sizeof(rb_shape_tree_t));
    rb_shape_tree_ptr = st;

#ifdef HAVE_MMAP
    // Reserve the entire shape buffer up front; rb_size_mul_or_raise guards
    // the size computation against overflow.
    rb_shape_tree_ptr->shape_list = (rb_shape_t *)mmap(NULL, rb_size_mul_or_raise(SHAPE_BUFFER_SIZE, sizeof(rb_shape_t), rb_eRuntimeError),
                                         PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    // Normalize MAP_FAILED to NULL so the single check below covers both
    // the mmap and xcalloc paths.
    if (GET_SHAPE_TREE()->shape_list == MAP_FAILED) {
        GET_SHAPE_TREE()->shape_list = 0;
    }
#else
    GET_SHAPE_TREE()->shape_list = xcalloc(SHAPE_BUFFER_SIZE, sizeof(rb_shape_t));
#endif

    // Unlike the red-black cache below, the shape list is mandatory.
    if (!GET_SHAPE_TREE()->shape_list) {
        rb_memerror();
    }

    // Internal IDs used as edge names for transitions that are not ordinary
    // instance variables (frozen transition, T_OBJECT transition).
    id_frozen = rb_make_internal_id();
    id_t_object = rb_make_internal_id();

#ifdef HAVE_MMAP
    rb_shape_tree_ptr->shape_cache = (redblack_node_t *)mmap(NULL, rb_size_mul_or_raise(REDBLACK_CACHE_SIZE, sizeof(redblack_node_t), rb_eRuntimeError),
                                         PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    rb_shape_tree_ptr->cache_size = 0;

    // If mmap fails, then give up on the redblack tree cache.
    // We set the cache size such that the redblack node allocators think
    // the cache is full.
    if (GET_SHAPE_TREE()->shape_cache == MAP_FAILED) {
        GET_SHAPE_TREE()->shape_cache = 0;
        GET_SHAPE_TREE()->cache_size = REDBLACK_CACHE_SIZE;
    }
#endif

    // Shapes by size pool
    for (int i = 0; i < SIZE_POOL_COUNT; i++) {
        size_pool_edge_names[i] = rb_make_internal_id();
    }

    // Root shape
    // First allocation in a fresh tree, so it receives ROOT_SHAPE_ID.
    rb_shape_t *root = rb_shape_alloc_with_parent_id(0, INVALID_SHAPE_ID);
    root->capacity = 0;
    root->type = SHAPE_ROOT;
    root->size_pool_index = 0;
    GET_SHAPE_TREE()->root_shape = root;
    RUBY_ASSERT(rb_shape_id(GET_SHAPE_TREE()->root_shape) == ROOT_SHAPE_ID);

    // Shapes by size pool
    // One additional root shape per remaining size pool; allocation order
    // gives them shape IDs 1..SIZE_POOL_COUNT-1, as asserted.
    for (int i = 1; i < SIZE_POOL_COUNT; i++) {
        rb_shape_t *new_shape = rb_shape_alloc_with_parent_id(0, INVALID_SHAPE_ID);
        new_shape->type = SHAPE_ROOT;
        new_shape->size_pool_index = i;
        new_shape->ancestor_index = LEAF;
        RUBY_ASSERT(rb_shape_id(new_shape) == (shape_id_t)i);
    }

    // Make shapes for T_OBJECT
    // These occupy IDs SIZE_POOL_COUNT..2*SIZE_POOL_COUNT-1.
    for (int i = 0; i < SIZE_POOL_COUNT; i++) {
        rb_shape_t * shape = rb_shape_get_shape_by_id(i);
        bool dont_care;
        rb_shape_t * t_object_shape =
            get_next_shape_internal(shape, id_t_object, SHAPE_T_OBJECT, &dont_care, true);
        // Capacity = number of VALUE slots between the start of the embedded
        // ivar area (RObject as.ary) and the end of this size pool's slot.
        t_object_shape->capacity = (uint32_t)((rb_size_pool_slot_size(i) - offsetof(struct RObject, as.ary)) / sizeof(VALUE));
        t_object_shape->edges = rb_id_table_create(0);
        t_object_shape->ancestor_index = LEAF;
        RUBY_ASSERT(rb_shape_id(t_object_shape) == (shape_id_t)(i + SIZE_POOL_COUNT));
    }

    bool dont_care;
    // Special const shape
    // The shape pointer itself is only needed for the debug assertions.
#if RUBY_DEBUG
    rb_shape_t * special_const_shape =
#endif
    get_next_shape_internal(root, (ID)id_frozen, SHAPE_FROZEN, &dont_care, true);
    RUBY_ASSERT(rb_shape_id(special_const_shape) == SPECIAL_CONST_SHAPE_ID);
    RUBY_ASSERT(SPECIAL_CONST_SHAPE_ID == (GET_SHAPE_TREE()->next_shape_id - 1));
    RUBY_ASSERT(rb_shape_frozen_shape_p(special_const_shape));

    // Fallback shape assigned to objects whose ivar layout is too complex
    // for the shape tree (SHAPE_OBJ_TOO_COMPLEX).
    rb_shape_t * hash_fallback_shape = rb_shape_alloc_with_parent_id(0, ROOT_SHAPE_ID);
    hash_fallback_shape->type = SHAPE_OBJ_TOO_COMPLEX;
    hash_fallback_shape->size_pool_index = 0;
    RUBY_ASSERT(OBJ_TOO_COMPLEX_SHAPE_ID == (GET_SHAPE_TREE()->next_shape_id - 1));
    RUBY_ASSERT(rb_shape_id(hash_fallback_shape) == OBJ_TOO_COMPLEX_SHAPE_ID);
}
1308
// Defines the RubyVM::Shape debug API. Everything here is compiled in only
// when SHAPE_DEBUG is enabled; otherwise this function is a no-op.
void
Init_shape(void)
{
#if SHAPE_DEBUG
    // The Struct members mirror rb_shape_t fields; instances are built by
    // rb_shape_t_to_rb_cShape (defined earlier in this file).
    VALUE rb_cShape = rb_struct_define_under(rb_cRubyVM, "Shape",
                                             "id",
                                             "parent_id",
                                             "edge_name",
                                             "next_iv_index",
                                             "size_pool_index",
                                             "type",
                                             "capacity",
                                             NULL);

    // Per-shape instance methods.
    rb_define_method(rb_cShape, "parent", rb_shape_parent, 0);
    rb_define_method(rb_cShape, "edges", rb_shape_edges, 0);
    rb_define_method(rb_cShape, "depth", rb_shape_export_depth, 0);
    rb_define_method(rb_cShape, "too_complex?", rb_shape_too_complex, 0);
    // Expose internal constants and struct sizes for tests/tooling.
    rb_define_const(rb_cShape, "SHAPE_ROOT", INT2NUM(SHAPE_ROOT));
    rb_define_const(rb_cShape, "SHAPE_IVAR", INT2NUM(SHAPE_IVAR));
    rb_define_const(rb_cShape, "SHAPE_T_OBJECT", INT2NUM(SHAPE_T_OBJECT));
    rb_define_const(rb_cShape, "SHAPE_FROZEN", INT2NUM(SHAPE_FROZEN));
    rb_define_const(rb_cShape, "SHAPE_ID_NUM_BITS", INT2NUM(SHAPE_ID_NUM_BITS));
    rb_define_const(rb_cShape, "SHAPE_FLAG_SHIFT", INT2NUM(SHAPE_FLAG_SHIFT));
    rb_define_const(rb_cShape, "SPECIAL_CONST_SHAPE_ID", INT2NUM(SPECIAL_CONST_SHAPE_ID));
    rb_define_const(rb_cShape, "OBJ_TOO_COMPLEX_SHAPE_ID", INT2NUM(OBJ_TOO_COMPLEX_SHAPE_ID));
    rb_define_const(rb_cShape, "SHAPE_MAX_VARIATIONS", INT2NUM(SHAPE_MAX_VARIATIONS));
    rb_define_const(rb_cShape, "SIZEOF_RB_SHAPE_T", INT2NUM(sizeof(rb_shape_t)));
    rb_define_const(rb_cShape, "SIZEOF_REDBLACK_NODE_T", INT2NUM(sizeof(redblack_node_t)));
    rb_define_const(rb_cShape, "SHAPE_BUFFER_SIZE", INT2NUM(sizeof(rb_shape_t) * SHAPE_BUFFER_SIZE));
    rb_define_const(rb_cShape, "REDBLACK_CACHE_SIZE", INT2NUM(sizeof(redblack_node_t) * REDBLACK_CACHE_SIZE));

    // Class-level entry points for inspecting the shape tree.
    rb_define_singleton_method(rb_cShape, "transition_tree", shape_transition_tree, 0);
    rb_define_singleton_method(rb_cShape, "find_by_id", rb_shape_find_by_id, 1);
    rb_define_singleton_method(rb_cShape, "of", rb_shape_debug_shape, 1);
    rb_define_singleton_method(rb_cShape, "root_shape", rb_shape_root_shape, 0);
    rb_define_singleton_method(rb_cShape, "shapes_available", rb_shape_shapes_available, 0);
    rb_define_singleton_method(rb_cShape, "exhaust_shapes", rb_shape_exhaust, -1);
#endif
}
#define RUBY_ASSERT(expr)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
Definition assert.h:177
#define rb_define_method(klass, mid, func, arity)
Defines klass#mid.
#define rb_define_singleton_method(klass, mid, func, arity)
Defines klass.mid.
static VALUE RB_FL_TEST_RAW(VALUE obj, VALUE flags)
This is an implementation detail of RB_FL_TEST().
Definition fl_type.h:469
static void RB_FL_SET_RAW(VALUE obj, VALUE flags)
This is an implementation detail of RB_FL_SET().
Definition fl_type.h:606
#define xfree
Old name of ruby_xfree.
Definition xmalloc.h:58
#define ID2SYM
Old name of RB_ID2SYM.
Definition symbol.h:44
#define SIZET2NUM
Old name of RB_SIZE2NUM.
Definition size_t.h:62
#define T_MODULE
Old name of RUBY_T_MODULE.
Definition value_type.h:70
#define NUM2UINT
Old name of RB_NUM2UINT.
Definition int.h:45
#define LONG2NUM
Old name of RB_LONG2NUM.
Definition long.h:50
#define Qtrue
Old name of RUBY_Qtrue.
#define NUM2INT
Old name of RB_NUM2INT.
Definition int.h:44
#define INT2NUM
Old name of RB_INT2NUM.
Definition int.h:43
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define T_OBJECT
Old name of RUBY_T_OBJECT.
Definition value_type.h:75
#define T_CLASS
Old name of RUBY_T_CLASS.
Definition value_type.h:58
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
Definition value_type.h:85
#define xcalloc
Old name of ruby_xcalloc.
Definition xmalloc.h:55
void rb_category_warn(rb_warning_category_t category, const char *fmt,...)
Identical to rb_category_warning(), except it reports unless $VERBOSE is nil.
Definition error.c:433
VALUE rb_eRuntimeError
RuntimeError exception.
Definition error.c:1342
@ RB_WARN_CATEGORY_PERFORMANCE
Warning is for performance issues (not enabled by -w).
Definition error.h:54
size_t rb_obj_embedded_size(uint32_t numiv)
Internal header for Object.
Definition object.c:96
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
Definition object.c:215
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
Definition error.h:280
VALUE rb_struct_define_under(VALUE space, const char *name,...)
Identical to rb_struct_define(), except it defines the class under the specified namespace instead of...
Definition struct.c:505
VALUE rb_struct_new(VALUE klass,...)
Creates an instance of the given struct.
Definition struct.c:842
VALUE rb_struct_getmember(VALUE self, ID key)
Identical to rb_struct_aref(), except it takes ID instead of VALUE.
Definition struct.c:232
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
Definition variable.c:3141
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
Definition variable.c:283
VALUE rb_sym2str(VALUE id)
Identical to rb_id2str(), except it takes an instance of rb_cSymbol rather than an ID.
Definition symbol.c:953
void rb_define_const(VALUE klass, const char *name, VALUE val)
Defines a Ruby level constant under a namespace.
Definition variable.c:3690
VALUE type(ANYARGS)
ANYARGS-ed function type.
static VALUE * ROBJECT_IVPTR(VALUE obj)
Queries the instance variables.
Definition robject.h:136
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
C99 shim for <stdbool.h>
Ruby's ordinal objects.
Definition robject.h:83
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
Definition value.h:52
uintptr_t VALUE
Type that represents a Ruby object.
Definition value.h:40