Ruby 3.3.6p108 (2024-11-05 revision 75015d4c1f6965b5e85e96fb309f1f2129f933c0)
vm_insnhelper.c
/**********************************************************************

  vm_insnhelper.c - instruction helper functions.

  $Author$

  Copyright (C) 2007 Koichi Sasada

**********************************************************************/
10
11#include "ruby/internal/config.h"
12
13#include <math.h>
14
15#ifdef HAVE_STDATOMIC_H
16 #include <stdatomic.h>
17#endif
18
19#include "constant.h"
20#include "debug_counter.h"
21#include "internal.h"
22#include "internal/class.h"
23#include "internal/compar.h"
24#include "internal/hash.h"
25#include "internal/numeric.h"
26#include "internal/proc.h"
27#include "internal/random.h"
28#include "internal/variable.h"
29#include "internal/struct.h"
30#include "variable.h"
31
32/* finish iseq array */
33#include "insns.inc"
34#include "insns_info.inc"
35
36extern rb_method_definition_t *rb_method_definition_create(rb_method_type_t type, ID mid);
37extern void rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *def, void *opts);
38extern int rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_definition_t *d2);
39extern VALUE rb_make_no_method_exception(VALUE exc, VALUE format, VALUE obj,
40 int argc, const VALUE *argv, int priv);
41
42static const struct rb_callcache vm_empty_cc;
43static const struct rb_callcache vm_empty_cc_for_super;
44
45/* control stack frame */
46
47static rb_control_frame_t *vm_get_ruby_level_caller_cfp(const rb_execution_context_t *ec, const rb_control_frame_t *cfp);
48
static VALUE
ruby_vm_special_exception_copy(VALUE exc)
{
    VALUE e = rb_obj_alloc(rb_class_real(RBASIC_CLASS(exc)));
    rb_obj_copy_ivar(e, exc);
    return e;
}

NORETURN(static void ec_stack_overflow(rb_execution_context_t *ec, int));
static void
ec_stack_overflow(rb_execution_context_t *ec, int setup)
{
    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;
    if (setup) {
        VALUE at = rb_ec_backtrace_object(ec);
        mesg = ruby_vm_special_exception_copy(mesg);
        rb_ivar_set(mesg, idBt, at);
        rb_ivar_set(mesg, idBt_locations, at);
    }
    ec->errinfo = mesg;
    EC_JUMP_TAG(ec, TAG_RAISE);
}

NORETURN(static void vm_stackoverflow(void));

static void
vm_stackoverflow(void)
{
    ec_stack_overflow(GET_EC(), TRUE);
}

NORETURN(void rb_ec_stack_overflow(rb_execution_context_t *ec, int crit));
void
rb_ec_stack_overflow(rb_execution_context_t *ec, int crit)
{
    if (rb_during_gc()) {
        rb_bug("system stack overflow during GC. Faulty native extension?");
    }
    if (crit) {
        ec->raised_flag = RAISED_STACKOVERFLOW;
        ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
        EC_JUMP_TAG(ec, TAG_RAISE);
    }
#ifdef USE_SIGALTSTACK
    ec_stack_overflow(ec, TRUE);
#else
    ec_stack_overflow(ec, FALSE);
#endif
}

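/* Overflow handling above, in summary: overflowing while the GC runs is a
 * bug (typically a misbehaving native extension); `crit` installs the
 * preallocated "fatal" machine stack overflow exception without building a
 * backtrace; otherwise a SystemStackError copy is raised, carrying a
 * backtrace only when sigaltstack leaves enough room to construct one. */
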
static inline void stack_check(rb_execution_context_t *ec);

#if VM_CHECK_MODE > 0
static int
callable_class_p(VALUE klass)
{
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;
    switch (RB_BUILTIN_TYPE(klass)) {
      default:
        break;
      case T_ICLASS:
        if (!RB_TYPE_P(RCLASS_SUPER(klass), T_MODULE)) break;
      case T_MODULE:
        return TRUE;
    }
    while (klass) {
        if (klass == rb_cBasicObject) {
            return TRUE;
        }
        klass = RCLASS_SUPER(klass);
    }
    return FALSE;
#else
    return klass != 0;
#endif
}

static int
callable_method_entry_p(const rb_callable_method_entry_t *cme)
{
    if (cme == NULL) {
        return TRUE;
    }
    else {
        VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment));

        if (callable_class_p(cme->defined_class)) {
            return TRUE;
        }
        else {
            return FALSE;
        }
    }
}

static void
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref, VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
{
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env; /* impossible value */

    if (RB_TYPE_P(cref_or_me, T_IMEMO)) {
        cref_or_me_type = imemo_type(cref_or_me);
    }
    if (type & VM_FRAME_FLAG_BMETHOD) {
        req_me = TRUE;
    }

    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    }
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
    }

    if (req_me) {
        if (cref_or_me_type != imemo_ment) {
            rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
        }
    }
    else {
        if (req_cref && cref_or_me_type != imemo_cref) {
            rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
        }
        else { /* cref or Qfalse */
            if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
                if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
                    /* ignore */
                }
                else {
                    rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
                }
            }
        }
    }

    if (cref_or_me_type == imemo_ment) {
        const rb_callable_method_entry_t *me = (const rb_callable_method_entry_t *)cref_or_me;

        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
        }
    }

    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RBASIC_CLASS((VALUE)iseq) == 0 || // dummy frame for loading
                  RUBY_VM_NORMAL_ISEQ_P(iseq) // argument error
                  );
    }
    else {
        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    }
}

static void
vm_check_frame(VALUE type,
               VALUE specval,
               VALUE cref_or_me,
               const rb_iseq_t *iseq)
{
    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;
    VM_ASSERT(FIXNUM_P(type));

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
    case magic: \
      vm_check_frame_detail(type, req_block, req_me, req_cref, \
                            specval, cref_or_me, is_cframe, iseq); \
      break
    switch (given_magic) {
      /*                           BLK    ME     CREF   CFRAME */
      CHECK(VM_FRAME_MAGIC_METHOD, TRUE,  TRUE,  FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_CLASS,  TRUE,  FALSE, TRUE,  FALSE);
      CHECK(VM_FRAME_MAGIC_TOP,    TRUE,  FALSE, TRUE,  FALSE);
      CHECK(VM_FRAME_MAGIC_CFUNC,  TRUE,  TRUE,  FALSE, TRUE);
      CHECK(VM_FRAME_MAGIC_BLOCK,  FALSE, FALSE, FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_IFUNC,  FALSE, FALSE, FALSE, TRUE);
      CHECK(VM_FRAME_MAGIC_EVAL,   FALSE, FALSE, FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_DUMMY,  TRUE,  FALSE, FALSE, FALSE);
      default:
        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
    }
#undef CHECK
}

static VALUE vm_stack_canary; /* Initialized later */
static bool vm_stack_canary_was_born = false;

void
rb_vm_check_canary(const rb_execution_context_t *ec, VALUE *sp)
{
    const struct rb_control_frame_struct *reg_cfp = ec->cfp;
    const struct rb_iseq_struct *iseq;

    if (! LIKELY(vm_stack_canary_was_born)) {
        return; /* :FIXME: isn't it rather fatal to enter this branch? */
    }
    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
        /* This is at the very beginning of a thread. cfp does not exist. */
        return;
    }
    else if (! (iseq = GET_ISEQ())) {
        return;
    }
    else if (LIKELY(sp[0] != vm_stack_canary)) {
        return;
    }
    else {
        /* we are going to call methods below; squash the canary to
         * prevent infinite loop. */
        sp[0] = Qundef;
    }

    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE *encoded = ISEQ_BODY(iseq)->iseq_encoded;
    const ptrdiff_t pos = GET_PC() - encoded;
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const VALUE inspection = rb_inspect(iseqw);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);

    /* rb_bug() is not capable of outputting contents this large. It
       is designed to run from a SIGSEGV handler, which tends to be
       very restricted. */
    ruby_debug_printf(
        "We are killing the stack canary set by %s, "
        "at %s@pc=%"PRIdPTR"\n"
        "watch out for the C stack trace.\n"
        "%s",
        name, stri, pos, strd);
    rb_bug("see above.");
}
#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)

#else
#define vm_check_canary(ec, sp)
#define vm_check_frame(a, b, c, d)
#endif /* VM_CHECK_MODE > 0 */

#if USE_DEBUG_COUNTER
static void
vm_push_frame_debug_counter_inc(
    const struct rb_execution_context_struct *ec,
    const struct rb_control_frame_struct *reg_cfp,
    VALUE type)
{
    const struct rb_control_frame_struct *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(reg_cfp);

    RB_DEBUG_COUNTER_INC(frame_push);

    if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
        const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
        const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
        if (prev) {
            if (curr) {
                RB_DEBUG_COUNTER_INC(frame_R2R);
            }
            else {
                RB_DEBUG_COUNTER_INC(frame_R2C);
            }
        }
        else {
            if (curr) {
                RB_DEBUG_COUNTER_INC(frame_C2R);
            }
            else {
                RB_DEBUG_COUNTER_INC(frame_C2C);
            }
        }
    }

    switch (type & VM_FRAME_MAGIC_MASK) {
      case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
      case VM_FRAME_MAGIC_BLOCK:  RB_DEBUG_COUNTER_INC(frame_push_block);  return;
      case VM_FRAME_MAGIC_CLASS:  RB_DEBUG_COUNTER_INC(frame_push_class);  return;
      case VM_FRAME_MAGIC_TOP:    RB_DEBUG_COUNTER_INC(frame_push_top);    return;
      case VM_FRAME_MAGIC_CFUNC:  RB_DEBUG_COUNTER_INC(frame_push_cfunc);  return;
      case VM_FRAME_MAGIC_IFUNC:  RB_DEBUG_COUNTER_INC(frame_push_ifunc);  return;
      case VM_FRAME_MAGIC_EVAL:   RB_DEBUG_COUNTER_INC(frame_push_eval);   return;
      case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
      case VM_FRAME_MAGIC_DUMMY:  RB_DEBUG_COUNTER_INC(frame_push_dummy);  return;
    }

    rb_bug("unreachable");
}
#else
#define vm_push_frame_debug_counter_inc(ec, cfp, t) /* void */
#endif

STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS,   VM_ENV_DATA_INDEX_FLAGS   == -0);

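/* Layout produced by vm_push_frame() below: the local variables come first,
 * then the three environment slots asserted above, so that relative to the
 * resulting ep:
 *
 *   ep[-2] (VM_ENV_DATA_INDEX_ME_CREF) -- Qfalse, T_IMEMO(cref) or T_IMEMO(ment)
 *   ep[-1] (VM_ENV_DATA_INDEX_SPECVAL) -- block handler or previous env pointer
 *   ep[ 0] (VM_ENV_DATA_INDEX_FLAGS)   -- frame type and environment flags
 */
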
static void
vm_push_frame(rb_execution_context_t *ec,
              const rb_iseq_t *iseq,
              VALUE type,
              VALUE self,
              VALUE specval,
              VALUE cref_or_me,
              const VALUE *pc,
              VALUE *sp,
              int local_size,
              int stack_max)
{
    rb_control_frame_t *const cfp = RUBY_VM_NEXT_CONTROL_FRAME(ec->cfp);

    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);

    /* check stack overflow */
    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
    vm_check_canary(ec, sp);

    /* setup vm value stack */

    /* initialize local variables */
    for (int i = 0; i < local_size; i++) {
        *sp++ = Qnil;
    }

    /* setup ep with managing data */
    *sp++ = cref_or_me; /* ep[-2] / Qnil or T_IMEMO(cref) or T_IMEMO(ment) */
    *sp++ = specval;    /* ep[-1] / block handler or prev env ptr */
    *sp++ = type;       /* ep[-0] / ENV_FLAGS */

    /* setup new frame */
    *cfp = (const struct rb_control_frame_struct) {
        .pc = pc,
        .sp = sp,
        .iseq = iseq,
        .self = self,
        .ep = sp - 1,
        .block_code = NULL,
#if VM_DEBUG_BP_CHECK
        .bp_check = sp,
#endif
        .jit_return = NULL
    };

    /* Ensure the initialization of `*cfp` above never gets reordered with the update of `ec->cfp` below.
       This is a no-op in all cases we've looked at (https://godbolt.org/z/3oxd1446K), but should guarantee it for all
       future/untested compilers/platforms. */

#if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
    atomic_signal_fence(memory_order_seq_cst);
#endif

    ec->cfp = cfp;

    if (VMDEBUG == 2) {
        SDR();
    }
    vm_push_frame_debug_counter_inc(ec, cfp, type);
}

void
rb_vm_pop_frame_no_int(rb_execution_context_t *ec)
{
    rb_control_frame_t *cfp = ec->cfp;

    if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
    if (VMDEBUG == 2) SDR();

    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
}

/* return TRUE if the frame is finished */
static inline int
vm_pop_frame(rb_execution_context_t *ec, rb_control_frame_t *cfp, const VALUE *ep)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];

    if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
    if (VMDEBUG == 2) SDR();

    RUBY_VM_CHECK_INTS(ec);
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    return flags & VM_FRAME_FLAG_FINISH;
}

void
rb_vm_pop_frame(rb_execution_context_t *ec)
{
    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
}

// Pushes a pseudo frame that carries fname as its file name.
VALUE
rb_vm_push_frame_fname(rb_execution_context_t *ec, VALUE fname)
{
    VALUE tmpbuf = rb_imemo_tmpbuf_auto_free_pointer();
    void *ptr = ruby_xcalloc(sizeof(struct rb_iseq_constant_body) + sizeof(struct rb_iseq_struct), 1);
    rb_imemo_tmpbuf_set_ptr(tmpbuf, ptr);

    struct rb_iseq_struct *dmy_iseq = (struct rb_iseq_struct *)ptr;
    struct rb_iseq_constant_body *dmy_body = (struct rb_iseq_constant_body *)&dmy_iseq[1];
    dmy_iseq->body = dmy_body;
    dmy_body->type = ISEQ_TYPE_TOP;
    dmy_body->location.pathobj = fname;

    vm_push_frame(ec,
                  dmy_iseq, //const rb_iseq_t *iseq,
                  VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH, // VALUE type,
                  ec->cfp->self, // VALUE self,
                  VM_BLOCK_HANDLER_NONE, // VALUE specval,
                  Qfalse, // VALUE cref_or_me,
                  NULL, // const VALUE *pc,
                  ec->cfp->sp, // VALUE *sp,
                  0, // int local_size,
                  0); // int stack_max

    return tmpbuf;
}

/* method dispatch */
static inline VALUE
rb_arity_error_new(int argc, int min, int max)
{
    VALUE err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d", argc, min);
    if (min == max) {
        /* max is not needed */
    }
    else if (max == UNLIMITED_ARGUMENTS) {
        rb_str_cat_cstr(err_mess, "+");
    }
    else {
        rb_str_catf(err_mess, "..%d", max);
    }
    rb_str_cat_cstr(err_mess, ")");
    return rb_exc_new3(rb_eArgError, err_mess);
}

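/* rb_error_arity() below raises the ArgumentError built above, e.g.
 *   rb_error_arity(2, 1, 1)  -> "wrong number of arguments (given 2, expected 1)"
 *   rb_error_arity(0, 1, UNLIMITED_ARGUMENTS)
 *                            -> "wrong number of arguments (given 0, expected 1+)"
 *   rb_error_arity(5, 1, 3)  -> "wrong number of arguments (given 5, expected 1..3)"
 */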
void
rb_error_arity(int argc, int min, int max)
{
    rb_exc_raise(rb_arity_error_new(argc, min, max));
}

/* lvar */

NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));

static void
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
{
    /* remember env value forcibly */
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
}

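/* vm_env_write() below is the fast path: while VM_ENV_FLAG_WB_REQUIRED is
 * unset the env still lives on the VM stack and a plain store is enough.
 * Once the flag is set (the env has escaped to the heap), writes take
 * vm_env_write_slowpath() above, which re-registers the env with the GC
 * write barrier and clears the flag again. */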
static inline void
vm_env_write(const VALUE *ep, int index, VALUE v)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
        VM_STACK_ENV_WRITE(ep, index, v);
    }
    else {
        vm_env_write_slowpath(ep, index, v);
    }
}

void
rb_vm_env_write(const VALUE *ep, int index, VALUE v)
{
    vm_env_write(ep, index, v);
}

VALUE
rb_vm_bh_to_procval(const rb_execution_context_t *ec, VALUE block_handler)
{
    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        return Qnil;
    }
    else {
        switch (vm_block_handler_type(block_handler)) {
          case block_handler_type_iseq:
          case block_handler_type_ifunc:
            return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
          case block_handler_type_symbol:
            return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
          case block_handler_type_proc:
            return VM_BH_TO_PROC(block_handler);
          default:
            VM_UNREACHABLE(rb_vm_bh_to_procval);
        }
    }
}

/* svar */
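/* "svar" (special variable storage) holds the per-frame state behind Ruby's
 * special variables: VM_SVAR_LASTLINE and VM_SVAR_BACKREF back $_ and $~,
 * and keys from VM_SVAR_EXTRA_START onward index the `others` array
 * (used e.g. for flip-flop state). */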

#if VM_CHECK_MODE > 0
static int
vm_svar_valid_p(VALUE svar)
{
    if (RB_TYPE_P((VALUE)svar, T_IMEMO)) {
        switch (imemo_type(svar)) {
          case imemo_svar:
          case imemo_cref:
          case imemo_ment:
            return TRUE;
          default:
            break;
        }
    }
    rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
    return FALSE;
}
#endif

static inline struct vm_svar *
lep_svar(const rb_execution_context_t *ec, const VALUE *lep)
{
    VALUE svar;

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
    }
    else {
        svar = ec->root_svar;
    }

    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));

    return (struct vm_svar *)svar;
}

static inline void
lep_svar_write(const rb_execution_context_t *ec, const VALUE *lep, const struct vm_svar *svar)
{
    VM_ASSERT(vm_svar_valid_p((VALUE)svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
    }
    else {
        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
    }
}

static VALUE
lep_svar_get(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key)
{
    const struct vm_svar *svar = lep_svar(ec, lep);

    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) return Qnil;

    switch (key) {
      case VM_SVAR_LASTLINE:
        return svar->lastline;
      case VM_SVAR_BACKREF:
        return svar->backref;
      default: {
        const VALUE ary = svar->others;

        if (NIL_P(ary)) {
            return Qnil;
        }
        else {
            return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
        }
      }
    }
}

static struct vm_svar *
svar_new(VALUE obj)
{
    return (struct vm_svar *)rb_imemo_new(imemo_svar, Qnil, Qnil, Qnil, obj);
}

static void
lep_svar_set(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, VALUE val)
{
    struct vm_svar *svar = lep_svar(ec, lep);

    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) {
        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));
    }

    switch (key) {
      case VM_SVAR_LASTLINE:
        RB_OBJ_WRITE(svar, &svar->lastline, val);
        return;
      case VM_SVAR_BACKREF:
        RB_OBJ_WRITE(svar, &svar->backref, val);
        return;
      default: {
        VALUE ary = svar->others;

        if (NIL_P(ary)) {
            RB_OBJ_WRITE(svar, &svar->others, ary = rb_ary_new());
        }
        rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
      }
    }
}

static inline VALUE
vm_getspecial(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, rb_num_t type)
{
    VALUE val;

    if (type == 0) {
        val = lep_svar_get(ec, lep, key);
    }
    else {
        VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

        if (type & 0x01) {
            switch (type >> 1) {
              case '&':
                val = rb_reg_last_match(backref);
                break;
              case '`':
                val = rb_reg_match_pre(backref);
                break;
              case '\'':
                val = rb_reg_match_post(backref);
                break;
              case '+':
                val = rb_reg_match_last(backref);
                break;
              default:
                rb_bug("unexpected back-ref");
            }
        }
        else {
            val = rb_reg_nth_match((int)(type >> 1), backref);
        }
    }
    return val;
}

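/* Encoding of the `type` operand shared by vm_getspecial() above and
 * vm_backref_defined() below: type == 0 reads a plain svar key; an odd type
 * selects a named backref by its character (type >> 1 is '&', '`', '\'' or
 * '+', i.e. $&, $`, $', $+); an even type selects capture group (type >> 1),
 * i.e. $1, $2, ... */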
static inline VALUE
vm_backref_defined(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t type)
{
    VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
    int nth = 0;

    if (type & 0x01) {
        switch (type >> 1) {
          case '&':
          case '`':
          case '\'':
            break;
          case '+':
            return rb_reg_last_defined(backref);
          default:
            rb_bug("unexpected back-ref");
        }
    }
    else {
        nth = (int)(type >> 1);
    }
    return rb_reg_nth_defined(nth, backref);
}

PUREFUNC(static rb_callable_method_entry_t *check_method_entry(VALUE obj, int can_be_svar));
static rb_callable_method_entry_t *
check_method_entry(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

#if VM_CHECK_MODE > 0
    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));
#endif

    switch (imemo_type(obj)) {
      case imemo_ment:
        return (rb_callable_method_entry_t *)obj;
      case imemo_cref:
        return NULL;
      case imemo_svar:
        if (can_be_svar) {
            return check_method_entry(((struct vm_svar *)obj)->cref_or_me, FALSE);
        }
      default:
#if VM_CHECK_MODE > 0
        rb_bug("check_method_entry: svar should not be there:");
#endif
        return NULL;
    }
}

const rb_callable_method_entry_t *
rb_vm_frame_method_entry(const rb_control_frame_t *cfp)
{
    const VALUE *ep = cfp->ep;
    rb_callable_method_entry_t *me;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}

static const rb_iseq_t *
method_entry_iseqptr(const rb_callable_method_entry_t *me)
{
    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.iseqptr;
      default:
        return NULL;
    }
}

static rb_cref_t *
method_entry_cref(const rb_callable_method_entry_t *me)
{
    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.cref;
      default:
        return NULL;
    }
}

#if VM_CHECK_MODE == 0
PUREFUNC(static rb_cref_t *check_cref(VALUE, int));
#endif
static rb_cref_t *
check_cref(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

#if VM_CHECK_MODE > 0
    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));
#endif

    switch (imemo_type(obj)) {
      case imemo_ment:
        return method_entry_cref((rb_callable_method_entry_t *)obj);
      case imemo_cref:
        return (rb_cref_t *)obj;
      case imemo_svar:
        if (can_be_svar) {
            return check_cref(((struct vm_svar *)obj)->cref_or_me, FALSE);
        }
      default:
#if VM_CHECK_MODE > 0
        rb_bug("check_cref: svar should not be there");
#endif
        return NULL;
    }
}

static inline rb_cref_t *
vm_env_cref(const VALUE *ep)
{
    rb_cref_t *cref;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}

static int
is_cref(const VALUE v, int can_be_svar)
{
    if (RB_TYPE_P(v, T_IMEMO)) {
        switch (imemo_type(v)) {
          case imemo_cref:
            return TRUE;
          case imemo_svar:
            if (can_be_svar) return is_cref(((struct vm_svar *)v)->cref_or_me, FALSE);
          default:
            break;
        }
    }
    return FALSE;
}

static int
vm_env_cref_by_cref(const VALUE *ep)
{
    while (!VM_ENV_LOCAL_P(ep)) {
        if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
        ep = VM_ENV_PREV_EP(ep);
    }
    return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}

static rb_cref_t *
cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
{
    const VALUE v = *vptr;
    rb_cref_t *cref, *new_cref;

    if (RB_TYPE_P(v, T_IMEMO)) {
        switch (imemo_type(v)) {
          case imemo_cref:
            cref = (rb_cref_t *)v;
            new_cref = vm_cref_dup(cref);
            if (parent) {
                RB_OBJ_WRITE(parent, vptr, new_cref);
            }
            else {
                VM_FORCE_WRITE(vptr, (VALUE)new_cref);
            }
            return (rb_cref_t *)new_cref;
          case imemo_svar:
            if (can_be_svar) {
                return cref_replace_with_duplicated_cref_each_frame(&((struct vm_svar *)v)->cref_or_me, FALSE, v);
            }
            /* fall through */
          case imemo_ment:
            rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");
          default:
            break;
        }
    }
    return NULL;
}

static rb_cref_t *
vm_cref_replace_with_duplicated_cref(const VALUE *ep)
{
    if (vm_env_cref_by_cref(ep)) {
        rb_cref_t *cref;
        VALUE envval;

        while (!VM_ENV_LOCAL_P(ep)) {
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
                return cref;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
    }
    else {
        rb_bug("vm_cref_replace_with_duplicated_cref: unreachable");
    }
}

static rb_cref_t *
vm_get_cref(const VALUE *ep)
{
    rb_cref_t *cref = vm_env_cref(ep);

    if (cref != NULL) {
        return cref;
    }
    else {
        rb_bug("vm_get_cref: unreachable");
    }
}

rb_cref_t *
rb_vm_get_cref(const VALUE *ep)
{
    return vm_get_cref(ep);
}

static rb_cref_t *
vm_ec_cref(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (cfp == NULL) {
        return NULL;
    }
    return vm_get_cref(cfp->ep);
}

static const rb_cref_t *
vm_get_const_key_cref(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);
    const rb_cref_t *key_cref = cref;

    while (cref) {
        if (FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
            RCLASS_EXT(CREF_CLASS(cref))->cloned) {
            return key_cref;
        }
        cref = CREF_NEXT(cref);
    }

    /* does not include singleton class */
    return NULL;
}

void
rb_vm_rewrite_cref(rb_cref_t *cref, VALUE old_klass, VALUE new_klass, rb_cref_t **new_cref_ptr)
{
    rb_cref_t *new_cref;

    while (cref) {
        if (CREF_CLASS(cref) == old_klass) {
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            *new_cref_ptr = new_cref;
            return;
        }
        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        *new_cref_ptr = new_cref;
        new_cref_ptr = &new_cref->next;
    }
    *new_cref_ptr = NULL;
}

static rb_cref_t *
vm_cref_push(const rb_execution_context_t *ec, VALUE klass, const VALUE *ep, int pushed_by_eval, int singleton)
{
    rb_cref_t *prev_cref = NULL;

    if (ep) {
        prev_cref = vm_env_cref(ep);
    }
    else {
        rb_control_frame_t *cfp = vm_get_ruby_level_caller_cfp(ec, ec->cfp);

        if (cfp) {
            prev_cref = vm_env_cref(cfp->ep);
        }
    }

    return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
}

static inline VALUE
vm_get_cbase(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

    return CREF_CLASS_FOR_DEFINITION(cref);
}

static inline VALUE
vm_get_const_base(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

    while (cref) {
        if (!CREF_PUSHED_BY_EVAL(cref)) {
            return CREF_CLASS_FOR_DEFINITION(cref);
        }
        cref = CREF_NEXT(cref);
    }

    return Qundef;
}

static inline void
vm_check_if_namespace(VALUE klass)
{
    if (!RB_TYPE_P(klass, T_CLASS) && !RB_TYPE_P(klass, T_MODULE)) {
        rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);
    }
}

static inline void
vm_ensure_not_refinement_module(VALUE self)
{
    if (RB_TYPE_P(self, T_MODULE) && FL_TEST(self, RMODULE_IS_REFINEMENT)) {
        rb_warn("not defined at the refinement, but at the outer class/module");
    }
}

static inline VALUE
vm_get_iclass(const rb_control_frame_t *cfp, VALUE klass)
{
    return klass;
}

static inline VALUE
vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, bool allow_nil, int is_defined)
{
    void rb_const_warn_if_deprecated(const rb_const_entry_t *ce, VALUE klass, ID id);
    VALUE val;

    if (NIL_P(orig_klass) && allow_nil) {
        /* in current lexical scope */
        const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
        const rb_cref_t *cref;
        VALUE klass = Qnil;

        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);
        }
        cref = root_cref;
        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {
                klass = Qnil;
            }
            else {
                klass = CREF_CLASS(cref);
            }
            cref = CREF_NEXT(cref);

            if (!NIL_P(klass)) {
                VALUE av, am = 0;
                rb_const_entry_t *ce;
              search_continue:
                if ((ce = rb_const_lookup(klass, id))) {
                    rb_const_warn_if_deprecated(ce, klass, id);
                    val = ce->value;
                    if (UNDEF_P(val)) {
                        if (am == klass) break;
                        am = klass;
                        if (is_defined) return 1;
                        if (rb_autoloading_value(klass, id, &av, NULL)) return av;
                        rb_autoload_load(klass, id);
                        goto search_continue;
                    }
                    else {
                        if (is_defined) {
                            return 1;
                        }
                        else {
                            if (UNLIKELY(!rb_ractor_main_p())) {
                                if (!rb_ractor_shareable_p(val)) {
                                    rb_raise(rb_eRactorIsolationError,
                                             "can not access non-shareable objects in constant %"PRIsVALUE"::%s by non-main ractor.", rb_class_path(klass), rb_id2name(id));
                                }
                            }
                            return val;
                        }
                    }
                }
            }
        }

        /* search self */
        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
        }
        else {
            klass = CLASS_OF(ec->cfp->self);
        }

        if (is_defined) {
            return rb_const_defined(klass, id);
        }
        else {
            return rb_const_get(klass, id);
        }
    }
    else {
        vm_check_if_namespace(orig_klass);
        if (is_defined) {
            return rb_public_const_defined_from(orig_klass, id);
        }
        else {
            return rb_public_const_get_from(orig_klass, id);
        }
    }
}

VALUE
rb_vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, VALUE allow_nil)
{
    return vm_get_ev_const(ec, orig_klass, id, allow_nil == Qtrue, 0);
}

static inline VALUE
vm_get_ev_const_chain(rb_execution_context_t *ec, const ID *segments)
{
    VALUE val = Qnil;
    int idx = 0;
    int allow_nil = TRUE;
    if (segments[0] == idNULL) {
        val = rb_cObject;
        idx++;
        allow_nil = FALSE;
    }
    while (segments[idx]) {
        ID id = segments[idx++];
        val = vm_get_ev_const(ec, val, id, allow_nil, 0);
        allow_nil = FALSE;
    }
    return val;
}

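/* vm_get_ev_const_chain() above resolves a constant path such as A::B::C
 * from a zero-terminated list of segment IDs; a leading idNULL segment
 * marks an absolute path (::A), which starts the search at Object and
 * disallows a nil base for every subsequent segment. */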

static inline VALUE
vm_get_cvar_base(const rb_cref_t *cref, const rb_control_frame_t *cfp, int top_level_raise)
{
    VALUE klass;

    if (!cref) {
        rb_bug("vm_get_cvar_base: no cref");
    }

    while (CREF_NEXT(cref) &&
           (NIL_P(CREF_CLASS(cref)) || FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
            CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
        cref = CREF_NEXT(cref);
    }
    if (top_level_raise && !CREF_NEXT(cref)) {
        rb_raise(rb_eRuntimeError, "class variable access from toplevel");
    }

    klass = vm_get_iclass(cfp, CREF_CLASS(cref));

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class variables available");
    }
    return klass;
}

ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id));
static inline void
fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id)
{
    if (is_attr) {
        vm_cc_attr_index_set(cc, index, shape_id);
    }
    else {
        vm_ic_attr_index_set(iseq, ic, index, shape_id);
    }
}

#define ractor_incidental_shareable_p(cond, val) \
    (!(cond) || rb_ractor_shareable_p(val))
#define ractor_object_incidental_shareable_p(obj, val) \
    ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)

#define ATTR_INDEX_NOT_SET (attr_index_t)-1

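/* Shape-based instance variable caching: the inline cache (IVC for
 * get/setinstancevariable, call cache for attr_reader/attr_writer) stores a
 * (shape_id, attr_index) pair. While the receiver keeps the cached shape,
 * reading the ivar is a single indexed load from its ivar array; any shape
 * mismatch falls through to the lookup below, which refills the cache. */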
ALWAYS_INLINE(static VALUE vm_getivar(VALUE, ID, const rb_iseq_t *, IVC, const struct rb_callcache *, int, VALUE));
static inline VALUE
vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, VALUE default_value)
{
#if OPT_IC_FOR_IVAR
    VALUE val = Qundef;
    shape_id_t shape_id;
    VALUE *ivar_list;

    if (SPECIAL_CONST_P(obj)) {
        return default_value;
    }

#if SHAPE_IN_BASIC_FLAGS
    shape_id = RBASIC_SHAPE_ID(obj);
#endif

    switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
        ivar_list = ROBJECT_IVPTR(obj);
        VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);

#if !SHAPE_IN_BASIC_FLAGS
        shape_id = ROBJECT_SHAPE_ID(obj);
#endif
        break;
      case T_CLASS:
      case T_MODULE:
        {
            if (UNLIKELY(!rb_ractor_main_p())) {
                // For two reasons we can only use the fast path on the main
                // ractor.
                // First, only the main ractor is allowed to set ivars on classes
                // and modules. So we can skip locking.
                // Second, other ractors need to check the shareability of the
                // values returned from the class ivars.
                goto general_path;
            }

            ivar_list = RCLASS_IVPTR(obj);

#if !SHAPE_IN_BASIC_FLAGS
            shape_id = RCLASS_SHAPE_ID(obj);
#endif

            break;
        }
      default:
        if (FL_TEST_RAW(obj, FL_EXIVAR)) {
            struct gen_ivtbl *ivtbl;
            rb_gen_ivtbl_get(obj, id, &ivtbl);
#if !SHAPE_IN_BASIC_FLAGS
            shape_id = ivtbl->shape_id;
#endif
            ivar_list = ivtbl->as.shape.ivptr;
        }
        else {
            return default_value;
        }
    }

    shape_id_t cached_id;
    attr_index_t index;

    if (is_attr) {
        vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
    }
    else {
        vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
    }

    if (LIKELY(cached_id == shape_id)) {
        RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);

        if (index == ATTR_INDEX_NOT_SET) {
            return default_value;
        }

        val = ivar_list[index];
#if USE_DEBUG_COUNTER
        RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);

        if (RB_TYPE_P(obj, T_OBJECT)) {
            RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
        }
#endif
        RUBY_ASSERT(!UNDEF_P(val));
    }
    else { // cache miss case
#if USE_DEBUG_COUNTER
        if (is_attr) {
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
            }
            else {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
            }
        }
        else {
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
            }
            else {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
            }
        }
        RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

        if (RB_TYPE_P(obj, T_OBJECT)) {
            RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
        }
#endif

        if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
            st_table *table = NULL;
            switch (BUILTIN_TYPE(obj)) {
              case T_CLASS:
              case T_MODULE:
                table = (st_table *)RCLASS_IVPTR(obj);
                break;

              case T_OBJECT:
                table = ROBJECT_IV_HASH(obj);
                break;

              default: {
                struct gen_ivtbl *ivtbl;
                if (rb_gen_ivtbl_get(obj, 0, &ivtbl)) {
                    table = ivtbl->as.complex.table;
                }
                break;
              }
            }

            if (!table || !st_lookup(table, id, &val)) {
                val = default_value;
            }
        }
        else {
            shape_id_t previous_cached_id = cached_id;
            if (rb_shape_get_iv_index_with_hint(shape_id, id, &index, &cached_id)) {
                // This fills in the cache with the shared cache object.
                // "ent" is the shared cache object
                if (cached_id != previous_cached_id) {
                    fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
                }

                if (index == ATTR_INDEX_NOT_SET) {
                    val = default_value;
                }
                else {
                    // We fetched the ivar list above
                    val = ivar_list[index];
                    RUBY_ASSERT(!UNDEF_P(val));
                }
            }
            else {
                if (is_attr) {
                    vm_cc_attr_index_initialize(cc, shape_id);
                }
                else {
                    vm_ic_attr_index_initialize(ic, shape_id);
                }

                val = default_value;
            }
        }

    }

    if (default_value != Qundef) {
        RUBY_ASSERT(!UNDEF_P(val));
    }

    return val;

  general_path:
#endif /* OPT_IC_FOR_IVAR */
    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

    if (is_attr) {
        return rb_attr_get(obj, id);
    }
    else {
        return rb_ivar_get(obj, id);
    }
}

static void
populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, bool is_attr)
{
    RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);

    // Cache population code
    if (is_attr) {
        vm_cc_attr_index_set(cc, index, next_shape_id);
    }
    else {
        vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
    }
}

ALWAYS_INLINE(static VALUE vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr));
NOINLINE(static VALUE vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic));
NOINLINE(static VALUE vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc));

static VALUE
vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
{
#if OPT_IC_FOR_IVAR
    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);

    if (BUILTIN_TYPE(obj) == T_OBJECT) {
        rb_check_frozen(obj);

        attr_index_t index = rb_obj_ivar_set(obj, id, val);

        shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);

        if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
            populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
        }

        RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
        return val;
    }
#endif
    return rb_ivar_set(obj, id, val);
}

static VALUE
vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic)
{
    return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);
}

static VALUE
vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc)
{
    return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);
}

NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));
static VALUE
vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
#if SHAPE_IN_BASIC_FLAGS
    shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
#else
    shape_id_t shape_id = rb_generic_shape_id(obj);
#endif

    struct gen_ivtbl *ivtbl = 0;

    // Cache hit case
    if (shape_id == dest_shape_id) {
        RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
    }
    else if (dest_shape_id != INVALID_SHAPE_ID) {
        rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
        rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);

        if (shape_id == dest_shape->parent_id && dest_shape->edge_name == id && shape->capacity == dest_shape->capacity) {
            RUBY_ASSERT(index < dest_shape->capacity);
        }
        else {
            return Qundef;
        }
    }
    else {
        return Qundef;
    }

    rb_gen_ivtbl_get(obj, 0, &ivtbl);

    if (shape_id != dest_shape_id) {
#if SHAPE_IN_BASIC_FLAGS
        RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
#else
        ivtbl->shape_id = dest_shape_id;
#endif
    }

    RB_OBJ_WRITE(obj, &ivtbl->as.shape.ivptr[index], val);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);

    return val;
}

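/* vm_setivar() below hits its cache in two ways: the object already has
 * dest_shape_id (the ivar exists, so a plain indexed store suffices), or
 * the cached destination's parent is the current shape and its edge name
 * matches the id (a cached shape transition: store the value, then flip the
 * object's shape). Everything else returns Qundef so the caller can take a
 * slower path. */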
static inline VALUE
vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
#if OPT_IC_FOR_IVAR
    switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
        {
            VM_ASSERT(!rb_ractor_shareable_p(obj) || rb_obj_frozen_p(obj));

            shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
            RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);

            if (LIKELY(shape_id == dest_shape_id)) {
                RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
                VM_ASSERT(!rb_ractor_shareable_p(obj));
            }
            else if (dest_shape_id != INVALID_SHAPE_ID) {
                rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
                rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
                shape_id_t source_shape_id = dest_shape->parent_id;

                if (shape_id == source_shape_id && dest_shape->edge_name == id && shape->capacity == dest_shape->capacity) {
                    RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);

                    ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);

                    RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id), id) == dest_shape);
                    RUBY_ASSERT(index < dest_shape->capacity);
                }
                else {
                    break;
                }
            }
            else {
                break;
            }

            VALUE *ptr = ROBJECT_IVPTR(obj);

            RUBY_ASSERT(!rb_shape_obj_too_complex(obj));
            RB_OBJ_WRITE(obj, &ptr[index], val);

            RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
            RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);
            return val;
        }
        break;
      case T_CLASS:
      case T_MODULE:
        RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
      default:
        break;
    }

    return Qundef;
#endif /* OPT_IC_FOR_IVAR */
}

static VALUE
update_classvariable_cache(const rb_iseq_t *iseq, VALUE klass, ID id, const rb_cref_t *cref, ICVARC ic)
{
    VALUE defined_class = 0;
    VALUE cvar_value = rb_cvar_find(klass, id, &defined_class);

    if (RB_TYPE_P(defined_class, T_ICLASS)) {
        defined_class = RBASIC(defined_class)->klass;
    }

    struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
    if (!rb_cvc_tbl) {
        rb_bug("the cvc table should be set");
    }

    VALUE ent_data;
    if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
        rb_bug("should have cvar cache entry");
    }

    struct rb_cvar_class_tbl_entry *ent = (void *)ent_data;

    ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
    ent->cref = cref;
    ic->entry = ent;

    RUBY_ASSERT(BUILTIN_TYPE((VALUE)cref) == T_IMEMO && IMEMO_TYPE_P(cref, imemo_cref));
    RB_OBJ_WRITTEN(iseq, Qundef, ent->cref);
    RB_OBJ_WRITTEN(iseq, Qundef, ent->class_value);
    RB_OBJ_WRITTEN(ent->class_value, Qundef, ent->cref);

    return cvar_value;
}

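/* The class variable inline cache below is valid while the global cvar
 * state is unchanged, the cref is the same and execution stays on the main
 * ractor; any miss falls back to vm_get_cvar_base() and refills the cache
 * via update_classvariable_cache() above. */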
static inline VALUE
vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *reg_cfp, ID id, ICVARC ic)
{
    const rb_cref_t *cref;
    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);

        VALUE v = rb_ivar_lookup(ic->entry->class_value, id, Qundef);
        RUBY_ASSERT(!UNDEF_P(v));

        return v;
    }

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    return update_classvariable_cache(iseq, klass, id, cref, ic);
}

VALUE
rb_vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, ICVARC ic)
{
    return vm_getclassvariable(iseq, cfp, id, ic);
}

static inline void
vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *reg_cfp, ID id, VALUE val, ICVARC ic)
{
    const rb_cref_t *cref;
    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);

        rb_class_ivar_set(ic->entry->class_value, id, val);
        return;
    }

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    rb_cvar_set(klass, id, val);

    update_classvariable_cache(iseq, klass, id, cref, ic);
}

void
rb_vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, VALUE val, ICVARC ic)
{
    vm_setclassvariable(iseq, cfp, id, val, ic);
}

static inline VALUE
vm_getinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, IVC ic)
{
    return vm_getivar(obj, id, iseq, ic, NULL, FALSE, Qnil);
}

static inline void
vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
{
    if (RB_SPECIAL_CONST_P(obj)) {
        rb_error_frozen_object(obj);
        return;
    }

    shape_id_t dest_shape_id;
    attr_index_t index;
    vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);

    if (UNLIKELY(UNDEF_P(vm_setivar(obj, id, val, dest_shape_id, index)))) {
        switch (BUILTIN_TYPE(obj)) {
          case T_OBJECT:
          case T_CLASS:
          case T_MODULE:
            break;
          default:
            if (!UNDEF_P(vm_setivar_default(obj, id, val, dest_shape_id, index))) {
                return;
            }
        }
        vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);
    }
}

void
rb_vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
{
    vm_setinstancevariable(iseq, obj, id, val, ic);
}

static VALUE
vm_throw_continue(const rb_execution_context_t *ec, VALUE err)
{
    /* continue throw */

    if (FIXNUM_P(err)) {
        ec->tag->state = RUBY_TAG_FATAL;
    }
    else if (SYMBOL_P(err)) {
        ec->tag->state = TAG_THROW;
    }
    else if (THROW_DATA_P(err)) {
        ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);
    }
    else {
        ec->tag->state = TAG_RAISE;
    }
    return err;
}

static VALUE
vm_throw_start(const rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, enum ruby_tag_type state,
               const int flag, const VALUE throwobj)
{
    const rb_control_frame_t *escape_cfp = NULL;
    const rb_control_frame_t * const eocfp = RUBY_VM_END_CONTROL_FRAME(ec); /* end of control frame pointer */

    if (flag != 0) {
        /* do nothing */
    }
    else if (state == TAG_BREAK) {
        int is_orphan = 1;
        const VALUE *ep = GET_EP();
        const rb_iseq_t *base_iseq = GET_ISEQ();
        escape_cfp = reg_cfp;

        while (ISEQ_BODY(base_iseq)->type != ISEQ_TYPE_BLOCK) {
            if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;
            }
            else {
                ep = VM_ENV_PREV_EP(ep);
                base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
                escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
                VM_ASSERT(escape_cfp->iseq == base_iseq);
            }
        }

        if (VM_FRAME_LAMBDA_P(escape_cfp)) {
            /* lambda{... break ...} */
            is_orphan = 0;
            state = TAG_RETURN;
        }
        else {
            ep = VM_ENV_PREV_EP(ep);

            while (escape_cfp < eocfp) {
                if (escape_cfp->ep == ep) {
                    const rb_iseq_t *const iseq = escape_cfp->iseq;
                    const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
                    const struct iseq_catch_table *const ct = ISEQ_BODY(iseq)->catch_table;
                    unsigned int i;

                    if (!ct) break;
                    for (i = 0; i < ct->size; i++) {
                        const struct iseq_catch_table_entry *const entry =
                            UNALIGNED_MEMBER_PTR(ct, entries[i]);

                        if (entry->type == CATCH_TYPE_BREAK &&
                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) { /* found! */
                                is_orphan = 0;
                            }
                            break;
                        }
                    }
                    break;
                }

                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
            }
        }

        if (is_orphan) {
            rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
        }
    }
    else if (state == TAG_RETRY) {
        const VALUE *ep = VM_ENV_PREV_EP(GET_EP());

        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
    }
    else if (state == TAG_RETURN) {
        const VALUE *current_ep = GET_EP();
        const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
        int in_class_frame = 0;
        int toplevel = 1;
        escape_cfp = reg_cfp;

        // find target_lep, target_ep
        while (!VM_ENV_LOCAL_P(ep)) {
            if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
                target_ep = ep;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        target_lep = ep;

        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);

            if (!target_lep) {
                target_lep = lep;
            }

            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&
                ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                in_class_frame = 1;
                target_lep = 0;
            }

            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {
                    toplevel = 0;
                    if (in_class_frame) {
                        /* lambda {class A; ... return ...; end} */
                        goto valid_return;
                    }
                    else {
                        const VALUE *tep = current_ep;

                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {
                                /* in lambda */
                                if (tep == target_ep) {
                                    goto valid_return;
                                }
                                else {
                                    goto unexpected_return;
                                }
                            }
                            tep = VM_ENV_PREV_EP(tep);
                        }
                    }
                }
                else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
                    switch (ISEQ_BODY(escape_cfp->iseq)->type) {
                      case ISEQ_TYPE_TOP:
                      case ISEQ_TYPE_MAIN:
                        if (toplevel) {
                            if (in_class_frame) goto unexpected_return;
                            if (target_ep == NULL) {
                                goto valid_return;
                            }
                            else {
                                goto unexpected_return;
                            }
                        }
                        break;
                      case ISEQ_TYPE_EVAL: {
                        const rb_iseq_t *is = escape_cfp->iseq;
                        enum rb_iseq_type t = ISEQ_BODY(is)->type;
                        while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
                            if (!(is = ISEQ_BODY(is)->parent_iseq)) break;
                            t = ISEQ_BODY(is)->type;
                        }
                        toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
                        break;
                      }
                      case ISEQ_TYPE_CLASS:
                        toplevel = 0;
                        break;
                      default:
                        break;
                    }
                }
            }

            if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
                if (target_ep == NULL) {
                    goto valid_return;
                }
                else {
                    goto unexpected_return;
                }
            }

            escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
        }
      unexpected_return:;
        rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);

      valid_return:;
        /* do nothing */
    }
    else {
        rb_bug("insns(throw): unsupported throw type");
    }

    ec->tag->state = state;
    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
}

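/* The `throw_state` operand below packs the tag state (TAG_BREAK,
 * TAG_RETURN, ...) into VM_THROW_STATE_MASK plus an optional
 * VM_THROW_NO_ESCAPE_FLAG; state == 0 means an in-flight error object is
 * being re-thrown and is handled by vm_throw_continue() instead of
 * vm_throw_start(). */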
static VALUE
vm_throw(const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
         rb_num_t throw_state, VALUE throwobj)
{
    const int state = (int)(throw_state & VM_THROW_STATE_MASK);
    const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);

    if (state != 0) {
        return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
    }
    else {
        return vm_throw_continue(ec, throwobj);
    }
}

VALUE
rb_vm_throw(const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t throw_state, VALUE throwobj)
{
    return vm_throw(ec, reg_cfp, throw_state, throwobj);
}

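/* vm_expandarray() below implements multiple assignment, pushing elements
 * so that the last-assigned target ends up on top of the stack. For
 * example (illustrative Ruby):
 *   a, b  = ary   # num = 2, flag = 0
 *   a, *b = ary   # num = 1, flag & 0x01 (splat collects the rest)
 *   *a, b = ary   # num = 1, flag & 0x02 (post args taken from the end)
 * Missing elements become nil; a non-Array RHS is coerced with to_ary or
 * treated as a one-element array. */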
static inline void
vm_expandarray(struct rb_control_frame_struct *cfp, VALUE ary, rb_num_t num, int flag)
{
    int is_splat = flag & 0x01;
    const VALUE *ptr;
    rb_num_t len;
    const VALUE obj = ary;

    if (!RB_TYPE_P(ary, T_ARRAY) && NIL_P(ary = rb_check_array_type(ary))) {
        ary = obj;
        ptr = &ary;
        len = 1;
    }
    else {
        ptr = RARRAY_CONST_PTR(ary);
        len = (rb_num_t)RARRAY_LEN(ary);
    }

    if (num + is_splat == 0) {
        /* no space left on stack */
    }
    else if (flag & 0x02) {
        /* post: ..., nil, ary[-1], ..., ary[0..-num] # top */
        rb_num_t i = 0, j;

        if (len < num) {
            for (i = 0; i < num - len; i++) {
                *cfp->sp++ = Qnil;
            }
        }

        for (j = 0; i < num; i++, j++) {
            VALUE v = ptr[len - j - 1];
            *cfp->sp++ = v;
        }

        if (is_splat) {
            *cfp->sp++ = rb_ary_new4(len - j, ptr);
        }
    }
    else {
        /* normal: ary[num..-1], ary[num-2], ary[num-3], ..., ary[0] # top */
        if (is_splat) {
            if (num > len) {
                *cfp->sp++ = rb_ary_new();
            }
            else {
                *cfp->sp++ = rb_ary_new4(len - num, ptr + num);
            }
        }

        if (num > len) {
            rb_num_t i = 0;
            for (; i < num - len; i++) {
                *cfp->sp++ = Qnil;
            }

            for (rb_num_t j = 0; i < num; i++, j++) {
                *cfp->sp++ = ptr[len - j - 1];
            }
        }
        else {
            for (rb_num_t j = 0; j < num; j++) {
                *cfp->sp++ = ptr[num - j - 1];
            }
        }
    }

    RB_GC_GUARD(ary);
}

static VALUE vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);

static VALUE vm_mtbl_dump(VALUE klass, ID target_mid);

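/* rb_class_cc_entries ("ccs") is the per-class method cache bucket: one per
 * (class, method id), holding the callable method entry plus a growable
 * array of (callinfo, callcache) pairs; vm_ccs_push() below doubles its
 * capacity starting from 1. */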
1964static struct rb_class_cc_entries *
1965vm_ccs_create(VALUE klass, struct rb_id_table *cc_tbl, ID mid, const rb_callable_method_entry_t *cme)
1966{
1967 struct rb_class_cc_entries *ccs = ALLOC(struct rb_class_cc_entries);
1968#if VM_CHECK_MODE > 0
1969 ccs->debug_sig = ~(VALUE)ccs;
1970#endif
1971 ccs->capa = 0;
1972 ccs->len = 0;
1973 ccs->cme = cme;
1974 METHOD_ENTRY_CACHED_SET((rb_callable_method_entry_t *)cme);
1975 ccs->entries = NULL;
1976
1977 rb_id_table_insert(cc_tbl, mid, (VALUE)ccs);
1978 RB_OBJ_WRITTEN(klass, Qundef, cme);
1979 return ccs;
1980}
1981
1982static void
1983vm_ccs_push(VALUE klass, struct rb_class_cc_entries *ccs, const struct rb_callinfo *ci, const struct rb_callcache *cc)
1984{
1985 if (! vm_cc_markable(cc)) {
1986 return;
1987 }
1988 else if (! vm_ci_markable(ci)) {
1989 return;
1990 }
1991
1992 if (UNLIKELY(ccs->len == ccs->capa)) {
1993 if (ccs->capa == 0) {
1994 ccs->capa = 1;
1995 ccs->entries = ALLOC_N(struct rb_class_cc_entries_entry, ccs->capa);
1996 }
1997 else {
1998 ccs->capa *= 2;
1999 REALLOC_N(ccs->entries, struct rb_class_cc_entries_entry, ccs->capa);
2000 }
2001 }
2002 VM_ASSERT(ccs->len < ccs->capa);
2003
2004 const int pos = ccs->len++;
2005 RB_OBJ_WRITE(klass, &ccs->entries[pos].ci, ci);
2006 RB_OBJ_WRITE(klass, &ccs->entries[pos].cc, cc);
2007
2008 if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
2009 // for tuning
2010 // vm_mtbl_dump(klass, 0);
2011 }
2012}
2013
2014#if VM_CHECK_MODE > 0
2015void
2016rb_vm_ccs_dump(struct rb_class_cc_entries *ccs)
2017{
2018 ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
2019 for (int i=0; i<ccs->len; i++) {
2020 vm_ci_dump(ccs->entries[i].ci);
2021 rp(ccs->entries[i].cc);
2022 }
2023}
2024
2025static int
2026vm_ccs_verify(struct rb_class_cc_entries *ccs, ID mid, VALUE klass)
2027{
2028 VM_ASSERT(vm_ccs_p(ccs));
2029 VM_ASSERT(ccs->len <= ccs->capa);
2030
2031 for (int i=0; i<ccs->len; i++) {
2032 const struct rb_callinfo *ci = ccs->entries[i].ci;
2033 const struct rb_callcache *cc = ccs->entries[i].cc;
2034
2035 VM_ASSERT(vm_ci_p(ci));
2036 VM_ASSERT(vm_ci_mid(ci) == mid);
2037 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2038 VM_ASSERT(vm_cc_class_check(cc, klass));
2039 VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
2040 VM_ASSERT(!vm_cc_super_p(cc));
2041 VM_ASSERT(!vm_cc_refinement_p(cc));
2042 }
2043 return TRUE;
2044}
2045#endif
2046
2047static const rb_callable_method_entry_t *check_overloaded_cme(const rb_callable_method_entry_t *cme, const struct rb_callinfo * const ci);
2048
2049static const struct rb_callcache *
2050vm_search_cc(const VALUE klass, const struct rb_callinfo * const ci)
2051{
2052 const ID mid = vm_ci_mid(ci);
2053 struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
2054 struct rb_class_cc_entries *ccs = NULL;
2055 VALUE ccs_data;
2056
2057 if (cc_tbl) {
2058 if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
2059 ccs = (struct rb_class_cc_entries *)ccs_data;
2060 const int ccs_len = ccs->len;
2061
2062 if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
2063 rb_vm_ccs_free(ccs);
2064 rb_id_table_delete(cc_tbl, mid);
2065 ccs = NULL;
2066 }
2067 else {
2068 VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
2069
2070 for (int i=0; i<ccs_len; i++) {
2071 const struct rb_callinfo *ccs_ci = ccs->entries[i].ci;
2072 const struct rb_callcache *ccs_cc = ccs->entries[i].cc;
2073
2074 VM_ASSERT(vm_ci_p(ccs_ci));
2075 VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));
2076
2077 if (ccs_ci == ci) { // TODO: equality
2078 RB_DEBUG_COUNTER_INC(cc_found_in_ccs);
2079
2080 VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
2081 VM_ASSERT(ccs_cc->klass == klass);
2082 VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
2083
2084 return ccs_cc;
2085 }
2086 }
2087 }
2088 }
2089 }
2090 else {
2091 cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
2092 }
2093
2094 RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);
2095
2096 const rb_callable_method_entry_t *cme;
2097
2098 if (ccs) {
2099 cme = ccs->cme;
2100 cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;
2101
2102 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2103 }
2104 else {
2105 cme = rb_callable_method_entry(klass, mid);
2106 }
2107
2108 VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));
2109
2110 if (cme == NULL) {
2111 // undef or not found: can't cache the information
2112 VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
2113 return &vm_empty_cc;
2114 }
2115
2116 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2117
2118 METHOD_ENTRY_CACHED_SET((struct rb_callable_method_entry_struct *)cme);
2119
2120 if (ccs == NULL) {
2121 VM_ASSERT(cc_tbl != NULL);
2122
2123 if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
2124 // rb_callable_method_entry() prepares ccs.
2125 ccs = (struct rb_class_cc_entries *)ccs_data;
2126 }
2127 else {
2128 // TODO: required?
2129 ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
2130 }
2131 }
2132
2133 cme = check_overloaded_cme(cme, ci);
2134
2135 const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
2136 vm_ccs_push(klass, ccs, ci, cc);
2137
2138 VM_ASSERT(vm_cc_cme(cc) != NULL);
2139 VM_ASSERT(cme->called_id == mid);
2140 VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
2141
2142 return cc;
2143}
2144
2145const struct rb_callcache *
2146rb_vm_search_method_slowpath(const struct rb_callinfo *ci, VALUE klass)
2147{
2148 const struct rb_callcache *cc;
2149
2150 VM_ASSERT(RB_TYPE_P(klass, T_CLASS) || RB_TYPE_P(klass, T_ICLASS));
2151
2152 RB_VM_LOCK_ENTER();
2153 {
2154 cc = vm_search_cc(klass, ci);
2155
2156 VM_ASSERT(cc);
2157 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2158 VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
2159 VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
2160 VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
2161 VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
2162 }
2163 RB_VM_LOCK_LEAVE();
2164
2165 return cc;
2166}
2167
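/* Fill the per-call-site inline cache (cd->cc) with the result of the
 * global search. cd_owner is the iseq that owns cd, so the write barrier
 * (RB_OBJ_WRITTEN) can record the new cc reference for the GC. */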
2168static const struct rb_callcache *
2169vm_search_method_slowpath0(VALUE cd_owner, struct rb_call_data *cd, VALUE klass)
2170{
2171#if USE_DEBUG_COUNTER
2172 const struct rb_callcache *old_cc = cd->cc;
2173#endif
2174
2175 const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);
2176
2177#if OPT_INLINE_METHOD_CACHE
2178 cd->cc = cc;
2179
2180 const struct rb_callcache *empty_cc = &vm_empty_cc;
2181 if (cd_owner && cc != empty_cc) {
2182 RB_OBJ_WRITTEN(cd_owner, Qundef, cc);
2183 }
2184
2185#if USE_DEBUG_COUNTER
2186 if (old_cc == empty_cc) {
2187 // empty
2188 RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
2189 }
2190 else if (old_cc == cc) {
2191 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
2192 }
2193 else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
2194 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
2195 }
2196 else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
2197 vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
2198 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
2199 }
2200 else {
2201 RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
2202 }
2203#endif
2204#endif // OPT_INLINE_METHOD_CACHE
2205
2206 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2207 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2208
2209 return cc;
2210}
2211
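/* Inline-cache fast path: cd->cc is a hit when the cached class matches
 * the receiver's class and the cached method entry has not been
 * invalidated (e.g. by method redefinition); otherwise fall back to
 * vm_search_method_slowpath0(). */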
2212ALWAYS_INLINE(static const struct rb_callcache *vm_search_method_fastpath(VALUE cd_owner, struct rb_call_data *cd, VALUE klass));
2213static const struct rb_callcache *
2214vm_search_method_fastpath(VALUE cd_owner, struct rb_call_data *cd, VALUE klass)
2215{
2216 const struct rb_callcache *cc = cd->cc;
2217
2218#if OPT_INLINE_METHOD_CACHE
2219 if (LIKELY(vm_cc_class_check(cc, klass))) {
2220 if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
2221 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
2222 RB_DEBUG_COUNTER_INC(mc_inline_hit);
2223 VM_ASSERT(vm_cc_cme(cc) == NULL || // not found
2224 (vm_ci_flag(cd->ci) & VM_CALL_SUPER) || // search_super w/ define_method
2225 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci)); // cme->called_id == ci->mid
2226
2227 return cc;
2228 }
2229 RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
2230 }
2231 else {
2232 RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
2233 }
2234#endif
2235
2236 return vm_search_method_slowpath0(cd_owner, cd, klass);
2237}
2238
2239static const struct rb_callcache *
2240vm_search_method(VALUE cd_owner, struct rb_call_data *cd, VALUE recv)
2241{
2242 VALUE klass = CLASS_OF(recv);
2243 VM_ASSERT(klass != Qfalse);
2244 VM_ASSERT(RBASIC_CLASS(klass) == 0 || rb_obj_is_kind_of(klass, rb_cClass));
2245
2246 return vm_search_method_fastpath(cd_owner, cd, klass);
2247}
2248
2249#if __has_attribute(transparent_union)
2250typedef union {
2251 VALUE (*anyargs)(ANYARGS);
2252 VALUE (*f00)(VALUE);
2253 VALUE (*f01)(VALUE, VALUE);
2254 VALUE (*f02)(VALUE, VALUE, VALUE);
2255 VALUE (*f03)(VALUE, VALUE, VALUE, VALUE);
2256 VALUE (*f04)(VALUE, VALUE, VALUE, VALUE, VALUE);
2257 VALUE (*f05)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2258 VALUE (*f06)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2259 VALUE (*f07)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2260 VALUE (*f08)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2261 VALUE (*f09)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2262 VALUE (*f10)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2263 VALUE (*f11)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2264 VALUE (*f12)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2265 VALUE (*f13)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2266 VALUE (*f14)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2267 VALUE (*f15)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2268 VALUE (*fm1)(int, union { VALUE *x; const VALUE *y; } __attribute__((__transparent_union__)), VALUE);
2269} __attribute__((__transparent_union__)) cfunc_type;
2270#else
2271typedef VALUE (*cfunc_type)(ANYARGS);
2272#endif
2273
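/* check_cfunc() tests whether `me` is a C-function method whose
 * implementation is exactly `func`. The transparent union above lets
 * callers pass properly typed function pointers (arity 0..15 and -1)
 * without casts; the comparison itself goes through the .anyargs member. */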
2274static inline int
2275check_cfunc(const rb_callable_method_entry_t *me, cfunc_type func)
2276{
2277 if (! me) {
2278 return false;
2279 }
2280 else {
2281 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2282 VM_ASSERT(callable_method_entry_p(me));
2283 VM_ASSERT(me->def);
2284 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
2285 return false;
2286 }
2287 else {
2288#if __has_attribute(transparent_union)
2289 return me->def->body.cfunc.func == func.anyargs;
2290#else
2291 return me->def->body.cfunc.func == func;
2292#endif
2293 }
2294 }
2295}
2296
2297static inline int
2298vm_method_cfunc_is(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv, cfunc_type func)
2299{
2300 VM_ASSERT(iseq != NULL);
2301 const struct rb_callcache *cc = vm_search_method((VALUE)iseq, cd, recv);
2302 return check_cfunc(vm_cc_cme(cc), func);
2303}
2304
2305#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2306
2307static inline bool
2308FIXNUM_2_P(VALUE a, VALUE b)
2309{
2310 /* FIXNUM_P(a) && FIXNUM_P(b)
2311 * == ((a & 1) && (b & 1))
2312 * == a & b & 1 */
2313 SIGNED_VALUE x = a;
2314 SIGNED_VALUE y = b;
2315 SIGNED_VALUE z = x & y & 1;
2316 return z == 1;
2317}
2318
2319static inline bool
2320FLONUM_2_P(VALUE a, VALUE b)
2321{
2322#if USE_FLONUM
2323 /* FLONUM_P(a) && FLONUM_P(b)
2324 * == ((a & 3) == 2) && ((b & 3) == 2)
2325 * == !(((a ^ 2) | (b ^ 2)) & 3)
2326 */
2327 SIGNED_VALUE x = a;
2328 SIGNED_VALUE y = b;
2329 SIGNED_VALUE z = ((x ^ 2) | (y ^ 2)) & 3;
2330 return !z;
2331#else
2332 return false;
2333#endif
2334}
2335
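/* Try cheap equality checks for common operand types (Fixnum, Flonum,
 * static Symbol, Float, String) when the corresponding #== has not been
 * redefined. Returns Qundef when no specialized check applies, in which
 * case the caller falls back to a real method call. */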
2336static VALUE
2337opt_equality_specialized(VALUE recv, VALUE obj)
2338{
2339 if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
2340 goto compare_by_identity;
2341 }
2342 else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
2343 goto compare_by_identity;
2344 }
2345 else if (STATIC_SYM_P(recv) && STATIC_SYM_P(obj) && EQ_UNREDEFINED_P(SYMBOL)) {
2346 goto compare_by_identity;
2347 }
2348 else if (SPECIAL_CONST_P(recv)) {
2349 // other special constants: fall back to ordinary method dispatch
2350 }
2351 else if (RBASIC_CLASS(recv) == rb_cFloat && RB_FLOAT_TYPE_P(obj) && EQ_UNREDEFINED_P(FLOAT)) {
2352 double a = RFLOAT_VALUE(recv);
2353 double b = RFLOAT_VALUE(obj);
2354
2355#if MSC_VERSION_BEFORE(1300)
2356 if (isnan(a)) {
2357 return Qfalse;
2358 }
2359 else if (isnan(b)) {
2360 return Qfalse;
2361 }
2362 else
2363#endif
2364 return RBOOL(a == b);
2365 }
2366 else if (RBASIC_CLASS(recv) == rb_cString && EQ_UNREDEFINED_P(STRING)) {
2367 if (recv == obj) {
2368 return Qtrue;
2369 }
2370 else if (RB_TYPE_P(obj, T_STRING)) {
2371 return rb_str_eql_internal(obj, recv);
2372 }
2373 }
2374 return Qundef;
2375
2376 compare_by_identity:
2377 return RBOOL(recv == obj);
2378}
2379
2380static VALUE
2381opt_equality(const rb_iseq_t *cd_owner, VALUE recv, VALUE obj, CALL_DATA cd)
2382{
2383 VM_ASSERT(cd_owner != NULL);
2384
2385 VALUE val = opt_equality_specialized(recv, obj);
2386 if (!UNDEF_P(val)) return val;
2387
2388 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
2389 return Qundef;
2390 }
2391 else {
2392 return RBOOL(recv == obj);
2393 }
2394}
2395
2396#undef EQ_UNREDEFINED_P
2397
2398static inline const struct rb_callcache *gccct_method_search(rb_execution_context_t *ec, VALUE recv, ID mid, int argc); // vm_eval.c
2399NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));
2400
2401static VALUE
2402opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
2403{
2404 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, 1);
2405
2406 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2407 return RBOOL(recv == obj);
2408 }
2409 else {
2410 return Qundef;
2411 }
2412}
2413
2414static VALUE
2415opt_equality_by_mid(VALUE recv, VALUE obj, ID mid)
2416{
2417 VALUE val = opt_equality_specialized(recv, obj);
2418 if (!UNDEF_P(val)) {
2419 return val;
2420 }
2421 else {
2422 return opt_equality_by_mid_slowpath(recv, obj, mid);
2423 }
2424}
2425
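/* Exported fast paths for #== and #eql?. Both return Qundef when the
 * answer cannot be determined cheaply; callers are expected to fall back
 * to a normal method call in that case. */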
2426VALUE
2427rb_equal_opt(VALUE obj1, VALUE obj2)
2428{
2429 return opt_equality_by_mid(obj1, obj2, idEq);
2430}
2431
2432VALUE
2433rb_eql_opt(VALUE obj1, VALUE obj2)
2434{
2435 return opt_equality_by_mid(obj1, obj2, idEqlP);
2436}
2437
2438extern VALUE rb_vm_call0(rb_execution_context_t *ec, VALUE, ID, int, const VALUE*, const rb_callable_method_entry_t *, int kw_splat);
2439extern VALUE rb_vm_call_with_refinements(rb_execution_context_t *, VALUE, ID, int, const VALUE *, int);
2440
2441static VALUE
2442check_match(rb_execution_context_t *ec, VALUE pattern, VALUE target, enum vm_check_match_type type)
2443{
2444 switch (type) {
2445 case VM_CHECKMATCH_TYPE_WHEN:
2446 return pattern;
2447 case VM_CHECKMATCH_TYPE_RESCUE:
2448 if (!rb_obj_is_kind_of(pattern, rb_cModule)) {
2449 rb_raise(rb_eTypeError, "class or module required for rescue clause");
2450 }
2451 /* fall through */
2452 case VM_CHECKMATCH_TYPE_CASE: {
2453 return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target, RB_NO_KEYWORDS);
2454 }
2455 default:
2456 rb_bug("check_match: unreachable");
2457 }
2458}
2459
2460
2461#if MSC_VERSION_BEFORE(1300)
2462#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
2463#else
2464#define CHECK_CMP_NAN(a, b) /* do nothing */
2465#endif
2466
2467static inline VALUE
2468double_cmp_lt(double a, double b)
2469{
2470 CHECK_CMP_NAN(a, b);
2471 return RBOOL(a < b);
2472}
2473
2474static inline VALUE
2475double_cmp_le(double a, double b)
2476{
2477 CHECK_CMP_NAN(a, b);
2478 return RBOOL(a <= b);
2479}
2480
2481static inline VALUE
2482double_cmp_gt(double a, double b)
2483{
2484 CHECK_CMP_NAN(a, b);
2485 return RBOOL(a > b);
2486}
2487
2488static inline VALUE
2489double_cmp_ge(double a, double b)
2490{
2491 CHECK_CMP_NAN(a, b);
2492 return RBOOL(a >= b);
2493}
2494
2495// This logic is duplicated in vm_dump.c; keep the two in sync.
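/* Compute the frame's base pointer: the slot just above the receiver
 * (method frames only), the locals, and the VM_ENV_DATA_SIZE environment
 * words pushed by vm_push_frame(). Everything above bp is the frame's
 * operand stack. */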
2496static inline VALUE *
2497vm_base_ptr(const rb_control_frame_t *cfp)
2498{
2499 const rb_control_frame_t *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
2500
2501 if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
2502 VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;
2503 if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
2504 /* adjust `self' */
2505 bp += 1;
2506 }
2507#if VM_DEBUG_BP_CHECK
2508 if (bp != cfp->bp_check) {
2509 ruby_debug_printf("bp_check: %ld, bp: %ld\n",
2510 (long)(cfp->bp_check - GET_EC()->vm_stack),
2511 (long)(bp - GET_EC()->vm_stack));
2512 rb_bug("vm_base_ptr: unreachable");
2513 }
2514#endif
2515 return bp;
2516 }
2517 else {
2518 return NULL;
2519 }
2520}
2521
2522VALUE *
2523rb_vm_base_ptr(const rb_control_frame_t *cfp)
2524{
2525 return vm_base_ptr(cfp);
2526}
2527
2528/* method call processes with call_info */
2529
2530#include "vm_args.c"
2531
2532static inline VALUE vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc, int param_size, int local_size);
2533ALWAYS_INLINE(static VALUE vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me, int opt_pc, int param_size, int local_size));
2534static inline VALUE vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc);
2535static VALUE vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
2536static VALUE vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2537static VALUE vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2538static inline VALUE vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2539
2540static vm_call_handler vm_call_iseq_setup_func(const struct rb_callinfo *ci, const int param_size, const int local_size);
2541
2542static VALUE
2543vm_call_iseq_setup_tailcall_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
2544{
2545 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
2546
2547 return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
2548}
2549
2550static VALUE
2551vm_call_iseq_setup_normal_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
2552{
2553 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2554
2555 const struct rb_callcache *cc = calling->cc;
2556 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2557 int param = ISEQ_BODY(iseq)->param.size;
2558 int local = ISEQ_BODY(iseq)->local_table_size;
2559 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2560}
2561
2562bool
2563rb_simple_iseq_p(const rb_iseq_t *iseq)
2564{
2565 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2566 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2567 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2568 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2569 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2570 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2571 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2572}
2573
2574bool
2575rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
2576{
2577 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2578 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2579 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2580 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2581 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2582 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2583 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2584}
2585
2586bool
2587rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
2588{
2589 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2590 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2591 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2592 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2593 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2594 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2595}
2596
2597#define ALLOW_HEAP_ARGV (-2)
2598#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
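/* Negative max_args values are sentinels: both allow the splatted
 * arguments to be collected into a hidden heap array (calling->heap_argv)
 * instead of the VM stack; _KEEP_KWSPLAT additionally keeps kw_splat set
 * when a keyword hash is appended to that array. */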
2599
2600static inline bool
2601vm_caller_setup_arg_splat(rb_control_frame_t *cfp, struct rb_calling_info *calling, VALUE ary, int max_args)
2602{
2603 vm_check_canary(GET_EC(), cfp->sp);
2604 bool ret = false;
2605
2606 if (!NIL_P(ary)) {
2607 const VALUE *ptr = RARRAY_CONST_PTR(ary);
2608 long len = RARRAY_LEN(ary);
2609 int argc = calling->argc;
2610
2611 if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
2612 /* Avoid SystemStackError when splatting large arrays by storing the arguments
2613 * in a temporary hidden array, instead of trying to keep them on the VM stack.
2614 */
2615 VALUE *argv = cfp->sp - argc;
2616 VALUE argv_ary = rb_ary_hidden_new(len + argc + 1);
2617 rb_ary_cat(argv_ary, argv, argc);
2618 rb_ary_cat(argv_ary, ptr, len);
2619 cfp->sp -= argc - 1;
2620 cfp->sp[-1] = argv_ary;
2621 calling->argc = 1;
2622 calling->heap_argv = argv_ary;
2623 RB_GC_GUARD(ary);
2624 }
2625 else {
2626 long i;
2627
2628 if (max_args >= 0 && len + argc > max_args) {
2629 /* If only a limited number of arguments (max_args) is allowed,
2630 * copy only up to that limit. Used by vm_callee_setup_block_arg
2631 * for non-lambda blocks, where extra arguments are ignored.
2632 *
2633 * Also copy one argument beyond the maximum, in case it is an
2634 * empty keyword hash that will be removed later.
2635 */
2636 calling->argc += len - (max_args - argc + 1);
2637 len = max_args - argc + 1;
2638 ret = true;
2639 }
2640 else {
2641 /* Unset heap_argv if it was set originally. This can happen
2642 * when forwarding modified arguments: heap_argv was used for
2643 * the original call, but the forwarded method does not support
2644 * heap_argv in all cases.
2645 */
2646 calling->heap_argv = 0;
2647 }
2648 CHECK_VM_STACK_OVERFLOW(cfp, len);
2649
2650 for (i = 0; i < len; i++) {
2651 *cfp->sp++ = ptr[i];
2652 }
2653 calling->argc += i;
2654 }
2655 }
2656
2657 return ret;
2658}
2659
2660static inline void
2661vm_caller_setup_arg_kw(rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_callinfo *ci)
2662{
2663 const VALUE *const passed_keywords = vm_ci_kwarg(ci)->keywords;
2664 const int kw_len = vm_ci_kwarg(ci)->keyword_len;
2665 const VALUE h = rb_hash_new_with_size(kw_len);
2666 VALUE *sp = cfp->sp;
2667 int i;
2668
2669 for (i=0; i<kw_len; i++) {
2670 rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
2671 }
2672 (sp-kw_len)[0] = h;
2673
2674 cfp->sp -= kw_len - 1;
2675 calling->argc -= kw_len - 1;
2676 calling->kw_splat = 1;
2677}
2678
2679static inline VALUE
2680vm_caller_setup_keyword_hash(const struct rb_callinfo *ci, VALUE keyword_hash)
2681{
2682 if (UNLIKELY(!RB_TYPE_P(keyword_hash, T_HASH))) {
2683 /* Convert a non-hash keyword splat to a new hash */
2684 keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));
2685 }
2686 else if (!IS_ARGS_KW_SPLAT_MUT(ci)) {
2687 /* Convert a hash keyword splat to a new hash unless
2688 * a mutable keyword splat was passed.
2689 */
2690 keyword_hash = rb_hash_dup(keyword_hash);
2691 }
2692 return keyword_hash;
2693}
2694
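/* Normalize the caller-side arguments sitting on the VM stack: expand a
 * trailing splat (*a), convert or drop a keyword splat (**kw), and turn
 * literal keyword arguments (k: v) into a keyword hash. A trailing hash
 * flagged RHASH_PASS_AS_KEYWORDS (from a ruby2_keywords method) is turned
 * back into keyword arguments, or dropped when empty. */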
2695static inline void
2696CALLER_SETUP_ARG(struct rb_control_frame_struct *restrict cfp,
2697 struct rb_calling_info *restrict calling,
2698 const struct rb_callinfo *restrict ci, int max_args)
2699{
2700 if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
2701 if (IS_ARGS_KW_SPLAT(ci)) {
2702 // f(*a, **kw)
2703 VM_ASSERT(calling->kw_splat == 1);
2704
2705 cfp->sp -= 2;
2706 calling->argc -= 2;
2707 VALUE ary = cfp->sp[0];
2708 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);
2709
2710 // splat a
2711 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) return;
2712
2713 // put kw
2714 if (!RHASH_EMPTY_P(kwh)) {
2715 if (UNLIKELY(calling->heap_argv)) {
2716 rb_ary_push(calling->heap_argv, kwh);
2717 ((struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
2718 if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
2719 calling->kw_splat = 0;
2720 }
2721 }
2722 else {
2723 cfp->sp[0] = kwh;
2724 cfp->sp++;
2725 calling->argc++;
2726
2727 VM_ASSERT(calling->kw_splat == 1);
2728 }
2729 }
2730 else {
2731 calling->kw_splat = 0;
2732 }
2733 }
2734 else {
2735 // f(*a)
2736 VM_ASSERT(calling->kw_splat == 0);
2737
2738 cfp->sp -= 1;
2739 calling->argc -= 1;
2740 VALUE ary = cfp->sp[0];
2741
2742 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
2743 goto check_keyword;
2744 }
2745
2746 // check the last argument
2747 VALUE last_hash, argv_ary;
2748 if (UNLIKELY(argv_ary = calling->heap_argv)) {
2749 if (!IS_ARGS_KEYWORD(ci) &&
2750 RARRAY_LEN(argv_ary) > 0 &&
2751 RB_TYPE_P((last_hash = rb_ary_last(0, NULL, argv_ary)), T_HASH) &&
2752 (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2753
2754 rb_ary_pop(argv_ary);
2755 if (!RHASH_EMPTY_P(last_hash)) {
2756 rb_ary_push(argv_ary, rb_hash_dup(last_hash));
2757 calling->kw_splat = 1;
2758 }
2759 }
2760 }
2761 else {
2762check_keyword:
2763 if (!IS_ARGS_KEYWORD(ci) &&
2764 calling->argc > 0 &&
2765 RB_TYPE_P((last_hash = cfp->sp[-1]), T_HASH) &&
2766 (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2767
2768 if (RHASH_EMPTY_P(last_hash)) {
2769 calling->argc--;
2770 cfp->sp -= 1;
2771 }
2772 else {
2773 cfp->sp[-1] = rb_hash_dup(last_hash);
2774 calling->kw_splat = 1;
2775 }
2776 }
2777 }
2778 }
2779 }
2780 else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
2781 // f(**kw)
2782 VM_ASSERT(calling->kw_splat == 1);
2783 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);
2784
2785 if (RHASH_EMPTY_P(kwh)) {
2786 cfp->sp--;
2787 calling->argc--;
2788 calling->kw_splat = 0;
2789 }
2790 else {
2791 cfp->sp[-1] = kwh;
2792 }
2793 }
2794 else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
2795 // f(k1:1, k2:2)
2796 VM_ASSERT(calling->kw_splat == 0);
2797
2798 /* This converts VM_CALL_KWARG style to VM_CALL_KW_SPLAT style
2799 * by creating a keyword hash, so vm_ci_flag(ci) & VM_CALL_KWARG
2800 * no longer matches the actual argument layout on the stack.
2801 */
2802 vm_caller_setup_arg_kw(cfp, calling, ci);
2803 }
2804}
2805
2806#define USE_OPT_HIST 0
2807
2808#if USE_OPT_HIST
2809#define OPT_HIST_MAX 64
2810static int opt_hist[OPT_HIST_MAX+1];
2811
2812__attribute__((destructor))
2813static void
2814opt_hist_show_results_at_exit(void)
2815{
2816 for (int i=0; i<OPT_HIST_MAX; i++) {
2817 ruby_debug_printf("opt_hist\t%d\t%d\n", i, opt_hist[i]);
2818 }
2819}
2820#endif
2821
2822static VALUE
2823vm_call_iseq_setup_normal_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2824 struct rb_calling_info *calling)
2825{
2826 const struct rb_callcache *cc = calling->cc;
2827 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2828 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2829 const int opt = calling->argc - lead_num;
2830 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2831 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2832 const int param = ISEQ_BODY(iseq)->param.size;
2833 const int local = ISEQ_BODY(iseq)->local_table_size;
2834 const int delta = opt_num - opt;
2835
2836 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2837
2838#if USE_OPT_HIST
2839 if (opt_pc < OPT_HIST_MAX) {
2840 opt_hist[opt]++;
2841 }
2842 else {
2843 opt_hist[OPT_HIST_MAX]++;
2844 }
2845#endif
2846
2847 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
2848}
2849
2850static VALUE
2851vm_call_iseq_setup_tailcall_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2852 struct rb_calling_info *calling)
2853{
2854 const struct rb_callcache *cc = calling->cc;
2855 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2856 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2857 const int opt = calling->argc - lead_num;
2858 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2859
2860 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2861
2862#if USE_OPT_HIST
2863 if (opt_pc < OPT_HIST_MAX) {
2864 opt_hist[opt]++;
2865 }
2866 else {
2867 opt_hist[OPT_HIST_MAX]++;
2868 }
2869#endif
2870
2871 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2872}
2873
2874static void
2875args_setup_kw_parameters(rb_execution_context_t *const ec, const rb_iseq_t *const iseq,
2876 VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
2877 VALUE *const locals);
2878
2879static VALUE
2880vm_call_iseq_setup_kwparm_kwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2881 struct rb_calling_info *calling)
2882{
2883 const struct rb_callinfo *ci = calling->cd->ci;
2884 const struct rb_callcache *cc = calling->cc;
2885
2886 VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
2887 RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);
2888
2889 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2890 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2891 const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);
2892 const int ci_kw_len = kw_arg->keyword_len;
2893 const VALUE * const ci_keywords = kw_arg->keywords;
2894 VALUE *argv = cfp->sp - calling->argc;
2895 VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
2896 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2897 VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
2898 MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
2899 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
2900
2901 int param = ISEQ_BODY(iseq)->param.size;
2902 int local = ISEQ_BODY(iseq)->local_table_size;
2903 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2904}
2905
2906static VALUE
2907vm_call_iseq_setup_kwparm_nokwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2908 struct rb_calling_info *calling)
2909{
2910 const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
2911 const struct rb_callcache *cc = calling->cc;
2912
2913 VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
2914 RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);
2915
2916 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2917 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2918 VALUE * const argv = cfp->sp - calling->argc;
2919 VALUE * const klocals = argv + kw_param->bits_start - kw_param->num;
2920
2921 int i;
2922 for (i=0; i<kw_param->num; i++) {
2923 klocals[i] = kw_param->default_values[i];
2924 }
2925 klocals[i] = INT2FIX(0); // kw specify flag
2926 // NOTE:
2927 // nobody checks this value, but it should be cleared because it can
2928 // point to an invalid VALUE (a T_NONE object, a raw pointer, and so on).
2929
2930 int param = ISEQ_BODY(iseq)->param.size;
2931 int local = ISEQ_BODY(iseq)->local_table_size;
2932 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2933}
2934
2935static VALUE builtin_invoker0(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr);
2936
2937static VALUE
2938vm_call_single_noarg_inline_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2939 struct rb_calling_info *calling)
2940{
2941 const struct rb_builtin_function *bf = calling->cc->aux_.bf;
2942 cfp->sp -= (calling->argc + 1);
2943 return builtin_invoker0(ec, calling->recv, NULL, (rb_insn_func_t)bf->func_ptr);
2944}
2945
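/* Callee-side argument setup. For simple parameter shapes this arranges
 * the arguments in place, installs a specialized fastpath on the call
 * cache (CC_SET_FASTPATH), and returns the opt_pc offset at which the
 * iseq should start; complex shapes go through
 * setup_parameters_complex(). */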
2946static inline int
2947vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
2948 const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
2949{
2950 const struct rb_callinfo *ci = calling->cd->ci;
2951 const struct rb_callcache *cc = calling->cc;
2952 bool cacheable_ci = vm_ci_markable(ci);
2953
2954 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
2955 if (LIKELY(rb_simple_iseq_p(iseq))) {
2956 rb_control_frame_t *cfp = ec->cfp;
2957 int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2958 CALLER_SETUP_ARG(cfp, calling, ci, lead_num);
2959
2960 if (calling->argc != lead_num) {
2961 argument_arity_error(ec, iseq, calling->argc, lead_num, lead_num);
2962 }
2963
2964 VM_ASSERT(ci == calling->cd->ci);
2965 VM_ASSERT(cc == calling->cc);
2966
2967 if (cacheable_ci && vm_call_iseq_optimizable_p(ci, cc)) {
2968 if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_INLINE) &&
2969 !(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) {
2970 VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
2971 vm_cc_bf_set(cc, (void *)iseq->body->iseq_encoded[1]);
2972 CC_SET_FASTPATH(cc, vm_call_single_noarg_inline_builtin, true);
2973 }
2974 else {
2975 CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), true);
2976 }
2977 }
2978 return 0;
2979 }
2980 else if (rb_iseq_only_optparam_p(iseq)) {
2981 rb_control_frame_t *cfp = ec->cfp;
2982
2983 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2984 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2985
2986 CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
2987 const int argc = calling->argc;
2988 const int opt = argc - lead_num;
2989
2990 if (opt < 0 || opt > opt_num) {
2991 argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
2992 }
2993
2994 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
2995 CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
2996 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
2997 cacheable_ci && vm_call_cacheable(ci, cc));
2998 }
2999 else {
3000 CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
3001 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3002 cacheable_ci && vm_call_cacheable(ci, cc));
3003 }
3004
3005 /* initialize opt vars for self-references */
3006 VM_ASSERT((int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
3007 for (int i=argc; i<lead_num + opt_num; i++) {
3008 argv[i] = Qnil;
3009 }
3010 return (int)ISEQ_BODY(iseq)->param.opt_table[opt];
3011 }
3012 else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
3013 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3014 const int argc = calling->argc;
3015 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3016
3017 if (vm_ci_flag(ci) & VM_CALL_KWARG) {
3018 const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);
3019
3020 if (argc - kw_arg->keyword_len == lead_num) {
3021 const int ci_kw_len = kw_arg->keyword_len;
3022 const VALUE * const ci_keywords = kw_arg->keywords;
3023 VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
3024 MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
3025
3026 VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
3027 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
3028
3029 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
3030 cacheable_ci && vm_call_cacheable(ci, cc));
3031
3032 return 0;
3033 }
3034 }
3035 else if (argc == lead_num) {
3036 /* no kwarg */
3037 VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
3038 args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);
3039
3040 if (klocals[kw_param->num] == INT2FIX(0)) {
3041 /* copy from default_values */
3042 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
3043 cacheable_ci && vm_call_cacheable(ci, cc));
3044 }
3045
3046 return 0;
3047 }
3048 }
3049 }
3050
3051 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
3052}
3053
3054static VALUE
3055vm_call_iseq_setup(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3056{
3057 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3058
3059 const struct rb_callcache *cc = calling->cc;
3060 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3061 const int param_size = ISEQ_BODY(iseq)->param.size;
3062 const int local_size = ISEQ_BODY(iseq)->local_table_size;
3063 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3064 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3065}
3066
3067static inline VALUE
3068vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
3069 int opt_pc, int param_size, int local_size)
3070{
3071 const struct rb_callinfo *ci = calling->cd->ci;
3072 const struct rb_callcache *cc = calling->cc;
3073
3074 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3075 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
3076 }
3077 else {
3078 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
3079 }
3080}
3081
3082static inline VALUE
3083vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me,
3084 int opt_pc, int param_size, int local_size)
3085{
3086 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3087 VALUE *argv = cfp->sp - calling->argc;
3088 VALUE *sp = argv + param_size;
3089 cfp->sp = argv - 1 /* recv */;
3090
3091 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
3092 calling->block_handler, (VALUE)me,
3093 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3094 local_size - param_size,
3095 ISEQ_BODY(iseq)->stack_max);
3096 return Qundef;
3097}
3098
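/* Tail call: instead of stacking a new frame on top of the caller's,
 * pop the current frame (keeping its FINISH flag) and push the callee's
 * frame in its place, copying the receiver and arguments down the stack
 * so the VM stack does not grow across the call. */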
3099static inline VALUE
3100vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc)
3101{
3102 const struct rb_callcache *cc = calling->cc;
3103 unsigned int i;
3104 VALUE *argv = cfp->sp - calling->argc;
3105 const rb_callable_method_entry_t *me = vm_cc_cme(cc);
3106 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3107 VALUE *src_argv = argv;
3108 VALUE *sp_orig, *sp;
3109 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
3110
3111 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
3112 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
3113 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
3114 dst_captured->code.val = src_captured->code.val;
3115 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
3116 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
3117 }
3118 else {
3119 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
3120 }
3121 }
3122
3123 vm_pop_frame(ec, cfp, cfp->ep);
3124 cfp = ec->cfp;
3125
3126 sp_orig = sp = cfp->sp;
3127
3128 /* push self */
3129 sp[0] = calling->recv;
3130 sp++;
3131
3132 /* copy arguments */
3133 for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
3134 *sp++ = src_argv[i];
3135 }
3136
3137 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
3138 calling->recv, calling->block_handler, (VALUE)me,
3139 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3140 ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
3141 ISEQ_BODY(iseq)->stack_max);
3142
3143 cfp->sp = sp_orig;
3144
3145 return Qundef;
3146}
3147
3148static void
3149ractor_unsafe_check(void)
3150{
3151 if (!rb_ractor_main_p()) {
3152 rb_raise(rb_eRactorUnsafeError, "ractor unsafe method called from non-main ractor");
3153 }
3154}
3155
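/* call_cfunc_<n> invokers: one trampoline per declared C-function arity.
 * -2 passes (recv, args_ary), -1 passes (argc, argv, recv), and 0..15
 * spread argv into positional parameters. These variants also guard
 * against calling Ractor-unsafe methods outside the main Ractor. */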
3156static VALUE
3157call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3158{
3159 ractor_unsafe_check();
3160 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
3161 return (*f)(recv, rb_ary_new4(argc, argv));
3162}
3163
3164static VALUE
3165call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3166{
3167 ractor_unsafe_check();
3168 VALUE(*f)(int, const VALUE *, VALUE) = (VALUE(*)(int, const VALUE *, VALUE))func;
3169 return (*f)(argc, argv, recv);
3170}
3171
3172static VALUE
3173call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3174{
3175 ractor_unsafe_check();
3176 VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
3177 return (*f)(recv);
3178}
3179
3180static VALUE
3181call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3182{
3183 ractor_unsafe_check();
3184 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
3185 return (*f)(recv, argv[0]);
3186}
3187
3188static VALUE
3189call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3190{
3191 ractor_unsafe_check();
3192 VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
3193 return (*f)(recv, argv[0], argv[1]);
3194}
3195
3196static VALUE
3197call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3198{
3199 ractor_unsafe_check();
3200 VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
3201 return (*f)(recv, argv[0], argv[1], argv[2]);
3202}
3203
3204static VALUE
3205call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3206{
3207 ractor_unsafe_check();
3208 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
3209 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3210}
3211
3212static VALUE
3213call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3214{
3215 ractor_unsafe_check();
3216 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3217 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3218}
3219
3220static VALUE
3221call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3222{
3223 ractor_unsafe_check();
3224 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3225 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3226}
3227
3228static VALUE
3229call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3230{
3231 ractor_unsafe_check();
3232 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3233 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3234}
3235
3236static VALUE
3237call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3238{
3239 ractor_unsafe_check();
3240 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3241 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3242}
3243
3244static VALUE
3245call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3246{
3247 ractor_unsafe_check();
3248 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3249 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3250}
3251
3252static VALUE
3253call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3254{
3255 ractor_unsafe_check();
3256 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3257 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3258}
3259
3260static VALUE
3261call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3262{
3263 ractor_unsafe_check();
3264 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3265 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3266}
3267
3268static VALUE
3269call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3270{
3271 ractor_unsafe_check();
3272 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3273 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3274}
3275
3276static VALUE
3277call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3278{
3279 ractor_unsafe_check();
3280 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3281 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3282}
3283
3284static VALUE
3285call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3286{
3287 ractor_unsafe_check();
3288 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3289 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3290}
3291
3292static VALUE
3293call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3294{
3295 ractor_unsafe_check();
3296 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3297 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3298}
3299
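/* The ractor_safe_* variants below are identical to call_cfunc_* except
 * that they omit ractor_unsafe_check(), for methods registered as
 * Ractor-safe. */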
3300static VALUE
3301ractor_safe_call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3302{
3303 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
3304 return (*f)(recv, rb_ary_new4(argc, argv));
3305}
3306
3307static VALUE
3308ractor_safe_call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3309{
3310 VALUE(*f)(int, const VALUE *, VALUE) = (VALUE(*)(int, const VALUE *, VALUE))func;
3311 return (*f)(argc, argv, recv);
3312}
3313
3314static VALUE
3315ractor_safe_call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3316{
3317 VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
3318 return (*f)(recv);
3319}
3320
3321static VALUE
3322ractor_safe_call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3323{
3324 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
3325 return (*f)(recv, argv[0]);
3326}
3327
3328static VALUE
3329ractor_safe_call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3330{
3331 VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
3332 return (*f)(recv, argv[0], argv[1]);
3333}
3334
3335static VALUE
3336ractor_safe_call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3337{
3338 VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
3339 return (*f)(recv, argv[0], argv[1], argv[2]);
3340}
3341
3342static VALUE
3343ractor_safe_call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3344{
3345 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
3346 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3347}
3348
3349static VALUE
3350ractor_safe_call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3351{
3352 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3353 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3354}
3355
3356static VALUE
3357ractor_safe_call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3358{
3359 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3360 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3361}
3362
3363static VALUE
3364ractor_safe_call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3365{
3366 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3367 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3368}
3369
3370static VALUE
3371ractor_safe_call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3372{
3373 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3374 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3375}
3376
3377static VALUE
3378ractor_safe_call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3379{
3380 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3381 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3382}
3383
3384static VALUE
3385ractor_safe_call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3386{
3387 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3388 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3389}
3390
3391static VALUE
3392ractor_safe_call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3393{
3394 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3395 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3396}
3397
3398static VALUE
3399ractor_safe_call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3400{
3401 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3402 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3403}
3404
3405static VALUE
3406ractor_safe_call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3407{
3408 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3409 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3410}
3411
3412static VALUE
3413ractor_safe_call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3414{
3415 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3416 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3417}
3418
3419static VALUE
3420ractor_safe_call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3421{
3422 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3423 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3424}
3425
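/* Sanity check used after a cfunc returns: the saved reg_cfp must be
 * exactly one frame above ec->cfp. The only tolerated exception is a
 * pending stack overflow, whose raised flag is consumed here. */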
3426static inline int
3427vm_cfp_consistent_p(rb_execution_context_t *ec, const rb_control_frame_t *reg_cfp)
3428{
3429 const int ov_flags = RAISED_STACKOVERFLOW;
3430 if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
3431 if (rb_ec_raised_p(ec, ov_flags)) {
3432 rb_ec_raised_reset(ec, ov_flags);
3433 return TRUE;
3434 }
3435 return FALSE;
3436}
3437
3438#define CHECK_CFP_CONSISTENCY(func) \
3439 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
3440 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
3441
3442static inline
3443const rb_method_cfunc_t *
3444vm_method_cfunc_entry(const rb_callable_method_entry_t *me)
3445{
3446#if VM_DEBUG_VERIFY_METHOD_CACHE
3447 switch (me->def->type) {
3448 case VM_METHOD_TYPE_CFUNC:
3449 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3450 break;
3451# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
3452 METHOD_BUG(ISEQ);
3453 METHOD_BUG(ATTRSET);
3454 METHOD_BUG(IVAR);
3455 METHOD_BUG(BMETHOD);
3456 METHOD_BUG(ZSUPER);
3457 METHOD_BUG(UNDEF);
3458 METHOD_BUG(OPTIMIZED);
3459 METHOD_BUG(MISSING);
3460 METHOD_BUG(REFINED);
3461 METHOD_BUG(ALIAS);
3462# undef METHOD_BUG
3463 default:
3464 rb_bug("wrong method type: %d", me->def->type);
3465 }
3466#endif
3467 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
3468}
3469
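/* Core cfunc call: push a CFRAME (no iseq) so that backtraces and
 * C_CALL/C_RETURN hooks see the call, invoke the function through the
 * arity-specific invoker, verify frame consistency, then pop. */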
3470static inline VALUE
3471vm_call_cfunc_with_frame_(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
3472 int argc, VALUE *argv, VALUE *stack_bottom)
3473{
3474 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
3475 const struct rb_callinfo *ci = calling->cd->ci;
3476 const struct rb_callcache *cc = calling->cc;
3477 VALUE val;
3478 const rb_callable_method_entry_t *me = vm_cc_cme(cc);
3479 const rb_method_cfunc_t *cfunc = vm_method_cfunc_entry(me);
3480
3481 VALUE recv = calling->recv;
3482 VALUE block_handler = calling->block_handler;
3483 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3484
3485 if (UNLIKELY(calling->kw_splat)) {
3486 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
3487 }
3488
3489 VM_ASSERT(reg_cfp == ec->cfp);
3490
3491 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
3492 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, recv, me->def->original_id, vm_ci_mid(ci), me->owner, Qundef);
3493
3494 vm_push_frame(ec, NULL, frame_type, recv,
3495 block_handler, (VALUE)me,
3496 0, ec->cfp->sp, 0, 0);
3497
3498 int len = cfunc->argc;
3499 if (len >= 0) rb_check_arity(argc, len, len);
3500
3501 reg_cfp->sp = stack_bottom;
3502 val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);
3503
3504 CHECK_CFP_CONSISTENCY("vm_call_cfunc");
3505
3506 rb_vm_pop_frame(ec);
3507
3508 VM_ASSERT(ec->cfp->sp == stack_bottom);
3509
3510 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
3511 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
3512
3513 return val;
3514}
3515
3516// If true, cc->call needs to include `CALLER_SETUP_ARG` (i.e. can't be skipped in fastpath)
3517bool
3518rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
3519{
3520 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
3521}
3522
3523static VALUE
3524vm_call_cfunc_with_frame(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3525{
3526 int argc = calling->argc;
3527 VALUE *stack_bottom = reg_cfp->sp - argc - 1;
3528 VALUE *argv = &stack_bottom[1];
3529
3530 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3531}
3532
3533static VALUE
3534vm_call_cfunc_other(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3535{
3536 const struct rb_callinfo *ci = calling->cd->ci;
3537 RB_DEBUG_COUNTER_INC(ccf_cfunc_other);
3538
3539 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
3540 VALUE argv_ary;
3541 if (UNLIKELY(argv_ary = calling->heap_argv)) {
3542 VM_ASSERT(!IS_ARGS_KEYWORD(ci));
3543 int argc = RARRAY_LENINT(argv_ary);
3544 VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
3545 VALUE *stack_bottom = reg_cfp->sp - 2;
3546
3547 VM_ASSERT(calling->argc == 1);
3548 VM_ASSERT(RB_TYPE_P(argv_ary, T_ARRAY));
3549 VM_ASSERT(RBASIC_CLASS(argv_ary) == 0); // hidden ary
3550
3551 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3552 }
3553 else {
3554 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat);
3555
3556 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
3557 }
3558}
3559
3560static inline VALUE
3561vm_call_cfunc_array_argv(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, int stack_offset, int argc_offset)
3562{
3563 VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
3564 int argc = RARRAY_LENINT(argv_ary) - argc_offset;
3565
3566 if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
3567 return vm_call_cfunc_other(ec, reg_cfp, calling);
3568 }
3569
3570 VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
3571 calling->kw_splat = 0;
3572 int i;
3573 VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
3574 VALUE *sp = stack_bottom;
3575 CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
3576 for(i = 0; i < argc; i++) {
3577 *++sp = argv[i];
3578 }
3579 reg_cfp->sp = sp+1;
3580
3581 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
3582}
3583
3584static inline VALUE
3585vm_call_cfunc_only_splat(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3586{
3587 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
3588 VALUE argv_ary = reg_cfp->sp[-1];
3589 int argc = RARRAY_LENINT(argv_ary);
3590 VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
3591 VALUE last_hash;
3592 int argc_offset = 0;
3593
3594 if (UNLIKELY(argc > 0 &&
3595 RB_TYPE_P((last_hash = argv[argc-1]), T_HASH) &&
3596 (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
3597 if (!RHASH_EMPTY_P(last_hash)) {
3598 return vm_call_cfunc_other(ec, reg_cfp, calling);
3599 }
3600 argc_offset++;
3601 }
3602 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
3603}
3604
3605static inline VALUE
3606vm_call_cfunc_only_splat_kw(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3607{
3608 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
3609 VALUE keyword_hash = reg_cfp->sp[-1];
3610
3611 if (RB_TYPE_P(keyword_hash, T_HASH) && RHASH_EMPTY_P(keyword_hash)) {
3612 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
3613 }
3614
3615 return vm_call_cfunc_other(ec, reg_cfp, calling);
3616}
3617
3618static VALUE
3619vm_call_cfunc(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3620{
3621 const struct rb_callinfo *ci = calling->cd->ci;
3622 RB_DEBUG_COUNTER_INC(ccf_cfunc);
3623
3624 if (IS_ARGS_SPLAT(ci)) {
3625 if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
3626 // f(*a)
3627 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
3628 return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
3629 }
3630 if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
3631 // f(*a, **kw)
3632 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
3633 return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
3634 }
3635 }
3636
3637 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
3638 return vm_call_cfunc_other(ec, reg_cfp, calling);
3639}
3640
3641static VALUE
3642vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3643{
3644 const struct rb_callcache *cc = calling->cc;
3645 RB_DEBUG_COUNTER_INC(ccf_ivar);
3646 cfp->sp -= 1;
3647 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE, Qnil);
3648 return ivar;
3649}
3650
3651static VALUE
3652vm_call_attrset_direct(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_callcache *cc, VALUE obj)
3653{
3654 RB_DEBUG_COUNTER_INC(ccf_attrset);
3655 VALUE val = *(cfp->sp - 1);
3656 cfp->sp -= 2;
3657 attr_index_t index = vm_cc_attr_index(cc);
3658 shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
3659 ID id = vm_cc_cme(cc)->def->body.attr.id;
3660 rb_check_frozen_internal(obj);
3661 VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
3662 if (UNDEF_P(res)) {
3663 switch (BUILTIN_TYPE(obj)) {
3664 case T_OBJECT:
3665 case T_CLASS:
3666 case T_MODULE:
3667 break;
3668 default:
3669 {
3670 res = vm_setivar_default(obj, id, val, dest_shape_id, index);
3671 if (!UNDEF_P(res)) {
3672 return res;
3673 }
3674 }
3675 }
3676 res = vm_setivar_slowpath_attr(obj, id, val, cc);
3677 }
3678 return res;
3679}
3680
3681static VALUE
3682vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3683{
3684 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
3685}
3686
3687static inline VALUE
3688vm_call_bmethod_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv)
3689{
3690 rb_proc_t *proc;
3691 VALUE val;
3692 const struct rb_callcache *cc = calling->cc;
3693 const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
3694 VALUE procv = cme->def->body.bmethod.proc;
3695
3696 if (!RB_OBJ_SHAREABLE_P(procv) &&
3697 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
3698 rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
3699 }
3700
3701 /* control block frame */
3702 GetProcPtr(procv, proc);
3703 val = rb_vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
3704
3705 return val;
3706}
3707
3708static int vm_callee_setup_block_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_callinfo *ci, const rb_iseq_t *iseq, VALUE *argv, const enum arg_setup_type arg_setup_type);
3709static VALUE invoke_bmethod(rb_execution_context_t *ec, const rb_iseq_t *iseq, VALUE self, const struct rb_captured_block *captured, const rb_callable_method_entry_t *me, VALUE type, int opt_pc);
3710
3711static VALUE
3712vm_call_iseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3713{
3714 RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);
3715
3716 const struct rb_callcache *cc = calling->cc;
3717 const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
3718 VALUE procv = cme->def->body.bmethod.proc;
3719
3720 if (!RB_OBJ_SHAREABLE_P(procv) &&
3721 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
3722 rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
3723 }
3724
3725 rb_proc_t *proc;
3726 GetProcPtr(procv, proc);
3727 const struct rb_block *block = &proc->block;
3728
3729 while (vm_block_type(block) == block_type_proc) {
3730 block = vm_proc_block(block->as.proc);
3731 }
3732 VM_ASSERT(vm_block_type(block) == block_type_iseq);
3733
3734 const struct rb_captured_block *captured = &block->as.captured;
3735 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
3736 VALUE * const argv = cfp->sp - calling->argc;
3737 const int arg_size = ISEQ_BODY(iseq)->param.size;
3738
3739 int opt_pc;
3740 if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
3741 opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
3742 }
3743 else {
3744 opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
3745 }
3746
3747 cfp->sp = argv - 1; // -1 for the receiver
3748
3749 vm_push_frame(ec, iseq,
3750 VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
3751 calling->recv,
3752 VM_GUARDED_PREV_EP(captured->ep),
3753 (VALUE)cme,
3754 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
3755 argv + arg_size,
3756 ISEQ_BODY(iseq)->local_table_size - arg_size,
3757 ISEQ_BODY(iseq)->stack_max);
3758
3759 return Qundef;
3760}
3761
3762static VALUE
3763vm_call_noniseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3764{
3765 RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);
3766
3767 VALUE *argv;
3768 int argc;
3769 CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
3770 if (UNLIKELY(calling->heap_argv)) {
3771 argv = RARRAY_PTR(calling->heap_argv);
3772 cfp->sp -= 2;
3773 }
3774 else {
3775 argc = calling->argc;
3776 argv = ALLOCA_N(VALUE, argc);
3777 MEMCPY(argv, cfp->sp - argc, VALUE, argc);
3778 cfp->sp += - argc - 1;
3779 }
3780
3781 return vm_call_bmethod_body(ec, calling, argv);
3782}
3783
3784static VALUE
3785vm_call_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3786{
3787 RB_DEBUG_COUNTER_INC(ccf_bmethod);
3788
3789 const struct rb_callcache *cc = calling->cc;
3790 const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
3791 VALUE procv = cme->def->body.bmethod.proc;
3792 rb_proc_t *proc;
3793 GetProcPtr(procv, proc);
3794 const struct rb_block *block = &proc->block;
3795
3796 while (vm_block_type(block) == block_type_proc) {
3797 block = vm_proc_block(block->as.proc);
3798 }
3799 if (vm_block_type(block) == block_type_iseq) {
3800 CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
3801 return vm_call_iseq_bmethod(ec, cfp, calling);
3802 }
3803
3804 CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
3805 return vm_call_noniseq_bmethod(ec, cfp, calling);
3806}
3807
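/* Given the class where dispatch happened and the owner of the method that
 * was found, walk the ancestry to locate the class/iclass that should be
 * reported as the method's defined class (relevant when modules are
 * included or prepended). */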
3808VALUE
3809rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
3810{
3811 VALUE klass = current_class;
3812
3813    /* for a prepended Module, start from the class that covers it */
3814 if (RB_TYPE_P(klass, T_ICLASS) && FL_TEST(klass, RICLASS_IS_ORIGIN) &&
3815 RB_TYPE_P(RBASIC_CLASS(klass), T_CLASS)) {
3816 klass = RBASIC_CLASS(klass);
3817 }
3818
3819 while (RTEST(klass)) {
3820 VALUE owner = RB_TYPE_P(klass, T_ICLASS) ? RBASIC_CLASS(klass) : klass;
3821 if (owner == target_owner) {
3822 return klass;
3823 }
3824 klass = RCLASS_SUPER(klass);
3825 }
3826
3827 return current_class; /* maybe module function */
3828}
3829
3830static const rb_callable_method_entry_t *
3831aliased_callable_method_entry(const rb_callable_method_entry_t *me)
3832{
3833 const rb_method_entry_t *orig_me = me->def->body.alias.original_me;
3834 const rb_callable_method_entry_t *cme;
3835
3836 if (orig_me->defined_class == 0) {
3837 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
3838 VM_ASSERT(RB_TYPE_P(orig_me->owner, T_MODULE));
3839 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
3840
3841 if (me->def->reference_count == 1) {
3842 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
3843 }
3844 else {
3845        rb_method_definition_t *def =
3846            rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
3847 rb_method_definition_set((rb_method_entry_t *)me, def, (void *)cme);
3848 }
3849 }
3850 else {
3851 cme = (const rb_callable_method_entry_t *)orig_me;
3852 }
3853
3854 VM_ASSERT(callable_method_entry_p(cme));
3855 return cme;
3856}
3857
3858const rb_callable_method_entry_t *
3859rb_aliased_callable_method_entry(const rb_callable_method_entry_t *me)
3860{
3861 return aliased_callable_method_entry(me);
3862}
3863
3864static VALUE
3865vm_call_alias(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3866{
3867 calling->cc = &VM_CC_ON_STACK(Qundef,
3868 vm_call_general,
3869 {{0}},
3870 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
3871
3872 return vm_call_method_each_type(ec, cfp, calling);
3873}
3874
3875static enum method_missing_reason
3876ci_missing_reason(const struct rb_callinfo *ci)
3877{
3878 enum method_missing_reason stat = MISSING_NOENTRY;
3879 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
3880 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
3881 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
3882 return stat;
3883}
3884
3885static VALUE vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
3886
3887static VALUE
3888vm_call_symbol(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
3889 struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE symbol, int flags)
3890{
3891 ASSUME(calling->argc >= 0);
3892
3893 enum method_missing_reason missing_reason = MISSING_NOENTRY;
3894 int argc = calling->argc;
3895 VALUE recv = calling->recv;
3896 VALUE klass = CLASS_OF(recv);
3897 ID mid = rb_check_id(&symbol);
3898 flags |= VM_CALL_OPT_SEND;
3899
3900 if (UNLIKELY(! mid)) {
3901 mid = idMethodMissing;
3902 missing_reason = ci_missing_reason(ci);
3903 ec->method_missing_reason = missing_reason;
3904
3905 VALUE argv_ary;
3906 if (UNLIKELY(argv_ary = calling->heap_argv)) {
3907 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
3908 rb_ary_unshift(argv_ary, symbol);
3909
3910 /* Inadvertent symbol creation shall be forbidden, see [Feature #5112] */
3911 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
3912 VALUE exc = rb_make_no_method_exception(
3913 rb_eNoMethodError, 0, recv, RARRAY_LENINT(argv_ary), RARRAY_CONST_PTR(argv_ary), priv);
3914
3915 rb_exc_raise(exc);
3916 }
3917 rb_ary_unshift(argv_ary, rb_str_intern(symbol));
3918 }
3919 else {
3920 /* E.g. when argc == 2
3921 *
3922 * | | | | TOPN
3923 * | | +------+
3924 * | | +---> | arg1 | 0
3925 * +------+ | +------+
3926 * | arg1 | -+ +-> | arg0 | 1
3927 * +------+ | +------+
3928 * | arg0 | ---+ | sym | 2
3929 * +------+ +------+
3930 * | recv | | recv | 3
3931 * --+------+--------+------+------
3932 */
3933 int i = argc;
3934 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
3935 INC_SP(1);
3936 MEMMOVE(&TOPN(i - 1), &TOPN(i), VALUE, i);
3937 argc = ++calling->argc;
3938
3939 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
3940 /* Inadvertent symbol creation shall be forbidden, see [Feature #5112] */
3941 TOPN(i) = symbol;
3942 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
3943 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
3944 VALUE exc = rb_make_no_method_exception(
3945 rb_eNoMethodError, 0, recv, argc, argv, priv);
3946
3947 rb_exc_raise(exc);
3948 }
3949 else {
3950 TOPN(i) = rb_str_intern(symbol);
3951 }
3952 }
3953 }
3954
3955 calling->cd = &(struct rb_call_data) {
3956 .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
3957 .cc = NULL,
3958 };
3959 calling->cc = &VM_CC_ON_STACK(klass,
3960 vm_call_general,
3961 { .method_missing_reason = missing_reason },
3962 rb_callable_method_entry_with_refinements(klass, mid, NULL));
3963
3964 if (flags & VM_CALL_FCALL) {
3965 return vm_call_method(ec, reg_cfp, calling);
3966 }
3967
3968 const struct rb_callcache *cc = calling->cc;
3969 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
3970
3971 if (vm_cc_cme(cc) != NULL) {
3972 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
3973 case METHOD_VISI_PUBLIC: /* likely */
3974 return vm_call_method_each_type(ec, reg_cfp, calling);
3975 case METHOD_VISI_PRIVATE:
3976 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
3977 break;
3978 case METHOD_VISI_PROTECTED:
3979 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
3980 break;
3981 default:
3982 VM_UNREACHABLE(vm_call_method);
3983 }
3984 return vm_call_method_missing(ec, reg_cfp, calling);
3985 }
3986
3987 return vm_call_method_nome(ec, reg_cfp, calling);
3988}
3989
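/* Entry point for `recv.send(:m, a, b)` and `recv.__send__(...)`: the
 * method-name symbol is removed from the stack and the remaining arguments
 * are shifted down, so the frame ends up looking like a direct
 * `recv.m(a, b)` call (see the diagram below). */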
3990static VALUE
3991vm_call_opt_send0(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, int flags)
3992{
3993 const struct rb_callinfo *ci = calling->cd->ci;
3994 int i;
3995 VALUE sym;
3996
3997 i = calling->argc - 1;
3998
3999 if (calling->argc == 0) {
4000 rb_raise(rb_eArgError, "no method name given");
4001 }
4002
4003 sym = TOPN(i);
4004 /* E.g. when i == 2
4005 *
4006 * | | | | TOPN
4007 * +------+ | |
4008 * | arg1 | ---+ | | 0
4009 * +------+ | +------+
4010 * | arg0 | -+ +-> | arg1 | 1
4011 * +------+ | +------+
4012 * | sym | +---> | arg0 | 2
4013 * +------+ +------+
4014 * | recv | | recv | 3
4015 * --+------+--------+------+------
4016 */
4017 /* shift arguments */
4018 if (i > 0) {
4019 MEMMOVE(&TOPN(i), &TOPN(i-1), VALUE, i);
4020 }
4021 calling->argc -= 1;
4022 DEC_SP(1);
4023
4024 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4025}
4026
4027static VALUE
4028vm_call_opt_send_complex(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
4029{
4030 RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
4031 const struct rb_callinfo *ci = calling->cd->ci;
4032 int flags = VM_CALL_FCALL;
4033 VALUE sym;
4034
4035 VALUE argv_ary;
4036 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
4037 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4038 sym = rb_ary_shift(argv_ary);
4039 flags |= VM_CALL_ARGS_SPLAT;
4040 if (calling->kw_splat) {
4041 VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
4042 ((struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
4043 calling->kw_splat = 0;
4044 }
4045 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4046 }
4047
4048 if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
4049 return vm_call_opt_send0(ec, reg_cfp, calling, flags);
4050}
4051
4052static VALUE
4053vm_call_opt_send_simple(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
4054{
4055 RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
4056 return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
4057}
4058
4059static VALUE
4060vm_call_opt_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
4061{
4062 RB_DEBUG_COUNTER_INC(ccf_opt_send);
4063
4064 const struct rb_callinfo *ci = calling->cd->ci;
4065 int flags = vm_ci_flag(ci);
4066
4067 if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
4068 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
4069 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
4070 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
4071 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
4072 return vm_call_opt_send_complex(ec, reg_cfp, calling);
4073 }
4074
4075 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
4076 return vm_call_opt_send_simple(ec, reg_cfp, calling);
4077}
4078
4079static VALUE
4080vm_call_method_missing_body(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
4081 const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
4082{
4083 RB_DEBUG_COUNTER_INC(ccf_method_missing);
4084
4085 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4086 unsigned int argc, flag;
4087
4088 flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
4089 argc = ++calling->argc;
4090
4091 /* shift arguments: m(a, b, c) #=> method_missing(:m, a, b, c) */
4092 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4093 vm_check_canary(ec, reg_cfp->sp);
4094 if (argc > 1) {
4095 MEMMOVE(argv+1, argv, VALUE, argc-1);
4096 }
4097 argv[0] = ID2SYM(vm_ci_mid(orig_ci));
4098 INC_SP(1);
4099
4100 ec->method_missing_reason = reason;
4101 calling->cd = &(struct rb_call_data) {
4102 .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
4103 .cc = NULL,
4104 };
4105 calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
4106 rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
4107 return vm_call_method(ec, reg_cfp, calling);
4108}
4109
4110static VALUE
4111vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
4112{
4113 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
4114}
4115
4116static const rb_callable_method_entry_t *refined_method_callable_without_refinement(const rb_callable_method_entry_t *me);
4117static VALUE
4118vm_call_zsuper(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, VALUE klass)
4119{
4120 klass = RCLASS_SUPER(klass);
4121
4122 const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, vm_ci_mid(calling->cd->ci)) : NULL;
4123 if (cme == NULL) {
4124 return vm_call_method_nome(ec, cfp, calling);
4125 }
4126 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
4127 cme->def->body.refined.orig_me) {
4128 cme = refined_method_callable_without_refinement(cme);
4129 }
4130
4131 calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);
4132
4133 return vm_call_method_each_type(ec, cfp, calling);
4134}
4135
4136static inline VALUE
4137find_refinement(VALUE refinements, VALUE klass)
4138{
4139 if (NIL_P(refinements)) {
4140 return Qnil;
4141 }
4142 return rb_hash_lookup(refinements, klass);
4143}
4144
4145PUREFUNC(static rb_control_frame_t * current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp));
4146static rb_control_frame_t *
4147current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp)
4148{
4149 rb_control_frame_t *top_cfp = cfp;
4150
4151 if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
4152 const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;
4153
4154 do {
4155 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
4156 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
4157 /* TODO: orphan block */
4158 return top_cfp;
4159 }
4160 } while (cfp->iseq != local_iseq);
4161 }
4162 return cfp;
4163}
4164
4165static const rb_callable_method_entry_t *
4166refined_method_callable_without_refinement(const rb_callable_method_entry_t *me)
4167{
4168 const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
4169 const rb_callable_method_entry_t *cme;
4170
4171 if (orig_me->defined_class == 0) {
4172 cme = NULL;
4173        rb_bug("refined_method_callable_without_refinement: unreachable");
4174    }
4175 else {
4176 cme = (const rb_callable_method_entry_t *)orig_me;
4177 }
4178
4179 VM_ASSERT(callable_method_entry_p(cme));
4180
4181 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
4182 cme = NULL;
4183 }
4184
4185 return cme;
4186}
4187
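/* Resolve a call to a refined method: scan the lexical cref chain for an
 * active refinement of the method's owner (activated by `using SomeModule`)
 * and prefer the refining definition; otherwise fall back to the original
 * method, or continue the lookup in the superclass. */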
4188static const rb_callable_method_entry_t *
4189search_refined_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
4190{
4191 ID mid = vm_ci_mid(calling->cd->ci);
4192 const rb_cref_t *cref = vm_get_cref(cfp->ep);
4193 const struct rb_callcache * const cc = calling->cc;
4194 const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
4195
4196 for (; cref; cref = CREF_NEXT(cref)) {
4197 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
4198 if (NIL_P(refinement)) continue;
4199
4200 const rb_callable_method_entry_t *const ref_me =
4201 rb_callable_method_entry(refinement, mid);
4202
4203 if (ref_me) {
4204 if (vm_cc_call(cc) == vm_call_super_method) {
4205 const rb_control_frame_t *top_cfp = current_method_entry(ec, cfp);
4206 const rb_callable_method_entry_t *top_me = rb_vm_frame_method_entry(top_cfp);
4207 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
4208 continue;
4209 }
4210 }
4211
4212 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
4213 cme->def != ref_me->def) {
4214 cme = ref_me;
4215 }
4216 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
4217 return cme;
4218 }
4219 }
4220 else {
4221 return NULL;
4222 }
4223 }
4224
4225 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
4226 return refined_method_callable_without_refinement(vm_cc_cme(cc));
4227 }
4228 else {
4229 VALUE klass = RCLASS_SUPER(vm_cc_cme(cc)->defined_class);
4230 const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, mid) : NULL;
4231 return cme;
4232 }
4233}
4234
4235static VALUE
4236vm_call_refined(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
4237{
4238 const rb_callable_method_entry_t *ref_cme = search_refined_method(ec, cfp, calling);
4239
4240 if (ref_cme) {
4241 if (calling->cd->cc) {
4242 const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
4243 RB_OBJ_WRITE(cfp->iseq, &calling->cd->cc, cc);
4244 return vm_call_method(ec, cfp, calling);
4245 }
4246 else {
4247 struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, ref_cme);
4248            calling->cc = ref_cc;
4249 return vm_call_method(ec, cfp, calling);
4250 }
4251 }
4252 else {
4253 return vm_call_method_nome(ec, cfp, calling);
4254 }
4255}
4256
4257static inline VALUE vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_callinfo *ci, bool is_lambda, VALUE block_handler);
4258
4259NOINLINE(static VALUE
4260 vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4261 struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler));
4262
4263static VALUE
4264vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4265 struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler)
4266{
4267 int argc = calling->argc;
4268
4269 /* remove self */
4270 if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
4271 DEC_SP(1);
4272
4273 return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
4274}
4275
4276static VALUE
4277vm_call_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
4278{
4279 RB_DEBUG_COUNTER_INC(ccf_opt_call);
4280
4281 const struct rb_callinfo *ci = calling->cd->ci;
4282 VALUE procval = calling->recv;
4283 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
4284}
4285
4286static VALUE
4287vm_call_opt_block_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
4288{
4289 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
4290
4291 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
4292 const struct rb_callinfo *ci = calling->cd->ci;
4293
4294 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
4295 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
4296 }
4297 else {
4298 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
4299 calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
4300 return vm_call_general(ec, reg_cfp, calling);
4301 }
4302}
4303
4304static VALUE
4305vm_call_opt_struct_aref0(rb_execution_context_t *ec, struct rb_calling_info *calling)
4306{
4307 VALUE recv = calling->recv;
4308
4309 VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
4310 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4311 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
4312
4313 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4314 return internal_RSTRUCT_GET(recv, off);
4315}
4316
4317static VALUE
4318vm_call_opt_struct_aref(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
4319{
4320 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
4321
4322 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
4323 reg_cfp->sp -= 1;
4324 return ret;
4325}
4326
4327static VALUE
4328vm_call_opt_struct_aset0(rb_execution_context_t *ec, struct rb_calling_info *calling, VALUE val)
4329{
4330 VALUE recv = calling->recv;
4331
4332 VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
4333 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4334 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
4335
4336 rb_check_frozen(recv);
4337
4338 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4339 internal_RSTRUCT_SET(recv, off, val);
4340
4341 return val;
4342}
4343
4344static VALUE
4345vm_call_opt_struct_aset(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
4346{
4347 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
4348
4349 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
4350 reg_cfp->sp -= 2;
4351 return ret;
4352}
4353
4354NOINLINE(static VALUE vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
4355 const struct rb_callinfo *ci, const struct rb_callcache *cc));
4356
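/* Helper for attr-style optimized calls: when C-call TracePoint/event hooks
 * are armed, wrap `func` in RUBY_EVENT_C_CALL/C_RETURN events; otherwise
 * run the cheaper `nohook` setup (typically CC_SET_FASTPATH) and call
 * `func` directly. */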
4357#define VM_CALL_METHOD_ATTR(var, func, nohook) \
4358 if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
4359 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
4360 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
4361 var = func; \
4362 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
4363 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
4364 } \
4365 else { \
4366 nohook; \
4367 var = func; \
4368 }
4369
4370static VALUE
4371vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
4372 const struct rb_callinfo *ci, const struct rb_callcache *cc)
4373{
4374 switch (vm_cc_cme(cc)->def->body.optimized.type) {
4375 case OPTIMIZED_METHOD_TYPE_SEND:
4376 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
4377 return vm_call_opt_send(ec, cfp, calling);
4378 case OPTIMIZED_METHOD_TYPE_CALL:
4379 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
4380 return vm_call_opt_call(ec, cfp, calling);
4381 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
4382 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
4383 return vm_call_opt_block_call(ec, cfp, calling);
4384 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
4385 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4386 rb_check_arity(calling->argc, 0, 0);
4387
4388 VALUE v;
4389 VM_CALL_METHOD_ATTR(v,
4390 vm_call_opt_struct_aref(ec, cfp, calling),
4391 set_vm_cc_ivar(cc); \
4392 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4393 return v;
4394 }
4395 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
4396 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4397 rb_check_arity(calling->argc, 1, 1);
4398
4399 VALUE v;
4400 VM_CALL_METHOD_ATTR(v,
4401 vm_call_opt_struct_aset(ec, cfp, calling),
4402 set_vm_cc_ivar(cc); \
4403 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4404 return v;
4405 }
4406 default:
4407 rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
4408 }
4409}
4410
4411static VALUE
4412vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
4413{
4414 const struct rb_callinfo *ci = calling->cd->ci;
4415 const struct rb_callcache *cc = calling->cc;
4416 const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
4417 VALUE v;
4418
4419 VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));
4420
4421 switch (cme->def->type) {
4422 case VM_METHOD_TYPE_ISEQ:
4423 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
4424 return vm_call_iseq_setup(ec, cfp, calling);
4425
4426 case VM_METHOD_TYPE_NOTIMPLEMENTED:
4427 case VM_METHOD_TYPE_CFUNC:
4428 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
4429 return vm_call_cfunc(ec, cfp, calling);
4430
4431 case VM_METHOD_TYPE_ATTRSET:
4432 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4433
4434 rb_check_arity(calling->argc, 1, 1);
4435
4436 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG);
4437
4438 if (vm_cc_markable(cc)) {
4439 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4440 VM_CALL_METHOD_ATTR(v,
4441 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4442 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4443 }
4444 else {
4445 cc = &((struct rb_callcache) {
4446 .flags = T_IMEMO |
4447 (imemo_callcache << FL_USHIFT) |
4448 VM_CALLCACHE_UNMARKABLE |
4449 VM_CALLCACHE_ON_STACK,
4450 .klass = cc->klass,
4451 .cme_ = cc->cme_,
4452 .call_ = cc->call_,
4453 .aux_ = {
4454 .attr = {
4455 .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,
4456 }
4457 },
4458 });
4459
4460 VM_CALL_METHOD_ATTR(v,
4461 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4462 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4463 }
4464 return v;
4465
4466 case VM_METHOD_TYPE_IVAR:
4467 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4468 rb_check_arity(calling->argc, 0, 0);
4469 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4470 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT);
4471 VM_CALL_METHOD_ATTR(v,
4472 vm_call_ivar(ec, cfp, calling),
4473 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
4474 return v;
4475
4476 case VM_METHOD_TYPE_MISSING:
4477 vm_cc_method_missing_reason_set(cc, 0);
4478 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4479 return vm_call_method_missing(ec, cfp, calling);
4480
4481 case VM_METHOD_TYPE_BMETHOD:
4482 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
4483 return vm_call_bmethod(ec, cfp, calling);
4484
4485 case VM_METHOD_TYPE_ALIAS:
4486 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
4487 return vm_call_alias(ec, cfp, calling);
4488
4489 case VM_METHOD_TYPE_OPTIMIZED:
4490 return vm_call_optimized(ec, cfp, calling, ci, cc);
4491
4492 case VM_METHOD_TYPE_UNDEF:
4493 break;
4494
4495 case VM_METHOD_TYPE_ZSUPER:
4496 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
4497
4498 case VM_METHOD_TYPE_REFINED:
4499 // CC_SET_FASTPATH(cc, vm_call_refined, TRUE);
4500 // should not set FASTPATH since vm_call_refined assumes cc->call is vm_call_super_method on invokesuper.
4501 return vm_call_refined(ec, cfp, calling);
4502 }
4503
4504 rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
4505}
4506
4507NORETURN(static void vm_raise_method_missing(rb_execution_context_t *ec, int argc, const VALUE *argv, VALUE obj, int call_status));
4508
4509static VALUE
4510vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
4511{
4512 /* method missing */
4513 const struct rb_callinfo *ci = calling->cd->ci;
4514 const int stat = ci_missing_reason(ci);
4515
4516 if (vm_ci_mid(ci) == idMethodMissing) {
4517 if (UNLIKELY(calling->heap_argv)) {
4518 vm_raise_method_missing(ec, RARRAY_LENINT(calling->heap_argv), RARRAY_CONST_PTR(calling->heap_argv), calling->recv, stat);
4519 }
4520 else {
4521 rb_control_frame_t *reg_cfp = cfp;
4522 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4523 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
4524 }
4525 }
4526 else {
4527 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
4528 }
4529}
4530
4531/* Protected method calls and super invocations need to check that the receiver
4532 * (self for super) inherits the module on which the method is defined.
4533 * In the case of refinements, the check should consider the original class,
4534 * not the refinement.
4535 */
4536static VALUE
4537vm_defined_class_for_protected_call(const rb_callable_method_entry_t *me)
4538{
4539 VALUE defined_class = me->defined_class;
4540 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4541 return NIL_P(refined_class) ? defined_class : refined_class;
4542}
4543
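/* For example, with `class A; protected def greet; end; end`, a call
 * `other.greet` succeeds only if `self` at the call site is itself a kind
 * of A; vm_call_method below implements that check for
 * METHOD_VISI_PROTECTED. */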
4544static inline VALUE
4545vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
4546{
4547 const struct rb_callinfo *ci = calling->cd->ci;
4548 const struct rb_callcache *cc = calling->cc;
4549
4550 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4551
4552 if (vm_cc_cme(cc) != NULL) {
4553 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4554 case METHOD_VISI_PUBLIC: /* likely */
4555 return vm_call_method_each_type(ec, cfp, calling);
4556
4557 case METHOD_VISI_PRIVATE:
4558 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
4559 enum method_missing_reason stat = MISSING_PRIVATE;
4560 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4561
4562 vm_cc_method_missing_reason_set(cc, stat);
4563 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4564 return vm_call_method_missing(ec, cfp, calling);
4565 }
4566 return vm_call_method_each_type(ec, cfp, calling);
4567
4568 case METHOD_VISI_PROTECTED:
4569 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
4570 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
4571 if (!rb_obj_is_kind_of(cfp->self, defined_class)) {
4572 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4573 return vm_call_method_missing(ec, cfp, calling);
4574 }
4575 else {
4576 /* caching method info to dummy cc */
4577 VM_ASSERT(vm_cc_cme(cc) != NULL);
4578 struct rb_callcache cc_on_stack = *cc;
4579 FL_SET_RAW((VALUE)&cc_on_stack, VM_CALLCACHE_UNMARKABLE);
4580 calling->cc = &cc_on_stack;
4581 return vm_call_method_each_type(ec, cfp, calling);
4582 }
4583 }
4584 return vm_call_method_each_type(ec, cfp, calling);
4585
4586 default:
4587 rb_bug("unreachable");
4588 }
4589 }
4590 else {
4591 return vm_call_method_nome(ec, cfp, calling);
4592 }
4593}
4594
4595static VALUE
4596vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
4597{
4598 RB_DEBUG_COUNTER_INC(ccf_general);
4599 return vm_call_method(ec, reg_cfp, calling);
4600}
4601
4602void
4603rb_vm_cc_general(const struct rb_callcache *cc)
4604{
4605 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
4606 VM_ASSERT(cc != vm_cc_empty());
4607
4608 *(vm_call_handler *)&cc->call_ = vm_call_general;
4609}
4610
4611static VALUE
4612vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
4613{
4614 RB_DEBUG_COUNTER_INC(ccf_super_method);
4615
4616    // This check is introduced to keep this function distinct from `vm_call_general`, because some
4617    // compilers (we observed this with VC) can merge identical functions, so their addresses become the
4618    // same. The address of `vm_call_super_method` is used in `search_refined_method`, so it must differ.
4619 if (ec == NULL) rb_bug("unreachable");
4620
4621    /* this check is required to distinguish this function from other functions. */
4622 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
4623 return vm_call_method(ec, reg_cfp, calling);
4624}
4625
4626/* super */
4627
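/* `super` starts its lookup in the superclass of (the origin of) the class
 * that defines the currently executing method, not in the receiver's class;
 * refinement iclasses are unwrapped first. */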
4628static inline VALUE
4629vm_search_normal_superclass(VALUE klass)
4630{
4631 if (BUILTIN_TYPE(klass) == T_ICLASS &&
4632 RB_TYPE_P(RBASIC(klass)->klass, T_MODULE) &&
4633 FL_TEST_RAW(RBASIC(klass)->klass, RMODULE_IS_REFINEMENT)) {
4634 klass = RBASIC(klass)->klass;
4635 }
4636 klass = RCLASS_ORIGIN(klass);
4637 return RCLASS_SUPER(klass);
4638}
4639
4640NORETURN(static void vm_super_outside(void));
4641
4642static void
4643vm_super_outside(void)
4644{
4645 rb_raise(rb_eNoMethodError, "super called outside of method");
4646}
4647
4648static const struct rb_callcache *
4649empty_cc_for_super(void)
4650{
4651 return &vm_empty_cc_for_super;
4652}
4653
4654static const struct rb_callcache *
4655vm_search_super_method(const rb_control_frame_t *reg_cfp, struct rb_call_data *cd, VALUE recv)
4656{
4657 VALUE current_defined_class;
4658 const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
4659
4660 if (!me) {
4661 vm_super_outside();
4662 }
4663
4664 current_defined_class = vm_defined_class_for_protected_call(me);
4665
4666 if (BUILTIN_TYPE(current_defined_class) != T_MODULE &&
4667 reg_cfp->iseq != method_entry_iseqptr(me) &&
4668 !rb_obj_is_kind_of(recv, current_defined_class)) {
4669 VALUE m = RB_TYPE_P(current_defined_class, T_ICLASS) ?
4670 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
4671
4672 if (m) { /* not bound UnboundMethod */
4673 rb_raise(rb_eTypeError,
4674 "self has wrong type to call super in this context: "
4675 "%"PRIsVALUE" (expected %"PRIsVALUE")",
4676 rb_obj_class(recv), m);
4677 }
4678 }
4679
4680 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
4681 rb_raise(rb_eRuntimeError,
4682 "implicit argument passing of super from method defined"
4683 " by define_method() is not supported."
4684 " Specify all arguments explicitly.");
4685 }
4686
4687 ID mid = me->def->original_id;
4688
4689 // update iseq. really? (TODO)
4690 cd->ci = vm_ci_new_runtime(mid,
4691 vm_ci_flag(cd->ci),
4692 vm_ci_argc(cd->ci),
4693 vm_ci_kwarg(cd->ci));
4694
4695 RB_OBJ_WRITTEN(reg_cfp->iseq, Qundef, cd->ci);
4696
4697 const struct rb_callcache *cc;
4698
4699 VALUE klass = vm_search_normal_superclass(me->defined_class);
4700
4701 if (!klass) {
4702 /* bound instance method of module */
4703 cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
4704 RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
4705 }
4706 else {
4707 cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
4708 const rb_callable_method_entry_t *cached_cme = vm_cc_cme(cc);
4709
4710 // define_method can cache for different method id
4711 if (cached_cme == NULL) {
4712 // empty_cc_for_super is not markable object
4713 cd->cc = empty_cc_for_super();
4714 }
4715 else if (cached_cme->called_id != mid) {
4716 const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);
4717 if (cme) {
4718 cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
4719 RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
4720 }
4721 else {
4722 cd->cc = cc = empty_cc_for_super();
4723 }
4724 }
4725 else {
4726 switch (cached_cme->def->type) {
4727 // vm_call_refined (search_refined_method) assumes cc->call is vm_call_super_method on invokesuper
4728 case VM_METHOD_TYPE_REFINED:
4729 // cc->klass is superclass of receiver class. Checking cc->klass is not enough to invalidate IVC for the receiver class.
4730 case VM_METHOD_TYPE_ATTRSET:
4731 case VM_METHOD_TYPE_IVAR:
4732 vm_cc_call_set(cc, vm_call_super_method); // invalidate fastpath
4733 break;
4734 default:
4735 break; // use fastpath
4736 }
4737 }
4738 }
4739
4740 VM_ASSERT((vm_cc_cme(cc), true));
4741
4742 return cc;
4743}
4744
4745/* yield */
4746
4747static inline int
4748block_proc_is_lambda(const VALUE procval)
4749{
4750 rb_proc_t *proc;
4751
4752 if (procval) {
4753 GetProcPtr(procval, proc);
4754 return proc->is_lambda;
4755 }
4756 else {
4757 return 0;
4758 }
4759}
4760
4761static VALUE
4762vm_yield_with_cfunc(rb_execution_context_t *ec,
4763 const struct rb_captured_block *captured,
4764 VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
4765                    const rb_callable_method_entry_t *me)
4766{
4767 int is_lambda = FALSE; /* TODO */
4768 VALUE val, arg, blockarg;
4769 int frame_flag;
4770 const struct vm_ifunc *ifunc = captured->code.ifunc;
4771
4772 if (is_lambda) {
4773 arg = rb_ary_new4(argc, argv);
4774 }
4775 else if (argc == 0) {
4776 arg = Qnil;
4777 }
4778 else {
4779 arg = argv[0];
4780 }
4781
4782 blockarg = rb_vm_bh_to_procval(ec, block_handler);
4783
4784 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
4785 if (kw_splat) {
4786 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
4787 }
4788
4789 vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
4790 frame_flag,
4791 self,
4792 VM_GUARDED_PREV_EP(captured->ep),
4793 (VALUE)me,
4794 0, ec->cfp->sp, 0, 0);
4795 val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
4796 rb_vm_pop_frame(ec);
4797
4798 return val;
4799}
4800
4801VALUE
4802rb_vm_yield_with_cfunc(rb_execution_context_t *ec, const struct rb_captured_block *captured, int argc, const VALUE *argv)
4803{
4804 return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
4805}
4806
4807static VALUE
4808vm_yield_with_symbol(rb_execution_context_t *ec, VALUE symbol, int argc, const VALUE *argv, int kw_splat, VALUE block_handler)
4809{
4810 return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
4811}
4812
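/* Implicit arg0 splat for blocks: a single Array argument is spread across
 * the block's lead parameters, so `[[1, 2]].each { |a, b| }` binds a=1 and
 * b=2. Lambdas do not get this treatment. */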
4813static inline int
4814vm_callee_setup_block_arg_arg0_splat(rb_control_frame_t *cfp, const rb_iseq_t *iseq, VALUE *argv, VALUE ary)
4815{
4816 int i;
4817 long len = RARRAY_LEN(ary);
4818
4819 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
4820
4821 for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
4822 argv[i] = RARRAY_AREF(ary, i);
4823 }
4824
4825 return i;
4826}
4827
4828static inline VALUE
4829vm_callee_setup_block_arg_arg0_check(VALUE *argv)
4830{
4831 VALUE ary, arg0 = argv[0];
4832 ary = rb_check_array_type(arg0);
4833#if 0
4834 argv[0] = arg0;
4835#else
4836 VM_ASSERT(argv[0] == arg0);
4837#endif
4838 return ary;
4839}
4840
4841static int
4842vm_callee_setup_block_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_callinfo *ci, const rb_iseq_t *iseq, VALUE *argv, const enum arg_setup_type arg_setup_type)
4843{
4844 if (rb_simple_iseq_p(iseq)) {
4845 rb_control_frame_t *cfp = ec->cfp;
4846 VALUE arg0;
4847
4848 CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);
4849
4850 if (arg_setup_type == arg_setup_block &&
4851 calling->argc == 1 &&
4852 ISEQ_BODY(iseq)->param.flags.has_lead &&
4853 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
4854 !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
4855 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
4856 }
4857
4858 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
4859 if (arg_setup_type == arg_setup_block) {
4860 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
4861 int i;
4862 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
4863 for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
4864 calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* fill rest parameters */
4865 }
4866 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
4867 calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* simply truncate arguments */
4868 }
4869 }
4870 else {
4871 argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
4872 }
4873 }
4874
4875 return 0;
4876 }
4877 else {
4878 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
4879 }
4880}
4881
4882static int
4883vm_yield_setup_args(rb_execution_context_t *ec, const rb_iseq_t *iseq, const int argc, VALUE *argv, int flags, VALUE block_handler, enum arg_setup_type arg_setup_type)
4884{
4885 struct rb_calling_info calling_entry, *calling;
4886
4887 calling = &calling_entry;
4888 calling->argc = argc;
4889 calling->block_handler = block_handler;
4890 calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
4891 calling->recv = Qundef;
4892 calling->heap_argv = 0;
4893 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);
4894
4895 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
4896}
4897
4898/* ruby iseq -> ruby block */
4899
4900static VALUE
4901vm_invoke_iseq_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4902 struct rb_calling_info *calling, const struct rb_callinfo *ci,
4903 bool is_lambda, VALUE block_handler)
4904{
4905 const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(block_handler);
4906 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4907 const int arg_size = ISEQ_BODY(iseq)->param.size;
4908 VALUE * const rsp = GET_SP() - calling->argc;
4909 VALUE * const argv = rsp;
4910 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
4911
4912 SET_SP(rsp);
4913
4914 vm_push_frame(ec, iseq,
4915 VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
4916 captured->self,
4917 VM_GUARDED_PREV_EP(captured->ep), 0,
4918 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4919 rsp + arg_size,
4920 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
4921
4922 return Qundef;
4923}
4924
4925static VALUE
4926vm_invoke_symbol_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4927 struct rb_calling_info *calling, const struct rb_callinfo *ci,
4928 MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
4929{
4930 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
4931 int flags = vm_ci_flag(ci);
4932
4933 if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
4934 ((calling->argc == 0) ||
4935 (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
4936 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
4937 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
4938 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
4939 flags = 0;
4940 if (UNLIKELY(calling->heap_argv)) {
4941#if VM_ARGC_STACK_MAX < 0
4942 if (RARRAY_LEN(calling->heap_argv) < 1) {
4943 rb_raise(rb_eArgError, "no receiver given");
4944 }
4945#endif
4946 calling->recv = rb_ary_shift(calling->heap_argv);
4947 // Modify stack to avoid cfp consistency error
4948 reg_cfp->sp++;
4949 reg_cfp->sp[-1] = reg_cfp->sp[-2];
4950 reg_cfp->sp[-2] = calling->recv;
4951 flags |= VM_CALL_ARGS_SPLAT;
4952 }
4953 else {
4954 if (calling->argc < 1) {
4955 rb_raise(rb_eArgError, "no receiver given");
4956 }
4957 calling->recv = TOPN(--calling->argc);
4958 }
4959 if (calling->kw_splat) {
4960 flags |= VM_CALL_KW_SPLAT;
4961 }
4962 }
4963 else {
4964 if (calling->argc < 1) {
4965 rb_raise(rb_eArgError, "no receiver given");
4966 }
4967 calling->recv = TOPN(--calling->argc);
4968 }
4969
4970 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
4971}
4972
4973static VALUE
4974vm_invoke_ifunc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4975 struct rb_calling_info *calling, const struct rb_callinfo *ci,
4976 MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
4977{
4978 VALUE val;
4979 int argc;
4980 const struct rb_captured_block *captured = VM_BH_TO_IFUNC_BLOCK(block_handler);
4981 CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
4982 argc = calling->argc;
4983 val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ? RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
4984 POPN(argc); /* TODO: should put before C/yield? */
4985 return val;
4986}
4987
4988static VALUE
4989vm_proc_to_block_handler(VALUE procval)
4990{
4991 const struct rb_block *block = vm_proc_block(procval);
4992
4993 switch (vm_block_type(block)) {
4994 case block_type_iseq:
4995 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
4996 case block_type_ifunc:
4997 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
4998 case block_type_symbol:
4999 return VM_BH_FROM_SYMBOL(block->as.symbol);
5000 case block_type_proc:
5001 return VM_BH_FROM_PROC(block->as.proc);
5002 }
5003 VM_UNREACHABLE(vm_yield_with_proc);
5004 return Qundef;
5005}
5006
5007static VALUE
5008vm_invoke_proc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
5009 struct rb_calling_info *calling, const struct rb_callinfo *ci,
5010 bool is_lambda, VALUE block_handler)
5011{
5012 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
5013 VALUE proc = VM_BH_TO_PROC(block_handler);
5014 is_lambda = block_proc_is_lambda(proc);
5015 block_handler = vm_proc_to_block_handler(proc);
5016 }
5017
5018 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5019}
5020
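/* Dispatch on the block handler's type: iseq blocks from literal
 * `{ ... }`/`do ... end` bodies, C-level ifunc blocks (e.g. rb_block_call),
 * Symbol blocks passed as `&:sym`, and full Proc objects each have their
 * own invoker. */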
5021static inline VALUE
5022vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
5023 struct rb_calling_info *calling, const struct rb_callinfo *ci,
5024 bool is_lambda, VALUE block_handler)
5025{
5026 VALUE (*func)(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
5027 struct rb_calling_info *calling, const struct rb_callinfo *ci,
5028 bool is_lambda, VALUE block_handler);
5029
5030 switch (vm_block_handler_type(block_handler)) {
5031 case block_handler_type_iseq: func = vm_invoke_iseq_block; break;
5032 case block_handler_type_ifunc: func = vm_invoke_ifunc_block; break;
5033 case block_handler_type_proc: func = vm_invoke_proc_block; break;
5034 case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
5035 default: rb_bug("vm_invoke_block: unreachable");
5036 }
5037
5038 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5039}
5040
5041static VALUE
5042vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
5043{
5044 const rb_execution_context_t *ec = GET_EC();
5045 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
5046 struct rb_captured_block *captured;
5047
5048 if (cfp == 0) {
5049 rb_bug("vm_make_proc_with_iseq: unreachable");
5050 }
5051
5052 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
5053 captured->code.iseq = blockiseq;
5054
5055 return rb_vm_make_proc(ec, captured, rb_cProc);
5056}
5057
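/* vm_once_exec/vm_once_clear back the `once` instruction, which evaluates
 * an expression at most once per call site, e.g. the interpolated regexp
 * literal `/#{pattern}/o`. */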
5058static VALUE
5059vm_once_exec(VALUE iseq)
5060{
5061 VALUE proc = vm_make_proc_with_iseq((rb_iseq_t *)iseq);
5062 return rb_proc_call_with_block(proc, 0, 0, Qnil);
5063}
5064
5065static VALUE
5066vm_once_clear(VALUE data)
5067{
5068 union iseq_inline_storage_entry *is = (union iseq_inline_storage_entry *)data;
5069 is->once.running_thread = NULL;
5070 return Qnil;
5071}
5072
5073/* defined insn */
5074
5075static bool
5076check_respond_to_missing(VALUE obj, VALUE v)
5077{
5078 VALUE args[2];
5079 VALUE r;
5080
5081 args[0] = obj; args[1] = Qfalse;
5082 r = rb_check_funcall(v, idRespond_to_missing, 2, args);
5083 if (!UNDEF_P(r) && RTEST(r)) {
5084 return true;
5085 }
5086 else {
5087 return false;
5088 }
5089}
5090
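/* Implementation of the `defined?` keyword: op_type selects the category
 * (instance/global/class variable, constant, method, yield, zsuper,
 * $~ back-references, ...) and the result reports whether the expression
 * is defined. */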
5091static bool
5092vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
5093{
5094 VALUE klass;
5095 enum defined_type type = (enum defined_type)op_type;
5096
5097 switch (type) {
5098 case DEFINED_IVAR:
5099 return rb_ivar_defined(GET_SELF(), SYM2ID(obj));
5100 break;
5101 case DEFINED_GVAR:
5102 return rb_gvar_defined(SYM2ID(obj));
5103 break;
5104 case DEFINED_CVAR: {
5105 const rb_cref_t *cref = vm_get_cref(GET_EP());
5106 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
5107 return rb_cvar_defined(klass, SYM2ID(obj));
5108 break;
5109 }
5110 case DEFINED_CONST:
5111 case DEFINED_CONST_FROM: {
5112 bool allow_nil = type == DEFINED_CONST;
5113 klass = v;
5114 return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
5115 break;
5116 }
5117 case DEFINED_FUNC:
5118 klass = CLASS_OF(v);
5119 return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
5120 break;
5121 case DEFINED_METHOD:{
5122 VALUE klass = CLASS_OF(v);
5123 const rb_method_entry_t *me = rb_method_entry_with_refinements(klass, SYM2ID(obj), NULL);
5124
5125 if (me) {
5126 switch (METHOD_ENTRY_VISI(me)) {
5127 case METHOD_VISI_PRIVATE:
5128 break;
5129 case METHOD_VISI_PROTECTED:
5130 if (!rb_obj_is_kind_of(GET_SELF(), rb_class_real(me->defined_class))) {
5131 break;
5132 }
5133 case METHOD_VISI_PUBLIC:
5134 return true;
5135 break;
5136 default:
5137 rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
5138 }
5139 }
5140 else {
5141 return check_respond_to_missing(obj, v);
5142 }
5143 break;
5144 }
5145 case DEFINED_YIELD:
5146 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
5147 return true;
5148 }
5149 break;
5150 case DEFINED_ZSUPER:
5151 {
5152 const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(GET_CFP());
5153
5154 if (me) {
5155 VALUE klass = vm_search_normal_superclass(me->defined_class);
5156 if (!klass) return false;
5157
5158 ID id = me->def->original_id;
5159
5160 return rb_method_boundp(klass, id, 0);
5161 }
5162 }
5163 break;
5164 case DEFINED_REF:
5165 return RTEST(vm_backref_defined(ec, GET_LEP(), FIX2INT(obj)));
5166 default:
5167 rb_bug("unimplemented defined? type (VM)");
5168 break;
5169 }
5170
5171 return false;
5172}
5173
5174bool
5175rb_vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
5176{
5177 return vm_defined(ec, reg_cfp, op_type, obj, v);
5178}
5179
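/* Walk `lv` levels up the environment-pointer chain; for example, a local
 * variable captured from two enclosing scopes away is addressed with
 * level 2 and resolved through vm_get_ep(GET_EP(), 2). */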
5180static const VALUE *
5181vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
5182{
5183 rb_num_t i;
5184 const VALUE *ep = reg_ep;
5185 for (i = 0; i < lv; i++) {
5186 ep = GET_PREV_EP(ep);
5187 }
5188 return ep;
5189}
5190
5191static VALUE
5192vm_get_special_object(const VALUE *const reg_ep,
5193 enum vm_special_object_type type)
5194{
5195 switch (type) {
5196 case VM_SPECIAL_OBJECT_VMCORE:
5197 return rb_mRubyVMFrozenCore;
5198 case VM_SPECIAL_OBJECT_CBASE:
5199 return vm_get_cbase(reg_ep);
5200 case VM_SPECIAL_OBJECT_CONST_BASE:
5201 return vm_get_const_base(reg_ep);
5202 default:
5203 rb_bug("putspecialobject insn: unknown value_type %d", type);
5204 }
5205}
5206
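/* Implements the concatarray instruction, e.g. `[*a, *b]`: operands that
 * cannot be coerced with #to_a are wrapped in a one-element array, and the
 * left-hand side is dup'ed when necessary so the original array is never
 * mutated. */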
5207static VALUE
5208vm_concat_array(VALUE ary1, VALUE ary2st)
5209{
5210 const VALUE ary2 = ary2st;
5211 VALUE tmp1 = rb_check_to_array(ary1);
5212 VALUE tmp2 = rb_check_to_array(ary2);
5213
5214 if (NIL_P(tmp1)) {
5215 tmp1 = rb_ary_new3(1, ary1);
5216 }
5217
5218 if (NIL_P(tmp2)) {
5219 tmp2 = rb_ary_new3(1, ary2);
5220 }
5221
5222 if (tmp1 == ary1) {
5223 tmp1 = rb_ary_dup(ary1);
5224 }
5225 return rb_ary_concat(tmp1, tmp2);
5226}
5227
5228// The YJIT implementation uses this C function
5229// and needs to call a non-static function.
5230VALUE
5231rb_vm_concat_array(VALUE ary1, VALUE ary2st)
5232{
5233 return vm_concat_array(ary1, ary2st);
5234}
5235
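/* Implements the splatarray instruction, e.g. the argument splat in
 * `f(*ary)`: non-Array values are wrapped in a one-element array, and
 * `flag` requests a defensive dup so the callee cannot mutate `ary`. */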
5236static VALUE
5237vm_splat_array(VALUE flag, VALUE ary)
5238{
5239 VALUE tmp = rb_check_to_array(ary);
5240 if (NIL_P(tmp)) {
5241 return rb_ary_new3(1, ary);
5242 }
5243 else if (RTEST(flag)) {
5244 return rb_ary_dup(tmp);
5245 }
5246 else {
5247 return tmp;
5248 }
5249}
5250
5252// The YJIT implementation uses this C function
5253// and needs to call a non-static function.
5253VALUE
5254rb_vm_splat_array(VALUE flag, VALUE ary)
5255{
5256 return vm_splat_array(flag, ary);
5257}
5258
5259static VALUE
5260vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
5261{
5262 enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
5263
5264 if (flag & VM_CHECKMATCH_ARRAY) {
5265 long i;
5266 const long n = RARRAY_LEN(pattern);
5267
5268 for (i = 0; i < n; i++) {
5269 VALUE v = RARRAY_AREF(pattern, i);
5270 VALUE c = check_match(ec, v, target, type);
5271
5272 if (RTEST(c)) {
5273 return c;
5274 }
5275 }
5276 return Qfalse;
5277 }
5278 else {
5279 return check_match(ec, pattern, target, type);
5280 }
5281}
5282
5283VALUE
5284rb_vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
5285{
5286 return vm_check_match(ec, target, pattern, flag);
5287}
5288
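/* Backs the checkkeyword instruction: `kw_bits` is a Fixnum bitmap (or a
 * Hash once there are too many keywords) recording which keyword arguments
 * were supplied, so that a default such as `def m(k: expensive)` evaluates
 * `expensive` only when `k` was omitted. */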
5289static VALUE
5290vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
5291{
5292 const VALUE kw_bits = *(ep - bits);
5293
5294 if (FIXNUM_P(kw_bits)) {
5295 unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
5296 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
5297 return Qfalse;
5298 }
5299 else {
5300 VM_ASSERT(RB_TYPE_P(kw_bits, T_HASH));
5301 if (rb_hash_has_key(kw_bits, INT2FIX(idx))) return Qfalse;
5302 }
5303 return Qtrue;
5304}
5305
5306static void
5307vm_dtrace(rb_event_flag_t flag, rb_execution_context_t *ec)
5308{
5309 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
5310 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
5311 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
5312 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
5313
5314 switch (flag) {
5315 case RUBY_EVENT_CALL:
5316 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
5317 return;
5318 case RUBY_EVENT_C_CALL:
5319 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
5320 return;
5321 case RUBY_EVENT_RETURN:
5322 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
5323 return;
5324          case RUBY_EVENT_C_RETURN:
5325            RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
5326 return;
5327 }
5328 }
5329}
5330
5331static VALUE
5332vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
5333{
5334 if (!rb_const_defined_at(cbase, id)) {
5335 return 0;
5336 }
5337 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
5338 return rb_public_const_get_at(cbase, id);
5339 }
5340 else {
5341 return rb_const_get_at(cbase, id);
5342 }
5343}
5344
5345static VALUE
5346vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
5347{
5348 if (!RB_TYPE_P(klass, T_CLASS)) {
5349 return 0;
5350 }
5351 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
5352 VALUE tmp = rb_class_real(RCLASS_SUPER(klass));
5353
5354 if (tmp != super) {
5355 rb_raise(rb_eTypeError,
5356 "superclass mismatch for class %"PRIsVALUE"",
5357 rb_id2str(id));
5358 }
5359 else {
5360 return klass;
5361 }
5362 }
5363 else {
5364 return klass;
5365 }
5366}
5367
5368static VALUE
5369vm_check_if_module(ID id, VALUE mod)
5370{
5371 if (!RB_TYPE_P(mod, T_MODULE)) {
5372 return 0;
5373 }
5374 else {
5375 return mod;
5376 }
5377}
5378
5379static VALUE
5380declare_under(ID id, VALUE cbase, VALUE c)
5381{
5382 rb_set_class_path_string(c, cbase, rb_id2str(id));
5383 rb_const_set(cbase, id, c);
5384 return c;
5385}
5386
5387static VALUE
5388vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
5389{
5390 /* new class declaration */
5391 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
5392 VALUE c = declare_under(id, cbase, rb_define_class_id(id, s));
5393    rb_define_alloc_func(c, rb_get_alloc_func(c));
5394    rb_class_inherited(s, c);
5395 return c;
5396}
5397
5398static VALUE
5399vm_declare_module(ID id, VALUE cbase)
5400{
5401 /* new module declaration */
5402 return declare_under(id, cbase, rb_module_new());
5403}
5404
5405NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
5406static void
5407unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old)
5408{
5409 VALUE name = rb_id2str(id);
5410 VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
5411 name, type);
5412 VALUE location = rb_const_source_location_at(cbase, id);
5413 if (!NIL_P(location)) {
5414 rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
5415 " previous definition of %"PRIsVALUE" was here",
5416 rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
5417 }
5418 rb_exc_raise(rb_exc_new_str(rb_eTypeError, message));
5419}
5420
5421static VALUE
5422vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
5423{
5424 VALUE klass;
5425
5426 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
5427 rb_raise(rb_eTypeError,
5428 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
5429 rb_obj_class(super));
5430 }
5431
5432 vm_check_if_namespace(cbase);
5433
5434 /* find klass */
5435 rb_autoload_load(cbase, id);
5436 if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
5437 if (!vm_check_if_class(id, flags, super, klass))
5438 unmatched_redefinition("class", cbase, id, klass);
5439 return klass;
5440 }
5441 else {
5442 return vm_declare_class(id, flags, cbase, super);
5443 }
5444}
5445
5446static VALUE
5447vm_define_module(ID id, rb_num_t flags, VALUE cbase)
5448{
5449 VALUE mod;
5450
5451 vm_check_if_namespace(cbase);
5452 if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
5453 if (!vm_check_if_module(id, mod))
5454 unmatched_redefinition("module", cbase, id, mod);
5455 return mod;
5456 }
5457 else {
5458 return vm_declare_module(id, cbase);
5459 }
5460}
5461
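/* Entry point of the defineclass instruction: `class C < S; end`,
 * `class << obj; end`, and `module M; end` all funnel through here, reusing
 * a matching existing constant or declaring a new class/module. */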
5462static VALUE
5463vm_find_or_create_class_by_id(ID id,
5464 rb_num_t flags,
5465 VALUE cbase,
5466 VALUE super)
5467{
5468 rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);
5469
5470 switch (type) {
5471 case VM_DEFINECLASS_TYPE_CLASS:
5472 /* classdef returns class scope value */
5473 return vm_define_class(id, flags, cbase, super);
5474
5475 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
5476 /* classdef returns class scope value */
5477 return rb_singleton_class(cbase);
5478
5479 case VM_DEFINECLASS_TYPE_MODULE:
5480 /* classdef returns class scope value */
5481 return vm_define_module(id, flags, cbase);
5482
5483 default:
5484 rb_bug("unknown defineclass type: %d", (int)type);
5485 }
5486}
5487
5488static rb_method_visibility_t
5489vm_scope_visibility_get(const rb_execution_context_t *ec)
5490{
5491 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
5492
5493 if (!vm_env_cref_by_cref(cfp->ep)) {
5494 return METHOD_VISI_PUBLIC;
5495 }
5496 else {
5497 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
5498 }
5499}
5500
5501static int
5502vm_scope_module_func_check(const rb_execution_context_t *ec)
5503{
5504 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
5505
5506 if (!vm_env_cref_by_cref(cfp->ep)) {
5507 return FALSE;
5508 }
5509 else {
5510 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
5511 }
5512}
5513
5514static void
5515vm_define_method(const rb_execution_context_t *ec, VALUE obj, ID id, VALUE iseqval, int is_singleton)
5516{
5517 VALUE klass;
5518 rb_method_visibility_t visi;
5519 rb_cref_t *cref = vm_ec_cref(ec);
5520
5521 if (is_singleton) {
5522 klass = rb_singleton_class(obj); /* class and frozen checked in this API */
5523 visi = METHOD_VISI_PUBLIC;
5524 }
5525 else {
5526 klass = CREF_CLASS_FOR_DEFINITION(cref);
5527 visi = vm_scope_visibility_get(ec);
5528 }
5529
5530 if (NIL_P(klass)) {
5531 rb_raise(rb_eTypeError, "no class/module to add method");
5532 }
5533
5534 rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
5535    // Set max_iv_count on the class based on the number of instance-variable sets in its initialize method
5536 if (id == idInitialize && klass != rb_cObject && RB_TYPE_P(klass, T_CLASS) && (rb_get_alloc_func(klass) == rb_class_allocate_instance)) {
5537
5538 RCLASS_EXT(klass)->max_iv_count = rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval);
5539 }
5540
5541 if (!is_singleton && vm_scope_module_func_check(ec)) {
5542 klass = rb_singleton_class(klass);
5543 rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
5544 }
5545}
5546
5547static VALUE
5548vm_invokeblock_i(struct rb_execution_context_struct *ec,
5549 struct rb_control_frame_struct *reg_cfp,
5550 struct rb_calling_info *calling)
5551{
5552 const struct rb_callinfo *ci = calling->cd->ci;
5553 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
5554
5555 if (block_handler == VM_BLOCK_HANDLER_NONE) {
5556 rb_vm_localjump_error("no block given (yield)", Qnil, 0);
5557 }
5558 else {
5559 return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
5560 }
5561}
5562
5563enum method_explorer_type {
5564 mexp_search_method,
5565 mexp_search_invokeblock,
5566 mexp_search_super,
5567};
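/* How these search modes map onto the entry points below:
 *   mexp_search_method      - send / opt_send_without_block:
 *                             vm_search_method_fastpath()
 *   mexp_search_super       - invokesuper: vm_search_super_method()
 *   mexp_search_invokeblock - invokeblock (yield): no method entry at all;
 *                             the frame's block handler is invoked instead.
 */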
5568
5569static inline VALUE
5570vm_sendish(
5571 struct rb_execution_context_struct *ec,
5572 struct rb_control_frame_struct *reg_cfp,
5573 struct rb_call_data *cd,
5574 VALUE block_handler,
5575 enum method_explorer_type method_explorer
5576) {
5577 VALUE val = Qundef;
5578 const struct rb_callinfo *ci = cd->ci;
5579 const struct rb_callcache *cc;
5580 int argc = vm_ci_argc(ci);
5581 VALUE recv = TOPN(argc);
5582 struct rb_calling_info calling = {
5583 .block_handler = block_handler,
5584 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
5585 .recv = recv,
5586 .argc = argc,
5587 .cd = cd,
5588 };
5589
5590 switch (method_explorer) {
5591 case mexp_search_method:
5592 calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
5593 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5594 break;
5595 case mexp_search_super:
5596 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
5597 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5598 break;
5599 case mexp_search_invokeblock:
5600 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
5601 break;
5602 }
5603 return val;
5604}
5605
5606VALUE
5607rb_vm_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
5608{
5609 stack_check(ec);
5610 VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, false);
5611 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
5612 VM_EXEC(ec, val);
5613 return val;
5614}
5615
5616VALUE
5617rb_vm_opt_send_without_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
5618{
5619 stack_check(ec);
5620 VALUE bh = VM_BLOCK_HANDLER_NONE;
5621 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
5622 VM_EXEC(ec, val);
5623 return val;
5624}
5625
5626VALUE
5627rb_vm_invokesuper(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
5628{
5629 stack_check(ec);
5630 VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, true);
5631 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
5632 VM_EXEC(ec, val);
5633 return val;
5634}
5635
5636VALUE
5637rb_vm_invokeblock(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
5638{
5639 stack_check(ec);
5640 VALUE bh = VM_BLOCK_HANDLER_NONE;
5641 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
5642 VM_EXEC(ec, val);
5643 return val;
5644}
5645
5646/* object.c */
5647VALUE rb_nil_to_s(VALUE);
5648VALUE rb_true_to_s(VALUE);
5649VALUE rb_false_to_s(VALUE);
5650/* numeric.c */
5651VALUE rb_int_to_s(int argc, VALUE *argv, VALUE x);
5652VALUE rb_fix_to_s(VALUE);
5653/* variable.c */
5654VALUE rb_mod_to_s(VALUE);
5656
5657static VALUE
5658vm_objtostring(const rb_iseq_t *iseq, VALUE recv, CALL_DATA cd)
5659{
5660 int type = TYPE(recv);
5661 if (type == T_STRING) {
5662 return recv;
5663 }
5664
5665 const struct rb_callcache *cc = vm_search_method((VALUE)iseq, cd, recv);
5666
5667 switch (type) {
5668 case T_SYMBOL:
5669 if (check_cfunc(vm_cc_cme(cc), rb_sym_to_s)) {
5670 // rb_sym_to_s() allocates a mutable string, but since we are only
5671 // going to use this string for interpolation, it's fine to use the
5672 // frozen string.
5673 return rb_sym2str(recv);
5674 }
5675 break;
5676 case T_MODULE:
5677 case T_CLASS:
5678 if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
5679 // rb_mod_to_s() allocates a mutable string, but since we are only
5680 // going to use this string for interpolation, it's fine to use the
5681 // frozen string.
5682 VALUE val = rb_mod_name(recv);
5683 if (NIL_P(val)) {
5684 val = rb_mod_to_s(recv);
5685 }
5686 return val;
5687 }
5688 break;
5689 case T_NIL:
5690 if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
5691 return rb_nil_to_s(recv);
5692 }
5693 break;
5694 case T_TRUE:
5695 if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
5696 return rb_true_to_s(recv);
5697 }
5698 break;
5699 case T_FALSE:
5700 if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
5701 return rb_false_to_s(recv);
5702 }
5703 break;
5704 case T_FIXNUM:
5705 if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
5706 return rb_fix_to_s(recv);
5707 }
5708 break;
5709 }
5710 return Qundef;
5711}
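/* Worked example: for string interpolation like "#{:foo}", the objtostring
 * instruction reaches the T_SYMBOL case with recv == :foo. As long as
 * Symbol#to_s is still the original cfunc, the frozen fstring from
 * rb_sym2str() is returned instead of a fresh mutable copy; the following
 * concatstrings only reads it, so the difference is unobservable. Any
 * receiver that matches no case yields Qundef, which makes the caller
 * fall back to a normal to_s dispatch. */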
5712
5713static VALUE
5714vm_opt_str_freeze(VALUE str, int bop, ID id)
5715{
5716 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
5717 return str;
5718 }
5719 else {
5720 return Qundef;
5721 }
5722}
5723
5724/* this macro is mandatory to use OPTIMIZED_CMP. What a design! */
5725#define id_cmp idCmp
5726
5727static VALUE
5728vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
5729{
5730 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
5731 if (num == 0) {
5732 return Qnil;
5733 }
5734 else {
5735 VALUE result = *ptr;
5736 rb_snum_t i = num - 1;
5737 while (i-- > 0) {
5738 const VALUE v = *++ptr;
5739 if (OPTIMIZED_CMP(v, result) > 0) {
5740 result = v;
5741 }
5742 }
5743 return result;
5744 }
5745 }
5746 else {
5747 return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMax, 0, NULL, RB_NO_KEYWORDS);
5748 }
5749}
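/* Example: `[a, b, c].max` is compiled so that, while Array#max is
 * unredefined, the candidate VALUEs are scanned in place on the stack
 * (`ptr`) with OPTIMIZED_CMP and no temporary Array is allocated. Only
 * after a redefinition do we build the array with rb_ary_new4() and issue
 * a real `max` call (respecting refinements). vm_opt_newarray_min() below
 * is the mirror image. */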
5750
5751VALUE
5752rb_vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
5753{
5754 return vm_opt_newarray_max(ec, num, ptr);
5755}
5756
5757static VALUE
5758vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
5759{
5760 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
5761 if (num == 0) {
5762 return Qnil;
5763 }
5764 else {
5765 VALUE result = *ptr;
5766 rb_snum_t i = num - 1;
5767 while (i-- > 0) {
5768 const VALUE v = *++ptr;
5769 if (OPTIMIZED_CMP(v, result) < 0) {
5770 result = v;
5771 }
5772 }
5773 return result;
5774 }
5775 }
5776 else {
5777 return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMin, 0, NULL, RB_NO_KEYWORDS);
5778 }
5779}
5780
5781VALUE
5782rb_vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
5783{
5784 return vm_opt_newarray_min(ec, num, ptr);
5785}
5786
5787static VALUE
5788vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
5789{
5790 // If Array#hash is _not_ monkeypatched, use the optimized call
5791 if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
5792 return rb_ary_hash_values(num, ptr);
5793 }
5794 else {
5795 return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idHash, 0, NULL, RB_NO_KEYWORDS);
5796 }
5797}
5798
5799VALUE
5800rb_vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
5801{
5802 return vm_opt_newarray_hash(ec, num, ptr);
5803}
5804
5805#undef id_cmp
5806
5807#define IMEMO_CONST_CACHE_SHAREABLE IMEMO_FL_USER0
5808
5809static void
5810vm_track_constant_cache(ID id, void *ic)
5811{
5812 struct rb_id_table *const_cache = GET_VM()->constant_cache;
5813 VALUE lookup_result;
5814 st_table *ics;
5815
5816 if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
5817 ics = (st_table *)lookup_result;
5818 }
5819 else {
5820 ics = st_init_numtable();
5821 rb_id_table_insert(const_cache, id, (VALUE)ics);
5822 }
5823
5824 st_insert(ics, (st_data_t) ic, (st_data_t) Qtrue);
5825}
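/* The registry built here is two-level:
 *
 *   GET_VM()->constant_cache            rb_id_table, keyed by segment ID
 *     +-> st_table                      set of ICs, keyed by IC pointer
 *
 * so when a constant named `id` is later set or removed, every inline
 * cache that resolved through that name can be found and invalidated
 * without scanning all iseqs. */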
5826
5827static void
5828vm_ic_track_const_chain(rb_control_frame_t *cfp, IC ic, const ID *segments)
5829{
5830 RB_VM_LOCK_ENTER();
5831
5832 for (int i = 0; segments[i]; i++) {
5833 ID id = segments[i];
5834 if (id == idNULL) continue;
5835 vm_track_constant_cache(id, ic);
5836 }
5837
5838 RB_VM_LOCK_LEAVE();
5839}
5840
5841// For RJIT inlining
5842static inline bool
5843vm_inlined_ic_hit_p(VALUE flags, VALUE value, const rb_cref_t *ic_cref, const VALUE *reg_ep)
5844{
5845 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
5846 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
5847
5848 return (ic_cref == NULL || // no need to check CREF
5849 ic_cref == vm_get_cref(reg_ep));
5850 }
5851 return false;
5852}
5853
5854static bool
5855vm_ic_hit_p(const struct iseq_inline_constant_cache_entry *ice, const VALUE *reg_ep)
5856{
5857 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
5858 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
5859}
5860
5861// YJIT needs this function to never allocate and never raise
5862bool
5863rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
5864{
5865 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
5866}
5867
5868static void
5869vm_ic_update(const rb_iseq_t *iseq, IC ic, VALUE val, const VALUE *reg_ep, const VALUE *pc)
5870{
5871 if (ruby_vm_const_missing_count > 0) {
5872 ruby_vm_const_missing_count = 0;
5873 ic->entry = NULL;
5874 return;
5875 }
5876
5877 struct iseq_inline_constant_cache_entry *ice = (struct iseq_inline_constant_cache_entry *)rb_imemo_new(imemo_constcache, 0, 0, 0, 0);
5878 RB_OBJ_WRITE(ice, &ice->value, val);
5879 ice->ic_cref = vm_get_const_key_cref(reg_ep);
5880 if (rb_ractor_shareable_p(val)) ice->flags |= IMEMO_CONST_CACHE_SHAREABLE;
5881 RB_OBJ_WRITE(iseq, &ic->entry, ice);
5882
5883 RUBY_ASSERT(pc >= ISEQ_BODY(iseq)->iseq_encoded);
5884 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
5885 rb_yjit_constant_ic_update(iseq, ic, pos);
5886 rb_rjit_constant_ic_update(iseq, ic, pos);
5887}
5888
5889VALUE
5890rb_vm_opt_getconstant_path(rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, IC ic)
5891{
5892 VALUE val;
5893 const ID *segments = ic->segments;
5894 struct iseq_inline_constant_cache_entry *ice = ic->entry;
5895 if (ice && vm_ic_hit_p(ice, GET_EP())) {
5896 val = ice->value;
5897
5898 VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
5899 } else {
5900 ruby_vm_constant_cache_misses++;
5901 val = vm_get_ev_const_chain(ec, segments);
5902 vm_ic_track_const_chain(GET_CFP(), ic, segments);
5903 // Undo the PC increment to get the address of this instruction
5904 // INSN_ATTR(width) == 2
5905 vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
5906 }
5907 return val;
5908}
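/* Flow sketch: a hit returns ice->value directly (vm_ic_hit_p() already
 * vetted the CREF and ractor-shareability). A miss (1) resolves the path
 * with vm_get_ev_const_chain(), (2) registers this IC for invalidation
 * via vm_ic_track_const_chain(), and (3) caches a fresh imemo_constcache
 * entry. GET_PC() - 2 rewinds over the two-word opt_getconstant_path
 * instruction (opcode + IC operand) so the JITs can key their update to
 * the instruction's own address. */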
5909
5910static VALUE
5911vm_once_dispatch(rb_execution_context_t *ec, ISEQ iseq, ISE is)
5912{
5913 rb_thread_t *th = rb_ec_thread_ptr(ec);
5914 rb_thread_t *const RUNNING_THREAD_ONCE_DONE = (rb_thread_t *)(0x1);
5915
5916 again:
5917 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
5918 return is->once.value;
5919 }
5920 else if (is->once.running_thread == NULL) {
5921 VALUE val;
5922 is->once.running_thread = th;
5923 val = rb_ensure(vm_once_exec, (VALUE)iseq, vm_once_clear, (VALUE)is);
5924 RB_OBJ_WRITE(ec->cfp->iseq, &is->once.value, val);
5925 /* is->once.running_thread is cleared by vm_once_clear() */
5926 is->once.running_thread = RUNNING_THREAD_ONCE_DONE; /* success */
5927 return val;
5928 }
5929 else if (is->once.running_thread == th) {
5930 /* recursive once */
5931 return vm_once_exec((VALUE)iseq);
5932 }
5933 else {
5934 /* waiting for finish */
5935 RUBY_VM_CHECK_INTS(ec);
5936 rb_thread_schedule();
5937 goto again;
5938 }
5939}
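/* is->once.running_thread acts as a three-state latch:
 *   NULL - never run; the first arrival claims it and runs the body
 *   th   - running; the same thread re-entering (recursive once) simply
 *          runs the body again rather than deadlocking
 *   0x1  - RUNNING_THREAD_ONCE_DONE; is->once.value is the memoized result
 * Other threads that arrive mid-run spin on RUBY_VM_CHECK_INTS() +
 * rb_thread_schedule() until the latch reaches DONE. This is what backs,
 * e.g., the once-only evaluation of /#{...}/o regexp literals. */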
5940
5941static OFFSET
5942vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
5943{
5944 switch (OBJ_BUILTIN_TYPE(key)) {
5945 case -1:
5946 case T_FLOAT:
5947 case T_SYMBOL:
5948 case T_BIGNUM:
5949 case T_STRING:
5950 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
5951 SYMBOL_REDEFINED_OP_FLAG |
5952 INTEGER_REDEFINED_OP_FLAG |
5953 FLOAT_REDEFINED_OP_FLAG |
5954 NIL_REDEFINED_OP_FLAG |
5955 TRUE_REDEFINED_OP_FLAG |
5956 FALSE_REDEFINED_OP_FLAG |
5957 STRING_REDEFINED_OP_FLAG)) {
5958 st_data_t val;
5959 if (RB_FLOAT_TYPE_P(key)) {
5960 double kval = RFLOAT_VALUE(key);
5961 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
5962 key = FIXABLE(kval) ? LONG2FIX((long)kval) : rb_dbl2big(kval);
5963 }
5964 }
5965 if (rb_hash_stlike_lookup(hash, key, &val)) {
5966 return FIX2LONG((VALUE)val);
5967 }
5968 else {
5969 return else_offset;
5970 }
5971 }
5972 }
5973 return 0;
5974}
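/* Worked example: for
 *   case x; when 1 then A; when "s" then B; end
 * the compiler emits a CDHASH mapping the literal keys to branch offsets.
 * A Float key with no fractional part (modf() == 0.0) is normalized to an
 * Integer above, so `case 1.0` still reaches `when 1`, matching ordinary
 * Float#== semantics. Returning 0 instead of an offset tells
 * opt_case_dispatch to fall back to sequential `===` tests, which is also
 * the path whenever a relevant #=== has been redefined. */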
5975
5976NORETURN(static void
5977 vm_stack_consistency_error(const rb_execution_context_t *ec,
5978 const rb_control_frame_t *,
5979 const VALUE *));
5980static void
5981vm_stack_consistency_error(const rb_execution_context_t *ec,
5982 const rb_control_frame_t *cfp,
5983 const VALUE *bp)
5984{
5985 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
5986 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
5987 static const char stack_consistency_error[] =
5988 "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
5989#if defined RUBY_DEVEL
5990 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
5991 rb_str_cat_cstr(mesg, "\n");
5992 rb_str_append(mesg, rb_iseq_disasm(cfp->iseq));
5993 rb_bug_without_die(mesg);
5994#else
5995 rb_bug(stack_consistency_error, nsp, nbp);
5996#endif
5997}
5998
5999static VALUE
6000vm_opt_plus(VALUE recv, VALUE obj)
6001{
6002 if (FIXNUM_2_P(recv, obj) &&
6003 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
6004 return rb_fix_plus_fix(recv, obj);
6005 }
6006 else if (FLONUM_2_P(recv, obj) &&
6007 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6008 return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
6009 }
6010 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
6011 return Qundef;
6012 }
6013 else if (RBASIC_CLASS(recv) == rb_cFloat &&
6014 RBASIC_CLASS(obj) == rb_cFloat &&
6015 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6016 return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
6017 }
6018 else if (RBASIC_CLASS(recv) == rb_cString &&
6019 RBASIC_CLASS(obj) == rb_cString &&
6020 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
6021 return rb_str_opt_plus(recv, obj);
6022 }
6023 else if (RBASIC_CLASS(recv) == rb_cArray &&
6024 RBASIC_CLASS(obj) == rb_cArray &&
6025 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
6026 return rb_ary_plus(recv, obj);
6027 }
6028 else {
6029 return Qundef;
6030 }
6031}
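/* All the vm_opt_* arithmetic/comparison helpers below share this shape:
 * try the Fixnum fast path, then the flonum fast path, bail out on any
 * other special constant, then allow heap-allocated Float (and, where it
 * makes sense, String/Array) receivers, each guarded by
 * BASIC_OP_UNREDEFINED_P. Qundef is never a user-visible value here; it
 * is the sentinel telling the instruction to fall back to a full method
 * dispatch. */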
6032
6033static VALUE
6034vm_opt_minus(VALUE recv, VALUE obj)
6035{
6036 if (FIXNUM_2_P(recv, obj) &&
6037 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
6038 return rb_fix_minus_fix(recv, obj);
6039 }
6040 else if (FLONUM_2_P(recv, obj) &&
6041 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6042 return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
6043 }
6044 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
6045 return Qundef;
6046 }
6047 else if (RBASIC_CLASS(recv) == rb_cFloat &&
6048 RBASIC_CLASS(obj) == rb_cFloat &&
6049 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6050 return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
6051 }
6052 else {
6053 return Qundef;
6054 }
6055}
6056
6057static VALUE
6058vm_opt_mult(VALUE recv, VALUE obj)
6059{
6060 if (FIXNUM_2_P(recv, obj) &&
6061 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
6062 return rb_fix_mul_fix(recv, obj);
6063 }
6064 else if (FLONUM_2_P(recv, obj) &&
6065 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6066 return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
6067 }
6068 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
6069 return Qundef;
6070 }
6071 else if (RBASIC_CLASS(recv) == rb_cFloat &&
6072 RBASIC_CLASS(obj) == rb_cFloat &&
6073 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6074 return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
6075 }
6076 else {
6077 return Qundef;
6078 }
6079}
6080
6081static VALUE
6082vm_opt_div(VALUE recv, VALUE obj)
6083{
6084 if (FIXNUM_2_P(recv, obj) &&
6085 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
6086 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
6087 }
6088 else if (FLONUM_2_P(recv, obj) &&
6089 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6090 return rb_flo_div_flo(recv, obj);
6091 }
6092 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
6093 return Qundef;
6094 }
6095 else if (RBASIC_CLASS(recv) == rb_cFloat &&
6096 RBASIC_CLASS(obj) == rb_cFloat &&
6097 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6098 return rb_flo_div_flo(recv, obj);
6099 }
6100 else {
6101 return Qundef;
6102 }
6103}
6104
6105static VALUE
6106vm_opt_mod(VALUE recv, VALUE obj)
6107{
6108 if (FIXNUM_2_P(recv, obj) &&
6109 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
6110 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
6111 }
6112 else if (FLONUM_2_P(recv, obj) &&
6113 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6114 return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
6115 }
6116 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
6117 return Qundef;
6118 }
6119 else if (RBASIC_CLASS(recv) == rb_cFloat &&
6120 RBASIC_CLASS(obj) == rb_cFloat &&
6121 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6122 return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
6123 }
6124 else {
6125 return Qundef;
6126 }
6127}
6128
6129static VALUE
6130vm_opt_neq(const rb_iseq_t *iseq, CALL_DATA cd, CALL_DATA cd_eq, VALUE recv, VALUE obj)
6131{
6132 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
6133 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
6134
6135 if (!UNDEF_P(val)) {
6136 return RBOOL(!RTEST(val));
6137 }
6138 }
6139
6140 return Qundef;
6141}
6142
6143static VALUE
6144vm_opt_lt(VALUE recv, VALUE obj)
6145{
6146 if (FIXNUM_2_P(recv, obj) &&
6147 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
6148 return RBOOL((SIGNED_VALUE)recv < (SIGNED_VALUE)obj);
6149 }
6150 else if (FLONUM_2_P(recv, obj) &&
6151 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6152 return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
6153 }
6154 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
6155 return Qundef;
6156 }
6157 else if (RBASIC_CLASS(recv) == rb_cFloat &&
6158 RBASIC_CLASS(obj) == rb_cFloat &&
6159 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6160 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
6161 return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
6162 }
6163 else {
6164 return Qundef;
6165 }
6166}
6167
6168static VALUE
6169vm_opt_le(VALUE recv, VALUE obj)
6170{
6171 if (FIXNUM_2_P(recv, obj) &&
6172 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
6173 return RBOOL((SIGNED_VALUE)recv <= (SIGNED_VALUE)obj);
6174 }
6175 else if (FLONUM_2_P(recv, obj) &&
6176 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6177 return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
6178 }
6179 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
6180 return Qundef;
6181 }
6182 else if (RBASIC_CLASS(recv) == rb_cFloat &&
6183 RBASIC_CLASS(obj) == rb_cFloat &&
6184 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6185 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
6186 return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
6187 }
6188 else {
6189 return Qundef;
6190 }
6191}
6192
6193static VALUE
6194vm_opt_gt(VALUE recv, VALUE obj)
6195{
6196 if (FIXNUM_2_P(recv, obj) &&
6197 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
6198 return RBOOL((SIGNED_VALUE)recv > (SIGNED_VALUE)obj);
6199 }
6200 else if (FLONUM_2_P(recv, obj) &&
6201 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6202 return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
6203 }
6204 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
6205 return Qundef;
6206 }
6207 else if (RBASIC_CLASS(recv) == rb_cFloat &&
6208 RBASIC_CLASS(obj) == rb_cFloat &&
6209 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6210 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
6211 return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
6212 }
6213 else {
6214 return Qundef;
6215 }
6216}
6217
6218static VALUE
6219vm_opt_ge(VALUE recv, VALUE obj)
6220{
6221 if (FIXNUM_2_P(recv, obj) &&
6222 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
6223 return RBOOL((SIGNED_VALUE)recv >= (SIGNED_VALUE)obj);
6224 }
6225 else if (FLONUM_2_P(recv, obj) &&
6226 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6227 return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
6228 }
6229 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
6230 return Qundef;
6231 }
6232 else if (RBASIC_CLASS(recv) == rb_cFloat &&
6233 RBASIC_CLASS(obj) == rb_cFloat &&
6234 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6235 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
6236 return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
6237 }
6238 else {
6239 return Qundef;
6240 }
6241}
6242
6243
6244static VALUE
6245vm_opt_ltlt(VALUE recv, VALUE obj)
6246{
6247 if (SPECIAL_CONST_P(recv)) {
6248 return Qundef;
6249 }
6250 else if (RBASIC_CLASS(recv) == rb_cString &&
6251 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
6252 if (LIKELY(RB_TYPE_P(obj, T_STRING))) {
6253 return rb_str_buf_append(recv, obj);
6254 }
6255 else {
6256 return rb_str_concat(recv, obj);
6257 }
6258 }
6259 else if (RBASIC_CLASS(recv) == rb_cArray &&
6260 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
6261 return rb_ary_push(recv, obj);
6262 }
6263 else {
6264 return Qundef;
6265 }
6266}
6267
6268static VALUE
6269vm_opt_and(VALUE recv, VALUE obj)
6270{
6271 // If recv and obj are both fixnums, then the bottom tag bit
6272 // will be 1 on both. 1 & 1 == 1, so the result value will also
6273 // be a fixnum. If either side is *not* a fixnum, then the tag bit
6274 // will be 0, and we return Qundef.
6275 VALUE ret = ((SIGNED_VALUE) recv) & ((SIGNED_VALUE) obj);
6276
6277 if (FIXNUM_P(ret) &&
6278 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
6279 return ret;
6280 }
6281 else {
6282 return Qundef;
6283 }
6284}
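/* Worked example of the tag-bit trick, with the usual CRuby encoding
 * LONG2FIX(n) == (n << 1) | 1:
 *
 *   recv = LONG2FIX(6) = 0b1101      obj = LONG2FIX(3) = 0b0111
 *   ret  = 0b1101 & 0b0111 = 0b0101 = LONG2FIX(2)   (and 6 & 3 == 2)
 *
 * The low bit of `ret` is 1 iff both inputs had it set, so FIXNUM_P(ret)
 * doubles as the "both operands are Fixnums" check, and the AND of the
 * payload bits is already the correct payload of the result. */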
6285
6286static VALUE
6287vm_opt_or(VALUE recv, VALUE obj)
6288{
6289 if (FIXNUM_2_P(recv, obj) &&
6290 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
6291 return recv | obj;
6292 }
6293 else {
6294 return Qundef;
6295 }
6296}
6297
6298static VALUE
6299vm_opt_aref(VALUE recv, VALUE obj)
6300{
6301 if (SPECIAL_CONST_P(recv)) {
6302 if (FIXNUM_2_P(recv, obj) &&
6303 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
6304 return rb_fix_aref(recv, obj);
6305 }
6306 return Qundef;
6307 }
6308 else if (RBASIC_CLASS(recv) == rb_cArray &&
6309 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
6310 if (FIXNUM_P(obj)) {
6311 return rb_ary_entry_internal(recv, FIX2LONG(obj));
6312 }
6313 else {
6314 return rb_ary_aref1(recv, obj);
6315 }
6316 }
6317 else if (RBASIC_CLASS(recv) == rb_cHash &&
6318 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
6319 return rb_hash_aref(recv, obj);
6320 }
6321 else {
6322 return Qundef;
6323 }
6324}
6325
6326static VALUE
6327vm_opt_aset(VALUE recv, VALUE obj, VALUE set)
6328{
6329 if (SPECIAL_CONST_P(recv)) {
6330 return Qundef;
6331 }
6332 else if (RBASIC_CLASS(recv) == rb_cArray &&
6333 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
6334 FIXNUM_P(obj)) {
6335 rb_ary_store(recv, FIX2LONG(obj), set);
6336 return set;
6337 }
6338 else if (RBASIC_CLASS(recv) == rb_cHash &&
6339 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
6340 rb_hash_aset(recv, obj, set);
6341 return set;
6342 }
6343 else {
6344 return Qundef;
6345 }
6346}
6347
6348static VALUE
6349vm_opt_aref_with(VALUE recv, VALUE key)
6350{
6351 if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
6352 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
6353 rb_hash_compare_by_id_p(recv) == Qfalse &&
6354 !FL_TEST(recv, RHASH_PROC_DEFAULT)) {
6355 return rb_hash_aref(recv, key);
6356 }
6357 else {
6358 return Qundef;
6359 }
6360}
6361
6362VALUE
6363rb_vm_opt_aref_with(VALUE recv, VALUE key)
6364{
6365 return vm_opt_aref_with(recv, key);
6366}
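/* The extra guards matter for semantics, not just speed: opt_aref_with
 * covers literal string keys such as h["k"] without allocating the key.
 * A compare_by_identity hash must see a fresh, distinct string object,
 * and a hash with a default proc hands the key object to Ruby code, where
 * the shared frozen literal would be observable, so both bail out to the
 * generic path via Qundef. */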
6367
6368static VALUE
6369vm_opt_aset_with(VALUE recv, VALUE key, VALUE val)
6370{
6371 if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
6372 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
6373 rb_hash_compare_by_id_p(recv) == Qfalse) {
6374 return rb_hash_aset(recv, key, val);
6375 }
6376 else {
6377 return Qundef;
6378 }
6379}
6380
6381static VALUE
6382vm_opt_length(VALUE recv, int bop)
6383{
6384 if (SPECIAL_CONST_P(recv)) {
6385 return Qundef;
6386 }
6387 else if (RBASIC_CLASS(recv) == rb_cString &&
6388 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6389 if (bop == BOP_EMPTY_P) {
6390 return LONG2NUM(RSTRING_LEN(recv));
6391 }
6392 else {
6393 return rb_str_length(recv);
6394 }
6395 }
6396 else if (RBASIC_CLASS(recv) == rb_cArray &&
6397 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6398 return LONG2NUM(RARRAY_LEN(recv));
6399 }
6400 else if (RBASIC_CLASS(recv) == rb_cHash &&
6401 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6402 return INT2FIX(RHASH_SIZE(recv));
6403 }
6404 else {
6405 return Qundef;
6406 }
6407}
6408
6409static VALUE
6410vm_opt_empty_p(VALUE recv)
6411{
6412 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
6413 case Qundef: return Qundef;
6414 case INT2FIX(0): return Qtrue;
6415 default: return Qfalse;
6416 }
6417}
6418
6419VALUE rb_false(VALUE obj);
6420
6421static VALUE
6422vm_opt_nil_p(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
6423{
6424 if (NIL_P(recv) &&
6425 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
6426 return Qtrue;
6427 }
6428 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
6429 return Qfalse;
6430 }
6431 else {
6432 return Qundef;
6433 }
6434}
6435
6436static VALUE
6437fix_succ(VALUE x)
6438{
6439 switch (x) {
6440 case ~0UL:
6441 /* 0xFFFF_FFFF == INT2FIX(-1)
6442 * `-1.succ` is of course 0. */
6443 return INT2FIX(0);
6444 case RSHIFT(~0UL, 1):
6445 /* 0x7FFF_FFFF == LONG2FIX(0x3FFF_FFFF)
6446 * 0x3FFF_FFFF + 1 == 0x4000_0000, which is a Bignum. */
6447 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
6448 default:
6449 /* LONG2FIX(FIX2LONG(x)+FIX2LONG(y))
6450 * == ((lx*2+1)/2 + (ly*2+1)/2)*2+1
6451 * == lx*2 + ly*2 + 1
6452 * == (lx*2+1) + (ly*2+1) - 1
6453 * == x + y - 1
6454 *
6455 * Here, if we put y := INT2FIX(1):
6456 *
6457 * == x + INT2FIX(1) - 1
6458 * == x + 2 .
6459 */
6460 return x + 2;
6461 }
6462}
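/* Concrete check of the `x + 2` identity above, using LONG2FIX(n) == 2n+1:
 * INT2FIX(41) == 83, and 83 + 2 == 85 == INT2FIX(42). The two explicit
 * cases handle the edge values: INT2FIX(-1) (all bits set, where +2 would
 * wrap around the word, handled explicitly rather than relying on
 * unsigned overflow) and the largest Fixnum, whose successor must be
 * promoted to a Bignum. */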
6463
6464static VALUE
6465vm_opt_succ(VALUE recv)
6466{
6467 if (FIXNUM_P(recv) &&
6468 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
6469 return fix_succ(recv);
6470 }
6471 else if (SPECIAL_CONST_P(recv)) {
6472 return Qundef;
6473 }
6474 else if (RBASIC_CLASS(recv) == rb_cString &&
6475 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
6476 return rb_str_succ(recv);
6477 }
6478 else {
6479 return Qundef;
6480 }
6481}
6482
6483static VALUE
6484vm_opt_not(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
6485{
6486 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
6487 return RBOOL(!RTEST(recv));
6488 }
6489 else {
6490 return Qundef;
6491 }
6492}
6493
6494static VALUE
6495vm_opt_regexpmatch2(VALUE recv, VALUE obj)
6496{
6497 if (SPECIAL_CONST_P(recv)) {
6498 return Qundef;
6499 }
6500 else if (RBASIC_CLASS(recv) == rb_cString &&
6501 CLASS_OF(obj) == rb_cRegexp &&
6502 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
6503 return rb_reg_match(obj, recv);
6504 }
6505 else if (RBASIC_CLASS(recv) == rb_cRegexp &&
6506 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
6507 return rb_reg_match(recv, obj);
6508 }
6509 else {
6510 return Qundef;
6511 }
6512}
6513
6514rb_event_flag_t rb_iseq_event_flags(const rb_iseq_t *iseq, size_t pos);
6515
6516NOINLINE(static void vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp));
6517
6518static inline void
6519vm_trace_hook(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc,
6520 rb_event_flag_t pc_events, rb_event_flag_t target_event,
6521 rb_hook_list_t *global_hooks, rb_hook_list_t *const *local_hooks_ptr, VALUE val)
6522{
6523 rb_event_flag_t event = pc_events & target_event;
6524 VALUE self = GET_SELF();
6525
6526 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
6527
6528 if (event & global_hooks->events) {
6529 /* increment PC because source line is calculated with PC-1 */
6530 reg_cfp->pc++;
6531 vm_dtrace(event, ec);
6532 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0, val, 0);
6533 reg_cfp->pc--;
6534 }
6535
6536 // Load here since global hook above can add and free local hooks
6537 rb_hook_list_t *local_hooks = *local_hooks_ptr;
6538 if (local_hooks != NULL) {
6539 if (event & local_hooks->events) {
6540 /* increment PC because source line is calculated with PC-1 */
6541 reg_cfp->pc++;
6542 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0, val, 0);
6543 reg_cfp->pc--;
6544 }
6545 }
6546}
6547
6548// Return true if the given cc has a cfunc that is NOT handled by opt_send_without_block.
6549bool
6550rb_vm_opt_cfunc_p(CALL_CACHE cc, int insn)
6551{
6552 switch (insn) {
6553 case BIN(opt_eq):
6554 return check_cfunc(vm_cc_cme(cc), rb_obj_equal);
6555 case BIN(opt_nil_p):
6556 return check_cfunc(vm_cc_cme(cc), rb_false);
6557 case BIN(opt_not):
6558 return check_cfunc(vm_cc_cme(cc), rb_obj_not);
6559 default:
6560 return false;
6561 }
6562}
6563
6564#define VM_TRACE_HOOK(target_event, val) do { \
6565 if ((pc_events & (target_event)) & enabled_flags) { \
6566 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
6567 } \
6568} while (0)
6569
6570static VALUE
6571rescue_errinfo(rb_execution_context_t *ec, rb_control_frame_t *cfp)
6572{
6573 VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
6574 VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
6575 return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
6576}
6577
6578static void
6579vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp)
6580{
6581 const VALUE *pc = reg_cfp->pc;
6582 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
6583 rb_event_flag_t global_events = enabled_flags;
6584
6585 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
6586 return;
6587 }
6588 else {
6589 const rb_iseq_t *iseq = reg_cfp->iseq;
6590 VALUE iseq_val = (VALUE)iseq;
6591 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
6592 rb_event_flag_t pc_events = rb_iseq_event_flags(iseq, pos);
6593 rb_hook_list_t *local_hooks = iseq->aux.exec.local_hooks;
6594 rb_hook_list_t *const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
6595 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
6596 rb_hook_list_t *bmethod_local_hooks = NULL;
6597 rb_hook_list_t **bmethod_local_hooks_ptr = NULL;
6598 rb_event_flag_t bmethod_local_events = 0;
6599 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
6600 enabled_flags |= iseq_local_events;
6601
6602 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
6603
6604 if (bmethod_frame) {
6605 const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
6606 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
6607 bmethod_local_hooks = me->def->body.bmethod.hooks;
6608 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
6609 if (bmethod_local_hooks) {
6610 bmethod_local_events = bmethod_local_hooks->events;
6611 }
6612 }
6613
6614
6615 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
6616#if 0
6617 /* disable trace */
6618 /* TODO: incomplete */
6619 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
6620#else
6621 /* do not disable trace because of performance problem
6622 * (re-enable overhead)
6623 */
6624#endif
6625 return;
6626 }
6627 else if (ec->trace_arg != NULL) {
6628 /* already tracing */
6629 return;
6630 }
6631 else {
6632 rb_hook_list_t *global_hooks = rb_ec_ractor_hooks(ec);
6633 /* Note, not considering iseq local events here since the same
6634 * iseq could be used in multiple bmethods. */
6635 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
6636
6637 if (0) {
6638 ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
6639 (int)pos,
6640 (int)pc_events,
6641 RSTRING_PTR(rb_iseq_path(iseq)),
6642 (int)rb_iseq_line_no(iseq, pos),
6643 RSTRING_PTR(rb_iseq_label(iseq)));
6644 }
6645 VM_ASSERT(reg_cfp->pc == pc);
6646 VM_ASSERT(pc_events != 0);
6647
6648 /* check traces */
6649 if ((pc_events & RUBY_EVENT_B_CALL) && bmethod_frame && (bmethod_events & RUBY_EVENT_CALL)) {
6650 /* b_call instruction running as a method. Fire call event. */
6651 vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_CALL, RUBY_EVENT_CALL, global_hooks, bmethod_local_hooks_ptr, Qundef);
6652 }
6654 VM_TRACE_HOOK(RUBY_EVENT_RESCUE, rescue_errinfo(ec, reg_cfp));
6655 VM_TRACE_HOOK(RUBY_EVENT_LINE, Qundef);
6656 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
6657 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
6658 VM_TRACE_HOOK(RUBY_EVENT_END | RUBY_EVENT_RETURN | RUBY_EVENT_B_RETURN, TOPN(0));
6659 if ((pc_events & RUBY_EVENT_B_RETURN) && bmethod_frame && (bmethod_events & RUBY_EVENT_RETURN)) {
6660 /* b_return instruction running as a method. Fire return event. */
6661 vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_RETURN, RUBY_EVENT_RETURN, global_hooks, bmethod_local_hooks_ptr, TOPN(0));
6662 }
6663
6664 // Pin the iseq since `local_hooks_ptr` points inside the iseq's slot on the GC heap.
6665 // We need the pointer to stay valid in case compaction happens in a trace hook.
6666 //
6667 // Similar treatment is unnecessary for `bmethod_local_hooks_ptr` since
6668 // storage for `rb_method_definition_t` is not on the GC heap.
6669 RB_GC_GUARD(iseq_val);
6670 }
6671 }
6672}
6673#undef VM_TRACE_HOOK
6674
6675#if VM_CHECK_MODE > 0
6676NORETURN( NOINLINE( COLDFUNC
6677void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));
6678
6679void
6680Init_vm_stack_canary(void)
6681{
6682 /* This has to be called _after_ our PRNG is properly set up. */
6683 int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
6684 vm_stack_canary |= 0x01; // valid VALUE (Fixnum)
6685
6686 vm_stack_canary_was_born = true;
6687 VM_ASSERT(n == 0);
6688}
6689
6690void
6691rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
6692{
6693 /* Because a method has already been called, why not call
6694 * another one. */
6695 const char *insn = rb_insns_name(i);
6696 VALUE inspection = rb_inspect(c);
6697 const char *str = StringValueCStr(inspection);
6698
6699 rb_bug("dead canary found at %s: %s", insn, str);
6700}
6701
6702#else
6703void Init_vm_stack_canary(void) { /* nothing to do */ }
6704#endif
6705
6706
6707/* a part of the following code is generated by this ruby script:
6708
670916.times{|i|
6710 typedef_args = (0...i).map{|j| "VALUE v#{j+1}"}.join(", ")
6711 typedef_args.prepend(", ") if i != 0
6712 call_args = (0...i).map{|j| "argv[#{j}]"}.join(", ")
6713 call_args.prepend(", ") if i != 0
6714 puts %Q{
6715static VALUE
6716builtin_invoker#{i}(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6717{
6718 typedef VALUE (*rb_invoke_funcptr#{i}_t)(rb_execution_context_t *ec, VALUE self#{typedef_args});
6719 return (*(rb_invoke_funcptr#{i}_t)funcptr)(ec, self#{call_args});
6720}}
6721}
6722
6723puts
6724puts "static VALUE (* const cfunc_invokers[])(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr) = {"
672516.times{|i|
6726 puts " builtin_invoker#{i},"
6727}
6728puts "};"
6729*/
6730
6731static VALUE
6732builtin_invoker0(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6733{
6734 typedef VALUE (*rb_invoke_funcptr0_t)(rb_execution_context_t *ec, VALUE self);
6735 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
6736}
6737
6738static VALUE
6739builtin_invoker1(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6740{
6741 typedef VALUE (*rb_invoke_funcptr1_t)(rb_execution_context_t *ec, VALUE self, VALUE v1);
6742 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
6743}
6744
6745static VALUE
6746builtin_invoker2(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6747{
6748 typedef VALUE (*rb_invoke_funcptr2_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2);
6749 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
6750}
6751
6752static VALUE
6753builtin_invoker3(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6754{
6755 typedef VALUE (*rb_invoke_funcptr3_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3);
6756 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
6757}
6758
6759static VALUE
6760builtin_invoker4(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6761{
6762 typedef VALUE (*rb_invoke_funcptr4_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4);
6763 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
6764}
6765
6766static VALUE
6767builtin_invoker5(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6768{
6769 typedef VALUE (*rb_invoke_funcptr5_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5);
6770 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
6771}
6772
6773static VALUE
6774builtin_invoker6(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6775{
6776 typedef VALUE (*rb_invoke_funcptr6_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6);
6777 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
6778}
6779
6780static VALUE
6781builtin_invoker7(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6782{
6783 typedef VALUE (*rb_invoke_funcptr7_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7);
6784 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
6785}
6786
6787static VALUE
6788builtin_invoker8(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6789{
6790 typedef VALUE (*rb_invoke_funcptr8_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8);
6791 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
6792}
6793
6794static VALUE
6795builtin_invoker9(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6796{
6797 typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
6798 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
6799}
6800
6801static VALUE
6802builtin_invoker10(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6803{
6804 typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
6805 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
6806}
6807
6808static VALUE
6809builtin_invoker11(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6810{
6811 typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
6812 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
6813}
6814
6815static VALUE
6816builtin_invoker12(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6817{
6818 typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
6819 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
6820}
6821
6822static VALUE
6823builtin_invoker13(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6824{
6825 typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
6826 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
6827}
6828
6829static VALUE
6830builtin_invoker14(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6831{
6832 typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
6833 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
6834}
6835
6836static VALUE
6837builtin_invoker15(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6838{
6839 typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
6840 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
6841}
6842
6843typedef VALUE (*builtin_invoker)(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr);
6844
6845static builtin_invoker
6846lookup_builtin_invoker(int argc)
6847{
6848 static const builtin_invoker invokers[] = {
6849 builtin_invoker0,
6850 builtin_invoker1,
6851 builtin_invoker2,
6852 builtin_invoker3,
6853 builtin_invoker4,
6854 builtin_invoker5,
6855 builtin_invoker6,
6856 builtin_invoker7,
6857 builtin_invoker8,
6858 builtin_invoker9,
6859 builtin_invoker10,
6860 builtin_invoker11,
6861 builtin_invoker12,
6862 builtin_invoker13,
6863 builtin_invoker14,
6864 builtin_invoker15,
6865 };
6866
6867 return invokers[argc];
6868}
6869
6870static inline VALUE
6871invoke_bf(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const struct rb_builtin_function* bf, const VALUE *argv)
6872{
6873 const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF; // Verify an assumption of `Primitive.attr! :leaf`
6874 SETUP_CANARY(canary_p);
6875 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, (rb_insn_func_t)bf->func_ptr);
6876 CHECK_CANARY(canary_p, BIN(invokebuiltin));
6877 return ret;
6878}
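/* Builtins declared with `Primitive.attr! :leaf` promise not to push a
 * frame, raise, or otherwise move the VM stack pointer. SETUP_CANARY()
 * plants the random Fixnum vm_stack_canary (see Init_vm_stack_canary()
 * below) on the stack and CHECK_CANARY() verifies it after the call, so a
 * violated leaf assumption becomes an immediate, diagnosable rb_bug()
 * rather than silent stack corruption. */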
6879
6880static VALUE
6881vm_invoke_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function* bf, const VALUE *argv)
6882{
6883 return invoke_bf(ec, cfp, bf, argv);
6884}
6885
6886static VALUE
6887vm_invoke_builtin_delegate(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function *bf, unsigned int start_index)
6888{
6889 if (0) { // debug print
6890 fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
6891 for (int i=0; i<bf->argc; i++) {
6892 ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
6893 }
6894 ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc, bf->func_ptr);
6895 }
6896
6897 if (bf->argc == 0) {
6898 return invoke_bf(ec, cfp, bf, NULL);
6899 }
6900 else {
6901 const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
6902 return invoke_bf(ec, cfp, bf, argv);
6903 }
6904}
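/* Pointer arithmetic sketch: in a Ruby frame the locals sit just below
 * the VM_ENV_DATA_SIZE trailer that ep points into, so local i lives at
 *
 *   ep - local_table_size - VM_ENV_DATA_SIZE + 1 + i
 *
 * The delegate form therefore passes the frame's own locals, starting at
 * start_index, straight through as the builtin's argv with no copying. */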
6905
6906// for __builtin_inline!()
6907
6908VALUE
6909rb_vm_lvar_exposed(rb_execution_context_t *ec, int index)
6910{
6911 const rb_control_frame_t *cfp = ec->cfp;
6912 return cfp->ep[index];
6913}