Ruby 3.1.3p185 (2022-11-24 revision 1a6b16756e0ba6b95ab71a441357ed5484e33498)
vm_insnhelper.c
1/**********************************************************************
2
3 vm_insnhelper.c - instruction helper functions.
4
5 $Author$
6
7 Copyright (C) 2007 Koichi Sasada
8
9**********************************************************************/
10
11#include "ruby/internal/config.h"
12
13#include <math.h>
14
15#include "constant.h"
16#include "debug_counter.h"
17#include "internal.h"
18#include "internal/class.h"
19#include "internal/compar.h"
20#include "internal/hash.h"
21#include "internal/numeric.h"
22#include "internal/proc.h"
23#include "internal/random.h"
24#include "internal/variable.h"
25#include "internal/struct.h"
26#include "variable.h"
27
28/* finish iseq array */
29#include "insns.inc"
30#ifndef MJIT_HEADER
31#include "insns_info.inc"
32#endif
33
34extern rb_method_definition_t *rb_method_definition_create(rb_method_type_t type, ID mid);
35extern void rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *def, void *opts);
36extern int rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_definition_t *d2);
37extern VALUE rb_make_no_method_exception(VALUE exc, VALUE format, VALUE obj,
38 int argc, const VALUE *argv, int priv);
39
40#ifndef MJIT_HEADER
41static const struct rb_callcache vm_empty_cc;
42static const struct rb_callcache vm_empty_cc_for_super;
43#endif
44
45/* control stack frame */
46
47static rb_control_frame_t *vm_get_ruby_level_caller_cfp(const rb_execution_context_t *ec, const rb_control_frame_t *cfp);
48
49MJIT_STATIC VALUE
50ruby_vm_special_exception_copy(VALUE exc)
51{
52 VALUE e = rb_obj_alloc(rb_class_real(RBASIC_CLASS(exc)));
53 rb_obj_copy_ivar(e, exc);
54 return e;
55}
56
NORETURN(static void ec_stack_overflow(rb_execution_context_t *ec, int));
/* Raise the pre-allocated SystemStackError on `ec`.  When `setup` is
 * true a private copy of the exception is made and a backtrace is
 * attached; otherwise the shared template is raised as-is (used when
 * there may not be enough stack left to build a backtrace).
 * Never returns: jumps out via EC_JUMP_TAG. */
static void
ec_stack_overflow(rb_execution_context_t *ec, int setup)
{
    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;
    if (setup) {
        VALUE at = rb_ec_backtrace_object(ec);
        mesg = ruby_vm_special_exception_copy(mesg);
        rb_ivar_set(mesg, idBt, at);
        rb_ivar_set(mesg, idBt_locations, at);
    }
    ec->errinfo = mesg;
    EC_JUMP_TAG(ec, TAG_RAISE);
}
72
NORETURN(static void vm_stackoverflow(void));
#ifdef MJIT_HEADER
NOINLINE(static COLDFUNC void vm_stackoverflow(void));
#endif

/* Convenience wrapper: raise SystemStackError (with backtrace) on the
 * current execution context.  Never returns. */
static void
vm_stackoverflow(void)
{
    ec_stack_overflow(GET_EC(), TRUE);
}
83
NORETURN(MJIT_STATIC void rb_ec_stack_overflow(rb_execution_context_t *ec, int crit));
/* Signal a machine-stack overflow on `ec`.
 * During GC raising is impossible, so abort with rb_bug.  When `crit`
 * is set, raise the fatal pre-allocated stack error without building a
 * backtrace.  Otherwise raise SystemStackError; a backtrace is only
 * attempted when an alternate signal stack is configured, since
 * building one consumes stack we may not have.  Never returns. */
MJIT_STATIC void
rb_ec_stack_overflow(rb_execution_context_t *ec, int crit)
{
    if (rb_during_gc()) {
        rb_bug("system stack overflow during GC. Faulty native extension?");
    }
    if (crit) {
        ec->raised_flag = RAISED_STACKOVERFLOW;
        ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
        EC_JUMP_TAG(ec, TAG_RAISE);
    }
#ifdef USE_SIGALTSTACK
    ec_stack_overflow(ec, TRUE);
#else
    ec_stack_overflow(ec, FALSE);
#endif
}
102
103
104#if VM_CHECK_MODE > 0
/* VM_CHECK_MODE helper: is `klass` acceptable as the defined_class of
 * a callable method entry?  At check level >= 2 it must be a module,
 * an ICLASS wrapping a module, or a class whose ancestry reaches
 * BasicObject; at level 1 any non-zero value passes. */
static int
callable_class_p(VALUE klass)
{
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;
    switch (RB_BUILTIN_TYPE(klass)) {
      default:
        break;
      case T_ICLASS:
        /* Only ICLASSes whose super is a module count here; other
         * ICLASSes fall back to the ancestry walk below. */
        if (!RB_TYPE_P(RCLASS_SUPER(klass), T_MODULE)) break;
        /* fall through */
      case T_MODULE:
        return TRUE;
    }
    while (klass) {
        if (klass == rb_cBasicObject) {
            return TRUE;
        }
        klass = RCLASS_SUPER(klass);
    }
    return FALSE;
#else
    return klass != 0;
#endif
}
129
130static int
131callable_method_entry_p(const rb_callable_method_entry_t *cme)
132{
133 if (cme == NULL) {
134 return TRUE;
135 }
136 else {
137 VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment));
138
139 if (callable_class_p(cme->defined_class)) {
140 return TRUE;
141 }
142 else {
143 return FALSE;
144 }
145 }
146}
147
/* VM_CHECK_MODE helper: validate one pushed frame against the
 * requirements chosen per frame magic by vm_check_frame() below.
 *   req_block: frame must be "local" (specval holds a block handler);
 *   req_me:    the me/cref slot must hold a method entry;
 *   req_cref:  the me/cref slot must hold a CREF;
 *   is_cframe: frame is expected to be a C (non-iseq) frame.
 * Aborts via rb_bug on any violation. */
static void
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref, VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
{
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env; /* impossible value */

    if (RB_TYPE_P(cref_or_me, T_IMEMO)) {
        cref_or_me_type = imemo_type(cref_or_me);
    }
    if (type & VM_FRAME_FLAG_BMETHOD) {
        /* bmethod frames always carry a method entry regardless of magic */
        req_me = TRUE;
    }

    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    }
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
    }

    if (req_me) {
        if (cref_or_me_type != imemo_ment) {
            rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
        }
    }
    else {
        if (req_cref && cref_or_me_type != imemo_cref) {
            rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
        }
        else { /* cref or Qfalse */
            if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
                /* lambda and ifunc frames may legitimately carry a ment */
                if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
                    /* ignore */
                }
                else {
                    rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
                }
            }
        }
    }

    if (cref_or_me_type == imemo_ment) {
        const rb_callable_method_entry_t *me = (const rb_callable_method_entry_t *)cref_or_me;

        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
        }
    }

    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RUBY_VM_NORMAL_ISEQ_P(iseq) /* argument error. it should be fixed */);
    }
    else {
        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    }
}
205
/* VM_CHECK_MODE helper: dispatch on the frame magic and validate the
 * frame being pushed against the requirement table below. */
static void
vm_check_frame(VALUE type,
               VALUE specval,
               VALUE cref_or_me,
               const rb_iseq_t *iseq)
{
    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;
    VM_ASSERT(FIXNUM_P(type));

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
    case magic: \
      vm_check_frame_detail(type, req_block, req_me, req_cref, \
                            specval, cref_or_me, is_cframe, iseq); \
      break
    switch (given_magic) {
      /*                           BLK    ME     CREF   CFRAME */
      CHECK(VM_FRAME_MAGIC_METHOD, TRUE,  TRUE,  FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_CLASS,  TRUE,  FALSE, TRUE,  FALSE);
      CHECK(VM_FRAME_MAGIC_TOP,    TRUE,  FALSE, TRUE,  FALSE);
      CHECK(VM_FRAME_MAGIC_CFUNC,  TRUE,  TRUE,  FALSE, TRUE);
      CHECK(VM_FRAME_MAGIC_BLOCK,  FALSE, FALSE, FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_IFUNC,  FALSE, FALSE, FALSE, TRUE);
      CHECK(VM_FRAME_MAGIC_EVAL,   FALSE, FALSE, FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_DUMMY,  TRUE,  FALSE, FALSE, FALSE);
      default:
        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
    }
#undef CHECK
}
236
/* A canary VALUE placed one slot past a frame's declared stack usage;
 * if an instruction overwrites it, that instruction's stack-effect
 * annotation is wrong. */
static VALUE vm_stack_canary; /* Initialized later */
static bool vm_stack_canary_was_born = false;

#ifndef MJIT_HEADER
/* Abort with detailed diagnostics when the word at `sp` is the live
 * stack canary, i.e. some instruction pushed past its declared depth.
 * No-ops when the canary is not yet initialized, when no cfp exists
 * yet, when there is no iseq, or when the slot is clean. */
MJIT_FUNC_EXPORTED void
rb_vm_check_canary(const rb_execution_context_t *ec, VALUE *sp)
{
    const struct rb_control_frame_struct *reg_cfp = ec->cfp;
    const struct rb_iseq_struct *iseq;

    if (! LIKELY(vm_stack_canary_was_born)) {
        return; /* :FIXME: isn't it rather fatal to enter this branch? */
    }
    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
        /* This is at the very beginning of a thread. cfp does not exist. */
        return;
    }
    else if (! (iseq = GET_ISEQ())) {
        return;
    }
    else if (LIKELY(sp[0] != vm_stack_canary)) {
        return;
    }
    else {
        /* we are going to call methods below; squash the canary to
         * prevent infinite loop. */
        sp[0] = Qundef;
    }

    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE *encoded = iseq->body->iseq_encoded;
    const ptrdiff_t pos = GET_PC() - encoded;
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const VALUE inspection = rb_inspect(iseqw);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);

    /* rb_bug() is not capable of outputting this large contents.  It
       is designed to run from a SIGSEGV handler, which tends to be
       very restricted. */
    ruby_debug_printf(
        "We are killing the stack canary set by %s, "
        "at %s@pc=%"PRIdPTR"\n"
        "watch out the C stack trace.\n"
        "%s",
        name, stri, pos, strd);
    rb_bug("see above.");
}
#endif
#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
290
291#else
292#define vm_check_canary(ec, sp)
293#define vm_check_frame(a, b, c, d)
294#endif /* VM_CHECK_MODE > 0 */
295
#if USE_DEBUG_COUNTER
/* Bump the debug counters that classify each frame push: overall count,
 * Ruby/C transition kind (R2R/R2C/C2R/C2C), and the per-magic bucket. */
static void
vm_push_frame_debug_counter_inc(
    const struct rb_execution_context_struct *ec,
    const struct rb_control_frame_struct *reg_cfp,
    VALUE type)
{
    const struct rb_control_frame_struct *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(reg_cfp);

    RB_DEBUG_COUNTER_INC(frame_push);

    if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
        const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
        const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
        if (prev) {
            if (curr) {
                RB_DEBUG_COUNTER_INC(frame_R2R);
            }
            else {
                RB_DEBUG_COUNTER_INC(frame_R2C);
            }
        }
        else {
            if (curr) {
                RB_DEBUG_COUNTER_INC(frame_C2R);
            }
            else {
                RB_DEBUG_COUNTER_INC(frame_C2C);
            }
        }
    }

    switch (type & VM_FRAME_MAGIC_MASK) {
      case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
      case VM_FRAME_MAGIC_BLOCK:  RB_DEBUG_COUNTER_INC(frame_push_block);  return;
      case VM_FRAME_MAGIC_CLASS:  RB_DEBUG_COUNTER_INC(frame_push_class);  return;
      case VM_FRAME_MAGIC_TOP:    RB_DEBUG_COUNTER_INC(frame_push_top);    return;
      case VM_FRAME_MAGIC_CFUNC:  RB_DEBUG_COUNTER_INC(frame_push_cfunc);  return;
      case VM_FRAME_MAGIC_IFUNC:  RB_DEBUG_COUNTER_INC(frame_push_ifunc);  return;
      case VM_FRAME_MAGIC_EVAL:   RB_DEBUG_COUNTER_INC(frame_push_eval);   return;
      case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
      case VM_FRAME_MAGIC_DUMMY:  RB_DEBUG_COUNTER_INC(frame_push_dummy);  return;
    }

    rb_bug("unreachable");
}
#else
#define vm_push_frame_debug_counter_inc(ec, cfp, t) /* void */
#endif
345
STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS,   VM_ENV_DATA_INDEX_FLAGS   == -0);

/* Push a new control frame onto `ec`.
 * `sp` points at the first free slot for the new frame's locals.
 * `local_size` slots are nil-initialized, then the three env words
 * (me/cref, specval, flags) are laid down, so ep == sp - 1 afterwards.
 * `stack_max` is the worst-case operand-stack depth, used for the
 * up-front overflow check together with the locals. */
static void
vm_push_frame(rb_execution_context_t *ec,
              const rb_iseq_t *iseq,
              VALUE type,
              VALUE self,
              VALUE specval,
              VALUE cref_or_me,
              const VALUE *pc,
              VALUE *sp,
              int local_size,
              int stack_max)
{
    rb_control_frame_t *const cfp = RUBY_VM_NEXT_CONTROL_FRAME(ec->cfp);

    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);

    /* check stack overflow */
    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
    vm_check_canary(ec, sp);

    /* setup vm value stack */

    /* initialize local variables */
    for (int i=0; i < local_size; i++) {
        *sp++ = Qnil;
    }

    /* setup ep with managing data */
    *sp++ = cref_or_me; /* ep[-2] / Qnil or T_IMEMO(cref) or T_IMEMO(ment) */
    *sp++ = specval     /* ep[-1] / block handler or prev env ptr */;
    *sp++ = type;       /* ep[-0] / ENV_FLAGS */

    /* setup new frame */
    *cfp = (const struct rb_control_frame_struct) {
        .pc = pc,
        .sp = sp,
        .iseq = iseq,
        .self = self,
        .ep = sp - 1,
        .block_code = NULL,
        .__bp__ = sp, /* Store initial value of ep as bp to skip calculation cost of bp on JIT cancellation. */
#if VM_DEBUG_BP_CHECK
        .bp_check = sp,
#endif
        .jit_return = NULL
    };

    ec->cfp = cfp;

    if (VMDEBUG == 2) {
        SDR();
    }
    vm_push_frame_debug_counter_inc(ec, cfp, type);
}
405
/* Pop `cfp` (with environment pointer `ep`) off `ec`, checking pending
 * interrupts first.  Returns non-zero iff the popped frame carried
 * VM_FRAME_FLAG_FINISH, i.e. the interpreter loop that pushed it
 * should return to its C caller. */
static inline int
vm_pop_frame(rb_execution_context_t *ec, rb_control_frame_t *cfp, const VALUE *ep)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];

    if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
    if (VMDEBUG == 2)       SDR();

    RUBY_VM_CHECK_INTS(ec);
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    return flags & VM_FRAME_FLAG_FINISH;
}
420
/* Pop the topmost frame of `ec`, discarding the FINISH-flag result. */
MJIT_STATIC void
rb_vm_pop_frame(rb_execution_context_t *ec)
{
    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
}
426
427/* method dispatch */
428static inline VALUE
429rb_arity_error_new(int argc, int min, int max)
430{
431 VALUE err_mess = 0;
432 if (min == max) {
433 err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d)", argc, min);
434 }
435 else if (max == UNLIMITED_ARGUMENTS) {
436 err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d+)", argc, min);
437 }
438 else {
439 err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d..%d)", argc, min, max);
440 }
441 return rb_exc_new3(rb_eArgError, err_mess);
442}
443
/* Raise the ArgumentError built by rb_arity_error_new(). */
MJIT_STATIC void
rb_error_arity(int argc, int min, int max)
{
    rb_exc_raise(rb_arity_error_new(argc, min, max));
}
449
450/* lvar */
451
NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));

/* Write-barrier-aware env write, taken once the env has escaped to the
 * heap: remember the env object for GC, force the write, then clear the
 * WB_REQUIRED flag so subsequent writes can take the fast path again. */
static void
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
{
    /* remember env value forcibly */
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
}
463
/* Store `v` at ep[index]: a plain store while the env still lives on
 * the VM stack, or the write-barriered slow path once WB is required. */
static inline void
vm_env_write(const VALUE *ep, int index, VALUE v)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
        VM_STACK_ENV_WRITE(ep, index, v);
    }
    else {
        vm_env_write_slowpath(ep, index, v);
    }
}
475
/* Convert a block handler into a Proc VALUE (nil when there is no
 * block).  iseq/ifunc blocks are materialized via rb_vm_make_proc,
 * symbols via Symbol#to_proc, and proc handlers are returned as-is. */
MJIT_STATIC VALUE
rb_vm_bh_to_procval(const rb_execution_context_t *ec, VALUE block_handler)
{
    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        return Qnil;
    }
    else {
        switch (vm_block_handler_type(block_handler)) {
          case block_handler_type_iseq:
          case block_handler_type_ifunc:
            return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
          case block_handler_type_symbol:
            return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
          case block_handler_type_proc:
            return VM_BH_TO_PROC(block_handler);
          default:
            VM_UNREACHABLE(rb_vm_bh_to_procval);
        }
    }
}
496
497/* svar */
498
#if VM_CHECK_MODE > 0
/* VM_CHECK_MODE helper: the value stored in an svar slot must be an
 * imemo of type svar, cref, or ment; anything else is a VM bug. */
static int
vm_svar_valid_p(VALUE svar)
{
    if (RB_TYPE_P((VALUE)svar, T_IMEMO)) {
        switch (imemo_type(svar)) {
          case imemo_svar:
          case imemo_cref:
          case imemo_ment:
            return TRUE;
          default:
            break;
        }
    }
    rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
    return FALSE;
}
#endif
517
/* Fetch the svar (special-variable) slot for local EP `lep`.
 * For the root frame the slot lives on the EC (root_svar); otherwise
 * it is the frame's ME/CREF word.  The result may be Qfalse (no svar
 * yet) or a cref/ment imemo — callers filter non-svar values. */
static inline struct vm_svar *
lep_svar(const rb_execution_context_t *ec, const VALUE *lep)
{
    VALUE svar;

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
    }
    else {
        svar = ec->root_svar;
    }

    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));

    return (struct vm_svar *)svar;
}
534
/* Store `svar` into the slot read by lep_svar(), using the env write
 * barrier for frame slots and RB_OBJ_WRITE for the EC-rooted slot. */
static inline void
lep_svar_write(const rb_execution_context_t *ec, const VALUE *lep, const struct vm_svar *svar)
{
    VM_ASSERT(vm_svar_valid_p((VALUE)svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
    }
    else {
        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
    }
}
547
/* Read special variable `key` ($_ / $~ / extras) from the svar box
 * reachable from `lep`.  Returns nil when no svar exists yet (the slot
 * holds Qfalse or an un-boxed cref/ment). */
static VALUE
lep_svar_get(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key)
{
    const struct vm_svar *svar = lep_svar(ec, lep);

    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) return Qnil;

    switch (key) {
      case VM_SVAR_LASTLINE:
        return svar->lastline;
      case VM_SVAR_BACKREF:
        return svar->backref;
      default: {
        /* keys >= VM_SVAR_EXTRA_START index into the `others` array */
        const VALUE ary = svar->others;

        if (NIL_P(ary)) {
            return Qnil;
        }
        else {
            return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
        }
      }
    }
}
572
/* Allocate a fresh svar imemo: lastline/backref/others start as nil,
 * and the cref_or_me slot keeps `obj` (the value the svar displaced). */
static struct vm_svar *
svar_new(VALUE obj)
{
    return (struct vm_svar *)rb_imemo_new(imemo_svar, Qnil, Qnil, Qnil, obj);
}
578
/* Write special variable `key` = `val` for local EP `lep`, creating
 * the svar box on demand (boxing whatever occupied the slot before). */
static void
lep_svar_set(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, VALUE val)
{
    struct vm_svar *svar = lep_svar(ec, lep);

    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) {
        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));
    }

    switch (key) {
      case VM_SVAR_LASTLINE:
        RB_OBJ_WRITE(svar, &svar->lastline, val);
        return;
      case VM_SVAR_BACKREF:
        RB_OBJ_WRITE(svar, &svar->backref, val);
        return;
      default: {
        /* extra keys live in a lazily-created array */
        VALUE ary = svar->others;

        if (NIL_P(ary)) {
            RB_OBJ_WRITE(svar, &svar->others, ary = rb_ary_new());
        }
        rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
      }
    }
}
605
/* Read a special variable for the getspecial instruction.
 *   type == 0       : plain svar slot `key`.
 *   type & 1        : named back-reference — the character ('&', '`',
 *                     '\'', '+') is encoded in type >> 1.
 *   otherwise       : numbered match group $N with N = type >> 1. */
static inline VALUE
vm_getspecial(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, rb_num_t type)
{
    VALUE val;

    if (type == 0) {
        val = lep_svar_get(ec, lep, key);
    }
    else {
        VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

        if (type & 0x01) {
            switch (type >> 1) {
              case '&':
                val = rb_reg_last_match(backref);
                break;
              case '`':
                val = rb_reg_match_pre(backref);
                break;
              case '\'':
                val = rb_reg_match_post(backref);
                break;
              case '+':
                val = rb_reg_match_last(backref);
                break;
              default:
                rb_bug("unexpected back-ref");
            }
        }
        else {
            val = rb_reg_nth_match((int)(type >> 1), backref);
        }
    }
    return val;
}
641
642PUREFUNC(static rb_callable_method_entry_t *check_method_entry(VALUE obj, int can_be_svar));
644check_method_entry(VALUE obj, int can_be_svar)
645{
646 if (obj == Qfalse) return NULL;
647
648#if VM_CHECK_MODE > 0
649 if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));
650#endif
651
652 switch (imemo_type(obj)) {
653 case imemo_ment:
654 return (rb_callable_method_entry_t *)obj;
655 case imemo_cref:
656 return NULL;
657 case imemo_svar:
658 if (can_be_svar) {
659 return check_method_entry(((struct vm_svar *)obj)->cref_or_me, FALSE);
660 }
661 default:
662#if VM_CHECK_MODE > 0
663 rb_bug("check_method_entry: svar should not be there:");
664#endif
665 return NULL;
666 }
667}
668
669MJIT_STATIC const rb_callable_method_entry_t *
670rb_vm_frame_method_entry(const rb_control_frame_t *cfp)
671{
672 const VALUE *ep = cfp->ep;
674
675 while (!VM_ENV_LOCAL_P(ep)) {
676 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
677 ep = VM_ENV_PREV_EP(ep);
678 }
679
680 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
681}
682
683static const rb_iseq_t *
684method_entry_iseqptr(const rb_callable_method_entry_t *me)
685{
686 switch (me->def->type) {
687 case VM_METHOD_TYPE_ISEQ:
688 return me->def->body.iseq.iseqptr;
689 default:
690 return NULL;
691 }
692}
693
694static rb_cref_t *
695method_entry_cref(const rb_callable_method_entry_t *me)
696{
697 switch (me->def->type) {
698 case VM_METHOD_TYPE_ISEQ:
699 return me->def->body.iseq.cref;
700 default:
701 return NULL;
702 }
703}
704
705#if VM_CHECK_MODE == 0
706PUREFUNC(static rb_cref_t *check_cref(VALUE, int));
707#endif
708static rb_cref_t *
709check_cref(VALUE obj, int can_be_svar)
710{
711 if (obj == Qfalse) return NULL;
712
713#if VM_CHECK_MODE > 0
714 if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));
715#endif
716
717 switch (imemo_type(obj)) {
718 case imemo_ment:
719 return method_entry_cref((rb_callable_method_entry_t *)obj);
720 case imemo_cref:
721 return (rb_cref_t *)obj;
722 case imemo_svar:
723 if (can_be_svar) {
724 return check_cref(((struct vm_svar *)obj)->cref_or_me, FALSE);
725 }
726 default:
727#if VM_CHECK_MODE > 0
728 rb_bug("check_method_entry: svar should not be there:");
729#endif
730 return NULL;
731 }
732}
733
/* Walk the EP chain and return the first cref found in a ME/CREF slot
 * (looking through the svar box at the local frame).  NULL if none. */
static inline rb_cref_t *
vm_env_cref(const VALUE *ep)
{
    rb_cref_t *cref;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
746
/* Does `v` hold a cref directly (not via a method entry)?  Looks
 * through an svar box only when `can_be_svar` is set. */
static int
is_cref(const VALUE v, int can_be_svar)
{
    if (RB_TYPE_P(v, T_IMEMO)) {
        switch (imemo_type(v)) {
          case imemo_cref:
            return TRUE;
          case imemo_svar:
            if (can_be_svar) return is_cref(((struct vm_svar *)v)->cref_or_me, FALSE);
            /* fall through */
          default:
            break;
        }
    }
    return FALSE;
}
762
763static int
764vm_env_cref_by_cref(const VALUE *ep)
765{
766 while (!VM_ENV_LOCAL_P(ep)) {
767 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
768 ep = VM_ENV_PREV_EP(ep);
769 }
770 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
771}
772
773static rb_cref_t *
774cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
775{
776 const VALUE v = *vptr;
777 rb_cref_t *cref, *new_cref;
778
779 if (RB_TYPE_P(v, T_IMEMO)) {
780 switch (imemo_type(v)) {
781 case imemo_cref:
782 cref = (rb_cref_t *)v;
783 new_cref = vm_cref_dup(cref);
784 if (parent) {
785 RB_OBJ_WRITE(parent, vptr, new_cref);
786 }
787 else {
788 VM_FORCE_WRITE(vptr, (VALUE)new_cref);
789 }
790 return (rb_cref_t *)new_cref;
791 case imemo_svar:
792 if (can_be_svar) {
793 return cref_replace_with_duplicated_cref_each_frame(&((struct vm_svar *)v)->cref_or_me, FALSE, v);
794 }
795 /* fall through */
796 case imemo_ment:
797 rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");
798 default:
799 break;
800 }
801 }
802 return FALSE;
803}
804
/* Find the cref governing `ep` and replace it with a duplicate (so a
 * caller may mutate visibility/refinements without affecting sharers).
 * The envval owning each slot is passed along for write barriers when
 * the env has escaped to the heap.  Requires that the cref is stored
 * by-cref (see vm_env_cref_by_cref); otherwise it is a VM bug. */
static rb_cref_t *
vm_cref_replace_with_duplicated_cref(const VALUE *ep)
{
    if (vm_env_cref_by_cref(ep)) {
        rb_cref_t *cref;
        VALUE envval;

        while (!VM_ENV_LOCAL_P(ep)) {
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
                return cref;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
    }
    else {
        rb_bug("vm_cref_dup: unreachable");
    }
}
826
827static rb_cref_t *
828vm_get_cref(const VALUE *ep)
829{
830 rb_cref_t *cref = vm_env_cref(ep);
831
832 if (cref != NULL) {
833 return cref;
834 }
835 else {
836 rb_bug("vm_get_cref: unreachable");
837 }
838}
839
/* Exported wrapper of vm_get_cref(). */
rb_cref_t *
rb_vm_get_cref(const VALUE *ep)
{
    return vm_get_cref(ep);
}
845
846static rb_cref_t *
847vm_ec_cref(const rb_execution_context_t *ec)
848{
849 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
850
851 if (cfp == NULL) {
852 return NULL;
853 }
854 return vm_get_cref(cfp->ep);
855}
856
/* Return a cref suitable as a constant-cache key: the current cref
 * when any scope in the chain is a singleton or cloned class (whose
 * constant tables are per-object and must not share cache entries),
 * or NULL meaning the lookup result is globally cacheable. */
static const rb_cref_t *
vm_get_const_key_cref(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);
    const rb_cref_t *key_cref = cref;

    while (cref) {
        if (FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
            FL_TEST(CREF_CLASS(cref), RCLASS_CLONED)) {
            return key_cref;
        }
        cref = CREF_NEXT(cref);
    }

    /* does not include singleton class */
    return NULL;
}
874
/* Rebuild the cref chain starting at `cref`, substituting `new_klass`
 * for the first occurrence of `old_klass` and stopping there; scopes
 * before the match are copied.  The rebuilt chain is written through
 * `new_cref_ptr`, which is advanced link by link. */
void
rb_vm_rewrite_cref(rb_cref_t *cref, VALUE old_klass, VALUE new_klass, rb_cref_t **new_cref_ptr)
{
    rb_cref_t *new_cref;

    while (cref) {
        if (CREF_CLASS(cref) == old_klass) {
            /* found the scope to replace: terminate the new chain here */
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            *new_cref_ptr = new_cref;
            return;
        }
        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        *new_cref_ptr = new_cref;
        new_cref_ptr = &new_cref->next;
    }
    *new_cref_ptr = NULL;
}
893
/* Create a new cref for entering scope `klass`, linked to the cref of
 * `ep` (or, when `ep` is NULL, of the caller's Ruby-level frame).
 * `pushed_by_eval` and `singleton` are recorded on the new cref. */
static rb_cref_t *
vm_cref_push(const rb_execution_context_t *ec, VALUE klass, const VALUE *ep, int pushed_by_eval, int singleton)
{
    rb_cref_t *prev_cref = NULL;

    if (ep) {
        prev_cref = vm_env_cref(ep);
    }
    else {
        rb_control_frame_t *cfp = vm_get_ruby_level_caller_cfp(ec, ec->cfp);

        if (cfp) {
            prev_cref = vm_env_cref(cfp->ep);
        }
    }

    return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
}
912
/* Class/module used as the definition target (cbase) of the current
 * lexical scope. */
static inline VALUE
vm_get_cbase(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

    return CREF_CLASS_FOR_DEFINITION(cref);
}
920
921static inline VALUE
922vm_get_const_base(const VALUE *ep)
923{
924 const rb_cref_t *cref = vm_get_cref(ep);
925
926 while (cref) {
927 if (!CREF_PUSHED_BY_EVAL(cref)) {
928 return CREF_CLASS_FOR_DEFINITION(cref);
929 }
930 cref = CREF_NEXT(cref);
931 }
932
933 return Qundef;
934}
935
936static inline void
937vm_check_if_namespace(VALUE klass)
938{
939 if (!RB_TYPE_P(klass, T_CLASS) && !RB_TYPE_P(klass, T_MODULE)) {
940 rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);
941 }
942}
943
/* Warn when a definition is about to land on a refinement module —
 * it will actually take effect on the outer class/module. */
static inline void
vm_ensure_not_refinement_module(VALUE self)
{
    if (RB_TYPE_P(self, T_MODULE) && FL_TEST(self, RMODULE_IS_REFINEMENT)) {
        rb_warn("not defined at the refinement, but at the outer class/module");
    }
}
951
/* Placeholder hook: the iclass of `klass` in the context of `cfp` is
 * currently just `klass` itself (`cfp` intentionally unused). */
static inline VALUE
vm_get_iclass(const rb_control_frame_t *cfp, VALUE klass)
{
    return klass;
}
957
958static inline VALUE
959vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, bool allow_nil, int is_defined)
960{
961 void rb_const_warn_if_deprecated(const rb_const_entry_t *ce, VALUE klass, ID id);
962 VALUE val;
963
964 if (NIL_P(orig_klass) && allow_nil) {
965 /* in current lexical scope */
966 const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
967 const rb_cref_t *cref;
968 VALUE klass = Qnil;
969
970 while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
971 root_cref = CREF_NEXT(root_cref);
972 }
973 cref = root_cref;
974 while (cref && CREF_NEXT(cref)) {
975 if (CREF_PUSHED_BY_EVAL(cref)) {
976 klass = Qnil;
977 }
978 else {
979 klass = CREF_CLASS(cref);
980 }
981 cref = CREF_NEXT(cref);
982
983 if (!NIL_P(klass)) {
984 VALUE av, am = 0;
986 search_continue:
987 if ((ce = rb_const_lookup(klass, id))) {
988 rb_const_warn_if_deprecated(ce, klass, id);
989 val = ce->value;
990 if (val == Qundef) {
991 if (am == klass) break;
992 am = klass;
993 if (is_defined) return 1;
994 if (rb_autoloading_value(klass, id, &av, NULL)) return av;
995 rb_autoload_load(klass, id);
996 goto search_continue;
997 }
998 else {
999 if (is_defined) {
1000 return 1;
1001 }
1002 else {
1003 if (UNLIKELY(!rb_ractor_main_p())) {
1004 if (!rb_ractor_shareable_p(val)) {
1005 rb_raise(rb_eRactorIsolationError,
1006 "can not access non-shareable objects in constant %"PRIsVALUE"::%s by non-main ractor.", rb_class_path(klass), rb_id2name(id));
1007 }
1008 }
1009 return val;
1010 }
1011 }
1012 }
1013 }
1014 }
1015
1016 /* search self */
1017 if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
1018 klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
1019 }
1020 else {
1021 klass = CLASS_OF(ec->cfp->self);
1022 }
1023
1024 if (is_defined) {
1025 return rb_const_defined(klass, id);
1026 }
1027 else {
1028 return rb_const_get(klass, id);
1029 }
1030 }
1031 else {
1032 vm_check_if_namespace(orig_klass);
1033 if (is_defined) {
1034 return rb_public_const_defined_from(orig_klass, id);
1035 }
1036 else {
1037 return rb_public_const_get_from(orig_klass, id);
1038 }
1039 }
1040}
1041
/* Class used for class-variable access in `cref`'s lexical scope:
 * skip singleton/eval-pushed scopes, raise RuntimeError on toplevel
 * access when `top_level_raise`, and require a real class at the end. */
static inline VALUE
vm_get_cvar_base(const rb_cref_t *cref, const rb_control_frame_t *cfp, int top_level_raise)
{
    VALUE klass;

    if (!cref) {
        rb_bug("vm_get_cvar_base: no cref");
    }

    while (CREF_NEXT(cref) &&
           (NIL_P(CREF_CLASS(cref)) || FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
            CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
        cref = CREF_NEXT(cref);
    }
    if (top_level_raise && !CREF_NEXT(cref)) {
        rb_raise(rb_eRuntimeError, "class variable access from toplevel");
    }

    klass = vm_get_iclass(cfp, CREF_CLASS(cref));

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class variables available");
    }
    return klass;
}
1067
1068static VALUE
1069vm_search_const_defined_class(const VALUE cbase, ID id)
1070{
1071 if (rb_const_defined_at(cbase, id)) return cbase;
1072 if (cbase == rb_cObject) {
1073 VALUE tmp = RCLASS_SUPER(cbase);
1074 while (tmp) {
1075 if (rb_const_defined_at(tmp, id)) return tmp;
1076 tmp = RCLASS_SUPER(tmp);
1077 }
1078 }
1079 return 0;
1080}
1081
/* Look up `id` in a class's ivar index table under the VM lock
 * (the table may be mutated concurrently by other ractors).
 * On success stores the entry in *ent and returns true. */
static bool
iv_index_tbl_lookup(struct st_table *iv_index_tbl, ID id, struct rb_iv_index_tbl_entry **ent)
{
    int found;
    st_data_t ent_data;

    if (iv_index_tbl == NULL) return false;

    RB_VM_LOCK_ENTER();
    {
        found = st_lookup(iv_index_tbl, (st_data_t)id, &ent_data);
    }
    RB_VM_LOCK_LEAVE();
    if (found) *ent = (struct rb_iv_index_tbl_entry *)ent_data;

    return found ? true : false;
}
1099
ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, struct rb_iv_index_tbl_entry *ent));

/* Populate the inline cache after a successful ivar index lookup:
 * getinstancevariable caches the table entry on the IC (with a write
 * barrier from the iseq to the entry's class), while attr_reader-style
 * calls cache index+1 on the call cache (0 meaning "unset"). */
static inline void
fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, struct rb_iv_index_tbl_entry *ent)
{
    // fill cache
    if (!is_attr) {
        ic->entry = ent;
        RB_OBJ_WRITTEN(iseq, Qundef, ent->class_value);
    }
    else {
        vm_cc_attr_index_set(cc, (int)ent->index + 1);
    }
}
1114
ALWAYS_INLINE(static VALUE vm_getivar(VALUE, ID, const rb_iseq_t *, IVC, const struct rb_callcache *, int));
/* Read instance variable `id` from `obj` with inline-cache support.
 * `is_attr` distinguishes attr_reader dispatch (cache on `cc`) from the
 * getinstancevariable instruction (cache on `ic`).  Fast path: cache
 * hit (class serial match / attr index set) -> direct slot read.  Miss:
 * consult the class's ivar index table, refill the cache, and fall back
 * to the generic rb_ivar_get/rb_attr_get for other object types.
 * Unset ivars yield nil. */
static inline VALUE
vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
{
#if OPT_IC_FOR_IVAR
    VALUE val = Qundef;

    if (SPECIAL_CONST_P(obj)) {
        // frozen?  (special constants have no ivars; fall through to
        // the generic path below)
    }
    else if (LIKELY(is_attr ?
                    RB_DEBUG_COUNTER_INC_UNLESS(ivar_get_ic_miss_unset, vm_cc_attr_index(cc) > 0) :
                    RB_DEBUG_COUNTER_INC_UNLESS(ivar_get_ic_miss_serial,
                                                ic->entry && ic->entry->class_serial == RCLASS_SERIAL(RBASIC(obj)->klass)))) {
        /* cache hit: attr caches store index+1, IC caches the entry */
        uint32_t index = !is_attr ? ic->entry->index : (vm_cc_attr_index(cc) - 1);

        RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);

        if (LIKELY(BUILTIN_TYPE(obj) == T_OBJECT) &&
            LIKELY(index < ROBJECT_NUMIV(obj))) {
            val = ROBJECT_IVPTR(obj)[index];

            VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);
        }
        else if (FL_TEST_RAW(obj, FL_EXIVAR)) {
            /* ivars stored externally (generic ivar table) */
            val = rb_ivar_generic_lookup_with_index(obj, id, index);
        }

        goto ret;
    }
    else {
        struct rb_iv_index_tbl_entry *ent;

        if (BUILTIN_TYPE(obj) == T_OBJECT) {
            struct st_table *iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);

            if (iv_index_tbl && iv_index_tbl_lookup(iv_index_tbl, id, &ent)) {
                fill_ivar_cache(iseq, ic, cc, is_attr, ent);

                // get value
                if (ent->index < ROBJECT_NUMIV(obj)) {
                    val = ROBJECT_IVPTR(obj)[ent->index];

                    VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);
                }
            }
        }
        else if (FL_TEST_RAW(obj, FL_EXIVAR)) {
            struct st_table *iv_index_tbl = RCLASS_IV_INDEX_TBL(rb_obj_class(obj));

            if (iv_index_tbl && iv_index_tbl_lookup(iv_index_tbl, id, &ent)) {
                fill_ivar_cache(iseq, ic, cc, is_attr, ent);
                val = rb_ivar_generic_lookup_with_index(obj, id, ent->index);
            }
        }
        else {
            // T_CLASS / T_MODULE
            goto general_path;
        }

      ret:
        if (LIKELY(val != Qundef)) {
            return val;
        }
        else {
            return Qnil;
        }
    }
  general_path:
#endif /* OPT_IC_FOR_IVAR */
    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

    if (is_attr) {
        return rb_attr_get(obj, id);
    }
    else {
        return rb_ivar_get(obj, id);
    }
}
1194
1195ALWAYS_INLINE(static VALUE vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr));
1196NOINLINE(static VALUE vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic));
1197NOINLINE(static VALUE vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc));
1198
1199static VALUE
1200vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
1201{
1203
1204#if OPT_IC_FOR_IVAR
1205 if (RB_TYPE_P(obj, T_OBJECT)) {
1206 struct st_table *iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);
1207 struct rb_iv_index_tbl_entry *ent;
1208
1209 if (iv_index_tbl_lookup(iv_index_tbl, id, &ent)) {
1210 if (!is_attr) {
1211 ic->entry = ent;
1212 RB_OBJ_WRITTEN(iseq, Qundef, ent->class_value);
1213 }
1214 else if (ent->index >= INT_MAX) {
1215 rb_raise(rb_eArgError, "too many instance variables");
1216 }
1217 else {
1218 vm_cc_attr_index_set(cc, (int)(ent->index + 1));
1219 }
1220
1221 uint32_t index = ent->index;
1222
1223 if (UNLIKELY(index >= ROBJECT_NUMIV(obj))) {
1224 rb_init_iv_list(obj);
1225 }
1226 VALUE *ptr = ROBJECT_IVPTR(obj);
1227 RB_OBJ_WRITE(obj, &ptr[index], val);
1228 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_iv_hit);
1229
1230 return val;
1231 }
1232 }
1233#endif
1234 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1235 return rb_ivar_set(obj, id, val);
1236}
1237
1238static VALUE
1239vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic)
1240{
1241 return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);
1242}
1243
1244static VALUE
1245vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc)
1246{
1247 return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);
1248}
1249
/* Fast path for setting instance variable `id` on `obj` to `val`.
 * Handles only unfrozen T_OBJECT receivers whose cache (`ic` or, for
 * attr_writer, `cc`) is still valid; everything else goes to the
 * slow path, which also performs the frozen check. */
static inline VALUE
vm_setivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
{
#if OPT_IC_FOR_IVAR
    if (LIKELY(RB_TYPE_P(obj, T_OBJECT)) &&
        LIKELY(!RB_OBJ_FROZEN_RAW(obj))) {

        VM_ASSERT(!rb_ractor_shareable_p(obj));

        if (LIKELY(
            (!is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_serial, ic->entry && ic->entry->class_serial == RCLASS_SERIAL(RBASIC(obj)->klass))) ||
            ( is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_unset, vm_cc_attr_index(cc) > 0)))) {
            // attr caches store index+1; 0 means "not cached"
            uint32_t index = !is_attr ? ic->entry->index : vm_cc_attr_index(cc)-1;

            if (UNLIKELY(index >= ROBJECT_NUMIV(obj))) {
                rb_init_iv_list(obj);  // grow ivar storage to cover index
            }
            VALUE *ptr = ROBJECT_IVPTR(obj);
            RB_OBJ_WRITE(obj, &ptr[index], val);  // write barrier
            RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
            return val; /* inline cache hit */
        }
    }
    else {
        RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
    }
#endif /* OPT_IC_FOR_IVAR */
    if (is_attr) {
        return vm_setivar_slowpath_attr(obj, id, val, cc);
    }
    else {
        return vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);
    }
}
1284
/* Look up class variable `id` starting at `klass`, refresh the class's
 * cvar cache entry with the current global cvar state, and point the
 * instruction's inline cache `ic` at that entry.  Returns the cvar value.
 * rb_bug()s if the defining class has no cvc table entry, since
 * rb_cvar_find() succeeding implies one was created. */
static VALUE
update_classvariable_cache(const rb_iseq_t *iseq, VALUE klass, ID id, ICVARC ic)
{
    VALUE defined_class = 0;
    VALUE cvar_value = rb_cvar_find(klass, id, &defined_class);

    if (RB_TYPE_P(defined_class, T_ICLASS)) {
        // unwrap the include wrapper to get the real module
        defined_class = RBASIC(defined_class)->klass;
    }

    struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
    if (!rb_cvc_tbl) {
        rb_bug("the cvc table should be set");
    }

    VALUE ent_data;
    if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
        rb_bug("should have cvar cache entry");
    }

    struct rb_cvar_class_tbl_entry *ent = (void *)ent_data;
    // stamp the entry so future reads can validate against the global state
    ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();

    ic->entry = ent;
    RB_OBJ_WRITTEN(iseq, Qundef, ent->class_value);

    return cvar_value;
}
1313
/* getclassvariable instruction: read cvar `id`.
 * Fast path: the inline cache entry is still valid (global cvar state
 * unchanged), the cached class's iv table has the value, and we are on
 * the main ractor.  Otherwise resolve via the cref and refill the cache. */
static inline VALUE
vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *reg_cfp, ID id, ICVARC ic)
{
    const rb_cref_t *cref;

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE()) {
        VALUE v = Qundef;
        RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);

        if (st_lookup(RCLASS_IV_TBL(ic->entry->class_value), (st_data_t)id, &v) &&
            LIKELY(rb_ractor_main_p())) {

            return v;
        }
    }

    // slow path: resolve the lexical scope and do a full lookup
    cref = vm_get_cref(GET_EP());
    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    return update_classvariable_cache(iseq, klass, id, ic);
}
1335
1336VALUE
1337rb_vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, ICVARC ic)
1338{
1339 return vm_getclassvariable(iseq, cfp, id, ic);
1340}
1341
/* setclassvariable instruction: write `val` to cvar `id`.
 * Fast path: the inline cache entry is valid, so write straight into the
 * cached class.  Otherwise resolve the target class via the cref, set the
 * cvar, and refill the cache. */
static inline void
vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *reg_cfp, ID id, VALUE val, ICVARC ic)
{
    const rb_cref_t *cref;

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE()) {
        RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);

        rb_class_ivar_set(ic->entry->class_value, id, val);
        return;
    }

    cref = vm_get_cref(GET_EP());
    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    rb_cvar_set(klass, id, val);

    update_classvariable_cache(iseq, klass, id, ic);
}
1361
1362void
1363rb_vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, VALUE val, ICVARC ic)
1364{
1365 vm_setclassvariable(iseq, cfp, id, val, ic);
1366}
1367
1368static inline VALUE
1369vm_getinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, IVC ic)
1370{
1371 return vm_getivar(obj, id, iseq, ic, NULL, FALSE);
1372}
1373
1374static inline void
1375vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
1376{
1377 vm_setivar(obj, id, val, iseq, ic, 0, 0);
1378}
1379
1380void
1381rb_vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
1382{
1383 vm_setinstancevariable(iseq, obj, id, val, ic);
1384}
1385
1386/* Set the instance variable +val+ on object +obj+ at the +index+.
1387 * This function only works with T_OBJECT objects, so make sure
1388 * +obj+ is of type T_OBJECT before using this function.
1389 */
1390VALUE
1391rb_vm_set_ivar_idx(VALUE obj, uint32_t index, VALUE val)
1392{
1394
1396
1397 VM_ASSERT(!rb_ractor_shareable_p(obj));
1398
1399 if (UNLIKELY(index >= ROBJECT_NUMIV(obj))) {
1400 rb_init_iv_list(obj);
1401 }
1402 VALUE *ptr = ROBJECT_IVPTR(obj);
1403 RB_OBJ_WRITE(obj, &ptr[index], val);
1404
1405 return val;
1406}
1407
/* Re-raise an in-flight throw object: translate `err` into the tag state
 * the unwinding machinery expects and pass the object through unchanged. */
static VALUE
vm_throw_continue(const rb_execution_context_t *ec, VALUE err)
{
    /* continue throw */

    if (FIXNUM_P(err)) {
        // a Fixnum carries the raw tag state
        ec->tag->state = FIX2INT(err);
    }
    else if (SYMBOL_P(err)) {
        ec->tag->state = TAG_THROW;
    }
    else if (THROW_DATA_P(err)) {
        // reuse the state recorded in the throw-data imemo
        ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);
    }
    else {
        ec->tag->state = TAG_RAISE;
    }
    return err;
}
1427
/* Begin a non-local jump (break / retry / return / raise) from the frame
 * `reg_cfp`.  Determines the frame that should catch the jump
 * (`escape_cfp`), raises LocalJumpError for orphaned break/return, sets
 * the tag state, and returns a throw-data object to propagate. */
static VALUE
vm_throw_start(const rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, enum ruby_tag_type state,
               const int flag, const VALUE throwobj)
{
    const rb_control_frame_t *escape_cfp = NULL;
    const rb_control_frame_t * const eocfp = RUBY_VM_END_CONTROL_FRAME(ec); /* end of control frame pointer */

    if (flag != 0) {
        /* do nothing */
    }
    else if (state == TAG_BREAK) {
        int is_orphan = 1;
        const VALUE *ep = GET_EP();
        const rb_iseq_t *base_iseq = GET_ISEQ();
        escape_cfp = reg_cfp;

        // climb to the block iseq that `break` belongs to
        while (base_iseq->body->type != ISEQ_TYPE_BLOCK) {
            if (escape_cfp->iseq->body->type == ISEQ_TYPE_CLASS) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;
            }
            else {
                ep = VM_ENV_PREV_EP(ep);
                base_iseq = base_iseq->body->parent_iseq;
                escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
                VM_ASSERT(escape_cfp->iseq == base_iseq);
            }
        }

        if (VM_FRAME_LAMBDA_P(escape_cfp)) {
            /* lambda{... break ...} */
            // break out of a lambda behaves like `return`
            is_orphan = 0;
            state = TAG_RETURN;
        }
        else {
            ep = VM_ENV_PREV_EP(ep);

            // find the frame owning the block and check its catch table
            while (escape_cfp < eocfp) {
                if (escape_cfp->ep == ep) {
                    const rb_iseq_t *const iseq = escape_cfp->iseq;
                    const VALUE epc = escape_cfp->pc - iseq->body->iseq_encoded;
                    const struct iseq_catch_table *const ct = iseq->body->catch_table;
                    unsigned int i;

                    if (!ct) break;
                    for (i=0; i < ct->size; i++) {
                        const struct iseq_catch_table_entry *const entry =
                            UNALIGNED_MEMBER_PTR(ct, entries[i]);

                        if (entry->type == CATCH_TYPE_BREAK &&
                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) { /* found! */
                                is_orphan = 0;
                            }
                            break;
                        }
                    }
                    break;
                }

                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
            }
        }

        if (is_orphan) {
            rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
        }
    }
    else if (state == TAG_RETRY) {
        const VALUE *ep = VM_ENV_PREV_EP(GET_EP());

        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
    }
    else if (state == TAG_RETURN) {
        const VALUE *current_ep = GET_EP();
        const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
        int in_class_frame = 0;
        int toplevel = 1;
        escape_cfp = reg_cfp;

        // find target_lep, target_ep
        while (!VM_ENV_LOCAL_P(ep)) {
            if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
                target_ep = ep;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        target_lep = ep;

        // walk outward to find the method (or lambda/toplevel) frame to return from
        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);

            if (!target_lep) {
                target_lep = lep;
            }

            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&
                escape_cfp->iseq->body->type == ISEQ_TYPE_CLASS) {
                in_class_frame = 1;
                target_lep = 0;
            }

            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {
                    toplevel = 0;
                    if (in_class_frame) {
                        /* lambda {class A; ... return ...; end} */
                        goto valid_return;
                    }
                    else {
                        const VALUE *tep = current_ep;

                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {
                                /* in lambda */
                                if (tep == target_ep) {
                                    goto valid_return;
                                }
                                else {
                                    goto unexpected_return;
                                }
                            }
                            tep = VM_ENV_PREV_EP(tep);
                        }
                    }
                }
                else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
                    switch (escape_cfp->iseq->body->type) {
                      case ISEQ_TYPE_TOP:
                      case ISEQ_TYPE_MAIN:
                        if (toplevel) {
                            if (in_class_frame) goto unexpected_return;
                            if (target_ep == NULL) {
                                goto valid_return;
                            }
                            else {
                                goto unexpected_return;
                            }
                        }
                        break;
                      case ISEQ_TYPE_EVAL:
                      case ISEQ_TYPE_CLASS:
                        toplevel = 0;
                        break;
                      default:
                        break;
                    }
                }
            }

            if (escape_cfp->ep == target_lep && escape_cfp->iseq->body->type == ISEQ_TYPE_METHOD) {
                if (target_ep == NULL) {
                    goto valid_return;
                }
                else {
                    goto unexpected_return;
                }
            }

            escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
        }
      unexpected_return:;
        rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);

      valid_return:;
        /* do nothing */
    }
    else {
        rb_bug("isns(throw): unsupported throw type");
    }

    ec->tag->state = state;
    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
}
1605
1606static VALUE
1607vm_throw(const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
1608 rb_num_t throw_state, VALUE throwobj)
1609{
1610 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1611 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1612
1613 if (state != 0) {
1614 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1615 }
1616 else {
1617 return vm_throw_continue(ec, throwobj);
1618 }
1619}
1620
/* expandarray instruction: pop `ary` and push its elements onto the
 * stack.  `num` elements are pushed (padded with nil), plus one extra
 * array of the leftovers when flag bit 0 (splat) is set; flag bit 1
 * reverses the order ("post" arguments).  Non-arrays are coerced with
 * to_ary, or treated as a one-element array. */
static inline void
vm_expandarray(VALUE *sp, VALUE ary, rb_num_t num, int flag)
{
    int is_splat = flag & 0x01;
    rb_num_t space_size = num + is_splat;
    VALUE *base = sp - 1;  // `ary` itself occupied the top slot
    const VALUE *ptr;
    rb_num_t len;
    const VALUE obj = ary;

    if (!RB_TYPE_P(ary, T_ARRAY) && NIL_P(ary = rb_check_array_type(ary))) {
        // not convertible: behave as [obj]
        ary = obj;
        ptr = &ary;
        len = 1;
    }
    else {
        ptr = RARRAY_CONST_PTR_TRANSIENT(ary);
        len = (rb_num_t)RARRAY_LEN(ary);
    }

    if (space_size == 0) {
        /* no space left on stack */
    }
    else if (flag & 0x02) {
        /* post: ..., nil ,ary[-1], ..., ary[0..-num] # top */
        rb_num_t i = 0, j;

        if (len < num) {
            // pad the shortfall with nil first
            for (i=0; i<num-len; i++) {
                *base++ = Qnil;
            }
        }
        for (j=0; i<num; i++, j++) {
            VALUE v = ptr[len - j - 1];
            *base++ = v;
        }
        if (is_splat) {
            // the splat slot gets whatever elements remain
            *base = rb_ary_new4(len - j, ptr);
        }
    }
    else {
        /* normal: ary[num..-1], ary[num-2], ary[num-3], ..., ary[0] # top */
        rb_num_t i;
        VALUE *bptr = &base[space_size - 1];

        for (i=0; i<num; i++) {
            if (len <= i) {
                // array exhausted: fill the rest with nil
                for (; i<num; i++) {
                    *bptr-- = Qnil;
                }
                break;
            }
            *bptr-- = ptr[i];
        }
        if (is_splat) {
            if (num > len) {
                *bptr = rb_ary_new();
            }
            else {
                *bptr = rb_ary_new4(len - num, ptr + num);
            }
        }
    }
    RB_GC_GUARD(ary);
}
1686
1687static VALUE vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
1688
1689static VALUE vm_mtbl_dump(VALUE klass, ID target_mid);
1690
1691static struct rb_class_cc_entries *
1692vm_ccs_create(VALUE klass, const rb_callable_method_entry_t *cme)
1693{
1694 struct rb_class_cc_entries *ccs = ALLOC(struct rb_class_cc_entries);
1695#if VM_CHECK_MODE > 0
1696 ccs->debug_sig = ~(VALUE)ccs;
1697#endif
1698 ccs->capa = 0;
1699 ccs->len = 0;
1700 RB_OBJ_WRITE(klass, &ccs->cme, cme);
1701 METHOD_ENTRY_CACHED_SET((rb_callable_method_entry_t *)cme);
1702 ccs->entries = NULL;
1703 return ccs;
1704}
1705
/* Append the (ci, cc) pair to the class's call-cache list `ccs`, growing
 * the entry array geometrically.  Unmarkable ci/cc are skipped because
 * the list is walked by the GC via `klass`. */
static void
vm_ccs_push(VALUE klass, struct rb_class_cc_entries *ccs, const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    if (! vm_cc_markable(cc)) {
        return;
    }
    else if (! vm_ci_markable(ci)) {
        return;
    }

    if (UNLIKELY(ccs->len == ccs->capa)) {
        // grow: 0 -> 1, then double
        if (ccs->capa == 0) {
            ccs->capa = 1;
            ccs->entries = ALLOC_N(struct rb_class_cc_entries_entry, ccs->capa);
        }
        else {
            ccs->capa *= 2;
            REALLOC_N(ccs->entries, struct rb_class_cc_entries_entry, ccs->capa);
        }
    }
    VM_ASSERT(ccs->len < ccs->capa);

    const int pos = ccs->len++;
    // write barriers: ci/cc are reachable through klass
    RB_OBJ_WRITE(klass, &ccs->entries[pos].ci, ci);
    RB_OBJ_WRITE(klass, &ccs->entries[pos].cc, cc);

    if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
        // for tuning
        // vm_mtbl_dump(klass, 0);
    }
}
1737
1738#if VM_CHECK_MODE > 0
1739void
1740rb_vm_ccs_dump(struct rb_class_cc_entries *ccs)
1741{
1742 ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
1743 for (int i=0; i<ccs->len; i++) {
1744 vm_ci_dump(ccs->entries[i].ci);
1745 rp(ccs->entries[i].cc);
1746 }
1747}
1748
/* VM_CHECK_MODE consistency check: every entry in `ccs` must target
 * method `mid` on `klass` with a valid ci/cc pair.  Always returns TRUE
 * (failures abort via VM_ASSERT). */
static int
vm_ccs_verify(struct rb_class_cc_entries *ccs, ID mid, VALUE klass)
{
    VM_ASSERT(vm_ccs_p(ccs));
    VM_ASSERT(ccs->len <= ccs->capa);

    for (int i=0; i<ccs->len; i++) {
        const struct rb_callinfo *ci = ccs->entries[i].ci;
        const struct rb_callcache *cc = ccs->entries[i].cc;

        VM_ASSERT(vm_ci_p(ci));
        VM_ASSERT(vm_ci_mid(ci) == mid);
        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(vm_cc_class_check(cc, klass));
        VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
    }
    return TRUE;
}
1767#endif
1768
1769#ifndef MJIT_HEADER
1770
1771static const rb_callable_method_entry_t *check_overloaded_cme(const rb_callable_method_entry_t *cme, const struct rb_callinfo * const ci);
1772
/* Find (or create) the call cache for call-info `ci` on `klass`.
 * First scans the class's per-method ccs list; on miss, resolves the
 * callable method entry, creates a fresh cc, and registers it.  Returns
 * the shared empty cc when the method does not exist.  Caller must hold
 * the VM lock (see rb_vm_search_method_slowpath). */
static const struct rb_callcache *
vm_search_cc(const VALUE klass, const struct rb_callinfo * const ci)
{
    const ID mid = vm_ci_mid(ci);
    struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
    struct rb_class_cc_entries *ccs = NULL;
    VALUE ccs_data;

    if (cc_tbl) {
        if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
            ccs = (struct rb_class_cc_entries *)ccs_data;
            const int ccs_len = ccs->len;

            if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
                // stale list: the method was redefined/removed
                rb_vm_ccs_free(ccs);
                rb_id_table_delete(cc_tbl, mid);
                ccs = NULL;
            }
            else {
                VM_ASSERT(vm_ccs_verify(ccs, mid, klass));

                for (int i=0; i<ccs_len; i++) {
                    const struct rb_callinfo *ccs_ci = ccs->entries[i].ci;
                    const struct rb_callcache *ccs_cc = ccs->entries[i].cc;

                    VM_ASSERT(vm_ci_p(ccs_ci));
                    VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));

                    if (ccs_ci == ci) { // TODO: equality
                        RB_DEBUG_COUNTER_INC(cc_found_in_ccs);

                        VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
                        VM_ASSERT(ccs_cc->klass == klass);
                        VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));

                        return ccs_cc;
                    }
                }
            }
        }
    }
    else {
        // first cache on this class: create its cc table
        cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
    }

    RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

    const rb_callable_method_entry_t *cme;

    if (ccs) {
        // reuse the cme already resolved for this ccs
        cme = ccs->cme;
        cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;

        VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
    }
    else {
        cme = rb_callable_method_entry(klass, mid);
    }

    VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));

    if (cme == NULL) {
        // undef or not found: can't cache the information
        VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
        return &vm_empty_cc;
    }

    VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

    METHOD_ENTRY_CACHED_SET((struct rb_callable_method_entry_struct *)cme);

    if (ccs == NULL) {
        VM_ASSERT(cc_tbl != NULL);

        if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
            // rb_callable_method_entry() prepares ccs.
            ccs = (struct rb_class_cc_entries *)ccs_data;
        }
        else {
            // TODO: required?
            ccs = vm_ccs_create(klass, cme);
            rb_id_table_insert(cc_tbl, mid, (VALUE)ccs);
        }
    }

    cme = check_overloaded_cme(cme, ci);

    const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general);
    vm_ccs_push(klass, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc) != NULL);
    VM_ASSERT(cme->called_id == mid);
    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);

    return cc;
}
1869
/* Global-lock-protected wrapper around vm_search_cc(): resolves the call
 * cache for `ci` on `klass` and sanity-checks the result in check mode. */
MJIT_FUNC_EXPORTED const struct rb_callcache *
rb_vm_search_method_slowpath(const struct rb_callinfo *ci, VALUE klass)
{
    const struct rb_callcache *cc;

    VM_ASSERT(RB_TYPE_P(klass, T_CLASS) || RB_TYPE_P(klass, T_ICLASS));

    RB_VM_LOCK_ENTER();
    {
        cc = vm_search_cc(klass, ci);

        VM_ASSERT(cc);
        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
        VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
        VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
        VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
    }
    RB_VM_LOCK_LEAVE();

    return cc;
}
1892#endif
1893
/* Inline-cache miss path: resolve the call cache for `cd->ci` on `klass`,
 * store it back into the call data (when inline caching is enabled), and
 * classify the kind of miss for debug counters. */
static const struct rb_callcache *
vm_search_method_slowpath0(VALUE cd_owner, struct rb_call_data *cd, VALUE klass)
{
#if USE_DEBUG_COUNTER
    const struct rb_callcache *old_cc = cd->cc;
#endif

    const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);

#if OPT_INLINE_METHOD_CACHE
    cd->cc = cc;

    const struct rb_callcache *empty_cc =
#ifdef MJIT_HEADER
        rb_vm_empty_cc();
#else
        &vm_empty_cc;
#endif
    // the empty cc is a shared static object: no write barrier needed
    if (cd_owner && cc != empty_cc) RB_OBJ_WRITTEN(cd_owner, Qundef, cc);

#if USE_DEBUG_COUNTER
    if (old_cc == &empty_cc) {
        // empty
        RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
    }
    else if (old_cc == cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
    }
    else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
    }
    else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
             vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
    }
#endif
#endif // OPT_INLINE_METHOD_CACHE

    VM_ASSERT(vm_cc_cme(cc) == NULL ||
              vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

    return cc;
}
1940
1941#ifndef MJIT_HEADER
1942ALWAYS_INLINE(static const struct rb_callcache *vm_search_method_fastpath(VALUE cd_owner, struct rb_call_data *cd, VALUE klass));
1943#endif
/* Inline-cache hit path: return `cd->cc` directly when it still matches
 * `klass` and its method entry has not been invalidated; otherwise fall
 * back to the slow path, which refills the cache. */
static const struct rb_callcache *
vm_search_method_fastpath(VALUE cd_owner, struct rb_call_data *cd, VALUE klass)
{
    const struct rb_callcache *cc = cd->cc;

#if OPT_INLINE_METHOD_CACHE
    if (LIKELY(vm_cc_class_check(cc, klass))) {
        if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
            VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
            RB_DEBUG_COUNTER_INC(mc_inline_hit);
            VM_ASSERT(vm_cc_cme(cc) == NULL ||                        // not found
                      (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||         // search_super w/ define_method
                      vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci)); // cme->called_id == ci->mid

            return cc;
        }
        RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
    }
#endif

    return vm_search_method_slowpath0(cd_owner, cd, klass);
}
1969
1970static const struct rb_callcache *
1971vm_search_method(VALUE cd_owner, struct rb_call_data *cd, VALUE recv)
1972{
1973 VALUE klass = CLASS_OF(recv);
1974 VM_ASSERT(klass != Qfalse);
1975 VM_ASSERT(RBASIC_CLASS(klass) == 0 || rb_obj_is_kind_of(klass, rb_cClass));
1976
1977 return vm_search_method_fastpath(cd_owner, cd, klass);
1978}
1979
1980static inline int
1981check_cfunc(const rb_callable_method_entry_t *me, VALUE (*func)(ANYARGS))
1982{
1983 if (! me) {
1984 return false;
1985 }
1986 else {
1987 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
1988 VM_ASSERT(callable_method_entry_p(me));
1989 VM_ASSERT(me->def);
1990 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
1991 return false;
1992 }
1993 else {
1994 return me->def->body.cfunc.func == func;
1995 }
1996 }
1997}
1998
1999static inline int
2000vm_method_cfunc_is(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv, VALUE (*func)(ANYARGS))
2001{
2002 VM_ASSERT(iseq != NULL);
2003 const struct rb_callcache *cc = vm_search_method((VALUE)iseq, cd, recv);
2004 return check_cfunc(vm_cc_cme(cc), func);
2005}
2006
2007#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2008
2009static inline bool
2010FIXNUM_2_P(VALUE a, VALUE b)
2011{
2012 /* FIXNUM_P(a) && FIXNUM_P(b)
2013 * == ((a & 1) && (b & 1))
2014 * == a & b & 1 */
2015 SIGNED_VALUE x = a;
2016 SIGNED_VALUE y = b;
2017 SIGNED_VALUE z = x & y & 1;
2018 return z == 1;
2019}
2020
2021static inline bool
2022FLONUM_2_P(VALUE a, VALUE b)
2023{
2024#if USE_FLONUM
2025 /* FLONUM_P(a) && FLONUM_P(b)
2026 * == ((a & 3) == 2) && ((b & 3) == 2)
2027 * == ! ((a ^ 2) | (b ^ 2) & 3)
2028 */
2029 SIGNED_VALUE x = a;
2030 SIGNED_VALUE y = b;
2031 SIGNED_VALUE z = ((x ^ 2) | (y ^ 2)) & 3;
2032 return !z;
2033#else
2034 return false;
2035#endif
2036}
2037
/* Redefinition-aware fast paths for `recv == obj`.
 * Handles Fixnum/Flonum/static Symbol identity, Float and String
 * equality — each only while the corresponding `==` is unredefined.
 * Returns Qundef when no fast path applies (caller must dispatch). */
static VALUE
opt_equality_specialized(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
        goto compare_by_identity;
    }
    else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
        goto compare_by_identity;
    }
    else if (STATIC_SYM_P(recv) && STATIC_SYM_P(obj) && EQ_UNREDEFINED_P(SYMBOL)) {
        goto compare_by_identity;
    }
    else if (SPECIAL_CONST_P(recv)) {
        //
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RB_FLOAT_TYPE_P(obj) && EQ_UNREDEFINED_P(FLOAT)) {
        double a = RFLOAT_VALUE(recv);
        double b = RFLOAT_VALUE(obj);

#if MSC_VERSION_BEFORE(1300)
        // old MSVC mishandles NaN comparisons; force false explicitly
        if (isnan(a)) {
            return Qfalse;
        }
        else if (isnan(b)) {
            return Qfalse;
        }
        else
#endif
        return RBOOL(a == b);
    }
    else if (RBASIC_CLASS(recv) == rb_cString && EQ_UNREDEFINED_P(STRING)) {
        if (recv == obj) {
            return Qtrue;
        }
        else if (RB_TYPE_P(obj, T_STRING)) {
            return rb_str_eql_internal(obj, recv);
        }
    }
    // no fast path applies; caller must do a full dispatch
    return Qundef;

  compare_by_identity:
    return RBOOL(recv == obj);
}
2081
2082static VALUE
2083opt_equality(const rb_iseq_t *cd_owner, VALUE recv, VALUE obj, CALL_DATA cd)
2084{
2085 VM_ASSERT(cd_owner != NULL);
2086
2087 VALUE val = opt_equality_specialized(recv, obj);
2088 if (val != Qundef) return val;
2089
2090 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
2091 return Qundef;
2092 }
2093 else {
2094 return RBOOL(recv == obj);
2095 }
2096}
2097
2098#undef EQ_UNREDEFINED_P
2099
2100#ifndef MJIT_HEADER
2101
2102static inline const struct rb_callcache *gccct_method_search(rb_execution_context_t *ec, VALUE recv, ID mid, int argc); // vm_eval.c
2103NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));
2104
2105static VALUE
2106opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
2107{
2108 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, 1);
2109
2110 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2111 return RBOOL(recv == obj);
2112 }
2113 else {
2114 return Qundef;
2115 }
2116}
2117
2118static VALUE
2119opt_equality_by_mid(VALUE recv, VALUE obj, ID mid)
2120{
2121 VALUE val = opt_equality_specialized(recv, obj);
2122 if (val != Qundef) {
2123 return val;
2124 }
2125 else {
2126 return opt_equality_by_mid_slowpath(recv, obj, mid);
2127 }
2128}
2129
2130VALUE
2131rb_equal_opt(VALUE obj1, VALUE obj2)
2132{
2133 return opt_equality_by_mid(obj1, obj2, idEq);
2134}
2135
2136VALUE
2137rb_eql_opt(VALUE obj1, VALUE obj2)
2138{
2139 return opt_equality_by_mid(obj1, obj2, idEqlP);
2140}
2141
2142#endif // MJIT_HEADER
2143
2144extern VALUE rb_vm_call0(rb_execution_context_t *ec, VALUE, ID, int, const VALUE*, const rb_callable_method_entry_t *, int kw_splat);
2145extern VALUE rb_vm_call_with_refinements(rb_execution_context_t *, VALUE, ID, int, const VALUE *, int);
2146
/* checkmatch instruction: test `pattern` against `target`.
 * `when` clauses return the pattern untouched (truthiness decided by the
 * caller); `rescue` validates the pattern is a class/module and, like
 * `case`, dispatches pattern === target with refinements honored. */
static VALUE
check_match(rb_execution_context_t *ec, VALUE pattern, VALUE target, enum vm_check_match_type type)
{
    switch (type) {
      case VM_CHECKMATCH_TYPE_WHEN:
        return pattern;
      case VM_CHECKMATCH_TYPE_RESCUE:
        if (!rb_obj_is_kind_of(pattern, rb_cModule)) {
            rb_raise(rb_eTypeError, "class or module required for rescue clause");
        }
        /* fall through */
      case VM_CHECKMATCH_TYPE_CASE: {
        return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target, RB_NO_KEYWORDS);
      }
      default:
        rb_bug("check_match: unreachable");
    }
}
2165
2166
2167#if MSC_VERSION_BEFORE(1300)
2168#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
2169#else
2170#define CHECK_CMP_NAN(a, b) /* do nothing */
2171#endif
2172
/* `a < b` as a Ruby boolean; CHECK_CMP_NAN forces Qfalse for NaN
 * operands on old MSVC and expands to nothing elsewhere. */
static inline VALUE
double_cmp_lt(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a < b);
}
2179
/* `a <= b` as a Ruby boolean; see CHECK_CMP_NAN above for NaN handling. */
static inline VALUE
double_cmp_le(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a <= b);
}
2186
/* `a > b` as a Ruby boolean; see CHECK_CMP_NAN above for NaN handling. */
static inline VALUE
double_cmp_gt(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a > b);
}
2193
/* `a >= b` as a Ruby boolean; see CHECK_CMP_NAN above for NaN handling. */
static inline VALUE
double_cmp_ge(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a >= b);
}
2200
/* Return the base pointer (start of the local frame) for `cfp`.
 * Currently reads the stored __bp__; the #if 0 branch is a candidate
 * recomputation from the previous frame's sp, kept for future use. */
static inline VALUE *
vm_base_ptr(const rb_control_frame_t *cfp)
{
#if 0 // we may optimize and use this once we confirm it does not spoil performance on JIT.
    const rb_control_frame_t *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
        VALUE *bp = prev_cfp->sp + cfp->iseq->body->local_table_size + VM_ENV_DATA_SIZE;
        if (cfp->iseq->body->type == ISEQ_TYPE_METHOD) {
            /* adjust `self' */
            bp += 1;
        }
#if VM_DEBUG_BP_CHECK
        if (bp != cfp->bp_check) {
            ruby_debug_printf("bp_check: %ld, bp: %ld\n",
                              (long)(cfp->bp_check - GET_EC()->vm_stack),
                              (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");
        }
#endif
        return bp;
    }
    else {
        return NULL;
    }
#else
    return cfp->__bp__;
#endif
}
2230
2231/* method call processes with call_info */
2232
2233#include "vm_args.c"
2234
2235static inline VALUE vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc, int param_size, int local_size);
2236ALWAYS_INLINE(static VALUE vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me, int opt_pc, int param_size, int local_size));
2237static inline VALUE vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc);
2238static VALUE vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
2239static VALUE vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2240static VALUE vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2241static inline VALUE vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2242
2243static vm_call_handler vm_call_iseq_setup_func(const struct rb_callinfo *ci, const int param_size, const int local_size);
2244
2245static VALUE
2246vm_call_iseq_setup_tailcall_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
2247{
2248 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
2249
2250 return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
2251}
2252
2253static VALUE
2254vm_call_iseq_setup_normal_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
2255{
2256 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2257
2258 const struct rb_callcache *cc = calling->cc;
2259 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2260 int param = iseq->body->param.size;
2261 int local = iseq->body->local_table_size;
2262 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2263}
2264
2265MJIT_STATIC bool
2266rb_simple_iseq_p(const rb_iseq_t *iseq)
2267{
2268 return iseq->body->param.flags.has_opt == FALSE &&
2269 iseq->body->param.flags.has_rest == FALSE &&
2270 iseq->body->param.flags.has_post == FALSE &&
2271 iseq->body->param.flags.has_kw == FALSE &&
2272 iseq->body->param.flags.has_kwrest == FALSE &&
2273 iseq->body->param.flags.accepts_no_kwarg == FALSE &&
2274 iseq->body->param.flags.has_block == FALSE;
2275}
2276
2277MJIT_FUNC_EXPORTED bool
2278rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
2279{
2280 return iseq->body->param.flags.has_opt == TRUE &&
2281 iseq->body->param.flags.has_rest == FALSE &&
2282 iseq->body->param.flags.has_post == FALSE &&
2283 iseq->body->param.flags.has_kw == FALSE &&
2284 iseq->body->param.flags.has_kwrest == FALSE &&
2285 iseq->body->param.flags.accepts_no_kwarg == FALSE &&
2286 iseq->body->param.flags.has_block == FALSE;
2287}
2288
2289MJIT_FUNC_EXPORTED bool
2290rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
2291{
2292 return iseq->body->param.flags.has_opt == FALSE &&
2293 iseq->body->param.flags.has_rest == FALSE &&
2294 iseq->body->param.flags.has_post == FALSE &&
2295 iseq->body->param.flags.has_kw == TRUE &&
2296 iseq->body->param.flags.has_kwrest == FALSE &&
2297 iseq->body->param.flags.has_block == FALSE;
2298}
2299
2300// If true, cc->call needs to include `CALLER_SETUP_ARG` (i.e. can't be skipped in fastpath)
2301MJIT_STATIC bool
2302rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
2303{
2304 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
2305}
2306
2307
/* Normalize the caller-side argument region of the stack before binding it
 * to the callee: expand a splat (*args) in place and turn keyword
 * arguments / keyword splats into a single hash at the stack top.
 * After this runs, vm_ci_flag(ci) may no longer describe the actual stack
 * layout — see the inline notes below. */
static inline void
CALLER_SETUP_ARG(struct rb_control_frame_struct *restrict cfp,
                 struct rb_calling_info *restrict calling,
                 const struct rb_callinfo *restrict ci)
{
    if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
        VALUE final_hash;
        /* This expands the rest argument to the stack.
         * So, vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT is now inconsistent.
         */
        vm_caller_setup_arg_splat(cfp, calling);
        /* A trailing hash flagged RHASH_PASS_AS_KEYWORDS inside the splat
         * becomes the keyword splat; dup it so the callee may mutate it. */
        if (!IS_ARGS_KW_OR_KW_SPLAT(ci) &&
            calling->argc > 0 &&
            RB_TYPE_P((final_hash = *(cfp->sp - 1)), T_HASH) &&
            (((struct RHash *)final_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
            *(cfp->sp - 1) = rb_hash_dup(final_hash);
            calling->kw_splat = 1;
        }
    }
    if (UNLIKELY(IS_ARGS_KW_OR_KW_SPLAT(ci))) {
        if (IS_ARGS_KEYWORD(ci)) {
            /* This converts VM_CALL_KWARG style to VM_CALL_KW_SPLAT style
             * by creating a keyword hash.
             * So, vm_ci_flag(ci) & VM_CALL_KWARG is now inconsistent.
             */
            vm_caller_setup_arg_kw(cfp, calling, ci);
        }
        else {
            VALUE keyword_hash = cfp->sp[-1];
            if (!RB_TYPE_P(keyword_hash, T_HASH)) {
                /* Convert a non-hash keyword splat to a new hash */
                cfp->sp[-1] = rb_hash_dup(rb_to_hash_type(keyword_hash));
            }
            else if (!IS_ARGS_KW_SPLAT_MUT(ci)) {
                /* Convert a hash keyword splat to a new hash unless
                 * a mutable keyword splat was passed.
                 */
                cfp->sp[-1] = rb_hash_dup(keyword_hash);
            }
        }
    }
}
2350
/* Drop an empty keyword-splat hash from the stack top so the callee never
 * sees an empty **kw argument. Must run after CALLER_SETUP_ARG. */
static inline void
CALLER_REMOVE_EMPTY_KW_SPLAT(struct rb_control_frame_struct *restrict cfp,
                             struct rb_calling_info *restrict calling,
                             const struct rb_callinfo *restrict ci)
{
    if (UNLIKELY(calling->kw_splat)) {
        /* This removes the last Hash object if it is empty.
         * So, vm_ci_flag(ci) & VM_CALL_KW_SPLAT is now inconsistent.
         */
        if (RHASH_EMPTY_P(cfp->sp[-1])) {
            cfp->sp--;
            calling->argc--;
            calling->kw_splat = 0;
        }
    }
}
2367
/* Developer instrumentation: when USE_OPT_HIST is 1, count how many
 * optional arguments callers supply and dump a histogram at exit. */
#define USE_OPT_HIST 0

#if USE_OPT_HIST
#define OPT_HIST_MAX 64
static int opt_hist[OPT_HIST_MAX+1];

__attribute__((destructor))
static void
opt_hist_show_results_at_exit(void)
{
    /* NOTE(review): loop bound is OPT_HIST_MAX, so the overflow bucket
     * opt_hist[OPT_HIST_MAX] is never printed — confirm if intentional. */
    for (int i=0; i<OPT_HIST_MAX; i++) {
        ruby_debug_printf("opt_hist\t%d\t%d\n", i, opt_hist[i]);
    }
}
#endif
2383
/* Fastpath for iseqs whose only non-mandatory parameters are optionals:
 * derive the bytecode start offset (opt_pc) from how many optionals the
 * caller supplied, and shrink the param region by the unsupplied count. */
static VALUE
vm_call_iseq_setup_normal_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                    struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = iseq->body->param.lead_num;
    const int opt = calling->argc - lead_num; /* number of optionals supplied */
    const int opt_num = iseq->body->param.opt_num;
    const int opt_pc = (int)iseq->body->param.opt_table[opt];
    const int param = iseq->body->param.size;
    const int local = iseq->body->local_table_size;
    const int delta = opt_num - opt; /* optionals NOT supplied by the caller */

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[opt]++;
    }
    else {
        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
}
2411
/* Tail-call variant of vm_call_iseq_setup_normal_opt_start: only opt_pc is
 * needed because the tailcall path re-reads sizes from the iseq itself. */
static VALUE
vm_call_iseq_setup_tailcall_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                      struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = iseq->body->param.lead_num;
    const int opt = calling->argc - lead_num; /* number of optionals supplied */
    const int opt_pc = (int)iseq->body->param.opt_table[opt];

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[opt]++;
    }
    else {
        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
}
2435
/* Binds passed keyword values into the callee's keyword locals (klocals);
 * defined elsewhere in the VM sources (presumably vm_args.c, which shares
 * this translation unit — confirm). */
static void
args_setup_kw_parameters(rb_execution_context_t *const ec, const rb_iseq_t *const iseq,
                         VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
                         VALUE *const locals);
2440
/* Fastpath: caller passes literal keyword arguments (VM_CALL_KWARG) to an
 * iseq that accepts only keyword parameters. Copies the keyword VALUEs off
 * the stack and binds them directly into the callee's keyword locals. */
static VALUE
vm_call_iseq_setup_kwparm_kwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = iseq->body->param.keyword;
    const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);
    const int ci_kw_len = kw_arg->keyword_len;
    const VALUE * const ci_keywords = kw_arg->keywords;
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
    const int lead_num = iseq->body->param.lead_num;
    /* copy the keyword VALUEs aside; args_setup_kw_parameters consumes them */
    VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

    int param = iseq->body->param.size;
    int local = iseq->body->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
2467
/* Fastpath: caller passes NO keyword arguments to an iseq that accepts only
 * keyword parameters — every keyword local is filled from its default. */
static VALUE
vm_call_iseq_setup_kwparm_nokwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                  struct rb_calling_info *calling)
{
    const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = iseq->body->param.keyword;
    VALUE * const argv = cfp->sp - calling->argc;
    VALUE * const klocals = argv + kw_param->bits_start - kw_param->num;

    int i;
    for (i=0; i<kw_param->num; i++) {
        klocals[i] = kw_param->default_values[i];
    }
    klocals[i] = INT2FIX(0); // kw specify flag
    // NOTE:
    // nobody checks this value, but it should be cleared because it can
    // point to an invalid VALUE (T_NONE objects, raw pointers and so on).

    int param = iseq->body->param.size;
    int local = iseq->body->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
2496
/* Binds caller arguments to the callee iseq's parameters and, when the
 * parameter shape allows it, installs a specialized fastpath handler in the
 * call cache so subsequent calls skip this setup entirely.
 * Returns the bytecode offset (opt_pc) at which execution should start
 * (non-zero only when optional parameters were left unsupplied).
 * Falls through to setup_parameters_complex for every other shape
 * (rest/post/kwrest/block, or a kw-splat call). */
static inline int
vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
                    const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
{
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;
    bool cacheable_ci = vm_ci_markable(ci);

    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
        if (LIKELY(rb_simple_iseq_p(iseq))) {
            rb_control_frame_t *cfp = ec->cfp;
            CALLER_SETUP_ARG(cfp, calling, ci);
            CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

            /* simple iseq: exact positional arity required */
            if (calling->argc != iseq->body->param.lead_num) {
                argument_arity_error(ec, iseq, calling->argc, iseq->body->param.lead_num, iseq->body->param.lead_num);
            }

            VM_ASSERT(ci == calling->ci);
            VM_ASSERT(cc == calling->cc);
            CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), cacheable_ci && vm_call_iseq_optimizable_p(ci, cc));
            return 0;
        }
        else if (rb_iseq_only_optparam_p(iseq)) {
            rb_control_frame_t *cfp = ec->cfp;
            CALLER_SETUP_ARG(cfp, calling, ci);
            CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

            const int lead_num = iseq->body->param.lead_num;
            const int opt_num = iseq->body->param.opt_num;
            const int argc = calling->argc;
            const int opt = argc - lead_num;

            if (opt < 0 || opt > opt_num) {
                argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
            }

            if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                cacheable_ci && METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
            }
            else {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                cacheable_ci && METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
            }

            /* initialize opt vars for self-references */
            VM_ASSERT((int)iseq->body->param.size == lead_num + opt_num);
            for (int i=argc; i<lead_num + opt_num; i++) {
                argv[i] = Qnil;
            }
            return (int)iseq->body->param.opt_table[opt];
        }
        else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
            const int lead_num = iseq->body->param.lead_num;
            const int argc = calling->argc;
            const struct rb_iseq_param_keyword *kw_param = iseq->body->param.keyword;

            if (vm_ci_flag(ci) & VM_CALL_KWARG) {
                const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);

                if (argc - kw_arg->keyword_len == lead_num) {
                    const int ci_kw_len = kw_arg->keyword_len;
                    const VALUE * const ci_keywords = kw_arg->keywords;
                    VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
                    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);

                    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
                                    cacheable_ci && METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));

                    return 0;
                }
            }
            else if (argc == lead_num) {
                /* no kwarg */
                VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);

                if (klocals[kw_param->num] == INT2FIX(0)) {
                    /* copy from default_values */
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
                                    cacheable_ci && METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
                }

                return 0;
            }
        }
    }

    return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
}
2593
2594static VALUE
2595vm_call_iseq_setup(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
2596{
2597 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
2598
2599 const struct rb_callcache *cc = calling->cc;
2600 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2601 const int param_size = iseq->body->param.size;
2602 const int local_size = iseq->body->local_table_size;
2603 const int opt_pc = vm_callee_setup_arg(ec, calling, def_iseq_ptr(vm_cc_cme(cc)->def), cfp->sp - calling->argc, param_size, local_size);
2604 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
2605}
2606
2607static inline VALUE
2608vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
2609 int opt_pc, int param_size, int local_size)
2610{
2611 const struct rb_callinfo *ci = calling->ci;
2612 const struct rb_callcache *cc = calling->cc;
2613
2614 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
2615 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
2616 }
2617 else {
2618 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2619 }
2620}
2621
/* Push a METHOD frame for an iseq call whose arguments are already laid out
 * on the stack. Returns Qundef: execution continues in the new frame. */
static inline VALUE
vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me,
                          int opt_pc, int param_size, int local_size)
{
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *sp = argv + param_size;
    /* pop receiver + args from the caller frame; they become callee locals */
    cfp->sp = argv - 1 /* recv */;

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
                  calling->block_handler, (VALUE)me,
                  iseq->body->iseq_encoded + opt_pc, sp,
                  local_size - param_size,
                  iseq->body->stack_max);
    return Qundef;
}
2638
/* Tail call: pop the caller's frame first, then rebuild receiver+args at the
 * caller's stack position and push the callee frame in its place, so the
 * control-frame stack does not grow. */
static inline VALUE
vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc)
{
    const struct rb_callcache *cc = calling->cc;
    unsigned int i;
    VALUE *argv = cfp->sp - calling->argc;
    const rb_callable_method_entry_t *me = vm_cc_cme(cc);
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *src_argv = argv;
    VALUE *sp_orig, *sp;
    /* preserve the FINISH flag of the frame we are about to replace */
    VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;

    /* a block captured on the dying frame must be re-captured on the
     * previous frame before that frame is popped */
    if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
        struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
        const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
        dst_captured->code.val = src_captured->code.val;
        if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
            calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
        }
        else {
            calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
        }
    }

    vm_pop_frame(ec, cfp, cfp->ep);
    cfp = ec->cfp;

    sp_orig = sp = cfp->sp;

    /* push self */
    sp[0] = calling->recv;
    sp++;

    /* copy arguments */
    for (i=0; i < iseq->body->param.size; i++) {
        *sp++ = src_argv[i];
    }

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
                  calling->recv, calling->block_handler, (VALUE)me,
                  iseq->body->iseq_encoded + opt_pc, sp,
                  iseq->body->local_table_size - iseq->body->param.size,
                  iseq->body->stack_max);

    cfp->sp = sp_orig;

    return Qundef;
}
2687
2688static void
2689ractor_unsafe_check(void)
2690{
2691 if (!rb_ractor_main_p()) {
2692 rb_raise(rb_eRactorUnsafeError, "ractor unsafe method called from not main ractor");
2693 }
2694}
2695
/* Invoker for ractor-unsafe cfuncs with arity -2: (recv, args_as_Array). */
static VALUE
call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    return (*func)(recv, rb_ary_new4(argc, argv));
}
2702
/* Invoker for ractor-unsafe cfuncs with arity -1: (argc, argv, recv). */
static VALUE
call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    return (*func)(argc, argv, recv);
}
2709
/* Invoker for ractor-unsafe cfuncs with arity 0: cast back to the exact
 * signature (C requires calling through a compatible function type). */
static VALUE
call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
    return (*f)(recv);
}
2717
/* Invoker for ractor-unsafe cfuncs with arity 1. */
static VALUE
call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, argv[0]);
}
2725
/* Invoker for ractor-unsafe cfuncs with arity 2. */
static VALUE
call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1]);
}
2733
/* Invoker for ractor-unsafe cfuncs with arity 3. */
static VALUE
call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2]);
}
2741
/* Invoker for ractor-unsafe cfuncs with arity 4. */
static VALUE
call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
}
2749
/* Invoker for ractor-unsafe cfuncs with arity 5. */
static VALUE
call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
}
2757
2758static VALUE
2759call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2760{
2761 ractor_unsafe_check();
2763 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
2764}
2765
2766static VALUE
2767call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2768{
2769 ractor_unsafe_check();
2771 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
2772}
2773
2774static VALUE
2775call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2776{
2777 ractor_unsafe_check();
2779 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
2780}
2781
2782static VALUE
2783call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2784{
2785 ractor_unsafe_check();
2787 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
2788}
2789
2790static VALUE
2791call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2792{
2793 ractor_unsafe_check();
2795 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
2796}
2797
2798static VALUE
2799call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2800{
2801 ractor_unsafe_check();
2803 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
2804}
2805
2806static VALUE
2807call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2808{
2809 ractor_unsafe_check();
2811 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
2812}
2813
2814static VALUE
2815call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2816{
2817 ractor_unsafe_check();
2819 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
2820}
2821
2822static VALUE
2823call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2824{
2825 ractor_unsafe_check();
2827 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
2828}
2829
2830static VALUE
2831call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2832{
2833 ractor_unsafe_check();
2835 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
2836}
2837
/* Invoker for ractor-safe cfuncs with arity -2: (recv, args_as_Array). */
static VALUE
ractor_safe_call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    return (*func)(recv, rb_ary_new4(argc, argv));
}
2843
/* Invoker for ractor-safe cfuncs with arity -1: (argc, argv, recv). */
static VALUE
ractor_safe_call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    return (*func)(argc, argv, recv);
}
2849
/* Invoker for ractor-safe cfuncs with arity 0. */
static VALUE
ractor_safe_call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
    return (*f)(recv);
}
2856
/* Invoker for ractor-safe cfuncs with arity 1. */
static VALUE
ractor_safe_call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, argv[0]);
}
2863
/* Invoker for ractor-safe cfuncs with arity 2. */
static VALUE
ractor_safe_call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1]);
}
2870
/* Invoker for ractor-safe cfuncs with arity 3. */
static VALUE
ractor_safe_call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2]);
}
2877
/* Invoker for ractor-safe cfuncs with arity 4. */
static VALUE
ractor_safe_call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
}
2884
/* Invoker for ractor-safe cfuncs with arity 5. */
static VALUE
ractor_safe_call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
}
2891
2892static VALUE
2893ractor_safe_call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2894{
2896 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
2897}
2898
2899static VALUE
2900ractor_safe_call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2901{
2903 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
2904}
2905
2906static VALUE
2907ractor_safe_call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2908{
2910 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
2911}
2912
2913static VALUE
2914ractor_safe_call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2915{
2917 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
2918}
2919
2920static VALUE
2921ractor_safe_call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2922{
2924 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
2925}
2926
2927static VALUE
2928ractor_safe_call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2929{
2931 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
2932}
2933
2934static VALUE
2935ractor_safe_call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2936{
2938 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
2939}
2940
2941static VALUE
2942ractor_safe_call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2943{
2945 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
2946}
2947
2948static VALUE
2949ractor_safe_call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2950{
2952 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
2953}
2954
2955static VALUE
2956ractor_safe_call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2957{
2959 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
2960}
2961
2962static inline int
2963vm_cfp_consistent_p(rb_execution_context_t *ec, const rb_control_frame_t *reg_cfp)
2964{
2965 const int ov_flags = RAISED_STACKOVERFLOW;
2966 if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
2967 if (rb_ec_raised_p(ec, ov_flags)) {
2968 rb_ec_raised_reset(ec, ov_flags);
2969 return TRUE;
2970 }
2971 return FALSE;
2972}
2973
/* Abort with rb_bug if the frame stack is inconsistent after a cfunc call. */
#define CHECK_CFP_CONSISTENCY(func) \
    (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
     rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
2977
/* Return the cfunc descriptor embedded in a method entry. With
 * VM_DEBUG_VERIFY_METHOD_CACHE, abort on any non-cfunc method type. */
static inline
const rb_method_cfunc_t *
vm_method_cfunc_entry(const rb_callable_method_entry_t *me)
{
#if VM_DEBUG_VERIFY_METHOD_CACHE
    switch (me->def->type) {
      case VM_METHOD_TYPE_CFUNC:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
        break;
# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
        METHOD_BUG(ISEQ);
        METHOD_BUG(ATTRSET);
        METHOD_BUG(IVAR);
        METHOD_BUG(BMETHOD);
        METHOD_BUG(ZSUPER);
        METHOD_BUG(UNDEF);
        METHOD_BUG(OPTIMIZED);
        METHOD_BUG(MISSING);
        METHOD_BUG(REFINED);
        METHOD_BUG(ALIAS);
# undef METHOD_BUG
      default:
        rb_bug("wrong method type: %d", me->def->type);
    }
#endif
    return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
}
3005
/* Invoke a C-implemented method: push a CFUNC frame, fire c-call hooks, call
 * through the arity-specific invoker, verify frame consistency, pop the
 * frame, and fire c-return hooks. Returns the method's result VALUE. */
static VALUE
vm_call_cfunc_with_frame(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;
    VALUE val;
    const rb_callable_method_entry_t *me = vm_cc_cme(cc);
    const rb_method_cfunc_t *cfunc = vm_method_cfunc_entry(me);
    int len = cfunc->argc;

    VALUE recv = calling->recv;
    VALUE block_handler = calling->block_handler;
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
    int argc = calling->argc;
    int orig_argc = argc;

    if (UNLIKELY(calling->kw_splat)) {
        frame_type |= VM_FRAME_FLAG_CFRAME_KW;
    }

    RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, recv, me->def->original_id, vm_ci_mid(ci), me->owner, Qundef);

    vm_push_frame(ec, NULL, frame_type, recv,
                  block_handler, (VALUE)me,
                  0, ec->cfp->sp, 0, 0);

    /* fixed-arity cfuncs get their argument count checked here */
    if (len >= 0) rb_check_arity(argc, len, len);

    /* pop receiver + args from the caller's frame before invoking */
    reg_cfp->sp -= orig_argc + 1;
    val = (*cfunc->invoker)(recv, argc, reg_cfp->sp + 1, cfunc->func);

    CHECK_CFP_CONSISTENCY("vm_call_cfunc");

    rb_vm_pop_frame(ec);

    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
    RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);

    return val;
}
3048
/* First-time cfunc call: normalize caller args, then delegate. Installs
 * vm_call_cfunc_with_frame as the fastpath when the call site has no splat
 * or keyword arguments (so CALLER_SETUP_ARG can be skipped next time). */
static VALUE
vm_call_cfunc(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    RB_DEBUG_COUNTER_INC(ccf_cfunc);

    CALLER_SETUP_ARG(reg_cfp, calling, ci);
    CALLER_REMOVE_EMPTY_KW_SPLAT(reg_cfp, calling, ci);
    CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat);
    return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
}
3060
/* Optimized attribute reader (attr_reader): pop the receiver and return the
 * instance variable directly, with no method frame. */
static VALUE
vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    RB_DEBUG_COUNTER_INC(ccf_ivar);
    cfp->sp -= 1;
    return vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE);
}
3069
/* Optimized attribute writer (attr_writer): pop receiver + value and set the
 * instance variable directly, with no method frame. Returns the value. */
static VALUE
vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    RB_DEBUG_COUNTER_INC(ccf_attrset);
    VALUE val = *(cfp->sp - 1);
    cfp->sp -= 2;
    return vm_setivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, val, NULL, NULL, cc, 1);
}
3079
3080bool
3081rb_vm_call_ivar_attrset_p(const vm_call_handler ch)
3082{
3083 return (ch == vm_call_ivar || ch == vm_call_attrset);
3084}
3085
/* Invoke a bmethod (a method defined from a Proc, e.g. define_method) by
 * yielding into its Proc. Raises if the Proc is unshareable and was defined
 * on a different Ractor. */
static inline VALUE
vm_call_bmethod_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv)
{
    rb_proc_t *proc;
    VALUE val;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;

    if (!RB_OBJ_SHAREABLE_P(procv) &&
        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    /* control block frame */
    GetProcPtr(procv, proc);
    val = rb_vm_invoke_bmethod(ec, proc, calling->recv, calling->argc, argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));

    return val;
}
3106
/* bmethod call entry: normalize caller args, copy them off the VM stack into
 * an alloca'd buffer (the frame region is reused), then invoke the Proc. */
static VALUE
vm_call_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_bmethod);

    VALUE *argv;
    int argc;
    const struct rb_callinfo *ci = calling->ci;

    CALLER_SETUP_ARG(cfp, calling, ci);
    argc = calling->argc;
    argv = ALLOCA_N(VALUE, argc);
    MEMCPY(argv, cfp->sp - argc, VALUE, argc);
    /* pop receiver + args from the caller's frame */
    cfp->sp += - argc - 1;

    return vm_call_bmethod_body(ec, calling, argv);
}
3124
/* Walk the ancestry of current_class looking for the entry whose owner is
 * target_owner; returns that class/iclass, or current_class when not found
 * (e.g. module functions). */
MJIT_FUNC_EXPORTED VALUE
rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
{
    VALUE klass = current_class;

    /* for prepended Module, then start from cover class */
    if (RB_TYPE_P(klass, T_ICLASS) && FL_TEST(klass, RICLASS_IS_ORIGIN) &&
        RB_TYPE_P(RBASIC_CLASS(klass), T_CLASS)) {
        klass = RBASIC_CLASS(klass);
    }

    while (RTEST(klass)) {
        VALUE owner = RB_TYPE_P(klass, T_ICLASS) ? RBASIC_CLASS(klass) : klass;
        if (owner == target_owner) {
            return klass;
        }
        klass = RCLASS_SUPER(klass);
    }

    return current_class; /* maybe module function */
}
3146
3147static const rb_callable_method_entry_t *
3148aliased_callable_method_entry(const rb_callable_method_entry_t *me)
3149{
3150 const rb_method_entry_t *orig_me = me->def->body.alias.original_me;
3151 const rb_callable_method_entry_t *cme;
3152
3153 if (orig_me->defined_class == 0) {
3154 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
3155 VM_ASSERT(RB_TYPE_P(orig_me->owner, T_MODULE));
3156 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
3157
3158 if (me->def->alias_count + me->def->complemented_count == 0) {
3159 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
3160 }
3161 else {
3163 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
3164 rb_method_definition_set((rb_method_entry_t *)me, def, (void *)cme);
3165 }
3166 }
3167 else {
3168 cme = (const rb_callable_method_entry_t *)orig_me;
3169 }
3170
3171 VM_ASSERT(callable_method_entry_p(cme));
3172 return cme;
3173}
3174
3176rb_aliased_callable_method_entry(const rb_callable_method_entry_t *me)
3177{
3178 return aliased_callable_method_entry(me);
3179}
3180
/* Call through an alias: swap in a stack-allocated call cache holding the
 * resolved original method entry, then redispatch by method type. */
static VALUE
vm_call_alias(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    calling->cc = &VM_CC_ON_STACK(Qundef,
                                  vm_call_general,
                                  { 0 },
                                  aliased_callable_method_entry(vm_cc_cme(calling->cc)));

    return vm_call_method_each_type(ec, cfp, calling);
}
3191
3192static enum method_missing_reason
3193ci_missing_reason(const struct rb_callinfo *ci)
3194{
3195 enum method_missing_reason stat = MISSING_NOENTRY;
3196 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
3197 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
3198 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
3199 return stat;
3200}
3201
/* Dispatch a call whose method name arrived as a Symbol/String (e.g.
 * via send or a Symbol block handler).
 *
 * If the name is not an already-interned ID, reroute to method_missing:
 * the name is pushed back as the first argument (shifting the existing
 * arguments up by one slot). When the receiver has only the default
 * method_missing, raise NoMethodError directly without interning the
 * symbol (see [Feature #5112]). */
static VALUE
vm_call_symbol(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
               struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE symbol)
{
    ASSUME(calling->argc >= 0);
    /* Also assumes CALLER_SETUP_ARG is already done. */

    enum method_missing_reason missing_reason = MISSING_NOENTRY;
    int argc = calling->argc;
    VALUE recv = calling->recv;
    VALUE klass = CLASS_OF(recv);
    ID mid = rb_check_id(&symbol); /* 0 when the name was never interned */
    int flags = VM_CALL_FCALL |
                VM_CALL_OPT_SEND |
                (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);

    if (UNLIKELY(! mid)) {
        mid = idMethodMissing;
        missing_reason = ci_missing_reason(ci);
        ec->method_missing_reason = missing_reason;

        /* E.g. when argc == 2
         *
         *   |      |        |      | TOPN
         *   |      |        +------+
         *   |      |  +---> | arg1 |    0
         *   +------+  |     +------+
         *   | arg1 | -+ +-> | arg0 |    1
         *   +------+    |   +------+
         *   | arg0 | ---+   | sym  |    2
         *   +------+        +------+
         *   | recv |        | recv |    3
         * --+------+--------+------+------
         */
        /* open one slot at the bottom of the argument list for the name */
        int i = argc;
        CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
        INC_SP(1);
        MEMMOVE(&TOPN(i - 1), &TOPN(i), VALUE, i);
        argc = ++calling->argc;

        if (rb_method_basic_definition_p(klass, idMethodMissing)) {
            /* Inadvertent symbol creation shall be forbidden, see [Feature #5112] */
            TOPN(i) = symbol;
            int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
            const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
            VALUE exc = rb_make_no_method_exception(
                rb_eNoMethodError, 0, recv, argc, argv, priv);

            rb_exc_raise(exc);
        }
        else {
            /* user-defined method_missing exists: intern and let it see the name */
            TOPN(i) = rb_str_intern(symbol);
        }
    }

    /* re-dispatch with a synthesized call site for mid */
    calling->ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci));
    calling->cc = &VM_CC_ON_STACK(klass,
                                  vm_call_general,
                                  { .method_missing_reason = missing_reason },
                                  rb_callable_method_entry_with_refinements(klass, mid, NULL));

    return vm_call_method(ec, reg_cfp, calling);
}
3265
/* Optimized implementation of Kernel#send / #__send__.
 *
 * Pops the method-name argument (deepest argument on the stack), shifts
 * the remaining arguments down one slot, and re-dispatches through
 * vm_call_symbol with the name. Raises ArgumentError when called with
 * no arguments at all. */
static VALUE
vm_call_opt_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send);

    int i;
    VALUE sym;

    CALLER_SETUP_ARG(reg_cfp, calling, calling->ci);

    i = calling->argc - 1;

    if (calling->argc == 0) {
        rb_raise(rb_eArgError, "no method name given");
    }
    else {
        sym = TOPN(i);
        /* E.g. when i == 2
         *
         *   |      |        |      | TOPN
         *   +------+        |      |
         *   | arg1 | ---+   |      |    0
         *   +------+    |   +------+
         *   | arg0 | -+ +-> | arg1 |    1
         *   +------+  |     +------+
         *   | sym  |  +---> | arg0 |    2
         *   +------+        +------+
         *   | recv |        | recv |    3
         * --+------+--------+------+------
         */
        /* shift arguments */
        if (i > 0) {
            MEMMOVE(&TOPN(i), &TOPN(i-1), VALUE, i);
        }
        calling->argc -= 1;
        DEC_SP(1);

        return vm_call_symbol(ec, reg_cfp, calling, calling->ci, sym);
    }
}
3306
/* Redirect a call to the receiver's method_missing.
 *
 * Shifts the existing arguments up one slot, inserts the original method
 * name as a Symbol in argv[0], records REASON in the execution context,
 * and dispatches method_missing (resolved without refinements) with a
 * synthesized call site. */
static VALUE
vm_call_method_missing_body(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                            const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
{
    RB_DEBUG_COUNTER_INC(ccf_method_missing);

    VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
    unsigned int argc;

    CALLER_SETUP_ARG(reg_cfp, calling, orig_ci);
    argc = calling->argc + 1; /* +1 for the method-name symbol */

    unsigned int flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);
    calling->argc = argc;

    /* shift arguments: m(a, b, c) #=> method_missing(:m, a, b, c) */
    CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
    vm_check_canary(ec, reg_cfp->sp);
    if (argc > 1) {
        MEMMOVE(argv+1, argv, VALUE, argc-1);
    }
    argv[0] = ID2SYM(vm_ci_mid(orig_ci));
    INC_SP(1);

    ec->method_missing_reason = reason;
    calling->ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci));
    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, { 0 },
                                  rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
    return vm_call_method(ec, reg_cfp, calling);
}
3337
3338static VALUE
3339vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3340{
3341 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->ci, vm_cc_cmethod_missing_reason(calling->cc));
3342}
3343
3344static const rb_callable_method_entry_t *refined_method_callable_without_refinement(const rb_callable_method_entry_t *me);
/* Invoke a zsuper (bare `super`) method entry: look the method up again
 * starting from KLASS's superclass and dispatch the found entry. Falls
 * back to the no-method-entry path when nothing is found, and unwraps
 * refined entries to their non-refined original. */
static VALUE
vm_call_zsuper(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, VALUE klass)
{
    klass = RCLASS_SUPER(klass);

    const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, vm_ci_mid(calling->ci)) : NULL;
    if (cme == NULL) {
        return vm_call_method_nome(ec, cfp, calling);
    }
    if (cme->def->type == VM_METHOD_TYPE_REFINED &&
        cme->def->body.refined.orig_me) {
        cme = refined_method_callable_without_refinement(cme);
    }

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, { 0 }, cme);

    return vm_call_method_each_type(ec, cfp, calling);
}
3363
3364static inline VALUE
3365find_refinement(VALUE refinements, VALUE klass)
3366{
3367 if (NIL_P(refinements)) {
3368 return Qnil;
3369 }
3370 return rb_hash_lookup(refinements, klass);
3371}
3372
3373PUREFUNC(static rb_control_frame_t * current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp));
/* Walk from CFP to the control frame of the method that lexically owns
 * it: when CFP is a block frame, step to outer frames until reaching the
 * frame whose iseq is the block's local (method-level) iseq. If the walk
 * runs off the control-frame stack (orphan block), the original frame is
 * returned unchanged. */
static rb_control_frame_t *
current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp)
{
    rb_control_frame_t *top_cfp = cfp;

    if (cfp->iseq && cfp->iseq->body->type == ISEQ_TYPE_BLOCK) {
        const rb_iseq_t *local_iseq = cfp->iseq->body->local_iseq;

        do {
            cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
            if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
                /* TODO: orphan block */
                return top_cfp;
            }
        } while (cfp->iseq != local_iseq);
    }
    return cfp;
}
3392
3393static const rb_callable_method_entry_t *
3394refined_method_callable_without_refinement(const rb_callable_method_entry_t *me)
3395{
3396 const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
3397 const rb_callable_method_entry_t *cme;
3398
3399 if (orig_me->defined_class == 0) {
3400 cme = NULL;
3402 }
3403 else {
3404 cme = (const rb_callable_method_entry_t *)orig_me;
3405 }
3406
3407 VM_ASSERT(callable_method_entry_p(cme));
3408
3409 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
3410 cme = NULL;
3411 }
3412
3413 return cme;
3414}
3415
/* Find the method entry to invoke for a call on a refined method.
 *
 * Scans the caller's cref chain for an active refinement of the method's
 * owner and prefers the refining definition. On the invokesuper path
 * (cc->call == vm_call_super_method) the refinement currently executing
 * is skipped so `super` inside a refined method reaches the next
 * definition. When no refinement applies, falls back to the refined
 * entry's original method, or to a superclass lookup when there is no
 * original. Returns NULL when nothing callable is found. */
static const rb_callable_method_entry_t *
search_refined_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    ID mid = vm_ci_mid(calling->ci);
    const rb_cref_t *cref = vm_get_cref(cfp->ep);
    const struct rb_callcache * const cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);

    for (; cref; cref = CREF_NEXT(cref)) {
        const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
        if (NIL_P(refinement)) continue;

        const rb_callable_method_entry_t *const ref_me =
            rb_callable_method_entry(refinement, mid);

        if (ref_me) {
            if (vm_cc_call(cc) == vm_call_super_method) {
                /* `super` inside the refinement must not re-enter itself */
                const rb_control_frame_t *top_cfp = current_method_entry(ec, cfp);
                const rb_callable_method_entry_t *top_me = rb_vm_frame_method_entry(top_cfp);
                if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
                    continue;
                }
            }

            if (cme->def->type != VM_METHOD_TYPE_REFINED ||
                cme->def != ref_me->def) {
                cme = ref_me;
            }
            if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
                return cme;
            }
        }
        else {
            return NULL;
        }
    }

    if (vm_cc_cme(cc)->def->body.refined.orig_me) {
        return refined_method_callable_without_refinement(vm_cc_cme(cc));
    }
    else {
        VALUE klass = RCLASS_SUPER(vm_cc_cme(cc)->defined_class);
        const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, mid) : NULL;
        return cme;
    }
}
3462
3463static VALUE
3464vm_call_refined(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3465{
3466 struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, { 0 },
3467 search_refined_method(ec, cfp, calling));
3468
3469 if (vm_cc_cme(ref_cc)) {
3470 calling->cc= ref_cc;
3471 return vm_call_method(ec, cfp, calling);
3472 }
3473 else {
3474 return vm_call_method_nome(ec, cfp, calling);
3475 }
3476}
3477
3478static inline VALUE vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_callinfo *ci, bool is_lambda, VALUE block_handler);
3479
3480NOINLINE(static VALUE
3481 vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
3482 struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler));
3483
/* Invoke BLOCK_HANDLER for an optimized Proc#call: the receiver (the
 * proc itself) is removed from under the arguments, which are shifted
 * down one slot, then the block is invoked as a non-lambda yield. */
static VALUE
vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                         struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler)
{
    int argc = calling->argc;

    /* remove self */
    if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
    DEC_SP(1);

    return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
}
3496
3497static VALUE
3498vm_call_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3499{
3500 RB_DEBUG_COUNTER_INC(ccf_opt_call);
3501
3502 const struct rb_callinfo *ci = calling->ci;
3503 VALUE procval = calling->recv;
3504 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
3505}
3506
/* Optimized dispatch for calling the block passed to the current method
 * (e.g. `yield`-like block.call). While Proc#call is unredefined, invoke
 * the frame's block handler directly; otherwise materialize the proc and
 * go through the general (slow-path) call machinery. */
static VALUE
vm_call_opt_block_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_block_call);

    VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
    const struct rb_callinfo *ci = calling->ci;

    if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
        return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
    }
    else {
        calling->recv = rb_vm_bh_to_procval(ec, block_handler);
        calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
        return vm_call_general(ec, reg_cfp, calling);
    }
}
3524
/* Core of the optimized Struct member reader: fetch the member at the
 * index recorded in the optimized method definition. Receiver type and
 * method kind are assertion-checked only. */
static VALUE
vm_call_opt_struct_aref0(rb_execution_context_t *ec, struct rb_calling_info *calling)
{
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    return internal_RSTRUCT_GET(recv, off);
}
3537
3538static VALUE
3539vm_call_opt_struct_aref(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3540{
3541 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
3542
3543 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
3544 reg_cfp->sp -= 1;
3545 return ret;
3546}
3547
/* Core of the optimized Struct member writer: store VAL into the member
 * at the index recorded in the optimized method definition, after the
 * frozen check. Returns the stored value (setter convention). */
static VALUE
vm_call_opt_struct_aset0(rb_execution_context_t *ec, struct rb_calling_info *calling, VALUE val)
{
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);

    rb_check_frozen(recv);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    internal_RSTRUCT_SET(recv, off, val);

    return val;
}
3564
3565static VALUE
3566vm_call_opt_struct_aset(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3567{
3568 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
3569
3570 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
3571 reg_cfp->sp -= 2;
3572 return ret;
3573}
3574
3575NOINLINE(static VALUE vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
3576 const struct rb_callinfo *ci, const struct rb_callcache *cc));
3577
/* Dispatch a VM_METHOD_TYPE_OPTIMIZED method entry by its optimized
 * sub-type (send / Proc#call / block.call / Struct reader / Struct
 * writer), installing the matching fastpath in the callcache where the
 * call shape allows it. */
static VALUE
vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                  const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    switch (vm_cc_cme(cc)->def->body.optimized.type) {
      case OPTIMIZED_METHOD_TYPE_SEND:
        CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
        return vm_call_opt_send(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
        return vm_call_opt_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
        return vm_call_opt_block_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
        rb_check_arity(calling->argc, 0, 0);
        /* fastpath only for plain calls (no splat/kwargs/block-arg) */
        CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
        return vm_call_opt_struct_aref(ec, cfp, calling);

      case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
        rb_check_arity(calling->argc, 1, 1);
        CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
        return vm_call_opt_struct_aset(ec, cfp, calling);
      default:
        rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
    }
}
3609
/*
 * Evaluate `var = func` (an attr reader/writer fast call). When any
 * c-call/c-return TracePoint hooks are armed, surround the call with the
 * corresponding events; otherwise run `nohook` first (typically a
 * CC_SET_FASTPATH that installs the hook-free fastpath) and call without
 * event overhead. Expands in a context where ec, calling, ci and cc are
 * in scope.
 */
#define VM_CALL_METHOD_ATTR(var, func, nohook) \
    if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
        var = func; \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
    } \
    else { \
        nohook; \
        var = func; \
    }
3622
/* Dispatch a call according to the resolved method entry's definition
 * type (iseq, cfunc, attr reader/writer, bmethod, alias, optimized,
 * zsuper, refined, ...), installing the matching fastpath handler in the
 * callcache where safe. Reaching an unknown type is a VM bug. */
static VALUE
vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE v;

    switch (cme->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
        return vm_call_iseq_setup(ec, cfp, calling);

      case VM_METHOD_TYPE_NOTIMPLEMENTED:
      case VM_METHOD_TYPE_CFUNC:
        CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
        return vm_call_cfunc(ec, cfp, calling);

      case VM_METHOD_TYPE_ATTRSET:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

        rb_check_arity(calling->argc, 1, 1);
        vm_cc_attr_index_set(cc, 0);
        /* splat/kwarg call shapes cannot use the attrset fastpath */
        const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG);
        VM_CALL_METHOD_ATTR(v,
                            vm_call_attrset(ec, cfp, calling),
                            CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        return v;

      case VM_METHOD_TYPE_IVAR:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
        rb_check_arity(calling->argc, 0, 0);
        vm_cc_attr_index_set(cc, 0);
        const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT);
        VM_CALL_METHOD_ATTR(v,
                            vm_call_ivar(ec, cfp, calling),
                            CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
        return v;

      case VM_METHOD_TYPE_MISSING:
        vm_cc_method_missing_reason_set(cc, 0);
        CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
        return vm_call_method_missing(ec, cfp, calling);

      case VM_METHOD_TYPE_BMETHOD:
        CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
        return vm_call_bmethod(ec, cfp, calling);

      case VM_METHOD_TYPE_ALIAS:
        CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
        return vm_call_alias(ec, cfp, calling);

      case VM_METHOD_TYPE_OPTIMIZED:
        return vm_call_optimized(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_UNDEF:
        break;

      case VM_METHOD_TYPE_ZSUPER:
        return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));

      case VM_METHOD_TYPE_REFINED:
        // CC_SET_FASTPATH(cc, vm_call_refined, TRUE);
        // should not set FASTPATH since vm_call_refined assumes cc->call is vm_call_super_method on invokesuper.
        return vm_call_refined(ec, cfp, calling);
    }

    rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
}
3694
3695NORETURN(static void vm_raise_method_missing(rb_execution_context_t *ec, int argc, const VALUE *argv, VALUE obj, int call_status));
3696
/* Handle a call for which no method entry was found. If the missing
 * method is method_missing itself, raise NoMethodError directly (the
 * raise helper does not return); otherwise reroute through the
 * method_missing dispatch body. */
static VALUE
vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    /* method missing */
    const struct rb_callinfo *ci = calling->ci;
    const int stat = ci_missing_reason(ci);

    if (vm_ci_mid(ci) == idMethodMissing) {
        /* reg_cfp is required by the STACK_ADDR_FROM_TOP macro below */
        rb_control_frame_t *reg_cfp = cfp;
        VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
        vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
    }
    else {
        return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
    }
}
3713
/* Top-level method dispatch with visibility checking.
 *
 * Public methods dispatch directly. Private methods require a
 * function-call form (FCALL) or go to method_missing. Protected methods
 * require the caller's self to be a kind of the defining class; when the
 * check passes, dispatch continues on an unmarkable stack copy of the
 * callcache so the protected result is not cached globally. With no
 * method entry at all, falls through to the no-method path. */
static inline VALUE
vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PRIVATE:
            if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
                enum method_missing_reason stat = MISSING_PRIVATE;
                if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;

                vm_cc_method_missing_reason_set(cc, stat);
                CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
                return vm_call_method_missing(ec, cfp, calling);
            }
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PROTECTED:
            if (!(vm_ci_flag(ci) & VM_CALL_OPT_SEND)) {
                if (!rb_obj_is_kind_of(cfp->self, vm_cc_cme(cc)->defined_class)) {
                    vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
                    return vm_call_method_missing(ec, cfp, calling);
                }
                else {
                    /* caching method info to dummy cc */
                    VM_ASSERT(vm_cc_cme(cc) != NULL);
                    struct rb_callcache cc_on_stack = *cc;
                    FL_SET_RAW((VALUE)&cc_on_stack, VM_CALLCACHE_UNMARKABLE);
                    calling->cc = &cc_on_stack;
                    return vm_call_method_each_type(ec, cfp, calling);
                }
            }
            return vm_call_method_each_type(ec, cfp, calling);

          default:
            rb_bug("unreachable");
        }
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}
3763
3764static VALUE
3765vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3766{
3767 RB_DEBUG_COUNTER_INC(ccf_general);
3768 return vm_call_method(ec, reg_cfp, calling);
3769}
3770
/* Reset CC's call handler back to the generic dispatch path (used when a
 * previously-installed fastpath must be invalidated). The cast strips
 * const from call_ — callcaches are const to readers but the VM may
 * rewrite the handler in place. */
void
rb_vm_cc_general(const struct rb_callcache *cc)
{
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc != vm_cc_empty());

    *(vm_call_handler *)&cc->call_ = vm_call_general;
}
3779
/* Call handler installed for invokesuper call sites. Behaves like
 * vm_call_general but must remain a distinct function: its address is
 * compared in search_refined_method to detect the super path. */
static VALUE
vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_super_method);

    // This line is introduced to make different from `vm_call_general` because some compilers (VC we found)
    // can merge the function and the address of the function becomes same.
    // The address of `vm_call_super_method` is used in `search_refined_method`, so it should be different.
    if (ec == NULL) rb_bug("unreachable");

    /* this check is required to distinguish with other functions. */
    VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
    return vm_call_method(ec, reg_cfp, calling);
}
3794
3795/* super */
3796
3797static inline VALUE
3798vm_search_normal_superclass(VALUE klass)
3799{
3800 if (BUILTIN_TYPE(klass) == T_ICLASS &&
3801 FL_TEST_RAW(RBASIC(klass)->klass, RMODULE_IS_REFINEMENT)) {
3802 klass = RBASIC(klass)->klass;
3803 }
3804 klass = RCLASS_ORIGIN(klass);
3805 return RCLASS_SUPER(klass);
3806}
3807
3808NORETURN(static void vm_super_outside(void));
3809
/* Raise the NoMethodError used when `super` is evaluated outside any
 * method frame. */
static void
vm_super_outside(void)
{
    rb_raise(rb_eNoMethodError, "super called outside of method");
}
3815
/* Return the shared empty callcache used for unresolved super calls.
 * From the MJIT header the static object is not visible, so an accessor
 * into the VM is used instead. */
static const struct rb_callcache *
empty_cc_for_super(void)
{
#ifdef MJIT_HEADER
    return rb_vm_empty_cc_for_super();
#else
    return &vm_empty_cc_for_super;
#endif
}
3825
/* Resolve the callcache for an invokesuper call site.
 *
 * Determines the lookup start from the current frame's method entry,
 * validates that super is legal here (raises NoMethodError, TypeError or
 * RuntimeError otherwise), rewrites cd->ci to carry the method's
 * original id, and fills cd->cc — creating a new cache, reusing the
 * fastpath cache, or falling back to the shared empty super cache. */
static const struct rb_callcache *
vm_search_super_method(const rb_control_frame_t *reg_cfp, struct rb_call_data *cd, VALUE recv)
{
    VALUE current_defined_class;
    const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);

    if (!me) {
        vm_super_outside();
    }

    current_defined_class = me->defined_class;

    if (!NIL_P(RCLASS_REFINED_CLASS(current_defined_class))) {
        current_defined_class = RCLASS_REFINED_CLASS(current_defined_class);
    }

    /* reject super when self is not an instance of the defining class
     * (e.g. a method re-bound onto an unrelated object) */
    if (BUILTIN_TYPE(current_defined_class) != T_MODULE &&
        !FL_TEST_RAW(current_defined_class, RMODULE_INCLUDED_INTO_REFINEMENT) &&
        reg_cfp->iseq != method_entry_iseqptr(me) &&
        !rb_obj_is_kind_of(recv, current_defined_class)) {
        VALUE m = RB_TYPE_P(current_defined_class, T_ICLASS) ?
            RCLASS_INCLUDER(current_defined_class) : current_defined_class;

        if (m) { /* not bound UnboundMethod */
            rb_raise(rb_eTypeError,
                     "self has wrong type to call super in this context: "
                     "%"PRIsVALUE" (expected %"PRIsVALUE")",
                     rb_obj_class(recv), m);
        }
    }

    if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
        rb_raise(rb_eRuntimeError,
                 "implicit argument passing of super from method defined"
                 " by define_method() is not supported."
                 " Specify all arguments explicitly.");
    }

    ID mid = me->def->original_id;

    // update iseq. really? (TODO)
    cd->ci = vm_ci_new_runtime(mid,
                               vm_ci_flag(cd->ci),
                               vm_ci_argc(cd->ci),
                               vm_ci_kwarg(cd->ci));

    RB_OBJ_WRITTEN(reg_cfp->iseq, Qundef, cd->ci);

    const struct rb_callcache *cc;

    VALUE klass = vm_search_normal_superclass(me->defined_class);

    if (!klass) {
        /* bound instance method of module */
        cc = vm_cc_new(klass, NULL, vm_call_method_missing);
        RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
    }
    else {
        cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
        const rb_callable_method_entry_t *cached_cme = vm_cc_cme(cc);

        // define_method can cache for different method id
        if (cached_cme == NULL) {
            // empty_cc_for_super is not markable object
            cd->cc = empty_cc_for_super();
        }
        else if (cached_cme->called_id != mid) {
            const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);
            if (cme) {
                cc = vm_cc_new(klass, cme, vm_call_super_method);
                RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
            }
            else {
                cd->cc = cc = empty_cc_for_super();
            }
        }
        else {
            switch (cached_cme->def->type) {
              // vm_call_refined (search_refined_method) assumes cc->call is vm_call_super_method on invokesuper
              case VM_METHOD_TYPE_REFINED:
              // cc->klass is superclass of receiver class. Checking cc->klass is not enough to invalidate IVC for the receiver class.
              case VM_METHOD_TYPE_ATTRSET:
              case VM_METHOD_TYPE_IVAR:
                vm_cc_call_set(cc, vm_call_super_method); // invalidate fastpath
                break;
              default:
                break; // use fastpath
            }
        }
    }

    VM_ASSERT((vm_cc_cme(cc), true));

    return cc;
}
3921
3922/* yield */
3923
3924static inline int
3925block_proc_is_lambda(const VALUE procval)
3926{
3927 rb_proc_t *proc;
3928
3929 if (procval) {
3930 GetProcPtr(procval, proc);
3931 return proc->is_lambda;
3932 }
3933 else {
3934 return 0;
3935 }
3936}
3937
3938static VALUE
3939vm_yield_with_cfunc(rb_execution_context_t *ec,
3940 const struct rb_captured_block *captured,
3941 VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
3943{
3944 int is_lambda = FALSE; /* TODO */
3945 VALUE val, arg, blockarg;
3946 int frame_flag;
3947 const struct vm_ifunc *ifunc = captured->code.ifunc;
3948
3949 if (is_lambda) {
3950 arg = rb_ary_new4(argc, argv);
3951 }
3952 else if (argc == 0) {
3953 arg = Qnil;
3954 }
3955 else {
3956 arg = argv[0];
3957 }
3958
3959 blockarg = rb_vm_bh_to_procval(ec, block_handler);
3960
3961 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
3962 if (kw_splat) {
3963 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
3964 }
3965
3966 vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
3967 frame_flag,
3968 self,
3969 VM_GUARDED_PREV_EP(captured->ep),
3970 (VALUE)me,
3971 0, ec->cfp->sp, 0, 0);
3972 val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
3973 rb_vm_pop_frame(ec);
3974
3975 return val;
3976}
3977
3978static VALUE
3979vm_yield_with_symbol(rb_execution_context_t *ec, VALUE symbol, int argc, const VALUE *argv, int kw_splat, VALUE block_handler)
3980{
3981 return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
3982}
3983
3984static inline int
3985vm_callee_setup_block_arg_arg0_splat(rb_control_frame_t *cfp, const rb_iseq_t *iseq, VALUE *argv, VALUE ary)
3986{
3987 int i;
3988 long len = RARRAY_LEN(ary);
3989
3990 CHECK_VM_STACK_OVERFLOW(cfp, iseq->body->param.lead_num);
3991
3992 for (i=0; i<len && i<iseq->body->param.lead_num; i++) {
3993 argv[i] = RARRAY_AREF(ary, i);
3994 }
3995
3996 return i;
3997}
3998
/* Check whether the single block argument argv[0] can be treated as an
 * array (via to_ary). Returns the array, or Qnil when no conversion
 * applies. The #if 0 branch documents that rb_check_array_type must not
 * have replaced argv[0] itself. */
static inline VALUE
vm_callee_setup_block_arg_arg0_check(VALUE *argv)
{
    VALUE ary, arg0 = argv[0];
    ary = rb_check_array_type(arg0);
#if 0
    argv[0] = arg0;
#else
    VM_ASSERT(argv[0] == arg0);
#endif
    return ary;
}
4011
/* Arrange argv to match ISEQ's parameters for a block/lambda invocation.
 *
 * For simple iseqs (positional leading params only): apply block-style
 * arg0 array splatting, then pad missing args with nil or truncate
 * extras (block semantics) — or raise an arity error (lambda/method
 * semantics). Returns the opt_pc offset (0 for the simple path,
 * otherwise whatever setup_parameters_complex computes). */
static int
vm_callee_setup_block_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_callinfo *ci, const rb_iseq_t *iseq, VALUE *argv, const enum arg_setup_type arg_setup_type)
{
    if (rb_simple_iseq_p(iseq)) {
        rb_control_frame_t *cfp = ec->cfp;
        VALUE arg0;

        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

        /* single array argument to a multi-parameter block gets splatted */
        if (arg_setup_type == arg_setup_block &&
            calling->argc == 1 &&
            iseq->body->param.flags.has_lead &&
            !iseq->body->param.flags.ambiguous_param0 &&
            !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
            calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
        }

        if (calling->argc != iseq->body->param.lead_num) {
            if (arg_setup_type == arg_setup_block) {
                if (calling->argc < iseq->body->param.lead_num) {
                    int i;
                    CHECK_VM_STACK_OVERFLOW(cfp, iseq->body->param.lead_num);
                    for (i=calling->argc; i<iseq->body->param.lead_num; i++) argv[i] = Qnil;
                    calling->argc = iseq->body->param.lead_num; /* fill rest parameters */
                }
                else if (calling->argc > iseq->body->param.lead_num) {
                    calling->argc = iseq->body->param.lead_num; /* simply truncate arguments */
                }
            }
            else {
                argument_arity_error(ec, iseq, calling->argc, iseq->body->param.lead_num, iseq->body->param.lead_num);
            }
        }

        return 0;
    }
    else {
        return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
    }
}
4053
/* Set up arguments for a yield into ISEQ using a synthesized calling
 * info and a dummy call site (no method id, only the kw_splat flag).
 * Returns the opt_pc from vm_callee_setup_block_arg. */
static int
vm_yield_setup_args(rb_execution_context_t *ec, const rb_iseq_t *iseq, const int argc, VALUE *argv, int kw_splat, VALUE block_handler, enum arg_setup_type arg_setup_type)
{
    struct rb_calling_info calling_entry, *calling;

    calling = &calling_entry;
    calling->argc = argc;
    calling->block_handler = block_handler;
    calling->kw_splat = kw_splat;
    calling->recv = Qundef;
    struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, (kw_splat ? VM_CALL_KW_SPLAT : 0), 0, 0);

    return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
}
4068
4069/* ruby iseq -> ruby block */
4070
/* Invoke an iseq-defined block: set up the arguments in place on the VM
 * stack, then push a BLOCK frame over them. Returns Qundef to signal the
 * interpreter loop to execute the pushed frame (rather than a finished
 * value). */
static VALUE
vm_invoke_iseq_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(block_handler);
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    const int arg_size = iseq->body->param.size;
    VALUE * const rsp = GET_SP() - calling->argc; /* base of the argument region */
    int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, rsp, is_lambda ? arg_setup_method : arg_setup_block);

    SET_SP(rsp);

    vm_push_frame(ec, iseq,
                  VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
                  captured->self,
                  VM_GUARDED_PREV_EP(captured->ep), 0,
                  iseq->body->iseq_encoded + opt_pc,
                  rsp + arg_size,
                  iseq->body->local_table_size - arg_size, iseq->body->stack_max);

    return Qundef;
}
4094
/* Invoke a Symbol block handler (&:sym): the first argument becomes the
 * receiver and the symbol names the method to call on it. Raises
 * ArgumentError when no receiver argument was given. */
static VALUE
vm_invoke_symbol_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                       struct rb_calling_info *calling, const struct rb_callinfo *ci,
                       MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    if (calling->argc < 1) {
        rb_raise(rb_eArgError, "no receiver given");
    }
    else {
        VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
        CALLER_SETUP_ARG(reg_cfp, calling, ci);
        calling->recv = TOPN(--calling->argc); /* pop receiver off the args */
        return vm_call_symbol(ec, reg_cfp, calling, ci, symbol);
    }
}
4110
/* Invoke an ifunc (C-function) block handler: yield through
 * vm_yield_with_cfunc with the arguments still on the VM stack, then pop
 * them. */
static VALUE
vm_invoke_ifunc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                      struct rb_calling_info *calling, const struct rb_callinfo *ci,
                      MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE val;
    int argc;
    const struct rb_captured_block *captured = VM_BH_TO_IFUNC_BLOCK(block_handler);
    CALLER_SETUP_ARG(ec->cfp, calling, ci);
    CALLER_REMOVE_EMPTY_KW_SPLAT(ec->cfp, calling, ci);
    argc = calling->argc;
    val = vm_yield_with_cfunc(ec, captured, captured->self, argc, STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
    POPN(argc); /* TODO: should put before C/yield? */
    return val;
}
4126
/* Convert a Proc object into the block handler for its underlying block
 * representation (iseq, ifunc, symbol, or another proc). */
static VALUE
vm_proc_to_block_handler(VALUE procval)
{
    const struct rb_block *block = vm_proc_block(procval);

    switch (vm_block_type(block)) {
      case block_type_iseq:
        return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
      case block_type_ifunc:
        return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
      case block_type_symbol:
        return VM_BH_FROM_SYMBOL(block->as.symbol);
      case block_type_proc:
        return VM_BH_FROM_PROC(block->as.proc);
    }
    VM_UNREACHABLE(vm_yield_with_proc);
    return Qundef;
}
4145
/* Invoke a Proc block handler: unwrap (possibly nested) Procs down to a
 * concrete iseq/ifunc/symbol handler, tracking lambda-ness of the
 * innermost Proc, then dispatch through vm_invoke_block. */
static VALUE
vm_invoke_proc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
        VALUE proc = VM_BH_TO_PROC(block_handler);
        is_lambda = block_proc_is_lambda(proc);
        block_handler = vm_proc_to_block_handler(proc);
    }

    return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
}
4159
/* Central block-invocation dispatcher: pick the invoker matching the block
 * handler's type and tail-call it. Kept inline so the switch folds away at
 * call sites with a known handler type. */
static inline VALUE
vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                struct rb_calling_info *calling, const struct rb_callinfo *ci,
                bool is_lambda, VALUE block_handler)
{
    VALUE (*func)(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                  struct rb_calling_info *calling, const struct rb_callinfo *ci,
                  bool is_lambda, VALUE block_handler);

    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:   func = vm_invoke_iseq_block;   break;
      case block_handler_type_ifunc:  func = vm_invoke_ifunc_block;  break;
      case block_handler_type_proc:   func = vm_invoke_proc_block;   break;
      case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
      default: rb_bug("vm_invoke_block: unreachable");
    }

    return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
}
4179
/* Build a Proc from a bare block iseq, capturing the current Ruby-level
 * frame as the block's environment. Used by the `once` instruction. */
static VALUE
vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
{
    const rb_execution_context_t *ec = GET_EC();
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
    struct rb_captured_block *captured;

    if (cfp == 0) {
        rb_bug("vm_make_proc_with_iseq: unreachable");
    }

    /* Reuse the frame's self/ep as the captured environment, then point
     * the captured block at the supplied iseq. */
    captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
    captured->code.iseq = blockiseq;

    return rb_vm_make_proc(ec, captured, rb_cProc);
}
4196
4197static VALUE
4198vm_once_exec(VALUE iseq)
4199{
4200 VALUE proc = vm_make_proc_with_iseq((rb_iseq_t *)iseq);
4201 return rb_proc_call_with_block(proc, 0, 0, Qnil);
4202}
4203
4204static VALUE
4205vm_once_clear(VALUE data)
4206{
4207 union iseq_inline_storage_entry *is = (union iseq_inline_storage_entry *)data;
4208 is->once.running_thread = NULL;
4209 return Qnil;
4210}
4211
4212/* defined insn */
4213
4214static bool
4215check_respond_to_missing(VALUE obj, VALUE v)
4216{
4217 VALUE args[2];
4218 VALUE r;
4219
4220 args[0] = obj; args[1] = Qfalse;
4221 r = rb_check_funcall(v, idRespond_to_missing, 2, args);
4222 if (r != Qundef && RTEST(r)) {
4223 return true;
4224 }
4225 else {
4226 return false;
4227 }
4228}
4229
/* Implements the `defined?` keyword for one expression category.
 * `op_type` selects the category, `obj` is usually the Symbol operand and
 * `v` an already-evaluated auxiliary value (receiver, cbase, ...).
 * Returns true iff the expression counts as defined. */
static bool
vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    VALUE klass;
    enum defined_type type = (enum defined_type)op_type;

    switch (type) {
      case DEFINED_IVAR:
        return rb_ivar_defined(GET_SELF(), SYM2ID(obj));
        break;
      case DEFINED_GVAR:
        return rb_gvar_defined(SYM2ID(obj));
        break;
      case DEFINED_CVAR: {
        const rb_cref_t *cref = vm_get_cref(GET_EP());
        klass = vm_get_cvar_base(cref, GET_CFP(), 0);
        return rb_cvar_defined(klass, SYM2ID(obj));
        break;
      }
      case DEFINED_CONST:
      case DEFINED_CONST_FROM: {
        /* DEFINED_CONST allows a nil cbase (lexical lookup); the
         * `::Const` form (DEFINED_CONST_FROM) does not. */
        bool allow_nil = type == DEFINED_CONST;
        klass = v;
        return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
        break;
      }
      case DEFINED_FUNC:
        klass = CLASS_OF(v);
        return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
        break;
      case DEFINED_METHOD:{
        VALUE klass = CLASS_OF(v);
        const rb_method_entry_t *me = rb_method_entry_with_refinements(klass, SYM2ID(obj), NULL);

        if (me) {
            switch (METHOD_ENTRY_VISI(me)) {
              case METHOD_VISI_PRIVATE:
                break;
              case METHOD_VISI_PROTECTED:
                if (!rb_obj_is_kind_of(GET_SELF(), rb_class_real(me->defined_class))) {
                    break;
                }
                /* fallthrough: protected is visible when self is kindred */
              case METHOD_VISI_PUBLIC:
                return true;
                break;
              default:
                rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
            }
        }
        else {
            /* No method entry: give respond_to_missing? a say. */
            return check_respond_to_missing(obj, v);
        }
        break;
      }
      case DEFINED_YIELD:
        if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
            return true;
        }
        break;
      case DEFINED_ZSUPER:
        {
            const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(GET_CFP());

            if (me) {
                VALUE klass = vm_search_normal_superclass(me->defined_class);
                ID id = me->def->original_id;

                return rb_method_boundp(klass, id, 0);
            }
        }
        break;
      case DEFINED_REF:{
        /* $~-style special variables: defined iff currently non-nil. */
        return vm_getspecial(ec, GET_LEP(), Qfalse, FIX2INT(obj)) != Qnil;
        break;
      }
      default:
        rb_bug("unimplemented defined? type (VM)");
        break;
    }

    return false;
}
4312
/* Exported wrapper around vm_defined for use outside this translation
 * unit (presumably the JIT — confirm against callers). */
bool
rb_vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    return vm_defined(ec, reg_cfp, op_type, obj, v);
}
4318
4319static const VALUE *
4320vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
4321{
4322 rb_num_t i;
4323 const VALUE *ep = reg_ep;
4324 for (i = 0; i < lv; i++) {
4325 ep = GET_PREV_EP(ep);
4326 }
4327 return ep;
4328}
4329
/* Implements `putspecialobject`: map the operand to one of the VM's
 * special objects (frozen core, cbase, or const base). */
static VALUE
vm_get_special_object(const VALUE *const reg_ep,
                      enum vm_special_object_type type)
{
    switch (type) {
      case VM_SPECIAL_OBJECT_VMCORE:
        return rb_mRubyVMFrozenCore;
      case VM_SPECIAL_OBJECT_CBASE:
        return vm_get_cbase(reg_ep);
      case VM_SPECIAL_OBJECT_CONST_BASE:
        return vm_get_const_base(reg_ep);
      default:
        rb_bug("putspecialobject insn: unknown value_type %d", type);
    }
}
4345
4346static VALUE
4347vm_concat_array(VALUE ary1, VALUE ary2st)
4348{
4349 const VALUE ary2 = ary2st;
4350 VALUE tmp1 = rb_check_to_array(ary1);
4351 VALUE tmp2 = rb_check_to_array(ary2);
4352
4353 if (NIL_P(tmp1)) {
4354 tmp1 = rb_ary_new3(1, ary1);
4355 }
4356
4357 if (NIL_P(tmp2)) {
4358 tmp2 = rb_ary_new3(1, ary2);
4359 }
4360
4361 if (tmp1 == ary1) {
4362 tmp1 = rb_ary_dup(ary1);
4363 }
4364 return rb_ary_concat(tmp1, tmp2);
4365}
4366
4367static VALUE
4368vm_splat_array(VALUE flag, VALUE ary)
4369{
4370 VALUE tmp = rb_check_to_array(ary);
4371 if (NIL_P(tmp)) {
4372 return rb_ary_new3(1, ary);
4373 }
4374 else if (RTEST(flag)) {
4375 return rb_ary_dup(tmp);
4376 }
4377 else {
4378 return tmp;
4379 }
4380}
4381
/* Exported wrapper around vm_splat_array for use outside this translation
 * unit (presumably the JIT — confirm against callers). */
VALUE
rb_vm_splat_array(VALUE flag, VALUE ary)
{
    return vm_splat_array(flag, ary);
}
4387
4388static VALUE
4389vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
4390{
4391 enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
4392
4393 if (flag & VM_CHECKMATCH_ARRAY) {
4394 long i;
4395 const long n = RARRAY_LEN(pattern);
4396
4397 for (i = 0; i < n; i++) {
4398 VALUE v = RARRAY_AREF(pattern, i);
4399 VALUE c = check_match(ec, v, target, type);
4400
4401 if (RTEST(c)) {
4402 return c;
4403 }
4404 }
4405 return Qfalse;
4406 }
4407 else {
4408 return check_match(ec, pattern, target, type);
4409 }
4410}
4411
/* Implements `checkkeyword`: report whether keyword argument `idx` was
 * NOT supplied by the caller (Qtrue => use the default). The kw-bits
 * local is a Fixnum bitmask for few keywords, otherwise a Hash keyed by
 * index. */
static VALUE
vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
{
    const VALUE kw_bits = *(ep - bits);

    if (FIXNUM_P(kw_bits)) {
        unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
        if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
            return Qfalse;
    }
    else {
        VM_ASSERT(RB_TYPE_P(kw_bits, T_HASH));
        if (rb_hash_has_key(kw_bits, INT2FIX(idx))) return Qfalse;
    }
    return Qtrue;
}
4428
4429static void
4430vm_dtrace(rb_event_flag_t flag, rb_execution_context_t *ec)
4431{
4432 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
4433 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
4434 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
4435 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
4436
4437 switch (flag) {
4438 case RUBY_EVENT_CALL:
4439 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
4440 return;
4441 case RUBY_EVENT_C_CALL:
4442 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
4443 return;
4444 case RUBY_EVENT_RETURN:
4445 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
4446 return;
4448 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
4449 return;
4450 }
4451 }
4452}
4453
4454static VALUE
4455vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
4456{
4457 VALUE ns;
4458
4459 if ((ns = vm_search_const_defined_class(cbase, id)) == 0) {
4460 return ns;
4461 }
4462 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
4463 return rb_public_const_get_at(ns, id);
4464 }
4465 else {
4466 return rb_const_get_at(ns, id);
4467 }
4468}
4469
/* Validate a constant found during `class Foo [< Super]` redefinition:
 * returns the class when reopening is allowed, 0 when `klass` is not a
 * class at all, and raises TypeError on a superclass mismatch. */
static VALUE
vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
{
    if (!RB_TYPE_P(klass, T_CLASS)) {
        return 0;
    }
    else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
        /* Compare against the real superclass, skipping ICLASSes etc. */
        VALUE tmp = rb_class_real(RCLASS_SUPER(klass));

        if (tmp != super) {
            rb_raise(rb_eTypeError,
                     "superclass mismatch for class %"PRIsVALUE"",
                     rb_id2str(id));
        }
        else {
            return klass;
        }
    }
    else {
        return klass;
    }
}
4492
4493static VALUE
4494vm_check_if_module(ID id, VALUE mod)
4495{
4496 if (!RB_TYPE_P(mod, T_MODULE)) {
4497 return 0;
4498 }
4499 else {
4500 return mod;
4501 }
4502}
4503
/* Bind the freshly created class/module `c` to constant `id` under
 * `cbase`, assigning its class path (for #name/#inspect) before the
 * constant assignment itself. */
static VALUE
declare_under(ID id, VALUE cbase, VALUE c)
{
    rb_set_class_path_string(c, cbase, rb_id2str(id));
    rb_const_set(cbase, id, c);
    return c;
}
4511
4512static VALUE
4513vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
4514{
4515 /* new class declaration */
4516 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
4517 VALUE c = declare_under(id, cbase, rb_define_class_id(id, s));
4519 rb_class_inherited(s, c);
4520 return c;
4521}
4522
/* Declare a brand-new module `id` under `cbase`. */
static VALUE
vm_declare_module(ID id, VALUE cbase)
{
    /* new module declaration */
    return declare_under(id, cbase, rb_module_new());
}
4529
/* Raise TypeError for `class Foo` / `module Foo` when the existing
 * constant Foo is of the wrong kind, pointing at the previous definition
 * site when its source location is known. */
NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
static void
unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old)
{
    VALUE name = rb_id2str(id);
    VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
                               name, type);
    VALUE location = rb_const_source_location_at(cbase, id);
    if (!NIL_P(location)) {
        rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
                             " previous definition of %"PRIsVALUE" was here",
                    rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
    }
    rb_exc_raise(rb_exc_new_str(rb_eTypeError, message));
}
4545
/* Implements `class Foo [< Super]`: reopen a matching existing class or
 * declare a new one. Raises TypeError for a non-Class superclass, a
 * superclass mismatch, or an existing non-class constant. */
static VALUE
vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    VALUE klass;

    if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
        rb_raise(rb_eTypeError,
                 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
                 rb_obj_class(super));
    }

    vm_check_if_namespace(cbase);

    /* find klass */
    /* Trigger any autoload for the constant before looking it up. */
    rb_autoload_load(cbase, id);
    if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_class(id, flags, super, klass))
            unmatched_redefinition("class", cbase, id, klass);
        return klass;
    }
    else {
        return vm_declare_class(id, flags, cbase, super);
    }
}
4570
/* Implements `module Foo`: reopen a matching existing module or declare a
 * new one; TypeError when the constant exists but is not a module. */
static VALUE
vm_define_module(ID id, rb_num_t flags, VALUE cbase)
{
    VALUE mod;

    vm_check_if_namespace(cbase);
    if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_module(id, mod))
            unmatched_redefinition("module", cbase, id, mod);
        return mod;
    }
    else {
        return vm_declare_module(id, cbase);
    }
}
4586
/* Entry point for the `defineclass` instruction: dispatch on the
 * definition type (class / singleton class / module). */
static VALUE
vm_find_or_create_class_by_id(ID id,
                              rb_num_t flags,
                              VALUE cbase,
                              VALUE super)
{
    rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);

    switch (type) {
      case VM_DEFINECLASS_TYPE_CLASS:
        /* classdef returns class scope value */
        return vm_define_class(id, flags, cbase, super);

      case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
        /* classdef returns class scope value */
        return rb_singleton_class(cbase);

      case VM_DEFINECLASS_TYPE_MODULE:
        /* classdef returns class scope value */
        return vm_define_module(id, flags, cbase);

      default:
        rb_bug("unknown defineclass type: %d", (int)type);
    }
}
4612
/* Current default method visibility for `def` in the nearest Ruby-level
 * scope; public when the frame has no cref-carrying environment. */
static rb_method_visibility_t
vm_scope_visibility_get(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (!vm_env_cref_by_cref(cfp->ep)) {
        return METHOD_VISI_PUBLIC;
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
    }
}
4625
/* Whether the current scope is in `module_function` mode (so `def` also
 * defines a singleton method on the module). */
static int
vm_scope_module_func_check(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (!vm_env_cref_by_cref(cfp->ep)) {
        return FALSE;
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
    }
}
4638
/* Implements `definemethod`/`definesmethod`: add the iseq as method `id`
 * on the appropriate class. Singleton defs are always public; regular
 * defs take the scope's visibility, and `module_function` additionally
 * installs a public copy on the singleton class. */
static void
vm_define_method(const rb_execution_context_t *ec, VALUE obj, ID id, VALUE iseqval, int is_singleton)
{
    VALUE klass;
    rb_method_visibility_t visi;
    rb_cref_t *cref = vm_ec_cref(ec);

    if (is_singleton) {
        klass = rb_singleton_class(obj); /* class and frozen checked in this API */
        visi = METHOD_VISI_PUBLIC;
    }
    else {
        klass = CREF_CLASS_FOR_DEFINITION(cref);
        visi = vm_scope_visibility_get(ec);
    }

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class/module to add method");
    }

    rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);

    if (!is_singleton && vm_scope_module_func_check(ec)) {
        klass = rb_singleton_class(klass);
        rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
    }
}
4666
/* Implements `invokeblock` (yield): invoke the current frame's block
 * handler; LocalJumpError when no block was given. */
static VALUE
vm_invokeblock_i(struct rb_execution_context_struct *ec,
                 struct rb_control_frame_struct *reg_cfp,
                 struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());

    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        /* raises; does not return */
        rb_vm_localjump_error("no block given (yield)", Qnil, 0);
    }
    else {
        return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
    }
}
4682
#ifdef MJIT_HEADER
/* JIT flavor of the "method explorer": vm_sendish receives these as
 * function pointers (indirect calls are faster in JIT-compiled code). */
static const struct rb_callcache *
vm_search_method_wrap(const struct rb_control_frame_struct *reg_cfp, struct rb_call_data *cd, VALUE recv)
{
    return vm_search_method((VALUE)reg_cfp->iseq, cd, recv);
}

/* invokeblock needs no real method search: hand back a static, unmarkable
 * call cache whose call_ routes straight to vm_invokeblock_i. */
static const struct rb_callcache *
vm_search_invokeblock(const struct rb_control_frame_struct *reg_cfp, struct rb_call_data *cd, VALUE recv)
{
    static const struct rb_callcache cc = {
        .flags = T_IMEMO | (imemo_callcache << FL_USHIFT) | VM_CALLCACHE_UNMARKABLE,
        .klass = 0,
        .cme_ = 0,
        .call_ = vm_invokeblock_i,
        .aux_ = {0},
    };
    return &cc;
}

# define mexp_search_method vm_search_method_wrap
# define mexp_search_super vm_search_super_method
# define mexp_search_invokeblock vm_search_invokeblock
#else
/* VM flavor: an enum lets vm_sendish branch and inline each case
 * (see the comment inside vm_sendish). */
enum method_explorer_type {
    mexp_search_method,
    mexp_search_invokeblock,
    mexp_search_super,
};
#endif
4713
/* Common tail of the send-family instructions (send / opt_send_without_block /
 * invokesuper / invokeblock): resolve the callee via `method_explorer`,
 * invoke it, and either return the C-level result or (when a new Ruby
 * frame was pushed) resume execution of that frame. */
static
#ifndef MJIT_HEADER
inline
#endif
VALUE
vm_sendish(
    struct rb_execution_context_struct *ec,
    struct rb_control_frame_struct *reg_cfp,
    struct rb_call_data *cd,
    VALUE block_handler,
#ifdef MJIT_HEADER
    const struct rb_callcache *(*method_explorer)(const struct rb_control_frame_struct *cfp, struct rb_call_data *cd, VALUE recv)
#else
    enum method_explorer_type method_explorer
#endif
) {
    VALUE val = Qundef;
    const struct rb_callinfo *ci = cd->ci;
    const struct rb_callcache *cc;
    int argc = vm_ci_argc(ci);
    /* Receiver sits just below the arguments on the VM stack. */
    VALUE recv = TOPN(argc);
    struct rb_calling_info calling = {
        .block_handler = block_handler,
        .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
        .recv = recv,
        .argc = argc,
        .ci = ci,
    };

// The enum-based branch and inlining are faster in VM, but function pointers without inlining are faster in JIT.
#ifdef MJIT_HEADER
    calling.cc = cc = method_explorer(GET_CFP(), cd, recv);
    val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
#else
    switch (method_explorer) {
      case mexp_search_method:
        calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_super:
        calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
        calling.ci = cd->ci; // TODO: does it safe?
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_invokeblock:
        val = vm_invokeblock_i(ec, GET_CFP(), &calling);
        break;
    }
#endif

    if (val != Qundef) {
        return val; /* CFUNC normal return */
    }
    else {
        RESTORE_REGS(); /* CFP pushed in cc->call() */
    }

#ifdef MJIT_HEADER
    /* When calling ISeq which may catch an exception from JIT-ed
       code, we should not call mjit_exec directly to prevent the
       caller frame from being canceled. That's because the caller
       frame may have stack values in the local variables and the
       cancelling the caller frame will purge them. But directly
       calling mjit_exec is faster... */
    if (GET_ISEQ()->body->catch_except_p) {
        VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
        return vm_exec(ec, true);
    }
    else if ((val = mjit_exec(ec)) == Qundef) {
        VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
        return vm_exec(ec, false);
    }
    else {
        return val;
    }
#else
    /* When calling from VM, longjmp in the callee won't purge any
       JIT-ed caller frames. So it's safe to directly call
       mjit_exec. */
    return mjit_exec(ec);
#endif
}
4796
4797/* object.c */
4798VALUE rb_nil_to_s(VALUE);
4799VALUE rb_true_to_s(VALUE);
4800VALUE rb_false_to_s(VALUE);
4801/* numeric.c */
4802VALUE rb_int_to_s(int argc, VALUE *argv, VALUE x);
4803VALUE rb_fix_to_s(VALUE);
4804/* variable.c */
4805VALUE rb_mod_to_s(VALUE);
4806VALUE rb_mod_name(VALUE);
4807
/* Fast path for `objtostring` (string interpolation): when #to_s is known
 * to be the unredefined default for the receiver's type, produce the
 * string without a full method call. Qundef means "no fast path; do the
 * real dispatch". */
static VALUE
vm_objtostring(const rb_iseq_t *iseq, VALUE recv, CALL_DATA cd)
{
    const struct rb_callcache *cc = vm_search_method((VALUE)iseq, cd, recv);

    switch (TYPE(recv)) {
      case T_STRING:
        return recv;
      case T_SYMBOL:
        if (check_cfunc(vm_cc_cme(cc), rb_sym_to_s)) {
            // rb_sym_to_s() allocates a mutable string, but since we are only
            // going to use this string for interpolation, it's fine to use the
            // frozen string.
            return rb_sym2str(recv);
        }
        break;
      case T_MODULE:
      case T_CLASS:
        if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
            // rb_mod_to_s() allocates a mutable string, but since we are only
            // going to use this string for interpolation, it's fine to use the
            // frozen string.
            VALUE val = rb_mod_name(recv);
            if (val == Qnil) {
                /* Anonymous module/class: fall back to the full #to_s text. */
                val = rb_mod_to_s(recv);
            }
            return val;
        }
        break;
      case T_NIL:
        if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
            return rb_nil_to_s(recv);
        }
        break;
      case T_TRUE:
        if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
            return rb_true_to_s(recv);
        }
        break;
      case T_FALSE:
        if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
            return rb_false_to_s(recv);
        }
        break;
      case T_FIXNUM:
        if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
            return rb_fix_to_s(recv);
        }
        break;
    }
    return Qundef;
}
4860
4861static VALUE
4862vm_opt_str_freeze(VALUE str, int bop, ID id)
4863{
4864 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
4865 return str;
4866 }
4867 else {
4868 return Qundef;
4869 }
4870}
4871
4872/* this macro is mandatory to use OPTIMIZED_CMP. What a design! */
4873#define id_cmp idCmp
4874
/* Fast path for `[a, b, ...].max` over `num` stack values: scan with
 * OPTIMIZED_CMP while Array#max is unredefined, otherwise build a real
 * Array and dispatch (honoring refinements). */
static VALUE
vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
        if (num == 0) {
            return Qnil;
        }
        else {
            struct cmp_opt_data cmp_opt = { 0, 0 };
            VALUE result = *ptr;
            rb_snum_t i = num - 1;
            /* Compare the remaining num-1 elements against the running max. */
            while (i-- > 0) {
                const VALUE v = *++ptr;
                if (OPTIMIZED_CMP(v, result, cmp_opt) > 0) {
                    result = v;
                }
            }
            return result;
        }
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMax, 0, NULL, RB_NO_KEYWORDS);
    }
}
4899
/* Fast path for `[a, b, ...].min` over `num` stack values; mirror image
 * of vm_opt_newarray_max. */
static VALUE
vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
        if (num == 0) {
            return Qnil;
        }
        else {
            struct cmp_opt_data cmp_opt = { 0, 0 };
            VALUE result = *ptr;
            rb_snum_t i = num - 1;
            /* Compare the remaining num-1 elements against the running min. */
            while (i-- > 0) {
                const VALUE v = *++ptr;
                if (OPTIMIZED_CMP(v, result, cmp_opt) < 0) {
                    result = v;
                }
            }
            return result;
        }
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMin, 0, NULL, RB_NO_KEYWORDS);
    }
}
4924
4925#undef id_cmp
4926
4927#define IMEMO_CONST_CACHE_SHAREABLE IMEMO_FL_USER0
4928
4929// For MJIT inlining
/* Core validity test for an inline constant cache entry: the global
 * constant serial must match, the value must be usable from this Ractor
 * (shareable, or we are on the main Ractor), and any recorded CREF must
 * match the current lexical scope. */
static inline bool
vm_inlined_ic_hit_p(VALUE flags, VALUE value, const rb_cref_t *ic_cref, rb_serial_t ic_serial, const VALUE *reg_ep)
{
    if (ic_serial == GET_GLOBAL_CONSTANT_STATE() &&
        ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p())) {

        VM_ASSERT((flags & IMEMO_CONST_CACHE_SHAREABLE) ? rb_ractor_shareable_p(value) : true);

        return (ic_cref == NULL || // no need to check CREF
                ic_cref == vm_get_cref(reg_ep));
    }
    return false;
}
4943
/* Whether the given inline-cache entry is still valid for the current
 * environment pointer (thin wrapper over vm_inlined_ic_hit_p). */
static bool
vm_ic_hit_p(const struct iseq_inline_constant_cache_entry *ice, const VALUE *reg_ep)
{
    VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
    return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, GET_IC_SERIAL(ice), reg_ep);
}
4950
4951// YJIT needs this function to never allocate and never raise
/* Exported inline-cache validity check (empty cache counts as a miss).
 * Kept allocation- and exception-free for YJIT (see comment above). */
bool
rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
{
    return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
}
4957
/* Refill an inline constant cache with `val`: allocate a fresh constcache
 * imemo, record the current CREF and constant serial (minus pending
 * const_missing invalidations), flag Ractor-shareable values, and notify
 * YJIT of the change. */
static void
vm_ic_update(const rb_iseq_t *iseq, IC ic, VALUE val, const VALUE *reg_ep)
{

    struct iseq_inline_constant_cache_entry *ice = (struct iseq_inline_constant_cache_entry *)rb_imemo_new(imemo_constcache, 0, 0, 0, 0);
    RB_OBJ_WRITE(ice, &ice->value, val);
    ice->ic_cref = vm_get_const_key_cref(reg_ep);
    SET_IC_SERIAL(ice, GET_GLOBAL_CONSTANT_STATE() - ruby_vm_const_missing_count);
    if (rb_ractor_shareable_p(val)) ice->flags |= IMEMO_CONST_CACHE_SHAREABLE;
    ruby_vm_const_missing_count = 0;
    RB_OBJ_WRITE(iseq, &ic->entry, ice);
#ifndef MJIT_HEADER
    // MJIT and YJIT can't be on at the same time, so there is no need to
    // notify YJIT about changes to the IC when running inside MJIT code.
    rb_yjit_constant_ic_update(iseq, ic);
#endif
}
4975
4976static VALUE
4977vm_once_dispatch(rb_execution_context_t *ec, ISEQ iseq, ISE is)
4978{
4979 rb_thread_t *th = rb_ec_thread_ptr(ec);
4980 rb_thread_t *const RUNNING_THREAD_ONCE_DONE = (rb_thread_t *)(0x1);
4981
4982 again:
4983 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
4984 return is->once.value;
4985 }
4986 else if (is->once.running_thread == NULL) {
4987 VALUE val;
4988 is->once.running_thread = th;
4989 val = rb_ensure(vm_once_exec, (VALUE)iseq, vm_once_clear, (VALUE)is);
4990 RB_OBJ_WRITE(ec->cfp->iseq, &is->once.value, val);
4991 /* is->once.running_thread is cleared by vm_once_clear() */
4992 is->once.running_thread = RUNNING_THREAD_ONCE_DONE; /* success */
4993 return val;
4994 }
4995 else if (is->once.running_thread == th) {
4996 /* recursive once */
4997 return vm_once_exec((VALUE)iseq);
4998 }
4999 else {
5000 /* waiting for finish */
5001 RUBY_VM_CHECK_INTS(ec);
5003 goto again;
5004 }
5005}
5006
/* Implements `opt_case_dispatch`: constant-time `case` lookup via the
 * compiled CDHASH. Returns the branch offset for `key`, `else_offset`
 * for a miss, or 0 when the fast path is unusable (unsupported key type
 * or a redefined #===). */
static OFFSET
vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
{
    switch (OBJ_BUILTIN_TYPE(key)) {
      case -1: /* special constants (Fixnum, nil, true, false, ...) */
      case T_FLOAT:
      case T_SYMBOL:
      case T_BIGNUM:
      case T_STRING:
        if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
                                   SYMBOL_REDEFINED_OP_FLAG |
                                   INTEGER_REDEFINED_OP_FLAG |
                                   FLOAT_REDEFINED_OP_FLAG |
                                   NIL_REDEFINED_OP_FLAG |
                                   TRUE_REDEFINED_OP_FLAG |
                                   FALSE_REDEFINED_OP_FLAG |
                                   STRING_REDEFINED_OP_FLAG)) {
            st_data_t val;
            if (RB_FLOAT_TYPE_P(key)) {
                /* Integral floats hash like the matching Integer (1.0 == 1). */
                double kval = RFLOAT_VALUE(key);
                if (!isinf(kval) && modf(kval, &kval) == 0.0) {
                    key = FIXABLE(kval) ? LONG2FIX((long)kval) : rb_dbl2big(kval);
                }
            }
            if (rb_hash_stlike_lookup(hash, key, &val)) {
                return FIX2LONG((VALUE)val);
            }
            else {
                return else_offset;
            }
        }
    }
    return 0;
}
5041
/* Abort on a VM stack pointer / base pointer mismatch. Development
 * builds attach a disassembly of the offending iseq to aid debugging;
 * release builds just rb_bug. */
NORETURN(static void
         vm_stack_consistency_error(const rb_execution_context_t *ec,
                                    const rb_control_frame_t *,
                                    const VALUE *));
static void
vm_stack_consistency_error(const rb_execution_context_t *ec,
                           const rb_control_frame_t *cfp,
                           const VALUE *bp)
{
    const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
    const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
    static const char stack_consistency_error[] =
        "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
#if defined RUBY_DEVEL
    VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
    rb_str_cat_cstr(mesg, "\n");
    rb_str_append(mesg, rb_iseq_disasm(cfp->iseq));
    rb_exc_fatal(rb_exc_new3(rb_eFatal, mesg));
#else
    rb_bug(stack_consistency_error, nsp, nbp);
#endif
}
5064
/* Fast path for `opt_plus`: handle Fixnum/Flonum/Float/String/Array `+`
 * while the corresponding `+` is unredefined; Qundef => full dispatch.
 * Special-constant operands must be rejected before RBASIC_CLASS. */
static VALUE
vm_opt_plus(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_plus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj)  == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             RBASIC_CLASS(obj)  == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
        return rb_str_opt_plus(recv, obj);
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             RBASIC_CLASS(obj)  == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_plus(recv, obj);
    }
    else {
        return Qundef;
    }
}
5098
/* Fast path for `opt_minus` (Fixnum/Flonum/Float); Qundef => full dispatch. */
static VALUE
vm_opt_minus(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_minus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj)  == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
5122
/* Fast path for `opt_mult` (Fixnum/Flonum/Float); Qundef => full dispatch. */
static VALUE
vm_opt_mult(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_mul_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj)  == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
5146
/* Fast path for `opt_div`; integer division by zero bails to the full
 * call so the proper ZeroDivisionError is raised. Qundef => dispatch. */
static VALUE
vm_opt_div(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj)  == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else {
        return Qundef;
    }
}
5170
/* Fast path for `opt_mod`; integer modulo by zero bails to the full call
 * so the proper ZeroDivisionError is raised. Qundef => dispatch. */
static VALUE
vm_opt_mod(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj)  == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
    }
    else {
        return Qundef;
    }
}
5194
/* Exported wrapper around vm_opt_mod for use outside this translation
 * unit (presumably the JIT — confirm against callers). */
VALUE
rb_vm_opt_mod(VALUE recv, VALUE obj)
{
    return vm_opt_mod(recv, obj);
}
5200
/* Fast path for `opt_neq`: when #!= is still the default negation of #==,
 * evaluate the equality fast path and invert it. Qundef => dispatch. */
static VALUE
vm_opt_neq(const rb_iseq_t *iseq, CALL_DATA cd, CALL_DATA cd_eq, VALUE recv, VALUE obj)
{
    if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
        VALUE val = opt_equality(iseq, recv, obj, cd_eq);

        if (val != Qundef) {
            return RTEST(val) ? Qfalse : Qtrue;
        }
    }

    return Qundef;
}
5214
/* Fast path for `opt_lt`. Fixnums compare directly as tagged integers;
 * heap Floats must reject NaN first (CHECK_CMP_NAN bails to full
 * dispatch so Float#< raises/behaves as defined). Qundef => dispatch. */
static VALUE
vm_opt_lt(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv < (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj)  == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
        return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
5239
/* Fast path for `opt_le`; see vm_opt_lt for the structure and NaN note. */
static VALUE
vm_opt_le(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv <= (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj)  == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
        return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
5264
5265static VALUE
5266vm_opt_gt(VALUE recv, VALUE obj)
5267{
5268 if (FIXNUM_2_P(recv, obj) &&
5269 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
5270 return RBOOL((SIGNED_VALUE)recv > (SIGNED_VALUE)obj);
5271 }
5272 else if (FLONUM_2_P(recv, obj) &&
5273 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
5274 return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
5275 }
5276 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5277 return Qundef;
5278 }
5279 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5280 RBASIC_CLASS(obj) == rb_cFloat &&
5281 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
5282 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
5283 return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
5284 }
5285 else {
5286 return Qundef;
5287 }
5288}
5289
5290static VALUE
5291vm_opt_ge(VALUE recv, VALUE obj)
5292{
5293 if (FIXNUM_2_P(recv, obj) &&
5294 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
5295 return RBOOL((SIGNED_VALUE)recv >= (SIGNED_VALUE)obj);
5296 }
5297 else if (FLONUM_2_P(recv, obj) &&
5298 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
5299 return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
5300 }
5301 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5302 return Qundef;
5303 }
5304 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5305 RBASIC_CLASS(obj) == rb_cFloat &&
5306 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
5307 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
5308 return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
5309 }
5310 else {
5311 return Qundef;
5312 }
5313}
5314
5315
5316static VALUE
5317vm_opt_ltlt(VALUE recv, VALUE obj)
5318{
5319 if (SPECIAL_CONST_P(recv)) {
5320 return Qundef;
5321 }
5322 else if (RBASIC_CLASS(recv) == rb_cString &&
5323 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
5324 return rb_str_concat(recv, obj);
5325 }
5326 else if (RBASIC_CLASS(recv) == rb_cArray &&
5327 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
5328 return rb_ary_push(recv, obj);
5329 }
5330 else {
5331 return Qundef;
5332 }
5333}
5334
5335static VALUE
5336vm_opt_and(VALUE recv, VALUE obj)
5337{
5338 if (FIXNUM_2_P(recv, obj) &&
5339 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
5340 return (recv & obj) | 1;
5341 }
5342 else {
5343 return Qundef;
5344 }
5345}
5346
5347static VALUE
5348vm_opt_or(VALUE recv, VALUE obj)
5349{
5350 if (FIXNUM_2_P(recv, obj) &&
5351 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
5352 return recv | obj;
5353 }
5354 else {
5355 return Qundef;
5356 }
5357}
5358
5359static VALUE
5360vm_opt_aref(VALUE recv, VALUE obj)
5361{
5362 if (SPECIAL_CONST_P(recv)) {
5363 if (FIXNUM_2_P(recv, obj) &&
5364 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
5365 return rb_fix_aref(recv, obj);
5366 }
5367 return Qundef;
5368 }
5369 else if (RBASIC_CLASS(recv) == rb_cArray &&
5370 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
5371 if (FIXNUM_P(obj)) {
5372 return rb_ary_entry_internal(recv, FIX2LONG(obj));
5373 }
5374 else {
5375 return rb_ary_aref1(recv, obj);
5376 }
5377 }
5378 else if (RBASIC_CLASS(recv) == rb_cHash &&
5379 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
5380 return rb_hash_aref(recv, obj);
5381 }
5382 else {
5383 return Qundef;
5384 }
5385}
5386
5387static VALUE
5388vm_opt_aset(VALUE recv, VALUE obj, VALUE set)
5389{
5390 if (SPECIAL_CONST_P(recv)) {
5391 return Qundef;
5392 }
5393 else if (RBASIC_CLASS(recv) == rb_cArray &&
5394 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
5395 FIXNUM_P(obj)) {
5396 rb_ary_store(recv, FIX2LONG(obj), set);
5397 return set;
5398 }
5399 else if (RBASIC_CLASS(recv) == rb_cHash &&
5400 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
5401 rb_hash_aset(recv, obj, set);
5402 return set;
5403 }
5404 else {
5405 return Qundef;
5406 }
5407}
5408
5409static VALUE
5410vm_opt_aref_with(VALUE recv, VALUE key)
5411{
5412 if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
5413 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
5414 rb_hash_compare_by_id_p(recv) == Qfalse) {
5415 return rb_hash_aref(recv, key);
5416 }
5417 else {
5418 return Qundef;
5419 }
5420}
5421
5422static VALUE
5423vm_opt_aset_with(VALUE recv, VALUE key, VALUE val)
5424{
5425 if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
5426 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
5427 rb_hash_compare_by_id_p(recv) == Qfalse) {
5428 return rb_hash_aset(recv, key, val);
5429 }
5430 else {
5431 return Qundef;
5432 }
5433}
5434
5435static VALUE
5436vm_opt_length(VALUE recv, int bop)
5437{
5438 if (SPECIAL_CONST_P(recv)) {
5439 return Qundef;
5440 }
5441 else if (RBASIC_CLASS(recv) == rb_cString &&
5442 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
5443 if (bop == BOP_EMPTY_P) {
5444 return LONG2NUM(RSTRING_LEN(recv));
5445 }
5446 else {
5447 return rb_str_length(recv);
5448 }
5449 }
5450 else if (RBASIC_CLASS(recv) == rb_cArray &&
5451 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
5452 return LONG2NUM(RARRAY_LEN(recv));
5453 }
5454 else if (RBASIC_CLASS(recv) == rb_cHash &&
5455 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
5456 return INT2FIX(RHASH_SIZE(recv));
5457 }
5458 else {
5459 return Qundef;
5460 }
5461}
5462
5463static VALUE
5464vm_opt_empty_p(VALUE recv)
5465{
5466 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
5467 case Qundef: return Qundef;
5468 case INT2FIX(0): return Qtrue;
5469 default: return Qfalse;
5470 }
5471}
5472
5473VALUE rb_false(VALUE obj);
5474
5475static VALUE
5476vm_opt_nil_p(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
5477{
5478 if (NIL_P(recv) &&
5479 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
5480 return Qtrue;
5481 }
5482 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
5483 return Qfalse;
5484 }
5485 else {
5486 return Qundef;
5487 }
5488}
5489
5490static VALUE
5491fix_succ(VALUE x)
5492{
5493 switch (x) {
5494 case ~0UL:
5495 /* 0xFFFF_FFFF == INT2FIX(-1)
5496 * `-1.succ` is of course 0. */
5497 return INT2FIX(0);
5498 case RSHIFT(~0UL, 1):
5499 /* 0x7FFF_FFFF == LONG2FIX(0x3FFF_FFFF)
5500 * 0x3FFF_FFFF + 1 == 0x4000_0000, which is a Bignum. */
5501 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
5502 default:
5503 /* LONG2FIX(FIX2LONG(x)+FIX2LONG(y))
5504 * == ((lx*2+1)/2 + (ly*2+1)/2)*2+1
5505 * == lx*2 + ly*2 + 1
5506 * == (lx*2+1) + (ly*2+1) - 1
5507 * == x + y - 1
5508 *
5509 * Here, if we put y := INT2FIX(1):
5510 *
5511 * == x + INT2FIX(1) - 1
5512 * == x + 2 .
5513 */
5514 return x + 2;
5515 }
5516}
5517
5518static VALUE
5519vm_opt_succ(VALUE recv)
5520{
5521 if (FIXNUM_P(recv) &&
5522 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
5523 return fix_succ(recv);
5524 }
5525 else if (SPECIAL_CONST_P(recv)) {
5526 return Qundef;
5527 }
5528 else if (RBASIC_CLASS(recv) == rb_cString &&
5529 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
5530 return rb_str_succ(recv);
5531 }
5532 else {
5533 return Qundef;
5534 }
5535}
5536
5537static VALUE
5538vm_opt_not(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
5539{
5540 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
5541 return RTEST(recv) ? Qfalse : Qtrue;
5542 }
5543 else {
5544 return Qundef;
5545 }
5546}
5547
5548static VALUE
5549vm_opt_regexpmatch2(VALUE recv, VALUE obj)
5550{
5551 if (SPECIAL_CONST_P(recv)) {
5552 return Qundef;
5553 }
5554 else if (RBASIC_CLASS(recv) == rb_cString &&
5555 CLASS_OF(obj) == rb_cRegexp &&
5556 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
5557 return rb_reg_match(obj, recv);
5558 }
5559 else if (RBASIC_CLASS(recv) == rb_cRegexp &&
5560 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
5561 return rb_reg_match(recv, obj);
5562 }
5563 else {
5564 return Qundef;
5565 }
5566}
5567
5568rb_event_flag_t rb_iseq_event_flags(const rb_iseq_t *iseq, size_t pos);
5569
5570NOINLINE(static void vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp));
5571
/* Fire a single trace event (`target_event`, which must be one of the
 * bits set in `pc_events`) against the global (ractor-wide) hook list
 * and, when given, a local (iseq- or bmethod-specific) hook list.
 * `val` is the event payload (e.g. the return value for *_RETURN). */
static inline void
vm_trace_hook(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc,
              rb_event_flag_t pc_events, rb_event_flag_t target_event,
              rb_hook_list_t *global_hooks, rb_hook_list_t *local_hooks, VALUE val)
{
    rb_event_flag_t event = pc_events & target_event;
    VALUE self = GET_SELF();

    /* exactly one event bit may be dispatched per call */
    VM_ASSERT(rb_popcount64((uint64_t)event) == 1);

    if (event & global_hooks->events) {
        /* increment PC because source line is calculated with PC-1 */
        reg_cfp->pc++;
        vm_dtrace(event, ec);
        rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
        reg_cfp->pc--;
    }

    /* local hooks (if any) fire after the global ones, with the same
     * PC adjustment */
    if (local_hooks != NULL) {
        if (event & local_hooks->events) {
            /* increment PC because source line is calculated with PC-1 */
            reg_cfp->pc++;
            rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
            reg_cfp->pc--;
        }
    }
}
5599
5600// Return true if given cc has cfunc which is NOT handled by opt_send_without_block.
5601bool
5602rb_vm_opt_cfunc_p(CALL_CACHE cc, int insn)
5603{
5604 switch (insn) {
5605 case BIN(opt_eq):
5606 return check_cfunc(vm_cc_cme(cc), rb_obj_equal);
5607 case BIN(opt_nil_p):
5608 return check_cfunc(vm_cc_cme(cc), rb_false);
5609 case BIN(opt_not):
5610 return check_cfunc(vm_cc_cme(cc), rb_obj_not);
5611 default:
5612 return false;
5613 }
5614}
5615
/* Fire `target_event` only when it is both attached to this PC and
 * enabled by some hook list (global or iseq-local). */
#define VM_TRACE_HOOK(target_event, val) do { \
    if ((pc_events & (target_event)) & enabled_flags) { \
        vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks, (val)); \
    } \
} while (0)

/* Dispatch every trace event attached to the current instruction of
 * the current control frame.  Bails out early when no hooks are
 * enabled, when nothing at this PC is traced, or when a trace handler
 * is already running (ec->trace_arg != NULL). */
static void
vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp)
{
    const VALUE *pc = reg_cfp->pc;
    rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
    rb_event_flag_t global_events = enabled_flags;

    if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
        /* no global hooks and no targeted (local) hooks anywhere */
        return;
    }
    else {
        const rb_iseq_t *iseq = reg_cfp->iseq;
        size_t pos = pc - iseq->body->iseq_encoded;
        rb_event_flag_t pc_events = rb_iseq_event_flags(iseq, pos);
        rb_hook_list_t *local_hooks = iseq->aux.exec.local_hooks;
        rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
        /* hooks attached to a bmethod (method defined from a proc), if any */
        rb_hook_list_t *bmethod_local_hooks = NULL;
        rb_event_flag_t bmethod_local_events = 0;
        bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
        enabled_flags |= iseq_local_events;

        VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);

        if (bmethod_frame) {
            const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
            VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
            bmethod_local_hooks = me->def->body.bmethod.hooks;
            if (bmethod_local_hooks) {
                bmethod_local_events = bmethod_local_hooks->events;
            }
        }


        if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
#if 0
            /* disable trace */
            /* TODO: incomplete */
            rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
#else
            /* do not disable trace because of performance problem
             * (re-enable overhead)
             */
#endif
            return;
        }
        else if (ec->trace_arg != NULL) {
            /* already tracing */
            return;
        }
        else {
            rb_hook_list_t *global_hooks = rb_ec_ractor_hooks(ec);
            /* Note, not considering iseq local events here since the same
             * iseq could be used in multiple bmethods. */
            rb_event_flag_t bmethod_events = global_events | bmethod_local_events;

            if (0) {
                ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
                                  (int)pos,
                                  (int)pc_events,
                                  RSTRING_PTR(rb_iseq_path(iseq)),
                                  (int)rb_iseq_line_no(iseq, pos),
                                  RSTRING_PTR(rb_iseq_label(iseq)));
            }
            VM_ASSERT(reg_cfp->pc == pc);
            VM_ASSERT(pc_events != 0);

            /* check traces */
            if ((pc_events & RUBY_EVENT_B_CALL) && bmethod_frame && (bmethod_events & RUBY_EVENT_CALL)) {
                /* b_call instruction running as a method. Fire call event. */
                vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_CALL, RUBY_EVENT_CALL, global_hooks, bmethod_local_hooks, Qundef);
            }
            VM_TRACE_HOOK(RUBY_EVENT_LINE, Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
            /* END/RETURN/B_RETURN events carry the value about to be returned */
            VM_TRACE_HOOK(RUBY_EVENT_END | RUBY_EVENT_RETURN | RUBY_EVENT_B_RETURN, TOPN(0));
            if ((pc_events & RUBY_EVENT_B_RETURN) && bmethod_frame && (bmethod_events & RUBY_EVENT_RETURN)) {
                /* b_return instruction running as a method. Fire return event. */
                vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_RETURN, RUBY_EVENT_RETURN, global_hooks, bmethod_local_hooks, TOPN(0));
            }
        }
    }
}
#undef VM_TRACE_HOOK
5706
5707#if VM_CHECK_MODE > 0
5708NORETURN( NOINLINE( COLDFUNC
5709void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));
5710
5711void
5712Init_vm_stack_canary(void)
5713{
5714 /* This has to be called _after_ our PRNG is properly set up. */
5715 int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
5716 vm_stack_canary |= 0x01; // valid VALUE (Fixnum)
5717
5718 vm_stack_canary_was_born = true;
5719 VM_ASSERT(n == 0);
5720}
5721
5722#ifndef MJIT_HEADER
5723MJIT_FUNC_EXPORTED void
5724rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
5725{
5726 /* Because a method has already been called, why not call
5727 * another one. */
5728 const char *insn = rb_insns_name(i);
5729 VALUE inspection = rb_inspect(c);
5730 const char *str = StringValueCStr(inspection);
5731
5732 rb_bug("dead canary found at %s: %s", insn, str);
5733}
5734#endif
5735
5736#else
5737void Init_vm_stack_canary(void) { /* nothing to do */ }
5738#endif
5739
5740
5741/* a part of the following code is generated by this ruby script:
5742
574316.times{|i|
5744 typedef_args = (0...i).map{|j| "VALUE v#{j+1}"}.join(", ")
5745 typedef_args.prepend(", ") if i != 0
5746 call_args = (0...i).map{|j| "argv[#{j}]"}.join(", ")
5747 call_args.prepend(", ") if i != 0
5748 puts %Q{
5749static VALUE
5750builtin_invoker#{i}(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5751{
5752 typedef VALUE (*rb_invoke_funcptr#{i}_t)(rb_execution_context_t *ec, VALUE self#{typedef_args});
5753 return (*(rb_invoke_funcptr#{i}_t)funcptr)(ec, self#{call_args});
5754}}
5755}
5756
5757puts
5758puts "static VALUE (* const cfunc_invokers[])(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr) = {"
575916.times{|i|
5760 puts " builtin_invoker#{i},"
5761}
5762puts "};"
5763*/
5764
5765static VALUE
5766builtin_invoker0(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5767{
5768 typedef VALUE (*rb_invoke_funcptr0_t)(rb_execution_context_t *ec, VALUE self);
5769 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
5770}
5771
5772static VALUE
5773builtin_invoker1(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5774{
5775 typedef VALUE (*rb_invoke_funcptr1_t)(rb_execution_context_t *ec, VALUE self, VALUE v1);
5776 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
5777}
5778
5779static VALUE
5780builtin_invoker2(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5781{
5782 typedef VALUE (*rb_invoke_funcptr2_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2);
5783 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
5784}
5785
5786static VALUE
5787builtin_invoker3(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5788{
5789 typedef VALUE (*rb_invoke_funcptr3_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3);
5790 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
5791}
5792
5793static VALUE
5794builtin_invoker4(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5795{
5796 typedef VALUE (*rb_invoke_funcptr4_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4);
5797 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
5798}
5799
5800static VALUE
5801builtin_invoker5(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5802{
5803 typedef VALUE (*rb_invoke_funcptr5_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5);
5804 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
5805}
5806
5807static VALUE
5808builtin_invoker6(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5809{
5810 typedef VALUE (*rb_invoke_funcptr6_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6);
5811 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
5812}
5813
5814static VALUE
5815builtin_invoker7(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5816{
5817 typedef VALUE (*rb_invoke_funcptr7_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7);
5818 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
5819}
5820
5821static VALUE
5822builtin_invoker8(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5823{
5824 typedef VALUE (*rb_invoke_funcptr8_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8);
5825 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
5826}
5827
5828static VALUE
5829builtin_invoker9(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5830{
5831 typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
5832 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
5833}
5834
5835static VALUE
5836builtin_invoker10(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5837{
5838 typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
5839 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
5840}
5841
5842static VALUE
5843builtin_invoker11(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5844{
5845 typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
5846 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
5847}
5848
5849static VALUE
5850builtin_invoker12(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5851{
5852 typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
5853 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
5854}
5855
5856static VALUE
5857builtin_invoker13(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5858{
5859 typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
5860 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
5861}
5862
5863static VALUE
5864builtin_invoker14(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5865{
5866 typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
5867 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
5868}
5869
5870static VALUE
5871builtin_invoker15(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
5872{
5873 typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
5874 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
5875}
5876
5877typedef VALUE (*builtin_invoker)(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr);
5878
5879static builtin_invoker
5880lookup_builtin_invoker(int argc)
5881{
5882 static const builtin_invoker invokers[] = {
5883 builtin_invoker0,
5884 builtin_invoker1,
5885 builtin_invoker2,
5886 builtin_invoker3,
5887 builtin_invoker4,
5888 builtin_invoker5,
5889 builtin_invoker6,
5890 builtin_invoker7,
5891 builtin_invoker8,
5892 builtin_invoker9,
5893 builtin_invoker10,
5894 builtin_invoker11,
5895 builtin_invoker12,
5896 builtin_invoker13,
5897 builtin_invoker14,
5898 builtin_invoker15,
5899 };
5900
5901 return invokers[argc];
5902}
5903
/* Call the builtin function `bf` with `argv` as its argument vector,
 * dispatching through the arity-matched builtin_invokerN trampoline.
 * When the enclosing iseq was annotated with `Primitive.attr! 'inline'`
 * (builtin_inline_p), a stack canary is planted and checked to verify
 * the assumption that the builtin leaves the VM stack untouched. */
static inline VALUE
invoke_bf(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const struct rb_builtin_function* bf, const VALUE *argv)
{
    const bool canary_p = reg_cfp->iseq->body->builtin_inline_p; // Verify an assumption of `Primitive.attr! 'inline'`
    SETUP_CANARY(canary_p);
    VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, (rb_insn_func_t)bf->func_ptr);
    CHECK_CANARY(canary_p, BIN(invokebuiltin));
    return ret;
}
5913
/* Thin wrapper over invoke_bf: invoke builtin `bf` with an explicit
 * argument vector `argv`. */
static VALUE
vm_invoke_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function* bf, const VALUE *argv)
{
    return invoke_bf(ec, cfp, bf, argv);
}
5919
5920static VALUE
5921vm_invoke_builtin_delegate(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function *bf, unsigned int start_index)
5922{
5923 if (0) { // debug print
5924 fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
5925 for (int i=0; i<bf->argc; i++) {
5926 ruby_debug_printf(":%s ", rb_id2name(cfp->iseq->body->local_table[i+start_index]));
5927 }
5928 ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc, bf->func_ptr);
5929 }
5930
5931 if (bf->argc == 0) {
5932 return invoke_bf(ec, cfp, bf, NULL);
5933 }
5934 else {
5935 const VALUE *argv = cfp->ep - cfp->iseq->body->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
5936 return invoke_bf(ec, cfp, bf, argv);
5937 }
5938}
5939
5940// for __builtin_inline!()
5941
5942VALUE
5943rb_vm_lvar_exposed(rb_execution_context_t *ec, int index)
5944{
5945 const rb_control_frame_t *cfp = ec->cfp;
5946 return cfp->ep[index];
5947}
#define RUBY_ASSERT(expr)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
Definition: assert.h:177
#define RUBY_EVENT_END
Encountered an end of a class clause.
Definition: event.h:36
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
Definition: event.h:39
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
Definition: event.h:52
#define RUBY_EVENT_CLASS
Encountered a new class.
Definition: event.h:35
#define RUBY_EVENT_LINE
Encountered a new line.
Definition: event.h:34
#define RUBY_EVENT_RETURN
Encountered a return statement.
Definition: event.h:38
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
Definition: event.h:40
#define RUBY_EVENT_B_CALL
Encountered a yield statement.
Definition: event.h:51
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
Definition: event.h:37
static VALUE RB_OBJ_FROZEN_RAW(VALUE obj)
This is an implementation detail of RB_OBJ_FROZEN().
Definition: fl_type.h:912
VALUE rb_class_inherited(VALUE, VALUE)
Calls Class::inherited.
Definition: class.c:828
#define TYPE(_)
Old name of rb_type.
Definition: value_type.h:107
#define FL_SINGLETON
Old name of RUBY_FL_SINGLETON.
Definition: fl_type.h:58
#define FL_EXIVAR
Old name of RUBY_FL_EXIVAR.
Definition: fl_type.h:67
#define REALLOC_N
Old name of RB_REALLOC_N.
Definition: memory.h:397
#define ALLOC
Old name of RB_ALLOC.
Definition: memory.h:394
#define RFLOAT_VALUE
Old name of rb_float_value.
Definition: double.h:28
#define T_STRING
Old name of RUBY_T_STRING.
Definition: value_type.h:78
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
Definition: long.h:48
#define T_NIL
Old name of RUBY_T_NIL.
Definition: value_type.h:72
#define T_FLOAT
Old name of RUBY_T_FLOAT.
Definition: value_type.h:64
#define T_IMEMO
Old name of RUBY_T_IMEMO.
Definition: value_type.h:67
#define ID2SYM
Old name of RB_ID2SYM.
Definition: symbol.h:44
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
Definition: value_type.h:57
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define T_STRUCT
Old name of RUBY_T_STRUCT.
Definition: value_type.h:79
#define T_FIXNUM
Old name of RUBY_T_FIXNUM.
Definition: value_type.h:63
#define SYM2ID
Old name of RB_SYM2ID.
Definition: symbol.h:45
#define CLASS_OF
Old name of rb_class_of.
Definition: globals.h:203
#define rb_ary_new4
Old name of rb_ary_new_from_values.
Definition: array.h:653
#define FIXABLE
Old name of RB_FIXABLE.
Definition: fixnum.h:25
#define LONG2FIX
Old name of RB_INT2FIX.
Definition: long.h:49
#define FIX2INT
Old name of RB_FIX2INT.
Definition: int.h:41
#define T_MODULE
Old name of RUBY_T_MODULE.
Definition: value_type.h:70
#define STATIC_SYM_P
Old name of RB_STATIC_SYM_P.
#define ASSUME
Old name of RBIMPL_ASSUME.
Definition: assume.h:29
#define FIX2ULONG
Old name of RB_FIX2ULONG.
Definition: long.h:47
#define T_TRUE
Old name of RUBY_T_TRUE.
Definition: value_type.h:81
#define T_ICLASS
Old name of RUBY_T_ICLASS.
Definition: value_type.h:66
#define T_HASH
Old name of RUBY_T_HASH.
Definition: value_type.h:65
#define ALLOC_N
Old name of RB_ALLOC_N.
Definition: memory.h:393
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
Definition: fl_type.h:140
#define rb_ary_new3
Old name of rb_ary_new_from_args.
Definition: array.h:652
#define LONG2NUM
Old name of RB_LONG2NUM.
Definition: long.h:50
#define rb_exc_new3
Old name of rb_exc_new_str.
Definition: error.h:38
#define T_FALSE
Old name of RUBY_T_FALSE.
Definition: value_type.h:61
#define Qtrue
Old name of RUBY_Qtrue.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
Definition: long.h:46
#define T_ARRAY
Old name of RUBY_T_ARRAY.
Definition: value_type.h:56
#define T_OBJECT
Old name of RUBY_T_OBJECT.
Definition: value_type.h:75
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
Definition: value_type.h:80
#define DBL2NUM
Old name of rb_float_new.
Definition: double.h:29
#define T_CLASS
Old name of RUBY_T_CLASS.
Definition: value_type.h:58
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
Definition: value_type.h:85
#define FL_TEST
Old name of RB_FL_TEST.
Definition: fl_type.h:139
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define FL_USHIFT
Old name of RUBY_FL_USHIFT.
Definition: fl_type.h:70
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
Definition: fl_type.h:138
#define SYMBOL_P
Old name of RB_SYMBOL_P.
Definition: value_type.h:88
void rb_notimplement(void)
Definition: error.c:3064
void rb_raise(VALUE exc, const char *fmt,...)
Exception entry point.
Definition: error.c:3021
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
Definition: eval.c:671
void rb_bug(const char *fmt,...)
Interpreter panic switch.
Definition: error.c:802
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
Definition: eval.c:684
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports always regardless of runtime -W flag.
Definition: error.c:418
VALUE rb_ensure(VALUE(*b_proc)(VALUE), VALUE data1, VALUE(*e_proc)(VALUE), VALUE data2)
An equivalent to ensure clause.
Definition: eval.c:979
VALUE rb_cArray
Array class.
Definition: array.c:40
VALUE rb_cRegexp
Regexp class.
Definition: re.c:2370
VALUE rb_cHash
Hash class.
Definition: hash.c:92
VALUE rb_class_real(VALUE klass)
Finds a "real" class.
Definition: object.c:178
VALUE rb_cFloat
Float class.
Definition: numeric.c:191
VALUE rb_cProc
Proc class.
Definition: proc.c:52
VALUE rb_cString
String class.
Definition: string.c:80
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
Definition: rgengc.h:232
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
Definition: rgengc.h:220
VALUE rb_ary_concat(VALUE lhs, VALUE rhs)
Destructively appends the contents of latter into the end of former.
Definition: array.c:4790
VALUE rb_ary_dup(VALUE ary)
Duplicates an array.
Definition: array.c:2663
VALUE rb_ary_plus(VALUE lhs, VALUE rhs)
Creates a new array, concatenating the former to the latter.
Definition: array.c:4731
VALUE rb_check_array_type(VALUE obj)
Try converting an object to its array representation using its to_ary method, if any.
Definition: array.c:989
VALUE rb_ary_new(void)
Allocates a new, empty array.
Definition: array.c:750
VALUE rb_ary_push(VALUE ary, VALUE elem)
Special case of rb_ary_cat() that it adds only one element.
Definition: array.c:1308
VALUE rb_ary_entry(VALUE ary, long off)
Queries an element of an array.
Definition: array.c:1679
void rb_ary_store(VALUE ary, long key, VALUE val)
Destructively stores the passed value to the passed array's passed index.
Definition: array.c:1148
VALUE rb_dbl2big(double d)
Converts a C's double into a bignum.
Definition: bignum.c:5254
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
Definition: error.h:35
#define rb_check_frozen
Just another name of rb_check_frozen.
Definition: error.h:278
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
Definition: error.h:294
#define rb_check_frozen_internal(obj)
Definition: error.h:261
int rb_during_gc(void)
Queries if the GC is busy.
Definition: gc.c:10300
VALUE rb_hash_aref(VALUE hash, VALUE key)
Queries the given key in the given hash table.
Definition: hash.c:2082
VALUE rb_hash_aset(VALUE hash, VALUE key, VALUE val)
Inserts or replaces ("upsert"s) the objects into the given hash table.
Definition: hash.c:2903
VALUE rb_hash_lookup(VALUE hash, VALUE key)
Identical to rb_hash_aref(), except it always returns RUBY_Qnil for misshits.
Definition: hash.c:2108
VALUE rb_hash_dup(VALUE hash)
Duplicates a hash.
Definition: hash.c:1585
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
Definition: proc.c:1027
VALUE rb_reg_last_match(VALUE md)
This just returns the argument, stringified.
Definition: re.c:1818
VALUE rb_reg_match(VALUE re, VALUE str)
This is the match operator.
Definition: re.c:3260
VALUE rb_reg_nth_match(int n, VALUE md)
Queries the nth captured substring.
Definition: re.c:1793
VALUE rb_reg_match_post(VALUE md)
The portion of the original string after the given match.
Definition: re.c:1862
VALUE rb_reg_match_pre(VALUE md)
The portion of the original string before the given match.
Definition: re.c:1836
VALUE rb_reg_match_last(VALUE md)
The portion of the original string that captured at the very last.
Definition: re.c:1879
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
Definition: string.c:3317
VALUE rb_sym_to_s(VALUE sym)
This is an rb_sym2str() + rb_str_dup() combo.
Definition: string.c:11516
VALUE rb_str_succ(VALUE orig)
Searches for the "successor" of a string.
Definition: string.c:4564
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
Definition: string.c:3418
VALUE rb_str_length(VALUE)
Identical to rb_str_strlen(), except it returns the value in rb_cInteger.
Definition: string.c:2180
VALUE rb_str_cat_cstr(VALUE dst, const char *src)
Identical to rb_str_cat(), except it assumes the passed pointer is a pointer to a C string.
Definition: string.c:3171
VALUE rb_str_intern(VALUE str)
Identical to rb_to_symbol(), except it assumes the receiver being an instance of RString.
Definition: symbol.c:837
void rb_thread_schedule(void)
Tries to switch to another thread.
Definition: thread.c:1619
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
Definition: variable.c:2733
VALUE rb_attr_get(VALUE obj, ID name)
Identical to rb_ivar_get().
Definition: variable.c:1293
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
Definition: variable.c:1575
void rb_cvar_set(VALUE klass, ID name, VALUE val)
Assigns a value to a class variable.
Definition: variable.c:3487
VALUE rb_cvar_find(VALUE klass, ID name, VALUE *front)
Identical to rb_cvar_get(), except it takes additional "front" pointer.
Definition: variable.c:3541
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
Definition: variable.c:1285
void rb_const_set(VALUE space, ID name, VALUE val)
Names a constant.
Definition: variable.c:3106
VALUE rb_autoload_load(VALUE space, ID name)
Kicks the autoload procedure as if it was "touched".
Definition: variable.c:2540
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
Definition: variable.c:121
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
Definition: variable.c:2739
void rb_set_class_path_string(VALUE klass, VALUE space, VALUE name)
Identical to rb_set_class_path(), except it accepts the name as Ruby's string instead of C's.
Definition: variable.c:215
VALUE rb_ivar_defined(VALUE obj, ID name)
Queries if the instance variable is defined at the object.
Definition: variable.c:1592
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
Definition: variable.c:3043
VALUE rb_cvar_defined(VALUE klass, ID name)
Queries if the given class has the given class variable.
Definition: variable.c:3563
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
Definition: variable.c:172
int rb_const_defined(VALUE space, ID name)
Queries if the constant is defined at the namespace.
Definition: variable.c:3037
int rb_method_basic_definition_p(VALUE klass, ID mid)
Well... Let us hesitate from describing what a "basic definition" is.
Definition: vm_method.c:2643
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
Definition: vm_eval.c:664
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
Definition: vm_method.c:1123
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
Definition: vm_method.c:1641
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
Definition: symbol.c:1066
VALUE rb_sym2str(VALUE id)
Identical to rb_id2str(), except it takes an instance of rb_cSymbol rather than an ID.
Definition: symbol.c:924
const char * rb_id2name(ID id)
Retrieves the name mapped to the given id.
Definition: symbol.c:941
VALUE rb_id2str(ID id)
Identical to rb_id2name(), except it returns a Ruby's String instead of C's.
Definition: symbol.c:935
VALUE rb_sprintf(const char *fmt,...)
Ruby's extended sprintf(3).
Definition: sprintf.c:1201
VALUE rb_str_catf(VALUE dst, const char *fmt,...)
Identical to rb_sprintf(), except it renders the output to the specified object rather than creating ...
Definition: sprintf.c:1241
VALUE rb_uint2big(uintptr_t i)
Converts a C's uintptr_t into an instance of rb_cInteger.
Definition: bignum.c:3169
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
Definition: memory.h:366
#define ALLOCA_N(type, n)
Definition: memory.h:286
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
Definition: memory.h:161
#define MEMMOVE(p1, p2, type, n)
Handy macro to call memmove.
Definition: memory.h:378
VALUE type(ANYARGS)
ANYARGS-ed function type.
Definition: cxxanyargs.hpp:56
static bool rb_ractor_shareable_p(VALUE obj)
Queries if multiple Ractors can share the passed object or not.
Definition: ractor.h:249
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
Definition: ractor.h:235
#define RARRAY_LEN
Just another name of rb_array_len.
Definition: rarray.h:68
#define RARRAY_CONST_PTR_TRANSIENT
Just another name of rb_array_const_ptr_transient.
Definition: rarray.h:70
#define RARRAY_AREF(a, i)
Definition: rarray.h:588
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
Definition: rbasic.h:152
#define RBASIC(obj)
Convenient casting macro.
Definition: rbasic.h:40
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
Definition: rclass.h:46
#define RHASH_SIZE(h)
Queries the size of the hash.
Definition: rhash.h:82
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
Definition: rhash.h:92
static uint32_t ROBJECT_NUMIV(VALUE obj)
Queries the number of instance variables.
Definition: robject.h:145
static VALUE * ROBJECT_IVPTR(VALUE obj)
Queries the instance variables.
Definition: robject.h:171
static long RSTRING_LEN(VALUE str)
Queries the length of the string.
Definition: rstring.h:483
static char * RSTRING_PTR(VALUE str)
Queries the contents pointer of the string.
Definition: rstring.h:497
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks for the contents for viability as a C stri...
Definition: rstring.h:95
#define RB_NO_KEYWORDS
Do not pass keywords.
Definition: scan_args.h:69
#define RTEST
This is an old name of RB_TEST.
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
Definition: stdarg.h:64
CREF (Class REFerence)
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
rb_cref_t * cref
class reference, should be marked
IFUNC (Internal FUNCtion)
SVAR (Special VARiable)
const VALUE cref_or_me
class reference or rb_method_entry_t
intptr_t SIGNED_VALUE
A signed integer type that has the same width with VALUE.
Definition: value.h:63
uintptr_t VALUE
Type that represents a Ruby object.
Definition: value.h:40
static enum ruby_value_type RB_BUILTIN_TYPE(VALUE obj)
Queries the type of the object.
Definition: value_type.h:181
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
Definition: value_type.h:263
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.
Definition: value_type.h:375