#include "ruby/internal/config.h"

#include "debug_counter.h"
#include "internal/class.h"
#include "internal/compar.h"
#include "internal/hash.h"
#include "internal/numeric.h"
#include "internal/proc.h"
#include "internal/random.h"
#include "internal/variable.h"
#include "internal/struct.h"

#include "insns_info.inc"
                                         int argc, const VALUE *argv, int priv);
ruby_vm_special_exception_copy(VALUE exc)
    rb_obj_copy_ivar(e, exc);

    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;

    VALUE at = rb_ec_backtrace_object(ec);
    mesg = ruby_vm_special_exception_copy(mesg);

    EC_JUMP_TAG(ec, TAG_RAISE);
NORETURN(static void vm_stackoverflow(void));

    ec_stack_overflow(GET_EC(), TRUE);

        rb_bug("system stack overflow during GC. Faulty native extension?");

    ec->raised_flag = RAISED_STACKOVERFLOW;
    ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
    EC_JUMP_TAG(ec, TAG_RAISE);

        ec_stack_overflow(ec, TRUE);
        ec_stack_overflow(ec, FALSE);
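
/* Note (added): the helpers below (callable_class_p, vm_check_frame_detail,
 * and friends) are debug-mode sanity checks.  They are only active when
 * VM_CHECK_MODE is non-zero and verify invariants of every frame pushed
 * onto the VM stack; in release builds they compile away. */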
callable_class_p(VALUE klass)
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;

    VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment));

    if (callable_class_p(cme->defined_class)) {
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref,
                      VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env;

    if (RB_TYPE_P(cref_or_me, T_IMEMO)) {
        cref_or_me_type = imemo_type(cref_or_me);

    if (type & VM_FRAME_FLAG_BMETHOD) {

    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);

        if (cref_or_me_type != imemo_ment) {
            rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
        if (req_cref && cref_or_me_type != imemo_cref) {
            rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
        if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
            if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
                rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);

    if (cref_or_me_type == imemo_ment) {
        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);

    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RUBY_VM_NORMAL_ISEQ_P(iseq));

        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
    vm_check_frame_detail(type, req_block, req_me, req_cref, \
                          specval, cref_or_me, is_cframe, iseq); \

    switch (given_magic) {
        CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
        CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
        CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);
        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
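
/* Note (added): the stack canary below is a debug-build tripwire.  A random
 * sentinel VALUE is planted at sp[0] around instruction dispatch; if an
 * instruction overwrites it, rb_vm_check_canary() reports which insn did so
 * (with its iseq inspection and disassembly) and aborts via rb_bug().  This
 * is how mis-declared leaf instructions are usually caught. */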
static VALUE vm_stack_canary;
static bool vm_stack_canary_was_born = false;

    if (! LIKELY(vm_stack_canary_was_born)) {
    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
    else if (! (iseq = GET_ISEQ())) {
    else if (LIKELY(sp[0] != vm_stack_canary)) {

    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE *encoded = ISEQ_BODY(iseq)->iseq_encoded;
    const ptrdiff_t pos = GET_PC() - encoded;
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);

        "We are killing the stack canary set by %s, "
        "at %s@pc=%"PRIdPTR"\n"
        "watch out the C stack trace.\n"
        name, stri, pos, strd);
    rb_bug("see above.");
#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)

#define vm_check_canary(ec, sp)
#define vm_check_frame(a, b, c, d)
vm_push_frame_debug_counter_inc(const struct rb_execution_context_struct *ec,
                                const struct rb_control_frame_struct *reg_cfp,
                                VALUE type)
    RB_DEBUG_COUNTER_INC(frame_push);

    if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
        const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
        const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);

            RB_DEBUG_COUNTER_INC(frame_R2R);
            RB_DEBUG_COUNTER_INC(frame_R2C);
            RB_DEBUG_COUNTER_INC(frame_C2R);
            RB_DEBUG_COUNTER_INC(frame_C2C);

    switch (type & VM_FRAME_MAGIC_MASK) {
      case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
      case VM_FRAME_MAGIC_BLOCK:  RB_DEBUG_COUNTER_INC(frame_push_block);  return;
      case VM_FRAME_MAGIC_CLASS:  RB_DEBUG_COUNTER_INC(frame_push_class);  return;
      case VM_FRAME_MAGIC_TOP:    RB_DEBUG_COUNTER_INC(frame_push_top);    return;
      case VM_FRAME_MAGIC_CFUNC:  RB_DEBUG_COUNTER_INC(frame_push_cfunc);  return;
      case VM_FRAME_MAGIC_IFUNC:  RB_DEBUG_COUNTER_INC(frame_push_ifunc);  return;
      case VM_FRAME_MAGIC_EVAL:   RB_DEBUG_COUNTER_INC(frame_push_eval);   return;
      case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
      case VM_FRAME_MAGIC_DUMMY:  RB_DEBUG_COUNTER_INC(frame_push_dummy);  return;

    rb_bug("unreachable");

#define vm_push_frame_debug_counter_inc(ec, cfp, t)
STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS,   VM_ENV_DATA_INDEX_FLAGS   == -0);
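
/* Layout reminder (added): every environment ends with three VALUEs,
 * addressed relative to the ep: ep[-2] holds the method entry or CREF,
 * ep[-1] holds specval (block handler or previous ep), and ep[0] holds
 * the frame flags — exactly what the asserts above pin down. */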
    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);

    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
    vm_check_canary(ec, sp);

    for (int i=0; i < local_size; i++) {

    vm_push_frame_debug_counter_inc(ec, cfp, type);

    if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
    if (VMDEBUG == 2) SDR();
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];

    if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
    if (VMDEBUG == 2) SDR();

    RUBY_VM_CHECK_INTS(ec);
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    return flags & VM_FRAME_FLAG_FINISH;

    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
    VALUE tmpbuf = rb_imemo_tmpbuf_auto_free_pointer();

    rb_imemo_tmpbuf_set_ptr(tmpbuf, ptr);

    dmy_iseq->body = dmy_body;
    dmy_body->type = ISEQ_TYPE_TOP;
    dmy_body->location.pathobj = fname;

                  VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
                  VM_BLOCK_HANDLER_NONE,
rb_arity_error_new(int argc, int min, int max)
    VALUE err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d", argc, min);

        rb_str_catf(err_mess, "..%d", max);

rb_error_arity(int argc, int min, int max)
    rb_exc_raise(rb_arity_error_new(argc, min, max));
NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));

vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
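
/* Note (added): local-variable writes funnel through vm_env_write().  While
 * VM_ENV_FLAG_WB_REQUIRED is clear, the env is a plain stack store; once the
 * flag is set (typically because the env escaped to the heap), the slowpath
 * above applies the GC write barrier before clearing the flag again. */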
vm_env_write(const VALUE *ep, int index, VALUE v)
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
        VM_STACK_ENV_WRITE(ep, index, v);

        vm_env_write_slowpath(ep, index, v);

rb_vm_env_write(const VALUE *ep, int index, VALUE v)
    vm_env_write(ep, index, v);
    if (block_handler == VM_BLOCK_HANDLER_NONE) {

    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:
      case block_handler_type_ifunc:
        return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
      case block_handler_type_symbol:
        return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
      case block_handler_type_proc:
        return VM_BH_TO_PROC(block_handler);

    VM_UNREACHABLE(rb_vm_bh_to_procval);
vm_svar_valid_p(VALUE svar)
    switch (imemo_type(svar)) {

    rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];

        svar = ec->root_svar;

    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));

    VM_ASSERT(vm_svar_valid_p((VALUE)svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);

        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
    const struct vm_svar *svar = lep_svar(ec, lep);

      case VM_SVAR_LASTLINE:
        return svar->lastline;
      case VM_SVAR_BACKREF:
        return svar->backref;

        const VALUE ary = svar->others;

        return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);

    struct vm_svar *svar = lep_svar(ec, lep);

        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));

      case VM_SVAR_LASTLINE:
      case VM_SVAR_BACKREF:

        VALUE ary = svar->others;

        rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
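
/* Note (added): per-frame "special variables" live in a struct vm_svar
 * reachable from the local EP (or ec->root_svar at toplevel).
 * VM_SVAR_LASTLINE backs $_, VM_SVAR_BACKREF backs $~, and keys from
 * VM_SVAR_EXTRA_START upward index the svar->others array (used, e.g.,
 * for flip-flop state). */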
        val = lep_svar_get(ec, lep, key);

        VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

        rb_bug("unexpected back-ref");

    VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

        return rb_reg_last_defined(backref);

        rb_bug("unexpected back-ref");

        nth = (int)(type >> 1);
check_method_entry(VALUE obj, int can_be_svar)
    if (obj == Qfalse) return NULL;

    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));

    switch (imemo_type(obj)) {

        rb_bug("check_method_entry: svar should not be there:");

    const VALUE *ep = cfp->ep;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
        ep = VM_ENV_PREV_EP(ep);

    return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);

    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.iseqptr;

    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.cref;
#if VM_CHECK_MODE == 0

check_cref(VALUE obj, int can_be_svar)
    if (obj == Qfalse) return NULL;

    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));

    switch (imemo_type(obj)) {
        rb_bug("check_cref: svar should not be there:");
vm_env_cref(const VALUE *ep)
    while (!VM_ENV_LOCAL_P(ep)) {
        if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
        ep = VM_ENV_PREV_EP(ep);

    return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);

is_cref(const VALUE v, int can_be_svar)
    switch (imemo_type(v)) {

vm_env_cref_by_cref(const VALUE *ep)
    while (!VM_ENV_LOCAL_P(ep)) {
        if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
        ep = VM_ENV_PREV_EP(ep);

    return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
    const VALUE v = *vptr;

    switch (imemo_type(v)) {
        new_cref = vm_cref_dup(cref);

            VM_FORCE_WRITE(vptr, (VALUE)new_cref);

        return cref_replace_with_duplicated_cref_each_frame(&((struct vm_svar *)v)->cref_or_me, FALSE, v);

        rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");

vm_cref_replace_with_duplicated_cref(const VALUE *ep)
    if (vm_env_cref_by_cref(ep)) {

        while (!VM_ENV_LOCAL_P(ep)) {
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {

            ep = VM_ENV_PREV_EP(ep);

        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);

    rb_bug("vm_cref_dup: unreachable");
vm_get_cref(const VALUE *ep)
        rb_bug("vm_get_cref: unreachable");

rb_vm_get_cref(const VALUE *ep)
    return vm_get_cref(ep);

    return vm_get_cref(cfp->ep);
vm_get_const_key_cref(const VALUE *ep)
            RCLASS_EXT(CREF_CLASS(cref))->cloned) {

        cref = CREF_NEXT(cref);

        if (CREF_CLASS(cref) == old_klass) {
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            *new_cref_ptr = new_cref;

        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        *new_cref_ptr = new_cref;
        new_cref_ptr = &new_cref->next;

    *new_cref_ptr = NULL;

        prev_cref = vm_env_cref(ep);

            prev_cref = vm_env_cref(cfp->ep);

    return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
vm_get_cbase(const VALUE *ep)
        return CREF_CLASS_FOR_DEFINITION(cref);

vm_get_const_base(const VALUE *ep)
        if (!CREF_PUSHED_BY_EVAL(cref)) {
            return CREF_CLASS_FOR_DEFINITION(cref);

        cref = CREF_NEXT(cref);
vm_check_if_namespace(VALUE klass)
        rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);

vm_ensure_not_refinement_module(VALUE self)
    if (RB_TYPE_P(self, T_MODULE) && FL_TEST(self, RMODULE_IS_REFINEMENT)) {
        rb_warn("not defined at the refinement, but at the outer class/module");
    if (NIL_P(orig_klass) && allow_nil) {
        const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);

        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);

        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {

                klass = CREF_CLASS(cref);

            cref = CREF_NEXT(cref);

            if (!NIL_P(klass)) {

                if ((ce = rb_const_lookup(klass, id))) {
                    rb_const_warn_if_deprecated(ce, klass, id);

                        if (am == klass) break;

                        if (is_defined) return 1;
                        if (rb_autoloading_value(klass, id, &av, NULL)) return av;

                        goto search_continue;

                            if (UNLIKELY(!rb_ractor_main_p())) {
                                rb_raise(rb_eRactorIsolationError,
                                         "can not access non-shareable objects in constant %"PRIsVALUE"::%s by non-main ractor.",
                                         rb_class_path(klass), rb_id2name(id));

        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));

        vm_check_if_namespace(orig_klass);

            return rb_public_const_defined_from(orig_klass, id);

            return rb_public_const_get_from(orig_klass, id);

    return vm_get_ev_const(ec, orig_klass, id, allow_nil == Qtrue, 0);
    int allow_nil = TRUE;
    if (segments[0] == idNULL) {

    while (segments[idx]) {
        ID id = segments[idx++];
        val = vm_get_ev_const(ec, val, id, allow_nil, 0);
        rb_bug("vm_get_cvar_base: no cref");

    while (CREF_NEXT(cref) &&
           CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
        cref = CREF_NEXT(cref);

    if (top_level_raise && !CREF_NEXT(cref)) {

    klass = vm_get_iclass(cfp, CREF_CLASS(cref));
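
/* Note (added): the instance-variable accessors below are cached by object
 * shape.  An inline cache (IVC, one per getivar/setivar site) or a call
 * cache (for attr_reader/attr_writer) remembers a shape_id together with
 * the attribute index that shape assigns the ivar.  While the receiver's
 * shape still matches, the access is a plain array load or store; on a
 * mismatch the slowpath refills the cache. */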
ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc,
                                          int is_attr, attr_index_t index, shape_id_t shape_id));

fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc,
                int is_attr, attr_index_t index, shape_id_t shape_id)
        vm_cc_attr_index_set(cc, index, shape_id);

        vm_ic_attr_index_set(iseq, ic, index, shape_id);
#define ractor_incidental_shareable_p(cond, val) \
    (!(cond) || rb_ractor_shareable_p(val))
#define ractor_object_incidental_shareable_p(obj, val) \
    ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)

#define ATTR_INDEX_NOT_SET (attr_index_t)-1
    shape_id_t shape_id;

        return default_value;

#if SHAPE_IN_BASIC_FLAGS
    shape_id = RBASIC_SHAPE_ID(obj);

#if !SHAPE_IN_BASIC_FLAGS
        shape_id = ROBJECT_SHAPE_ID(obj);

        if (UNLIKELY(!rb_ractor_main_p())) {

        ivar_list = RCLASS_IVPTR(obj);

#if !SHAPE_IN_BASIC_FLAGS
        shape_id = RCLASS_SHAPE_ID(obj);

        rb_gen_ivtbl_get(obj, id, &ivtbl);
#if !SHAPE_IN_BASIC_FLAGS
        shape_id = ivtbl->shape_id;

        ivar_list = ivtbl->as.shape.ivptr;

        return default_value;

    shape_id_t cached_id;

        vm_cc_atomic_shape_and_index(cc, &cached_id, &index);

        vm_ic_atomic_shape_and_index(ic, &cached_id, &index);

    if (LIKELY(cached_id == shape_id)) {
        RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);

        if (index == ATTR_INDEX_NOT_SET) {
            return default_value;

        val = ivar_list[index];
#if USE_DEBUG_COUNTER
        RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);

            RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);

#if USE_DEBUG_COUNTER

            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);

                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);

            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);

                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);

        RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

            RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);

        if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {

                table = (st_table *)RCLASS_IVPTR(obj);

                table = ROBJECT_IV_HASH(obj);

                if (rb_gen_ivtbl_get(obj, 0, &ivtbl)) {
                    table = ivtbl->as.complex.table;

            if (!table || !st_lookup(table, id, &val)) {
                val = default_value;

            shape_id_t previous_cached_id = cached_id;
            if (rb_shape_get_iv_index_with_hint(shape_id, id, &index, &cached_id)) {

                if (cached_id != previous_cached_id) {
                    fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);

                if (index == ATTR_INDEX_NOT_SET) {
                    val = default_value;

                    val = ivar_list[index];

                vm_cc_attr_index_initialize(cc, shape_id);

                vm_ic_attr_index_initialize(ic, shape_id);

                val = default_value;

    if (default_value != Qundef) {

    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

    return rb_attr_get(obj, id);
populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id,
               const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, bool is_attr)
    RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);

        vm_cc_attr_index_set(cc, index, next_shape_id);

        vm_ic_attr_index_set(iseq, ic, index, next_shape_id);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);

        attr_index_t index = rb_obj_ivar_set(obj, id, val);

        shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);

        if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
            populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);

    RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);

    return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);

    return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);
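
/* Note (added): vm_setivar_default and vm_setivar are the write fast paths.
 * The cache stores a destination shape: if the receiver is already at
 * dest_shape_id the store hits an existing slot, and if the receiver sits
 * at dest's parent shape with a matching edge_name the write performs the
 * cached shape transition itself.  Anything else falls back to
 * vm_setivar_slowpath above. */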
NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));

vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
#if SHAPE_IN_BASIC_FLAGS
    shape_id_t shape_id = RBASIC_SHAPE_ID(obj);

    shape_id_t shape_id = rb_generic_shape_id(obj);

    if (shape_id == dest_shape_id) {
        RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);

    else if (dest_shape_id != INVALID_SHAPE_ID) {
        rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
        rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);

        if (shape_id == dest_shape->parent_id && dest_shape->edge_name == id && shape->capacity == dest_shape->capacity) {

    rb_gen_ivtbl_get(obj, 0, &ivtbl);

    if (shape_id != dest_shape_id) {
#if SHAPE_IN_BASIC_FLAGS
        RBASIC_SET_SHAPE_ID(obj, dest_shape_id);

        ivtbl->shape_id = dest_shape_id;

    RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
        shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
        RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);

        if (LIKELY(shape_id == dest_shape_id)) {
            RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);

        else if (dest_shape_id != INVALID_SHAPE_ID) {
            rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
            rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
            shape_id_t source_shape_id = dest_shape->parent_id;

            if (shape_id == source_shape_id && dest_shape->edge_name == id && shape->capacity == dest_shape->capacity) {
                RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);

                ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);

                RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id), id) == dest_shape);

        RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
        RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
    VALUE defined_class = 0;

    if (RB_TYPE_P(defined_class, T_ICLASS)) {
        defined_class = RBASIC(defined_class)->klass;

    struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);

        rb_bug("the cvc table should be set");

    if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
        rb_bug("should have cvar cache entry");

    ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
        cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);

        VALUE v = rb_ivar_lookup(ic->entry->class_value, id, Qundef);

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    return update_classvariable_cache(iseq, klass, id, cref, ic);

    return vm_getclassvariable(iseq, cfp, id, ic);

        cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);

        rb_class_ivar_set(ic->entry->class_value, id, val);

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    update_classvariable_cache(iseq, klass, id, cref, ic);

    vm_setclassvariable(iseq, cfp, id, val, ic);

    return vm_getivar(obj, id, iseq, ic, NULL, FALSE, Qnil);

    shape_id_t dest_shape_id;

    vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);

    if (UNLIKELY(UNDEF_P(vm_setivar(obj, id, val, dest_shape_id, index)))) {

        if (!UNDEF_P(vm_setivar_default(obj, id, val, dest_shape_id, index))) {

        vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);

    vm_setinstancevariable(iseq, obj, id, val, ic);
        ec->tag->state = RUBY_TAG_FATAL;

        ec->tag->state = TAG_THROW;

    else if (THROW_DATA_P(err)) {
        ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);

        ec->tag->state = TAG_RAISE;
                                 const int flag, const VALUE throwobj)

    else if (state == TAG_BREAK) {
        const VALUE *ep = GET_EP();
        const rb_iseq_t *base_iseq = GET_ISEQ();
        escape_cfp = reg_cfp;

        while (ISEQ_BODY(base_iseq)->type != ISEQ_TYPE_BLOCK) {
            if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;

                ep = VM_ENV_PREV_EP(ep);
                base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
                escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
                VM_ASSERT(escape_cfp->iseq == base_iseq);

        if (VM_FRAME_LAMBDA_P(escape_cfp)) {

            ep = VM_ENV_PREV_EP(ep);

            while (escape_cfp < eocfp) {
                if (escape_cfp->ep == ep) {
                    const rb_iseq_t *const iseq = escape_cfp->iseq;
                    const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;

                    for (i=0; i < ct->size; i++) {
                        UNALIGNED_MEMBER_PTR(ct, entries[i]);

                        if (entry->type == CATCH_TYPE_BREAK &&
                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) {

                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);

        rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);

    else if (state == TAG_RETRY) {
        const VALUE *ep = VM_ENV_PREV_EP(GET_EP());

        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);

    else if (state == TAG_RETURN) {
        const VALUE *current_ep = GET_EP();
        const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
        int in_class_frame = 0;

        escape_cfp = reg_cfp;

        while (!VM_ENV_LOCAL_P(ep)) {
            if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {

            ep = VM_ENV_PREV_EP(ep);

        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);

            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&
                ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {

            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {

                    if (in_class_frame) {

                        const VALUE *tep = current_ep;

                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {

                                if (tep == target_ep) {

                                    goto unexpected_return;

                            tep = VM_ENV_PREV_EP(tep);

            else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
                switch (ISEQ_BODY(escape_cfp->iseq)->type) {
                  case ISEQ_TYPE_MAIN:
                    if (in_class_frame) goto unexpected_return;
                    if (target_ep == NULL) {

                        goto unexpected_return;

                  case ISEQ_TYPE_EVAL: {
                    enum rb_iseq_type t = ISEQ_BODY(is)->type;
                    while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
                        if (!(is = ISEQ_BODY(is)->parent_iseq)) break;
                        t = ISEQ_BODY(is)->type;

                    toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;

                  case ISEQ_TYPE_CLASS:

            if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
                if (target_ep == NULL) {

                    goto unexpected_return;

            escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);

        rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);
        rb_bug("insns(throw): unsupported throw type");
    ec->tag->state = state;
    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);

         rb_num_t throw_state, VALUE throwobj)
    const int state = (int)(throw_state & VM_THROW_STATE_MASK);
    const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);

        return vm_throw_start(ec, reg_cfp, state, flag, throwobj);

        return vm_throw_continue(ec, throwobj);

    return vm_throw(ec, reg_cfp, throw_state, throwobj);
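
/* Note (added): vm_expandarray below implements multiple assignment, e.g.
 * `a, b, *rest = ary`.  `num` elements are peeled off the array onto the
 * VM stack; flag & 0x01 requests a splat array for the leftovers, and
 * flag & 0x02 takes elements from the end (post-splat targets). */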
    int is_splat = flag & 0x01;

    const VALUE obj = ary;

    if (!RB_TYPE_P(ary, T_ARRAY) && NIL_P(ary = rb_check_array_type(ary))) {

    if (num + is_splat == 0) {

    else if (flag & 0x02) {

        for (i = 0; i < num - len; i++) {

        for (j = 0; i < num; i++, j++) {

            *cfp->sp++ = rb_ary_new();

            for (; i < num - len; i++) {

            for (rb_num_t j = 0; i < num; i++, j++) {
                *cfp->sp++ = ptr[len - j - 1];

            for (rb_num_t j = 0; j < num; j++) {
                *cfp->sp++ = ptr[num - j - 1];
#if VM_CHECK_MODE > 0
    ccs->debug_sig = ~(VALUE)ccs;

    ccs->entries = NULL;

    rb_id_table_insert(cc_tbl, mid, (VALUE)ccs);

    if (! vm_cc_markable(cc)) {

    else if (! vm_ci_markable(ci)) {

    if (UNLIKELY(ccs->len == ccs->capa)) {
        if (ccs->capa == 0) {

            ccs->entries = ALLOC_N(struct rb_class_cc_entries_entry, ccs->capa);

            REALLOC_N(ccs->entries, struct rb_class_cc_entries_entry, ccs->capa);

    VM_ASSERT(ccs->len < ccs->capa);

    const int pos = ccs->len++;

    if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {

#if VM_CHECK_MODE > 0

    ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
    for (int i=0; i<ccs->len; i++) {
        vm_ci_dump(ccs->entries[i].ci);
        rp(ccs->entries[i].cc);

    VM_ASSERT(vm_ccs_p(ccs));
    VM_ASSERT(ccs->len <= ccs->capa);

    for (int i=0; i<ccs->len; i++) {
        const struct rb_callinfo *ci = ccs->entries[i].ci;

        VM_ASSERT(vm_ci_p(ci));
        VM_ASSERT(vm_ci_mid(ci) == mid);
        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(vm_cc_class_check(cc, klass));
        VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
        VM_ASSERT(!vm_cc_super_p(cc));
        VM_ASSERT(!vm_cc_refinement_p(cc));
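
/* Note (added): method-lookup caching.  Each class carries a cc_tbl that
 * maps a method id to a rb_class_cc_entries (ccs) bucket holding one
 * (callinfo, callcache) pair per distinct call shape.  vm_search_cc below
 * first scans that bucket; only on a miss does it fall back to
 * rb_callable_method_entry() and push a freshly built call cache. */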
    const ID mid = vm_ci_mid(ci);
    struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);

        if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {

            const int ccs_len = ccs->len;

            if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
                rb_vm_ccs_free(ccs);
                rb_id_table_delete(cc_tbl, mid);

                VM_ASSERT(vm_ccs_verify(ccs, mid, klass));

                for (int i=0; i<ccs_len; i++) {
                    const struct rb_callinfo *ccs_ci = ccs->entries[i].ci;
                    const struct rb_callcache *ccs_cc = ccs->entries[i].cc;

                    VM_ASSERT(vm_ci_p(ccs_ci));
                    VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));

                        RB_DEBUG_COUNTER_INC(cc_found_in_ccs);

                        VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
                        VM_ASSERT(ccs_cc->klass == klass);
                        VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));

        cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);

    RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

        cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;

        VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

        cme = rb_callable_method_entry(klass, mid);

    VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));

        VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
        return &vm_empty_cc;

    VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

    VM_ASSERT(cc_tbl != NULL);

    if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {

        ccs = vm_ccs_create(klass, cc_tbl, mid, cme);

    cme = check_overloaded_cme(cme, ci);

    const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
    vm_ccs_push(klass, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc) != NULL);
    VM_ASSERT(cme->called_id == mid);
    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);

    cc = vm_search_cc(klass, ci);

    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
    VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
    VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
    VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
#if USE_DEBUG_COUNTER

    const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);

#if OPT_INLINE_METHOD_CACHE

    if (cd_owner && cc != empty_cc) {

#if USE_DEBUG_COUNTER
    if (old_cc == empty_cc) {

        RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);

    else if (old_cc == cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);

    else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);

    else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
             vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);

        RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);

    VM_ASSERT(vm_cc_cme(cc) == NULL ||
              vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

#if OPT_INLINE_METHOD_CACHE
    if (LIKELY(vm_cc_class_check(cc, klass))) {
        if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
            VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
            RB_DEBUG_COUNTER_INC(mc_inline_hit);
            VM_ASSERT(vm_cc_cme(cc) == NULL ||
                      (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
                      vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

        RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);

    RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);

    return vm_search_method_slowpath0(cd_owner, cd, klass);

    VM_ASSERT(klass != Qfalse);

    return vm_search_method_fastpath(cd_owner, cd, klass);
#if __has_attribute(transparent_union)
    VALUE (*f10)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f11)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f12)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f13)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f14)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f15)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
    VM_ASSERT(callable_method_entry_p(me));

    if (me->def->type != VM_METHOD_TYPE_CFUNC) {

#if __has_attribute(transparent_union)
    return me->def->body.cfunc.func == func.anyargs;

    return me->def->body.cfunc.func == func;

    VM_ASSERT(iseq != NULL);

    return check_cfunc(vm_cc_cme(cc), func);

#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
opt_equality_specialized(VALUE recv, VALUE obj)
    if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
        goto compare_by_identity;

    else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
        goto compare_by_identity;

        goto compare_by_identity;

#if MSC_VERSION_BEFORE(1300)

        else if (isnan(b)) {

        return RBOOL(a == b);

    else if (RB_TYPE_P(obj, T_STRING)) {
        return rb_str_eql_internal(obj, recv);

  compare_by_identity:
    return RBOOL(recv == obj);

    VM_ASSERT(cd_owner != NULL);

    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) return val;

    if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {

    return RBOOL(recv == obj);

#undef EQ_UNREDEFINED_P
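
/* Note (added): when a call site has no usable inline cache (e.g. `==`
 * reached from C code), opt_equality_by_mid below consults the global
 * call-cache table (gccct) instead, to confirm the receiver still uses
 * rb_obj_equal before taking the identity-comparison shortcut. */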
NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));

opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
    const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, 1);

    if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
        return RBOOL(recv == obj);

    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) {

    return opt_equality_by_mid_slowpath(recv, obj, mid);

    return opt_equality_by_mid(obj1, obj2, idEq);

    return opt_equality_by_mid(obj1, obj2, idEqlP);
      case VM_CHECKMATCH_TYPE_WHEN:

      case VM_CHECKMATCH_TYPE_RESCUE:
            rb_raise(rb_eTypeError, "class or module required for rescue clause");

      case VM_CHECKMATCH_TYPE_CASE: {
        return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target, RB_NO_KEYWORDS);

    rb_bug("check_match: unreachable");
#if MSC_VERSION_BEFORE(1300)
#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;

#define CHECK_CMP_NAN(a, b)

double_cmp_lt(double a, double b)
    CHECK_CMP_NAN(a, b);
    return RBOOL(a < b);

double_cmp_le(double a, double b)
    CHECK_CMP_NAN(a, b);
    return RBOOL(a <= b);

double_cmp_gt(double a, double b)
    CHECK_CMP_NAN(a, b);
    return RBOOL(a > b);

double_cmp_ge(double a, double b)
    CHECK_CMP_NAN(a, b);
    return RBOOL(a >= b);
static inline VALUE *

    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
        VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;

        if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {

#if VM_DEBUG_BP_CHECK
        if (bp != cfp->bp_check) {
            ruby_debug_printf("bp_check: %ld, bp: %ld\n",
                              (long)(cfp->bp_check - GET_EC()->vm_stack),
                              (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");

    return vm_base_ptr(cfp);
static vm_call_handler vm_call_iseq_setup_func(const struct rb_callinfo *ci, const int param_size, const int local_size);

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;

rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
    return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;

rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
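
/* Note (added): caller-side argument massaging follows.  A splatted array
 * is normally copied element-by-element onto the VM stack, but when the
 * callee can accept it (max_args is ALLOW_HEAP_ARGV or
 * ALLOW_HEAP_ARGV_KEEP_KWSPLAT) and the expansion would exceed
 * VM_ARGC_STACK_MAX, the arguments are gathered into a hidden heap array
 * (calling->heap_argv) instead, avoiding VM stack overflow. */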
#define ALLOW_HEAP_ARGV (-2)
#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)

    vm_check_canary(GET_EC(), cfp->sp);

    int argc = calling->argc;

    if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {

        VALUE *argv = cfp->sp - argc;
        VALUE argv_ary = rb_ary_hidden_new(len + argc + 1);
        rb_ary_cat(argv_ary, argv, argc);
        rb_ary_cat(argv_ary, ptr, len);
        cfp->sp -= argc - 1;
        cfp->sp[-1] = argv_ary;

        calling->heap_argv = argv_ary;

    if (max_args >= 0 && len + argc > max_args) {

        calling->argc += len - (max_args - argc + 1);
        len = max_args - argc + 1;

    calling->heap_argv = 0;

    CHECK_VM_STACK_OVERFLOW(cfp, len);

    for (i = 0; i < len; i++) {
        *cfp->sp++ = ptr[i];
    const VALUE *const passed_keywords = vm_ci_kwarg(ci)->keywords;
    const int kw_len = vm_ci_kwarg(ci)->keyword_len;
    const VALUE h = rb_hash_new_with_size(kw_len);
    VALUE *sp = cfp->sp;

    for (i=0; i<kw_len; i++) {
        rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);

    cfp->sp -= kw_len - 1;
    calling->argc -= kw_len - 1;
    calling->kw_splat = 1;

vm_caller_setup_keyword_hash(const struct rb_callinfo *ci, VALUE keyword_hash)
    if (UNLIKELY(!RB_TYPE_P(keyword_hash, T_HASH))) {

        keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));

    else if (!IS_ARGS_KW_SPLAT_MUT(ci)) {

        keyword_hash = rb_hash_dup(keyword_hash);

    return keyword_hash;
                          const struct rb_callinfo *restrict ci, int max_args)
    if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
        if (IS_ARGS_KW_SPLAT(ci)) {

            VM_ASSERT(calling->kw_splat == 1);

            VALUE ary = cfp->sp[0];
            VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);

            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) return;

            if (UNLIKELY(calling->heap_argv)) {
                rb_ary_push(calling->heap_argv, kwh);
                ((struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
                if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
                    calling->kw_splat = 0;

            VM_ASSERT(calling->kw_splat == 1);

            calling->kw_splat = 0;

            VM_ASSERT(calling->kw_splat == 0);

            VALUE ary = cfp->sp[0];

            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {

                VALUE last_hash, argv_ary;
                if (UNLIKELY(argv_ary = calling->heap_argv)) {
                    if (!IS_ARGS_KEYWORD(ci) &&
                        RB_TYPE_P((last_hash = rb_ary_last(0, NULL, argv_ary)), T_HASH) &&
                        (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                        rb_ary_pop(argv_ary);

                        rb_ary_push(argv_ary, rb_hash_dup(last_hash));
                        calling->kw_splat = 1;

                    if (!IS_ARGS_KEYWORD(ci) &&
                        calling->argc > 0 &&
                        RB_TYPE_P((last_hash = cfp->sp[-1]), T_HASH) &&
                        (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                        cfp->sp[-1] = rb_hash_dup(last_hash);
                        calling->kw_splat = 1;

    else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {

        VM_ASSERT(calling->kw_splat == 1);
        VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);

        calling->kw_splat = 0;

    else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {

        VM_ASSERT(calling->kw_splat == 0);

        vm_caller_setup_arg_kw(cfp, calling, ci);
#define USE_OPT_HIST 0

#define OPT_HIST_MAX 64
static int opt_hist[OPT_HIST_MAX+1];

opt_hist_show_results_at_exit(void)
    for (int i=0; i<OPT_HIST_MAX; i++) {
        ruby_debug_printf("opt_hist\t%d\t%d\n", i, opt_hist[i]);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
    const int param = ISEQ_BODY(iseq)->param.size;
    const int local = ISEQ_BODY(iseq)->local_table_size;
    const int delta = opt_num - opt;

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

    if (opt_pc < OPT_HIST_MAX) {

        opt_hist[OPT_HIST_MAX]++;

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

    if (opt_pc < OPT_HIST_MAX) {

        opt_hist[OPT_HIST_MAX]++;

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
                                     VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
                                     VALUE *const locals);

    VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

    const int ci_kw_len = kw_arg->keyword_len;
    const VALUE *const ci_keywords = kw_arg->keywords;
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;

    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);

    const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;

    VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    VALUE *const argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;

    for (i=0; i<kw_param->num; i++) {
        klocals[i] = kw_param->default_values[i];

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
    cfp->sp -= (calling->argc + 1);
    return builtin_invoker0(ec, calling->recv, NULL, (rb_insn_func_t)bf->func_ptr);
                     const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
    bool cacheable_ci = vm_ci_markable(ci);

    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
        if (LIKELY(rb_simple_iseq_p(iseq))) {

            int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            CALLER_SETUP_ARG(cfp, calling, ci, lead_num);

            if (calling->argc != lead_num) {
                argument_arity_error(ec, iseq, calling->argc, lead_num, lead_num);

            VM_ASSERT(ci == calling->cd->ci);
            VM_ASSERT(cc == calling->cc);

            if (cacheable_ci && vm_call_iseq_optimizable_p(ci, cc)) {
                if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_INLINE) &&

                    VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
                    vm_cc_bf_set(cc, (void *)iseq->body->iseq_encoded[1]);
                    CC_SET_FASTPATH(cc, vm_call_single_noarg_inline_builtin, true);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), true);

        else if (rb_iseq_only_optparam_p(iseq)) {

            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int opt_num = ISEQ_BODY(iseq)->param.opt_num;

            CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
            const int argc = calling->argc;
            const int opt = argc - lead_num;

            if (opt < 0 || opt > opt_num) {
                argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);

            if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                cacheable_ci && vm_call_cacheable(ci, cc));

                CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                cacheable_ci && vm_call_cacheable(ci, cc));

            VM_ASSERT((int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
            for (int i=argc; i<lead_num + opt_num; i++) {

            return (int)ISEQ_BODY(iseq)->param.opt_table[opt];

        else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int argc = calling->argc;
            const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

            if (vm_ci_flag(ci) & VM_CALL_KWARG) {

                if (argc - kw_arg->keyword_len == lead_num) {
                    const int ci_kw_len = kw_arg->keyword_len;
                    const VALUE *const ci_keywords = kw_arg->keywords;

                    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);

                    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
                                    cacheable_ci && vm_call_cacheable(ci, cc));

            else if (argc == lead_num) {

                VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);

                if (klocals[kw_param->num] == INT2FIX(0)) {

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
                                    cacheable_ci && vm_call_cacheable(ci, cc));

    return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int param_size = ISEQ_BODY(iseq)->param.size;
    const int local_size = ISEQ_BODY(iseq)->local_table_size;
    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);

                      int opt_pc, int param_size, int local_size)
    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
        return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);

        return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);

                           int opt_pc, int param_size, int local_size)
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *sp = argv + param_size;
    cfp->sp = argv - 1;

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
                  calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  local_size - param_size,
                  ISEQ_BODY(iseq)->stack_max);
    VALUE *argv = cfp->sp - calling->argc;

    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *src_argv = argv;
    VALUE *sp_orig, *sp;
    VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;

    if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
        struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
        const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
        dst_captured->code.val = src_captured->code.val;
        if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
            calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);

            calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);

    vm_pop_frame(ec, cfp, cfp->ep);

    sp_orig = sp = cfp->sp;

    sp[0] = calling->recv;

    for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
        *sp++ = src_argv[i];

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
                  calling->recv, calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
                  ISEQ_BODY(iseq)->stack_max);
ractor_unsafe_check(void)
    if (!rb_ractor_main_p()) {
        rb_raise(rb_eRactorUnsafeError, "ractor unsafe method called from not main ractor");
    ractor_unsafe_check();

    ractor_unsafe_check();
    return (*f)(argc, argv, recv);

    ractor_unsafe_check();

    ractor_unsafe_check();
    return (*f)(recv, argv[0]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1], argv[2]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3299 return (*f)(argc, argv, recv);
3313 return (*f)(recv, argv[0]);
3320 return (*f)(recv, argv[0], argv[1]);
3327 return (*f)(recv, argv[0], argv[1], argv[2]);
3334 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
    /* arity 5 */
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) =
        (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);

    /* arity 6 */
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) =
        (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);

    /* arity 7 */
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) =
        (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);

    /* arity 8 */
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) =
        (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);

    /* arity 9 */
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) =
        (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);

    /* arity 10 */
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) =
        (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);

    /* arity 11 */
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) =
        (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);

    /* arity 12 */
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) =
        (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);

    /* arity 13 */
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) =
        (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);

    /* arity 14 */
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) =
        (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);

    /* arity 15 */
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE,
               VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) =
        (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE,
                   VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6],
                argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
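/*
 * The invokers above cast the registered C function to the exact arity it was
 * defined with (presumably the call_cfunc_m1 .. call_cfunc_15 family; the
 * function headers were lost in this listing) and splat the VM stack slots
 * into positional arguments.  A method registered with
 * rb_define_method(klass, "m", func, 2) is therefore dispatched through the
 * two-argument invoker as f(recv, argv[0], argv[1]).
 */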
    const int ov_flags = RAISED_STACKOVERFLOW;
    if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
    if (rb_ec_raised_p(ec, ov_flags)) {
        rb_ec_raised_reset(ec, ov_flags);
        return TRUE;
    }
    return FALSE;

#define CHECK_CFP_CONSISTENCY(func) \
    (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
     rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
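/*
 * After a cfunc frame is pushed, the caller's reg_cfp must sit exactly one
 * control frame above ec->cfp.  CHECK_CFP_CONSISTENCY catches C methods that
 * leave the frame stack unbalanced; a pending RAISED_STACKOVERFLOW flag is
 * tolerated because stack-overflow unwinding can legitimately skip the
 * normal frame cleanup.
 */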
#if VM_DEBUG_VERIFY_METHOD_CACHE
    switch (me->def->type) {
      case VM_METHOD_TYPE_CFUNC:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
        break;
# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
        METHOD_BUG(ATTRSET);
        METHOD_BUG(BMETHOD);
        METHOD_BUG(OPTIMIZED);
        METHOD_BUG(MISSING);
        METHOD_BUG(REFINED);
        /* ... */
      default:
        rb_bug("wrong method type: %d", me->def->type);
    }
#endif
    return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
    RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);

    VALUE recv = calling->recv;
    VALUE block_handler = calling->block_handler;
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;

    if (UNLIKELY(calling->kw_splat)) {
        frame_type |= VM_FRAME_FLAG_CFRAME_KW;
    }

    VM_ASSERT(reg_cfp == ec->cfp);

    RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);

    vm_push_frame(ec, NULL, frame_type, recv,
                  block_handler, (VALUE)me,
                  0, ec->cfp->sp, 0, 0);

    int len = cfunc->argc;
    /* ... */
    reg_cfp->sp = stack_bottom;
    val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);

    CHECK_CFP_CONSISTENCY("vm_call_cfunc");

    rb_vm_pop_frame(ec);

    VM_ASSERT(ec->cfp->sp == stack_bottom);

    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
    RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
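/*
 * Canonical cfunc call sequence: push a CFUNC frame, invoke the C function
 * through the arity-matching invoker above, verify frame-pointer consistency,
 * pop the frame, then fire the C_RETURN event hooks.
 */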
rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
{
    return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
}

    int argc = calling->argc;
    VALUE *stack_bottom = reg_cfp->sp - argc - 1;
    VALUE *argv = &stack_bottom[1];

    return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
    RB_DEBUG_COUNTER_INC(ccf_cfunc_other);

    CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
    if (UNLIKELY(argv_ary = calling->heap_argv)) {
        VM_ASSERT(!IS_ARGS_KEYWORD(ci));
        /* ... */
        VALUE *stack_bottom = reg_cfp->sp - 2;

        VM_ASSERT(calling->argc == 1);
        VM_ASSERT(RB_TYPE_P(argv_ary, T_ARRAY));
        /* ... */
        return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
    }

    CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat);
    return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
    VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];

    if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
        return vm_call_cfunc_other(ec, reg_cfp, calling);
    }

    calling->kw_splat = 0;
    VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
    VALUE *sp = stack_bottom;
    CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
    for (i = 0; i < argc; i++) {
        /* (loop body elided in this listing: copy argv_ary elements onto the VM stack) */
    }
    /* ... */
    return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom + 1, stack_bottom);
    RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
    VALUE argv_ary = reg_cfp->sp[-1];
    /* ... */
    int argc_offset = 0;

    if (UNLIKELY(argc > 0 &&
                 RB_TYPE_P((last_hash = argv[argc-1]), T_HASH) &&
                 (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
        /* ... */
        return vm_call_cfunc_other(ec, reg_cfp, calling);
    }
    /* ... */
    return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
    RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
    VALUE keyword_hash = reg_cfp->sp[-1];

    if (/* keyword-hash check elided in this listing */) {
        return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
    }

    return vm_call_cfunc_other(ec, reg_cfp, calling);
    RB_DEBUG_COUNTER_INC(ccf_cfunc);

    if (IS_ARGS_SPLAT(ci)) {
        if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
            /* ... */
            CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
            return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
        }
        if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
            /* ... */
            CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
            return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
        }
    }

    CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
    return vm_call_cfunc_other(ec, reg_cfp, calling);
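/*
 * vm_call_cfunc specializes the call cache once, based on call-site shape: a
 * single positional splat (f(*args)) takes vm_call_cfunc_only_splat, a splat
 * plus keyword splat (f(*args, **kw)) takes vm_call_cfunc_only_splat_kw, and
 * everything else falls back to vm_call_cfunc_other.  Later calls through the
 * same cache skip this dispatch entirely.
 */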
    RB_DEBUG_COUNTER_INC(ccf_ivar);
    /* ... */
    VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE, Qnil);
    RB_DEBUG_COUNTER_INC(ccf_attrset);
    VALUE val = *(cfp->sp - 1);
    /* ... */
    attr_index_t index = vm_cc_attr_index(cc);
    shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
    ID id = vm_cc_cme(cc)->def->body.attr.id;

    VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
    /* ... */
    res = vm_setivar_default(obj, id, val, dest_shape_id, index);
    if (!UNDEF_P(res)) {
        /* ... */
    }
    res = vm_setivar_slowpath_attr(obj, id, val, cc);
    /* ... */

    return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
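/*
 * attr_writer fast path: the call cache memoizes the attribute index and the
 * destination shape ID, so a monomorphic site becomes a single vm_setivar;
 * vm_setivar_default and vm_setivar_slowpath_attr handle generic ivar tables
 * and cache misses.
 */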
    VALUE procv = cme->def->body.bmethod.proc;

    if (/* shareability check elided in this listing */
        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    GetProcPtr(procv, proc);
    val = rb_vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
    RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);

    VALUE procv = cme->def->body.bmethod.proc;

    if (/* shareability check elided in this listing */
        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    GetProcPtr(procv, proc);
    const struct rb_block *block = &proc->block;

    while (vm_block_type(block) == block_type_proc) {
        block = vm_proc_block(block->as.proc);
    }
    VM_ASSERT(vm_block_type(block) == block_type_iseq);

    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    VALUE * const argv = cfp->sp - calling->argc;
    const int arg_size = ISEQ_BODY(iseq)->param.size;

    if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
        opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
    }
    else {
        opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
    }

    vm_push_frame(ec, iseq,
                  VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
                  /* ... */
                  VM_GUARDED_PREV_EP(captured->ep),
                  /* ... */
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  /* ... */
                  ISEQ_BODY(iseq)->local_table_size - arg_size,
                  ISEQ_BODY(iseq)->stack_max);
    RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);

    CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
    if (UNLIKELY(calling->heap_argv)) {
        /* ... */
    }
    argc = calling->argc;
    /* ... */
    cfp->sp += - argc - 1;

    return vm_call_bmethod_body(ec, calling, argv);
    RB_DEBUG_COUNTER_INC(ccf_bmethod);

    VALUE procv = cme->def->body.bmethod.proc;
    GetProcPtr(procv, proc);
    const struct rb_block *block = &proc->block;

    while (vm_block_type(block) == block_type_proc) {
        block = vm_proc_block(block->as.proc);
    }
    if (vm_block_type(block) == block_type_iseq) {
        CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
        return vm_call_iseq_bmethod(ec, cfp, calling);
    }

    CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
    return vm_call_noniseq_bmethod(ec, cfp, calling);
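/*
 * bmethods (methods defined with define_method) split into two paths: if the
 * underlying Proc ultimately wraps an iseq block, the body is run directly as
 * a BMETHOD|LAMBDA block frame (vm_call_iseq_bmethod); anything else goes
 * through the generic rb_vm_invoke_bmethod path above.
 */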
rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
{
    VALUE klass = current_class;

    /* for a prepended module, start from the origin class */
    if (RB_TYPE_P(klass, T_ICLASS) && FL_TEST(klass, RICLASS_IS_ORIGIN) &&
        /* ... */) {
        /* ... */
    }

    while (RTEST(klass)) {
        /* ... */
        if (owner == target_owner) {
            /* ... */
        }
        /* ... */
    }

    return current_class;
}
    if (orig_me->defined_class == 0) {
        VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
        VM_ASSERT(RB_TYPE_P(orig_me->owner, T_MODULE));
        cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);

        if (me->def->reference_count == 1) {
            RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
        }
        else {
            /* ... */
            rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
            /* ... */
        }
    }
    /* ... */

    VM_ASSERT(callable_method_entry_p(cme));
    /* ... */

    return aliased_callable_method_entry(me);

    calling->cc = &VM_CC_ON_STACK(Qundef,
                                  /* ... */
                                  aliased_callable_method_entry(vm_cc_cme(calling->cc)));

    return vm_call_method_each_type(ec, cfp, calling);
static enum method_missing_reason
ci_missing_reason(const struct rb_callinfo *ci)
{
    enum method_missing_reason stat = MISSING_NOENTRY;
    if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
    if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
    if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
    return stat;
}
    ASSUME(calling->argc >= 0);

    enum method_missing_reason missing_reason = MISSING_NOENTRY;
    int argc = calling->argc;
    VALUE recv = calling->recv;
    /* ... */
    flags |= VM_CALL_OPT_SEND;

    if (UNLIKELY(! mid)) {
        mid = idMethodMissing;
        missing_reason = ci_missing_reason(ci);
        ec->method_missing_reason = missing_reason;

        if (UNLIKELY(argv_ary = calling->heap_argv)) {
            if (rb_method_basic_definition_p(klass, idMethodMissing)) {
                rb_ary_unshift(argv_ary, symbol);
                /* ... */
                int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
                VALUE exc = rb_make_no_method_exception(/* ... */);
                /* ... */
            }
            rb_ary_unshift(argv_ary, rb_str_intern(symbol));
        }
        else {
            /* ... */
            CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
            /* ... */
            argc = ++calling->argc;

            if (rb_method_basic_definition_p(klass, idMethodMissing)) {
                /* ... */
                int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
                const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
                VALUE exc = rb_make_no_method_exception(/* ... */);
                /* ... */
            }
            TOPN(i) = rb_str_intern(symbol);
        }
    }
    /* ... */
        .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
    /* ... */
    calling->cc = &VM_CC_ON_STACK(klass,
                                  /* ... */
                                  { .method_missing_reason = missing_reason },
                                  rb_callable_method_entry_with_refinements(klass, mid, NULL));

    if (flags & VM_CALL_FCALL) {
        return vm_call_method(ec, reg_cfp, calling);
    }
    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, reg_cfp, calling);
          case METHOD_VISI_PRIVATE:
            vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
            break;
          case METHOD_VISI_PROTECTED:
            vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
            break;
          default:
            VM_UNREACHABLE(vm_call_method);
        }
        return vm_call_method_missing(ec, reg_cfp, calling);
    }

    return vm_call_method_nome(ec, reg_cfp, calling);
    i = calling->argc - 1;

    if (calling->argc == 0) {
        rb_raise(rb_eArgError, "no method name given");
    }
    /* ... */
    return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
    RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
    int flags = VM_CALL_FCALL;
    /* ... */

    CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
    if (UNLIKELY(argv_ary = calling->heap_argv)) {
        sym = rb_ary_shift(argv_ary);
        flags |= VM_CALL_ARGS_SPLAT;
        if (calling->kw_splat) {
            VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
            ((struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
            calling->kw_splat = 0;
        }
        return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
    }
    /* ... */
    if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
    return vm_call_opt_send0(ec, reg_cfp, calling, flags);
    RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
    return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);

    RB_DEBUG_COUNTER_INC(ccf_opt_send);
    /* ... */
    int flags = vm_ci_flag(ci);

    if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
                 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
                  (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
                  ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
        CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
        return vm_call_opt_send_complex(ec, reg_cfp, calling);
    }

    CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
    return vm_call_opt_send_simple(ec, reg_cfp, calling);
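/*
 * Kernel#send specialization: splat/keyword argument shapes whose first
 * element must be extracted and rewritten go through
 * vm_call_opt_send_complex; simple shapes just add FCALL to the original
 * flags and re-dispatch on the now-explicit method name.
 */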
                             const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
{
    RB_DEBUG_COUNTER_INC(ccf_method_missing);

    VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
    unsigned int argc, flag;

    flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
    argc = ++calling->argc;

    /* shift arguments: m(a, b, c) becomes method_missing(:m, a, b, c) */
    CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
    vm_check_canary(ec, reg_cfp->sp);
    /* ... */
    argv[0] = ID2SYM(vm_ci_mid(orig_ci));
    /* ... */
    ec->method_missing_reason = reason;
    /* ... */
        .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
    /* ... */
    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
                                  rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
    return vm_call_method(ec, reg_cfp, calling);

    return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
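/*
 * method_missing is implemented as an ordinary re-dispatch: the original
 * method name is pushed as a leading Symbol argument (argc grows by one) and
 * the call is retargeted at idMethodMissing, with ec->method_missing_reason
 * recording why the original lookup failed.
 */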
    return vm_call_method_nome(ec, cfp, calling);

    if (cme->def->type == VM_METHOD_TYPE_REFINED &&
        cme->def->body.refined.orig_me) {
        cme = refined_method_callable_without_refinement(cme);
    }

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);

    return vm_call_method_each_type(ec, cfp, calling);
find_refinement(VALUE refinements, VALUE klass)
{
    if (NIL_P(refinements)) {
        return Qnil;
    }
    /* ... */
    return rb_hash_lookup(refinements, klass);
}

    if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
        const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;

        do {
            cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
            if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
                /* ... */
            }
        } while (cfp->iseq != local_iseq);
    }
    if (orig_me->defined_class == 0) {
        /* ... */
    }

    VM_ASSERT(callable_method_entry_p(cme));

    if (UNDEFINED_METHOD_ENTRY_P(cme)) {
        /* ... */
    }

    ID mid = vm_ci_mid(calling->cd->ci);
    const rb_cref_t *cref = vm_get_cref(cfp->ep);
    /* ... */

    for (; cref; cref = CREF_NEXT(cref)) {
        const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
        if (NIL_P(refinement)) continue;

        /* ... */ rb_callable_method_entry(refinement, mid);

        if (vm_cc_call(cc) == vm_call_super_method) {
            /* ... */
            if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
                /* ... */
            }
        }

        if (cme->def->type != VM_METHOD_TYPE_REFINED ||
            cme->def != ref_me->def) {
            /* ... */
        }
        if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
            /* ... */
        }
    }

    if (vm_cc_cme(cc)->def->body.refined.orig_me) {
        return refined_method_callable_without_refinement(vm_cc_cme(cc));
    }
    /* ... */

    if (calling->cd->cc) {
        const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
        /* ... */
        return vm_call_method(ec, cfp, calling);
    }
    else {
        struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, ref_cme);
        calling->cc = ref_cc;
        return vm_call_method(ec, cfp, calling);
    }

    return vm_call_method_nome(ec, cfp, calling);
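/*
 * Refinement resolution walks the cref (lexical scope) chain and, for each
 * refinement of the method's owner activated there, checks whether it
 * redefines the called method; super calls additionally guard against
 * re-entering the same refined definition.
 */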
NOINLINE(static VALUE /* ... */);

    int argc = calling->argc;

    /* remove self */
    if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
    /* ... */
    return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);

    RB_DEBUG_COUNTER_INC(ccf_opt_call);
    /* ... */
    VALUE procval = calling->recv;
    return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));

    RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
    VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
    /* ... */

    if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
        return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
    }
    else {
        calling->recv = rb_vm_bh_to_procval(ec, block_handler);
        calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
        return vm_call_general(ec, reg_cfp, calling);
    }
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    return internal_RSTRUCT_GET(recv, off);

    RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);

    VALUE ret = vm_call_opt_struct_aref0(ec, calling);
    /* ... */

    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
    /* ... */

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    internal_RSTRUCT_SET(recv, off, val);

    RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);

    VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
#define VM_CALL_METHOD_ATTR(var, func, nohook) \
    if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
        var = func; \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
    } \
    else { \
        nohook; \
        var = func; \
    }

    switch (vm_cc_cme(cc)->def->body.optimized.type) {
      case OPTIMIZED_METHOD_TYPE_SEND:
        CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
        return vm_call_opt_send(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
        return vm_call_opt_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
        return vm_call_opt_block_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
        CALLER_SETUP_ARG(cfp, calling, ci, 0);
        /* ... */
        VM_CALL_METHOD_ATTR(v,
                            vm_call_opt_struct_aref(ec, cfp, calling),
                            set_vm_cc_ivar(cc); \
                            CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
        /* ... */
      }
      case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
        CALLER_SETUP_ARG(cfp, calling, ci, 1);
        /* ... */
        VM_CALL_METHOD_ATTR(v,
                            vm_call_opt_struct_aset(ec, cfp, calling),
                            set_vm_cc_ivar(cc); \
                            CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
        /* ... */
      }
      default:
        rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
    }
    VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));

    switch (cme->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
        return vm_call_iseq_setup(ec, cfp, calling);

      case VM_METHOD_TYPE_NOTIMPLEMENTED:
      case VM_METHOD_TYPE_CFUNC:
        CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
        return vm_call_cfunc(ec, cfp, calling);

      case VM_METHOD_TYPE_ATTRSET:
        CALLER_SETUP_ARG(cfp, calling, ci, 1);
        /* ... */
        const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG);

        if (vm_cc_markable(cc)) {
            vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        else {
            /* ... an unmarkable call cache is built on the stack: */
                VM_CALLCACHE_UNMARKABLE |
                VM_CALLCACHE_ON_STACK,
                /* ... */
                .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,
            /* ... */
            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        /* ... */

      case VM_METHOD_TYPE_IVAR:
        CALLER_SETUP_ARG(cfp, calling, ci, 0);
        /* ... */
        vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
        const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT);
        VM_CALL_METHOD_ATTR(v,
                            vm_call_ivar(ec, cfp, calling),
                            CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
        /* ... */

      case VM_METHOD_TYPE_MISSING:
        vm_cc_method_missing_reason_set(cc, 0);
        CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
        return vm_call_method_missing(ec, cfp, calling);

      case VM_METHOD_TYPE_BMETHOD:
        CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
        return vm_call_bmethod(ec, cfp, calling);

      case VM_METHOD_TYPE_ALIAS:
        CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
        return vm_call_alias(ec, cfp, calling);

      case VM_METHOD_TYPE_OPTIMIZED:
        return vm_call_optimized(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_UNDEF:
        break;

      case VM_METHOD_TYPE_ZSUPER:
        return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));

      case VM_METHOD_TYPE_REFINED:
        /* ... */
        return vm_call_refined(ec, cfp, calling);
    }

    rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
    const int stat = ci_missing_reason(ci);

    if (vm_ci_mid(ci) == idMethodMissing) {
        if (UNLIKELY(calling->heap_argv)) {
            /* ... */
        }
        else {
            VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
            vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
        }
    }
    else {
        return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
    }

    VALUE defined_class = me->defined_class;
    VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
    return NIL_P(refined_class) ? defined_class : refined_class;
    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PRIVATE:
            if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
                enum method_missing_reason stat = MISSING_PRIVATE;
                if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;

                vm_cc_method_missing_reason_set(cc, stat);
                CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
                return vm_call_method_missing(ec, cfp, calling);
            }
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PROTECTED:
            if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
                VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
                if (/* kind-of check elided in this listing */) {
                    vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
                    return vm_call_method_missing(ec, cfp, calling);
                }
                else {
                    /* caller may call a protected method: use an on-stack cc */
                    VM_ASSERT(vm_cc_cme(cc) != NULL);
                    /* ... */
                    calling->cc = &cc_on_stack;
                    return vm_call_method_each_type(ec, cfp, calling);
                }
            }
            return vm_call_method_each_type(ec, cfp, calling);

          default:
            rb_bug("unreachable");
        }
    }

    return vm_call_method_nome(ec, cfp, calling);
    RB_DEBUG_COUNTER_INC(ccf_general);
    return vm_call_method(ec, reg_cfp, calling);

    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc != vm_cc_empty());
    *(vm_call_handler *)&cc->call_ = vm_call_general;

    RB_DEBUG_COUNTER_INC(ccf_super_method);
    /* ... */

    if (ec == NULL) rb_bug("unreachable");
    /* ... */

    VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
    return vm_call_method(ec, reg_cfp, calling);
vm_search_normal_superclass(VALUE klass)
{
    /* ... */
        klass = RBASIC(klass)->klass;
    /* ... */
    klass = RCLASS_ORIGIN(klass);
    /* ... */
}

NORETURN(static void vm_super_outside(void));

vm_super_outside(void)
{
    /* ... */
}

empty_cc_for_super(void)
{
    return &vm_empty_cc_for_super;
}
    VALUE current_defined_class;
    /* ... */
    current_defined_class = vm_defined_class_for_protected_call(me);

    if (/* ... */
        reg_cfp->iseq != method_entry_iseqptr(me) &&
        /* ... */) {
        /* ... */
            RCLASS_INCLUDER(current_defined_class) : current_defined_class;
        rb_raise(/* ... */,
                 "self has wrong type to call super in this context: "
                 "%"PRIsVALUE" (expected %"PRIsVALUE")",
                 /* ... */);
    }

    if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
        rb_raise(/* ... */,
                 "implicit argument passing of super from method defined"
                 " by define_method() is not supported."
                 " Specify all arguments explicitly.");
    }

    ID mid = me->def->original_id;
    /* ... */
    cd->ci = vm_ci_new_runtime(mid,
                               /* ... */
                               vm_ci_kwarg(cd->ci));
    /* ... */

    VALUE klass = vm_search_normal_superclass(me->defined_class);
    /* ... */
        cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
    /* ... */
        cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
    /* ... */

    if (cached_cme == NULL) {
        /* ... */
        cd->cc = empty_cc_for_super();
    }
    else if (cached_cme->called_id != mid) {
        /* ... */
        cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
        /* ... */
        cd->cc = cc = empty_cc_for_super();
    }
    else {
        switch (cached_cme->def->type) {
          /* ... */
          case VM_METHOD_TYPE_REFINED:
          /* ... */
          case VM_METHOD_TYPE_ATTRSET:
          case VM_METHOD_TYPE_IVAR:
            vm_cc_call_set(cc, vm_call_super_method); /* invalidate fastpath */
            break;
          /* ... */
        }
    }

    VM_ASSERT((vm_cc_cme(cc), true));
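/*
 * super dispatch: the search restarts from the superclass of the frame
 * method's defined class (skipping singleton/origin iclasses via
 * vm_search_normal_superclass).  Zsuper from define_method is rejected
 * because the implicit argument list of the surrounding method is not
 * recoverable there.
 */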
block_proc_is_lambda(const VALUE procval)
{
    rb_proc_t *proc;

    if (procval) {
        GetProcPtr(procval, proc);
        return proc->is_lambda;
    }
    /* ... */
}

                   VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
                   /* ... */)
{
    int is_lambda = FALSE;
    VALUE val, arg, blockarg;
    /* ... */
    const struct vm_ifunc *ifunc = captured->code.ifunc;

    /* ... */
    else if (argc == 0) {
        /* ... */
    }
    /* ... */

    blockarg = rb_vm_bh_to_procval(ec, block_handler);

    frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
    if (/* ... */) {
        frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
    }

    vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
                  frame_flag, /* ... */
                  VM_GUARDED_PREV_EP(captured->ep), /* ... */
                  0, ec->cfp->sp, 0, 0);
    val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
    rb_vm_pop_frame(ec);

    return val;

    return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);

    return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
    CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);

    for (i = 0; i < len && i < ISEQ_BODY(iseq)->param.lead_num; i++) {
        /* ... */
    }
    /* ... */

vm_callee_setup_block_arg_arg0_check(VALUE *argv)
{
    VALUE ary, arg0 = argv[0];
    ary = rb_check_array_type(arg0);
    /* ... */
    VM_ASSERT(argv[0] == arg0);
    return ary;
}
    if (rb_simple_iseq_p(iseq)) {
        /* ... */
        CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);

        if (arg_setup_type == arg_setup_block &&
            calling->argc == 1 &&
            ISEQ_BODY(iseq)->param.flags.has_lead &&
            !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
            !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
            calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
        }

        if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
            if (arg_setup_type == arg_setup_block) {
                if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
                    /* pad missing block parameters with nil */
                    CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
                    for (i = calling->argc; i < ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num;
                }
                else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
                    /* simply truncate extra block arguments */
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num;
                }
            }
            else {
                argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
            }
        }
        /* ... */
    }
    else {
        return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
    }

    calling = &calling_entry;
    calling->argc = argc;
    calling->block_handler = block_handler;
    calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
    /* ... */
    calling->heap_argv = 0;
    struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);

    return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
                     bool is_lambda, VALUE block_handler)
{
    /* ... */
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    const int arg_size = ISEQ_BODY(iseq)->param.size;
    VALUE * const rsp = GET_SP() - calling->argc;
    VALUE * const argv = rsp;
    int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
    /* ... */

    vm_push_frame(ec, iseq,
                  VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
                  /* ... */
                  VM_GUARDED_PREV_EP(captured->ep), 0,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  /* ... */
                  ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
    /* ... */
                       MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
    int flags = vm_ci_flag(ci);

    if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
                 ((calling->argc == 0) ||
                  (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
                  (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
                  ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
        CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
        /* ... */
        if (UNLIKELY(calling->heap_argv)) {
#if VM_ARGC_STACK_MAX < 0
            /* ... */
                rb_raise(rb_eArgError, "no receiver given");
            /* ... */
#endif
            calling->recv = rb_ary_shift(calling->heap_argv);
            /* ... */
            reg_cfp->sp[-1] = reg_cfp->sp[-2];
            reg_cfp->sp[-2] = calling->recv;
            flags |= VM_CALL_ARGS_SPLAT;
        }
        else {
            if (calling->argc < 1) {
                rb_raise(rb_eArgError, "no receiver given");
            }
            calling->recv = TOPN(--calling->argc);
        }
        if (calling->kw_splat) {
            flags |= VM_CALL_KW_SPLAT;
        }
    }
    else {
        if (calling->argc < 1) {
            rb_raise(rb_eArgError, "no receiver given");
        }
        calling->recv = TOPN(--calling->argc);
    }

    return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
                      MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    /* ... */
    CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
    argc = calling->argc;
    val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling),
                              calling->heap_argv ? RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc),
                              calling->kw_splat, calling->block_handler, NULL);
    /* ... */
vm_proc_to_block_handler(VALUE procval)
{
    const struct rb_block *block = vm_proc_block(procval);

    switch (vm_block_type(block)) {
      case block_type_iseq:
        return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
      case block_type_ifunc:
        return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
      case block_type_symbol:
        return VM_BH_FROM_SYMBOL(block->as.symbol);
      case block_type_proc:
        return VM_BH_FROM_PROC(block->as.proc);
    }
    VM_UNREACHABLE(vm_yield_with_proc);
    /* ... */
}

                     bool is_lambda, VALUE block_handler)
{
    while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
        VALUE proc = VM_BH_TO_PROC(block_handler);
        is_lambda = block_proc_is_lambda(proc);
        block_handler = vm_proc_to_block_handler(proc);
    }

    return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
                bool is_lambda, VALUE block_handler)
{
    VALUE (*func)(/* ... */ bool is_lambda, VALUE block_handler);

    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:   func = vm_invoke_iseq_block;   break;
      case block_handler_type_ifunc:  func = vm_invoke_ifunc_block;  break;
      case block_handler_type_proc:   func = vm_invoke_proc_block;   break;
      case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
      default: rb_bug("vm_invoke_block: unreachable");
    }

    return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
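/*
 * Block invocation dispatches on the block handler's tag: iseq blocks run as
 * BLOCK frames, ifunc blocks call into C, Symbol blocks become a method call
 * on the first argument, and Proc handlers are unwrapped first (see
 * vm_invoke_proc_block above).
 */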
vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
{
    /* ... */
        rb_bug("vm_make_proc_with_iseq: unreachable");
    /* ... */
    captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
    captured->code.iseq = blockiseq;

    return rb_vm_make_proc(ec, captured, rb_cProc);
}

vm_once_exec(VALUE iseq)
{
    /* ... */
}

vm_once_clear(VALUE data)
{
    is->once.running_thread = NULL;
    /* ... */
}
    args[0] = obj; args[1] = Qfalse;
    /* ... */
    if (!UNDEF_P(r) && RTEST(r)) {
        /* ... */
    }

    enum defined_type type = (enum defined_type)op_type;

    switch (type) {
      /* ... */
        return rb_gvar_defined(SYM2ID(obj));
      case DEFINED_CVAR: {
        const rb_cref_t *cref = vm_get_cref(GET_EP());
        klass = vm_get_cvar_base(cref, GET_CFP(), 0);
        /* ... */
      }
      /* ... */
      case DEFINED_CONST_FROM: {
        bool allow_nil = type == DEFINED_CONST;
        /* ... */
        return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
      }
      /* ... */
        return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
      case DEFINED_METHOD: {
        /* ... */
        switch (METHOD_ENTRY_VISI(me)) {
          case METHOD_VISI_PRIVATE:
            /* ... */
          case METHOD_VISI_PROTECTED:
            /* ... */
          case METHOD_VISI_PUBLIC:
            /* ... */
          default:
            rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
        }
        /* ... */
        return check_respond_to_missing(obj, v);
      }
      /* ... */
        if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
            /* ... */
        }
      case DEFINED_ZSUPER:
        {
            /* ... */
            VALUE klass = vm_search_normal_superclass(me->defined_class);
            if (!klass) return false;

            ID id = me->def->original_id;
            /* ... */
        }
      /* ... */
        return RTEST(vm_backref_defined(ec, GET_LEP(), FIX2INT(obj)));
      default:
        rb_bug("unimplemented defined? type (VM)");
    }

    return vm_defined(ec, reg_cfp, op_type, obj, v);
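/*
 * vm_defined backs the `defined?` keyword: each defined_type answers whether
 * the operand currently resolves (global/class variable, constant, method,
 * yield, super, backref) without raising, returning a truthy result the
 * instruction turns into the usual descriptive string.
 */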
vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
{
    rb_num_t i;
    const VALUE *ep = reg_ep;
    for (i = 0; i < lv; i++) {
        ep = GET_PREV_EP(ep);
    }
    return ep;
}

vm_get_special_object(const VALUE *const reg_ep,
                      enum vm_special_object_type type)
{
    switch (type) {
      case VM_SPECIAL_OBJECT_VMCORE:
        return rb_mRubyVMFrozenCore;
      case VM_SPECIAL_OBJECT_CBASE:
        return vm_get_cbase(reg_ep);
      case VM_SPECIAL_OBJECT_CONST_BASE:
        return vm_get_const_base(reg_ep);
      default:
        rb_bug("putspecialobject insn: unknown value_type %d", type);
    }
}
    const VALUE ary2 = ary2st;
    VALUE tmp1 = rb_check_to_array(ary1);
    VALUE tmp2 = rb_check_to_array(ary2);
    /* ... */
    tmp1 = rb_ary_dup(ary1);
    /* ... */
    return rb_ary_concat(tmp1, tmp2);

    return vm_concat_array(ary1, ary2st);

    VALUE tmp = rb_check_to_array(ary);
    /* ... */
    else if (RTEST(flag)) {
        return rb_ary_dup(tmp);
    }
    /* ... */

    return vm_splat_array(flag, ary);

    enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;

    if (flag & VM_CHECKMATCH_ARRAY) {
        /* ... */
        for (i = 0; i < n; i++) {
            /* ... */
            VALUE c = check_match(ec, v, target, type);
            /* ... */
        }
        /* ... */
    }
    else {
        return check_match(ec, pattern, target, type);
    }

    return vm_check_match(ec, target, pattern, flag);
vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
{
    const VALUE kw_bits = *(ep - bits);

    if (FIXNUM_P(kw_bits)) {
        unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
        if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
            return Qfalse;
    }
    else {
        VM_ASSERT(RB_TYPE_P(kw_bits, T_HASH));
        /* ... */
    }
    return Qtrue;
}

    if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
        RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
        /* ... */
        RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
        /* ... */
        RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
        /* ... */
        RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
        /* ... */
        RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
    }
vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
{
    /* ... */
    else if (VM_DEFINECLASS_SCOPED_P(flags)) {
        return rb_public_const_get_at(cbase, id);
    }
    /* ... */
}

vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
{
    if (!RB_TYPE_P(klass, T_CLASS)) {
        /* ... */
    }
    else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
        /* ... */
            rb_raise(/* ... */,
                     "superclass mismatch for class %"PRIsVALUE"",
                     /* ... */);
        /* ... */
    }
    /* ... */
}

vm_check_if_module(ID id, VALUE mod)
{
    /* ... */
}

vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
    /* ... */
}

vm_declare_module(ID id, VALUE cbase)
{
    /* ... */
}

NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));

    VALUE name = rb_id2str(id);
    VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
                               /* ... */);
    VALUE location = rb_const_source_location_at(cbase, id);
    if (!NIL_P(location)) {
        rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
                    " previous definition of %"PRIsVALUE" was here",
                    rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
    }
    /* ... */
vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    VALUE klass;

    if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
        rb_raise(/* ... */,
                 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
                 /* ... */);
    }

    vm_check_if_namespace(cbase);
    /* ... */

    if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_class(id, flags, super, klass))
            unmatched_redefinition("class", cbase, id, klass);
        /* ... */
    }
    else {
        return vm_declare_class(id, flags, cbase, super);
    }
}

vm_define_module(ID id, rb_num_t flags, VALUE cbase)
{
    VALUE mod;

    vm_check_if_namespace(cbase);
    if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_module(id, mod))
            unmatched_redefinition("module", cbase, id, mod);
        /* ... */
    }
    else {
        return vm_declare_module(id, cbase);
    }
}

vm_find_or_create_class_by_id(ID id,
                              /* ... */)
{
    rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);

    switch (type) {
      case VM_DEFINECLASS_TYPE_CLASS:
        /* ... */
        return vm_define_class(id, flags, cbase, super);

      case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
        /* ... */

      case VM_DEFINECLASS_TYPE_MODULE:
        /* ... */
        return vm_define_module(id, flags, cbase);

      default:
        rb_bug("unknown defineclass type: %d", (int)type);
    }
}
static rb_method_visibility_t
vm_scope_visibility_get(/* ... */)
{
    /* ... */
    if (!vm_env_cref_by_cref(cfp->ep)) {
        return METHOD_VISI_PUBLIC;
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
    }
}

    if (!vm_env_cref_by_cref(cfp->ep)) {
        /* ... */
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
    }

    rb_method_visibility_t visi;
    /* ... */
        visi = METHOD_VISI_PUBLIC;
    /* ... */
    else {
        klass = CREF_CLASS_FOR_DEFINITION(cref);
        visi = vm_scope_visibility_get(ec);
    }
    /* ... */

    rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);

    if (id == idInitialize && klass != rb_cObject && RB_TYPE_P(klass, T_CLASS) &&
        (rb_get_alloc_func(klass) == rb_class_allocate_instance)) {
        RCLASS_EXT(klass)->max_iv_count = rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval);
    }

    if (!is_singleton && vm_scope_module_func_check(ec)) {
        /* ... */
        rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
    }

    VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());

    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        rb_vm_localjump_error("no block given (yield)", Qnil, 0);
    }

    return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
enum method_explorer_type {
    /* ... */
    mexp_search_invokeblock,
    /* ... */
};

          VALUE block_handler,
          enum method_explorer_type method_explorer)
{
    /* ... */
    int argc = vm_ci_argc(ci);
    VALUE recv = TOPN(argc);
    struct rb_calling_info calling = {
        .block_handler = block_handler,
        .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
        /* ... */
    };

    switch (method_explorer) {
      case mexp_search_method:
        calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_super:
        calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_invokeblock:
        val = vm_invokeblock_i(ec, GET_CFP(), &calling);
        break;
    }
    return val;
}

    VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, false);
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    /* ... */

    VALUE bh = VM_BLOCK_HANDLER_NONE;
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    /* ... */

    VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, true);
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
    /* ... */

    VALUE bh = VM_BLOCK_HANDLER_NONE;
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
    /* ... */
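/*
 * vm_sendish is the shared tail of the send-like instructions; the four
 * fragments above are presumably the bodies of send, opt_send_without_block,
 * invokesuper and invokeblock, differing only in how the block handler and
 * method explorer are chosen.
 */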
    if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
        /* ... */
        val = rb_mod_to_s(recv);
        /* ... */
    }

    if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
        return rb_nil_to_s(recv);
    }

    if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
        return rb_true_to_s(recv);
    }

    if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
        return rb_false_to_s(recv);
    }

    if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
        return rb_fix_to_s(recv);
    }

vm_opt_str_freeze(VALUE str, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    /* ... */
}

    if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
        /* ... */
        VALUE result = *ptr;
        rb_snum_t i = num - 1;
        while (i-- > 0) {
            const VALUE v = *++ptr;
            if (OPTIMIZED_CMP(v, result) > 0) {
                result = v;
            }
        }
        /* ... */
    }
    /* ... */

    return vm_opt_newarray_max(ec, num, ptr);

    if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
        /* ... */
        VALUE result = *ptr;
        rb_snum_t i = num - 1;
        while (i-- > 0) {
            const VALUE v = *++ptr;
            if (OPTIMIZED_CMP(v, result) < 0) {
                result = v;
            }
        }
        /* ... */
    }
    /* ... */

    return vm_opt_newarray_min(ec, num, ptr);

    if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_hash_values(num, ptr);
    }
    /* ... */

    return vm_opt_newarray_hash(ec, num, ptr);
#define IMEMO_CONST_CACHE_SHAREABLE IMEMO_FL_USER0

vm_track_constant_cache(ID id, void *ic)
{
    struct rb_id_table *const_cache = GET_VM()->constant_cache;
    VALUE lookup_result;
    /* ... */

    if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
        /* ... */
    }
    else {
        ics = st_init_numtable();
        rb_id_table_insert(const_cache, id, (VALUE)ics);
    }

    st_insert(ics, (st_data_t)ic, (st_data_t)Qtrue);
}

    for (int i = 0; segments[i]; i++) {
        ID id = segments[i];
        if (id == idNULL) continue;
        vm_track_constant_cache(id, ic);
    }
    if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
        VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));

        return (ic_cref == NULL ||
                ic_cref == vm_get_cref(reg_ep));
    }
    /* ... */

    VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
    return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);

rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
{
    return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
}

    if (ruby_vm_const_missing_count > 0) {
        ruby_vm_const_missing_count = 0;
        /* ... */
    }
    /* ... */
    ice->ic_cref = vm_get_const_key_cref(reg_ep);
    /* ... */
    unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
    rb_yjit_constant_ic_update(iseq, ic, pos);
    rb_rjit_constant_ic_update(iseq, ic, pos);

    if (ice && vm_ic_hit_p(ice, GET_EP())) {
        /* ... */
        VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
    }
    else {
        ruby_vm_constant_cache_misses++;
        val = vm_get_ev_const_chain(ec, segments);
        vm_ic_track_const_chain(GET_CFP(), ic, segments);
        /* ... */
        vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
    }

    if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
        return is->once.value;
    }
    else if (is->once.running_thread == NULL) {
        /* ... */
        is->once.running_thread = th;
        /* ... */
        is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
        /* ... */
    }
    else if (is->once.running_thread == th) {
        /* recursive once */
        return vm_once_exec((VALUE)iseq);
    }
    else {
        /* waiting for another thread to finish */
        RUBY_VM_CHECK_INTS(ec);
        /* ... */
    }
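/*
 * Inline constant caches (IC) memoize the resolved VALUE together with the
 * cref they were filled under; a hit requires an unchanged cref and, off the
 * main Ractor, a shareable cached value.  vm_ic_track_const_chain registers
 * the IC against every ID in the constant path so that redefining any
 * segment can invalidate it.
 */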
vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
{
    switch (OBJ_BUILTIN_TYPE(key)) {
      /* ... */
        if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
                                   SYMBOL_REDEFINED_OP_FLAG |
                                   INTEGER_REDEFINED_OP_FLAG |
                                   FLOAT_REDEFINED_OP_FLAG |
                                   NIL_REDEFINED_OP_FLAG |
                                   TRUE_REDEFINED_OP_FLAG |
                                   FALSE_REDEFINED_OP_FLAG |
                                   STRING_REDEFINED_OP_FLAG)) {
            /* ... */
            if (!isinf(kval) && modf(kval, &kval) == 0.0) {
                /* ... */
            }
            /* ... */
            if (rb_hash_stlike_lookup(hash, key, &val)) {
                /* ... */
            }
            /* ... */
        }
    }
    /* ... */
}

    const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
    const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
    static const char stack_consistency_error[] =
        "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
#if defined RUBY_DEVEL
    VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
    /* ... */
#else
    rb_bug(stack_consistency_error, nsp, nbp);
#endif
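/*
 * case/when dispatch via CDHASH is only valid while === is unredefined for
 * all literal key classes; integral Floats are normalized (via modf above) so
 * `when 1` also catches 1.0.
 */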
    /* BOP_PLUS */
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_plus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
        return rb_str_opt_plus(recv, obj);
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_plus(recv, obj);
    }
    /* ... */

    /* BOP_MINUS */
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_minus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    /* ... */

    /* BOP_MULT */
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_mul_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    /* ... */

    /* BOP_DIV */
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    /* ... */

    /* BOP_MOD */
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    /* ... */

    /* BOP_NEQ, in terms of the == implementation */
    if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
        VALUE val = opt_equality(iseq, recv, obj, cd_eq);

        if (!UNDEF_P(val)) {
            return RBOOL(!RTEST(val));
        }
    }
    /* ... */

    /* BOP_LT */
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    /* ... */

    /* BOP_LE */
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    /* ... */

    /* BOP_GT */
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    /* ... */

    /* BOP_GE */
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    /* ... */

    /* BOP_LTLT */
    if (/* ... */
        BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
        if (LIKELY(RB_TYPE_P(obj, T_STRING))) {
            return rb_str_buf_append(recv, obj);
        }
        /* ... */
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_push(recv, obj);
    }
    /* ... */

    /* BOP_AND */
    if (/* ... */
        BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    /* ... */

    /* BOP_OR */
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    /* ... */

    /* BOP_AREF */
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_aref(recv, obj);
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
        if (FIXNUM_P(obj)) {
            return rb_ary_entry_internal(recv, FIX2LONG(obj));
        }
        else {
            return rb_ary_aref1(recv, obj);
        }
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
        return rb_hash_aref(recv, obj);
    }
    /* ... */

    /* BOP_ASET */
    if (/* ... */
        BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
        /* ... */) {
        rb_ary_store(recv, FIX2LONG(obj), set);
        /* ... */
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
        rb_hash_aset(recv, obj, set);
        /* ... */
    }
    /* ... */

    /* BOP_AREF with a literal string key (see vm_opt_aref_with) */
    if (/* ... */
        BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
        rb_hash_compare_by_id_p(recv) == Qfalse &&
        !FL_TEST(recv, RHASH_PROC_DEFAULT)) {
        return rb_hash_aref(recv, key);
    }
    /* ... */

    return vm_opt_aref_with(recv, key);

    /* BOP_ASET with a literal string key */
    if (/* ... */
        BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
        rb_hash_compare_by_id_p(recv) == Qfalse) {
        return rb_hash_aset(recv, key, val);
    }
    /* ... */
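/*
 * Pattern of the vm_opt_* fast paths above: verify operand types, check the
 * relevant BOP_* redefinition flag, then run the C implementation directly;
 * returning Qundef tells the interpreter to fall back to a normal method
 * call.
 */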
vm_opt_length(VALUE recv, int bop)
{
    /* ... */
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
        if (bop == BOP_EMPTY_P) {
            return LONG2NUM(RSTRING_LEN(recv));
        }
        /* ... */
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    /* ... */
}

vm_opt_empty_p(VALUE recv)
{
    switch (vm_opt_length(recv, BOP_EMPTY_P)) {
      /* ... */
    }
}

    /* BOP_NIL_P */
    if (/* ... */
        BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
        /* ... */
    }

    /* fix_succ: overflow case of Integer#succ on the maximum Fixnum */
    /* ... */
      case RSHIFT(~0UL, 1):
        /* ... */
        return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
    /* ... */

vm_opt_succ(VALUE recv)
{
    if (/* ... */
        BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
        return fix_succ(recv);
    }
    /* ... */
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    /* ... */
}

    /* BOP_NOT */
    if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
        return RBOOL(!RTEST(recv));
    }
    /* ... */

    /* BOP_MATCH */
    if (/* ... */
        BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    else if (/* ... */
             BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
        /* ... */
    }
    VALUE self = GET_SELF();
    /* ... */
    VM_ASSERT(rb_popcount64((uint64_t)event) == 1);

    if (event & global_hooks->events) {
        /* ... */
        vm_dtrace(event, ec);
        rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0, val, 0);
        /* ... */
    }

    /* ... */
    if (local_hooks != NULL) {
        if (event & local_hooks->events) {
            /* ... */
            rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0, val, 0);
        }
    }

        return check_cfunc(vm_cc_cme(cc), rb_obj_equal);
      case BIN(opt_nil_p):
        return check_cfunc(vm_cc_cme(cc), rb_false);
      /* ... */
        return check_cfunc(vm_cc_cme(cc), rb_obj_not);

#define VM_TRACE_HOOK(target_event, val) do { \
    if ((pc_events & (target_event)) & enabled_flags) { \
        vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
    } \
} while (0)

    VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
    VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
    return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
    const VALUE *pc = reg_cfp->pc;
    rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
    /* ... */

    if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
        return;
    }
    else {
        /* ... */
        size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
        /* ... */
        rb_hook_list_t *const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
        rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
        /* ... */
        const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
        enabled_flags |= iseq_local_events;

        VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);

        if (bmethod_frame) {
            /* ... */
            VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
            bmethod_local_hooks = me->def->body.bmethod.hooks;
            bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
            if (bmethod_local_hooks) {
                bmethod_local_events = bmethod_local_hooks->events;
            }
        }

        if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
            /* ... */
            rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
            /* ... */
        }
        else if (ec->trace_arg != NULL) {
            /* already tracing */
            return;
        }
        else {
            rb_event_flag_t bmethod_events = global_events | bmethod_local_events;

            if (0) {
                ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
                                  /* ... */
                                  RSTRING_PTR(rb_iseq_path(iseq)),
                                  (int)rb_iseq_line_no(iseq, pos),
                                  RSTRING_PTR(rb_iseq_label(iseq)));
            }
            VM_ASSERT(reg_cfp->pc == pc);
            VM_ASSERT(pc_events != 0);

            /* ... */
            VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
            /* ... */
        }
    }
#if VM_CHECK_MODE > 0
NORETURN( NOINLINE( COLDFUNC
void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));

Init_vm_stack_canary(void)
{
    /* ... */
    int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
    vm_stack_canary |= 0x01; /* keep it an odd (Fixnum-valid) VALUE */
    /* ... */
    vm_stack_canary_was_born = true;
    /* ... */
}

rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
{
    /* ... */
    const char *insn = rb_insns_name(i);
    /* ... */

    rb_bug("dead canary found at %s: %s", insn, str);
}

#else
void Init_vm_stack_canary(void) { }
#endif
    return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);

    return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);

    return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);

    return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);

    return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);

    return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);

    return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);

    return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);

    return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);

    typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self,
                                          VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5,
                                          VALUE v6, VALUE v7, VALUE v8, VALUE v9);
    return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);

    typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5,
                                           VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
    return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);

    typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6,
                                           VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
    return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);

    typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6,
                                           VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
    return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);

    typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7,
                                           VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
    return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);

    typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7,
                                           VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
    return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);

    typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8,
                                           VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
    return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);

static builtin_invoker
lookup_builtin_invoker(int argc)
{
    static const builtin_invoker invokers[] = {
        /* ... (one invoker per arity; entries elided in this listing) */
    };

    return invokers[argc];
}
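/*
 * Builtin (Primitive) calls are dispatched through this arity-indexed invoker
 * table, mirroring the cfunc invokers earlier in the file but using an
 * (ec, self, args...) calling convention instead of receiver-plus-argv.
 */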
    const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF;
    SETUP_CANARY(canary_p);
    VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, (rb_insn_func_t)bf->func_ptr);
    CHECK_CANARY(canary_p, BIN(invokebuiltin));
    return ret;

    return invoke_bf(ec, cfp, bf, argv);

    if (0) { /* debug print */
        fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
        for (int i = 0; i < bf->argc; i++) {
            ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
        }
        ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc, bf->func_ptr);
    }

    if (bf->argc == 0) {
        return invoke_bf(ec, cfp, bf, NULL);
    }
    else {
        const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
        return invoke_bf(ec, cfp, bf, argv);
    }

    return cfp->ep[index];
Hash class.
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
VALUE rb_cBasicObject
BasicObject class.
VALUE rb_cModule
Module class.
VALUE rb_class_real(VALUE klass)
Finds a "real" class.
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
VALUE rb_cFloat
Float class.
VALUE rb_cProc
Proc class.
VALUE rb_cString
String class.
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
#define rb_check_frozen
Just another name of rb_check_frozen
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
#define rb_check_frozen_internal(obj)
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
VALUE rb_reg_last_match(VALUE md)
This just returns the argument, stringified.
VALUE rb_reg_match(VALUE re, VALUE str)
This is the match operator.
VALUE rb_reg_nth_match(int n, VALUE md)
Queries the nth captured substring.
VALUE rb_reg_match_post(VALUE md)
The portion of the original string after the given match.
VALUE rb_reg_nth_defined(int n, VALUE md)
Identical to rb_reg_nth_match(), except it just returns Boolean.
VALUE rb_reg_match_pre(VALUE md)
The portion of the original string before the given match.
VALUE rb_reg_match_last(VALUE md)
The portion of the original string that captured at the very last.
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
VALUE rb_sym_to_s(VALUE sym)
This is an rb_sym2str() + rb_str_dup() combo.
VALUE rb_str_succ(VALUE orig)
Searches for the "successor" of a string.
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
#define rb_str_cat_cstr(buf, str)
Identical to rb_str_cat(), except it assumes the passed pointer is a pointer to a C string.
VALUE rb_str_length(VALUE)
Identical to rb_str_strlen(), except it returns the value in rb_cInteger.
void rb_thread_schedule(void)
Tries to switch to another thread.
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
void rb_cvar_set(VALUE klass, ID name, VALUE val)
Assigns a value to a class variable.
VALUE rb_cvar_find(VALUE klass, ID name, VALUE *front)
Identical to rb_cvar_get(), except it takes additional "front" pointer.
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
void rb_const_set(VALUE space, ID name, VALUE val)
Names a constant.
VALUE rb_autoload_load(VALUE space, ID name)
Kicks the autoload procedure as if it was "touched".
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
void rb_set_class_path_string(VALUE klass, VALUE space, VALUE name)
Identical to rb_set_class_path(), except it accepts the name as Ruby's string instead of C's.
VALUE rb_ivar_defined(VALUE obj, ID name)
Queries if the instance variable is defined at the object.
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
VALUE rb_cvar_defined(VALUE klass, ID name)
Queries if the given class has the given class variable.
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
int rb_const_defined(VALUE space, ID name)
Queries if the constant is defined at the namespace.
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
VALUE rb_sym2str(VALUE id)
Identical to rb_id2str(), except it takes an instance of rb_cSymbol rather than an ID.
int off
Offset inside of ptr.
int len
Length of the buffer.
static bool rb_ractor_shareable_p(VALUE obj)
Queries if multiple Ractors can share the passed object or not.
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
#define ALLOCA_N(type, n)
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
#define MEMMOVE(p1, p2, type, n)
Handy macro to call memmove.
VALUE type(ANYARGS)
ANYARGS-ed function type.
VALUE rb_ensure(type *q, VALUE w, type *e, VALUE r)
An equivalent of ensure clause.
#define RARRAY_LEN
Just another name of rb_array_len
static int RARRAY_LENINT(VALUE ary)
Identical to rb_array_len(), except it differs for the return type.
static VALUE * RARRAY_PTR(VALUE ary)
Wild use of a C pointer.
#define RARRAY_AREF(a, i)
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
#define RBASIC(obj)
Convenient casting macro.
#define RCLASS_SUPER
Just another name of rb_class_get_superclass
#define RHASH_SIZE(h)
Queries the size of the hash.
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
static VALUE * ROBJECT_IVPTR(VALUE obj)
Queries the instance variables.
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks for the contents for viability as a C stri...
#define RB_NO_KEYWORDS
Do not pass keywords.
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
const ID * segments
A null-terminated list of ids, used to represent a constant's path idNULL is used to represent the ::...
rb_cref_t * cref
class reference, should be marked
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
IFUNC (Internal FUNCtion)
const VALUE cref_or_me
class reference or rb_method_entry_t
intptr_t SIGNED_VALUE
A signed integer type that has the same width with VALUE.
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
uintptr_t VALUE
Type that represents a Ruby object.
static enum ruby_value_type RB_BUILTIN_TYPE(VALUE obj)
Queries the type of the object.
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.