11#include "ruby/internal/config.h"
15#ifdef HAVE_STDATOMIC_H
16 #include <stdatomic.h>
20#include "debug_counter.h"
22#include "internal/class.h"
23#include "internal/compar.h"
24#include "internal/hash.h"
25#include "internal/numeric.h"
26#include "internal/proc.h"
27#include "internal/random.h"
28#include "internal/variable.h"
29#include "internal/struct.h"
34#include "insns_info.inc"
36extern rb_method_definition_t *rb_method_definition_create(rb_method_type_t
type,
ID mid);
37extern void rb_method_definition_set(
const rb_method_entry_t *me, rb_method_definition_t *def,
void *opts);
38extern int rb_method_definition_eq(
const rb_method_definition_t *d1,
const rb_method_definition_t *d2);
40 int argc,
const VALUE *argv,
int priv);
47static rb_control_frame_t *vm_get_ruby_level_caller_cfp(
const rb_execution_context_t *ec,
const rb_control_frame_t *cfp);
50ruby_vm_special_exception_copy(
VALUE exc)
53 rb_obj_copy_ivar(e, exc);
57NORETURN(
static void ec_stack_overflow(rb_execution_context_t *ec,
int));
59ec_stack_overflow(rb_execution_context_t *ec,
int setup)
61 VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
62 ec->raised_flag = RAISED_STACKOVERFLOW;
64 VALUE at = rb_ec_backtrace_object(ec);
65 mesg = ruby_vm_special_exception_copy(mesg);
70 EC_JUMP_TAG(ec, TAG_RAISE);
73NORETURN(
static void vm_stackoverflow(
void));
78 ec_stack_overflow(GET_EC(), TRUE);
81NORETURN(
void rb_ec_stack_overflow(rb_execution_context_t *ec,
int crit));
83rb_ec_stack_overflow(rb_execution_context_t *ec,
int crit)
86 rb_bug(
"system stack overflow during GC. Faulty native extension?");
89 ec->raised_flag = RAISED_STACKOVERFLOW;
90 ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
91 EC_JUMP_TAG(ec, TAG_RAISE);
94 ec_stack_overflow(ec, TRUE);
96 ec_stack_overflow(ec, FALSE);
100static inline void stack_check(rb_execution_context_t *ec);
104callable_class_p(
VALUE klass)
106#if VM_CHECK_MODE >= 2
107 if (!klass)
return FALSE;
129callable_method_entry_p(
const rb_callable_method_entry_t *cme)
135 VM_ASSERT(IMEMO_TYPE_P((
VALUE)cme, imemo_ment));
137 if (callable_class_p(cme->defined_class)) {
147vm_check_frame_detail(
VALUE type,
int req_block,
int req_me,
int req_cref,
VALUE specval,
VALUE cref_or_me,
int is_cframe,
const rb_iseq_t *iseq)
149 unsigned int magic = (
unsigned int)(
type & VM_FRAME_MAGIC_MASK);
150 enum imemo_type cref_or_me_type = imemo_env;
152 if (RB_TYPE_P(cref_or_me,
T_IMEMO)) {
153 cref_or_me_type = imemo_type(cref_or_me);
155 if (
type & VM_FRAME_FLAG_BMETHOD) {
159 if (req_block && (
type & VM_ENV_FLAG_LOCAL) == 0) {
160 rb_bug(
"vm_push_frame: specval (%p) should be a block_ptr on %x frame", (
void *)specval, magic);
162 if (!req_block && (
type & VM_ENV_FLAG_LOCAL) != 0) {
163 rb_bug(
"vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (
void *)specval, magic);
167 if (cref_or_me_type != imemo_ment) {
168 rb_bug(
"vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
172 if (req_cref && cref_or_me_type != imemo_cref) {
173 rb_bug(
"vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
176 if (cref_or_me !=
Qfalse && cref_or_me_type != imemo_cref) {
177 if (((
type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
181 rb_bug(
"vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
187 if (cref_or_me_type == imemo_ment) {
188 const rb_callable_method_entry_t *me = (
const rb_callable_method_entry_t *)cref_or_me;
190 if (!callable_method_entry_p(me)) {
191 rb_bug(
"vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
195 if ((
type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
196 VM_ASSERT(iseq == NULL ||
198 RUBY_VM_NORMAL_ISEQ_P(iseq)
202 VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
210 const rb_iseq_t *iseq)
212 VALUE given_magic =
type & VM_FRAME_MAGIC_MASK;
215#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
217 vm_check_frame_detail(type, req_block, req_me, req_cref, \
218 specval, cref_or_me, is_cframe, iseq); \
220 switch (given_magic) {
222 CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
223 CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
224 CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
225 CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
226 CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
227 CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
228 CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
229 CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
230 CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);
232 rb_bug(
"vm_push_frame: unknown type (%x)", (
unsigned int)given_magic);
237static VALUE vm_stack_canary;
238static bool vm_stack_canary_was_born =
false;
241rb_vm_check_canary(
const rb_execution_context_t *ec,
VALUE *sp)
246 if (! LIKELY(vm_stack_canary_was_born)) {
249 else if ((
VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
253 else if (! (iseq = GET_ISEQ())) {
256 else if (LIKELY(sp[0] != vm_stack_canary)) {
265 const VALUE *orig = rb_iseq_original_iseq(iseq);
266 const VALUE *encoded = ISEQ_BODY(iseq)->iseq_encoded;
267 const ptrdiff_t pos = GET_PC() - encoded;
268 const enum ruby_vminsn_type insn = (
enum ruby_vminsn_type)orig[pos];
269 const char *name = insn_name(insn);
270 const VALUE iseqw = rb_iseqw_new(iseq);
272 const char *stri = rb_str_to_cstr(inspection);
273 const VALUE disasm = rb_iseq_disasm(iseq);
274 const char *strd = rb_str_to_cstr(disasm);
280 "We are killing the stack canary set by %s, "
281 "at %s@pc=%"PRIdPTR
"\n"
282 "watch out the C stack trace.\n"
284 name, stri, pos, strd);
285 rb_bug(
"see above.");
287#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
290#define vm_check_canary(ec, sp)
291#define vm_check_frame(a, b, c, d)
296vm_push_frame_debug_counter_inc(
303 RB_DEBUG_COUNTER_INC(frame_push);
305 if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
306 const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
307 const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
310 RB_DEBUG_COUNTER_INC(frame_R2R);
313 RB_DEBUG_COUNTER_INC(frame_R2C);
318 RB_DEBUG_COUNTER_INC(frame_C2R);
321 RB_DEBUG_COUNTER_INC(frame_C2C);
326 switch (
type & VM_FRAME_MAGIC_MASK) {
327 case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method);
return;
328 case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block);
return;
329 case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class);
return;
330 case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top);
return;
331 case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc);
return;
332 case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc);
return;
333 case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval);
return;
334 case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue);
return;
335 case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy);
return;
338 rb_bug(
"unreachable");
341#define vm_push_frame_debug_counter_inc(ec, cfp, t)
344STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
345STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
346STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
349vm_push_frame(rb_execution_context_t *ec,
350 const rb_iseq_t *iseq,
360 rb_control_frame_t *
const cfp = RUBY_VM_NEXT_CONTROL_FRAME(ec->cfp);
362 vm_check_frame(
type, specval, cref_or_me, iseq);
363 VM_ASSERT(local_size >= 0);
366 CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
367 vm_check_canary(ec, sp);
372 for (
int i=0; i < local_size; i++) {
399 #if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
400 atomic_signal_fence(memory_order_seq_cst);
408 vm_push_frame_debug_counter_inc(ec, cfp,
type);
412rb_vm_pop_frame_no_int(rb_execution_context_t *ec)
414 rb_control_frame_t *cfp = ec->cfp;
416 if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
417 if (VMDEBUG == 2) SDR();
419 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
424vm_pop_frame(rb_execution_context_t *ec, rb_control_frame_t *cfp,
const VALUE *ep)
426 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
428 if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
429 if (VMDEBUG == 2) SDR();
431 RUBY_VM_CHECK_INTS(ec);
432 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
434 return flags & VM_FRAME_FLAG_FINISH;
438rb_vm_pop_frame(rb_execution_context_t *ec)
440 vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
445rb_vm_push_frame_fname(rb_execution_context_t *ec,
VALUE fname)
447 VALUE tmpbuf = rb_imemo_tmpbuf_auto_free_pointer();
449 rb_imemo_tmpbuf_set_ptr(tmpbuf, ptr);
453 dmy_iseq->body = dmy_body;
454 dmy_body->type = ISEQ_TYPE_TOP;
455 dmy_body->location.pathobj = fname;
459 VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
461 VM_BLOCK_HANDLER_NONE,
473rb_arity_error_new(
int argc,
int min,
int max)
475 VALUE err_mess = rb_sprintf(
"wrong number of arguments (given %d, expected %d", argc, min);
483 rb_str_catf(err_mess,
"..%d", max);
/* Raise an ArgumentError describing an arity mismatch.
 *
 * argc: number of arguments actually given.
 * min/max: accepted range (see rb_arity_error_new for the message forms).
 * Never returns. */
void
rb_error_arity(int argc, int min, int max)
{
    rb_exc_raise(rb_arity_error_new(argc, min, max));
}
497NOINLINE(
static void vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v));
500vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v)
503 rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
504 VM_FORCE_WRITE(&ep[index], v);
505 VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
506 RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
510vm_env_write(
const VALUE *ep,
int index,
VALUE v)
512 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
513 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
514 VM_STACK_ENV_WRITE(ep, index, v);
517 vm_env_write_slowpath(ep, index, v);
522rb_vm_env_write(
const VALUE *ep,
int index,
VALUE v)
524 vm_env_write(ep, index, v);
528rb_vm_bh_to_procval(
const rb_execution_context_t *ec,
VALUE block_handler)
530 if (block_handler == VM_BLOCK_HANDLER_NONE) {
534 switch (vm_block_handler_type(block_handler)) {
535 case block_handler_type_iseq:
536 case block_handler_type_ifunc:
537 return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler),
rb_cProc);
538 case block_handler_type_symbol:
539 return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
540 case block_handler_type_proc:
541 return VM_BH_TO_PROC(block_handler);
543 VM_UNREACHABLE(rb_vm_bh_to_procval);
552vm_svar_valid_p(
VALUE svar)
555 switch (imemo_type(svar)) {
564 rb_bug(
"vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
570lep_svar(
const rb_execution_context_t *ec,
const VALUE *lep)
574 if (lep && (ec == NULL || ec->root_lep != lep)) {
575 svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
578 svar = ec->root_svar;
581 VM_ASSERT(svar ==
Qfalse || vm_svar_valid_p(svar));
587lep_svar_write(
const rb_execution_context_t *ec,
const VALUE *lep,
const struct vm_svar *svar)
589 VM_ASSERT(vm_svar_valid_p((
VALUE)svar));
591 if (lep && (ec == NULL || ec->root_lep != lep)) {
592 vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (
VALUE)svar);
595 RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
600lep_svar_get(
const rb_execution_context_t *ec,
const VALUE *lep, rb_num_t key)
602 const struct vm_svar *svar = lep_svar(ec, lep);
607 case VM_SVAR_LASTLINE:
608 return svar->lastline;
609 case VM_SVAR_BACKREF:
610 return svar->backref;
612 const VALUE ary = svar->others;
618 return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
631lep_svar_set(
const rb_execution_context_t *ec,
const VALUE *lep, rb_num_t key,
VALUE val)
633 struct vm_svar *svar = lep_svar(ec, lep);
636 lep_svar_write(ec, lep, svar = svar_new((
VALUE)svar));
640 case VM_SVAR_LASTLINE:
643 case VM_SVAR_BACKREF:
647 VALUE ary = svar->others;
652 rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
658vm_getspecial(
const rb_execution_context_t *ec,
const VALUE *lep, rb_num_t key, rb_num_t
type)
663 val = lep_svar_get(ec, lep, key);
666 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
683 rb_bug(
"unexpected back-ref");
694vm_backref_defined(
const rb_execution_context_t *ec,
const VALUE *lep, rb_num_t
type)
696 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
706 return rb_reg_last_defined(backref);
708 rb_bug(
"unexpected back-ref");
712 nth = (int)(
type >> 1);
717PUREFUNC(
static rb_callable_method_entry_t *check_method_entry(
VALUE obj,
int can_be_svar));
718static rb_callable_method_entry_t *
719check_method_entry(
VALUE obj,
int can_be_svar)
721 if (obj ==
Qfalse)
return NULL;
724 if (!RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_method_entry: unknown type: %s", rb_obj_info(obj));
727 switch (imemo_type(obj)) {
729 return (rb_callable_method_entry_t *)obj;
738 rb_bug(
"check_method_entry: svar should not be there:");
744const rb_callable_method_entry_t *
745rb_vm_frame_method_entry(
const rb_control_frame_t *cfp)
747 const VALUE *ep = cfp->ep;
748 rb_callable_method_entry_t *me;
750 while (!VM_ENV_LOCAL_P(ep)) {
751 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return me;
752 ep = VM_ENV_PREV_EP(ep);
755 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
758static const rb_iseq_t *
759method_entry_iseqptr(
const rb_callable_method_entry_t *me)
761 switch (me->def->type) {
762 case VM_METHOD_TYPE_ISEQ:
763 return me->def->body.iseq.
iseqptr;
770method_entry_cref(
const rb_callable_method_entry_t *me)
772 switch (me->def->type) {
773 case VM_METHOD_TYPE_ISEQ:
774 return me->def->body.iseq.
cref;
780#if VM_CHECK_MODE == 0
781PUREFUNC(
static rb_cref_t *check_cref(
VALUE,
int));
784check_cref(
VALUE obj,
int can_be_svar)
786 if (obj ==
Qfalse)
return NULL;
789 if (!RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_cref: unknown type: %s", rb_obj_info(obj));
792 switch (imemo_type(obj)) {
794 return method_entry_cref((rb_callable_method_entry_t *)obj);
796 return (rb_cref_t *)obj;
803 rb_bug(
"check_method_entry: svar should not be there:");
809static inline rb_cref_t *
810vm_env_cref(
const VALUE *ep)
814 while (!VM_ENV_LOCAL_P(ep)) {
815 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return cref;
816 ep = VM_ENV_PREV_EP(ep);
819 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
823is_cref(
const VALUE v,
int can_be_svar)
826 switch (imemo_type(v)) {
839vm_env_cref_by_cref(
const VALUE *ep)
841 while (!VM_ENV_LOCAL_P(ep)) {
842 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE))
return TRUE;
843 ep = VM_ENV_PREV_EP(ep);
845 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
849cref_replace_with_duplicated_cref_each_frame(
const VALUE *vptr,
int can_be_svar,
VALUE parent)
851 const VALUE v = *vptr;
852 rb_cref_t *cref, *new_cref;
855 switch (imemo_type(v)) {
857 cref = (rb_cref_t *)v;
858 new_cref = vm_cref_dup(cref);
863 VM_FORCE_WRITE(vptr, (
VALUE)new_cref);
865 return (rb_cref_t *)new_cref;
868 return cref_replace_with_duplicated_cref_each_frame(&((
struct vm_svar *)v)->
cref_or_me, FALSE, v);
872 rb_bug(
"cref_replace_with_duplicated_cref_each_frame: unreachable");
881vm_cref_replace_with_duplicated_cref(
const VALUE *ep)
883 if (vm_env_cref_by_cref(ep)) {
887 while (!VM_ENV_LOCAL_P(ep)) {
888 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
889 if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
892 ep = VM_ENV_PREV_EP(ep);
894 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
895 return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
898 rb_bug(
"vm_cref_dup: unreachable");
903vm_get_cref(
const VALUE *ep)
905 rb_cref_t *cref = vm_env_cref(ep);
911 rb_bug(
"vm_get_cref: unreachable");
916rb_vm_get_cref(
const VALUE *ep)
918 return vm_get_cref(ep);
922vm_ec_cref(
const rb_execution_context_t *ec)
924 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
929 return vm_get_cref(cfp->ep);
932static const rb_cref_t *
933vm_get_const_key_cref(
const VALUE *ep)
935 const rb_cref_t *cref = vm_get_cref(ep);
936 const rb_cref_t *key_cref = cref;
940 RCLASS_EXT(CREF_CLASS(cref))->cloned) {
943 cref = CREF_NEXT(cref);
951rb_vm_rewrite_cref(rb_cref_t *cref,
VALUE old_klass,
VALUE new_klass, rb_cref_t **new_cref_ptr)
956 if (CREF_CLASS(cref) == old_klass) {
957 new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
958 *new_cref_ptr = new_cref;
961 new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
962 cref = CREF_NEXT(cref);
963 *new_cref_ptr = new_cref;
964 new_cref_ptr = &new_cref->next;
966 *new_cref_ptr = NULL;
970vm_cref_push(
const rb_execution_context_t *ec,
VALUE klass,
const VALUE *ep,
int pushed_by_eval,
int singleton)
972 rb_cref_t *prev_cref = NULL;
975 prev_cref = vm_env_cref(ep);
978 rb_control_frame_t *cfp = vm_get_ruby_level_caller_cfp(ec, ec->cfp);
981 prev_cref = vm_env_cref(cfp->ep);
985 return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
989vm_get_cbase(
const VALUE *ep)
991 const rb_cref_t *cref = vm_get_cref(ep);
993 return CREF_CLASS_FOR_DEFINITION(cref);
997vm_get_const_base(
const VALUE *ep)
999 const rb_cref_t *cref = vm_get_cref(ep);
1002 if (!CREF_PUSHED_BY_EVAL(cref)) {
1003 return CREF_CLASS_FOR_DEFINITION(cref);
1005 cref = CREF_NEXT(cref);
1012vm_check_if_namespace(
VALUE klass)
1015 rb_raise(
rb_eTypeError,
"%+"PRIsVALUE
" is not a class/module", klass);
1020vm_ensure_not_refinement_module(
VALUE self)
1022 if (RB_TYPE_P(self,
T_MODULE) &&
FL_TEST(self, RMODULE_IS_REFINEMENT)) {
1023 rb_warn(
"not defined at the refinement, but at the outer class/module");
1028vm_get_iclass(
const rb_control_frame_t *cfp,
VALUE klass)
1034vm_get_ev_const(rb_execution_context_t *ec,
VALUE orig_klass,
ID id,
bool allow_nil,
int is_defined)
1036 void rb_const_warn_if_deprecated(
const rb_const_entry_t *ce,
VALUE klass,
ID id);
1039 if (
NIL_P(orig_klass) && allow_nil) {
1041 const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
1042 const rb_cref_t *cref;
1045 while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
1046 root_cref = CREF_NEXT(root_cref);
1049 while (cref && CREF_NEXT(cref)) {
1050 if (CREF_PUSHED_BY_EVAL(cref)) {
1054 klass = CREF_CLASS(cref);
1056 cref = CREF_NEXT(cref);
1058 if (!
NIL_P(klass)) {
1060 rb_const_entry_t *ce;
1062 if ((ce = rb_const_lookup(klass,
id))) {
1063 rb_const_warn_if_deprecated(ce, klass,
id);
1066 if (am == klass)
break;
1068 if (is_defined)
return 1;
1069 if (rb_autoloading_value(klass,
id, &av, NULL))
return av;
1071 goto search_continue;
1078 if (UNLIKELY(!rb_ractor_main_p())) {
1080 rb_raise(rb_eRactorIsolationError,
1081 "can not access non-shareable objects in constant %"PRIsVALUE
"::%s by non-main ractor.",
rb_class_path(klass), rb_id2name(
id));
1092 if (root_cref && !
NIL_P(CREF_CLASS(root_cref))) {
1093 klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
1107 vm_check_if_namespace(orig_klass);
1109 return rb_public_const_defined_from(orig_klass,
id);
1112 return rb_public_const_get_from(orig_klass,
id);
1118rb_vm_get_ev_const(rb_execution_context_t *ec,
VALUE orig_klass,
ID id,
VALUE allow_nil)
1120 return vm_get_ev_const(ec, orig_klass,
id, allow_nil ==
Qtrue, 0);
1124vm_get_ev_const_chain(rb_execution_context_t *ec,
const ID *segments)
1128 int allow_nil = TRUE;
1129 if (segments[0] == idNULL) {
1134 while (segments[idx]) {
1135 ID id = segments[idx++];
1136 val = vm_get_ev_const(ec, val,
id, allow_nil, 0);
1144vm_get_cvar_base(
const rb_cref_t *cref,
const rb_control_frame_t *cfp,
int top_level_raise)
1149 rb_bug(
"vm_get_cvar_base: no cref");
1152 while (CREF_NEXT(cref) &&
1154 CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
1155 cref = CREF_NEXT(cref);
1157 if (top_level_raise && !CREF_NEXT(cref)) {
1161 klass = vm_get_iclass(cfp, CREF_CLASS(cref));
1169ALWAYS_INLINE(
static void fill_ivar_cache(
const rb_iseq_t *iseq, IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id));
1171fill_ivar_cache(
const rb_iseq_t *iseq, IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id)
1174 vm_cc_attr_index_set(cc, index, shape_id);
1177 vm_ic_attr_index_set(iseq, ic, index, shape_id);
1181#define ractor_incidental_shareable_p(cond, val) \
1182 (!(cond) || rb_ractor_shareable_p(val))
1183#define ractor_object_incidental_shareable_p(obj, val) \
1184 ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
1186#define ATTR_INDEX_NOT_SET (attr_index_t)-1
1190vm_getivar(
VALUE obj,
ID id,
const rb_iseq_t *iseq, IVC ic,
const struct rb_callcache *cc,
int is_attr,
VALUE default_value)
1194 shape_id_t shape_id;
1198 return default_value;
1201#if SHAPE_IN_BASIC_FLAGS
1202 shape_id = RBASIC_SHAPE_ID(obj);
1210#if !SHAPE_IN_BASIC_FLAGS
1211 shape_id = ROBJECT_SHAPE_ID(obj);
1217 if (UNLIKELY(!rb_ractor_main_p())) {
1227 ivar_list = RCLASS_IVPTR(obj);
1229#if !SHAPE_IN_BASIC_FLAGS
1230 shape_id = RCLASS_SHAPE_ID(obj);
1238 rb_gen_ivtbl_get(obj,
id, &ivtbl);
1239#if !SHAPE_IN_BASIC_FLAGS
1240 shape_id = ivtbl->shape_id;
1242 ivar_list = ivtbl->as.shape.ivptr;
1245 return default_value;
1249 shape_id_t cached_id;
1253 vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
1256 vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
1259 if (LIKELY(cached_id == shape_id)) {
1260 RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1262 if (index == ATTR_INDEX_NOT_SET) {
1263 return default_value;
1266 val = ivar_list[index];
1267#if USE_DEBUG_COUNTER
1268 RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
1271 RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
1277#if USE_DEBUG_COUNTER
1279 if (cached_id != INVALID_SHAPE_ID) {
1280 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
1283 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
1287 if (cached_id != INVALID_SHAPE_ID) {
1288 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
1291 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
1294 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1297 RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
1301 if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
1302 st_table *table = NULL;
1306 table = (st_table *)RCLASS_IVPTR(obj);
1310 table = ROBJECT_IV_HASH(obj);
1315 if (rb_gen_ivtbl_get(obj, 0, &ivtbl)) {
1316 table = ivtbl->as.complex.table;
1322 if (!table || !st_lookup(table,
id, &val)) {
1323 val = default_value;
1327 shape_id_t previous_cached_id = cached_id;
1328 if (rb_shape_get_iv_index_with_hint(shape_id,
id, &index, &cached_id)) {
1331 if (cached_id != previous_cached_id) {
1332 fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
1335 if (index == ATTR_INDEX_NOT_SET) {
1336 val = default_value;
1340 val = ivar_list[index];
1346 vm_cc_attr_index_initialize(cc, shape_id);
1349 vm_ic_attr_index_initialize(ic, shape_id);
1352 val = default_value;
1358 if (default_value !=
Qundef) {
1366 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1369 return rb_attr_get(obj,
id);
1377populate_cache(attr_index_t index, shape_id_t next_shape_id,
ID id,
const rb_iseq_t *iseq, IVC ic,
const struct rb_callcache *cc,
bool is_attr)
1379 RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1383 vm_cc_attr_index_set(cc, index, next_shape_id);
1386 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1390ALWAYS_INLINE(
static VALUE vm_setivar_slowpath(
VALUE obj,
ID id,
VALUE val,
const rb_iseq_t *iseq, IVC ic,
const struct rb_callcache *cc,
int is_attr));
1391NOINLINE(
static VALUE vm_setivar_slowpath_ivar(
VALUE obj,
ID id,
VALUE val,
const rb_iseq_t *iseq, IVC ic));
1395vm_setivar_slowpath(
VALUE obj,
ID id,
VALUE val,
const rb_iseq_t *iseq, IVC ic,
const struct rb_callcache *cc,
int is_attr)
1398 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1403 attr_index_t index = rb_obj_ivar_set(obj,
id, val);
1405 shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);
1407 if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
1408 populate_cache(index, next_shape_id,
id, iseq, ic, cc, is_attr);
1411 RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
1419vm_setivar_slowpath_ivar(
VALUE obj,
ID id,
VALUE val,
const rb_iseq_t *iseq, IVC ic)
1421 return vm_setivar_slowpath(obj,
id, val, iseq, ic, NULL,
false);
1427 return vm_setivar_slowpath(obj,
id, val, NULL, NULL, cc,
true);
1430NOINLINE(
static VALUE vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index));
1432vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1434#if SHAPE_IN_BASIC_FLAGS
1435 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1437 shape_id_t shape_id = rb_generic_shape_id(obj);
1443 if (shape_id == dest_shape_id) {
1444 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1446 else if (dest_shape_id != INVALID_SHAPE_ID) {
1447 rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
1448 rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
1450 if (shape_id == dest_shape->parent_id && dest_shape->edge_name ==
id && shape->capacity == dest_shape->capacity) {
1461 rb_gen_ivtbl_get(obj, 0, &ivtbl);
1463 if (shape_id != dest_shape_id) {
1464#if SHAPE_IN_BASIC_FLAGS
1465 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1467 ivtbl->shape_id = dest_shape_id;
1473 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1479vm_setivar(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1487 shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
1488 RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1490 if (LIKELY(shape_id == dest_shape_id)) {
1491 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1494 else if (dest_shape_id != INVALID_SHAPE_ID) {
1495 rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
1496 rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
1497 shape_id_t source_shape_id = dest_shape->parent_id;
1499 if (shape_id == source_shape_id && dest_shape->edge_name ==
id && shape->capacity == dest_shape->capacity) {
1500 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1502 ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);
1504 RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id),
id) == dest_shape);
1520 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1521 RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);
1527 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1537update_classvariable_cache(
const rb_iseq_t *iseq,
VALUE klass,
ID id,
const rb_cref_t * cref, ICVARC ic)
1539 VALUE defined_class = 0;
1542 if (RB_TYPE_P(defined_class,
T_ICLASS)) {
1543 defined_class =
RBASIC(defined_class)->klass;
1546 struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
1548 rb_bug(
"the cvc table should be set");
1552 if (!rb_id_table_lookup(rb_cvc_tbl,
id, &ent_data)) {
1553 rb_bug(
"should have cvar cache entry");
1558 ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
1571vm_getclassvariable(
const rb_iseq_t *iseq,
const rb_control_frame_t *reg_cfp,
ID id, ICVARC ic)
1573 const rb_cref_t *cref;
1574 cref = vm_get_cref(GET_EP());
1576 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1577 RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
1579 VALUE v = rb_ivar_lookup(ic->entry->class_value,
id,
Qundef);
1585 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1587 return update_classvariable_cache(iseq, klass,
id, cref, ic);
1591rb_vm_getclassvariable(
const rb_iseq_t *iseq,
const rb_control_frame_t *cfp,
ID id, ICVARC ic)
1593 return vm_getclassvariable(iseq, cfp,
id, ic);
1597vm_setclassvariable(
const rb_iseq_t *iseq,
const rb_control_frame_t *reg_cfp,
ID id,
VALUE val, ICVARC ic)
1599 const rb_cref_t *cref;
1600 cref = vm_get_cref(GET_EP());
1602 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1603 RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
1605 rb_class_ivar_set(ic->entry->class_value,
id, val);
1609 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1613 update_classvariable_cache(iseq, klass,
id, cref, ic);
1617rb_vm_setclassvariable(
const rb_iseq_t *iseq,
const rb_control_frame_t *cfp,
ID id,
VALUE val, ICVARC ic)
1619 vm_setclassvariable(iseq, cfp,
id, val, ic);
1623vm_getinstancevariable(
const rb_iseq_t *iseq,
VALUE obj,
ID id, IVC ic)
1625 return vm_getivar(obj,
id, iseq, ic, NULL, FALSE,
Qnil);
1629vm_setinstancevariable(
const rb_iseq_t *iseq,
VALUE obj,
ID id,
VALUE val, IVC ic)
1636 shape_id_t dest_shape_id;
1638 vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
1640 if (UNLIKELY(UNDEF_P(vm_setivar(obj,
id, val, dest_shape_id, index)))) {
1647 if (!UNDEF_P(vm_setivar_default(obj,
id, val, dest_shape_id, index))) {
1651 vm_setivar_slowpath_ivar(obj,
id, val, iseq, ic);
1656rb_vm_setinstancevariable(
const rb_iseq_t *iseq,
VALUE obj,
ID id,
VALUE val, IVC ic)
1658 vm_setinstancevariable(iseq, obj,
id, val, ic);
1662vm_throw_continue(
const rb_execution_context_t *ec,
VALUE err)
1667 ec->tag->state = RUBY_TAG_FATAL;
1670 ec->tag->state = TAG_THROW;
1672 else if (THROW_DATA_P(err)) {
1673 ec->tag->state = THROW_DATA_STATE((
struct vm_throw_data *)err);
1676 ec->tag->state = TAG_RAISE;
1682vm_throw_start(
const rb_execution_context_t *ec, rb_control_frame_t *
const reg_cfp,
enum ruby_tag_type state,
1683 const int flag,
const VALUE throwobj)
1685 const rb_control_frame_t *escape_cfp = NULL;
1686 const rb_control_frame_t *
const eocfp = RUBY_VM_END_CONTROL_FRAME(ec);
1691 else if (state == TAG_BREAK) {
1693 const VALUE *ep = GET_EP();
1694 const rb_iseq_t *base_iseq = GET_ISEQ();
1695 escape_cfp = reg_cfp;
1697 while (ISEQ_BODY(base_iseq)->
type != ISEQ_TYPE_BLOCK) {
1698 if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1699 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1700 ep = escape_cfp->ep;
1701 base_iseq = escape_cfp->iseq;
1704 ep = VM_ENV_PREV_EP(ep);
1705 base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
1706 escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
1707 VM_ASSERT(escape_cfp->iseq == base_iseq);
1711 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1717 ep = VM_ENV_PREV_EP(ep);
1719 while (escape_cfp < eocfp) {
1720 if (escape_cfp->ep == ep) {
1721 const rb_iseq_t *
const iseq = escape_cfp->iseq;
1722 const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
1727 for (i=0; i < ct->size; i++) {
1729 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1731 if (entry->type == CATCH_TYPE_BREAK &&
1732 entry->iseq == base_iseq &&
1733 entry->start < epc && entry->end >= epc) {
1734 if (entry->cont == epc) {
1743 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1748 rb_vm_localjump_error(
"break from proc-closure", throwobj, TAG_BREAK);
1751 else if (state == TAG_RETRY) {
1752 const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1754 escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
1756 else if (state == TAG_RETURN) {
1757 const VALUE *current_ep = GET_EP();
1758 const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
1759 int in_class_frame = 0;
1761 escape_cfp = reg_cfp;
1764 while (!VM_ENV_LOCAL_P(ep)) {
1765 if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
1768 ep = VM_ENV_PREV_EP(ep);
1772 while (escape_cfp < eocfp) {
1773 const VALUE *lep = VM_CF_LEP(escape_cfp);
1779 if (lep == target_lep &&
1780 VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1781 ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1786 if (lep == target_lep) {
1787 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1789 if (in_class_frame) {
1794 const VALUE *tep = current_ep;
1796 while (target_lep != tep) {
1797 if (escape_cfp->ep == tep) {
1799 if (tep == target_ep) {
1803 goto unexpected_return;
1806 tep = VM_ENV_PREV_EP(tep);
1810 else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1811 switch (ISEQ_BODY(escape_cfp->iseq)->type) {
1813 case ISEQ_TYPE_MAIN:
1815 if (in_class_frame)
goto unexpected_return;
1816 if (target_ep == NULL) {
1820 goto unexpected_return;
1824 case ISEQ_TYPE_EVAL: {
1825 const rb_iseq_t *is = escape_cfp->iseq;
1826 enum rb_iseq_type t = ISEQ_BODY(is)->type;
1827 while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
1828 if (!(is = ISEQ_BODY(is)->parent_iseq))
break;
1829 t = ISEQ_BODY(is)->type;
1831 toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
1834 case ISEQ_TYPE_CLASS:
1843 if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
1844 if (target_ep == NULL) {
1848 goto unexpected_return;
1852 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1855 rb_vm_localjump_error(
"unexpected return", throwobj, TAG_RETURN);
1861 rb_bug(
"isns(throw): unsupported throw type");
1864 ec->tag->state = state;
1865 return (
VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
1869vm_throw(
const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
1870 rb_num_t throw_state,
VALUE throwobj)
1872 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1873 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1876 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1879 return vm_throw_continue(ec, throwobj);
/* Exported entry point for throw semantics used from outside the
 * interpreter loop (e.g. JIT-generated code): simply forwards to the
 * static vm_throw(), which decodes throw_state and builds/continues
 * the non-local jump. */
1884rb_vm_throw(
const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t throw_state,
VALUE throwobj)
1886 return vm_throw(ec, reg_cfp, throw_state, throwobj);
1892 int is_splat = flag & 0x01;
1895 const VALUE obj = ary;
1897 if (!RB_TYPE_P(ary,
T_ARRAY) &&
NIL_P(ary = rb_check_array_type(ary))) {
1907 if (num + is_splat == 0) {
1910 else if (flag & 0x02) {
1915 for (i = 0; i < num -
len; i++) {
1920 for (j = 0; i < num; i++, j++) {
1933 *cfp->sp++ = rb_ary_new();
1942 for (; i < num -
len; i++) {
1946 for (rb_num_t j = 0; i < num; i++, j++) {
1947 *cfp->sp++ = ptr[
len - j - 1];
1951 for (rb_num_t j = 0; j < num; j++) {
1952 *cfp->sp++ = ptr[num - j - 1];
1960static VALUE vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling);
/* Create an empty per-class call-cache-entries bucket (ccs) for method
 * `mid` on `klass` and register it in `cc_tbl`, remembering `cme` as the
 * resolved callable method entry.
 * NOTE(review): the allocation of `ccs` itself is not visible in this
 * excerpt; comments describe only what the shown lines do. */
1965vm_ccs_create(
VALUE klass,
struct rb_id_table *cc_tbl,
ID mid,
const rb_callable_method_entry_t *cme)
1968#if VM_CHECK_MODE > 0
/* debug signature: bitwise-NOT of the ccs address, checked by vm_ccs_p */
1969 ccs->debug_sig = ~(
VALUE)ccs;
/* mark the cme as cached so invalidation can find this cache later */
1974 METHOD_ENTRY_CACHED_SET((rb_callable_method_entry_t *)cme);
1975 ccs->entries = NULL;
/* publish the bucket under `mid` in the class's cc table */
1977 rb_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
1985 if (! vm_cc_markable(cc)) {
1988 else if (! vm_ci_markable(ci)) {
1992 if (UNLIKELY(ccs->len == ccs->capa)) {
1993 if (ccs->capa == 0) {
1995 ccs->entries =
ALLOC_N(
struct rb_class_cc_entries_entry, ccs->capa);
1999 REALLOC_N(ccs->entries,
struct rb_class_cc_entries_entry, ccs->capa);
2002 VM_ASSERT(ccs->len < ccs->capa);
2004 const int pos = ccs->len++;
2008 if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
2014#if VM_CHECK_MODE > 0
2018 ruby_debug_printf(
"ccs:%p (%d,%d)\n", (
void *)ccs, ccs->len, ccs->capa);
2019 for (
int i=0; i<ccs->len; i++) {
2020 vm_ci_dump(ccs->entries[i].ci);
2021 rp(ccs->entries[i].cc);
2028 VM_ASSERT(vm_ccs_p(ccs));
2029 VM_ASSERT(ccs->len <= ccs->capa);
2031 for (
int i=0; i<ccs->len; i++) {
2032 const struct rb_callinfo *ci = ccs->entries[i].ci;
2035 VM_ASSERT(vm_ci_p(ci));
2036 VM_ASSERT(vm_ci_mid(ci) == mid);
2037 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2038 VM_ASSERT(vm_cc_class_check(cc, klass));
2039 VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
2040 VM_ASSERT(!vm_cc_super_p(cc));
2041 VM_ASSERT(!vm_cc_refinement_p(cc));
2047static const rb_callable_method_entry_t *check_overloaded_cme(
const rb_callable_method_entry_t *cme,
const struct rb_callinfo *
const ci);
2052 const ID mid = vm_ci_mid(ci);
2053 struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
2058 if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
2060 const int ccs_len = ccs->len;
2062 if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
2063 rb_vm_ccs_free(ccs);
2064 rb_id_table_delete(cc_tbl, mid);
2068 VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
2070 for (
int i=0; i<ccs_len; i++) {
2071 const struct rb_callinfo *ccs_ci = ccs->entries[i].ci;
2072 const struct rb_callcache *ccs_cc = ccs->entries[i].cc;
2074 VM_ASSERT(vm_ci_p(ccs_ci));
2075 VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));
2078 RB_DEBUG_COUNTER_INC(cc_found_in_ccs);
2080 VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
2081 VM_ASSERT(ccs_cc->klass == klass);
2082 VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
2091 cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
2094 RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);
2096 const rb_callable_method_entry_t *cme;
2100 cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;
2102 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2105 cme = rb_callable_method_entry(klass, mid);
2108 VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));
2112 VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
2113 return &vm_empty_cc;
2116 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2121 VM_ASSERT(cc_tbl != NULL);
2123 if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
2129 ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
2133 cme = check_overloaded_cme(cme, ci);
2135 const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
2136 vm_ccs_push(klass, ccs, ci, cc);
2138 VM_ASSERT(vm_cc_cme(cc) != NULL);
2139 VM_ASSERT(cme->called_id == mid);
2140 VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
2154 cc = vm_search_cc(klass, ci);
2157 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2158 VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
2159 VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
2160 VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
2161 VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
2171#if USE_DEBUG_COUNTER
2175 const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);
2177#if OPT_INLINE_METHOD_CACHE
2181 if (cd_owner && cc != empty_cc) {
2185#if USE_DEBUG_COUNTER
2186 if (old_cc == empty_cc) {
2188 RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
2190 else if (old_cc == cc) {
2191 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
2193 else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
2194 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
2196 else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
2197 vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
2198 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
2201 RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
2206 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2207 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2218#if OPT_INLINE_METHOD_CACHE
2219 if (LIKELY(vm_cc_class_check(cc, klass))) {
2220 if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
2221 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
2222 RB_DEBUG_COUNTER_INC(mc_inline_hit);
2223 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2224 (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
2225 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2229 RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
2232 RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
2236 return vm_search_method_slowpath0(cd_owner, cd, klass);
2243 VM_ASSERT(klass !=
Qfalse);
2246 return vm_search_method_fastpath(cd_owner, cd, klass);
2249#if __has_attribute(transparent_union)
2262 VALUE (*f10)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2263 VALUE (*f11)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2264 VALUE (*f12)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2265 VALUE (*f13)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2266 VALUE (*f14)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2267 VALUE (*f15)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
/* Return whether callable method entry `me` is a C-function method whose
 * implementation pointer equals `func`.  Used to detect that a core
 * method (e.g. rb_obj_equal) has not been redefined, enabling fast paths. */
2275check_cfunc(
const rb_callable_method_entry_t *me, cfunc_type func)
2281 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2282 VM_ASSERT(callable_method_entry_p(me));
/* only VM_METHOD_TYPE_CFUNC entries can possibly match */
2284 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
/* with transparent_union, compare through the union's any-args member */
2288#if __has_attribute(transparent_union)
2289 return me->def->body.cfunc.func == func.anyargs;
2291 return me->def->body.cfunc.func == func;
/* Check whether the method that `recv.<mid in cd>` resolves to is exactly
 * the C function `func`.
 * NOTE(review): the line obtaining `cc` (the call cache for cd/recv) is
 * missing from this excerpt — presumably a vm_search_method call. */
2298vm_method_cfunc_is(
const rb_iseq_t *iseq, CALL_DATA cd,
VALUE recv, cfunc_type func)
2300 VM_ASSERT(iseq != NULL);
2302 return check_cfunc(vm_cc_cme(cc), func);
2305#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
/* Fast-path `recv == obj` for common operand classes whose #== has not
 * been redefined (BOP_EQ unredefined).  Falls through to identity
 * comparison for Fixnum/Flonum pairs; strings use the internal eql.
 * Lines for the Float path and some branches are elided in this excerpt. */
2337opt_equality_specialized(
VALUE recv,
VALUE obj)
/* two Fixnums with Integer#== unredefined: object identity suffices */
2339 if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
2340 goto compare_by_identity;
/* two Flonums with Float#== unredefined: identity also suffices */
2342 else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
2343 goto compare_by_identity;
2346 goto compare_by_identity;
/* old MSVC lacks correct NaN comparison; explicit isnan checks */
2355#if MSC_VERSION_BEFORE(1300)
2359 else if (isnan(b)) {
2364 return RBOOL(a == b);
2370 else if (RB_TYPE_P(obj,
T_STRING)) {
2371 return rb_str_eql_internal(obj, recv);
2376 compare_by_identity:
2377 return RBOOL(recv == obj);
/* opt_eq instruction helper: try the specialized fast path first; if it
 * does not apply (Qundef), check via the call cache whether #== is still
 * rb_obj_equal and, if so, compare by identity.  The slow-path dispatch
 * branch is elided in this excerpt. */
2381opt_equality(
const rb_iseq_t *cd_owner,
VALUE recv,
VALUE obj, CALL_DATA cd)
2383 VM_ASSERT(cd_owner != NULL);
2385 VALUE val = opt_equality_specialized(recv, obj);
/* Qundef means "no specialized answer", not "false" */
2386 if (!UNDEF_P(val))
return val;
2388 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
/* #== is the default rb_obj_equal: identity comparison is exact */
2392 return RBOOL(recv == obj);
2396#undef EQ_UNREDEFINED_P
2398static inline const struct rb_callcache *gccct_method_search(rb_execution_context_t *ec,
VALUE recv,
ID mid,
int argc);
2399NOINLINE(
static VALUE opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid));
2402opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid)
2404 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, 1);
2406 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2407 return RBOOL(recv == obj);
2417 VALUE val = opt_equality_specialized(recv, obj);
2418 if (!UNDEF_P(val)) {
2422 return opt_equality_by_mid_slowpath(recv, obj, mid);
2429 return opt_equality_by_mid(obj1, obj2, idEq);
2435 return opt_equality_by_mid(obj1, obj2, idEqlP);
2438extern VALUE rb_vm_call0(rb_execution_context_t *ec,
VALUE,
ID,
int,
const VALUE*,
const rb_callable_method_entry_t *,
int kw_splat);
2439extern VALUE rb_vm_call_with_refinements(rb_execution_context_t *,
VALUE,
ID,
int,
const VALUE *,
int);
/* Implements the checkmatch instruction: evaluate `pattern === target`
 * with semantics selected by `type` (when-clause, rescue-clause, or
 * case dispatch).  Several switch arms are elided in this excerpt. */
2442check_match(rb_execution_context_t *ec,
VALUE pattern,
VALUE target,
enum vm_check_match_type
type)
2445 case VM_CHECKMATCH_TYPE_WHEN:
/* rescue patterns must be classes/modules */
2447 case VM_CHECKMATCH_TYPE_RESCUE:
2449 rb_raise(
rb_eTypeError,
"class or module required for rescue clause");
/* case/when: call pattern === target, honoring refinements */
2452 case VM_CHECKMATCH_TYPE_CASE: {
2453 return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target,
RB_NO_KEYWORDS);
2456 rb_bug(
"check_match: unreachable");
2461#if MSC_VERSION_BEFORE(1300)
2462#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
2464#define CHECK_CMP_NAN(a, b)
2468double_cmp_lt(
double a,
double b)
2470 CHECK_CMP_NAN(a, b);
2471 return RBOOL(a < b);
2475double_cmp_le(
double a,
double b)
2477 CHECK_CMP_NAN(a, b);
2478 return RBOOL(a <= b);
2482double_cmp_gt(
double a,
double b)
2484 CHECK_CMP_NAN(a, b);
2485 return RBOOL(a > b);
2489double_cmp_ge(
double a,
double b)
2491 CHECK_CMP_NAN(a, b);
2492 return RBOOL(a >= b);
/* Compute the base pointer (start of the argument/local region) of a Ruby
 * frame: previous frame's sp plus this iseq's locals plus the env data.
 * Non-Ruby frames and the early-return branches are elided here. */
2496static inline VALUE *
2497vm_base_ptr(
const rb_control_frame_t *cfp)
2499 const rb_control_frame_t *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
2501 if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
2502 VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;
/* method/bmethod frames reserve an extra slot (adjustment elided) */
2503 if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
/* optional sanity check against the bp recorded at frame push */
2507#if VM_DEBUG_BP_CHECK
2508 if (bp != cfp->bp_check) {
2509 ruby_debug_printf(
"bp_check: %ld, bp: %ld\n",
2510 (
long)(cfp->bp_check - GET_EC()->vm_stack),
2511 (
long)(bp - GET_EC()->vm_stack));
2512 rb_bug(
"vm_base_ptr: unreachable");
/* Exported wrapper around vm_base_ptr() for code outside this file
 * (e.g. JIT) that needs a frame's base pointer. */
2523rb_vm_base_ptr(
const rb_control_frame_t *cfp)
2525 return vm_base_ptr(cfp);
2532static inline VALUE vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
int opt_pc,
int param_size,
int local_size);
2533ALWAYS_INLINE(
static VALUE vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
const rb_callable_method_entry_t *me,
int opt_pc,
int param_size,
int local_size));
2534static inline VALUE vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
int opt_pc);
2535static VALUE vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling);
2536static VALUE vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling);
2537static VALUE vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling);
2538static inline VALUE vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling);
2540static vm_call_handler vm_call_iseq_setup_func(
const struct rb_callinfo *ci,
const int param_size,
const int local_size);
/* Fastpath call handler: tailcall into an iseq method whose optional-arg
 * offset is 0 (no optionals consumed). */
2543vm_call_iseq_setup_tailcall_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
2545 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
2547 return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
/* Fastpath call handler: push a normal method frame with opt_pc == 0.
 * NOTE(review): the line binding `cc` (from calling->cc) is missing from
 * this excerpt. */
2551vm_call_iseq_setup_normal_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
2553 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2556 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2557 int param = ISEQ_BODY(iseq)->param.size;
2558 int local = ISEQ_BODY(iseq)->local_table_size;
2559 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2563rb_simple_iseq_p(
const rb_iseq_t *iseq)
2565 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2566 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2567 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2568 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2569 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2570 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2571 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2575rb_iseq_only_optparam_p(
const rb_iseq_t *iseq)
2577 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2578 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2579 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2580 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2581 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2582 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2583 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2587rb_iseq_only_kwparam_p(
const rb_iseq_t *iseq)
2589 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2590 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2591 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2592 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2593 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2594 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2597#define ALLOW_HEAP_ARGV (-2)
2598#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
/* Expand a splatted array `ary` onto the VM stack for a call.  When the
 * callee allows heap argv and the expansion would overflow the per-call
 * stack budget, collect everything into a hidden Array instead and record
 * it in calling->heap_argv.  Several branch/return lines are elided. */
2601vm_caller_setup_arg_splat(rb_control_frame_t *cfp,
struct rb_calling_info *calling,
VALUE ary,
int max_args)
2603 vm_check_canary(GET_EC(), cfp->sp);
2609 int argc = calling->argc;
/* too many args for the stack: spill existing args + splat into an array */
2611 if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
2615 VALUE *argv = cfp->sp - argc;
2616 VALUE argv_ary = rb_ary_hidden_new(
len + argc + 1);
2617 rb_ary_cat(argv_ary, argv, argc);
2618 rb_ary_cat(argv_ary, ptr,
len);
/* leave a single stack slot holding the hidden array (GC-visible) */
2619 cfp->sp -= argc - 1;
2620 cfp->sp[-1] = argv_ary;
2622 calling->heap_argv = argv_ary;
/* partial expansion: only take up to max_args; the rest stays wrapped */
2628 if (max_args >= 0 &&
len + argc > max_args) {
2636 calling->argc +=
len - (max_args - argc + 1);
2637 len = max_args - argc + 1;
2646 calling->heap_argv = 0;
2648 CHECK_VM_STACK_OVERFLOW(cfp,
len);
/* push splatted elements in order */
2650 for (i = 0; i <
len; i++) {
2651 *cfp->sp++ = ptr[i];
2663 const VALUE *
const passed_keywords = vm_ci_kwarg(ci)->keywords;
2664 const int kw_len = vm_ci_kwarg(ci)->keyword_len;
2665 const VALUE h = rb_hash_new_with_size(kw_len);
2666 VALUE *sp = cfp->sp;
2669 for (i=0; i<kw_len; i++) {
2670 rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
2674 cfp->sp -= kw_len - 1;
2675 calling->argc -= kw_len - 1;
2676 calling->kw_splat = 1;
/* Normalize a keyword-splat argument to a Hash the callee may safely
 * consume: convert non-Hash values via to_hash, and dup the hash unless
 * the call site marked the splat as mutable (caller-owned copy). */
2680vm_caller_setup_keyword_hash(
const struct rb_callinfo *ci,
VALUE keyword_hash)
/* not a Hash: convert, then dup so the callee can mutate freely */
2682 if (UNLIKELY(!RB_TYPE_P(keyword_hash,
T_HASH))) {
2684 keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));
/* already a Hash, but not known-mutable: defensively dup */
2686 else if (!IS_ARGS_KW_SPLAT_MUT(ci)) {
2690 keyword_hash = rb_hash_dup(keyword_hash)
2692 return keyword_hash;
2698 const struct rb_callinfo *restrict ci,
int max_args)
2700 if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
2701 if (IS_ARGS_KW_SPLAT(ci)) {
2703 VM_ASSERT(calling->kw_splat == 1);
2707 VALUE ary = cfp->sp[0];
2708 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);
2711 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args))
return;
2715 if (UNLIKELY(calling->heap_argv)) {
2716 rb_ary_push(calling->heap_argv, kwh);
2717 ((
struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
2718 if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
2719 calling->kw_splat = 0;
2727 VM_ASSERT(calling->kw_splat == 1);
2731 calling->kw_splat = 0;
2736 VM_ASSERT(calling->kw_splat == 0);
2740 VALUE ary = cfp->sp[0];
2742 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
2747 VALUE last_hash, argv_ary;
2748 if (UNLIKELY(argv_ary = calling->heap_argv)) {
2749 if (!IS_ARGS_KEYWORD(ci) &&
2751 RB_TYPE_P((last_hash = rb_ary_last(0, NULL, argv_ary)),
T_HASH) &&
2752 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2754 rb_ary_pop(argv_ary);
2756 rb_ary_push(argv_ary, rb_hash_dup(last_hash));
2757 calling->kw_splat = 1;
2763 if (!IS_ARGS_KEYWORD(ci) &&
2764 calling->argc > 0 &&
2765 RB_TYPE_P((last_hash = cfp->sp[-1]),
T_HASH) &&
2766 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2773 cfp->sp[-1] = rb_hash_dup(last_hash);
2774 calling->kw_splat = 1;
2780 else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
2782 VM_ASSERT(calling->kw_splat == 1);
2783 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);
2788 calling->kw_splat = 0;
2794 else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
2796 VM_ASSERT(calling->kw_splat == 0);
2802 vm_caller_setup_arg_kw(cfp, calling, ci);
2806#define USE_OPT_HIST 0
2809#define OPT_HIST_MAX 64
2810static int opt_hist[OPT_HIST_MAX+1];
2814opt_hist_show_results_at_exit(
void)
2816 for (
int i=0; i<OPT_HIST_MAX; i++) {
2817 ruby_debug_printf(
"opt_hist\t%d\t%d\n", i, opt_hist[i]);
/* Fastpath handler for methods with only optional positional params:
 * look up the entry pc for the number of optionals actually supplied and
 * push a normal frame.  `param - delta` shrinks the param region by the
 * optionals that were NOT passed.  Histogram lines partially elided. */
2823vm_call_iseq_setup_normal_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2827 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2828 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
/* number of optional arguments the caller actually provided */
2829 const int opt = calling->argc - lead_num;
2830 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2831 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2832 const int param = ISEQ_BODY(iseq)->param.size;
2833 const int local = ISEQ_BODY(iseq)->local_table_size;
2834 const int delta = opt_num - opt;
2836 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2839 if (opt_pc < OPT_HIST_MAX) {
2843 opt_hist[OPT_HIST_MAX]++;
2847 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
/* Tailcall variant of the optional-parameter fastpath: compute the entry
 * pc from the optionals supplied, then reuse the current frame. */
2851vm_call_iseq_setup_tailcall_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2855 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2856 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2857 const int opt = calling->argc - lead_num;
2858 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2860 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2863 if (opt_pc < OPT_HIST_MAX) {
2867 opt_hist[OPT_HIST_MAX]++;
2871 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2875args_setup_kw_parameters(rb_execution_context_t *
const ec,
const rb_iseq_t *
const iseq,
2876 VALUE *
const passed_values,
const int passed_keyword_len,
const VALUE *
const passed_keywords,
2877 VALUE *
const locals);
/* Fastpath for keyword-only-parameter methods called with literal keyword
 * arguments (VM_CALL_KWARG): copy the keyword values out of the stack,
 * bind them into the callee's keyword locals, then push a normal frame.
 * NOTE(review): the declarations of `kw_arg` and `ci_kws` are missing
 * from this excerpt. */
2880vm_call_iseq_setup_kwparm_kwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2886 VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
2887 RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);
2889 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2890 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2892 const int ci_kw_len = kw_arg->keyword_len;
2893 const VALUE *
const ci_keywords = kw_arg->keywords;
2894 VALUE *argv = cfp->sp - calling->argc;
/* keyword locals live just before the kw-bits slot in the local table */
2895 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2896 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
/* snapshot keyword values: args_setup_kw_parameters scans them */
2898 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
2899 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
2901 int param = ISEQ_BODY(iseq)->param.size;
2902 int local = ISEQ_BODY(iseq)->local_table_size;
2903 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
/* Fastpath for keyword-only-parameter methods called with NO keyword
 * arguments: every keyword local gets its (non-complex) default value,
 * then a normal frame is pushed. */
2907vm_call_iseq_setup_kwparm_nokwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2910 const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
2913 VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
2914 RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);
2916 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2917 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2918 VALUE *
const argv = cfp->sp - calling->argc;
2919 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
/* fill each keyword local with its precomputed default */
2922 for (i=0; i<kw_param->num; i++) {
2923 klocals[i] = kw_param->default_values[i];
2930 int param = ISEQ_BODY(iseq)->param.size;
2931 int local = ISEQ_BODY(iseq)->local_table_size;
2932 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2935static VALUE builtin_invoker0(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr);
/* Fastpath for a method whose whole body is a single no-argument builtin:
 * pop receiver+args and invoke the builtin's C function directly, without
 * pushing a Ruby frame.
 * NOTE(review): the line binding `bf` (the builtin function descriptor
 * stashed in the call cache) is missing from this excerpt. */
2938vm_call_single_noarg_inline_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2942 cfp->sp -= (calling->argc + 1);
2943 return builtin_invoker0(ec, calling->recv, NULL, (rb_insn_func_t)bf->func_ptr);
/* Arrange the caller-pushed arguments to match `iseq`'s parameter layout
 * and, when the call shape is cacheable, install a specialized fastpath
 * handler in the call cache.  Returns the opt_pc entry offset into the
 * iseq.  Branch structure: simple positional-only, optional-only,
 * keyword-only, and finally the fully general setup_parameters_complex.
 * Several lines (closing braces, `kw_arg`/`ci_kws` declarations, loop
 * bodies) are elided in this excerpt. */
2947vm_callee_setup_arg(rb_execution_context_t *ec,
struct rb_calling_info *calling,
2948 const rb_iseq_t *iseq,
VALUE *argv,
int param_size,
int local_size)
/* only markable (non-temporary) call infos may be cached */
2952 bool cacheable_ci = vm_ci_markable(ci);
2954 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
/* case 1: plain lead-args-only method */
2955 if (LIKELY(rb_simple_iseq_p(iseq))) {
2956 rb_control_frame_t *cfp = ec->cfp;
2957 int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2958 CALLER_SETUP_ARG(cfp, calling, ci, lead_num);
2960 if (calling->argc != lead_num) {
2961 argument_arity_error(ec, iseq, calling->argc, lead_num, lead_num);
2964 VM_ASSERT(ci == calling->cd->ci);
2965 VM_ASSERT(cc == calling->cc);
2967 if (cacheable_ci && vm_call_iseq_optimizable_p(ci, cc)) {
/* special case: body is one inlinable no-arg builtin */
2968 if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_INLINE) &&
2970 VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
2971 vm_cc_bf_set(cc, (
void *)iseq->body->iseq_encoded[1]);
2972 CC_SET_FASTPATH(cc, vm_call_single_noarg_inline_builtin,
true);
2975 CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size),
true);
/* case 2: lead + optional positional params only */
2980 else if (rb_iseq_only_optparam_p(iseq)) {
2981 rb_control_frame_t *cfp = ec->cfp;
2983 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2984 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2986 CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
2987 const int argc = calling->argc;
2988 const int opt = argc - lead_num;
2990 if (opt < 0 || opt > opt_num) {
2991 argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
2994 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
2995 CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
2996 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
2997 cacheable_ci && vm_call_cacheable(ci, cc));
3000 CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
3001 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3002 cacheable_ci && vm_call_cacheable(ci, cc));
3006 VM_ASSERT((
int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
/* fill unsupplied optional slots (body elided) */
3007 for (
int i=argc; i<lead_num + opt_num; i++) {
3010 return (
int)ISEQ_BODY(iseq)->param.opt_table[opt];
/* case 3: lead + keyword params only, no splat */
3012 else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
3013 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3014 const int argc = calling->argc;
3015 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3017 if (vm_ci_flag(ci) & VM_CALL_KWARG) {
3020 if (argc - kw_arg->keyword_len == lead_num) {
3021 const int ci_kw_len = kw_arg->keyword_len;
3022 const VALUE *
const ci_keywords = kw_arg->keywords;
3024 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
3026 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3027 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
3029 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
3030 cacheable_ci && vm_call_cacheable(ci, cc));
/* no keyword args supplied: all keywords take their defaults */
3035 else if (argc == lead_num) {
3037 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3038 args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);
/* kw-bits slot == 0 means every default was non-complex */
3040 if (klocals[kw_param->num] ==
INT2FIX(0)) {
3042 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
3043 cacheable_ci && vm_call_cacheable(ci, cc));
/* fallback: rest/post/block/kw-splat etc. take the general path */
3051 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
/* Generic call handler for iseq-defined methods: arrange arguments (which
 * may also install a specialized fastpath in the cache), then push the
 * frame.  NOTE(review): the line binding `cc` is missing from this
 * excerpt. */
3055vm_call_iseq_setup(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
3057 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3060 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3061 const int param_size = ISEQ_BODY(iseq)->param.size;
3062 const int local_size = ISEQ_BODY(iseq)->local_table_size;
3063 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3064 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
/* Second stage of iseq call setup: choose normal frame push vs tailcall
 * based on the call site's VM_CALL_TAILCALL flag.
 * NOTE(review): the lines binding `ci`/`cc` are missing from this
 * excerpt. */
3068vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
3069 int opt_pc,
int param_size,
int local_size)
3074 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3075 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
3078 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
/* Push a method frame for `me` on top of the already-arranged arguments:
 * sp points past the parameter region; the caller's sp is rewound to
 * below the receiver slot before the push. */
3083vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
const rb_callable_method_entry_t *me,
3084 int opt_pc,
int param_size,
int local_size)
3086 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3087 VALUE *argv = cfp->sp - calling->argc;
3088 VALUE *sp = argv + param_size;
/* rewind past the receiver: the new frame owns argv onward */
3089 cfp->sp = argv - 1 ;
3091 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
3092 calling->block_handler, (
VALUE)me,
/* opt_pc skips the prologue for already-filled optionals */
3093 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3094 local_size - param_size,
3095 ISEQ_BODY(iseq)->stack_max);
/* Tailcall: replace the current frame with the callee's.  If the block
 * handler points into the frame being popped, re-capture it against the
 * previous frame first so it survives the pop.  The FINISH flag of the
 * popped frame is preserved on the new one.  Argument-copy setup lines
 * are partially elided in this excerpt. */
3100vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
int opt_pc)
3104 VALUE *argv = cfp->sp - calling->argc;
3105 const rb_callable_method_entry_t *me = vm_cc_cme(cc);
3106 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3107 VALUE *src_argv = argv;
3108 VALUE *sp_orig, *sp;
3109 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
/* block handler captured from the frame we are about to pop: move it */
3111 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
3112 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
3113 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
3114 dst_captured->code.val = src_captured->code.val;
3115 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
3116 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
3119 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
/* discard the caller's frame, then rebuild receiver+args at its base */
3123 vm_pop_frame(ec, cfp, cfp->ep);
3126 sp_orig = sp = cfp->sp;
3129 sp[0] = calling->recv;
/* copy the prepared parameter region into the reused stack area */
3133 for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
3134 *sp++ = src_argv[i];
3137 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
3138 calling->recv, calling->block_handler, (
VALUE)me,
3139 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3140 ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
3141 ISEQ_BODY(iseq)->stack_max);
3149ractor_unsafe_check(
void)
3151 if (!rb_ractor_main_p()) {
3152 rb_raise(rb_eRactorUnsafeError,
"ractor unsafe method called from not main ractor");
3159 ractor_unsafe_check();
3167 ractor_unsafe_check();
3169 return (*f)(argc, argv, recv);
3175 ractor_unsafe_check();
3183 ractor_unsafe_check();
3185 return (*f)(recv, argv[0]);
3191 ractor_unsafe_check();
3193 return (*f)(recv, argv[0], argv[1]);
3199 ractor_unsafe_check();
3201 return (*f)(recv, argv[0], argv[1], argv[2]);
3207 ractor_unsafe_check();
3209 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3215 ractor_unsafe_check();
3216 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3217 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3223 ractor_unsafe_check();
3224 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3225 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3231 ractor_unsafe_check();
3232 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3233 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3239 ractor_unsafe_check();
3240 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3241 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3247 ractor_unsafe_check();
3248 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3249 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3255 ractor_unsafe_check();
3256 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3257 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3263 ractor_unsafe_check();
3264 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3265 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3271 ractor_unsafe_check();
3272 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3273 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3279 ractor_unsafe_check();
3280 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3281 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3287 ractor_unsafe_check();
3288 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3289 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3295 ractor_unsafe_check();
3296 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3297 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
/* NOTE(review): extraction artifact — signatures/braces of these invoker
 * stubs were lost; embedded integers are original-file line numbers.
 * Code left byte-identical.
 *
 * Fragments of the second family of arity-specialized cfunc invokers
 * (presumably the ractor-safe `call_cfunc_N` variants — the headers are not
 * visible here, TODO confirm).  First line: the arity -1 convention
 * `(argc, argv, recv)`; then fixed arities 1..15 each cast `func` to the
 * exact signature and splat argv. */
 3311 return (*f)(argc, argv, recv);
 3325 return (*f)(recv, argv[0]);
 3332 return (*f)(recv, argv[0], argv[1]);
 3339 return (*f)(recv, argv[0], argv[1], argv[2]);
 3346 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
/* 5-argument variant. */
 3352 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
 3353 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
/* 6-argument variant. */
 3359 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
 3360 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
/* 7-argument variant. */
 3366 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
 3367 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
/* 8-argument variant. */
 3373 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
 3374 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
/* 9-argument variant. */
 3380 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
 3381 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
/* 10-argument variant. */
 3387 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
 3388 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
/* 11-argument variant. */
 3394 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
 3395 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
/* 12-argument variant. */
 3401 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
 3402 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
/* 13-argument variant. */
 3408 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
 3409 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
/* 14-argument variant. */
 3415 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
 3416 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
/* 15-argument variant. */
 3422 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
 3423 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
/* vm_cfp_consistent_p: sanity check that after a C call the VM control-frame
 * pointer advanced by exactly one frame (reg_cfp == ec->cfp + 1).  A stack
 * overflow in progress (RAISED_STACKOVERFLOW) is tolerated: the flag is
 * consumed (reset) and the check passes.  (Extraction note: return-type
 * line and some braces are missing; embedded integers are original line
 * numbers.) */
 3427vm_cfp_consistent_p(rb_execution_context_t *ec,
const rb_control_frame_t *reg_cfp)
 3429 const int ov_flags = RAISED_STACKOVERFLOW;
 3430 if (LIKELY(reg_cfp == ec->cfp + 1))
return TRUE;
 3431 if (rb_ec_raised_p(ec, ov_flags)) {
 3432 rb_ec_raised_reset(ec, ov_flags);
/* CHECK_CFP_CONSISTENCY: rb_bug()s with the caller's name and both frame
 * pointers when the consistency check above fails. */
 3438#define CHECK_CFP_CONSISTENCY(func) \
 3439 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
 3440 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
/* vm_method_cfunc_entry: returns the cfunc body of a callable method entry.
 * Under VM_DEBUG_VERIFY_METHOD_CACHE it first asserts the entry really is a
 * CFUNC (or NOTIMPLEMENTED) — any other method type is an rb_bug, emitted
 * via the METHOD_BUG helper macro.  Uses UNALIGNED_MEMBER_PTR because
 * body.cfunc lives inside a packed/unaligned union member. */
 3443const rb_method_cfunc_t *
 3444vm_method_cfunc_entry(
const rb_callable_method_entry_t *me)
 3446#if VM_DEBUG_VERIFY_METHOD_CACHE
 3447 switch (me->def->type) {
 3448 case VM_METHOD_TYPE_CFUNC:
 3449 case VM_METHOD_TYPE_NOTIMPLEMENTED:
 3451# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
 3453 METHOD_BUG(ATTRSET);
 3455 METHOD_BUG(BMETHOD);
 3458 METHOD_BUG(OPTIMIZED);
 3459 METHOD_BUG(MISSING);
 3460 METHOD_BUG(REFINED);
 3464 rb_bug(
"wrong method type: %d", me->def->type);
 3467 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
/* vm_call_cfunc_with_frame_: core C-method invocation.  Pushes a CFUNC
 * frame (tagged CFRAME; plus CFRAME_KW when a kw-splat is pending), fires
 * the dtrace C-method-entry hook, resets sp to stack_bottom, dispatches
 * through the arity-specific invoker (cfunc->invoker), then verifies cfp
 * consistency, pops the frame, and fires C_RETURN hooks.
 * (Extraction note: parts of the parameter list — argc/argv/stack_bottom —
 * and some intermediate lines are missing from this view.) */
 3471vm_call_cfunc_with_frame_(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling,
 3474 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
 3478 const rb_callable_method_entry_t *me = vm_cc_cme(cc);
 3479 const rb_method_cfunc_t *cfunc = vm_method_cfunc_entry(me);
 3481 VALUE recv = calling->recv;
 3482 VALUE block_handler = calling->block_handler;
 3483 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
 3485 if (UNLIKELY(calling->kw_splat)) {
 3486 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
 3489 VM_ASSERT(reg_cfp == ec->cfp);
 3491 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
 3494 vm_push_frame(ec, NULL, frame_type, recv,
 3495 block_handler, (
VALUE)me,
 3496 0, ec->cfp->sp, 0, 0);
 3498 int len = cfunc->argc;
/* Reset sp before the call so the callee sees a clean stack top. */
 3501 reg_cfp->sp = stack_bottom;
 3502 val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);
 3504 CHECK_CFP_CONSISTENCY(
"vm_call_cfunc");
 3506 rb_vm_pop_frame(ec);
 3508 VM_ASSERT(ec->cfp->sp == stack_bottom);
 3510 EXEC_EVENT_HOOK(ec,
RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
 3511 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
/* rb_splat_or_kwargs_p: true when the callinfo carries a splat argument or
 * any keyword-style argument (kwarg / kw-splat). */
 3518rb_splat_or_kwargs_p(
const struct rb_callinfo *restrict ci)
 3520 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
/* vm_call_cfunc_with_frame: fast path for a simple C call — arguments are
 * already contiguous on the VM stack; stack_bottom points at the receiver
 * slot, argv at the first argument just above it. */
 3524vm_call_cfunc_with_frame(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
 3526 int argc = calling->argc;
 3527 VALUE *stack_bottom = reg_cfp->sp - argc - 1;
 3528 VALUE *argv = &stack_bottom[1];
 3530 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
/* vm_call_cfunc_other: slow path for C calls with complex argument setup
 * (splat/kwargs).  CALLER_SETUP_ARG may collect the arguments into a heap
 * array (calling->heap_argv); in that case the call is made with the array's
 * contents.  Otherwise it installs vm_call_cfunc_with_frame as the fastpath
 * for future simple calls and dispatches through it now. */
 3534vm_call_cfunc_other(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
 3537 RB_DEBUG_COUNTER_INC(ccf_cfunc_other);
 3539 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
 3541 if (UNLIKELY(argv_ary = calling->heap_argv)) {
 3542 VM_ASSERT(!IS_ARGS_KEYWORD(ci));
 3545 VALUE *stack_bottom = reg_cfp->sp - 2;
 3547 VM_ASSERT(calling->argc == 1);
 3548 VM_ASSERT(RB_TYPE_P(argv_ary,
T_ARRAY));
 3551 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
/* Cache the fastpath only when no splat/kwargs are involved. */
 3554 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat);
 3556 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
/* vm_call_cfunc_array_argv: inlines a splatted array's elements back onto
 * the VM stack (when the element count fits within VM_ARGC_STACK_MAX) so the
 * plain contiguous-stack call path can be used; falls back to
 * vm_call_cfunc_other for oversized arrays.  stack_offset / argc_offset
 * let the *only_splat / *only_splat_kw callers account for a trailing
 * keyword hash. */
 3561vm_call_cfunc_array_argv(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling,
int stack_offset,
int argc_offset)
 3563 VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
 3566 if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
 3567 return vm_call_cfunc_other(ec, reg_cfp, calling);
 3571 calling->kw_splat = 0;
 3573 VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
 3574 VALUE *sp = stack_bottom;
 3575 CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
 3576 for(i = 0; i < argc; i++) {
 3581 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
/* vm_call_cfunc_only_splat: specialized handler for `f(*ary)` with a single
 * splat and nothing else.  If the array's last element is a hash flagged
 * RHASH_PASS_AS_KEYWORDS it must be treated as keywords, so bail out to the
 * generic vm_call_cfunc_other; otherwise expand the array in place. */
 3585vm_call_cfunc_only_splat(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
 3587 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
 3588 VALUE argv_ary = reg_cfp->sp[-1];
 3592 int argc_offset = 0;
 3594 if (UNLIKELY(argc > 0 &&
 3595 RB_TYPE_P((last_hash = argv[argc-1]),
T_HASH) &&
 3596 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
 3598 return vm_call_cfunc_other(ec, reg_cfp, calling);
 3602 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
/* vm_call_cfunc_only_splat_kw: `f(*ary, **kw)` — the kw hash sits on top of
 * the stack above the splat array (hence stack_offset 1); falls back to the
 * generic path when the specialized expansion does not apply. */
 3606vm_call_cfunc_only_splat_kw(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
 3608 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
 3609 VALUE keyword_hash = reg_cfp->sp[-1];
 3612 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
 3615 return vm_call_cfunc_other(ec, reg_cfp, calling);
/* vm_call_cfunc: entry dispatcher for C-implemented methods.  Picks and
 * caches (CC_SET_FASTPATH) the most specialized handler based on the call
 * shape: single splat -> only_splat; splat + kw-splat -> only_splat_kw;
 * everything else -> the generic vm_call_cfunc_other. */
 3619vm_call_cfunc(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
 3622 RB_DEBUG_COUNTER_INC(ccf_cfunc);
 3624 if (IS_ARGS_SPLAT(ci)) {
 3625 if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
 3627 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
 3628 return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
 3630 if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
 3632 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
 3633 return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
 3637 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
 3638 return vm_call_cfunc_other(ec, reg_cfp, calling);
/* vm_call_ivar: optimized attr_reader — reads the instance variable named by
 * the method definition's attr id directly via vm_getivar. */
 3642vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
 3645 RB_DEBUG_COUNTER_INC(ccf_ivar);
 3647 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE,
Qnil);
/* vm_call_attrset_direct: optimized attr_writer.  Takes the value from the
 * stack top and tries, in order: the shape-cached vm_setivar fast path,
 * the generic-ivar default path, and finally the slowpath that also updates
 * the attr cache.  The cc's cached attr index / destination shape id drive
 * the fast paths. */
 3652vm_call_attrset_direct(rb_execution_context_t *ec, rb_control_frame_t *cfp,
const struct rb_callcache *cc,
VALUE obj)
 3654 RB_DEBUG_COUNTER_INC(ccf_attrset);
 3655 VALUE val = *(cfp->sp - 1);
 3657 attr_index_t index = vm_cc_attr_index(cc);
 3658 shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
 3659 ID id = vm_cc_cme(cc)->def->body.attr.id;
 3661 VALUE res = vm_setivar(obj,
id, val, dest_shape_id, index);
 3670 res = vm_setivar_default(obj,
id, val, dest_shape_id, index);
 3671 if (!UNDEF_P(res)) {
 3676 res = vm_setivar_slowpath_attr(obj,
id, val, cc);
/* vm_call_attrset: thin wrapper binding the cached cc and receiver. */
 3682vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
 3684 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
/* vm_call_bmethod_body: invokes a bmethod (method defined from a Proc via
 * define_method).  Raises RuntimeError when the underlying Proc is not
 * shareable and was defined in a different Ractor than the caller's, then
 * delegates to rb_vm_invoke_bmethod with the calling info. */
 3688vm_call_bmethod_body(rb_execution_context_t *ec,
struct rb_calling_info *calling,
const VALUE *argv)
 3693 const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
 3694 VALUE procv = cme->def->body.bmethod.proc;
 3697 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
 3698 rb_raise(
rb_eRuntimeError,
"defined with an un-shareable Proc in a different Ractor");
 3702 GetProcPtr(procv, proc);
 3703 val = rb_vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
/* Forward declarations used by the iseq-bmethod fast path below. */
 3708static int vm_callee_setup_block_arg(rb_execution_context_t *ec,
struct rb_calling_info *calling,
const struct rb_callinfo *ci,
const rb_iseq_t *iseq,
VALUE *argv,
const enum arg_setup_type arg_setup_type);
 3709static VALUE invoke_bmethod(rb_execution_context_t *ec,
const rb_iseq_t *iseq,
VALUE self,
const struct rb_captured_block *captured,
const rb_callable_method_entry_t *me,
VALUE type,
int opt_pc);
/* vm_call_iseq_bmethod: fast path for bmethods whose Proc wraps an iseq
 * block.  Performs the same Ractor-shareability check as the generic path,
 * unwraps nested Proc blocks down to the iseq block, sets up parameters
 * (simple or complex), and pushes a BLOCK|BMETHOD|LAMBDA frame that executes
 * the iseq directly — avoiding the generic rb_vm_invoke_bmethod round trip. */
 3712vm_call_iseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
 3714 RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);
 3717 const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
 3718 VALUE procv = cme->def->body.bmethod.proc;
 3721 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
 3722 rb_raise(
rb_eRuntimeError,
"defined with an un-shareable Proc in a different Ractor");
 3726 GetProcPtr(procv, proc);
 3727 const struct rb_block *block = &proc->block;
/* Unwrap proc-wrapping-proc chains until the real iseq block is reached. */
 3729 while (vm_block_type(block) == block_type_proc) {
 3730 block = vm_proc_block(block->as.proc);
 3732 VM_ASSERT(vm_block_type(block) == block_type_iseq);
 3735 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
 3736 VALUE *
const argv = cfp->sp - calling->argc;
 3737 const int arg_size = ISEQ_BODY(iseq)->param.size;
 3740 if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
 3741 opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
 3744 opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
 3749 vm_push_frame(ec, iseq,
 3750 VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
 3752 VM_GUARDED_PREV_EP(captured->ep),
 3754 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
 3756 ISEQ_BODY(iseq)->local_table_size - arg_size,
 3757 ISEQ_BODY(iseq)->stack_max);
/* vm_call_noniseq_bmethod: generic bmethod path (Proc not backed by an
 * iseq block).  Sets up arguments (possibly on the heap), rewinds sp past
 * receiver+args, and delegates to vm_call_bmethod_body. */
 3763vm_call_noniseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
 3765 RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);
 3769 CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
 3770 if (UNLIKELY(calling->heap_argv)) {
 3775 argc = calling->argc;
 3778 cfp->sp += - argc - 1;
 3781 return vm_call_bmethod_body(ec, calling, argv);
/* vm_call_bmethod: dispatcher — inspects the Proc's underlying block type
 * once and caches the iseq vs. non-iseq handler as the fastpath. */
 3785vm_call_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
 3787 RB_DEBUG_COUNTER_INC(ccf_bmethod);
 3790 const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
 3791 VALUE procv = cme->def->body.bmethod.proc;
 3793 GetProcPtr(procv, proc);
 3794 const struct rb_block *block = &proc->block;
 3796 while (vm_block_type(block) == block_type_proc) {
 3797 block = vm_proc_block(block->as.proc);
 3799 if (vm_block_type(block) == block_type_iseq) {
 3800 CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
 3801 return vm_call_iseq_bmethod(ec, cfp, calling);
 3804 CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
 3805 return vm_call_noniseq_bmethod(ec, cfp, calling);
/* rb_find_defined_class_by_owner: walks up from current_class looking for
 * the ancestor whose owner matches target_owner; returns current_class when
 * no better match is found.  (Extraction note: parts of the loop body are
 * missing from this view.) */
 3809rb_find_defined_class_by_owner(
VALUE current_class,
VALUE target_owner)
 3811 VALUE klass = current_class;
 3814 if (RB_TYPE_P(klass,
T_ICLASS) &&
FL_TEST(klass, RICLASS_IS_ORIGIN) &&
 3819 while (
RTEST(klass)) {
 3821 if (owner == target_owner) {
 3827 return current_class;
/* aliased_callable_method_entry: resolves the original method entry behind
 * an alias.  If the original entry has no defined_class yet, it is
 * complemented with one found via the owner; the result is cached back into
 * the alias definition — directly when this def has a single reference,
 * otherwise via a fresh ALIAS definition so other sharers are unaffected. */
 3830static const rb_callable_method_entry_t *
 3831aliased_callable_method_entry(
const rb_callable_method_entry_t *me)
 3833 const rb_method_entry_t *orig_me = me->def->body.alias.original_me;
 3834 const rb_callable_method_entry_t *cme;
 3836 if (orig_me->defined_class == 0) {
 3837 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
 3838 VM_ASSERT(RB_TYPE_P(orig_me->owner,
T_MODULE));
 3839 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
 3841 if (me->def->reference_count == 1) {
 3842 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
 3845 rb_method_definition_t *def =
 3846 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
 3847 rb_method_definition_set((rb_method_entry_t *)me, def, (
void *)cme);
 3851 cme = (
const rb_callable_method_entry_t *)orig_me;
 3854 VM_ASSERT(callable_method_entry_p(cme));
/* rb_aliased_callable_method_entry: public wrapper over the static helper. */
 3858const rb_callable_method_entry_t *
 3859rb_aliased_callable_method_entry(
const rb_callable_method_entry_t *me)
 3861 return aliased_callable_method_entry(me);
/* vm_call_alias: replaces the callcache with an on-stack cc pointing at the
 * alias target's resolved method entry, then re-dispatches by method type. */
 3865vm_call_alias(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
 3867 calling->cc = &VM_CC_ON_STACK(
Qundef,
 3870 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
 3872 return vm_call_method_each_type(ec, cfp, calling);
/* ci_missing_reason: maps the callinfo's call-style flags (vcall / fcall /
 * super) to the corresponding method_missing_reason bits. */
 3875static enum method_missing_reason
 3878 enum method_missing_reason stat = MISSING_NOENTRY;
 3879 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
 3880 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
 3881 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
 3885static VALUE vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling);
/* vm_call_symbol: implements sending a Symbol (or string interned on the
 * fly) as a method name — the core of send/__send__ and Symbol#to_proc
 * style dispatch.  If the symbol maps to no ID, method_missing is arranged;
 * when the receiver's method_missing is the basic definition, a NoMethodError
 * is built eagerly with the visibility (priv) derived from FCALL/VCALL flags.
 * Handles both heap-argv and on-stack argument layouts, rebuilds ci/cc on
 * the stack for the resolved mid, and finally performs visibility-aware
 * dispatch (public -> each_type; private/protected -> method_missing unless
 * FCALL).  (Extraction note: several intermediate lines are missing.) */
 3888vm_call_symbol(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
 3891 ASSUME(calling->argc >= 0);
 3893 enum method_missing_reason missing_reason = MISSING_NOENTRY;
 3894 int argc = calling->argc;
 3895 VALUE recv = calling->recv;
 3898 flags |= VM_CALL_OPT_SEND;
 3900 if (UNLIKELY(! mid)) {
 3901 mid = idMethodMissing;
 3902 missing_reason = ci_missing_reason(ci);
 3903 ec->method_missing_reason = missing_reason;
 3906 if (UNLIKELY(argv_ary = calling->heap_argv)) {
 3907 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
 3908 rb_ary_unshift(argv_ary, symbol);
 3911 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
 3912 VALUE exc = rb_make_no_method_exception(
 3917 rb_ary_unshift(argv_ary, rb_str_intern(symbol));
 3934 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
 3937 argc = ++calling->argc;
 3939 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
 3942 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
 3943 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
 3944 VALUE exc = rb_make_no_method_exception(
 3950 TOPN(i) = rb_str_intern(symbol);
 3956 .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
 3959 calling->cc = &VM_CC_ON_STACK(klass,
 3961 { .method_missing_reason = missing_reason },
 3962 rb_callable_method_entry_with_refinements(klass, mid, NULL));
 3964 if (flags & VM_CALL_FCALL) {
 3965 return vm_call_method(ec, reg_cfp, calling);
 3969 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
 3971 if (vm_cc_cme(cc) != NULL) {
 3972 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
 3973 case METHOD_VISI_PUBLIC:
 3974 return vm_call_method_each_type(ec, reg_cfp, calling);
 3975 case METHOD_VISI_PRIVATE:
 3976 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
/* fallthrough to method_missing below (intermediate lines not visible). */
 3978 case METHOD_VISI_PROTECTED:
 3979 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
 3982 VM_UNREACHABLE(vm_call_method);
 3984 return vm_call_method_missing(ec, reg_cfp, calling);
 3987 return vm_call_method_nome(ec, reg_cfp, calling);
/* vm_call_opt_send0: common tail of the optimized `send` implementations.
 * Extracts the method-name symbol from the first argument (raising
 * ArgumentError when send was called with no arguments) and forwards to
 * vm_call_symbol.  (Extraction note: the symbol-extraction lines are
 * missing from this view.) */
 3991vm_call_opt_send0(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling,
int flags)
 3997 i = calling->argc - 1;
 3999 if (calling->argc == 0) {
 4000 rb_raise(rb_eArgError,
"no method name given");
 4024 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
/* vm_call_opt_send_complex: `send` with splat/kwargs.  When arguments land
 * in a heap array, the method name is shifted off it and a trailing kw hash
 * is tagged RHASH_PASS_AS_KEYWORDS so it survives as keywords. */
 4028vm_call_opt_send_complex(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
 4030 RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
 4032 int flags = VM_CALL_FCALL;
 4036 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
 4037 if (UNLIKELY(argv_ary = calling->heap_argv)) {
 4038 sym = rb_ary_shift(argv_ary);
 4039 flags |= VM_CALL_ARGS_SPLAT;
 4040 if (calling->kw_splat) {
 4041 VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
 4042 ((
struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
 4043 calling->kw_splat = 0;
 4045 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
 4048 if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
 4049 return vm_call_opt_send0(ec, reg_cfp, calling, flags);
/* vm_call_opt_send_simple: `send` with plain positional args. */
 4053vm_call_opt_send_simple(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
 4055 RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
 4056 return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
/* vm_call_opt_send: dispatcher — picks the complex handler only for the
 * splat / kw-splat / kwarg call shapes listed in the condition, and caches
 * the choice as the fastpath. */
 4060vm_call_opt_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
 4062 RB_DEBUG_COUNTER_INC(ccf_opt_send);
 4065 int flags = vm_ci_flag(ci);
 4067 if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
 4068 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
 4069 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
 4070 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
 4071 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
 4072 return vm_call_opt_send_complex(ec, reg_cfp, calling);
 4075 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
 4076 return vm_call_opt_send_simple(ec, reg_cfp, calling);
/* vm_call_method_missing_body: converts the current call into a call of
 * `method_missing`.  Shifts the original mid onto the stack as a leading
 * Symbol argument (argv[0]), records the reason on the execution context,
 * rebuilds ci/cc on the stack for idMethodMissing (resolved without
 * refinements), and re-enters vm_call_method. */
 4080vm_call_method_missing_body(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling,
 4081 const struct rb_callinfo *orig_ci,
enum method_missing_reason reason)
 4083 RB_DEBUG_COUNTER_INC(ccf_method_missing);
 4085 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
 4086 unsigned int argc, flag;
 4088 flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
 4089 argc = ++calling->argc;
/* Need one extra stack slot for the prepended method-name symbol. */
 4092 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
 4093 vm_check_canary(ec, reg_cfp->sp);
 4097 argv[0] =
ID2SYM(vm_ci_mid(orig_ci));
 4100 ec->method_missing_reason = reason;
 4102 .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
 4105 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }},
 4106 rb_callable_method_entry_without_refinements(
CLASS_OF(calling->recv), idMethodMissing, NULL));
 4107 return vm_call_method(ec, reg_cfp, calling);
/* vm_call_method_missing: cached-cc entry point for the body above. */
 4111vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
 4113 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc))
;
 4116static const rb_callable_method_entry_t *refined_method_callable_without_refinement(
const rb_callable_method_entry_t *me);
/* vm_call_zsuper: implements a zsuper (argument-forwarding `super`) call —
 * looks the method up afresh in `klass`; missing entry goes to nome handling,
 * a refined entry is unwrapped to its original, and dispatch proceeds with a
 * fresh on-stack cc. */
 4118vm_call_zsuper(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
VALUE klass)
 4122 const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, vm_ci_mid(calling->cd->ci)) : NULL;
 4124 return vm_call_method_nome(ec, cfp, calling);
 4126 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
 4127 cme->def->body.refined.orig_me) {
 4128 cme = refined_method_callable_without_refinement(cme);
 4131 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }}, cme);
 4133 return vm_call_method_each_type(ec, cfp, calling);
/* find_refinement: looks up the refinement module for `klass` in the
 * refinements hash active at the call site; nil hash short-circuits. */
 4137find_refinement(
VALUE refinements,
VALUE klass)
 4139 if (
NIL_P(refinements)) {
 4142 return rb_hash_lookup(refinements, klass);
/* current_method_entry: walks up from cfp to the frame of the enclosing
 * method when the current frame is a block (its iseq's local_iseq differs),
 * stopping at a stack-overflowed frame; otherwise returns cfp itself. */
 4145PUREFUNC(
static rb_control_frame_t * current_method_entry(
const rb_execution_context_t *ec, rb_control_frame_t *cfp));
 4146static rb_control_frame_t *
 4147current_method_entry(
const rb_execution_context_t *ec, rb_control_frame_t *cfp)
 4149 rb_control_frame_t *top_cfp = cfp;
 4151 if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
 4152 const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;
 4155 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
 4156 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
 4160 }
while (cfp->iseq != local_iseq);
/* refined_method_callable_without_refinement: unwraps a refined method
 * entry to its original (pre-refinement) entry; an entry without a
 * defined_class is handled separately (lines not visible here), and an
 * undefined result is detected via UNDEFINED_METHOD_ENTRY_P. */
 4165static const rb_callable_method_entry_t *
 4166refined_method_callable_without_refinement(
const rb_callable_method_entry_t *me)
 4168 const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
 4169 const rb_callable_method_entry_t *cme;
 4171 if (orig_me->defined_class == 0) {
 4176 cme = (
const rb_callable_method_entry_t *)orig_me;
 4179 VM_ASSERT(callable_method_entry_p(cme));
 4181 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
/* search_refined_method: finds the method entry to invoke for a call on a
 * refined method.  Scans the cref chain for an active refinement of the
 * method's owner; a refinement match is skipped for `super` when it would
 * re-enter the currently-executing method (infinite recursion guard via
 * current_method_entry).  Falls back to the refined method's original entry,
 * or a fresh lookup, when no refinement applies.  (Extraction note: several
 * branch bodies are missing from this view.) */
 4188static const rb_callable_method_entry_t *
 4189search_refined_method(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
 4191 ID mid = vm_ci_mid(calling->cd->ci);
 4192 const rb_cref_t *cref = vm_get_cref(cfp->ep);
 4194 const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
 4196 for (; cref; cref = CREF_NEXT(cref)) {
 4197 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
 4198 if (
NIL_P(refinement))
continue;
 4200 const rb_callable_method_entry_t *
const ref_me =
 4201 rb_callable_method_entry(refinement, mid);
 4204 if (vm_cc_call(cc) == vm_call_super_method) {
 4205 const rb_control_frame_t *top_cfp = current_method_entry(ec, cfp);
 4206 const rb_callable_method_entry_t *top_me = rb_vm_frame_method_entry(top_cfp);
 4207 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
 4212 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
 4213 cme->def != ref_me->def) {
 4216 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
 4225 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
 4226 return refined_method_callable_without_refinement(vm_cc_cme(cc));
 4230 const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, mid) : NULL;
/* vm_call_refined: resolves via search_refined_method; when a call-data cc
 * exists, a new heap callcache (cc_type_refinement) is installed, otherwise
 * an on-stack cc is used; no entry found goes to nome handling. */
 4236vm_call_refined(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
 4238 const rb_callable_method_entry_t *ref_cme = search_refined_method(ec, cfp, calling);
 4241 if (calling->cd->cc) {
 4242 const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
 4244 return vm_call_method(ec, cfp, calling);
 4247 struct rb_callcache *ref_cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }}, ref_cme);
 4248 calling->cc= ref_cc;
 4249 return vm_call_method(ec, cfp, calling);
 4253 return vm_call_method_nome(ec, cfp, calling);
 4257static inline VALUE vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling,
const struct rb_callinfo *ci,
bool is_lambda,
VALUE block_handler);
/* vm_invoke_block_opt_call: shared tail of Proc#call / yield optimization —
 * shifts the arguments down one slot (dropping the receiver) and invokes the
 * block handler as a non-lambda block. */
 4259NOINLINE(
static VALUE
 4260 vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
 4264vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
 4267 int argc = calling->argc;
 4270 if (argc > 0)
MEMMOVE(&TOPN(argc), &TOPN(argc-1),
VALUE, argc);
 4273 return vm_invoke_block(ec, reg_cfp, calling, ci,
false, block_handler);
/* vm_call_opt_call: optimized Proc#call — the receiver is the Proc. */
 4277vm_call_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
 4279 RB_DEBUG_COUNTER_INC(ccf_opt_call);
 4282 VALUE procval = calling->recv;
 4283 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
/* vm_call_opt_block_call: optimized call of the method's block parameter.
 * Valid only while Proc#call is unredefined (BOP_CALL check); otherwise the
 * block handler is materialized into a Proc and dispatched generically. */
 4287vm_call_opt_block_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
 4289 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
 4291 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
 4294 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
 4295 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
 4298 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
 4299 calling->cc = rb_vm_search_method_slowpath(ci,
CLASS_OF(calling->recv));
 4300 return vm_call_general(ec, reg_cfp, calling);
/* vm_call_opt_struct_aref0: optimized Struct member reader — asserts the
 * receiver is a T_STRUCT and reads the member at the index baked into the
 * optimized method definition. */
 4305vm_call_opt_struct_aref0(rb_execution_context_t *ec,
struct rb_calling_info *calling)
 4307 VALUE recv = calling->recv;
 4309 VM_ASSERT(RB_TYPE_P(recv,
T_STRUCT));
 4310 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
 4311 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
 4313 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
 4314 return internal_RSTRUCT_GET(recv,
off);
/* vm_call_opt_struct_aref: call-handler wrapper around the reader. */
 4318vm_call_opt_struct_aref(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
 4320 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
 4322 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
/* vm_call_opt_struct_aset0: optimized Struct member writer — same asserts
 * as the reader, stores `val` at the baked-in member index. */
 4328vm_call_opt_struct_aset0(rb_execution_context_t *ec,
struct rb_calling_info *calling,
VALUE val)
 4330 VALUE recv = calling->recv;
 4332 VM_ASSERT(RB_TYPE_P(recv,
T_STRUCT));
 4333 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
 4334 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
 4338 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
 4339 internal_RSTRUCT_SET(recv,
off, val);
/* vm_call_opt_struct_aset: call-handler wrapper; the value is stack top. */
 4345vm_call_opt_struct_aset(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
 4347 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
 4349 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
 4354NOINLINE(
static VALUE vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
/* VM_CALL_METHOD_ATTR(var, func, nohook): runs `func` capturing its result
 * in `var`; when C_CALL/C_RETURN tracepoints are enabled it brackets the
 * call with EXEC_EVENT_HOOKs, otherwise it executes the `nohook` actions
 * (typically the fastpath-caching statements). */
 4357#define VM_CALL_METHOD_ATTR(var, func, nohook) \
 4358 if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
 4359 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
 4360 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
 4362 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
 4363 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
/* vm_call_optimized: dispatches the OPTIMIZED method subtypes — send,
 * Proc#call, block call, and Struct member reader/writer — installing the
 * matching fastpath.  The struct paths only cache the fastpath for simple
 * argument shapes (VM_CALL_ARGS_SIMPLE). */
 4371vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling,
 4374 switch (vm_cc_cme(cc)->def->body.optimized.type) {
 4375 case OPTIMIZED_METHOD_TYPE_SEND:
 4376 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
 4377 return vm_call_opt_send(ec, cfp, calling);
 4378 case OPTIMIZED_METHOD_TYPE_CALL:
 4379 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
 4380 return vm_call_opt_call(ec, cfp, calling);
 4381 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
 4382 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
 4383 return vm_call_opt_block_call(ec, cfp, calling);
 4384 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
 4385 CALLER_SETUP_ARG(cfp, calling, ci, 0);
 4389 VM_CALL_METHOD_ATTR(v,
 4390 vm_call_opt_struct_aref(ec, cfp, calling),
 4391 set_vm_cc_ivar(cc); \
 4392 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
 4395 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
 4396 CALLER_SETUP_ARG(cfp, calling, ci, 1);
 4400 VM_CALL_METHOD_ATTR(v,
 4401 vm_call_opt_struct_aset(ec, cfp, calling),
 4402 set_vm_cc_ivar(cc); \
 4403 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
 4407 rb_bug(
"vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
/* vm_call_method_each_type: dispatches a resolved method entry by its
 * definition type, caching the chosen handler as the fastpath.  ATTRSET and
 * IVAR get tracepoint-aware direct ivar access (via VM_CALL_METHOD_ATTR),
 * with an unmarkable on-stack cc used when the cached cc cannot hold attr
 * state; ZSUPER re-resolves in the defining class's origin; REFINED goes
 * through refinement resolution.  Any unknown type is an rb_bug. */
 4412vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
 4416 const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
 4419 VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));
 4421 switch (cme->def->type) {
 4422 case VM_METHOD_TYPE_ISEQ:
 4423 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
 4424 return vm_call_iseq_setup(ec, cfp, calling);
 4426 case VM_METHOD_TYPE_NOTIMPLEMENTED:
 4427 case VM_METHOD_TYPE_CFUNC:
 4428 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
 4429 return vm_call_cfunc(ec, cfp, calling);
 4431 case VM_METHOD_TYPE_ATTRSET:
 4432 CALLER_SETUP_ARG(cfp, calling, ci, 1);
 4436 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG);
 4438 if (vm_cc_markable(cc)) {
 4439 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
 4440 VM_CALL_METHOD_ATTR(v,
 4441 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
 4442 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
/* Unmarkable on-stack cc used when the cached cc can't be written to. */
 4448 VM_CALLCACHE_UNMARKABLE |
 4449 VM_CALLCACHE_ON_STACK,
 4455 .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,
 4460 VM_CALL_METHOD_ATTR(v,
 4461 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
 4462 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
 4466 case VM_METHOD_TYPE_IVAR:
 4467 CALLER_SETUP_ARG(cfp, calling, ci, 0);
 4469 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
 4470 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT);
 4471 VM_CALL_METHOD_ATTR(v,
 4472 vm_call_ivar(ec, cfp, calling),
 4473 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
 4476 case VM_METHOD_TYPE_MISSING:
 4477 vm_cc_method_missing_reason_set(cc, 0);
 4478 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
 4479 return vm_call_method_missing(ec, cfp, calling);
 4481 case VM_METHOD_TYPE_BMETHOD:
 4482 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
 4483 return vm_call_bmethod(ec, cfp, calling);
 4485 case VM_METHOD_TYPE_ALIAS:
 4486 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
 4487 return vm_call_alias(ec, cfp, calling);
 4489 case VM_METHOD_TYPE_OPTIMIZED:
 4490 return vm_call_optimized(ec, cfp, calling, ci, cc);
 4492 case VM_METHOD_TYPE_UNDEF:
 4495 case VM_METHOD_TYPE_ZSUPER:
 4496 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
 4498 case VM_METHOD_TYPE_REFINED:
 4501 return vm_call_refined(ec, cfp, calling);
 4504 rb_bug(
"vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
 4507NORETURN(
static void vm_raise_method_missing(rb_execution_context_t *ec,
int argc,
const VALUE *argv,
VALUE obj,
int call_status));
/* vm_call_method_nome: handles "no method entry" — if the missing method is
 * method_missing itself, raises NoMethodError directly (avoiding infinite
 * recursion); otherwise routes through method_missing with the reason
 * derived from the call-site flags. */
 4510vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
 4514 const int stat = ci_missing_reason(ci);
 4516 if (vm_ci_mid(ci) == idMethodMissing) {
 4517 if (UNLIKELY(calling->heap_argv)) {
 4521 rb_control_frame_t *reg_cfp = cfp;
 4522 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
 4523 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
 4527 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
/* vm_defined_class_for_protected_call: for protected-visibility checks,
 * prefers the refined class over the raw defined_class when one exists. */
 4537vm_defined_class_for_protected_call(
const rb_callable_method_entry_t *me)
 4539 VALUE defined_class = me->defined_class;
 4540 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
 4541 return NIL_P(refined_class) ? defined_class : refined_class;
4545vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp,
struct rb_calling_info *calling)
4550 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4552 if (vm_cc_cme(cc) != NULL) {
4553 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4554 case METHOD_VISI_PUBLIC:
4555 return vm_call_method_each_type(ec, cfp, calling);
4557 case METHOD_VISI_PRIVATE:
4558 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
4559 enum method_missing_reason stat = MISSING_PRIVATE;
4560 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4562 vm_cc_method_missing_reason_set(cc, stat);
4563 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4564 return vm_call_method_missing(ec, cfp, calling);
4566 return vm_call_method_each_type(ec, cfp, calling);
4568 case METHOD_VISI_PROTECTED:
4569 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
4570 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
4572 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4573 return vm_call_method_missing(ec, cfp, calling);
4577 VM_ASSERT(vm_cc_cme(cc) != NULL);
4580 calling->cc = &cc_on_stack;
4581 return vm_call_method_each_type(ec, cfp, calling);
4584 return vm_call_method_each_type(ec, cfp, calling);
4587 rb_bug(
"unreachable");
4591 return vm_call_method_nome(ec, cfp, calling);
/*
 * Generic (non-specialized) call handler: bumps the ccf_general debug
 * counter and defers to the full vm_call_method dispatch path.
 * (Return type and braces lost in extraction; code kept byte-identical.)
 */
4596vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
4598 RB_DEBUG_COUNTER_INC(ccf_general);
4599 return vm_call_method(ec, reg_cfp, calling);
4605 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
4606 VM_ASSERT(cc != vm_cc_empty());
4608 *(vm_call_handler *)&cc->call_ = vm_call_general;
4612vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
struct rb_calling_info *calling)
4614 RB_DEBUG_COUNTER_INC(ccf_super_method);
4619 if (ec == NULL) rb_bug(
"unreachable");
4622 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
4623 return vm_call_method(ec, reg_cfp, calling);
/*
 * Compute the class where a normal `super` search should start.
 * Visible steps: under some condition (lost in extraction) klass is
 * replaced by RBASIC(klass)->klass -- presumably unwrapping an
 * include/prepend iclass; TODO confirm against upstream -- then
 * normalized through RCLASS_ORIGIN.  (Return statement not visible.)
 */
4629vm_search_normal_superclass(
VALUE klass)
4634 klass =
RBASIC(klass)->klass;
4636 klass = RCLASS_ORIGIN(klass);
/*
 * vm_super_outside: NORETURN helper raised when `super` is used outside
 * of a method (body lost in extraction -- presumably raises; confirm).
 */
4640NORETURN(
static void vm_super_outside(
void));
4643vm_super_outside(
void)
/*
 * empty_cc_for_super: returns the address of the file-static
 * vm_empty_cc_for_super call cache, used as a sentinel cc for super
 * calls with no cached method entry.
 */
4649empty_cc_for_super(
void)
4651 return &vm_empty_cc_for_super;
4655vm_search_super_method(
const rb_control_frame_t *reg_cfp,
struct rb_call_data *cd,
VALUE recv)
4657 VALUE current_defined_class;
4658 const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
4664 current_defined_class = vm_defined_class_for_protected_call(me);
4667 reg_cfp->iseq != method_entry_iseqptr(me) &&
4670 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
4674 "self has wrong type to call super in this context: "
4675 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
4680 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
4682 "implicit argument passing of super from method defined"
4683 " by define_method() is not supported."
4684 " Specify all arguments explicitly.");
4687 ID mid = me->def->original_id;
4690 cd->ci = vm_ci_new_runtime(mid,
4693 vm_ci_kwarg(cd->ci));
4699 VALUE klass = vm_search_normal_superclass(me->defined_class);
4703 cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
4707 cc = vm_search_method_fastpath((
VALUE)reg_cfp->iseq, cd, klass);
4708 const rb_callable_method_entry_t *cached_cme = vm_cc_cme(cc);
4711 if (cached_cme == NULL) {
4713 cd->cc = empty_cc_for_super();
4715 else if (cached_cme->called_id != mid) {
4716 const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);
4718 cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
4722 cd->cc = cc = empty_cc_for_super();
4726 switch (cached_cme->def->type) {
4728 case VM_METHOD_TYPE_REFINED:
4730 case VM_METHOD_TYPE_ATTRSET:
4731 case VM_METHOD_TYPE_IVAR:
4732 vm_cc_call_set(cc, vm_call_super_method);
4740 VM_ASSERT((vm_cc_cme(cc),
true));
/*
 * True if the given Proc object is a lambda: unwraps the rb_proc_t via
 * GetProcPtr and reads its is_lambda flag.  (The guard for a nil/absent
 * procval, if any, was lost in extraction.)
 */
4748block_proc_is_lambda(
const VALUE procval)
4753 GetProcPtr(procval, proc);
4754 return proc->is_lambda;
4762vm_yield_with_cfunc(rb_execution_context_t *ec,
4764 VALUE self,
int argc,
const VALUE *argv,
int kw_splat,
VALUE block_handler,
4765 const rb_callable_method_entry_t *me)
4767 int is_lambda = FALSE;
4768 VALUE val, arg, blockarg;
4770 const struct vm_ifunc *ifunc = captured->code.ifunc;
4775 else if (argc == 0) {
4782 blockarg = rb_vm_bh_to_procval(ec, block_handler);
4784 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
4786 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
4789 vm_push_frame(ec, (
const rb_iseq_t *)captured->code.ifunc,
4792 VM_GUARDED_PREV_EP(captured->ep),
4794 0, ec->cfp->sp, 0, 0);
4795 val = (*ifunc->func)(arg, (
VALUE)ifunc->data, argc, argv, blockarg);
4796 rb_vm_pop_frame(ec);
/*
 * Public wrapper over vm_yield_with_cfunc: yields to an ifunc block with
 * the captured self, no kw_splat (0), no block handler, and no bmethod
 * method entry (NULL).
 */
4802rb_vm_yield_with_cfunc(rb_execution_context_t *ec,
const struct rb_captured_block *captured,
int argc,
const VALUE *argv)
4804 return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
/*
 * Yield to a Symbol block (&:sym): converts the symbol to an ID and
 * calls rb_sym_proc_call, converting the incoming block handler to a
 * Proc value for it.
 */
4808vm_yield_with_symbol(rb_execution_context_t *ec,
VALUE symbol,
int argc,
const VALUE *argv,
int kw_splat,
VALUE block_handler)
4810 return rb_sym_proc_call(
SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
4814vm_callee_setup_block_arg_arg0_splat(rb_control_frame_t *cfp,
const rb_iseq_t *iseq,
VALUE *argv,
VALUE ary)
4819 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
4821 for (i=0; i<
len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
4829vm_callee_setup_block_arg_arg0_check(
VALUE *argv)
4831 VALUE ary, arg0 = argv[0];
4832 ary = rb_check_array_type(arg0);
4836 VM_ASSERT(argv[0] == arg0);
4842vm_callee_setup_block_arg(rb_execution_context_t *ec,
struct rb_calling_info *calling,
const struct rb_callinfo *ci,
const rb_iseq_t *iseq,
VALUE *argv,
const enum arg_setup_type arg_setup_type)
4844 if (rb_simple_iseq_p(iseq)) {
4845 rb_control_frame_t *cfp = ec->cfp;
4848 CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);
4850 if (arg_setup_type == arg_setup_block &&
4851 calling->argc == 1 &&
4852 ISEQ_BODY(iseq)->param.flags.has_lead &&
4853 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
4854 !
NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
4855 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
4858 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
4859 if (arg_setup_type == arg_setup_block) {
4860 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
4862 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
4863 for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] =
Qnil;
4864 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
4866 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
4867 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
4871 argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
4878 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
4883vm_yield_setup_args(rb_execution_context_t *ec,
const rb_iseq_t *iseq,
const int argc,
VALUE *argv,
int flags,
VALUE block_handler,
enum arg_setup_type arg_setup_type)
4887 calling = &calling_entry;
4888 calling->argc = argc;
4889 calling->block_handler = block_handler;
4890 calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
4892 calling->heap_argv = 0;
4893 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);
4895 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
4901vm_invoke_iseq_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4903 bool is_lambda,
VALUE block_handler)
4906 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4907 const int arg_size = ISEQ_BODY(iseq)->param.size;
4908 VALUE *
const rsp = GET_SP() - calling->argc;
4909 VALUE *
const argv = rsp;
4910 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
4914 vm_push_frame(ec, iseq,
4915 VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
4917 VM_GUARDED_PREV_EP(captured->ep), 0,
4918 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4920 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
4926vm_invoke_symbol_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4928 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
4930 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
4931 int flags = vm_ci_flag(ci);
4933 if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
4934 ((calling->argc == 0) ||
4935 (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
4936 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
4937 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
4938 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
4940 if (UNLIKELY(calling->heap_argv)) {
4941#if VM_ARGC_STACK_MAX < 0
4943 rb_raise(rb_eArgError,
"no receiver given");
4946 calling->recv = rb_ary_shift(calling->heap_argv);
4949 reg_cfp->sp[-1] = reg_cfp->sp[-2];
4950 reg_cfp->sp[-2] = calling->recv;
4951 flags |= VM_CALL_ARGS_SPLAT;
4954 if (calling->argc < 1) {
4955 rb_raise(rb_eArgError,
"no receiver given");
4957 calling->recv = TOPN(--calling->argc);
4959 if (calling->kw_splat) {
4960 flags |= VM_CALL_KW_SPLAT;
4964 if (calling->argc < 1) {
4965 rb_raise(rb_eArgError,
"no receiver given");
4967 calling->recv = TOPN(--calling->argc);
4970 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
4974vm_invoke_ifunc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4976 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
4981 CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
4982 argc = calling->argc;
4983 val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ?
RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
/*
 * Convert a Proc object into the VM's tagged block-handler
 * representation, dispatching on the proc's underlying block type
 * (iseq / ifunc / symbol / proc).  The trailing VM_UNREACHABLE marks
 * the switch as exhaustive.
 */
4989vm_proc_to_block_handler(
VALUE procval)
4991 const struct rb_block *block = vm_proc_block(procval);
4993 switch (vm_block_type(block)) {
4994 case block_type_iseq:
4995 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
4996 case block_type_ifunc:
4997 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
4998 case block_type_symbol:
4999 return VM_BH_FROM_SYMBOL(block->as.symbol);
5000 case block_type_proc:
5001 return VM_BH_FROM_PROC(block->as.proc);
5003 VM_UNREACHABLE(vm_yield_with_proc);
/*
 * Invoke a block handler of type proc: repeatedly unwraps proc-typed
 * handlers (a proc may wrap another proc), updating is_lambda from each
 * unwrapped Proc, then re-enters vm_invoke_block with the fully
 * unwrapped handler.  (Parameter lines for calling/ci lost in
 * extraction; code kept byte-identical.)
 */
5008vm_invoke_proc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
5010 bool is_lambda,
VALUE block_handler)
5012 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
5013 VALUE proc = VM_BH_TO_PROC(block_handler);
5014 is_lambda = block_proc_is_lambda(proc);
5015 block_handler = vm_proc_to_block_handler(proc);
5018 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
/*
 * Top-level block invocation dispatcher: selects the concrete invoker
 * (iseq / ifunc / proc / symbol) via a function pointer based on the
 * block handler's type, rb_bug()-ing on an unknown type, then tail-calls
 * the selected invoker with the same arguments.
 */
5022vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
5024 bool is_lambda,
VALUE block_handler)
/* function pointer matching the vm_invoke_*_block signature */
5026 VALUE (*func)(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
5028 bool is_lambda,
VALUE block_handler);
5030 switch (vm_block_handler_type(block_handler)) {
5031 case block_handler_type_iseq: func = vm_invoke_iseq_block;
break;
5032 case block_handler_type_ifunc: func = vm_invoke_ifunc_block;
break;
5033 case block_handler_type_proc: func = vm_invoke_proc_block;
break;
5034 case block_handler_type_symbol: func = vm_invoke_symbol_block;
break;
5035 default: rb_bug(
"vm_invoke_block: unreachable");
5038 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5042vm_make_proc_with_iseq(
const rb_iseq_t *blockiseq)
5044 const rb_execution_context_t *ec = GET_EC();
5045 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
5049 rb_bug(
"vm_make_proc_with_iseq: unreachable");
5052 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
5053 captured->code.iseq = blockiseq;
5055 return rb_vm_make_proc(ec, captured,
rb_cProc);
5059vm_once_exec(
VALUE iseq)
5061 VALUE proc = vm_make_proc_with_iseq((rb_iseq_t *)iseq);
5066vm_once_clear(
VALUE data)
5069 is->once.running_thread = NULL;
5081 args[0] = obj; args[1] =
Qfalse;
5083 if (!UNDEF_P(r) &&
RTEST(r)) {
5092vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type,
VALUE obj,
VALUE v)
5095 enum defined_type
type = (
enum defined_type)op_type;
5102 return rb_gvar_defined(
SYM2ID(obj));
5104 case DEFINED_CVAR: {
5105 const rb_cref_t *cref = vm_get_cref(GET_EP());
5106 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
5111 case DEFINED_CONST_FROM: {
5112 bool allow_nil =
type == DEFINED_CONST;
5114 return vm_get_ev_const(ec, klass,
SYM2ID(obj), allow_nil,
true);
5119 return rb_ec_obj_respond_to(ec, v,
SYM2ID(obj), TRUE);
5121 case DEFINED_METHOD:{
5123 const rb_method_entry_t *me = rb_method_entry_with_refinements(klass,
SYM2ID(obj), NULL);
5126 switch (METHOD_ENTRY_VISI(me)) {
5127 case METHOD_VISI_PRIVATE:
5129 case METHOD_VISI_PROTECTED:
5133 case METHOD_VISI_PUBLIC:
5137 rb_bug(
"vm_defined: unreachable: %u", (
unsigned int)METHOD_ENTRY_VISI(me));
5141 return check_respond_to_missing(obj, v);
5146 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
5150 case DEFINED_ZSUPER:
5152 const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(GET_CFP());
5155 VALUE klass = vm_search_normal_superclass(me->defined_class);
5156 if (!klass)
return false;
5158 ID id = me->def->original_id;
5165 return RTEST(vm_backref_defined(ec, GET_LEP(),
FIX2INT(obj)));
5167 rb_bug(
"unimplemented defined? type (VM)");
/*
 * Public entry point for the `defined?` instruction: thin wrapper that
 * forwards all arguments to the static vm_defined helper.
 */
5175rb_vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type,
VALUE obj,
VALUE v)
5177 return vm_defined(ec, reg_cfp, op_type, obj, v);
/*
 * Walk `lv` levels up the environment-pointer chain starting from
 * reg_ep via GET_PREV_EP.  (Declaration of `i` and the final
 * `return ep;` were lost in extraction; code kept byte-identical.)
 */
5181vm_get_ep(
const VALUE *
const reg_ep, rb_num_t lv)
5184 const VALUE *ep = reg_ep;
5185 for (i = 0; i < lv; i++) {
5186 ep = GET_PREV_EP(ep);
/*
 * Implements the putspecialobject instruction: maps the special-object
 * type operand to the frozen core object, the cref base (cbase), or the
 * constant-definition base, rb_bug()-ing on an unknown type.  (The
 * enclosing `switch` line itself was lost in extraction.)
 */
5192vm_get_special_object(
const VALUE *
const reg_ep,
5193 enum vm_special_object_type
type)
5196 case VM_SPECIAL_OBJECT_VMCORE:
5197 return rb_mRubyVMFrozenCore;
5198 case VM_SPECIAL_OBJECT_CBASE:
5199 return vm_get_cbase(reg_ep);
5200 case VM_SPECIAL_OBJECT_CONST_BASE:
5201 return vm_get_const_base(reg_ep);
5203 rb_bug(
"putspecialobject insn: unknown value_type %d",
type);
5210 const VALUE ary2 = ary2st;
5211 VALUE tmp1 = rb_check_to_array(ary1);
5212 VALUE tmp2 = rb_check_to_array(ary2);
5223 tmp1 = rb_ary_dup(ary1);
5225 return rb_ary_concat(tmp1, tmp2);
5233 return vm_concat_array(ary1, ary2st);
5239 VALUE tmp = rb_check_to_array(ary);
5243 else if (
RTEST(flag)) {
5244 return rb_ary_dup(tmp);
5256 return vm_splat_array(flag, ary);
5260vm_check_match(rb_execution_context_t *ec,
VALUE target,
VALUE pattern, rb_num_t flag)
5262 enum vm_check_match_type
type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
5264 if (flag & VM_CHECKMATCH_ARRAY) {
5268 for (i = 0; i < n; i++) {
5270 VALUE c = check_match(ec, v, target,
type);
5279 return check_match(ec, pattern, target,
type);
/*
 * Public wrapper for the checkmatch instruction: forwards directly to
 * the static vm_check_match helper.
 */
5284rb_vm_check_match(rb_execution_context_t *ec,
VALUE target,
VALUE pattern, rb_num_t flag)
5286 return vm_check_match(ec, target, pattern, flag);
5290vm_check_keyword(lindex_t bits, lindex_t idx,
const VALUE *ep)
5292 const VALUE kw_bits = *(ep - bits);
5295 unsigned int b = (
unsigned int)
FIX2ULONG(kw_bits);
5296 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
5300 VM_ASSERT(RB_TYPE_P(kw_bits,
T_HASH));
5309 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
5310 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
5311 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
5312 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
5316 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
5319 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
5322 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
5325 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
5332vm_const_get_under(
ID id, rb_num_t flags,
VALUE cbase)
5337 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
5338 return rb_public_const_get_at(cbase,
id);
5346vm_check_if_class(
ID id, rb_num_t flags,
VALUE super,
VALUE klass)
5348 if (!RB_TYPE_P(klass,
T_CLASS)) {
5351 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
5356 "superclass mismatch for class %"PRIsVALUE
"",
5369vm_check_if_module(
ID id,
VALUE mod)
5388vm_declare_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
5391 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
5399vm_declare_module(
ID id,
VALUE cbase)
5405NORETURN(
static void unmatched_redefinition(
const char *
type,
VALUE cbase,
ID id,
VALUE old));
5409 VALUE name = rb_id2str(
id);
5410 VALUE message = rb_sprintf(
"%"PRIsVALUE
" is not a %s",
5412 VALUE location = rb_const_source_location_at(cbase,
id);
5413 if (!
NIL_P(location)) {
5414 rb_str_catf(message,
"\n%"PRIsVALUE
":%"PRIsVALUE
":"
5415 " previous definition of %"PRIsVALUE
" was here",
5416 rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
5422vm_define_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
5426 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super,
T_CLASS)) {
5428 "superclass must be an instance of Class (given an instance of %"PRIsVALUE
")",
5432 vm_check_if_namespace(cbase);
5436 if ((klass = vm_const_get_under(
id, flags, cbase)) != 0) {
5437 if (!vm_check_if_class(
id, flags, super, klass))
5438 unmatched_redefinition(
"class", cbase,
id, klass);
5442 return vm_declare_class(
id, flags, cbase, super);
5447vm_define_module(
ID id, rb_num_t flags,
VALUE cbase)
5451 vm_check_if_namespace(cbase);
5452 if ((mod = vm_const_get_under(
id, flags, cbase)) != 0) {
5453 if (!vm_check_if_module(
id, mod))
5454 unmatched_redefinition(
"module", cbase,
id, mod);
5458 return vm_declare_module(
id, cbase);
5463vm_find_or_create_class_by_id(
ID id,
5468 rb_vm_defineclass_type_t
type = VM_DEFINECLASS_TYPE(flags);
5471 case VM_DEFINECLASS_TYPE_CLASS:
5473 return vm_define_class(
id, flags, cbase, super);
5475 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
5479 case VM_DEFINECLASS_TYPE_MODULE:
5481 return vm_define_module(
id, flags, cbase);
5484 rb_bug(
"unknown defineclass type: %d", (
int)
type);
5488static rb_method_visibility_t
5489vm_scope_visibility_get(
const rb_execution_context_t *ec)
5491 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
5493 if (!vm_env_cref_by_cref(cfp->ep)) {
5494 return METHOD_VISI_PUBLIC;
5497 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
5502vm_scope_module_func_check(
const rb_execution_context_t *ec)
5504 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
5506 if (!vm_env_cref_by_cref(cfp->ep)) {
5510 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
5515vm_define_method(
const rb_execution_context_t *ec,
VALUE obj,
ID id,
VALUE iseqval,
int is_singleton)
5518 rb_method_visibility_t visi;
5519 rb_cref_t *cref = vm_ec_cref(ec);
5523 visi = METHOD_VISI_PUBLIC;
5526 klass = CREF_CLASS_FOR_DEFINITION(cref);
5527 visi = vm_scope_visibility_get(ec);
5534 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, visi);
5536 if (
id == idInitialize && klass != rb_cObject && RB_TYPE_P(klass,
T_CLASS) && (
rb_get_alloc_func(klass) == rb_class_allocate_instance)) {
5538 RCLASS_EXT(klass)->max_iv_count = rb_estimate_iv_count(klass, (
const rb_iseq_t *)iseqval);
5541 if (!is_singleton && vm_scope_module_func_check(ec)) {
5543 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
5553 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
5555 if (block_handler == VM_BLOCK_HANDLER_NONE) {
5556 rb_vm_localjump_error(
"no block given (yield)",
Qnil, 0);
5559 return vm_invoke_block(ec, GET_CFP(), calling, ci,
false, block_handler);
5563enum method_explorer_type {
5565 mexp_search_invokeblock,
5574 VALUE block_handler,
5575 enum method_explorer_type method_explorer
5580 int argc = vm_ci_argc(ci);
5581 VALUE recv = TOPN(argc);
5583 .block_handler = block_handler,
5584 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
5590 switch (method_explorer) {
5591 case mexp_search_method:
5592 calling.cc = cc = vm_search_method_fastpath((
VALUE)reg_cfp->iseq, cd,
CLASS_OF(recv));
5593 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5595 case mexp_search_super:
5596 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
5597 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5599 case mexp_search_invokeblock:
5600 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
5607rb_vm_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
5610 VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq,
false);
5611 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
5617rb_vm_opt_send_without_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
5620 VALUE bh = VM_BLOCK_HANDLER_NONE;
5621 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
5627rb_vm_invokesuper(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
5630 VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq,
true);
5631 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
5637rb_vm_invokeblock(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
5640 VALUE bh = VM_BLOCK_HANDLER_NONE;
5641 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
5658vm_objtostring(
const rb_iseq_t *iseq,
VALUE recv, CALL_DATA cd)
5678 if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
5684 val = rb_mod_to_s(recv);
5690 if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
5691 return rb_nil_to_s(recv);
5695 if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
5696 return rb_true_to_s(recv);
5700 if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
5701 return rb_false_to_s(recv);
5705 if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
5706 return rb_fix_to_s(recv);
5714vm_opt_str_freeze(
VALUE str,
int bop,
ID id)
5716 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
5728vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr)
5730 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
5735 VALUE result = *ptr;
5736 rb_snum_t i = num - 1;
5738 const VALUE v = *++ptr;
5739 if (OPTIMIZED_CMP(v, result) > 0) {
5752rb_vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr)
5754 return vm_opt_newarray_max(ec, num, ptr);
5758vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr)
5760 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
5765 VALUE result = *ptr;
5766 rb_snum_t i = num - 1;
5768 const VALUE v = *++ptr;
5769 if (OPTIMIZED_CMP(v, result) < 0) {
5782rb_vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr)
5784 return vm_opt_newarray_min(ec, num, ptr);
5788vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr)
5791 if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
5792 return rb_ary_hash_values(num, ptr);
5800rb_vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t num,
const VALUE *ptr)
5802 return vm_opt_newarray_hash(ec, num, ptr);
5807#define IMEMO_CONST_CACHE_SHAREABLE IMEMO_FL_USER0
5810vm_track_constant_cache(
ID id,
void *ic)
5812 rb_vm_t *vm = GET_VM();
5813 struct rb_id_table *const_cache = vm->constant_cache;
5814 VALUE lookup_result;
5817 if (rb_id_table_lookup(const_cache,
id, &lookup_result)) {
5818 ics = (st_table *)lookup_result;
5821 ics = st_init_numtable();
5822 rb_id_table_insert(const_cache,
id, (
VALUE)ics);
5837 vm->inserting_constant_cache_id = id;
5839 st_insert(ics, (st_data_t) ic, (st_data_t)
Qtrue);
5841 vm->inserting_constant_cache_id = (
ID)0;
5845vm_ic_track_const_chain(rb_control_frame_t *cfp, IC ic,
const ID *segments)
5849 for (
int i = 0; segments[i]; i++) {
5850 ID id = segments[i];
5851 if (
id == idNULL)
continue;
5852 vm_track_constant_cache(
id, ic);
5860vm_inlined_ic_hit_p(
VALUE flags,
VALUE value,
const rb_cref_t *ic_cref,
const VALUE *reg_ep)
5862 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
5863 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
5865 return (ic_cref == NULL ||
5866 ic_cref == vm_get_cref(reg_ep));
5874 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
5875 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
5880rb_vm_ic_hit_p(IC ic,
const VALUE *reg_ep)
5882 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
5886vm_ic_update(
const rb_iseq_t *iseq, IC ic,
VALUE val,
const VALUE *reg_ep,
const VALUE *pc)
5888 if (ruby_vm_const_missing_count > 0) {
5889 ruby_vm_const_missing_count = 0;
5896 ice->ic_cref = vm_get_const_key_cref(reg_ep);
5901 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
5902 rb_yjit_constant_ic_update(iseq, ic, pos);
5903 rb_rjit_constant_ic_update(iseq, ic, pos);
5907rb_vm_opt_getconstant_path(rb_execution_context_t *ec, rb_control_frame_t *
const reg_cfp, IC ic)
5912 if (ice && vm_ic_hit_p(ice, GET_EP())) {
5915 VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
5917 ruby_vm_constant_cache_misses++;
5918 val = vm_get_ev_const_chain(ec, segments);
5919 vm_ic_track_const_chain(GET_CFP(), ic, segments);
5922 vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
/*
 * `once` instruction dispatch.  State machine on is->once.running_thread:
 *   RUNNING_THREAD_ONCE_DONE (sentinel 0x1) -> already evaluated, return
 *     the cached value;
 *   NULL -> claim it for this thread, run the body (call lost in
 *     extraction), then mark done;
 *   this thread -> re-entrant call, re-execute via vm_once_exec;
 *   otherwise (another thread is running it) -> wait, checking
 *     interrupts via RUBY_VM_CHECK_INTS.  Code kept byte-identical.
 */
5928vm_once_dispatch(rb_execution_context_t *ec, ISEQ iseq, ISE is)
5930 rb_thread_t *th = rb_ec_thread_ptr(ec);
/* 0x1 is never a valid rb_thread_t*, so it serves as the "done" tag */
5931 rb_thread_t *
const RUNNING_THREAD_ONCE_DONE = (rb_thread_t *)(0x1);
5934 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
5935 return is->once.value;
5937 else if (is->once.running_thread == NULL) {
5939 is->once.running_thread = th;
5943 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
5946 else if (is->once.running_thread == th) {
5948 return vm_once_exec((
VALUE)iseq);
5952 RUBY_VM_CHECK_INTS(ec);
5959vm_case_dispatch(CDHASH hash, OFFSET else_offset,
VALUE key)
5961 switch (OBJ_BUILTIN_TYPE(key)) {
5967 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
5968 SYMBOL_REDEFINED_OP_FLAG |
5969 INTEGER_REDEFINED_OP_FLAG |
5970 FLOAT_REDEFINED_OP_FLAG |
5971 NIL_REDEFINED_OP_FLAG |
5972 TRUE_REDEFINED_OP_FLAG |
5973 FALSE_REDEFINED_OP_FLAG |
5974 STRING_REDEFINED_OP_FLAG)) {
5978 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
5982 if (rb_hash_stlike_lookup(hash, key, &val)) {
5994 vm_stack_consistency_error(
const rb_execution_context_t *ec,
5995 const rb_control_frame_t *,
5998vm_stack_consistency_error(
const rb_execution_context_t *ec,
5999 const rb_control_frame_t *cfp,
6002 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
6003 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
6004 static const char stack_consistency_error[] =
6005 "Stack consistency error (sp: %"PRIdPTRDIFF
", bp: %"PRIdPTRDIFF
")";
6006#if defined RUBY_DEVEL
6007 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
6012 rb_bug(stack_consistency_error, nsp, nbp);
6019 if (FIXNUM_2_P(recv, obj) &&
6020 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
6021 return rb_fix_plus_fix(recv, obj);
6023 else if (FLONUM_2_P(recv, obj) &&
6024 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6032 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6037 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
6038 return rb_str_opt_plus(recv, obj);
6042 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
6043 return rb_ary_plus(recv, obj);
6053 if (FIXNUM_2_P(recv, obj) &&
6054 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
6055 return rb_fix_minus_fix(recv, obj);
6057 else if (FLONUM_2_P(recv, obj) &&
6058 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6066 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6077 if (FIXNUM_2_P(recv, obj) &&
6078 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
6079 return rb_fix_mul_fix(recv, obj);
6081 else if (FLONUM_2_P(recv, obj) &&
6082 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6090 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6101 if (FIXNUM_2_P(recv, obj) &&
6102 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
6103 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_div_fix(recv, obj);
6105 else if (FLONUM_2_P(recv, obj) &&
6106 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6107 return rb_flo_div_flo(recv, obj);
6114 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6115 return rb_flo_div_flo(recv, obj);
6125 if (FIXNUM_2_P(recv, obj) &&
6126 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
6127 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_mod_fix(recv, obj);
6129 else if (FLONUM_2_P(recv, obj) &&
6130 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6138 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6147vm_opt_neq(
const rb_iseq_t *iseq, CALL_DATA cd, CALL_DATA cd_eq,
VALUE recv,
VALUE obj)
6149 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
6150 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
6152 if (!UNDEF_P(val)) {
6153 return RBOOL(!
RTEST(val));
6163 if (FIXNUM_2_P(recv, obj) &&
6164 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
6167 else if (FLONUM_2_P(recv, obj) &&
6168 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6176 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6188 if (FIXNUM_2_P(recv, obj) &&
6189 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
6192 else if (FLONUM_2_P(recv, obj) &&
6193 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6201 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6213 if (FIXNUM_2_P(recv, obj) &&
6214 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
6217 else if (FLONUM_2_P(recv, obj) &&
6218 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6226 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6238 if (FIXNUM_2_P(recv, obj) &&
6239 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
6242 else if (FLONUM_2_P(recv, obj) &&
6243 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6251 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6268 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
6269 if (LIKELY(RB_TYPE_P(obj,
T_STRING))) {
6270 return rb_str_buf_append(recv, obj);
6277 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
6278 return rb_ary_push(recv, obj);
6295 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
6306 if (FIXNUM_2_P(recv, obj) &&
6307 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
6319 if (FIXNUM_2_P(recv, obj) &&
6320 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
6321 return rb_fix_aref(recv, obj);
6326 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
6328 return rb_ary_entry_internal(recv,
FIX2LONG(obj));
6331 return rb_ary_aref1(recv, obj);
6335 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
6336 return rb_hash_aref(recv, obj);
6350 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
6352 rb_ary_store(recv,
FIX2LONG(obj), set);
6356 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
6357 rb_hash_aset(recv, obj, set);
6369 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
6370 rb_hash_compare_by_id_p(recv) ==
Qfalse &&
6371 !
FL_TEST(recv, RHASH_PROC_DEFAULT)) {
6372 return rb_hash_aref(recv, key);
6382 return vm_opt_aref_with(recv, key);
6389 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
6390 rb_hash_compare_by_id_p(recv) ==
Qfalse) {
6391 return rb_hash_aset(recv, key, val);
6399vm_opt_length(
VALUE recv,
int bop)
6405 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6406 if (bop == BOP_EMPTY_P) {
6407 return LONG2NUM(RSTRING_LEN(recv));
6414 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6418 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6427vm_opt_empty_p(
VALUE recv)
6429 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
6439vm_opt_nil_p(
const rb_iseq_t *iseq, CALL_DATA cd,
VALUE recv)
6442 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
6445 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
6461 case RSHIFT(~0UL, 1):
6464 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
6482vm_opt_succ(
VALUE recv)
6485 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
6486 return fix_succ(recv);
6492 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
6501vm_opt_not(
const rb_iseq_t *iseq, CALL_DATA cd,
VALUE recv)
6503 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
6504 return RBOOL(!
RTEST(recv));
6519 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
6523 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
6531rb_event_flag_t rb_iseq_event_flags(
const rb_iseq_t *iseq,
size_t pos);
6533NOINLINE(
static void vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp));
6536vm_trace_hook(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
const VALUE *pc,
6538 rb_hook_list_t *global_hooks, rb_hook_list_t *
const *local_hooks_ptr,
VALUE val)
6541 VALUE self = GET_SELF();
6543 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
6545 if (event & global_hooks->events) {
6548 vm_dtrace(event, ec);
6549 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
6554 rb_hook_list_t *local_hooks = *local_hooks_ptr;
6555 if (local_hooks != NULL) {
6556 if (event & local_hooks->events) {
6559 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
6567rb_vm_opt_cfunc_p(CALL_CACHE cc,
int insn)
6571 return check_cfunc(vm_cc_cme(cc), rb_obj_equal);
6572 case BIN(opt_nil_p):
6573 return check_cfunc(vm_cc_cme(cc), rb_false);
6575 return check_cfunc(vm_cc_cme(cc), rb_obj_not);
6581#define VM_TRACE_HOOK(target_event, val) do { \
6582 if ((pc_events & (target_event)) & enabled_flags) { \
6583 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
6588rescue_errinfo(rb_execution_context_t *ec, rb_control_frame_t *cfp)
6590 VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
6591 VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
6592 return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
6596vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp)
6598 const VALUE *pc = reg_cfp->pc;
6599 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
6602 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
6606 const rb_iseq_t *iseq = reg_cfp->iseq;
6608 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
6610 rb_hook_list_t *local_hooks = iseq->aux.exec.local_hooks;
6611 rb_hook_list_t *
const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
6612 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
6613 rb_hook_list_t *bmethod_local_hooks = NULL;
6614 rb_hook_list_t **bmethod_local_hooks_ptr = NULL;
6616 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
6617 enabled_flags |= iseq_local_events;
6619 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
6621 if (bmethod_frame) {
6622 const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
6623 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
6624 bmethod_local_hooks = me->def->body.bmethod.hooks;
6625 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
6626 if (bmethod_local_hooks) {
6627 bmethod_local_events = bmethod_local_hooks->events;
6632 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
6636 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
6644 else if (ec->trace_arg != NULL) {
6649 rb_hook_list_t *global_hooks = rb_ec_ractor_hooks(ec);
6652 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
6655 ruby_debug_printf(
"vm_trace>>%4d (%4x) - %s:%d %s\n",
6658 RSTRING_PTR(rb_iseq_path(iseq)),
6659 (
int)rb_iseq_line_no(iseq, pos),
6660 RSTRING_PTR(rb_iseq_label(iseq)));
6662 VM_ASSERT(reg_cfp->pc == pc);
6663 VM_ASSERT(pc_events != 0);
6673 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE,
Qundef);
6674 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH,
Qundef);
6692#if VM_CHECK_MODE > 0
6693NORETURN( NOINLINE( COLDFUNC
6694void rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)));
6697Init_vm_stack_canary(
void)
6700 int n = ruby_fill_random_bytes(&vm_stack_canary,
sizeof vm_stack_canary,
false);
6701 vm_stack_canary |= 0x01;
6703 vm_stack_canary_was_born =
true;
6708rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)
6712 const char *insn = rb_insns_name(i);
6716 rb_bug(
"dead canary found at %s: %s", insn, str);
6720void Init_vm_stack_canary(
void) { }
6749builtin_invoker0(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
6751 typedef VALUE (*rb_invoke_funcptr0_t)(rb_execution_context_t *ec,
VALUE self);
6752 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
6756builtin_invoker1(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
6758 typedef VALUE (*rb_invoke_funcptr1_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1);
6759 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
6763builtin_invoker2(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
6765 typedef VALUE (*rb_invoke_funcptr2_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2);
6766 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
6770builtin_invoker3(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
6773 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
6777builtin_invoker4(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
6780 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
6784builtin_invoker5(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
6787 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
6791builtin_invoker6(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
6794 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
6798builtin_invoker7(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
6801 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
6805builtin_invoker8(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
6808 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
6812builtin_invoker9(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
6814 typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9);
6815 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
6819builtin_invoker10(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
6821 typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10);
6822 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
6826builtin_invoker11(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
6828 typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11);
6829 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
6833builtin_invoker12(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
6835 typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12);
6836 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
6840builtin_invoker13(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
6842 typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13);
6843 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
6847builtin_invoker14(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
6849 typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14);
6850 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
6854builtin_invoker15(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr)
6856 typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14,
VALUE v15);
6857 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
6860typedef VALUE (*builtin_invoker)(rb_execution_context_t *ec,
VALUE self,
const VALUE *argv, rb_insn_func_t funcptr);
6862static builtin_invoker
6863lookup_builtin_invoker(
int argc)
6865 static const builtin_invoker invokers[] = {
6884 return invokers[argc];
6888invoke_bf(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
const struct rb_builtin_function* bf,
const VALUE *argv)
6890 const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF;
6891 SETUP_CANARY(canary_p);
6892 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, (rb_insn_func_t)bf->func_ptr);
6893 CHECK_CANARY(canary_p, BIN(invokebuiltin));
6898vm_invoke_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp,
const struct rb_builtin_function* bf,
const VALUE *argv)
6900 return invoke_bf(ec, cfp, bf, argv);
6904vm_invoke_builtin_delegate(rb_execution_context_t *ec, rb_control_frame_t *cfp,
const struct rb_builtin_function *bf,
unsigned int start_index)
6907 fputs(
"vm_invoke_builtin_delegate: passing -> ", stderr);
6908 for (
int i=0; i<bf->argc; i++) {
6909 ruby_debug_printf(
":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
6911 ruby_debug_printf(
"\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc, bf->func_ptr);
6914 if (bf->argc == 0) {
6915 return invoke_bf(ec, cfp, bf, NULL);
6918 const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
6919 return invoke_bf(ec, cfp, bf, argv);
6926rb_vm_lvar_exposed(rb_execution_context_t *ec,
int index)
6928 const rb_control_frame_t *cfp = ec->cfp;
6929 return cfp->ep[index];
#define RUBY_ASSERT(expr)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
#define RUBY_EVENT_END
Encountered an end of a class clause.
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
#define RUBY_EVENT_CLASS
Encountered a new class.
#define RUBY_EVENT_LINE
Encountered a new line.
#define RUBY_EVENT_RETURN
Encountered a return statement.
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
#define RUBY_EVENT_B_CALL
Encountered an yield statement.
uint32_t rb_event_flag_t
Represents event(s).
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
#define RUBY_EVENT_RESCUE
Encountered a rescue statement.
VALUE rb_singleton_class(VALUE obj)
Finds or creates the singleton class of the passed object.
VALUE rb_module_new(void)
Creates a new, anonymous module.
VALUE rb_class_inherited(VALUE super, VALUE klass)
Calls Class#inherited.
VALUE rb_define_class_id(ID id, VALUE super)
This is a very badly designed API that creates an anonymous class.
#define TYPE(_)
Old name of rb_type.
#define FL_SINGLETON
Old name of RUBY_FL_SINGLETON.
#define FL_EXIVAR
Old name of RUBY_FL_EXIVAR.
#define REALLOC_N
Old name of RB_REALLOC_N.
#define ALLOC
Old name of RB_ALLOC.
#define RFLOAT_VALUE
Old name of rb_float_value.
#define T_STRING
Old name of RUBY_T_STRING.
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
#define T_NIL
Old name of RUBY_T_NIL.
#define T_FLOAT
Old name of RUBY_T_FLOAT.
#define T_IMEMO
Old name of RUBY_T_IMEMO.
#define ID2SYM
Old name of RB_ID2SYM.
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define T_STRUCT
Old name of RUBY_T_STRUCT.
#define T_FIXNUM
Old name of RUBY_T_FIXNUM.
#define SYM2ID
Old name of RB_SYM2ID.
#define CLASS_OF
Old name of rb_class_of.
#define rb_ary_new4
Old name of rb_ary_new_from_values.
#define FIXABLE
Old name of RB_FIXABLE.
#define LONG2FIX
Old name of RB_INT2FIX.
#define FIX2INT
Old name of RB_FIX2INT.
#define T_MODULE
Old name of RUBY_T_MODULE.
#define STATIC_SYM_P
Old name of RB_STATIC_SYM_P.
#define ASSUME
Old name of RBIMPL_ASSUME.
#define FIX2ULONG
Old name of RB_FIX2ULONG.
#define T_TRUE
Old name of RUBY_T_TRUE.
#define T_ICLASS
Old name of RUBY_T_ICLASS.
#define T_HASH
Old name of RUBY_T_HASH.
#define ALLOC_N
Old name of RB_ALLOC_N.
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
#define rb_ary_new3
Old name of rb_ary_new_from_args.
#define LONG2NUM
Old name of RB_LONG2NUM.
#define rb_exc_new3
Old name of rb_exc_new_str.
#define T_FALSE
Old name of RUBY_T_FALSE.
#define Qtrue
Old name of RUBY_Qtrue.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
#define T_ARRAY
Old name of RUBY_T_ARRAY.
#define T_OBJECT
Old name of RUBY_T_OBJECT.
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
#define DBL2NUM
Old name of rb_float_new.
#define T_CLASS
Old name of RUBY_T_CLASS.
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
#define FL_TEST
Old name of RB_FL_TEST.
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define FL_USHIFT
Old name of RUBY_FL_USHIFT.
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
#define SYMBOL_P
Old name of RB_SYMBOL_P.
void rb_notimplement(void)
VALUE rb_eTypeError
TypeError exception.
VALUE rb_eFatal
fatal exception.
VALUE rb_eNoMethodError
NoMethodError exception.
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
VALUE rb_eRuntimeError
RuntimeError exception.
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
void rb_error_frozen_object(VALUE frozen_obj)
Identical to rb_error_frozen(), except it takes arbitrary Ruby object instead of C's string.
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
VALUE rb_cClass
Class class.
VALUE rb_cArray
Array class.
VALUE rb_obj_alloc(VALUE klass)
Allocates an instance of the given class.
VALUE rb_cRegexp
Regexp class.
VALUE rb_obj_frozen_p(VALUE obj)
Just calls RB_OBJ_FROZEN() inside.
VALUE rb_cHash
Hash class.
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
VALUE rb_cBasicObject
BasicObject class.
VALUE rb_cModule
Module class.
VALUE rb_class_real(VALUE klass)
Finds a "real" class.
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
VALUE rb_cFloat
Float class.
VALUE rb_cProc
Proc class.
VALUE rb_cString
String class.
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
#define rb_check_frozen
Just another name of rb_check_frozen.
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
#define rb_check_frozen_internal(obj)
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
VALUE rb_reg_last_match(VALUE md)
This just returns the argument, stringified.
VALUE rb_reg_match(VALUE re, VALUE str)
This is the match operator.
VALUE rb_reg_nth_match(int n, VALUE md)
Queries the nth captured substring.
VALUE rb_reg_match_post(VALUE md)
The portion of the original string after the given match.
VALUE rb_reg_nth_defined(int n, VALUE md)
Identical to rb_reg_nth_match(), except it just returns Boolean.
VALUE rb_reg_match_pre(VALUE md)
The portion of the original string before the given match.
VALUE rb_reg_match_last(VALUE md)
The portion of the original string that captured at the very last.
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
VALUE rb_sym_to_s(VALUE sym)
This is an rb_sym2str() + rb_str_dup() combo.
VALUE rb_str_succ(VALUE orig)
Searches for the "successor" of a string.
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
#define rb_str_cat_cstr(buf, str)
Identical to rb_str_cat(), except it assumes the passed pointer is a pointer to a C string.
VALUE rb_str_length(VALUE)
Identical to rb_str_strlen(), except it returns the value in rb_cInteger.
void rb_thread_schedule(void)
Tries to switch to another thread.
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
void rb_cvar_set(VALUE klass, ID name, VALUE val)
Assigns a value to a class variable.
VALUE rb_cvar_find(VALUE klass, ID name, VALUE *front)
Identical to rb_cvar_get(), except it takes additional "front" pointer.
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
void rb_const_set(VALUE space, ID name, VALUE val)
Names a constant.
VALUE rb_autoload_load(VALUE space, ID name)
Kicks the autoload procedure as if it was "touched".
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
void rb_set_class_path_string(VALUE klass, VALUE space, VALUE name)
Identical to rb_set_class_path(), except it accepts the name as Ruby's string instead of C's.
VALUE rb_ivar_defined(VALUE obj, ID name)
Queries if the instance variable is defined at the object.
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
VALUE rb_cvar_defined(VALUE klass, ID name)
Queries if the given class has the given class variable.
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
int rb_const_defined(VALUE space, ID name)
Queries if the constant is defined at the namespace.
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
VALUE rb_sym2str(VALUE id)
Identical to rb_id2str(), except it takes an instance of rb_cSymbol rather than an ID.
int off
Offset inside of ptr.
int len
Length of the buffer.
static bool rb_ractor_shareable_p(VALUE obj)
Queries if multiple Ractors can share the passed object or not.
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
#define ALLOCA_N(type, n)
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
#define MEMMOVE(p1, p2, type, n)
Handy macro to call memmove.
VALUE type(ANYARGS)
ANYARGS-ed function type.
VALUE rb_ensure(type *q, VALUE w, type *e, VALUE r)
An equivalent of ensure clause.
#define RARRAY_LEN
Just another name of rb_array_len.
static int RARRAY_LENINT(VALUE ary)
Identical to rb_array_len(), except it differs for the return type.
static VALUE * RARRAY_PTR(VALUE ary)
Wild use of a C pointer.
#define RARRAY_AREF(a, i)
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr.
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
#define RBASIC(obj)
Convenient casting macro.
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
#define RHASH_SIZE(h)
Queries the size of the hash.
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
static VALUE * ROBJECT_IVPTR(VALUE obj)
Queries the instance variables.
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks for the contents for viability as a C stri...
#define RB_NO_KEYWORDS
Do not pass keywords.
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
const ID * segments
A null-terminated list of ids, used to represent a constant's path idNULL is used to represent the ::...
rb_cref_t * cref
class reference, should be marked
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
IFUNC (Internal FUNCtion)
const VALUE cref_or_me
class reference or rb_method_entry_t
intptr_t SIGNED_VALUE
A signed integer type that has the same width with VALUE.
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
uintptr_t VALUE
Type that represents a Ruby object.
static enum ruby_value_type RB_BUILTIN_TYPE(VALUE obj)
Queries the type of the object.
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.