/vm_insnhelper.c
C | 5688 lines | 4748 code | 730 blank | 210 comment | 1082 complexity | 1fdc5951c2a5719152838f55edfc1cfa MD5 | raw file
Possible License(s): LGPL-2.1, AGPL-3.0, 0BSD, Unlicense, GPL-2.0, BSD-3-Clause
- Large files are truncated, but you can click here to view the full file
- /**********************************************************************
- vm_insnhelper.c - instruction helper functions.
- $Author$
- Copyright (C) 2007 Koichi Sasada
- **********************************************************************/
- #include "ruby/internal/config.h"
- #include <math.h>
- #include "constant.h"
- #include "debug_counter.h"
- #include "internal.h"
- #include "internal/class.h"
- #include "internal/compar.h"
- #include "internal/hash.h"
- #include "internal/numeric.h"
- #include "internal/proc.h"
- #include "internal/random.h"
- #include "internal/variable.h"
- #include "variable.h"
- /* finish iseq array */
- #include "insns.inc"
- #ifndef MJIT_HEADER
- #include "insns_info.inc"
- #endif
- extern rb_method_definition_t *rb_method_definition_create(rb_method_type_t type, ID mid);
- extern void rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *def, void *opts);
- extern int rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_definition_t *d2);
- extern VALUE rb_make_no_method_exception(VALUE exc, VALUE format, VALUE obj,
- int argc, const VALUE *argv, int priv);
- #ifndef MJIT_HEADER
- static const struct rb_callcache vm_empty_cc;
- #endif
- /* control stack frame */
- static rb_control_frame_t *vm_get_ruby_level_caller_cfp(const rb_execution_context_t *ec, const rb_control_frame_t *cfp);
/* Duplicate one of the VM's pre-allocated "special" exception objects
 * (e.g. the stack-overflow error) so per-raise state such as a backtrace
 * can be attached without mutating the shared template instance. */
MJIT_STATIC VALUE
ruby_vm_special_exception_copy(VALUE exc)
{
    /* rb_class_real() skips singleton/iclass wrappers to allocate a plain
     * instance of the template's real class. */
    VALUE e = rb_obj_alloc(rb_class_real(RBASIC_CLASS(exc)));
    rb_obj_copy_ivar(e, exc);
    return e;
}
NORETURN(static void ec_stack_overflow(rb_execution_context_t *ec, int));
/* Raise the pre-allocated SystemStackError on `ec` and longjmp out.
 * When `setup` is true, the template exception is copied and a backtrace
 * is attached; when false the shared template is raised as-is (used when
 * we cannot afford any extra allocation/work). Does not return. */
static void
ec_stack_overflow(rb_execution_context_t *ec, int setup)
{
    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;
    if (setup) {
        VALUE at = rb_ec_backtrace_object(ec);
        mesg = ruby_vm_special_exception_copy(mesg);
        /* store the backtrace under both ivars the exception API reads */
        rb_ivar_set(mesg, idBt, at);
        rb_ivar_set(mesg, idBt_locations, at);
    }
    ec->errinfo = mesg;
    EC_JUMP_TAG(ec, TAG_RAISE);
}
NORETURN(static void vm_stackoverflow(void));
#ifdef MJIT_HEADER
/* keep this cold path out of JIT-compiled method bodies */
NOINLINE(static COLDFUNC void vm_stackoverflow(void));
#endif
/* VM value-stack overflow entry point used by instruction handlers;
 * always sets up a full backtrace (setup = TRUE). */
static void
vm_stackoverflow(void)
{
    ec_stack_overflow(GET_EC(), TRUE);
}
NORETURN(MJIT_STATIC void rb_ec_stack_overflow(rb_execution_context_t *ec, int crit));
/* Machine (C) stack overflow handler. `crit` requests the "fatal" variant
 * that raises the bare pre-allocated exception without touching the heap.
 * Overflowing during GC is unrecoverable and aborts immediately. */
MJIT_STATIC void
rb_ec_stack_overflow(rb_execution_context_t *ec, int crit)
{
    if (rb_during_gc()) {
        rb_bug("system stack overflow during GC. Faulty native extension?");
    }
    if (crit) {
        ec->raised_flag = RAISED_STACKOVERFLOW;
        ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
        EC_JUMP_TAG(ec, TAG_RAISE);
    }
#ifdef USE_SIGALTSTACK
    /* with an alternate signal stack there is room to build a backtrace */
    ec_stack_overflow(ec, TRUE);
#else
    ec_stack_overflow(ec, FALSE);
#endif
}
#if VM_CHECK_MODE > 0
/* Check that `klass` can serve as a method entry's defined_class.
 * Under VM_CHECK_MODE >= 2 this accepts modules (and module-wrapping
 * iclasses) and otherwise requires the ancestry chain to reach
 * BasicObject; cheaper builds only check for a non-zero class. */
static int
callable_class_p(VALUE klass)
{
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;
    switch (RB_BUILTIN_TYPE(klass)) {
      default:
        break;
      case T_ICLASS:
        if (!RB_TYPE_P(RCLASS_SUPER(klass), T_MODULE)) break;
        /* fall through: an iclass whose super is a module behaves as a module */
      case T_MODULE:
        return TRUE;
    }
    while (klass) {
        if (klass == rb_cBasicObject) {
            return TRUE;
        }
        klass = RCLASS_SUPER(klass);
    }
    return FALSE;
#else
    return klass != 0;
#endif
}
- static int
- callable_method_entry_p(const rb_callable_method_entry_t *cme)
- {
- if (cme == NULL) {
- return TRUE;
- }
- else {
- VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment));
- if (callable_class_p(cme->defined_class)) {
- return TRUE;
- }
- else {
- return FALSE;
- }
- }
- }
/* VM_CHECK_MODE consistency check for one frame about to be pushed.
 * `req_block`/`req_me`/`req_cref` describe what the frame type requires in
 * the specval and cref_or_me env slots; `is_cframe` says whether the frame
 * is a C-level frame (no normal iseq). Any violation aborts via rb_bug(). */
static void
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref, VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
{
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env; /* impossible value */
    if (RB_TYPE_P(cref_or_me, T_IMEMO)) {
        cref_or_me_type = imemo_type(cref_or_me);
    }
    if (type & VM_FRAME_FLAG_BMETHOD) {
        /* bmethod frames always carry a method entry */
        req_me = TRUE;
    }
    /* a frame that requires a block must be a "local" env (and vice versa) */
    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    }
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
    }
    if (req_me) {
        if (cref_or_me_type != imemo_ment) {
            rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
        }
    }
    else {
        if (req_cref && cref_or_me_type != imemo_cref) {
            rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
        }
        else { /* cref or Qfalse */
            if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
                /* lambda and ifunc frames may legitimately carry a ment */
                if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
                    /* ignore */
                }
                else {
                    rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
                }
            }
        }
    }
    if (cref_or_me_type == imemo_ment) {
        const rb_callable_method_entry_t *me = (const rb_callable_method_entry_t *)cref_or_me;
        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
        }
    }
    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RUBY_VM_NORMAL_ISEQ_P(iseq) /* argument error. it should be fixed */);
    }
    else {
        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    }
}
/* Dispatch the per-frame-type consistency check. The CHECK table encodes,
 * for each frame magic, whether a block, a method entry, and/or a cref is
 * required, and whether the frame is a C frame. */
static void
vm_check_frame(VALUE type,
               VALUE specval,
               VALUE cref_or_me,
               const rb_iseq_t *iseq)
{
    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;
    VM_ASSERT(FIXNUM_P(type));
#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
    case magic: \
      vm_check_frame_detail(type, req_block, req_me, req_cref, \
                            specval, cref_or_me, is_cframe, iseq); \
      break
    switch (given_magic) {
        /*                           BLK    ME     CREF   CFRAME */
        CHECK(VM_FRAME_MAGIC_METHOD, TRUE,  TRUE,  FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_CLASS,  TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_TOP,    TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_CFUNC,  TRUE,  TRUE,  FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_BLOCK,  FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_IFUNC,  FALSE, FALSE, FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_EVAL,   FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_DUMMY,  TRUE,  FALSE, FALSE, FALSE);
      default:
        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
    }
#undef CHECK
}
/* Canary word written just above the live stack top; VM_CHECK_MODE builds
 * verify it before pushing a frame to catch instructions that wrote past
 * their declared stack usage. */
static VALUE vm_stack_canary; /* Initialized later */
static bool vm_stack_canary_was_born = false;
#ifndef MJIT_HEADER
/* Diagnose a VM value-stack overwrite: if `sp[0]` holds the canary, print
 * the clobbering instruction, the iseq inspection and its disassembly,
 * then abort via rb_bug(). Fast-exits when the canary is intact. */
MJIT_FUNC_EXPORTED void
rb_vm_check_canary(const rb_execution_context_t *ec, VALUE *sp)
{
    const struct rb_control_frame_struct *reg_cfp = ec->cfp;
    const struct rb_iseq_struct *iseq;
    if (! LIKELY(vm_stack_canary_was_born)) {
        return; /* :FIXME: isn't it rather fatal to enter this branch? */
    }
    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
        /* This is at the very beginning of a thread. cfp does not exist. */
        return;
    }
    else if (! (iseq = GET_ISEQ())) {
        return;
    }
    else if (LIKELY(sp[0] != vm_stack_canary)) {
        /* fast path: the canary is intact */
        return;
    }
    else {
        /* we are going to call methods below; squash the canary to
         * prevent infinite loop. */
        sp[0] = Qundef;
    }
    /* Reconstruct which instruction clobbered the canary from the PC. */
    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE *encoded = iseq->body->iseq_encoded;
    const ptrdiff_t pos = GET_PC() - encoded;
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const VALUE inspection = rb_inspect(iseqw);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);
    /* rb_bug() is not capable of outputting this large contents.  It
       is designed to run from a SIGSEGV handler, which tends to be
       very restricted. */
    ruby_debug_printf(
        "We are killing the stack canary set by %s, "
        "at %s@pc=%"PRIdPTR"\n"
        "watch out the C stack trace.\n"
        "%s",
        name, stri, pos, strd);
    rb_bug("see above.");
}
#endif
#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
#else
/* checks compile away entirely when VM_CHECK_MODE == 0 */
#define vm_check_canary(ec, sp)
#define vm_check_frame(a, b, c, d)
#endif /* VM_CHECK_MODE > 0 */
#if USE_DEBUG_COUNTER
/* Bump the debug counters for a frame push: one counter per
 * Ruby/C -> Ruby/C transition kind, plus one per frame magic. */
static void
vm_push_frame_debug_counter_inc(
    const struct rb_execution_context_struct *ec,
    const struct rb_control_frame_struct *reg_cfp,
    VALUE type)
{
    const struct rb_control_frame_struct *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(reg_cfp);
    RB_DEBUG_COUNTER_INC(frame_push);
    if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
        /* classify the transition: R = Ruby frame, C = C frame */
        const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
        const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
        if (prev) {
            if (curr) {
                RB_DEBUG_COUNTER_INC(frame_R2R);
            }
            else {
                RB_DEBUG_COUNTER_INC(frame_R2C);
            }
        }
        else {
            if (curr) {
                RB_DEBUG_COUNTER_INC(frame_C2R);
            }
            else {
                RB_DEBUG_COUNTER_INC(frame_C2C);
            }
        }
    }
    switch (type & VM_FRAME_MAGIC_MASK) {
      case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
      case VM_FRAME_MAGIC_BLOCK:  RB_DEBUG_COUNTER_INC(frame_push_block);  return;
      case VM_FRAME_MAGIC_CLASS:  RB_DEBUG_COUNTER_INC(frame_push_class);  return;
      case VM_FRAME_MAGIC_TOP:    RB_DEBUG_COUNTER_INC(frame_push_top);    return;
      case VM_FRAME_MAGIC_CFUNC:  RB_DEBUG_COUNTER_INC(frame_push_cfunc);  return;
      case VM_FRAME_MAGIC_IFUNC:  RB_DEBUG_COUNTER_INC(frame_push_ifunc);  return;
      case VM_FRAME_MAGIC_EVAL:   RB_DEBUG_COUNTER_INC(frame_push_eval);   return;
      case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
      case VM_FRAME_MAGIC_DUMMY:  RB_DEBUG_COUNTER_INC(frame_push_dummy);  return;
    }
    rb_bug("unreachable");
}
#else
#define vm_push_frame_debug_counter_inc(ec, cfp, t) /* void */
#endif
/* The push sequence below hard-codes this ep slot layout. */
STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS,   VM_ENV_DATA_INDEX_FLAGS   == -0);
/* Push a new control frame onto `ec`. Initializes `local_size` locals to
 * nil, lays out the three env slots (cref_or_me, specval, flags) so that
 * ep points at the flags word, and installs the new cfp. The overflow
 * check covers locals plus the frame's maximum operand-stack depth. */
static void
vm_push_frame(rb_execution_context_t *ec,
              const rb_iseq_t *iseq,
              VALUE type,
              VALUE self,
              VALUE specval,
              VALUE cref_or_me,
              const VALUE *pc,
              VALUE *sp,
              int local_size,
              int stack_max)
{
    rb_control_frame_t *const cfp = RUBY_VM_NEXT_CONTROL_FRAME(ec->cfp);
    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);
    /* check stack overflow */
    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
    vm_check_canary(ec, sp);
    /* setup vm value stack */
    /* initialize local variables */
    for (int i=0; i < local_size; i++) {
        *sp++ = Qnil;
    }
    /* setup ep with managing data */
    *sp++ = cref_or_me; /* ep[-2] / Qnil or T_IMEMO(cref) or T_IMEMO(ment) */
    *sp++ = specval     /* ep[-1] / block handler or prev env ptr */;
    *sp++ = type;       /* ep[-0] / ENV_FLAGS */
    /* setup new frame */
    *cfp = (const struct rb_control_frame_struct) {
        .pc = pc,
        .sp = sp,
        .iseq = iseq,
        .self = self,
        .ep = sp - 1,
        .block_code = NULL,
        .__bp__ = sp, /* Store initial value of ep as bp to skip calculation cost of bp on JIT cancellation. */
#if VM_DEBUG_BP_CHECK
        .bp_check = sp,
#endif
    };
    ec->cfp = cfp;
    if (VMDEBUG == 2) {
        SDR();
    }
    vm_push_frame_debug_counter_inc(ec, cfp, type);
}
/* Pop `cfp` from `ec` and return TRUE if the frame carried the FINISH
 * flag. The flags word is read from ep before interrupts are checked,
 * since RUBY_VM_CHECK_INTS may run Ruby code. */
static inline int
vm_pop_frame(rb_execution_context_t *ec, rb_control_frame_t *cfp, const VALUE *ep)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
    if (VMDEBUG == 2)       SDR();
    RUBY_VM_CHECK_INTS(ec);
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
    return flags & VM_FRAME_FLAG_FINISH;
}
- MJIT_STATIC void
- rb_vm_pop_frame(rb_execution_context_t *ec)
- {
- vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
- }
- /* method dispatch */
- static inline VALUE
- rb_arity_error_new(int argc, int min, int max)
- {
- VALUE err_mess = 0;
- if (min == max) {
- err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d)", argc, min);
- }
- else if (max == UNLIMITED_ARGUMENTS) {
- err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d+)", argc, min);
- }
- else {
- err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d..%d)", argc, min, max);
- }
- return rb_exc_new3(rb_eArgError, err_mess);
- }
/* Raise an ArgumentError for an arity mismatch (see rb_arity_error_new). */
MJIT_STATIC void
rb_error_arity(int argc, int min, int max)
{
    rb_exc_raise(rb_arity_error_new(argc, min, max));
}
- /* lvar */
NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));
/* Slow path for writing an lvar into a heap-escaped env: remember the env
 * object for the GC write barrier, then force the write. Once remembered,
 * further writes to this env no longer need the barrier, so the
 * WB_REQUIRED flag is cleared. */
static void
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
{
    /* remember env value forcely */
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
}
/* Write local variable slot `index` of env `ep`. Stack-allocated envs
 * (no WB_REQUIRED flag) take the direct store; heap envs go through the
 * write-barrier slow path. */
static inline void
vm_env_write(const VALUE *ep, int index, VALUE v)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
        VM_STACK_ENV_WRITE(ep, index, v);
    }
    else {
        vm_env_write_slowpath(ep, index, v);
    }
}
/* Convert a block handler to a Proc object (or nil when no block was
 * given). iseq/ifunc handlers are materialized via rb_vm_make_proc;
 * symbols via Symbol#to_proc; proc handlers are returned as-is. */
MJIT_STATIC VALUE
rb_vm_bh_to_procval(const rb_execution_context_t *ec, VALUE block_handler)
{
    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        return Qnil;
    }
    else {
        switch (vm_block_handler_type(block_handler)) {
          case block_handler_type_iseq:
          case block_handler_type_ifunc:
            return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
          case block_handler_type_symbol:
            return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
          case block_handler_type_proc:
            return VM_BH_TO_PROC(block_handler);
          default:
            /* NOTE(review): control falls off the end of a non-void function
             * here; presumably VM_UNREACHABLE aborts/annotates unreachability
             * — confirm against its definition. */
            VM_UNREACHABLE(rb_vm_bh_to_procval);
        }
    }
}
- /* svar */
#if VM_CHECK_MODE > 0
/* Assertion helper: the ME_CREF env slot may legally hold an svar, cref,
 * or method-entry imemo. Anything else aborts via rb_bug(). */
static int
vm_svar_valid_p(VALUE svar)
{
    if (RB_TYPE_P((VALUE)svar, T_IMEMO)) {
        switch (imemo_type(svar)) {
          case imemo_svar:
          case imemo_cref:
          case imemo_ment:
            return TRUE;
          default:
            break;
        }
    }
    rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
    return FALSE;
}
#endif
/* Fetch the special-variable slot for local ep `lep`: the ME_CREF env
 * slot for ordinary frames, or ec->root_svar for the root lep.
 * May return Qfalse (cast) when no svar has been created yet. */
static inline struct vm_svar *
lep_svar(const rb_execution_context_t *ec, const VALUE *lep)
{
    VALUE svar;
    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
    }
    else {
        /* root (or lep-less) frame: svar lives on the execution context */
        svar = ec->root_svar;
    }
    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));
    return (struct vm_svar *)svar;
}
/* Store `svar` into the slot that lep_svar() reads, using the env write
 * barrier for frame slots and RB_OBJ_WRITE for ec->root_svar. */
static inline void
lep_svar_write(const rb_execution_context_t *ec, const VALUE *lep, const struct vm_svar *svar)
{
    VM_ASSERT(vm_svar_valid_p((VALUE)svar));
    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
    }
    else {
        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
    }
}
/* Read one special variable: $_ (LASTLINE), $~ (BACKREF), or one of the
 * extra slots stored in svar->others. Returns nil when the slot still
 * holds a non-svar value (Qfalse/cref/ment), i.e. nothing was set. */
static VALUE
lep_svar_get(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key)
{
    const struct vm_svar *svar = lep_svar(ec, lep);
    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) return Qnil;
    switch (key) {
      case VM_SVAR_LASTLINE:
        return svar->lastline;
      case VM_SVAR_BACKREF:
        return svar->backref;
      default: {
        const VALUE ary = svar->others;
        if (NIL_P(ary)) {
            return Qnil;
        }
        else {
            return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
        }
      }
    }
}
/* Allocate a fresh svar imemo whose cref_or_me slot preserves `obj`
 * (the cref/ment previously stored in the env slot). */
static struct vm_svar *
svar_new(VALUE obj)
{
    return (struct vm_svar *)rb_imemo_new(imemo_svar, Qnil, Qnil, Qnil, obj);
}
/* Set one special variable, lazily creating the svar imemo (and the
 * `others` array for extra slots) on first use. */
static void
lep_svar_set(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, VALUE val)
{
    struct vm_svar *svar = lep_svar(ec, lep);
    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) {
        /* slot held Qfalse/cref/ment: wrap it in a new svar */
        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));
    }
    switch (key) {
      case VM_SVAR_LASTLINE:
        RB_OBJ_WRITE(svar, &svar->lastline, val);
        return;
      case VM_SVAR_BACKREF:
        RB_OBJ_WRITE(svar, &svar->backref, val);
        return;
      default: {
        VALUE ary = svar->others;
        if (NIL_P(ary)) {
            RB_OBJ_WRITE(svar, &svar->others, ary = rb_ary_new());
        }
        rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
      }
    }
}
/* Implement getspecial: type == 0 reads a plain svar slot; otherwise the
 * value derives from $~. An odd `type` encodes a named back-ref character
 * ($&, $`, $', $+) in its upper bits; an even `type` encodes a numbered
 * group $N as N << 1. */
static inline VALUE
vm_getspecial(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, rb_num_t type)
{
    VALUE val;
    if (type == 0) {
        val = lep_svar_get(ec, lep, key);
    }
    else {
        VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
        if (type & 0x01) {
            switch (type >> 1) {
              case '&':
                val = rb_reg_last_match(backref);
                break;
              case '`':
                val = rb_reg_match_pre(backref);
                break;
              case '\'':
                val = rb_reg_match_post(backref);
                break;
              case '+':
                val = rb_reg_match_last(backref);
                break;
              default:
                rb_bug("unexpected back-ref");
            }
        }
        else {
            /* numbered group: $1, $2, ... */
            val = rb_reg_nth_match((int)(type >> 1), backref);
        }
    }
    return val;
}
PUREFUNC(static rb_callable_method_entry_t *check_method_entry(VALUE obj, int can_be_svar));
/* Extract a method entry from an ep[ME_CREF] slot value. Returns NULL for
 * Qfalse and crefs. An svar is only legal in the local (bottom) env slot
 * (`can_be_svar`), in which case the search recurses into the value it
 * preserved. */
static rb_callable_method_entry_t *
check_method_entry(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;
#if VM_CHECK_MODE > 0
    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));
#endif
    switch (imemo_type(obj)) {
      case imemo_ment:
        return (rb_callable_method_entry_t *)obj;
      case imemo_cref:
        return NULL;
      case imemo_svar:
        if (can_be_svar) {
            return check_method_entry(((struct vm_svar *)obj)->cref_or_me, FALSE);
        }
        /* fall through: svar in a non-local slot is a bug */
      default:
#if VM_CHECK_MODE > 0
        rb_bug("check_method_entry: svar should not be there:");
#endif
        return NULL;
    }
}
/* Find the method entry governing `cfp` by walking the env chain from its
 * ep up to the local env; only the local slot may hold an svar. */
MJIT_STATIC const rb_callable_method_entry_t *
rb_vm_frame_method_entry(const rb_control_frame_t *cfp)
{
    const VALUE *ep = cfp->ep;
    rb_callable_method_entry_t *me;
    while (!VM_ENV_LOCAL_P(ep)) {
        if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
        ep = VM_ENV_PREV_EP(ep);
    }
    return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
- static rb_iseq_t *
- method_entry_iseqptr(const rb_callable_method_entry_t *me)
- {
- switch (me->def->type) {
- case VM_METHOD_TYPE_ISEQ:
- return me->def->body.iseq.iseqptr;
- default:
- return NULL;
- }
- }
- static rb_cref_t *
- method_entry_cref(const rb_callable_method_entry_t *me)
- {
- switch (me->def->type) {
- case VM_METHOD_TYPE_ISEQ:
- return me->def->body.iseq.cref;
- default:
- return NULL;
- }
- }
- #if VM_CHECK_MODE == 0
- PUREFUNC(static rb_cref_t *check_cref(VALUE, int));
- #endif
- static rb_cref_t *
- check_cref(VALUE obj, int can_be_svar)
- {
- if (obj == Qfalse) return NULL;
- #if VM_CHECK_MODE > 0
- if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));
- #endif
- switch (imemo_type(obj)) {
- case imemo_ment:
- return method_entry_cref((rb_callable_method_entry_t *)obj);
- case imemo_cref:
- return (rb_cref_t *)obj;
- case imemo_svar:
- if (can_be_svar) {
- return check_cref(((struct vm_svar *)obj)->cref_or_me, FALSE);
- }
- default:
- #if VM_CHECK_MODE > 0
- rb_bug("check_method_entry: svar should not be there:");
- #endif
- return NULL;
- }
- }
/* Find the cref governing env `ep` by walking the env chain up to the
 * local env; only the local slot may hold an svar. May return NULL. */
static inline rb_cref_t *
vm_env_cref(const VALUE *ep)
{
    rb_cref_t *cref;
    while (!VM_ENV_LOCAL_P(ep)) {
        if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
        ep = VM_ENV_PREV_EP(ep);
    }
    return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
/* Does env slot value `v` hold a cref directly (not via a method entry)?
 * Looks through an svar in the local slot when `can_be_svar`. */
static int
is_cref(const VALUE v, int can_be_svar)
{
    if (RB_TYPE_P(v, T_IMEMO)) {
        switch (imemo_type(v)) {
          case imemo_cref:
            return TRUE;
          case imemo_svar:
            if (can_be_svar) return is_cref(((struct vm_svar *)v)->cref_or_me, FALSE);
            /* fall through */
          default:
            break;
        }
    }
    return FALSE;
}
/* TRUE when the env chain of `ep` carries a direct cref (as opposed to
 * one reachable only through a method entry). Used to decide whether the
 * cref can be duplicated in place. */
static int
vm_env_cref_by_cref(const VALUE *ep)
{
    while (!VM_ENV_LOCAL_P(ep)) {
        if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
        ep = VM_ENV_PREV_EP(ep);
    }
    return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
- static rb_cref_t *
- cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
- {
- const VALUE v = *vptr;
- rb_cref_t *cref, *new_cref;
- if (RB_TYPE_P(v, T_IMEMO)) {
- switch (imemo_type(v)) {
- case imemo_cref:
- cref = (rb_cref_t *)v;
- new_cref = vm_cref_dup(cref);
- if (parent) {
- RB_OBJ_WRITE(parent, vptr, new_cref);
- }
- else {
- VM_FORCE_WRITE(vptr, (VALUE)new_cref);
- }
- return (rb_cref_t *)new_cref;
- case imemo_svar:
- if (can_be_svar) {
- return cref_replace_with_duplicated_cref_each_frame((const VALUE *)&((struct vm_svar *)v)->cref_or_me, FALSE, v);
- }
- /* fall through */
- case imemo_ment:
- rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");
- default:
- break;
- }
- }
- return FALSE;
- }
/* Walk the env chain of `ep`, duplicate the first direct cref found and
 * install the duplicate in place (so e.g. refinement activation does not
 * mutate a shared cref). The caller must have verified a direct cref
 * exists (vm_env_cref_by_cref), otherwise this is a bug. */
static rb_cref_t *
vm_cref_replace_with_duplicated_cref(const VALUE *ep)
{
    if (vm_env_cref_by_cref(ep)) {
        rb_cref_t *cref;
        VALUE envval;
        while (!VM_ENV_LOCAL_P(ep)) {
            /* heap-escaped envs need the env object as WB parent */
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
                return cref;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
    }
    else {
        rb_bug("vm_cref_dup: unreachable");
    }
}
- static rb_cref_t *
- vm_get_cref(const VALUE *ep)
- {
- rb_cref_t *cref = vm_env_cref(ep);
- if (cref != NULL) {
- return cref;
- }
- else {
- rb_bug("vm_get_cref: unreachable");
- }
- }
- static rb_cref_t *
- vm_ec_cref(const rb_execution_context_t *ec)
- {
- const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
- if (cfp == NULL) {
- return NULL;
- }
- return vm_get_cref(cfp->ep);
- }
/* Return the cref usable as a constant-cache key, i.e. the full cref when
 * any scope in the chain is a singleton or cloned class (those make the
 * lookup path unshareable); NULL otherwise so the cache can be keyed
 * without a cref. */
static const rb_cref_t *
vm_get_const_key_cref(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);
    const rb_cref_t *key_cref = cref;
    while (cref) {
        if (FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
            FL_TEST(CREF_CLASS(cref), RCLASS_CLONED)) {
            return key_cref;
        }
        cref = CREF_NEXT(cref);
    }
    /* does not include singleton class */
    return NULL;
}
/* Rebuild the cref chain, substituting `new_klass` for the node whose
 * class is `old_klass`. Nodes before the match are copied one by one,
 * chaining each copy through `new_cref_ptr`; at the match the remainder
 * of the original chain is reused (vm_cref_new_use_prev) and the walk
 * stops. If no node matches, the copy ends with a NULL next. */
void
rb_vm_rewrite_cref(rb_cref_t *cref, VALUE old_klass, VALUE new_klass, rb_cref_t **new_cref_ptr)
{
    rb_cref_t *new_cref;
    while (cref) {
        if (CREF_CLASS(cref) == old_klass) {
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            *new_cref_ptr = new_cref;
            return;
        }
        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        *new_cref_ptr = new_cref;
        /* next copy will be linked into this node's `next` field */
        new_cref_ptr = (rb_cref_t **)&new_cref->next;
    }
    *new_cref_ptr = NULL;
}
- static rb_cref_t *
- vm_cref_push(const rb_execution_context_t *ec, VALUE klass, const VALUE *ep, int pushed_by_eval)
- {
- rb_cref_t *prev_cref = NULL;
- if (ep) {
- prev_cref = vm_env_cref(ep);
- }
- else {
- rb_control_frame_t *cfp = vm_get_ruby_level_caller_cfp(ec, ec->cfp);
- if (cfp) {
- prev_cref = vm_env_cref(cfp->ep);
- }
- }
- return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval);
- }
- static inline VALUE
- vm_get_cbase(const VALUE *ep)
- {
- const rb_cref_t *cref = vm_get_cref(ep);
- VALUE klass = Qundef;
- while (cref) {
- if ((klass = CREF_CLASS(cref)) != 0) {
- break;
- }
- cref = CREF_NEXT(cref);
- }
- return klass;
- }
- static inline VALUE
- vm_get_const_base(const VALUE *ep)
- {
- const rb_cref_t *cref = vm_get_cref(ep);
- VALUE klass = Qundef;
- while (cref) {
- if (!CREF_PUSHED_BY_EVAL(cref) &&
- (klass = CREF_CLASS(cref)) != 0) {
- break;
- }
- cref = CREF_NEXT(cref);
- }
- return klass;
- }
- static inline void
- vm_check_if_namespace(VALUE klass)
- {
- if (!RB_TYPE_P(klass, T_CLASS) && !RB_TYPE_P(klass, T_MODULE)) {
- rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);
- }
- }
- static inline void
- vm_ensure_not_refinement_module(VALUE self)
- {
- if (RB_TYPE_P(self, T_MODULE) && FL_TEST(self, RMODULE_IS_REFINEMENT)) {
- rb_warn("not defined at the refinement, but at the outer class/module");
- }
- }
/* Hook point for mapping a cref class to the class used for lookups;
 * currently the identity function (`cfp` is intentionally unused). */
static inline VALUE
vm_get_iclass(const rb_control_frame_t *cfp, VALUE klass)
{
    return klass;
}
/* Constant lookup for getconstant. With orig_klass == nil and allow_nil,
 * search the lexical cref chain (triggering autoload, honoring
 * deprecation warnings and Ractor shareability), then fall back to an
 * ancestry search from the innermost scope (or self's class). Otherwise
 * look up `id` as a public constant of `orig_klass`. When `is_defined`,
 * return a truthy flag instead of the value. */
static inline VALUE
vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, bool allow_nil, int is_defined)
{
    void rb_const_warn_if_deprecated(const rb_const_entry_t *ce, VALUE klass, ID id);
    VALUE val;
    if (orig_klass == Qnil && allow_nil) {
        /* in current lexical scope */
        const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
        const rb_cref_t *cref;
        VALUE klass = Qnil;
        /* skip leading eval-pushed scopes */
        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);
        }
        cref = root_cref;
        /* lexical search: every scope except the outermost one */
        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {
                klass = Qnil;
            }
            else {
                klass = CREF_CLASS(cref);
            }
            cref = CREF_NEXT(cref);
            if (!NIL_P(klass)) {
                VALUE av, am = 0;
                rb_const_entry_t *ce;
              search_continue:
                if ((ce = rb_const_lookup(klass, id))) {
                    rb_const_warn_if_deprecated(ce, klass, id);
                    val = ce->value;
                    if (val == Qundef) {
                        /* Qundef marks an autoload stub */
                        if (am == klass) break; /* already tried this class; avoid autoload loop */
                        am = klass;
                        if (is_defined) return 1;
                        if (rb_autoloading_value(klass, id, &av, NULL)) return av;
                        rb_autoload_load(klass, id);
                        goto search_continue;
                    }
                    else {
                        if (is_defined) {
                            return 1;
                        }
                        else {
                            if (UNLIKELY(!rb_ractor_main_p())) {
                                /* non-main ractors may only read shareable constants */
                                if (!rb_ractor_shareable_p(val)) {
                                    rb_raise(rb_eRactorIsolationError,
                                             "can not access non-shareable objects in constant %"PRIsVALUE"::%s by non-main ractor.", rb_class_path(klass), rb_id2name(id));
                                }
                            }
                            return val;
                        }
                    }
                }
            }
        }
        /* search self */
        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
        }
        else {
            klass = CLASS_OF(ec->cfp->self);
        }
        if (is_defined) {
            return rb_const_defined(klass, id);
        }
        else {
            return rb_const_get(klass, id);
        }
    }
    else {
        /* scoped lookup: Klass::CONST */
        vm_check_if_namespace(orig_klass);
        if (is_defined) {
            return rb_public_const_defined_from(orig_klass, id);
        }
        else {
            return rb_public_const_get_from(orig_klass, id);
        }
    }
}
/* Find the class to own a class-variable access: walk the cref past nil,
 * singleton and eval-pushed scopes, stopping at the outermost node.
 * Raises for toplevel access (when requested) and for scopes with no
 * class available. */
static inline VALUE
vm_get_cvar_base(const rb_cref_t *cref, const rb_control_frame_t *cfp, int top_level_raise)
{
    VALUE klass;
    if (!cref) {
        rb_bug("vm_get_cvar_base: no cref");
    }
    while (CREF_NEXT(cref) &&
           (NIL_P(CREF_CLASS(cref)) || FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
            CREF_PUSHED_BY_EVAL(cref))) {
        cref = CREF_NEXT(cref);
    }
    if (top_level_raise && !CREF_NEXT(cref)) {
        rb_raise(rb_eRuntimeError, "class variable access from toplevel");
    }
    klass = vm_get_iclass(cfp, CREF_CLASS(cref));
    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class variables available");
    }
    return klass;
}
- static VALUE
- vm_search_const_defined_class(const VALUE cbase, ID id)
- {
- if (rb_const_defined_at(cbase, id)) return cbase;
- if (cbase == rb_cObject) {
- VALUE tmp = RCLASS_SUPER(cbase);
- while (tmp) {
- if (rb_const_defined_at(tmp, id)) return tmp;
- tmp = RCLASS_SUPER(tmp);
- }
- }
- return 0;
- }
/* Look up the ivar-index entry for `id` in `iv_index_tbl` under the VM
 * lock (the table may be mutated concurrently by other ractors). */
static bool
iv_index_tbl_lookup(struct st_table *iv_index_tbl, ID id, struct rb_iv_index_tbl_entry **ent)
{
    int found;
    if (iv_index_tbl == NULL) return false;
    RB_VM_LOCK_ENTER();
    {
        found = st_lookup(iv_index_tbl, (st_data_t)id, (st_data_t *)ent);
    }
    RB_VM_LOCK_LEAVE();
    return found ? true : false;
}
ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, struct rb_iv_index_tbl_entry *ent));
/* Populate the inline cache after a lookup: ivar-access sites store the
 * table entry in the per-site IC (notifying the GC that the iseq now
 * references ent->class_value); attr_reader/writer sites store the
 * 1-based index in the call cache (0 means "unset"). */
static inline void
fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, struct rb_iv_index_tbl_entry *ent)
{
    // fill cache
    if (!is_attr) {
        ic->entry = ent;
        RB_OBJ_WRITTEN(iseq, Qundef, ent->class_value);
    }
    else {
        vm_cc_attr_index_set(cc, (int)ent->index + 1);
    }
}
ALWAYS_INLINE(static VALUE vm_getivar(VALUE, ID, const rb_iseq_t *, IVC, const struct rb_callcache *, int));
/* Read instance variable `id` of `obj` with inline caching. Cache-hit
 * path (class serial match / attr index set) indexes straight into the
 * object's ivar array; cache misses refill the cache from the class's
 * iv-index table; everything else (special constants, classes/modules)
 * falls back to the generic rb_ivar_get/rb_attr_get. Unset ivars yield
 * nil. */
static inline VALUE
vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
{
#if OPT_IC_FOR_IVAR
    VALUE val = Qundef;
    if (SPECIAL_CONST_P(obj)) {
        // frozen? — immediates have no ivars; fall through to general path
    }
    else if (LIKELY(is_attr ?
                    RB_DEBUG_COUNTER_INC_UNLESS(ivar_get_ic_miss_unset, vm_cc_attr_index(cc) > 0) :
                    RB_DEBUG_COUNTER_INC_UNLESS(ivar_get_ic_miss_serial,
                                                ic->entry && ic->entry->class_serial == RCLASS_SERIAL(RBASIC(obj)->klass)))) {
        /* cache hit: attr caches a 1-based index, IC caches the entry */
        uint32_t index = !is_attr ? ic->entry->index : (vm_cc_attr_index(cc) - 1);
        RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
        if (LIKELY(BUILTIN_TYPE(obj) == T_OBJECT) &&
            LIKELY(index < ROBJECT_NUMIV(obj))) {
            val = ROBJECT_IVPTR(obj)[index];
            VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);
        }
        else if (FL_TEST_RAW(obj, FL_EXIVAR)) {
            /* ivars stored in the external generic-ivar table */
            val = rb_ivar_generic_lookup_with_index(obj, id, index);
        }
        goto ret;
    }
    else {
        /* cache miss: consult the class's iv-index table and refill */
        struct rb_iv_index_tbl_entry *ent;
        if (BUILTIN_TYPE(obj) == T_OBJECT) {
            struct st_table *iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);
            if (iv_index_tbl && iv_index_tbl_lookup(iv_index_tbl, id, &ent)) {
                fill_ivar_cache(iseq, ic, cc, is_attr, ent);
                // get value
                if (ent->index < ROBJECT_NUMIV(obj)) {
                    val = ROBJECT_IVPTR(obj)[ent->index];
                    VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);
                }
            }
        }
        else if (FL_TEST_RAW(obj, FL_EXIVAR)) {
            struct st_table *iv_index_tbl = RCLASS_IV_INDEX_TBL(rb_obj_class(obj));
            if (iv_index_tbl && iv_index_tbl_lookup(iv_index_tbl, id, &ent)) {
                fill_ivar_cache(iseq, ic, cc, is_attr, ent);
                val = rb_ivar_generic_lookup_with_index(obj, id, ent->index);
            }
        }
        else {
            // T_CLASS / T_MODULE
            goto general_path;
        }
      ret:
        if (LIKELY(val != Qundef)) {
            return val;
        }
        else {
            /* unset ivar reads as nil */
            return Qnil;
        }
    }
  general_path:
#endif /* OPT_IC_FOR_IVAR */
    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
    if (is_attr) {
        return rb_attr_get(obj, id);
    }
    else {
        return rb_ivar_get(obj, id);
    }
}
ALWAYS_INLINE(static VALUE vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr));
NOINLINE(static VALUE vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic));
NOINLINE(static VALUE vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc));
/* Cache-miss path of ivar write. Checks frozen-ness, then for T_OBJECT
 * tries the class's iv-index table: on success refill the inline cache
 * (IC entry, or 1-based attr index in the call cache), grow the ivar
 * array when needed, and store with the write barrier. Anything else
 * falls back to rb_ivar_set. Returns `val`. */
static VALUE
vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
{
    rb_check_frozen_internal(obj);
#if OPT_IC_FOR_IVAR
    if (RB_TYPE_P(obj, T_OBJECT)) {
        struct st_table *iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj);
        struct rb_iv_index_tbl_entry *ent;
        if (iv_index_tbl_lookup(iv_index_tbl, id, &ent)) {
            if (!is_attr) {
                ic->entry = ent;
                RB_OBJ_WRITTEN(iseq, Qundef, ent->class_value);
            }
            else if (ent->index >= INT_MAX) {
                /* attr index is stored as a (1-based) int in the cc */
                rb_raise(rb_eArgError, "too many instance variables");
            }
            else {
                vm_cc_attr_index_set(cc, (int)(ent->index + 1));
            }
            uint32_t index = ent->index;
            if (UNLIKELY(index >= ROBJECT_NUMIV(obj))) {
                /* grow the embedded/heap ivar array to cover `index` */
                rb_init_iv_list(obj);
            }
            VALUE *ptr = ROBJECT_IVPTR(obj);
            RB_OBJ_WRITE(obj, &ptr[index], val);
            RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_iv_hit);
            return val;
        }
    }
#endif
    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
    return rb_ivar_set(obj, id, val);
}
/* Outlined slow path for setinstancevariable sites (no call cache). */
static VALUE
vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic)
{
    return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);
}
/* Outlined slow path for attr_writer call sites (no iseq-level IC). */
static VALUE
vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc)
{
    return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);
}
/* Write instance variable `id` of `obj` with inline caching. The fast
 * path requires an unfrozen T_OBJECT and a cache hit (class serial match
 * for IC sites, non-zero 1-based index for attr sites); it grows the ivar
 * array if needed and stores through the write barrier. Everything else
 * dispatches to the appropriate outlined slow path. Returns `val`. */
static inline VALUE
vm_setivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
{
#if OPT_IC_FOR_IVAR
    if (LIKELY(RB_TYPE_P(obj, T_OBJECT)) &&
        LIKELY(!RB_OBJ_FROZEN_RAW(obj))) {
        /* writable implies not shared across ractors */
        VM_ASSERT(!rb_ractor_shareable_p(obj));
        if (LIKELY(
            (!is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_serial, ic->entry && ic->entry->class_serial == RCLASS_SERIAL(RBASIC(obj)->klass))) ||
            ( is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_unset, vm_cc_attr_index(cc) > 0)))) {
            uint32_t index = !is_attr ? ic->entry->index : vm_cc_attr_index(cc)-1;
            if (UNLIKELY(index >= ROBJECT_NUMIV(obj))) {
                rb_init_iv_list(obj);
            }
            VALUE *ptr = ROBJECT_IVPTR(obj);
            RB_OBJ_WRITE(obj, &ptr[index], val);
            RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
            return val; /* inline cache hit */
        }
    }
    else {
        RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
    }
#endif /* OPT_IC_FOR_IVAR */
    if (is_attr) {
        return vm_setivar_slowpath_attr(obj, id, val, cc);
    }
    else {
        return vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);
    }
}
- static VALUE
- update_classvariable_cache(const rb_iseq_t *iseq, VALUE klass, ID id, ICVARC ic)
- {
- VALUE defined_class = 0;
- VALUE cvar_value = rb_cvar_find(klass, id, &defined_class);
- if (RB_TYPE_P(defined_class, T_ICLASS)) {
- defined_class = RBASIC(defined_class)->klass;
- }
- struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
- if (!rb_cvc_tbl) {
- rb_bug("the cvc table should be set");
- }
- VALUE ent_data;
- if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
- rb_bug("should have cvar cache entry");
- }
- struct rb_cvar_class_tbl_entry *ent = (void *)ent_data;
- ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
- ic->entry = ent;
- RB_OBJ_WRITTEN(iseq, Qundef, ent->class_value);
- return cvar_value;
- }
/* getclassvariable instruction body: read @@id through the per-site
 * cache `ic`, which is valid while the global cvar state is unchanged.
 * On a miss (or if the cached class no longer holds the variable) fall
 * back to a full lookup and refresh the cache. */
static inline VALUE
vm_getclassvariable(const rb_iseq_t *iseq, const rb_cref_t *cref, const rb_control_frame_t *cfp, ID id, ICVARC ic)
{
    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE()) {
        VALUE v = Qundef;
        RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);

        if (st_lookup(RCLASS_IV_TBL(ic->entry->class_value), (st_data_t)id, &v)) {
            return v;
        }
    }

    VALUE klass = vm_get_cvar_base(cref, cfp, 1);

    return update_classvariable_cache(iseq, klass, id, ic);
}
- static inline void
- vm_setclassvariable(const rb_iseq_t *iseq, const rb_cref_t *cref, const rb_control_frame_t *cfp, ID id, VALUE val, ICVARC ic)
- {
- if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE()) {
- RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
- rb_class_ivar_set(ic->entry->class_value, id, val);
- return;
- }
- VALUE klass = vm_get_cvar_base(cref, cfp, 1);
- rb_cvar_set(klass, id, val);
- update_classvariable_cache(iseq, klass, id, ic);
- }
/* getinstancevariable instruction body: read @id from obj through the
 * per-site inline cache `ic` (no call cache; not an attr_reader site). */
static inline VALUE
vm_getinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, IVC ic)
{
    return vm_getivar(obj, id, iseq, ic, NULL, FALSE);
}
- static inline void
- vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
- {
- vm_setivar(obj, id, val, iseq, ic, 0, 0);
- }
- static VALUE
- vm_throw_continue(const rb_execution_context_t *ec, VALUE err)
- {
- /* continue throw */
- if (FIXNUM_P(err)) {
- ec->tag->state = FIX2INT(err);
- }
- else if (SYMBOL_P(err)) {
- ec->tag->state = TAG_THROW;
- }
- else if (THROW_DATA_P(err)) {
- ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);
- }
- else {
- ec->tag->state = TAG_RAISE;
- }
- return err;
- }
- static VALUE
- vm_throw_start(const rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, enum ruby_tag_type state,
- const int flag, const VALUE throwobj)
- {
- const rb_control_frame_t *escape_cfp = NULL;
- const rb_control_frame_t * const eocfp = RUBY_VM_END_CONTROL_FRAME(ec); /* end of control frame pointer */
- if (flag != 0) {
- /* do nothing */
- }
- else if (state == TAG_BREAK) {
- int is_orphan = 1;
- const VALUE *ep = GET_EP();
- const rb_iseq_t *base_iseq = GET_ISEQ();
- escape_cfp = reg_cfp;
- while (base_iseq->body->type != ISEQ_TYPE_BLOCK) {
- if (escape_cfp->iseq->body->type == ISEQ_TYPE_CLASS) {
- escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
- ep = escape_cfp->ep;
- base_iseq = escape_cfp->iseq;
- }
- else {
- ep = VM_ENV_PREV_EP(ep);
- base_iseq = base_iseq->body->parent_iseq;
- escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
- VM_ASSERT(escape_cfp->iseq == base_iseq);
- }
- }
- if (VM_FRAME_LAMBDA_P(escape_cfp)) {
- /* lambda{... break ...} */
- is_orphan = 0;
- state = TAG_RETURN;
- }
- else {
- ep = VM_ENV_PREV_EP(ep);
- while (escape_cfp < eocfp) {
- if (escape_cfp->ep == ep) {
- const rb_iseq_t *const iseq = escape_cfp->iseq;
- const VALUE epc = escape_cfp->pc - iseq->body->iseq_encoded;
- const struct iseq_catch_table *const ct = iseq->body->catch_table;
- unsigned int i;
- if (!ct) break;
- for (i=0; i < ct->size; i++) {
- const struct iseq_catch_table_entry *const entry =
- UNALIGNED_MEMBER_PTR(ct, entries[i]);
- if (entry->type == CATCH_TYPE_BREAK &&
- entry->iseq == base_iseq &&
- entry->start < epc && entry->end >= epc) {
- if (entry->cont == epc) { /* found! */
- is_orphan = 0;
- }
- break;
- }
- }
- break;
- }
- escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
- }
- }
- if (is_orphan) {
- rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
- }
- }
- else if (state == TAG_RETRY) {
- const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
- escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
- }
- else if (state == TAG_RETURN) {
- const VALUE *current_ep = GET_EP();
- const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
- int in_class_frame = 0;
- int toplevel = 1;
- escape_cfp = reg_cfp;
- // find target_lep, target_ep
- while (!VM_ENV_LOCAL_P(ep)) {
- if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
- target_ep = ep;
- }
- ep = VM_ENV_PREV_EP(ep);
- }
- target_lep = ep;
- while (escape_cfp < eocfp) {
- const VALUE *lep = VM_CF_LEP(escape_cfp);
- if (!target_lep) {
- target_lep = lep;
- }
- if (lep == target_lep &&
- VM_FRAME_RUBYFRAME_P(escape_cfp) &&
- escape_cfp->iseq->body->type == ISEQ_TYPE_CLASS) {
- in_class_frame = 1;
- target_lep = 0;
- }
- if (lep == target_lep) {
- if (VM_FRAME_LAMBDA_P(escape_cfp)) {
- toplevel = 0;
- if (in_class_frame) {
- /* lambda {class A; ... return ...; end} */
- goto valid_return;
- }
- else {
- const VALUE *tep = current_ep;
- while (target_lep != tep) {
- if (escape_cfp->ep == tep) {
- /* in lambda */
- if (tep == target_ep) {
- goto valid_return;
- }
- else {
- goto unexpected_return;
- }
- }
- tep = VM_ENV_PREV_EP(tep);
- }
- }
- }
- else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
- switch (escape_cfp->iseq->body->type) {
- case ISEQ_TYPE_TOP:
- case ISEQ_TYPE_MAIN:
- if (toplevel) {
- if (in_class_frame) goto unexpected_return;
- if (target_ep == NULL) {
- goto valid_return;
- }
- else {
- goto unexpected_return;
- }
- }
- break;
- case ISEQ_TYPE_EVAL:
- case ISEQ_TYPE_CLASS:
- toplevel = 0;
- break;
- default:
- break;
- }
- }
- }
- if (escape_cfp->ep == target_lep && escape_cfp->iseq->body->type == ISEQ_TYPE_METHOD) {
- if (target_ep == NULL) {
- goto valid_return;
- }
- else {
- goto unexpected_return;
- }
- }
- escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
- }
- unexpected_return:;
- rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);
- valid_return:;
- /* do nothing */
- }
- else {
- rb_bug("isns(throw): unsupported throw type");
- }
- ec->tag->state = state;
- return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
- }
- static VALUE
- vm_throw(const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
- rb_num_t throw_state, VALUE throwobj)
- {
- const int state = (int)(throw_state & VM_THROW_STATE_MASK);
- const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
- if (state != 0) {
- return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
- }
- else {
- return vm_throw_continue(ec, throwobj);
- }
- }
/* expandarray instruction body: spread `ary` onto the VM stack at `sp`.
 * `num` values are pushed; flag bit 0 adds a trailing splat array for the
 * remainder, flag bit 1 selects "post" order (values taken from the tail
 * of the array, pushed reversed).  A non-array that does not respond to
 * to_ary is treated as a one-element array of itself.  Missing positions
 * are filled with nil. */
static inline void
vm_expandarray(VALUE *sp, VALUE ary, rb_num_t num, int flag)
{
    int is_splat = flag & 0x01;
    rb_num_t space_size = num + is_splat;
    VALUE *base = sp - 1;
    const VALUE *ptr;
    rb_num_t len;
    const VALUE obj = ary;

    if (!RB_TYPE_P(ary, T_ARRAY) && NIL_P(ary = rb_check_array_type(ary))) {
        /* not convertible: treat the original object as [obj] */
        ary = obj;
        ptr = &ary;
        len = 1;
    }
    else {
        ptr = RARRAY_CONST_PTR_TRANSIENT(ary);
        len = (rb_num_t)RARRAY_LEN(ary);
    }

    if (space_size == 0) {
        /* no space left on stack */
    }
    else if (flag & 0x02) {
        /* post: ..., nil ,ary[-1], ..., ary[0..-num] # top */
        rb_num_t i = 0, j;

        if (len < num) {
            /* pad with nil for the positions the array cannot fill */
            for (i=0; i<num-len; i++) {
                *base++ = Qnil;
            }
        }
        for (j=0; i<num; i++, j++) {
            VALUE v = ptr[len - j - 1];
            *base++ = v;
        }
        if (is_splat) {
            /* leftover head of the array becomes the splat value */
            *base = rb_ary_new4(len - j, ptr);
        }
    }
    else {
        /* normal: ary[num..-1], ary[num-2], ary[num-3], ..., ary[0] # top */
        rb_num_t i;
        VALUE *bptr = &base[space_size - 1];

        for (i=0; i<num; i++) {
            if (len <= i) {
                /* array exhausted: nil-fill the rest */
                for (; i<num; i++) {
                    *bptr-- = Qnil;
                }
                break;
            }
            *bptr-- = ptr[i];
        }
        if (is_splat) {
            if (num > len) {
                *bptr = rb_ary_new();
            }
            else {
                /* leftover tail of the array becomes the splat value */
                *bptr = rb_ary_new4(len - num, ptr + num);
            }
        }
    }
    RB_GC_GUARD(ary);
}
- static VALUE vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
- static VALUE vm_mtbl_dump(VALUE klass, ID target_mid);
- static struct rb_class_cc_entries *
- vm_ccs_create(VALUE klass, const rb_callable_method_entry_t *cme)
- {
- struct rb_class_cc_entries *ccs = ALLOC(struct rb_class_cc_entries);
- #if VM_CHECK_MODE > 0
- ccs->debug_sig = ~(VALUE)ccs;
- #endif
- ccs->capa = 0;
- ccs->len = 0;
- RB_OBJ_WRITE(klass, &ccs->cme, cme);
- METHOD_ENTRY_CACHED_SET((rb_callable_method_entry_t *)cme);
- ccs->entries = NULL;
- return ccs;
- }
- static void
- vm_ccs_push(VALUE klass, struct rb_class_cc_entries *ccs, const struct rb_callinfo *ci, const struct rb_callcache *cc)
- {
- if (! vm_cc_markable(cc)) {
- return;
- }
- else if (! vm_ci_markable(ci)) {
- return;
- }
- if (UNLIKELY(ccs->len == ccs->capa)) {
- if (ccs->capa == 0) {
- ccs->capa = 1;
- ccs->entries = ALLOC_N(struct rb_class_cc_entries_entry, ccs->capa);
- }
- else {
- ccs->capa *= 2;
- REALLOC_N(ccs->entries, struct rb_class_cc_entries_entry, ccs->capa);
- }
- }
- VM_ASSERT(ccs->len < ccs->capa);
- const int pos = ccs->len++;
- RB_OBJ_WRITE(klass, &ccs->entries[pos].ci, ci);
- RB_OBJ_WRITE(klass, &ccs->entries[pos].cc, cc);
- if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
- // for tuning
- // vm_mtbl_dump(klass, 0);
- }
- }
- #if VM_CHECK_MODE > 0
- void
- rb_vm_ccs_dump(struct rb_class_cc_entries *ccs)
- {
- ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
- for (int i=0; i<ccs->len; i++) {
- vm_ci_dump(ccs->entries[i].ci);
- rp(ccs->entries[i].cc);
- }
- }
- static int
- vm_ccs_verify(struct rb_class_cc_entries *ccs, ID mid, VALUE klass)
- {
- VM_ASSERT(vm_ccs_p(ccs));
- VM_ASSERT(ccs->len <= ccs->capa);
- for (int i=0; i<ccs->len; i++) {
- const struct rb_callinfo *ci = ccs->entries[i].ci;
- const struct rb_callcache *cc = ccs->entries[i].cc;
- VM_ASSERT(vm_ci_p(ci));
- VM_ASSERT(vm_ci_mid(ci) == mid);
- VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
- VM_ASSERT(vm_cc_class_check(cc, klass));
- VM_ASSERT(vm_cc_cme(cc) == ccs->cme);
- }
- return TRUE;
- }
- #endif
- #ifndef MJIT_HEADER
/* Search (or build) the call cache for (klass, ci).  Consults the class's
 * cc table first; an invalidated method entry evicts the whole ccs.  On a
 * miss, resolves the callable method entry, creates a fresh cc, and
 * registers it in the class's ccs so later lookups hit.  An unresolvable
 * method yields the shared empty cc, which is never cached per-class.
 * Caller must hold the VM lock (see rb_vm_search_method_slowpath). */
static const struct rb_callcache *
vm_search_cc(const VALUE klass, const struct rb_callinfo * const ci)
{
    const ID mid = vm_ci_mid(ci);
    struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
    struct rb_class_cc_entries *ccs = NULL;
    VALUE ccs_data;

    if (cc_tbl) {
        if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
            ccs = (struct rb_class_cc_entries *)ccs_data;
            const int ccs_len = ccs->len;
            VM_ASSERT(vm_ccs_verify(ccs, mid, klass));

            if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
                /* stale method entry: drop the whole ccs and rebuild below */
                rb_vm_ccs_free(ccs);
                rb_id_table_delete(cc_tbl, mid);
                ccs = NULL;
            }
            else {
                for (int i=0; i<ccs_len; i++) {
                    const struct rb_callinfo  *ccs_ci = ccs->entries[i].ci;
                    const struct rb_callcache *ccs_cc = ccs->entries[i].cc;

                    VM_ASSERT(vm_ci_p(ccs_ci));
                    VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));

                    if (ccs_ci == ci) { // TODO: equality
                        RB_DEBUG_COUNTER_INC(cc_found_in_ccs);

                        VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
                        VM_ASSERT(ccs_cc->klass == klass);
                        VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));

                        return ccs_cc;
                    }
                }
            }
        }
    }
    else {
        /* first cached call on this class: create its cc table */
        cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
    }

    RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

    const rb_callable_method_entry_t *cme;

    if (ccs) {
        /* ccs exists but had no matching ci: reuse its resolved cme */
        cme = ccs->cme;
        cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;

        VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
    }
    else {
        cme = rb_callable_method_entry(klass, mid);
    }

    VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));

    if (cme == NULL) {
        // undef or not found: can't cache the information
        VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
        return &vm_empty_cc;
    }

    VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

    const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general);

    METHOD_ENTRY_CACHED_SET((struct rb_callable_method_entry_struct *)cme);

    if (ccs == NULL) {
        VM_ASSERT(cc_tbl != NULL);

        if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
            // rb_callable_method_entry() prepares ccs.
            ccs = (struct rb_class_cc_entries *)ccs_data;
        }
        else {
            // TODO: required?
            ccs = vm_ccs_create(klass, cme);
            rb_id_table_insert(cc_tbl, mid, (VALUE)ccs);
        }
    }

    vm_ccs_push(klass, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc) != NULL);
    VM_ASSERT(cme->called_id == mid);
    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);

    return cc;
}
- MJIT_FUNC_EXPORTED const struct rb_callcache *
- rb_vm_search_method_slowpath(const struct rb_callinfo *ci, VALUE klass)
- {
- const struct rb_callcache *cc;
- VM_ASSERT(RB_TYPE_P(klass, T_CLASS) || RB_TYPE_P(klass, T_ICLASS));
- RB_VM_LOCK_ENTER();
- {
- cc = vm_search_cc(klass, ci);
- VM_ASSERT(cc);
- VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
- VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
- VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
- VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED…
Large files files are truncated, but you can click here to view the full file