summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--ChangeLog9
-rw-r--r--compile.c38
-rw-r--r--insns.def10
-rw-r--r--iseq.c16
-rw-r--r--vm_core.h17
-rw-r--r--vm_insnhelper.c18
6 files changed, 58 insertions, 50 deletions
diff --git a/ChangeLog b/ChangeLog
index 9450921fda..51780f39ab 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,12 @@
+Tue Oct 16 02:17:35 2012 Koichi Sasada <ko1@atdot.net>
+
+ * vm_core.h (VM_CALL_*): rename VM_CALL_*_BIT
+ to VM_CALL_* (remove `_BIT' suffix).
+ Add comments on each macro.
+ Remove unused macro VM_CALL_TAILRECURSION_BIT.
+
+ * compile.c, iseq.c, insns.def, vm_insnhelper.c: ditto.
+
Mon Oct 15 22:14:44 2012 Nobuyoshi Nakada <nobu@ruby-lang.org>
* test/ruby/envutil.rb (Test::Unit::Assertions#file_assertion):
diff --git a/compile.c b/compile.c
index ab586dc47b..9dc5a8dd5a 100644
--- a/compile.c
+++ b/compile.c
@@ -209,10 +209,10 @@ r_value(VALUE value)
ADD_INSN((seq), (line), putself)
#define ADD_CALL(seq, line, id, argc) \
- ADD_SEND_R((seq), (line), (id), (argc), (VALUE)Qfalse, (VALUE)INT2FIX(VM_CALL_FCALL_BIT))
+ ADD_SEND_R((seq), (line), (id), (argc), (VALUE)Qfalse, (VALUE)INT2FIX(VM_CALL_FCALL))
#define ADD_CALL_WITH_BLOCK(seq, line, id, argc, block) \
- ADD_SEND_R((seq), (line), (id), (argc), (block), (VALUE)INT2FIX(VM_CALL_FCALL_BIT))
+ ADD_SEND_R((seq), (line), (id), (argc), (block), (VALUE)INT2FIX(VM_CALL_FCALL))
#define ADD_SEND_R(seq, line, id, argc, block, flag) \
ADD_ELEM((seq), (LINK_ELEMENT *) \
@@ -948,7 +948,7 @@ new_callinfo(rb_iseq_t *iseq, ID mid, int argc, VALUE block, unsigned long flag)
}
else {
ci->blockiseq = 0;
- if (!(ci->flag & (VM_CALL_ARGS_BLOCKARG_BIT | VM_CALL_ARGS_SPLAT_BIT))) {
+ if (!(ci->flag & (VM_CALL_ARGS_SPLAT | VM_CALL_ARGS_BLOCKARG))) {
ci->flag |= VM_CALL_ARGS_SKIP_SETUP;
}
}
@@ -1859,7 +1859,7 @@ iseq_peephole_optimize(rb_iseq_t *iseq, LINK_ELEMENT *list, const int do_tailcal
* send ...
* leave
* =>
- * send ..., ... | VM_CALL_TAILCALL_BIT, ...
+ * send ..., ... | VM_CALL_TAILCALL, ...
* leave # unreachable
*/
INSN *piobj = (INSN *)get_prev_insn((INSN *)list);
@@ -1867,7 +1867,7 @@ iseq_peephole_optimize(rb_iseq_t *iseq, LINK_ELEMENT *list, const int do_tailcal
if (piobj->insn_id == BIN(send)) {
rb_call_info_t *ci = (rb_call_info_t *)piobj->operands[0];
if (ci->blockiseq == 0) {
- ci->flag |= VM_CALL_TAILCALL_BIT;
+ ci->flag |= VM_CALL_TAILCALL;
}
}
}
@@ -3045,7 +3045,7 @@ setup_args(rb_iseq_t *iseq, LINK_ANCHOR *args, NODE *argn, VALUE *flag)
INIT_ANCHOR(args_splat);
if (argn && nd_type(argn) == NODE_BLOCK_PASS) {
COMPILE(arg_block, "block", argn->nd_body);
- *flag |= VM_CALL_ARGS_BLOCKARG_BIT;
+ *flag |= VM_CALL_ARGS_BLOCKARG;
argn = argn->nd_head;
}
@@ -3056,7 +3056,7 @@ setup_args(rb_iseq_t *iseq, LINK_ANCHOR *args, NODE *argn, VALUE *flag)
COMPILE(args, "args (splat)", argn->nd_head);
argc = INT2FIX(1);
nsplat++;
- *flag |= VM_CALL_ARGS_SPLAT_BIT;
+ *flag |= VM_CALL_ARGS_SPLAT;
break;
}
case NODE_ARGSCAT:
@@ -3079,7 +3079,7 @@ setup_args(rb_iseq_t *iseq, LINK_ANCHOR *args, NODE *argn, VALUE *flag)
}
INSERT_LIST(args_splat, tmp);
nsplat++;
- *flag |= VM_CALL_ARGS_SPLAT_BIT;
+ *flag |= VM_CALL_ARGS_SPLAT;
if (next_is_array) {
argc = INT2FIX(compile_array(iseq, args, argn->nd_head, COMPILE_ARRAY_TYPE_ARGS) + 1);
@@ -3111,7 +3111,7 @@ setup_args(rb_iseq_t *iseq, LINK_ANCHOR *args, NODE *argn, VALUE *flag)
ADD_SEQ(args, args_splat);
}
- if (*flag & VM_CALL_ARGS_BLOCKARG_BIT) {
+ if (*flag & VM_CALL_ARGS_BLOCKARG) {
ADD_SEQ(args, arg_block);
}
return argc;
@@ -3987,7 +3987,7 @@ iseq_compile_each(rb_iseq_t *iseq, LINK_ANCHOR *ret, NODE * node, int poped)
if (!poped) {
ADD_INSN1(ret, nd_line(node), setn, FIXNUM_INC(argc, 2+boff));
}
- if (flag & VM_CALL_ARGS_SPLAT_BIT) {
+ if (flag & VM_CALL_ARGS_SPLAT) {
ADD_INSN1(ret, nd_line(node), newarray, INT2FIX(1));
if (boff > 0) {
ADD_INSN1(ret, nd_line(node), dupn, INT2FIX(3));
@@ -4024,7 +4024,7 @@ iseq_compile_each(rb_iseq_t *iseq, LINK_ANCHOR *ret, NODE * node, int poped)
if (!poped) {
ADD_INSN1(ret, nd_line(node), setn, FIXNUM_INC(argc, 2+boff));
}
- if (flag & VM_CALL_ARGS_SPLAT_BIT) {
+ if (flag & VM_CALL_ARGS_SPLAT) {
ADD_INSN1(ret, nd_line(node), newarray, INT2FIX(1));
if (boff > 0) {
ADD_INSN1(ret, nd_line(node), dupn, INT2FIX(3));
@@ -4283,10 +4283,10 @@ iseq_compile_each(rb_iseq_t *iseq, LINK_ANCHOR *ret, NODE * node, int poped)
switch (nd_type(node)) {
case NODE_VCALL:
- flag |= VM_CALL_VCALL_BIT;
+ flag |= VM_CALL_VCALL;
/* VCALL is funcall, so fall through */
case NODE_FCALL:
- flag |= VM_CALL_FCALL_BIT;
+ flag |= VM_CALL_FCALL;
}
ADD_SEND_R(ret, nd_line(node), ID2SYM(mid),
@@ -4340,7 +4340,7 @@ iseq_compile_each(rb_iseq_t *iseq, LINK_ANCHOR *ret, NODE * node, int poped)
int idx = liseq->local_size - liseq->arg_rest;
ADD_INSN2(args, nd_line(node), getlocal, INT2FIX(idx), INT2FIX(lvar_level));
argc = INT2FIX(liseq->arg_rest + 1);
- flag |= VM_CALL_ARGS_SPLAT_BIT;
+ flag |= VM_CALL_ARGS_SPLAT;
}
if (liseq->arg_post_len) {
@@ -4374,7 +4374,7 @@ iseq_compile_each(rb_iseq_t *iseq, LINK_ANCHOR *ret, NODE * node, int poped)
ADD_INSN1(ret, nd_line(node), putobject, nd_type(node) == NODE_ZSUPER ? Qfalse : Qtrue);
ADD_SEQ(ret, args);
ADD_INSN1(ret, nd_line(node), invokesuper, new_callinfo(iseq, 0, FIX2INT(argc), parent_block,
- flag | VM_CALL_SUPER_BIT | VM_CALL_FCALL_BIT));
+ flag | VM_CALL_SUPER | VM_CALL_FCALL));
if (poped) {
ADD_INSN(ret, nd_line(node), pop);
@@ -5123,7 +5123,7 @@ iseq_compile_each(rb_iseq_t *iseq, LINK_ANCHOR *ret, NODE * node, int poped)
argc = setup_args(iseq, args, node->nd_args, &flag);
if (node->nd_recv == (NODE *) 1) {
- flag |= VM_CALL_FCALL_BIT;
+ flag |= VM_CALL_FCALL;
ADD_INSN(recv, nd_line(node), putself);
}
else {
@@ -5138,16 +5138,16 @@ iseq_compile_each(rb_iseq_t *iseq, LINK_ANCHOR *ret, NODE * node, int poped)
ADD_SEQ(ret, recv);
ADD_SEQ(ret, args);
- if (flag & VM_CALL_ARGS_BLOCKARG_BIT) {
+ if (flag & VM_CALL_ARGS_BLOCKARG) {
ADD_INSN1(ret, nd_line(node), topn, INT2FIX(1));
- if (flag & VM_CALL_ARGS_SPLAT_BIT) {
+ if (flag & VM_CALL_ARGS_SPLAT) {
ADD_INSN1(ret, nd_line(node), putobject, INT2FIX(-1));
ADD_SEND(ret, nd_line(node), ID2SYM(idAREF), INT2FIX(1));
}
ADD_INSN1(ret, nd_line(node), setn, FIXNUM_INC(argc, 3));
ADD_INSN (ret, nd_line(node), pop);
}
- else if (flag & VM_CALL_ARGS_SPLAT_BIT) {
+ else if (flag & VM_CALL_ARGS_SPLAT) {
ADD_INSN(ret, nd_line(node), dup);
ADD_INSN1(ret, nd_line(node), putobject, INT2FIX(-1));
ADD_SEND(ret, nd_line(node), ID2SYM(idAREF), INT2FIX(1));
diff --git a/insns.def b/insns.def
index 6e516fbd8d..19218d66c8 100644
--- a/insns.def
+++ b/insns.def
@@ -959,12 +959,12 @@ DEFINE_INSN
send
(CALL_INFO ci)
(...)
-(VALUE val) // inc += - (int)(ci->orig_argc + ((ci->flag & VM_CALL_ARGS_BLOCKARG_BIT) ? 1 : 0));
+(VALUE val) // inc += - (int)(ci->orig_argc + ((ci->flag & VM_CALL_ARGS_BLOCKARG) ? 1 : 0));
{
ci->argc = ci->orig_argc;
ci->blockptr = 0;
- if (!LIKELY(ci->flag & VM_CALL_ARGS_SKIP_SETUP)) {
+ if (UNLIKELY(!(ci->flag & VM_CALL_ARGS_SKIP_SETUP))) {
vm_caller_setup_args(th, reg_cfp, ci);
}
vm_search_method(ci, ci->recv = TOPN(ci->argc));
@@ -980,12 +980,12 @@ DEFINE_INSN
invokesuper
(CALL_INFO ci)
(...)
-(VALUE val) // inc += - (int)(ci->orig_argc + ((ci->flag & VM_CALL_ARGS_BLOCKARG_BIT) ? 1 : 0));
+(VALUE val) // inc += - (int)(ci->orig_argc + ((ci->flag & VM_CALL_ARGS_BLOCKARG) ? 1 : 0));
{
ci->argc = ci->orig_argc;
- ci->blockptr = !(ci->flag & VM_CALL_ARGS_BLOCKARG_BIT) ? GET_BLOCK_PTR() : 0;
+ ci->blockptr = !(ci->flag & VM_CALL_ARGS_BLOCKARG) ? GET_BLOCK_PTR() : 0;
- if (!LIKELY(ci->flag & VM_CALL_ARGS_SKIP_SETUP)) {
+ if (UNLIKELY(!(ci->flag & VM_CALL_ARGS_SKIP_SETUP))) {
vm_caller_setup_args(th, reg_cfp, ci);
}
ci->recv = GET_SELF();
diff --git a/iseq.c b/iseq.c
index 3244e44ab8..b0cf8014c9 100644
--- a/iseq.c
+++ b/iseq.c
@@ -1061,14 +1061,14 @@ insn_operand_intern(rb_iseq_t *iseq,
if (ci->flag) {
VALUE flags = rb_ary_new();
- if (ci->flag & VM_CALL_ARGS_SPLAT_BIT) rb_ary_push(flags, rb_str_new2("ARGS_SPLAT"));
- if (ci->flag & VM_CALL_ARGS_BLOCKARG_BIT) rb_ary_push(flags, rb_str_new2("ARGS_BLOCKARG"));
- if (ci->flag & VM_CALL_FCALL_BIT) rb_ary_push(flags, rb_str_new2("FCALL"));
- if (ci->flag & VM_CALL_VCALL_BIT) rb_ary_push(flags, rb_str_new2("VCALL"));
- if (ci->flag & VM_CALL_TAILCALL_BIT) rb_ary_push(flags, rb_str_new2("TAILCALL"));
- if (ci->flag & VM_CALL_SUPER_BIT) rb_ary_push(flags, rb_str_new2("SUPER"));
- if (ci->flag & VM_CALL_OPT_SEND_BIT) rb_ary_push(flags, rb_str_new2("SNED")); /* maybe not reachable */
- if (ci->flag & VM_CALL_ARGS_SKIP_SETUP) rb_ary_push(flags, rb_str_new2("ARGS_SKIP"));
+ if (ci->flag & VM_CALL_ARGS_SPLAT) rb_ary_push(flags, rb_str_new2("ARGS_SPLAT"));
+ if (ci->flag & VM_CALL_ARGS_BLOCKARG) rb_ary_push(flags, rb_str_new2("ARGS_BLOCKARG"));
+ if (ci->flag & VM_CALL_FCALL) rb_ary_push(flags, rb_str_new2("FCALL"));
+ if (ci->flag & VM_CALL_VCALL) rb_ary_push(flags, rb_str_new2("VCALL"));
+ if (ci->flag & VM_CALL_TAILCALL) rb_ary_push(flags, rb_str_new2("TAILCALL"));
+ if (ci->flag & VM_CALL_SUPER) rb_ary_push(flags, rb_str_new2("SUPER"));
+ if (ci->flag & VM_CALL_OPT_SEND) rb_ary_push(flags, rb_str_new2("SNED")); /* maybe not reachable */
+ if (ci->flag & VM_CALL_ARGS_SKIP_SETUP) rb_ary_push(flags, rb_str_new2("ARGS_SKIP")); /* maybe not reachable */
rb_ary_push(ary, rb_ary_join(flags, rb_str_new2("|")));
}
ret = rb_sprintf("<callinfo!%"PRIsVALUE">", rb_ary_join(ary, rb_str_new2(", ")));
diff --git a/vm_core.h b/vm_core.h
index 0524616e38..c53d68601c 100644
--- a/vm_core.h
+++ b/vm_core.h
@@ -650,15 +650,14 @@ enum vm_check_match_type {
#define VM_CHECKMATCH_TYPE_MASK 0x03
#define VM_CHECKMATCH_ARRAY 0x04
-#define VM_CALL_ARGS_SPLAT_BIT (0x01 << 1)
-#define VM_CALL_ARGS_BLOCKARG_BIT (0x01 << 2)
-#define VM_CALL_FCALL_BIT (0x01 << 3)
-#define VM_CALL_VCALL_BIT (0x01 << 4)
-#define VM_CALL_TAILCALL_BIT (0x01 << 5)
-#define VM_CALL_TAILRECURSION_BIT (0x01 << 6)
-#define VM_CALL_SUPER_BIT (0x01 << 7)
-#define VM_CALL_OPT_SEND_BIT (0x01 << 8)
-#define VM_CALL_ARGS_SKIP_SETUP (0x01 << 9)
+#define VM_CALL_ARGS_SPLAT (0x01 << 1) /* m(*args) */
+#define VM_CALL_ARGS_BLOCKARG (0x01 << 2) /* m(&block) */
+#define VM_CALL_FCALL (0x01 << 3) /* m(...) */
+#define VM_CALL_VCALL (0x01 << 4) /* m */
+#define VM_CALL_TAILCALL (0x01 << 5) /* located at tail position */
+#define VM_CALL_SUPER (0x01 << 6) /* super */
+#define VM_CALL_OPT_SEND (0x01 << 7) /* internal flag */
+#define VM_CALL_ARGS_SKIP_SETUP (0x01 << 8) /* (flag & (SPLAT|BLOCKARG)) && blockiseq == 0 */
enum vm_special_object_type {
VM_SPECIAL_OBJECT_VMCORE = 1,
diff --git a/vm_insnhelper.c b/vm_insnhelper.c
index 3a7eae8624..c3bb69b274 100644
--- a/vm_insnhelper.c
+++ b/vm_insnhelper.c
@@ -1013,7 +1013,7 @@ vm_base_ptr(rb_control_frame_t *cfp)
static void
vm_caller_setup_args(const rb_thread_t *th, rb_control_frame_t *cfp, rb_call_info_t *ci)
{
- if (UNLIKELY(ci->flag & VM_CALL_ARGS_BLOCKARG_BIT)) {
+ if (UNLIKELY(ci->flag & VM_CALL_ARGS_BLOCKARG)) {
rb_proc_t *po;
VALUE proc;
@@ -1042,7 +1042,7 @@ vm_caller_setup_args(const rb_thread_t *th, rb_control_frame_t *cfp, rb_call_inf
/* expand top of stack? */
- if (UNLIKELY(ci->flag & VM_CALL_ARGS_SPLAT_BIT)) {
+ if (UNLIKELY(ci->flag & VM_CALL_ARGS_SPLAT)) {
VALUE ary = *(cfp->sp - 1);
VALUE *ptr;
int i;
@@ -1206,7 +1206,7 @@ vm_call_iseq_setup_2(rb_thread_t *th, rb_control_frame_t *cfp, rb_call_info_t *c
/* stack overflow check */
CHECK_STACK_OVERFLOW(cfp, iseq->stack_max);
- if (LIKELY(!(ci->flag & VM_CALL_TAILCALL_BIT))) {
+ if (LIKELY(!(ci->flag & VM_CALL_TAILCALL))) {
VALUE *sp = argv + iseq->arg_size;
/* clear local variables */
@@ -1443,7 +1443,7 @@ vm_call_opt_send(rb_thread_t *th, rb_control_frame_t *reg_cfp, rb_call_info_t *c
ci->argc -= 1;
DEC_SP(1);
- ci->flag |= VM_CALL_FCALL_BIT | VM_CALL_OPT_SEND_BIT;
+ ci->flag |= VM_CALL_FCALL | VM_CALL_OPT_SEND;
return vm_call_method(th, reg_cfp, ci);
}
@@ -1567,15 +1567,15 @@ vm_call_method(rb_thread_t *th, rb_control_frame_t *cfp, rb_call_info_t *ci)
}
else {
int noex_safe;
- if (!(ci->flag & VM_CALL_FCALL_BIT) && (ci->me->flag & NOEX_MASK) & NOEX_PRIVATE) {
+ if (!(ci->flag & VM_CALL_FCALL) && (ci->me->flag & NOEX_MASK) & NOEX_PRIVATE) {
int stat = NOEX_PRIVATE;
- if (ci->flag & VM_CALL_VCALL_BIT) {
+ if (ci->flag & VM_CALL_VCALL) {
stat |= NOEX_VCALL;
}
val = vm_method_missing(th, cfp, ci, stat);
}
- else if (!(ci->flag & VM_CALL_OPT_SEND_BIT) && (ci->me->flag & NOEX_MASK) & NOEX_PROTECTED) {
+ else if (!(ci->flag & VM_CALL_OPT_SEND) && (ci->me->flag & NOEX_MASK) & NOEX_PROTECTED) {
if (!rb_obj_is_kind_of(cfp->self, ci->defined_class)) {
val = vm_method_missing(th, cfp, ci, NOEX_PROTECTED);
}
@@ -1594,10 +1594,10 @@ vm_call_method(rb_thread_t *th, rb_control_frame_t *cfp, rb_call_info_t *ci)
else {
/* method missing */
int stat = 0;
- if (ci->flag & VM_CALL_VCALL_BIT) {
+ if (ci->flag & VM_CALL_VCALL) {
stat |= NOEX_VCALL;
}
- if (ci->flag & VM_CALL_SUPER_BIT) {
+ if (ci->flag & VM_CALL_SUPER) {
stat |= NOEX_SUPER;
}
if (ci->mid == idMethodMissing) {