 compile.c              | 10 +++++-----
 iseq.c                 |  2 +-
 test/ruby/test_mjit.rb |  6 +++---
 vm_core.h              |  2 +-
 4 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/compile.c b/compile.c
index 484399abc6..e906bd1e10 100644
--- a/compile.c
+++ b/compile.c
@@ -1357,18 +1357,18 @@ new_child_iseq_with_callback(rb_iseq_t *iseq, const struct rb_iseq_new_with_call
static void
set_catch_except_p(struct rb_iseq_constant_body *body)
{
- body->catch_except_p = TRUE;
+ body->catch_except_p = true;
if (body->parent_iseq != NULL) {
set_catch_except_p(ISEQ_BODY(body->parent_iseq));
}
}
-/* Set body->catch_except_p to TRUE if the ISeq may catch an exception. If it is FALSE,
- JIT-ed code may be optimized. If we are extremely conservative, we should set TRUE
+/* Set body->catch_except_p to true if the ISeq may catch an exception. If it is false,
+ JIT-ed code may be optimized. If we are extremely conservative, we should set true
if catch table exists. But we want to optimize while loop, which always has catch
table entries for break/next/redo.
- So this function sets TRUE for limited ISeqs with break/next/redo catch table entries
+ So this function sets true for limited ISeqs with break/next/redo catch table entries
whose child ISeq would really raise an exception. */
static void
update_catch_except_flags(struct rb_iseq_constant_body *body)
@@ -1399,7 +1399,7 @@ update_catch_except_flags(struct rb_iseq_constant_body *body)
if (entry->type != CATCH_TYPE_BREAK
&& entry->type != CATCH_TYPE_NEXT
&& entry->type != CATCH_TYPE_REDO) {
- body->catch_except_p = TRUE;
+ body->catch_except_p = true;
break;
}
}
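
For orientation, below is a minimal, self-contained C sketch of the logic this hunk touches. The type and function names (iseq_body, catch_entry, scan_catch_table, mark_catch_except) are invented for the example and are not the real compile.c/vm_core.h definitions; only the break/next/redo filtering and the parent propagation mirror the code above.

    #include <stdbool.h>
    #include <stdio.h>

    /* Hypothetical, simplified stand-ins for the real ISeq structures. */
    enum catch_type {
        CATCH_TYPE_RESCUE, CATCH_TYPE_ENSURE,
        CATCH_TYPE_BREAK, CATCH_TYPE_NEXT, CATCH_TYPE_REDO
    };

    struct catch_entry { enum catch_type type; };

    struct iseq_body {
        struct iseq_body *parent;            /* enclosing ISeq, if any */
        bool catch_except_p;                 /* the flag this commit turns into a bool */
        int catch_table_size;
        const struct catch_entry *catch_table;
    };

    /* Mirrors set_catch_except_p: mark this body and every ancestor. */
    static void
    mark_catch_except(struct iseq_body *body)
    {
        body->catch_except_p = true;
        if (body->parent != NULL) mark_catch_except(body->parent);
    }

    /* Mirrors the catch-table scan in update_catch_except_flags: only entries
       other than break/next/redo mean a real exception may be caught here. */
    static void
    scan_catch_table(struct iseq_body *body)
    {
        for (int i = 0; i < body->catch_table_size; i++) {
            enum catch_type t = body->catch_table[i].type;
            if (t != CATCH_TYPE_BREAK && t != CATCH_TYPE_NEXT && t != CATCH_TYPE_REDO) {
                body->catch_except_p = true;
                break;
            }
        }
    }

    int
    main(void)
    {
        struct catch_entry while_loop[] = { { CATCH_TYPE_BREAK }, { CATCH_TYPE_NEXT } };
        struct catch_entry rescue_blk[] = { { CATCH_TYPE_RESCUE } };
        struct iseq_body outer = { NULL, false, 2, while_loop };
        struct iseq_body inner = { &outer, false, 1, rescue_blk };

        scan_catch_table(&outer);             /* break/next only: stays false */
        scan_catch_table(&inner);             /* rescue entry: becomes true */
        if (inner.catch_except_p) mark_catch_except(&inner); /* propagate upward */

        printf("outer: %s, inner: %s\n",
               outer.catch_except_p ? "true" : "false",
               inner.catch_except_p ? "true" : "false");
        return 0;
    }
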
diff --git a/iseq.c b/iseq.c
index f17a2d49b6..4a2c9a33ee 100644
--- a/iseq.c
+++ b/iseq.c
@@ -2411,7 +2411,7 @@ rb_iseq_disasm_recursive(const rb_iseq_t *iseq, VALUE indent)
rb_str_cat2(str, "== disasm: ");
rb_str_append(str, iseq_inspect(iseq));
- rb_str_catf(str, " (catch: %s)", body->catch_except_p ? "TRUE" : "FALSE");
+ rb_str_catf(str, " (catch: %s)", body->catch_except_p ? "true" : "false");
if ((l = RSTRING_LEN(str) - indent_len) < header_minlen) {
rb_str_modify_expand(str, header_minlen - l);
memset(RSTRING_END(str), '=', header_minlen - l);
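
For illustration, a header line produced by this function would now read something like the following (the ISeq label and location are made up for the example; only the lowercase "(catch: ...)" part comes from this change):

    == disasm: #<ISeq:foo@-e:1 (1,0)-(3,3)> (catch: false) ================
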
diff --git a/test/ruby/test_mjit.rb b/test/ruby/test_mjit.rb
index 3a1dcf7f09..9cd93855bd 100644
--- a/test/ruby/test_mjit.rb
+++ b/test/ruby/test_mjit.rb
@@ -782,9 +782,9 @@ class TestMJIT < Test::Unit::TestCase
def test_catching_deep_exception
assert_eval_with_jit("#{<<~"begin;"}\n#{<<~"end;"}", stdout: '1', success_count: 4)
begin;
- def catch_true(paths, prefixes) # catch_except_p: TRUE
- prefixes.each do |prefix| # catch_except_p: TRUE
- paths.each do |path| # catch_except_p: FALSE
+ def catch_true(paths, prefixes) # catch_except_p: true
+ prefixes.each do |prefix| # catch_except_p: true
+ paths.each do |path| # catch_except_p: false
return path
end
end
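
The annotations in this test follow from the compile.c logic above: the innermost block's catch table holds only break/next/redo entries, so its flag stays false, while the enclosing frames are marked true because a child may raise. Per the comment in compile.c, a false flag is what allows JIT-ed code to be optimized; the sketch below shows that kind of check with invented names (compile_frame and the iseq_body stand-in from the earlier sketch are hypothetical, not the real MJIT code generator):

    /* Hypothetical consumer of the flag, reusing the simplified iseq_body above. */
    static void
    compile_frame(const struct iseq_body *body)
    {
        if (body->catch_except_p) {
            /* an exception handler may resume in this frame: stay conservative,
               e.g. keep intermediate values where the VM can see them */
        }
        else {
            /* no handler can catch here: freer to optimize the generated code */
        }
    }
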
diff --git a/vm_core.h b/vm_core.h
index 45ec111155..eee25161f5 100644
--- a/vm_core.h
+++ b/vm_core.h
@@ -474,7 +474,7 @@ struct rb_iseq_constant_body {
iseq_bits_t single;
} mark_bits;
- char catch_except_p; /* If a frame of this ISeq may catch exception, set TRUE */
+ bool catch_except_p; // If a frame of this ISeq may catch exception, set true.
// If true, this ISeq is leaf *and* backtraces are not used, for example,
// by rb_profile_frames. We verify only leafness on VM_CHECK_MODE though.
// Note that GC allocations might use backtraces due to