-rw-r--r--  array.c          94
-rw-r--r--  compile.c         4
-rw-r--r--  enum.c            6
-rw-r--r--  gc.c             58
-rw-r--r--  string.c          6
-rw-r--r--  variable.c        2
-rw-r--r--  vm_args.c         4
-rw-r--r--  vm_insnhelper.c   2
8 files changed, 88 insertions, 88 deletions
diff --git a/array.c b/array.c
index 83a5ff41f5..0b2be722ea 100644
--- a/array.c
+++ b/array.c
@@ -102,7 +102,7 @@ VALUE rb_cArray;
} while (0)
#define ARY_CAPA(ary) (ARY_EMBED_P(ary) ? RARRAY_EMBED_LEN_MAX : \
- ARY_SHARED_ROOT_P(ary) ? RARRAY_LEN(ary) : RARRAY(ary)->as.heap.aux.capa)
+ ARY_SHARED_ROOT_P(ary) ? RARRAY_LEN(ary) : RARRAY(ary)->as.heap.aux.capa)
#define ARY_SET_CAPA(ary, n) do { \
assert(!ARY_EMBED_P(ary)); \
assert(!ARY_SHARED_P(ary)); \
@@ -211,16 +211,16 @@ ary_resize_capa(VALUE ary, long capacity)
ARY_SET_HEAP_LEN(ary, len);
}
else {
- SIZED_REALLOC_N(RARRAY(ary)->as.heap.ptr, VALUE, capacity, RARRAY(ary)->as.heap.aux.capa);
+ SIZED_REALLOC_N(RARRAY(ary)->as.heap.ptr, VALUE, capacity, RARRAY(ary)->as.heap.aux.capa);
}
ARY_SET_CAPA(ary, (capacity));
}
else {
if (!ARY_EMBED_P(ary)) {
long len = RARRAY_LEN(ary);
- const VALUE *ptr = RARRAY_CONST_PTR(ary);
+ const VALUE *ptr = RARRAY_CONST_PTR(ary);
- if (len > capacity) len = capacity;
+ if (len > capacity) len = capacity;
MEMCPY((VALUE *)RARRAY(ary)->as.ary, ptr, VALUE, len);
FL_SET_EMBED(ary);
ARY_SET_LEN(ary, len);
@@ -326,9 +326,9 @@ rb_ary_modify(VALUE ary)
ARY_SET_EMBED_LEN(ary, len);
}
else if (ARY_SHARED_OCCUPIED(shared) && len > ((shared_len = RARRAY_LEN(shared))>>1)) {
- long shift = RARRAY_CONST_PTR(ary) - RARRAY_CONST_PTR(shared);
+ long shift = RARRAY_CONST_PTR(ary) - RARRAY_CONST_PTR(shared);
FL_UNSET_SHARED(ary);
- ARY_SET_PTR(ary, RARRAY_CONST_PTR(shared));
+ ARY_SET_PTR(ary, RARRAY_CONST_PTR(shared));
ARY_SET_CAPA(ary, shared_len);
RARRAY_PTR_USE(ary, ptr, {
MEMMOVE(ptr, ptr+shift, VALUE, len);
@@ -362,9 +362,9 @@ ary_ensure_room_for_push(VALUE ary, long add_len)
if (new_len > RARRAY_EMBED_LEN_MAX) {
VALUE shared = ARY_SHARED(ary);
if (ARY_SHARED_OCCUPIED(shared)) {
- if (RARRAY_CONST_PTR(ary) - RARRAY_CONST_PTR(shared) + new_len <= RARRAY_LEN(shared)) {
+ if (RARRAY_CONST_PTR(ary) - RARRAY_CONST_PTR(shared) + new_len <= RARRAY_LEN(shared)) {
rb_ary_modify_check(ary);
- return shared;
+ return shared;
}
else {
/* if array is shared, then it is likely it participate in push/shift pattern */
@@ -377,7 +377,7 @@ ary_ensure_room_for_push(VALUE ary, long add_len)
}
}
}
- rb_ary_modify(ary);
+ rb_ary_modify(ary);
}
else {
rb_ary_modify_check(ary);
@@ -459,7 +459,7 @@ ary_new(VALUE klass, long capa)
ary = ary_alloc(klass);
if (capa > RARRAY_EMBED_LEN_MAX) {
- ptr = ALLOC_N(VALUE, capa);
+ ptr = ALLOC_N(VALUE, capa);
FL_UNSET_EMBED(ary);
ARY_SET_PTR(ary, ptr);
ARY_SET_CAPA(ary, capa);
@@ -539,8 +539,8 @@ void
rb_ary_free(VALUE ary)
{
if (ARY_OWNS_HEAP_P(ary)) {
- RB_DEBUG_COUNTER_INC(obj_ary_ptr);
- ruby_sized_xfree((void *)ARY_HEAP_PTR(ary), ARY_HEAP_SIZE(ary));
+ RB_DEBUG_COUNTER_INC(obj_ary_ptr);
+ ruby_sized_xfree((void *)ARY_HEAP_PTR(ary), ARY_HEAP_SIZE(ary));
}
else {
RB_DEBUG_COUNTER_INC(obj_ary_embed);
@@ -588,14 +588,14 @@ ary_make_shared(VALUE ary)
FL_UNSET_EMBED(shared);
ARY_SET_LEN((VALUE)shared, capa);
- ARY_SET_PTR((VALUE)shared, RARRAY_CONST_PTR(ary));
- ary_mem_clear((VALUE)shared, len, capa - len);
+ ARY_SET_PTR((VALUE)shared, RARRAY_CONST_PTR(ary));
+ ary_mem_clear((VALUE)shared, len, capa - len);
FL_SET_SHARED_ROOT(shared);
ARY_SET_SHARED_NUM((VALUE)shared, 1);
FL_SET_SHARED(ary);
ARY_SET_SHARED(ary, (VALUE)shared);
OBJ_FREEZE(shared);
- return (VALUE)shared;
+ return (VALUE)shared;
}
}
@@ -606,7 +606,7 @@ ary_make_substitution(VALUE ary)
if (len <= RARRAY_EMBED_LEN_MAX) {
VALUE subst = rb_ary_new2(len);
- ary_memcpy(subst, 0, len, RARRAY_CONST_PTR(ary));
+ ary_memcpy(subst, 0, len, RARRAY_CONST_PTR(ary));
ARY_SET_EMBED_LEN(subst, len);
return subst;
}
@@ -729,8 +729,8 @@ rb_ary_initialize(int argc, VALUE *argv, VALUE ary)
rb_ary_modify(ary);
if (argc == 0) {
- if (ARY_OWNS_HEAP_P(ary) && RARRAY_CONST_PTR(ary) != 0) {
- ruby_sized_xfree((void *)RARRAY_CONST_PTR(ary), ARY_HEAP_SIZE(ary));
+ if (ARY_OWNS_HEAP_P(ary) && RARRAY_CONST_PTR(ary) != 0) {
+ ruby_sized_xfree((void *)RARRAY_CONST_PTR(ary), ARY_HEAP_SIZE(ary));
}
rb_ary_unshare_safe(ary);
FL_SET_EMBED(ary);
@@ -837,7 +837,7 @@ ary_make_partial(VALUE ary, VALUE klass, long offset, long len)
if (len <= RARRAY_EMBED_LEN_MAX) {
VALUE result = ary_alloc(klass);
- ary_memcpy(result, 0, len, RARRAY_CONST_PTR(ary) + offset);
+ ary_memcpy(result, 0, len, RARRAY_CONST_PTR(ary) + offset);
ARY_SET_EMBED_LEN(result, len);
return result;
}
@@ -1120,8 +1120,8 @@ ary_ensure_room_for_unshift(VALUE ary, int argc)
VALUE shared = ARY_SHARED(ary);
capa = RARRAY_LEN(shared);
if (ARY_SHARED_OCCUPIED(shared) && capa > new_len) {
- head = RARRAY_CONST_PTR(ary);
- sharedp = RARRAY_CONST_PTR(shared);
+ head = RARRAY_CONST_PTR(ary);
+ sharedp = RARRAY_CONST_PTR(shared);
goto makeroom_if_need;
}
}
@@ -1134,11 +1134,11 @@ ary_ensure_room_for_unshift(VALUE ary, int argc)
/* use shared array for big "queues" */
if (new_len > ARY_DEFAULT_SIZE * 4) {
- /* make a room for unshifted items */
+ /* make a room for unshifted items */
capa = ARY_CAPA(ary);
ary_make_shared(ary);
- head = sharedp = RARRAY_CONST_PTR(ary);
+ head = sharedp = RARRAY_CONST_PTR(ary);
goto makeroom;
makeroom_if_need:
if (head - sharedp < argc) {
@@ -1576,7 +1576,7 @@ rb_ary_splice(VALUE ary, long beg, long len, const VALUE *rptr, long rlen)
}
{
- const VALUE *optr = RARRAY_CONST_PTR(ary);
+ const VALUE *optr = RARRAY_CONST_PTR(ary);
rofs = (rptr >= optr && rptr < optr + olen) ? rptr - optr : -1;
}
@@ -1589,7 +1589,7 @@ rb_ary_splice(VALUE ary, long beg, long len, const VALUE *rptr, long rlen)
len = beg + rlen;
ary_mem_clear(ary, olen, beg - olen);
if (rlen > 0) {
- if (rofs != -1) rptr = RARRAY_CONST_PTR(ary) + rofs;
+ if (rofs != -1) rptr = RARRAY_CONST_PTR(ary) + rofs;
ary_memcpy0(ary, beg, rlen, rptr, target_ary);
}
ARY_SET_LEN(ary, len);
@@ -1613,7 +1613,7 @@ rb_ary_splice(VALUE ary, long beg, long len, const VALUE *rptr, long rlen)
ARY_SET_LEN(ary, alen);
}
if (rlen > 0) {
- if (rofs != -1) rptr = RARRAY_CONST_PTR(ary) + rofs;
+ if (rofs != -1) rptr = RARRAY_CONST_PTR(ary) + rofs;
/* give up wb-protected ary */
MEMMOVE(RARRAY_PTR(ary) + beg, rptr, VALUE, rlen);
}
@@ -1673,7 +1673,7 @@ rb_ary_resize(VALUE ary, long len)
}
else {
if (olen > len + ARY_DEFAULT_SIZE) {
- SIZED_REALLOC_N(RARRAY(ary)->as.heap.ptr, VALUE, len, RARRAY(ary)->as.heap.aux.capa);
+ SIZED_REALLOC_N(RARRAY(ary)->as.heap.ptr, VALUE, len, RARRAY(ary)->as.heap.aux.capa);
ARY_SET_CAPA(ary, len);
}
ARY_SET_HEAP_LEN(ary, len);
@@ -1738,7 +1738,7 @@ rb_ary_aset(int argc, VALUE *argv, VALUE ary)
/* check if idx is Range */
range:
rpl = rb_ary_to_ary(argv[argc-1]);
- rb_ary_splice(ary, beg, len, RARRAY_CONST_PTR(rpl), RARRAY_LEN(rpl));
+ rb_ary_splice(ary, beg, len, RARRAY_CONST_PTR(rpl), RARRAY_LEN(rpl));
RB_GC_GUARD(rpl);
return argv[argc-1];
}
@@ -2279,8 +2279,8 @@ rb_ary_reverse_m(VALUE ary)
VALUE dup = rb_ary_new2(len);
if (len > 0) {
- const VALUE *p1 = RARRAY_CONST_PTR(ary);
- VALUE *p2 = (VALUE *)RARRAY_CONST_PTR(dup) + len - 1;
+ const VALUE *p1 = RARRAY_CONST_PTR(ary);
+ VALUE *p2 = (VALUE *)RARRAY_CONST_PTR(dup) + len - 1;
do *p2-- = *p1++; while (--len > 0);
}
ARY_SET_LEN(dup, RARRAY_LEN(ary));
@@ -2382,7 +2382,7 @@ rb_ary_rotate_m(int argc, VALUE *argv, VALUE ary)
rotated = rb_ary_new2(len);
if (len > 0) {
cnt = rotate_count(cnt, len);
- ptr = RARRAY_CONST_PTR(ary);
+ ptr = RARRAY_CONST_PTR(ary);
len -= cnt;
ary_memcpy(rotated, 0, len, ptr + cnt);
ary_memcpy(rotated, len, cnt, ptr);
@@ -2515,7 +2515,7 @@ rb_ary_sort_bang(VALUE ary)
rb_ary_unshare(ary);
}
else {
- ruby_sized_xfree((void *)ARY_HEAP_PTR(ary), ARY_HEAP_SIZE(ary));
+ ruby_sized_xfree((void *)ARY_HEAP_PTR(ary), ARY_HEAP_SIZE(ary));
}
ARY_SET_PTR(ary, RARRAY_CONST_PTR(tmp));
ARY_SET_HEAP_LEN(ary, len);
@@ -2526,7 +2526,7 @@ rb_ary_sort_bang(VALUE ary)
FL_SET_EMBED(tmp);
ARY_SET_EMBED_LEN(tmp, 0);
FL_SET(tmp, FL_FREEZE);
- }
+ }
/* tmp will be GC'ed. */
RBASIC_SET_CLASS_RAW(tmp, rb_cArray); /* rb_cArray must be marked */
}
@@ -2833,7 +2833,7 @@ append_values_at_single(VALUE result, VALUE ary, long olen, VALUE idx)
/* check if idx is Range */
else if (rb_range_beg_len(idx, &beg, &len, olen, 1)) {
if (len > 0) {
- const VALUE *const src = RARRAY_CONST_PTR(ary);
+ const VALUE *const src = RARRAY_CONST_PTR(ary);
const long end = beg + len;
const long prevlen = RARRAY_LEN(result);
if (beg < olen) {
@@ -3187,7 +3187,7 @@ rb_ary_slice_bang(int argc, VALUE *argv, VALUE ary)
len = orig_len - pos;
}
if (len == 0) return rb_ary_new2(0);
- arg2 = rb_ary_new4(len, RARRAY_CONST_PTR(ary)+pos);
+ arg2 = rb_ary_new4(len, RARRAY_CONST_PTR(ary)+pos);
RBASIC_SET_CLASS(arg2, rb_obj_class(ary));
rb_ary_splice(ary, pos, len, 0, 0);
return arg2;
@@ -3223,7 +3223,7 @@ ary_reject(VALUE orig, VALUE result)
for (i = 0; i < RARRAY_LEN(orig); i++) {
VALUE v = RARRAY_AREF(orig, i);
- if (!RTEST(rb_yield(v))) {
+ if (!RTEST(rb_yield(v))) {
rb_ary_push(result, v);
}
}
@@ -3504,14 +3504,14 @@ rb_ary_replace(VALUE copy, VALUE orig)
VALUE shared = 0;
if (ARY_OWNS_HEAP_P(copy)) {
- RARRAY_PTR_USE(copy, ptr, ruby_sized_xfree(ptr, ARY_HEAP_SIZE(copy)));
+ RARRAY_PTR_USE(copy, ptr, ruby_sized_xfree(ptr, ARY_HEAP_SIZE(copy)));
}
else if (ARY_SHARED_P(copy)) {
shared = ARY_SHARED(copy);
FL_UNSET_SHARED(copy);
}
FL_SET_EMBED(copy);
- ary_memcpy(copy, 0, RARRAY_LEN(orig), RARRAY_CONST_PTR(orig));
+ ary_memcpy(copy, 0, RARRAY_LEN(orig), RARRAY_CONST_PTR(orig));
if (shared) {
rb_ary_decrement_share(shared);
}
@@ -3520,7 +3520,7 @@ rb_ary_replace(VALUE copy, VALUE orig)
else {
VALUE shared = ary_make_shared(orig);
if (ARY_OWNS_HEAP_P(copy)) {
- RARRAY_PTR_USE(copy, ptr, ruby_sized_xfree(ptr, ARY_HEAP_SIZE(copy)));
+ RARRAY_PTR_USE(copy, ptr, ruby_sized_xfree(ptr, ARY_HEAP_SIZE(copy)));
}
else {
rb_ary_unshare_safe(copy);
@@ -3555,7 +3555,7 @@ rb_ary_clear(VALUE ary)
}
}
else if (ARY_DEFAULT_SIZE * 2 < ARY_CAPA(ary)) {
- ary_resize_capa(ary, ARY_DEFAULT_SIZE * 2);
+ ary_resize_capa(ary, ARY_DEFAULT_SIZE * 2);
}
return ary;
}
@@ -3700,7 +3700,7 @@ ary_append(VALUE x, VALUE y)
{
long n = RARRAY_LEN(y);
if (n > 0) {
- rb_ary_splice(x, RARRAY_LEN(x), 0, RARRAY_CONST_PTR(y), n);
+ rb_ary_splice(x, RARRAY_LEN(x), 0, RARRAY_CONST_PTR(y), n);
}
return x;
}
@@ -3801,11 +3801,11 @@ rb_ary_times(VALUE ary, VALUE times)
if (0 < t) {
ary_memcpy(ary2, 0, t, ptr);
while (t <= len/2) {
- ary_memcpy(ary2, t, t, RARRAY_CONST_PTR(ary2));
+ ary_memcpy(ary2, t, t, RARRAY_CONST_PTR(ary2));
t *= 2;
}
if (t < len) {
- ary_memcpy(ary2, t, len-t, RARRAY_CONST_PTR(ary2));
+ ary_memcpy(ary2, t, len-t, RARRAY_CONST_PTR(ary2));
}
}
out:
@@ -3903,8 +3903,8 @@ recursive_equal(VALUE ary1, VALUE ary2, int recur)
return Qfalse;
if (len1 < i)
return Qtrue;
- p1 = RARRAY_CONST_PTR(ary1) + i;
- p2 = RARRAY_CONST_PTR(ary2) + i;
+ p1 = RARRAY_CONST_PTR(ary1) + i;
+ p2 = RARRAY_CONST_PTR(ary2) + i;
}
else {
return Qfalse;
@@ -4949,8 +4949,8 @@ rb_ary_shuffle_bang(int argc, VALUE *argv, VALUE ary)
while (i) {
long j = RAND_UPTO(i);
VALUE tmp;
- if (len != RARRAY_LEN(ary) || ptr != RARRAY_CONST_PTR(ary)) {
- rb_raise(rb_eRuntimeError, "modified during shuffle");
+ if (len != RARRAY_LEN(ary) || ptr != RARRAY_CONST_PTR(ary)) {
+ rb_raise(rb_eRuntimeError, "modified during shuffle");
}
tmp = ptr[--i];
ptr[i] = ptr[j];
diff --git a/compile.c b/compile.c
index ba6924e713..4f98ac7997 100644
--- a/compile.c
+++ b/compile.c
@@ -1649,7 +1649,7 @@ iseq_set_arguments(rb_iseq_t *iseq, LINK_ANCHOR *const optargs, const NODE *cons
opt_table = ALLOC_N(VALUE, i+1);
- MEMCPY(opt_table, RARRAY_CONST_PTR(labels), VALUE, i+1);
+ MEMCPY(opt_table, RARRAY_CONST_PTR(labels), VALUE, i+1);
for (j = 0; j < i+1; j++) {
opt_table[j] &= ~1;
}
@@ -2304,7 +2304,7 @@ iseq_set_exception_table(rb_iseq_t *iseq)
table->size = tlen;
for (i = 0; i < table->size; i++) {
- ptr = RARRAY_CONST_PTR(tptr[i]);
+ ptr = RARRAY_CONST_PTR(tptr[i]);
entry = &table->entries[i];
entry->type = (enum catch_type)(ptr[0] & 0xffff);
entry->start = label_get_position((LABEL *)(ptr[1] & ~1));
diff --git a/enum.c b/enum.c
index 9485b7eb60..71e7a74d35 100644
--- a/enum.c
+++ b/enum.c
@@ -1171,9 +1171,9 @@ enum_sort_by(VALUE obj)
rb_ary_concat(ary, buf);
}
if (RARRAY_LEN(ary) > 2) {
- RARRAY_PTR_USE(ary, ptr,
- ruby_qsort(ptr, RARRAY_LEN(ary)/2, 2*sizeof(VALUE),
- sort_by_cmp, (void *)ary));
+ RARRAY_PTR_USE(ary, ptr,
+ ruby_qsort(ptr, RARRAY_LEN(ary)/2, 2*sizeof(VALUE),
+ sort_by_cmp, (void *)ary));
}
if (RBASIC(ary)->klass) {
rb_raise(rb_eRuntimeError, "sort_by reentered");
diff --git a/gc.c b/gc.c
index dc8f5808f2..00f51c9657 100644
--- a/gc.c
+++ b/gc.c
@@ -1532,7 +1532,7 @@ heap_page_allocate(rb_objspace_t *objspace)
/* assign heap_page entry */
page = (struct heap_page *)calloc(1, sizeof(struct heap_page));
if (page == 0) {
- aligned_free(page_body);
+ aligned_free(page_body);
rb_memerror();
}
@@ -1954,10 +1954,10 @@ newobj_of(VALUE klass, VALUE flags, VALUE v1, VALUE v2, VALUE v3, int wb_protect
#if GC_DEBUG_STRESS_TO_CLASS
if (UNLIKELY(stress_to_class)) {
- long i, cnt = RARRAY_LEN(stress_to_class);
- for (i = 0; i < cnt; ++i) {
+ long i, cnt = RARRAY_LEN(stress_to_class);
+ for (i = 0; i < cnt; ++i) {
if (klass == RARRAY_AREF(stress_to_class, i)) rb_memerror();
- }
+ }
}
#endif
if (!(during_gc ||
@@ -2215,14 +2215,14 @@ obj_free(rb_objspace_t *objspace, VALUE obj)
switch (BUILTIN_TYPE(obj)) {
case T_OBJECT:
- if (!(RANY(obj)->as.basic.flags & ROBJECT_EMBED) &&
+ if (!(RANY(obj)->as.basic.flags & ROBJECT_EMBED) &&
RANY(obj)->as.object.as.heap.ivptr) {
- xfree(RANY(obj)->as.object.as.heap.ivptr);
- RB_DEBUG_COUNTER_INC(obj_obj_ptr);
- }
- else {
+ xfree(RANY(obj)->as.object.as.heap.ivptr);
+ RB_DEBUG_COUNTER_INC(obj_obj_ptr);
+ }
+ else {
RB_DEBUG_COUNTER_INC(obj_obj_embed);
- }
+ }
break;
case T_MODULE:
case T_CLASS:
@@ -2259,7 +2259,7 @@ obj_free(rb_objspace_t *objspace, VALUE obj)
rb_str_free(obj);
break;
case T_ARRAY:
- rb_ary_free(obj);
+ rb_ary_free(obj);
break;
case T_HASH:
if (RANY(obj)->as.hash.ntbl) {
@@ -4671,16 +4671,16 @@ gc_mark_children(rb_objspace_t *objspace, VALUE obj)
break;
case T_ARRAY:
- if (FL_TEST(obj, ELTS_SHARED)) {
- gc_mark(objspace, any->as.array.as.heap.aux.shared);
+ if (FL_TEST(obj, ELTS_SHARED)) {
+ gc_mark(objspace, any->as.array.as.heap.aux.shared);
}
else {
long i, len = RARRAY_LEN(obj);
- const VALUE *ptr = RARRAY_CONST_PTR(obj);
+ const VALUE *ptr = RARRAY_CONST_PTR(obj);
for (i=0; i < len; i++) {
- gc_mark(objspace, *ptr++);
+ gc_mark(objspace, *ptr++);
}
- }
+ }
break;
case T_HASH:
@@ -4709,9 +4709,9 @@ gc_mark_children(rb_objspace_t *objspace, VALUE obj)
case T_OBJECT:
{
uint32_t i, len = ROBJECT_NUMIV(obj);
- VALUE *ptr = ROBJECT_IVPTR(obj);
+ VALUE *ptr = ROBJECT_IVPTR(obj);
for (i = 0; i < len; i++) {
- gc_mark(objspace, *ptr++);
+ gc_mark(objspace, *ptr++);
}
}
break;
@@ -9551,13 +9551,13 @@ rb_raw_obj_info(char *buff, const int buff_size, VALUE obj)
#if USE_RGENGC
const int age = RVALUE_FLAGS_AGE(RBASIC(obj)->flags);
- snprintf(buff, buff_size, "%p [%d%s%s%s%s] %s",
- (void *)obj, age,
- C(RVALUE_UNCOLLECTIBLE_BITMAP(obj), "L"),
- C(RVALUE_MARK_BITMAP(obj), "M"),
- C(RVALUE_MARKING_BITMAP(obj), "R"),
- C(RVALUE_WB_UNPROTECTED_BITMAP(obj), "U"),
- obj_type_name(obj));
+ snprintf(buff, buff_size, "%p [%d%s%s%s%s] %s",
+ (void *)obj, age,
+ C(RVALUE_UNCOLLECTIBLE_BITMAP(obj), "L"),
+ C(RVALUE_MARK_BITMAP(obj), "M"),
+ C(RVALUE_MARKING_BITMAP(obj), "R"),
+ C(RVALUE_WB_UNPROTECTED_BITMAP(obj), "U"),
+ obj_type_name(obj));
#else
snprintf(buff, buff_size, "%p [%s] %s",
(void *)obj,
@@ -9587,10 +9587,10 @@ rb_raw_obj_info(char *buff, const int buff_size, VALUE obj)
UNEXPECTED_NODE(rb_raw_obj_info);
break;
case T_ARRAY:
- snprintf(buff, buff_size, "%s [%s%s] len: %d", buff,
- C(ARY_EMBED_P(obj), "E"),
- C(ARY_SHARED_P(obj), "S"),
- (int)RARRAY_LEN(obj));
+ snprintf(buff, buff_size, "%s [%s%s] len: %d", buff,
+ C(ARY_EMBED_P(obj), "E"),
+ C(ARY_SHARED_P(obj), "S"),
+ (int)RARRAY_LEN(obj));
break;
case T_STRING: {
snprintf(buff, buff_size, "%s %s", buff, RSTRING_PTR(obj));
diff --git a/string.c b/string.c
index e654a3023d..3449ce6b53 100644
--- a/string.c
+++ b/string.c
@@ -2006,9 +2006,9 @@ rb_str_format_m(VALUE str, VALUE arg)
VALUE tmp = rb_check_array_type(arg);
if (!NIL_P(tmp)) {
- VALUE rv = rb_str_format(RARRAY_LENINT(tmp), RARRAY_CONST_PTR(tmp), str);
- RB_GC_GUARD(tmp);
- return rv;
+ VALUE rv = rb_str_format(RARRAY_LENINT(tmp), RARRAY_CONST_PTR(tmp), str);
+ RB_GC_GUARD(tmp);
+ return rv;
}
return rb_str_format(1, &arg, str);
}
diff --git a/variable.c b/variable.c
index 1c23173d6c..ee8807c838 100644
--- a/variable.c
+++ b/variable.c
@@ -1379,7 +1379,7 @@ rb_ivar_set(VALUE obj, ID id, VALUE val)
}
}
RB_OBJ_WRITE(obj, &ROBJECT_IVPTR(obj)[ivup.index], val);
- break;
+ break;
case T_CLASS:
case T_MODULE:
if (!RCLASS_IV_TBL(obj)) RCLASS_IV_TBL(obj) = st_init_numtable();
diff --git a/vm_args.c b/vm_args.c
index 4e989574ea..f9f74b5b0b 100644
--- a/vm_args.c
+++ b/vm_args.c
@@ -335,7 +335,7 @@ args_setup_opt_parameters(struct args_info *args, int opt_max, VALUE *locals)
if (args->rest) {
int len = RARRAY_LENINT(args->rest);
- const VALUE *argv = RARRAY_CONST_PTR(args->rest);
+ const VALUE *argv = RARRAY_CONST_PTR(args->rest);
for (; i<opt_max && args->rest_index < len; i++, args->rest_index++) {
locals[i] = argv[args->rest_index];
@@ -785,7 +785,7 @@ vm_caller_setup_arg_splat(rb_control_frame_t *cfp, struct rb_calling_info *calli
cfp->sp--;
if (!NIL_P(ary)) {
- const VALUE *ptr = RARRAY_CONST_PTR(ary);
+ const VALUE *ptr = RARRAY_CONST_PTR(ary);
long len = RARRAY_LEN(ary), i;
CHECK_VM_STACK_OVERFLOW(cfp, len);
diff --git a/vm_insnhelper.c b/vm_insnhelper.c
index e162e9d32f..d16c318f2c 100644
--- a/vm_insnhelper.c
+++ b/vm_insnhelper.c
@@ -1265,7 +1265,7 @@ vm_expandarray(VALUE *sp, VALUE ary, rb_num_t num, int flag)
len = 1;
}
else {
- ptr = RARRAY_CONST_PTR(ary);
+ ptr = RARRAY_CONST_PTR(ary);
len = (rb_num_t)RARRAY_LEN(ary);
}