author    usa <usa@b2dd03c8-39d4-4d8f-98ff-823fe69b080e>    2013-06-25 12:56:07 +0000
committer usa <usa@b2dd03c8-39d4-4d8f-98ff-823fe69b080e>    2013-06-25 12:56:07 +0000
commit    301823549e9efe655f8956392c2a372b9d7b72c1 (patch)
tree      d44c68463d89fd561b40862953644e7b0ddbaee3
parent    d07c6d3acf7c67135362ee15f6fbda4117c325d8 (diff)
merge revision(s) 41325: [Backport #8554]

    * gc.c: Fixup around GC by MALLOC.
      Add allocate size to malloc_increase before GC
      for updating limit in after_gc_sweep.
      Reset malloc_increase into garbage_collect()
      for preventing GC again soon.
      this backport patch is written by nari.

git-svn-id: svn+ssh://ci.ruby-lang.org/ruby/branches/ruby_1_9_3@41628 b2dd03c8-39d4-4d8f-98ff-823fe69b080e
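The shape of the fix: the malloc_increase += size bookkeeping moves ahead of the GC trigger check, so the collector (and after_gc_sweep's malloc_limit update) sees the allocation that tripped the threshold; a second counter, malloc_increase2, preserves that count across the reset inside the collector. Previously the size was only added in vm_malloc_fixup, after the GC had run, so after_gc_sweep under-counted when growing malloc_limit and a second GC could fire almost immediately. A self-contained sketch of the accounting pattern, assuming a simplified model of gc.c (the real limit-growth formula scales by live-object ratio and is elided here):

    /* Simplified model of the two-counter malloc accounting in this
     * patch; an illustration, not Ruby's actual gc.c.  "increase"
     * counts bytes malloc'ed since the last GC; "increase2" preserves
     * that count across the sweep. */
    #include <stdio.h>
    #include <stddef.h>

    static size_t malloc_limit = 8 * 1024 * 1024;  /* stand-in for initial_malloc_limit */
    static size_t malloc_increase;
    static size_t malloc_increase2;

    static void before_gc_sweep(void)
    {
        /* stash the pending count; gc.c does this with
         * ATOMIC_SIZE_EXCHANGE(malloc_increase, 0) */
        malloc_increase2 += malloc_increase;
        malloc_increase = 0;
    }

    static void after_gc_sweep(void)
    {
        /* compare the combined total, as the patched gc.c does;
         * the growth formula below is a placeholder */
        if (malloc_increase + malloc_increase2 > malloc_limit)
            malloc_limit += (malloc_increase + malloc_increase2 - malloc_limit) / 2;
        malloc_increase = 0;
        malloc_increase2 = 0;
    }

    static void garbage_collect(void)
    {
        before_gc_sweep();
        /* ... mark and sweep ... */
        after_gc_sweep();
    }

    /* patched ordering: count the request *before* deciding to GC,
     * so the sweep sees the allocation that triggered it */
    static void track_malloc(size_t size)
    {
        malloc_increase += size;
        if (malloc_increase > malloc_limit)
            garbage_collect();
    }

    int main(void)
    {
        for (int i = 0; i < 8; i++)
            track_malloc(3u * 1024 * 1024);
        printf("malloc_limit after GCs: %zu bytes\n", malloc_limit);
        return 0;
    }

With this ordering, each collection's limit update is fed the full allocation volume, which is exactly what prevents the "GC again soon" behavior the commit message describes.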
-rw-r--r--  ChangeLog  |  9 +++++++++
-rw-r--r--  gc.c       | 14 +++++++++-----
-rw-r--r--  version.h  |  2 +-
3 files changed, 19 insertions(+), 6 deletions(-)
diff --git a/ChangeLog b/ChangeLog
index 91023f06dd..e69e15bc96 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,12 @@
+Tue Jun 25 21:54:17 2013 NAKAMURA Usaku <usa@ruby-lang.org>
+
+ * gc.c: Fixup around GC by MALLOC.
+ Add allocate size to malloc_increase before GC
+ for updating limit in after_gc_sweep.
+ Reset malloc_increase into garbage_collect()
+ for preventing GC again soon.
+ this backport patch is written by nari.
+
Wed Jun 5 12:38:14 2013 Nobuyoshi Nakada <nobu@ruby-lang.org>
* win32/win32.c (setup_overlapped, finish_overlapped): extract from
diff --git a/gc.c b/gc.c
index 0f84e22966..21c9173a50 100644
--- a/gc.c
+++ b/gc.c
@@ -236,7 +236,7 @@ getrusage_time(void)
#define GC_PROF_SET_MALLOC_INFO do {\
if (objspace->profile.run) {\
gc_profile_record *record = &objspace->profile.record[objspace->profile.count];\
- record->allocate_increase = malloc_increase;\
+ record->allocate_increase = malloc_increase + malloc_increase2;\
record->allocate_limit = malloc_limit; \
}\
} while(0)
@@ -352,6 +352,7 @@ typedef struct rb_objspace {
struct {
size_t limit;
size_t increase;
+ size_t increase2;
#if CALC_EXACT_MALLOC_SIZE
size_t allocated_size;
size_t allocations;
@@ -405,6 +406,7 @@ int *ruby_initial_gc_stress_ptr = &rb_objspace.gc_stress;
#endif
#define malloc_limit objspace->malloc_params.limit
#define malloc_increase objspace->malloc_params.increase
+#define malloc_increase2 objspace->malloc_params.increase2
#define heaps objspace->heap.ptr
#define heaps_length objspace->heap.length
#define heaps_used objspace->heap.used
@@ -756,8 +758,9 @@ vm_malloc_prepare(rb_objspace_t *objspace, size_t size)
size += sizeof(size_t);
#endif
+ malloc_increase += size;
if ((ruby_gc_stress && !ruby_disable_gc_stress) ||
- (malloc_increase+size) > malloc_limit) {
+ malloc_increase > malloc_limit) {
garbage_collect_with_gvl(objspace);
}
@@ -767,8 +770,6 @@ vm_malloc_prepare(rb_objspace_t *objspace, size_t size)
static inline void *
vm_malloc_fixup(rb_objspace_t *objspace, void *mem, size_t size)
{
- malloc_increase += size;
-
#if CALC_EXACT_MALLOC_SIZE
objspace->malloc_params.allocated_size += size;
objspace->malloc_params.allocations++;
@@ -2211,6 +2212,8 @@ before_gc_sweep(rb_objspace_t *objspace)
objspace->heap.sweep_slots = heaps;
objspace->heap.free_num = 0;
+ malloc_increase2 += ATOMIC_SIZE_EXCHANGE(malloc_increase,0);
+
/* sweep unlinked method entries */
if (GET_VM()->unlinked_method_entry_list) {
rb_sweep_method_entry(GET_VM());
@@ -2227,11 +2230,12 @@ after_gc_sweep(rb_objspace_t *objspace)
heaps_increment(objspace);
}
- if (malloc_increase > malloc_limit) {
+ if ((malloc_increase + malloc_increase2) > malloc_limit) {
malloc_limit += (size_t)((malloc_increase - malloc_limit) * (double)objspace->heap.live_num / (heaps_used * HEAP_OBJ_LIMIT));
if (malloc_limit < initial_malloc_limit) malloc_limit = initial_malloc_limit;
}
malloc_increase = 0;
+ malloc_increase2 = 0;
free_unused_heaps(objspace);
}
diff --git a/version.h b/version.h
index ad3b047451..9ec6795602 100644
--- a/version.h
+++ b/version.h
@@ -1,5 +1,5 @@
#define RUBY_VERSION "1.9.3"
-#define RUBY_PATCHLEVEL 433
+#define RUBY_PATCHLEVEL 434
#define RUBY_RELEASE_DATE "2013-06-25"
#define RUBY_RELEASE_YEAR 2013