Index: src/common/space_tuner.cpp
===================================================================
--- src/common/space_tuner.cpp (revision 547478)
+++ src/common/space_tuner.cpp (working copy)
@@ -52,14 +52,15 @@
   /*For_statistic wasted memory*/
   POINTER_SIZE_INT curr_used_los = lspace->surviving_size + lspace->alloced_size;
-  POINTER_SIZE_INT curr_wast_los = lspace->committed_heap_size - curr_used_los;
+  POINTER_SIZE_INT curr_wast_los = 0;
+  if(gc->cause != GC_CAUSE_LOS_IS_FULL) curr_wast_los = lspace->committed_heap_size - curr_used_los;
   tuner->wast_los += curr_wast_los;
+
   POINTER_SIZE_INT curr_used_mos = mspace->surviving_size + mspace->alloced_size;
   float expected_mos_ratio = mspace_get_expected_threshold_ratio((Mspace*)mspace);
   POINTER_SIZE_INT expected_mos = (POINTER_SIZE_INT)((mspace->committed_heap_size + fspace->committed_heap_size) * expected_mos_ratio);
   POINTER_SIZE_INT curr_wast_mos = 0;
-  if(expected_mos > curr_used_mos)
-    curr_wast_mos = expected_mos - curr_used_mos;
+  if(expected_mos > curr_used_mos) curr_wast_mos = expected_mos - curr_used_mos;
   tuner->wast_mos += curr_wast_mos;
   tuner->current_dw = ABS_DIFF(tuner->wast_mos, tuner->wast_los);
@@ -117,6 +118,7 @@
   {
     tuner->kind = TRANS_FROM_MOS_TO_LOS;
     tuner->tuning_size = new_free_los_sz - los_expect_free_sz;
+    lspace->move_object = 0;
   }
   /*LOS_Shrink:*/
   else if(new_free_los_sz < los_expect_free_sz)
@@ -152,7 +154,8 @@
   if(tuner->force_tune){
     if(tuner->kind != TRANS_FROM_MOS_TO_LOS){
       tuner->kind = TRANS_FROM_MOS_TO_LOS;
-      tuner->reverse = 1;
+      tuner->tuning_size = 0;
+      tuner->reverse_1 = 1;
     }
   }
@@ -164,37 +167,188 @@
 static POINTER_SIZE_INT non_los_live_obj_size;
 static POINTER_SIZE_INT los_live_obj_size;
-static void gc_compute_live_object_size_after_marking(GC* gc)
+static void gc_compute_live_object_size_after_marking(GC* gc, POINTER_SIZE_INT non_los_size)
 {
   non_los_live_obj_size = 0;
   los_live_obj_size = 0;
+
+  POINTER_SIZE_INT segment_live_size[NORMAL_SIZE_SEGMENT_NUM];
+  memset(segment_live_size, 0, sizeof(POINTER_SIZE_INT) * NORMAL_SIZE_SEGMENT_NUM);
 
   unsigned int collector_num = gc->num_active_collectors;
   for(unsigned int i = collector_num; i--;){
     Collector *collector = gc->collectors[i];
     non_los_live_obj_size += collector->non_los_live_obj_size;
     los_live_obj_size += collector->los_live_obj_size;
+    for(unsigned int j = NORMAL_SIZE_SEGMENT_NUM; j--;){
+      segment_live_size[j] += collector->segment_live_size[j];
+    }
+    memset(collector->segment_live_size, 0, sizeof(POINTER_SIZE_INT) * NORMAL_SIZE_SEGMENT_NUM);
   }
 
-  POINTER_SIZE_INT additional_non_los_size = ((collector_num * 2) << GC_BLOCK_SHIFT_COUNT) + (non_los_live_obj_size >> GC_BLOCK_SHIFT_COUNT) * (GC_OBJ_SIZE_THRESHOLD/4);
-  non_los_live_obj_size = round_up_to_size(non_los_live_obj_size + additional_non_los_size, GC_BLOCK_SIZE_BYTES);
+  //POINTER_SIZE_INT additional_non_los_size = ((collector_num * 2) << GC_BLOCK_SHIFT_COUNT) + (non_los_live_obj_size >> GC_BLOCK_SHIFT_COUNT) * (GC_OBJ_SIZE_THRESHOLD/4);
+  double additional_non_los_size = 0;
+  for(unsigned int i = NORMAL_SIZE_SEGMENT_NUM; i--;){
+    additional_non_los_size += (double)segment_live_size[i] * SEGMENT_INDEX_TO_SIZE(i) / non_los_live_obj_size;
+  }
+  additional_non_los_size *= 1.2; // pad by 20% in case some blocks are worse than the average case
+  POINTER_SIZE_INT non_los_live_block = non_los_live_obj_size / (GC_BLOCK_BODY_SIZE_BYTES - (POINTER_SIZE_INT)additional_non_los_size);
+  additional_non_los_size *= non_los_live_block + 1;
+  additional_non_los_size += collector_num << (GC_BLOCK_SHIFT_COUNT + 1);
+  non_los_live_obj_size = round_up_to_size(non_los_live_obj_size + (POINTER_SIZE_INT)additional_non_los_size, GC_BLOCK_SIZE_BYTES);
+  if(non_los_live_obj_size > non_los_size)
+    non_los_live_obj_size = non_los_size;
 
   los_live_obj_size += ((collector_num << 2) << GC_BLOCK_SHIFT_COUNT);
   los_live_obj_size = round_up_to_size(los_live_obj_size, GC_BLOCK_SIZE_BYTES);
 }
 
-void gc_compute_space_tune_size_after_marking(GC *gc)
+static void compute_space_tune_size_for_force_tune(GC *gc, POINTER_SIZE_INT max_tune_for_min_non_los)
 {
-  gc_compute_live_object_size_after_marking(gc);
+  Space_Tuner* tuner = gc->tuner;
+  Lspace *lspace = (Lspace*)gc_get_los((GC_Gen*)gc);
+  Blocked_Space* fspace = (Blocked_Space*)gc_get_nos((GC_Gen*)gc);
+  POINTER_SIZE_INT max_tuning_size = 0;
+  POINTER_SIZE_INT failure_size = lspace->failure_size;
+  POINTER_SIZE_INT lspace_free_size = ( (lspace->committed_heap_size > los_live_obj_size) ? (lspace->committed_heap_size - los_live_obj_size) : (0) );
+  //debug_adjust
+  assert(!(lspace_free_size % KB));
+  assert(!(failure_size % KB));
+
+  if(lspace_free_size >= failure_size){
+    tuner->tuning_size = 0;
+    tuner->kind = TRANS_NOTHING;
+    lspace->move_object = 1;
+  }else{
+    tuner->tuning_size = failure_size - lspace_free_size;
+
+    /*We should assure that the tuning size is no more than the free space of the non_los area*/
+    if( gc->committed_heap_size > lspace->committed_heap_size + non_los_live_obj_size )
+      max_tuning_size = gc->committed_heap_size - lspace->committed_heap_size - non_los_live_obj_size;
+
+    if(max_tuning_size > max_tune_for_min_non_los)
+      max_tuning_size = max_tune_for_min_non_los;
+
+    /*Round up to satisfy the LOS alloc demand.*/
+    tuner->tuning_size = round_up_to_size(tuner->tuning_size, GC_BLOCK_SIZE_BYTES);
+    max_tuning_size = round_down_to_size(max_tuning_size, GC_BLOCK_SIZE_BYTES);
+
+    /*If the tuning size is too large, we do nothing and wait for the JVM's OOM.*/
+    /*Fixme: if the heap size is not mx, we can extend the whole heap size*/
+    if(tuner->tuning_size > max_tuning_size){
+      tuner->tuning_size = round_up_to_size(tuner->tuning_size, SPACE_ALLOC_UNIT);
+      max_tuning_size = round_down_to_size(max_tuning_size, SPACE_ALLOC_UNIT);
+      //debug_adjust
+      assert(max_heap_size_bytes >= gc->committed_heap_size);
+      POINTER_SIZE_INT extend_heap_size = 0;
+      POINTER_SIZE_INT potential_max_tuning_size = max_tuning_size + max_heap_size_bytes - gc->committed_heap_size;
+      potential_max_tuning_size -= LOS_HEAD_RESERVE_FOR_HEAP_NULL;
+
+      //debug_adjust
+      assert(!(potential_max_tuning_size % SPACE_ALLOC_UNIT));
+      if(tuner->tuning_size > potential_max_tuning_size){
+        tuner->tuning_size = 0;
+        tuner->kind = TRANS_NOTHING;
+        lspace->move_object = 0;
+      }else{
+        /*We have tuner->tuning_size > max_tuning_size up there.*/
+        extend_heap_size = tuner->tuning_size - max_tuning_size;
+        blocked_space_extend(fspace, (unsigned int)extend_heap_size);
+        gc->committed_heap_size += extend_heap_size;
+        tuner->kind = TRANS_FROM_MOS_TO_LOS;
+        lspace->move_object = 1;
+      }
+    }else{
+      tuner->kind = TRANS_FROM_MOS_TO_LOS;
+      lspace->move_object = 1;
+    }
+  }
+
+  return;
+}
+
+static void make_sure_tuning_size(GC* gc)
+{
+  Space_Tuner* tuner = gc->tuner;
+  Lspace *lspace = (Lspace*)gc_get_los((GC_Gen*)gc);
   Blocked_Space* mspace = (Blocked_Space*)gc_get_mos((GC_Gen*)gc);
   Blocked_Space* fspace = (Blocked_Space*)gc_get_nos((GC_Gen*)gc);
+
+  POINTER_SIZE_INT los_free_sz = ((lspace->committed_heap_size > los_live_obj_size) ?
+                                  (lspace->committed_heap_size - los_live_obj_size) : 0);
+  float mos_expect_threshold_ratio = mspace_get_expected_threshold_ratio((Mspace*)mspace);
+  POINTER_SIZE_INT mos_expect_threshold = (POINTER_SIZE_INT)((mspace->committed_heap_size + fspace->committed_heap_size) * mos_expect_threshold_ratio);
+  POINTER_SIZE_INT mos_free_sz = ((mos_expect_threshold > non_los_live_obj_size) ?
+                                  (mos_expect_threshold - non_los_live_obj_size) : 0);
+  POINTER_SIZE_INT total_free_sz = los_free_sz + mos_free_sz;
+
+  float new_los_ratio = (float)tuner->speed_los / (float)(tuner->speed_los + tuner->speed_mos);
+  POINTER_SIZE_INT new_free_los_sz = (POINTER_SIZE_INT)((float)total_free_sz * new_los_ratio);
+
+  /*LOS_Extend:*/
+  if( new_free_los_sz > los_free_sz )
+  {
+    tuner->kind = TRANS_FROM_MOS_TO_LOS;
+    tuner->tuning_size = new_free_los_sz - los_free_sz;
+    lspace->move_object = 0; //This is necessary, because the flag might be set by gc_compute_space_tune_size_before_marking.
+  }
+  /*LOS_Shrink:*/
+  else if(new_free_los_sz < los_free_sz)
+  {
+    tuner->kind = TRANS_FROM_LOS_TO_MOS;
+    tuner->tuning_size = los_free_sz - new_free_los_sz;
+    lspace->move_object = 1;
+  }
+  /*Nothing*/
+  else
+  {
+    tuner->tuning_size = 0;
+    tuner->kind = TRANS_NOTHING; //This is necessary, because the original value of kind might not be NOTHING.
+  }
+
+  /*If not force tune, and the tuning size is too small, the tuner will not take effect.*/
+  if( (!tuner->force_tune) && (tuner->tuning_size < tuner->min_tuning_size) ){
+    tuner->kind = TRANS_NOTHING;
+    tuner->tuning_size = 0;
+    lspace->move_object = 0;
+  }
+
+  /*If los or non-los is already at its smallest size, there is no need to tune anymore.
+   *But we give "force tune" a chance to extend the whole heap size down there.
+   */
+  if(((lspace->committed_heap_size <= min_los_size_bytes) && (tuner->kind == TRANS_FROM_LOS_TO_MOS)) ||
+     ((fspace->committed_heap_size + mspace->committed_heap_size <= min_none_los_size_bytes) && (tuner->kind == TRANS_FROM_MOS_TO_LOS))){
+    assert((lspace->committed_heap_size == min_los_size_bytes) || (fspace->committed_heap_size + mspace->committed_heap_size == min_none_los_size_bytes));
+    tuner->kind = TRANS_NOTHING;
+    tuner->tuning_size = 0;
+    lspace->move_object = 0;
+  }
+
+  if(tuner->force_tune){
+    if(tuner->kind != TRANS_FROM_MOS_TO_LOS){
+      tuner->kind = TRANS_FROM_MOS_TO_LOS;
+      tuner->reverse_2 = 1;
+    }
+  }
+
+  return;
+}
+
+void gc_compute_space_tune_size_after_marking(GC *gc)
+{
+  Blocked_Space* mspace = (Blocked_Space*)gc_get_mos((GC_Gen*)gc);
+  Blocked_Space* fspace = (Blocked_Space*)gc_get_nos((GC_Gen*)gc);
   Lspace *lspace = (Lspace*)gc_get_los((GC_Gen*)gc);
   Space_Tuner* tuner = gc->tuner;
-
+  POINTER_SIZE_INT max_tuning_size = 0;
   POINTER_SIZE_INT non_los_size = mspace->committed_heap_size + fspace->committed_heap_size;
+
+  gc_compute_live_object_size_after_marking(gc, non_los_size);
+
+  make_sure_tuning_size(gc);
+
   /*We should assure that the non_los area is no less than min_none_los_size_bytes*/
   POINTER_SIZE_INT max_tune_for_min_non_los = 0;
   if(non_los_size > min_none_los_size_bytes)
@@ -204,74 +358,8 @@
   assert(lspace->committed_heap_size >= min_los_size_bytes);
   max_tune_for_min_los = lspace->committed_heap_size - min_los_size_bytes;
 
-  /*If the tuning strategy give a bigger tuning_size than failure size, we just follow the strategy and set noforce.*/
-  Boolean doforce = TRUE;
-  POINTER_SIZE_INT failure_size = lspace_get_failure_size((Lspace*)lspace);
-  if( (tuner->kind == TRANS_FROM_MOS_TO_LOS) && (!tuner->reverse) && (tuner->tuning_size > failure_size) )
-    doforce = FALSE;
-
-  /*If force tune*/
-  if( (tuner->force_tune) && (doforce) ){
-    POINTER_SIZE_INT lspace_free_size =
-      ( (lspace->committed_heap_size > los_live_obj_size) ? (lspace->committed_heap_size - los_live_obj_size) : (0) );
-    //debug_adjust
-    assert(!(lspace_free_size % KB));
-    assert(!(failure_size % KB));
-
-    if(lspace_free_size >= failure_size){
-      tuner->tuning_size = 0;
-      tuner->kind = TRANS_NOTHING;
-      lspace->move_object = 1;
-      return;
-    }else{
-      tuner->tuning_size = failure_size -lspace_free_size;
-
-      /*We should assure that the tuning size is no more than the free space of non_los area*/
-      if( gc->committed_heap_size > lspace->committed_heap_size + non_los_live_obj_size )
-        max_tuning_size = gc->committed_heap_size - lspace->committed_heap_size - non_los_live_obj_size;
-
-      if(max_tuning_size > max_tune_for_min_non_los)
-        max_tuning_size = max_tune_for_min_non_los;
-
-      /*Round up to satisfy LOS alloc demand.*/
-      tuner->tuning_size = round_up_to_size(tuner->tuning_size, GC_BLOCK_SIZE_BYTES);
-      max_tuning_size = round_down_to_size(max_tuning_size, GC_BLOCK_SIZE_BYTES);
-
-      /*If the tuning size is too large, we did nothing and wait for the OOM of JVM*/
-      /*Fixme: if the heap size is not mx, we can extend the whole heap size*/
-      if(tuner->tuning_size > max_tuning_size){
-        tuner->tuning_size = round_up_to_size(tuner->tuning_size, SPACE_ALLOC_UNIT);
-        max_tuning_size = round_down_to_size(max_tuning_size, SPACE_ALLOC_UNIT);
-        //debug_adjust
-        assert(max_heap_size_bytes >= gc->committed_heap_size);
-        POINTER_SIZE_INT extend_heap_size = 0;
-        POINTER_SIZE_INT potential_max_tuning_size = max_tuning_size + max_heap_size_bytes - gc->committed_heap_size;
-        potential_max_tuning_size -= LOS_HEAD_RESERVE_FOR_HEAP_NULL;
-
-        //debug_adjust
-        assert(!(potential_max_tuning_size % SPACE_ALLOC_UNIT));
-        if(tuner->tuning_size > potential_max_tuning_size){
-          tuner->tuning_size = 0;
-          tuner->kind = TRANS_NOTHING;
-          lspace->move_object = 0;
-        }else{
-          //We have tuner->tuning_size > max_tuning_size up there.
-          extend_heap_size = tuner->tuning_size - max_tuning_size;
-          blocked_space_extend(fspace, (unsigned int)extend_heap_size);
-          gc->committed_heap_size += extend_heap_size;
-          tuner->kind = TRANS_FROM_MOS_TO_LOS;
-          lspace->move_object = 1;
-        }
-      }
-      else
-      {
-        tuner->kind = TRANS_FROM_MOS_TO_LOS;
-        lspace->move_object = 1;
-      }
-    }
-  }
-  /*No force tune, LOS_Extend:*/
-  else if(tuner->kind == TRANS_FROM_MOS_TO_LOS)
+  /*Not force tune, LOS_Extend:*/
+  if(tuner->kind == TRANS_FROM_MOS_TO_LOS)
   {
     if (gc->committed_heap_size > lspace->committed_heap_size + non_los_live_obj_size){
       max_tuning_size = gc->committed_heap_size - lspace->committed_heap_size - non_los_live_obj_size;
@@ -281,11 +369,18 @@
       tuner->tuning_size = max_tuning_size;
       /*Round down so as not to break max_tuning_size*/
       tuner->tuning_size = round_down_to_size(tuner->tuning_size, GC_BLOCK_SIZE_BYTES);
+      if(tuner->tuning_size == 0){
+        //If the tuning size is zero, reset kind to NOTHING, in case gc_init_block_for_collectors relinks the block list.
+        tuner->kind = TRANS_NOTHING;
+        lspace->move_object = 0;
+      }
     }else{
       tuner->tuning_size = 0;
+      tuner->kind = TRANS_NOTHING;
+      lspace->move_object = 0;
     }
   }
-  /*No force tune, LOS Shrink*/
+  /*Not force tune, LOS Shrink*/
   else
   {
     if(lspace->committed_heap_size > los_live_obj_size){
@@ -296,15 +391,29 @@
       tuner->tuning_size = max_tuning_size;
       /*Round down so as not to break max_tuning_size*/
      tuner->tuning_size = round_down_to_size(tuner->tuning_size, GC_BLOCK_SIZE_BYTES);
+      if(tuner->tuning_size == 0){
+        tuner->kind = TRANS_NOTHING;
+        lspace->move_object = 0;
+      }
    }else{
      /* this is possible because of the reservation in gc_compute_live_object_size_after_marking*/
      tuner->tuning_size = 0;
+      tuner->kind = TRANS_NOTHING;
+      lspace->move_object = 0;
    }
  }
-  if(tuner->tuning_size == 0){
-    tuner->kind = TRANS_NOTHING;
-    lspace->move_object = 0;
+
+  /*If the tuning strategy gives a bigger tuning_size than the failure size, we just follow the strategy and set noforce.*/
+  Boolean doforce = TRUE;
+  POINTER_SIZE_INT failure_size = lspace_get_failure_size((Lspace*)lspace);
+  if( (tuner->kind == TRANS_FROM_MOS_TO_LOS) && (!tuner->reverse_2) && (tuner->tuning_size > failure_size) )
+    doforce = FALSE;
+
+  /*If force tune*/
+  if( (tuner->force_tune) && (doforce) ){
+    compute_space_tune_size_for_force_tune(gc, max_tune_for_min_non_los);
  }
+
  return;
}

@@ -335,7 +444,8 @@
    tuner->wast_mos = 0;
  }
  tuner->kind = TRANS_NOTHING;
-  tuner->reverse = 0;
+  tuner->reverse_1 = 0;
+  tuner->reverse_2 = 0;
 }

 return;
@@ -410,4 +520,3 @@
 }
 
-

Index: src/common/space_tuner.h
===================================================================
--- src/common/space_tuner.h (revision 547478)
+++ src/common/space_tuner.h (working copy)
@@ -38,8 +38,10 @@
 
 typedef struct Space_Tuner{
   Transform_Kind kind;
-  /*This flag is set if the los tuning status changes in the process of tuning*/
-  Boolean reverse;
+  /*Fixme: These flags are set if the los tuning status changes in the process of tuning; remove one of them.*/
+  Boolean reverse_1;
+  Boolean reverse_2;
+
   POINTER_SIZE_INT tuning_size;
   /*Used for LOS_Shrink*/
   Block_Header* interim_blocks;
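Note: the reworked additional_non_los_size computation above replaces the old flat guess (a quarter of GC_OBJ_SIZE_THRESHOLD per live block) with a measured estimate: the size-weighted average live object size approximates the slack wasted at the tail of each block, padded by 20% and multiplied by the expected number of live blocks. Below is a minimal standalone sketch of the same arithmetic, in plain C with assumed constants (32KB block body, 4KB large-object threshold); all names are illustrative, not the GC's.

  #include <stdio.h>

  #define SEG_GRANULARITY_BITS 8
  #define SEG_NUM (4096 / (1 << SEG_GRANULARITY_BITS))  /* assumes a 4KB threshold */
  #define SEG_INDEX_TO_SIZE(i) (((i) + 1) << SEG_GRANULARITY_BITS)
  #define BLOCK_BODY_SIZE (32 * 1024)                   /* assumed block body size */

  /* Estimate the reservation on top of the live non-LOS bytes: the expected
   * tail waste per block is the size-weighted average object size, padded by
   * 20%, times the estimated number of live blocks. live_size must be > 0. */
  static size_t estimate_reservation(size_t live_size, const size_t seg_live[SEG_NUM])
  {
    double avg_waste = 0;
    for (unsigned i = 0; i < SEG_NUM; i++)
      avg_waste += (double)seg_live[i] * SEG_INDEX_TO_SIZE(i) / live_size;
    avg_waste *= 1.2;  /* safety margin over the average case */
    size_t blocks = live_size / (BLOCK_BODY_SIZE - (size_t)avg_waste);
    return (size_t)(avg_waste * (blocks + 1));
  }

  int main(void)
  {
    size_t seg_live[SEG_NUM] = {0};
    seg_live[0] = 6 * 1024 * 1024;  /* 6MB of objects <= 256 bytes */
    seg_live[3] = 2 * 1024 * 1024;  /* 2MB of objects <= 1KB       */
    printf("reserve ~%zu bytes\n", estimate_reservation(8 * 1024 * 1024, seg_live));
    return 0;
  }

Under these assumed constants the sample histogram yields a reservation near 140KB, where the old formula's flat guess for the same 8MB of live data would be 256KB (ignoring the per-collector terms in both cases).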
Index: src/finalizer_weakref/finalizer_weakref.cpp
===================================================================
--- src/finalizer_weakref/finalizer_weakref.cpp (revision 547478)
+++ src/finalizer_weakref/finalizer_weakref.cpp (working copy)
@@ -184,10 +184,12 @@
 #ifdef USE_32BITS_HASHCODE
       obj_size += (hashcode_is_set(p_obj))?GC_OBJECT_ALIGNMENT:0;
 #endif
-      if(!obj_belongs_to_space(p_obj, gc_get_los((GC_Gen*)gc)))
+      if(!obj_belongs_to_space(p_obj, gc_get_los((GC_Gen*)gc))){
         collector->non_los_live_obj_size += obj_size;
-      else
+        collector->segment_live_size[SIZE_TO_SEGMENT_INDEX(obj_size)] += obj_size;
+      } else {
         collector->los_live_obj_size += round_up_to_size(obj_size, KB);
+      }
     }else{
       trace_object = trace_obj_in_normal_marking;
     }

Index: src/gen/gen.cpp
===================================================================
--- src/gen/gen.cpp (revision 547478)
+++ src/gen/gen.cpp (working copy)
@@ -107,7 +107,7 @@
   }else{
     los_mos_size = min_heap_size;
-    mos_reserve_size = los_mos_size - los_size;
+    mos_reserve_size = max_heap_size_bytes - min_los_size_bytes;
     nos_commit_size = (POINTER_SIZE_INT)(((float)(min_heap_size - los_size))/(1.0f + gc_gen->survive_ratio));
     nos_reserve_size = mos_reserve_size;
   }
@@ -192,8 +192,12 @@
 #endif /* STATIC_NOS_MAPPING else */
 
   HEAP_NULL = (POINTER_SIZE_INT)reserved_base;
-
+
+#ifdef STATIC_NOS_MAPPING
   gc_gen->reserved_heap_size = los_size + nos_reserve_size + mos_reserve_size;
+#else
+  gc_gen->reserved_heap_size = max_heap_size_bytes;
+#endif
   gc_gen->heap_start = reserved_base;
   gc_gen->heap_end = reserved_end;
   gc_gen->blocks = (Block*)reserved_base;

Index: src/gen/gen_adapt.cpp
===================================================================
--- src/gen/gen_adapt.cpp (revision 547478)
+++ src/gen/gen_adapt.cpp (working copy)
@@ -246,7 +246,11 @@
     free_size_threshold = (POINTER_SIZE_INT)(free_ratio_threshold * (SMax - GC_MOS_MIN_EXTRA_REMAIN_SIZE ) + GC_MOS_MIN_EXTRA_REMAIN_SIZE );
   else
     free_size_threshold = (POINTER_SIZE_INT)(free_ratio_threshold * SMax);
-
+
+/*Fixme: if the total free size is less than the threshold, this trigger point might already be too late!
+ *Try the backup condition below to see whether it works better for specjbb.
+ */
+//if ((mos_free_size + nos_free_size + minor_surviving_size) < free_size_threshold) gc->force_major_collect = TRUE;
   if ((mos_free_size + nos_free_size)< free_size_threshold) gc->force_major_collect = TRUE;
 
   survive_ratio = (float)minor_surviving_size/(float)space_committed_size((Space*)fspace);
@@ -292,9 +296,10 @@
 #else
   POINTER_SIZE_INT curr_heap_commit_end =
     (POINTER_SIZE_INT)gc->heap_start + LOS_HEAD_RESERVE_FOR_HEAP_NULL + gc->committed_heap_size;
+  assert(curr_heap_commit_end > (POINTER_SIZE_INT)mspace->heap_start);
   total_size = curr_heap_commit_end - (POINTER_SIZE_INT)mspace->heap_start;
 #endif
-
+  assert(total_size >= used_mos_size);
   POINTER_SIZE_INT total_free = total_size - used_mos_size;
   /*If total free is smaller than one block, there is no room for us to adjust*/
   if(total_free < GC_BLOCK_SIZE_BYTES) return FALSE;

Index: src/mark_compact/los_extention_mark_scan.cpp
===================================================================
--- src/mark_compact/los_extention_mark_scan.cpp (revision 547478)
+++ src/mark_compact/los_extention_mark_scan.cpp (working copy)
@@ -30,10 +30,12 @@
 #ifdef USE_32BITS_HASHCODE
     obj_size += (hashcode_is_set(p_obj))?GC_OBJECT_ALIGNMENT:0;
 #endif
-    if(!obj_belongs_to_space(p_obj, gc_get_los((GC_Gen*)collector->gc)))
+    if(!obj_belongs_to_space(p_obj, gc_get_los((GC_Gen*)collector->gc))){
       collector->non_los_live_obj_size += obj_size;
-    else
+      collector->segment_live_size[SIZE_TO_SEGMENT_INDEX(obj_size)] += obj_size;
+    } else {
       collector->los_live_obj_size += round_up_to_size(obj_size, KB);
+    }
   }
 
   return;
@@ -146,10 +148,12 @@
 #ifdef USE_32BITS_HASHCODE
     obj_size += (hashcode_is_set(p_obj))?GC_OBJECT_ALIGNMENT:0;
 #endif
-    if(!obj_belongs_to_space(p_obj, gc_get_los((GC_Gen*)gc)))
+    if(!obj_belongs_to_space(p_obj, gc_get_los((GC_Gen*)gc))){
       collector->non_los_live_obj_size += obj_size;
-    else
+      collector->segment_live_size[SIZE_TO_SEGMENT_INDEX(obj_size)] += obj_size;
+    } else {
       collector->los_live_obj_size += round_up_to_size(obj_size, KB);
+    }
   }
 }

Index: src/mark_compact/mspace_collect_compact.cpp
===================================================================
--- src/mark_compact/mspace_collect_compact.cpp (revision 547478)
+++ src/mark_compact/mspace_collect_compact.cpp (working copy)
@@ -39,7 +39,6 @@
   mspace->blocks = &mspace->blocks[tune_blocks];
   mspace->heap_start = mspace->blocks;
   mspace->committed_heap_size -= tune_size;
-  mspace->reserved_heap_size -= tune_size;
   mspace->first_block_idx += tune_blocks;
   mspace->num_managed_blocks -= tune_blocks;
   mspace->num_total_blocks -= tune_blocks;
@@ -340,4 +339,3 @@
 
-
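Note: the three mark-scan hunks above (one in finalizer_weakref.cpp, two in los_extention_mark_scan.cpp) repeat a single accounting pattern per marked object. A sketch of that pattern with stand-in types (not the GC's real Collector):

  #include <stddef.h>

  #define SEG_GRANULARITY_BITS 8
  #define SEG_NUM 16  /* assumes a 4KB large-object threshold */
  #define SIZE_TO_SEG_INDEX(size) \
    ((((size) + (1 << SEG_GRANULARITY_BITS) - 1) >> SEG_GRANULARITY_BITS) - 1)

  typedef struct {
    size_t non_los_live_obj_size;
    size_t los_live_obj_size;
    size_t segment_live_size[SEG_NUM];
  } Collector;

  /* Called once per live object; obj_size must not exceed the threshold
   * when in_los is false. LOS sizes are rounded up to 1KB as in the patch. */
  static void account_live_object(Collector *c, size_t obj_size, int in_los)
  {
    if (!in_los) {
      c->non_los_live_obj_size += obj_size;
      c->segment_live_size[SIZE_TO_SEG_INDEX(obj_size)] += obj_size;
    } else {
      c->los_live_obj_size += (obj_size + 1023) & ~(size_t)1023;
    }
  }

Each collector keeps its own histogram, so the hot marking path needs no atomics; gc_compute_live_object_size_after_marking later sums the per-collector arrays and resets them.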
Index: src/mark_sweep/lspace.cpp
===================================================================
--- src/mark_sweep/lspace.cpp (revision 547478)
+++ src/mark_sweep/lspace.cpp (working copy)
@@ -27,6 +27,7 @@
 
 void gc_set_los(GC_Gen* gc, Space* lspace);
 extern POINTER_SIZE_INT min_los_size_bytes;
+extern POINTER_SIZE_INT min_none_los_size_bytes;
 
 void lspace_initialize(GC* gc, void* start, POINTER_SIZE_INT lspace_size)
 {
   Lspace* lspace = (Lspace*)STD_MALLOC( sizeof(Lspace));
@@ -42,7 +43,7 @@
   min_los_size_bytes -= LOS_HEAD_RESERVE_FOR_HEAP_NULL;
 
   lspace->committed_heap_size = committed_size - LOS_HEAD_RESERVE_FOR_HEAP_NULL;
-  lspace->reserved_heap_size = committed_size - LOS_HEAD_RESERVE_FOR_HEAP_NULL;
+  lspace->reserved_heap_size = gc->reserved_heap_size - min_none_los_size_bytes - LOS_HEAD_RESERVE_FOR_HEAP_NULL;
   lspace->heap_start = (void*)((POINTER_SIZE_INT)reserved_base + LOS_HEAD_RESERVE_FOR_HEAP_NULL);
   lspace->heap_end = (void *)((POINTER_SIZE_INT)reserved_base + committed_size);

Index: src/mark_sweep/lspace_alloc_collect.cpp
===================================================================
--- src/mark_sweep/lspace_alloc_collect.cpp (revision 547478)
+++ src/mark_sweep/lspace_alloc_collect.cpp (working copy)
@@ -338,7 +338,6 @@
       if(trans_size >= GC_OBJ_SIZE_THRESHOLD) free_pool_add_area(lspace->free_pool, trans_fa);
     }
     lspace->committed_heap_size += trans_size;
-    lspace->reserved_heap_size += trans_size;
     break;
   }
   case TRANS_FROM_LOS_TO_MOS:{
@@ -348,7 +347,6 @@
     assert( (POINTER_SIZE_INT)lspace->heap_end - trans_size == (POINTER_SIZE_INT)mos_first_block );
     lspace->heap_end = (void*)mos_first_block;
     lspace->committed_heap_size -= trans_size;
-    lspace->reserved_heap_size -= trans_size;
     /*LOS_Shrink: We don't have to scan lspace to build free pool when slide compact LOS*/
     assert((POINTER_SIZE_INT)lspace->scompact_fa_end > (POINTER_SIZE_INT)lspace->scompact_fa_start + tuner->tuning_size);
     new_fa_size = (POINTER_SIZE_INT)lspace->scompact_fa_end - (POINTER_SIZE_INT)lspace->scompact_fa_start - tuner->tuning_size;

Index: src/thread/collector.h
===================================================================
--- src/thread/collector.h (revision 547478)
+++ src/thread/collector.h (working copy)
@@ -25,6 +25,12 @@
 struct Block_Header;
 struct Stealable_Stack;
 
+#define NORMAL_SIZE_SEGMENT_GRANULARITY_BITS 8
+#define NORMAL_SIZE_SEGMENT_GRANULARITY (1 << NORMAL_SIZE_SEGMENT_GRANULARITY_BITS)
+#define NORMAL_SIZE_SEGMENT_NUM (GC_OBJ_SIZE_THRESHOLD / NORMAL_SIZE_SEGMENT_GRANULARITY)
+#define SIZE_TO_SEGMENT_INDEX(size) ((((size) + NORMAL_SIZE_SEGMENT_GRANULARITY-1) >> NORMAL_SIZE_SEGMENT_GRANULARITY_BITS) - 1)
+#define SEGMENT_INDEX_TO_SIZE(index) (((index)+1) << NORMAL_SIZE_SEGMENT_GRANULARITY_BITS)
+
 typedef struct Collector{
   /* <-- first couple of fields are overloaded as Allocator */
   void *free;
@@ -61,6 +67,7 @@
 
   POINTER_SIZE_INT non_los_live_obj_size;
   POINTER_SIZE_INT los_live_obj_size;
+  POINTER_SIZE_INT segment_live_size[NORMAL_SIZE_SEGMENT_NUM];
   unsigned int result;
 }Collector;
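Note: the segment macros added to collector.h map an object size to a 256-byte-granularity bucket and back to that bucket's inclusive upper bound. A quick round-trip check (GC_OBJ_SIZE_THRESHOLD is assumed to be 4KB here):

  #include <assert.h>

  #define NORMAL_SIZE_SEGMENT_GRANULARITY_BITS 8
  #define NORMAL_SIZE_SEGMENT_GRANULARITY (1 << NORMAL_SIZE_SEGMENT_GRANULARITY_BITS)
  #define GC_OBJ_SIZE_THRESHOLD (4 * 1024) /* assumed value */
  #define NORMAL_SIZE_SEGMENT_NUM (GC_OBJ_SIZE_THRESHOLD / NORMAL_SIZE_SEGMENT_GRANULARITY)
  #define SIZE_TO_SEGMENT_INDEX(size) ((((size) + NORMAL_SIZE_SEGMENT_GRANULARITY-1) >> NORMAL_SIZE_SEGMENT_GRANULARITY_BITS) - 1)
  #define SEGMENT_INDEX_TO_SIZE(index) (((index)+1) << NORMAL_SIZE_SEGMENT_GRANULARITY_BITS)

  int main(void)
  {
    assert(SIZE_TO_SEGMENT_INDEX(1)    == 0);  /* 1..256 bytes   -> bucket 0 */
    assert(SIZE_TO_SEGMENT_INDEX(256)  == 0);
    assert(SIZE_TO_SEGMENT_INDEX(257)  == 1);  /* 257..512 bytes -> bucket 1 */
    assert(SIZE_TO_SEGMENT_INDEX(4096) == NORMAL_SIZE_SEGMENT_NUM - 1);
    assert(SEGMENT_INDEX_TO_SIZE(SIZE_TO_SEGMENT_INDEX(300)) == 512);
    return 0;
  }

SEGMENT_INDEX_TO_SIZE deliberately returns the bucket ceiling, so the tuner's per-block waste estimate errs on the conservative (larger) side.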
Index: src/verify/verify_gc_effect.cpp
===================================================================
--- src/verify/verify_gc_effect.cpp (revision 547478)
+++ src/verify/verify_gc_effect.cpp (working copy)
@@ -77,11 +77,48 @@
 
 void* verifier_copy_obj_information(Partial_Reveal_Object* p_obj)
 {
-  Live_Object_Inform* p_obj_information = (Live_Object_Inform* )STD_MALLOC(sizeof(Live_Object_Inform));
-  assert(p_obj_information);
-  p_obj_information->vt_raw = obj_get_vt_raw(p_obj);
-  p_obj_information->address = p_obj;
-  return (void*) p_obj_information;
+  if(!object_has_ref_field(p_obj)){
+    Live_Object_Inform* p_obj_information = (Live_Object_Inform* )STD_MALLOC(sizeof(Live_Object_Inform));
+    assert(p_obj_information);
+    p_obj_information->vt_raw = obj_get_vt_raw(p_obj);
+    p_obj_information->address = p_obj;
+    return (void*) p_obj_information;
+  }else{
+    REF *p_ref;
+    if (object_is_array(p_obj)) {
+      Partial_Reveal_Array* array = (Partial_Reveal_Array*)p_obj;
+      unsigned int array_length = array->array_len;
+      Live_Object_Ref_Slot_Inform* p_obj_information = (Live_Object_Ref_Slot_Inform* )STD_MALLOC(sizeof(Live_Object_Inform) + sizeof(VT)*array_length);
+
+      p_obj_information->vt_raw = obj_get_vt_raw(p_obj);
+      p_obj_information->address = p_obj;
+
+      p_ref = (REF *)((POINTER_SIZE_INT)array + (int)array_first_element_offset(array));
+
+      unsigned int i = 0;
+      for(; i < array_length; i++){
+        Partial_Reveal_Object* p_element = read_slot(p_ref + i);
+        p_obj_information->ref_slot[i] = (p_element == NULL) ? (VT)NULL : obj_get_vt_raw(p_element);
+      }
+      return p_obj_information;
+    }else{
+      unsigned int num_refs = object_ref_field_num(p_obj);
+      Live_Object_Ref_Slot_Inform* p_obj_information = (Live_Object_Ref_Slot_Inform* )STD_MALLOC(sizeof(Live_Object_Inform) + sizeof(VT)*num_refs);
+
+      p_obj_information->vt_raw = obj_get_vt_raw(p_obj);
+      p_obj_information->address = p_obj;
+
+      int* ref_iterator = object_ref_iterator_init(p_obj);
+
+      unsigned int i = 0;
+      for(; i < num_refs; i++){
+        p_ref = object_ref_iterator_get(ref_iterator + i, p_obj);
+        Partial_Reveal_Object* p_referent = read_slot(p_ref);
+        p_obj_information->ref_slot[i] = (p_referent == NULL) ? (VT)NULL : obj_get_vt_raw(p_referent);
+      }
+      return p_obj_information;
+    }
+  }
 }
 
 static Boolean fspace_object_was_forwarded(Partial_Reveal_Object *p_obj, Fspace *fspace, Heap_Verifier* heap_verifier)
@@ -304,8 +341,45 @@
     Live_Object_Inform* obj_inform_2 = (Live_Object_Inform*)*obj_container2;
     if(((POINTER_SIZE_INT)obj_inform_1->vt_raw) == ((POINTER_SIZE_INT)obj_inform_2->vt_raw)){
       /*FIXME: erase live object information in compare_function. */
-      STD_FREE(obj_inform_1);
-      STD_FREE(obj_inform_2);
+      if( object_has_ref_field((Partial_Reveal_Object*)obj_inform_1) ){
+        Live_Object_Ref_Slot_Inform* obj_ref_inform_1 = (Live_Object_Ref_Slot_Inform*)obj_inform_1;
+        Live_Object_Ref_Slot_Inform* obj_ref_inform_2 = (Live_Object_Ref_Slot_Inform*)obj_inform_2;
+
+        if (object_is_array((Partial_Reveal_Object*)obj_ref_inform_1)){
+          Partial_Reveal_Array* array = (Partial_Reveal_Array*)obj_ref_inform_2->address;
+          unsigned int array_length = array->array_len;
+
+          unsigned int i = 0;
+          for(; i < array_length; i++){
+            if((POINTER_SIZE_INT)obj_ref_inform_1->ref_slot[i] != (POINTER_SIZE_INT)obj_ref_inform_2->ref_slot[i]){
+              assert(0);
+              STD_FREE(obj_ref_inform_1);
+              STD_FREE(obj_ref_inform_2);
+              return FALSE;
+            }
+          }
+        }else{
+          unsigned int num_refs = object_ref_field_num((Partial_Reveal_Object*)(obj_ref_inform_2->address));
+
+          unsigned int i = 0;
+          for(; i < num_refs; i++){
+            if((POINTER_SIZE_INT)obj_ref_inform_1->ref_slot[i] != (POINTER_SIZE_INT)obj_ref_inform_2->ref_slot[i]){
+              assert(0);
+              STD_FREE(obj_ref_inform_1);
+              STD_FREE(obj_ref_inform_2);
+              return FALSE;
+            }
+          }
+        }
+
+        STD_FREE(obj_ref_inform_1);
+        STD_FREE(obj_ref_inform_2);
+      }else{
+        STD_FREE(obj_inform_1);
+        STD_FREE(obj_inform_2);
+      }
       return TRUE;
     }else{
       assert(0);
@@ -438,4 +512,3 @@
 
-

Index: src/verify/verify_gc_effect.h
===================================================================
--- src/verify/verify_gc_effect.h (revision 547478)
+++ src/verify/verify_gc_effect.h (working copy)
@@ -42,6 +42,12 @@
   Partial_Reveal_Object* address;
 } Live_Object_Inform;
 
+typedef struct Live_Object_Ref_Slot_Inform_Struct{
+  VT vt_raw;
+  Partial_Reveal_Object* address;
+  VT ref_slot[1];
+} Live_Object_Ref_Slot_Inform;
+
 typedef struct Object_Hashcode_Inform_struct{
   int hashcode;
   Partial_Reveal_Object* address;
@@ -75,4 +81,3 @@
 
 #endif
-
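Note: Live_Object_Ref_Slot_Inform uses the one-element trailing-array idiom: the record is over-allocated so that one VT snapshot per reference slot travels with it. A sketch of the allocation pattern with stand-in types (not the verifier's real Partial_Reveal_Object machinery):

  #include <stdlib.h>
  #include <string.h>

  typedef void* VT;

  typedef struct {
    VT vt_raw;
    void* address;
    VT ref_slot[1];  /* actually extends past the struct: one VT per slot */
  } Ref_Slot_Inform;

  /* Allocate a record with room for num_refs trailing slots and snapshot
   * the slots' vtable pointers into it. Returns NULL on OOM. */
  static Ref_Slot_Inform* ref_inform_new(VT vt, void* addr, const VT* slots, unsigned num_refs)
  {
    Ref_Slot_Inform* info = (Ref_Slot_Inform*)malloc(
        sizeof(Ref_Slot_Inform) - sizeof(VT) + sizeof(VT) * num_refs);
    if (!info) return NULL;
    info->vt_raw = vt;
    info->address = addr;
    memcpy(info->ref_slot, slots, sizeof(VT) * num_refs);
    return info;
  }

The verifier's compare pass can then walk two such records' ref_slot arrays index by index, exactly as the loops in the verify_gc_effect.cpp hunk above do, freeing both records whichever way the comparison turns out.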