Index: gc_gen/src/common/gc_common.cpp
===================================================================
--- gc_gen/src/common/gc_common.cpp	(revision 567407)
+++ gc_gen/src/common/gc_common.cpp	(working copy)
@@ -47,6 +47,8 @@
 extern unsigned int MINOR_COLLECTORS;
 extern unsigned int MAJOR_COLLECTORS;
 
+extern Boolean IGNORE_VTABLE_TRACING;
+
 POINTER_SIZE_INT HEAP_SIZE_DEFAULT = 256 * MB;
 POINTER_SIZE_INT min_heap_size_bytes = 16 * MB;
 POINTER_SIZE_INT max_heap_size_bytes = 0;
@@ -254,6 +256,10 @@
     JVMTI_HEAP_ITERATION = get_boolean_property("gc.heap_iteration");
   }
 
+  if (is_property_set("gc.ignore_vtable_tracing", VM_PROPERTIES) == 1) {
+    IGNORE_VTABLE_TRACING = get_boolean_property("gc.ignore_vtable_tracing");
+  }
+
   if (is_property_set("gc.use_large_page", VM_PROPERTIES) == 1){
     char* value = get_property("gc.use_large_page", VM_PROPERTIES);
     large_page_hint = strdup(value);
@@ -366,3 +372,4 @@
 }
 
+

Index: gc_gen/src/common/gc_common.h
===================================================================
--- gc_gen/src/common/gc_common.h	(revision 567407)
+++ gc_gen/src/common/gc_common.h	(working copy)
@@ -396,6 +396,7 @@
 
   /* FIXME:: this is wrong! root_set belongs to mutator */
   Vector_Block* root_set;
+  Vector_Block* weak_root_set;
   Vector_Block* uncompressed_root_set;
 
   Space_Tuner* tuner;
@@ -465,4 +466,6 @@
 extern Boolean* p_global_lspace_move_obj;
 inline Boolean obj_is_moved(Partial_Reveal_Object* p_obj)
 {  return ((p_obj >= los_boundary) || (*p_global_lspace_move_obj)); }
+
+extern Boolean VTABLE_TRACING;
 #endif //_GC_COMMON_H_

Index: gc_gen/src/common/gc_for_class.cpp
===================================================================
--- gc_gen/src/common/gc_for_class.cpp	(revision 567407)
+++ gc_gen/src/common/gc_for_class.cpp	(working copy)
@@ -139,7 +139,7 @@
   }
 
   gcvt_size = (gcvt_size + GCVT_ALIGN_MASK) & ~GCVT_ALIGN_MASK;
-  gcvt = (GC_VTable_Info*) malloc(gcvt_size);
+  gcvt = (GC_VTable_Info*) class_alloc_via_classloader(ch, gcvt_size);
   assert(gcvt);
   assert(!((POINTER_SIZE_INT)gcvt % GCVT_ALIGNMENT));
@@ -199,3 +199,4 @@
 
+

Index: gc_gen/src/common/gc_for_class.h
===================================================================
--- gc_gen/src/common/gc_for_class.h	(revision 567407)
+++ gc_gen/src/common/gc_for_class.h	(working copy)
@@ -117,9 +117,18 @@
 } GC_VTable_Info;
 
+enum VT_Mark_Status {
+  VT_UNMARKED = 0,
+  VT_MARKED = 0x1,
+  VT_FALLBACK_MARKED = 0x2
+};
+
+struct Partial_Reveal_Object;
 typedef struct Partial_Reveal_VTable {
   //--Fixme: emt64
   GC_VTable_Info *gcvt;
+  Partial_Reveal_Object* jlC;
+  unsigned int vtmark;
 } Partial_Reveal_VTable;
 
 typedef struct Partial_Reveal_Object {
@@ -300,3 +309,4 @@
 
+
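Note on the new vtable fields: vtmark is a small per-vtable mark word. Regular tracing uses VT_MARKED, while fallback compaction uses the separate VT_FALLBACK_MARKED bit, so a fallback pass still visits vtables that the aborted copying collection had already marked. A minimal standalone model of the intended transitions (the Model_* type is an illustrative stand-in, not the Harmony definition):

    enum VT_Mark_Status { VT_UNMARKED = 0, VT_MARKED = 0x1, VT_FALLBACK_MARKED = 0x2 };

    struct Model_VTable { unsigned int vtmark; };

    // Regular marking: claim the vtable exactly once per collection.
    static bool vt_try_mark(Model_VTable* vt) {
        if (vt->vtmark != VT_UNMARKED) return false;
        vt->vtmark = VT_MARKED;
        return true;
    }

    // Fallback compaction: a separate bit, so vtables marked by the aborted
    // minor collection are still visited exactly once by the fallback pass.
    static bool vt_try_mark_fallback(Model_VTable* vt) {
        if (vt->vtmark & VT_FALLBACK_MARKED) return false;
        vt->vtmark |= VT_FALLBACK_MARKED;
        return true;
    }

Both helpers return true only on the first call for a given vtable, which is the invariant the scan_object() hooks below rely on.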
Index: gc_gen/src/common/gc_for_vm.cpp
===================================================================
--- gc_gen/src/common/gc_for_vm.cpp	(revision 567407)
+++ gc_gen/src/common/gc_for_vm.cpp	(working copy)
@@ -168,6 +168,29 @@
   gc_compressed_rootset_add_entry(p_global_gc, p_ref);
 }
 
+Boolean gc_supports_class_unloading()
+{
+  return VTABLE_TRACING;
+}
+
+void gc_add_weak_root_set_entry(Managed_Object_Handle *ref, Boolean is_pinned, Boolean is_short_weak)
+{
+  //assert(is_short_weak == FALSE); //currently there is no need for short weak roots
+  Partial_Reveal_Object** p_ref = (Partial_Reveal_Object**)ref;
+  Partial_Reveal_Object* p_obj = *p_ref;
+  /* We don't enumerate NULL references or nos_boundary.
+     FIXME:: nos_boundary is a static field in GCHelper.java used by the fast write
+     barrier, not a real object reference; this should be fixed so that magic Address
+     fields are not enumerated at all. */
+#ifdef COMPRESS_REFERENCE
+  if (p_obj == (Partial_Reveal_Object*)HEAP_NULL || p_obj == NULL || p_obj == nos_boundary ) return;
+#else
+  if (p_obj == NULL || p_obj == nos_boundary ) return;
+#endif
+  assert( !obj_is_marked_in_vt(p_obj));
+  assert( address_belongs_to_gc_heap(p_obj, p_global_gc));
+  gc_weak_rootset_add_entry(p_global_gc, p_ref, is_short_weak);
+}
+
 /* VM to force GC */
 void gc_force_gc()
 {
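gc_supports_class_unloading() is deliberately not a static property: it returns VTABLE_TRACING, which gc_decide_collection_kind() in gen.cpp recomputes before every collection (FALSE for minor collections, and always FALSE while gc.ignore_vtable_tracing holds its default of TRUE). Since the VM queries it during root-set enumeration, after the collection kind is decided, minor collections automatically fall back to strong enumeration of j.l.Class references. A one-line restatement of that gating, under those assumptions:

    // VTABLE_TRACING as recomputed per collection in gc_decide_collection_kind()
    static bool decide_vtable_tracing(bool ignore_vtable_tracing, bool is_minor_collection) {
        return !ignore_vtable_tracing && !is_minor_collection;
    }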
Index: gc_gen/src/common/gc_metadata.cpp
===================================================================
--- gc_gen/src/common/gc_metadata.cpp	(revision 567407)
+++ gc_gen/src/common/gc_metadata.cpp	(working copy)
@@ -76,6 +76,7 @@
   gc_metadata.mutator_remset_pool = sync_pool_create();
   gc_metadata.collector_remset_pool = sync_pool_create();
   gc_metadata.collector_repset_pool = sync_pool_create();
+  gc_metadata.weak_roots_pool = sync_pool_create();
 #ifdef USE_32BITS_HASHCODE
   gc_metadata.collector_hashcode_pool = sync_pool_create();
 #endif
@@ -97,6 +98,7 @@
   sync_pool_destruct(metadata->mutator_remset_pool);
   sync_pool_destruct(metadata->collector_remset_pool);
   sync_pool_destruct(metadata->collector_repset_pool);
+  sync_pool_destruct(metadata->weak_roots_pool);
 #ifdef USE_32BITS_HASHCODE
   sync_pool_destruct(metadata->collector_hashcode_pool);
 #endif
@@ -212,6 +214,8 @@
   GC* gc = collector->gc;
   GC_Metadata* metadata = gc->metadata;
 
+  gc_update_weak_roots_pool(gc);
+
   /* MINOR_COLLECTION doesn't need a rootset update, but it does need a reset */
   if( !gc_match_kind(gc, MINOR_COLLECTION)){
     gc_update_repointed_sets(gc, metadata->gc_rootset_pool);
@@ -220,6 +224,7 @@
 #endif
   } else {
     gc_set_pool_clear(metadata->gc_rootset_pool);
+    gc_set_pool_clear(metadata->weak_roots_pool);
   }
 
 #ifdef COMPRESS_REFERENCE
@@ -255,6 +260,12 @@
      gc_rootset_pool after the entry pointed by gc->root_set. So we clear
      this value only after we know we are not going to fall back. */
   // gc->root_set = NULL;
+
+  if(vector_block_is_empty(gc->weak_root_set))
+    pool_put_entry(free_set_pool, gc->weak_root_set);
+  else
+    pool_put_entry(metadata->weak_roots_pool, gc->weak_root_set);
+  gc->weak_root_set = NULL;
 
   if(!gc_is_gen_mode()) return;
@@ -323,6 +334,11 @@
   gc->root_set = free_set_pool_get_entry(&gc_metadata);
   assert(vector_block_is_empty(gc->root_set));
 
+  assert(pool_is_empty(gc_metadata.weak_roots_pool));
+  assert(gc->weak_root_set == NULL);
+  gc->weak_root_set = free_set_pool_get_entry(&gc_metadata);
+  assert(vector_block_is_empty(gc->weak_root_set));
+
 #ifdef COMPRESS_REFERENCE
   assert(pool_is_empty(gc_metadata.gc_uncompressed_rootset_pool));
   assert(gc->uncompressed_root_set == NULL);
@@ -377,3 +393,4 @@
 
+

Index: gc_gen/src/common/gc_metadata.h
===================================================================
--- gc_gen/src/common/gc_metadata.h	(revision 567407)
+++ gc_gen/src/common/gc_metadata.h	(working copy)
@@ -44,6 +44,7 @@
   Pool* mutator_remset_pool;    /* list of remsets generated by app during execution */
   Pool* collector_remset_pool;  /* list of remsets generated by gc during collection */
   Pool* collector_repset_pool;  /* list of repointed ref slot sets */
+  Pool* weak_roots_pool;        /* list of short weak roots */
 #ifdef USE_32BITS_HASHCODE
   Pool* collector_hashcode_pool;
 #endif
@@ -60,6 +61,9 @@
 void gc_reset_rootset(GC* gc);
 void gc_fix_rootset(Collector* collector);
 
+void identify_dead_weak_roots(GC *gc, Pool *pool);
+void gc_update_weak_roots_pool(GC *gc);
+
 void gc_clear_remset(GC* gc);
 
 inline void gc_task_pool_clear(Pool* task_pool)
 {
@@ -182,7 +186,21 @@
   assert(collector->trace_stack);
 }
 
+inline void gc_weak_rootset_add_entry(GC* gc, Partial_Reveal_Object** p_ref, Boolean is_short_weak)
+{
+  //assert(is_short_weak == FALSE); //currently there is no need for short weak roots
+  assert( p_ref < gc_heap_base_address() || p_ref >= gc_heap_ceiling_address());
+
+  Vector_Block* weak_root_set = gc->weak_root_set;
+  vector_block_add_entry(weak_root_set, (POINTER_SIZE_INT)p_ref);
+
+  if( !vector_block_is_full(weak_root_set)) return;
+
+  pool_put_entry(gc_metadata.weak_roots_pool, weak_root_set);
+  gc->weak_root_set = free_set_pool_get_entry(&gc_metadata);
+  assert(gc->weak_root_set);
+}
+
 #ifdef COMPRESS_REFERENCE
 inline void gc_rootset_add_entry(GC* gc, Partial_Reveal_Object** p_ref)

Index: gc_gen/src/common/mark_scan_pool.cpp
===================================================================
--- gc_gen/src/common/mark_scan_pool.cpp	(revision 567407)
+++ gc_gen/src/common/mark_scan_pool.cpp	(working copy)
@@ -46,6 +46,15 @@
 {
   vm_notify_obj_alive( (void *)p_obj);
   assert((((POINTER_SIZE_INT)p_obj) % GC_OBJECT_ALIGNMENT) == 0);
+
+  Partial_Reveal_VTable *vtable = uncompress_vt(obj_get_vt(p_obj));
+  if(VTABLE_TRACING)
+    if(vtable->vtmark == VT_UNMARKED) {
+      vtable->vtmark = VT_MARKED;
+      if(obj_mark_in_vt(vtable->jlC))
+        collector_tracestack_push(collector, vtable->jlC);
+    }
+
   if( !object_has_ref_field(p_obj) ) return;
 
   REF *p_ref;
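The scan_object() hook piggybacks class liveness on ordinary object scanning: the first time any instance of a class is scanned, its vtable is marked and the class's java.lang.Class instance is pushed onto the trace stack as if it were a reference found in the heap. A compact compilable model of that logic (the Model_* types are stand-ins for the Harmony structures; the marked flag stands in for obj_mark_in_vt()):

    #include <vector>

    struct Model_Object;
    struct Model_VTable {
        unsigned int  vtmark;   // VT_UNMARKED (0) or VT_MARKED (1)
        Model_Object* jlC;      // the class's java.lang.Class instance
    };
    struct Model_Object { Model_VTable* vt; bool marked; };

    static void scan_hook(std::vector<Model_Object*>& trace_stack,
                          Model_Object* obj, bool vtable_tracing) {
        if (!vtable_tracing) return;
        Model_VTable* vt = obj->vt;
        if (vt->vtmark != 0) return;   // class already seen this collection
        vt->vtmark = 1;                // VT_MARKED
        if (!vt->jlC->marked) {        // stands in for obj_mark_in_vt(jlC)
            vt->jlC->marked = true;
            trace_stack.push_back(vt->jlC);
        }
    }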
Index: gc_gen/src/common/weak_roots.cpp
===================================================================
--- gc_gen/src/common/weak_roots.cpp	(revision 0)
+++ gc_gen/src/common/weak_roots.cpp	(revision 0)
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2005-2006 The Apache Software Foundation or its licensors, as applicable.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "gc_common.h"
+#include "gc_metadata.h"
+#include "../gen/gen.h"
+
+void identify_dead_weak_roots(GC *gc, Pool *pool)
+{
+  pool_iterator_init(pool);
+  while(Vector_Block *block = pool_iterator_next(pool)){
+    POINTER_SIZE_INT *iter = vector_block_iterator_init(block);
+    for(; !vector_block_iterator_end(block, iter); iter = vector_block_iterator_advance(block, iter)){
+      Partial_Reveal_Object** p_ref = (Partial_Reveal_Object**)*iter;
+      Partial_Reveal_Object *p_obj = *p_ref;
+      if(!p_obj){  // reference has been cleared
+        continue;
+      }
+      if(IS_FALLBACK_COMPACTION) {
+        if(obj_belongs_to_nos(p_obj) && obj_is_fw_in_oi(p_obj)){
+          // unreachable for vtable->jlC entries, but needed for general weak roots
+          assert(!obj_is_marked_in_vt(p_obj));
+          assert(obj_get_vt(p_obj) == obj_get_vt(obj_get_fw_in_oi(p_obj)));
+          p_obj = obj_get_fw_in_oi(p_obj);
+          assert(p_obj);
+          *p_ref = p_obj;
+        }
+      }
+      if(gc_obj_is_dead(gc, p_obj))
+        *p_ref = 0;
+    }
+  }
+}
+
+extern Boolean IS_MOVE_COMPACT;
+
+/* the pool entries are p_ref slots (pointers to reference slots), not p_obj values */
+void gc_update_weak_roots_pool(GC *gc)
+{
+  GC_Metadata* metadata = gc->metadata;
+  Pool *pool = metadata->weak_roots_pool;
+  Partial_Reveal_Object** p_ref;
+  Partial_Reveal_Object *p_obj;
+
+  pool_iterator_init(pool);
+  while(Vector_Block *repset = pool_iterator_next(pool)){
+    POINTER_SIZE_INT *iter = vector_block_iterator_init(repset);
+    for(; !vector_block_iterator_end(repset,iter); iter = vector_block_iterator_advance(repset,iter)){
+      p_ref = (Partial_Reveal_Object**)*iter;
+      p_obj = *p_ref;
+      if(!p_obj){  // reference has been cleared
+        continue;
+      }
+
+      if(obj_need_move(gc, p_obj)) {
+        if(!IS_MOVE_COMPACT){
+          assert((POINTER_SIZE_INT)obj_get_fw_in_oi(p_obj) > DUAL_MARKBITS);
+          *p_ref = obj_get_fw_in_oi(p_obj);
+        } else {
+          assert(space_of_addr(gc, (void*)p_obj)->move_object);
+          *p_ref = ref_to_obj_ptr(obj_get_fw_in_table(p_obj));
+        }
+      }
+    }
+  }
+}
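weak_roots.cpp splits weak-root processing into two phases with a fixed ordering: identify_dead_weak_roots() runs once marking has decided liveness and nulls every recorded slot whose referent died; gc_update_weak_roots_pool() runs from gc_fix_rootset(), after forwarding addresses are final, and rewrites the surviving slots. A toy model of the two passes over a pool of slots (the types are illustrative; live/forwarded_to stand in for the mark and forwarding bits):

    #include <vector>

    struct Toy_Object { bool live; Toy_Object* forwarded_to; };

    // Phase 1, end of marking: clear slots whose referents are dead.
    static void toy_identify_dead_weak_roots(std::vector<Toy_Object**>& pool) {
        for (Toy_Object** slot : pool)
            if (*slot && !(*slot)->live) *slot = nullptr;
    }

    // Phase 2, after moving: repoint surviving slots to the new copies.
    static void toy_update_weak_roots(std::vector<Toy_Object**>& pool) {
        for (Toy_Object** slot : pool)
            if (*slot && (*slot)->forwarded_to) *slot = (*slot)->forwarded_to;
    }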
Index: gc_gen/src/finalizer_weakref/finalizer_weakref.cpp
===================================================================
--- gc_gen/src/finalizer_weakref/finalizer_weakref.cpp	(revision 567407)
+++ gc_gen/src/finalizer_weakref/finalizer_weakref.cpp	(working copy)
@@ -33,76 +33,6 @@
 Boolean IGNORE_FINREF = FALSE;
 Boolean DURING_RESURRECTION = FALSE;
 
-
-static inline Boolean obj_is_dead_in_gen_minor_gc(Partial_Reveal_Object *p_obj)
-{
-  /*
-   * The first condition is for supporting switch between nongen and gen minor collection.
-   * With this kind of switch dead objects in MOS & LOS may be set the mark or fw bit in oi.
-   * The second condition is for supporting partially forwarding NOS.
-   * In partially forwarding situation live objects in the non-forwarding half NOS will only be marked but not forwarded.
-   */
-  return obj_belongs_to_nos(p_obj) && !obj_is_marked_or_fw_in_oi(p_obj);
-}
-
-static inline Boolean obj_is_dead_in_nongen_minor_gc(Partial_Reveal_Object *p_obj)
-{
-  return (obj_belongs_to_nos(p_obj) && !obj_is_fw_in_oi(p_obj))
-          || (!obj_belongs_to_nos(p_obj) && !obj_is_marked_in_oi(p_obj));
-}
-
-static inline Boolean obj_is_dead_in_major_gc(Partial_Reveal_Object *p_obj)
-{
-  return !obj_is_marked_in_vt(p_obj);
-}
-
-#ifdef USE_MARK_SWEEP_GC
-extern Boolean obj_is_marked_in_table(Partial_Reveal_Object *obj);
-static inline Boolean obj_is_dead_in_mark_sweep_gc(Partial_Reveal_Object * p_obj)
-{
-  return !obj_is_marked_in_table(p_obj);
-}
-#endif
-
-static inline Boolean gc_obj_is_dead(GC *gc, Partial_Reveal_Object *p_obj)
-{
-  assert(p_obj);
-
-#ifdef USE_MARK_SWEEP_GC
-  return obj_is_dead_in_mark_sweep_gc(p_obj);
-#endif
-
-  if(gc_match_kind(gc, MINOR_COLLECTION)){
-    if(gc_is_gen_mode())
-      return obj_is_dead_in_gen_minor_gc(p_obj);
-    else
-      return obj_is_dead_in_nongen_minor_gc(p_obj);
-  } else {
-    return obj_is_dead_in_major_gc(p_obj);
-  }
-}
-
-static inline Boolean fspace_obj_to_be_forwarded(Partial_Reveal_Object *p_obj)
-{
-  if(!obj_belongs_to_nos(p_obj)) return FALSE;
-  return forward_first_half ? (p_obj < object_forwarding_boundary) : (p_obj >= object_forwarding_boundary);
-}
-
-static inline Boolean obj_need_move(GC *gc, Partial_Reveal_Object *p_obj)
-{
-  assert(!gc_obj_is_dead(gc, p_obj));
-
-#ifdef USE_MARK_SWEEP_GC
-  Sspace *sspace = gc_ms_get_sspace((GC_MS*)gc);
-  return sspace->move_object;
-#endif
-
-  if(gc_is_gen_mode() && gc_match_kind(gc, MINOR_COLLECTION))
-    return fspace_obj_to_be_forwarded(p_obj);
-
-  Space *space = space_of_addr(gc, p_obj);
-  return space->move_object;
-}
-
 static void finref_add_repset_from_pool(GC *gc, Pool *pool)
 {
   finref_reset_repset(gc);

Index: gc_gen/src/gen/gen.cpp
===================================================================
--- gc_gen/src/gen/gen.cpp	(revision 567407)
+++ gc_gen/src/gen/gen.cpp	(working copy)
@@ -284,6 +284,8 @@
 
 Boolean FORCE_FULL_COMPACT = FALSE;
 
+Boolean IGNORE_VTABLE_TRACING = TRUE;
+Boolean VTABLE_TRACING = FALSE;
 
 void gc_decide_collection_kind(GC_Gen* gc, unsigned int cause)
 {
@@ -294,6 +296,11 @@
     gc->collect_kind = MAJOR_COLLECTION;
   else
     gc->collect_kind = MINOR_COLLECTION;
+
+  if(IGNORE_VTABLE_TRACING || (gc->collect_kind == MINOR_COLLECTION))
+    VTABLE_TRACING = FALSE;
+  else
+    VTABLE_TRACING = TRUE;
 
 #ifdef USE_MARK_SWEEP_GC
   gc->collect_kind = MARK_SWEEP_GC;
@@ -783,3 +790,4 @@
 #endif
 }
 
+

Index: gc_gen/src/gen/gen.h
===================================================================
--- gc_gen/src/gen/gen.h	(revision 567407)
+++ gc_gen/src/gen/gen.h	(working copy)
@@ -87,6 +87,7 @@
   /* FIXME:: this is wrong! root_set belongs to mutator */
   Vector_Block* root_set;
+  Vector_Block* weak_root_set;
   Vector_Block* uncompressed_root_set;
 
   //For_LOS_extend
@@ -193,5 +194,58 @@
 
 extern Boolean GEN_NONGEN_SWITCH ;
 
+inline Boolean obj_is_dead_in_gen_minor_gc(Partial_Reveal_Object *p_obj)
+{
+  /*
+   * The first condition supports the switch between nongen and gen minor collection:
+   * with such a switch, dead objects in MOS & LOS may have the mark or fw bit set in oi.
+   */
+  return obj_belongs_to_nos(p_obj) && !obj_is_marked_or_fw_in_oi(p_obj);
+}
+
+inline Boolean obj_is_dead_in_nongen_minor_gc(Partial_Reveal_Object *p_obj)
+{
+  return (obj_belongs_to_nos(p_obj) && !obj_is_fw_in_oi(p_obj))
+          || (!obj_belongs_to_nos(p_obj) && !obj_is_marked_in_oi(p_obj));
+}
+
+inline Boolean obj_is_dead_in_major_gc(Partial_Reveal_Object *p_obj)
+{
+  return !obj_is_marked_in_vt(p_obj);
+}
+
+// clear the two least significant bits of p_obj first
+inline Boolean gc_obj_is_dead(GC *gc, Partial_Reveal_Object *p_obj)
+{
+  assert(p_obj);
+  if(gc_match_kind(gc, MINOR_COLLECTION)){
+    if(gc_is_gen_mode())
+      return obj_is_dead_in_gen_minor_gc(p_obj);
+    else
+      return obj_is_dead_in_nongen_minor_gc(p_obj);
+  } else {
+    return obj_is_dead_in_major_gc(p_obj);
+  }
+}
+
+extern Boolean forward_first_half;
+extern void* object_forwarding_boundary;
+
+inline Boolean fspace_obj_to_be_forwarded(Partial_Reveal_Object *p_obj)
+{
+  if(!obj_belongs_to_nos(p_obj)) return FALSE;
+  return forward_first_half ? (p_obj < object_forwarding_boundary) : (p_obj >= object_forwarding_boundary);
+}
+
+inline Boolean obj_need_move(GC *gc, Partial_Reveal_Object *p_obj)
+{
+  if(gc_is_gen_mode() && gc_match_kind(gc, MINOR_COLLECTION))
+    return fspace_obj_to_be_forwarded(p_obj);
+
+  Space *space = space_of_addr(gc, p_obj);
+  return space->move_object;
+}
+
 #endif /* ifndef _GC_GEN_H_ */
+
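The liveness predicates move from finalizer_weakref.cpp into gen.h (dropping static) precisely so that identify_dead_weak_roots() can share them; the test differs per collection kind. A stub restatement of gc_obj_is_dead()'s dispatch, with the three leaf predicates reduced to their one-line meanings (declarations only in this sketch):

    enum Collect_Kind { MINOR, MAJOR_OR_FALLBACK };

    bool dead_gen_minor(void* obj);     // in NOS, and neither marked nor forwarded in oi
    bool dead_nongen_minor(void* obj);  // NOS: not forwarded; elsewhere: not marked in oi
    bool dead_major(void* obj);         // mark bit in the vtable pointer not set

    static bool is_dead(Collect_Kind kind, bool gen_mode, void* obj) {
        if (kind == MINOR)
            return gen_mode ? dead_gen_minor(obj) : dead_nongen_minor(obj);
        return dead_major(obj);
    }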
Index: gc_gen/src/mark_compact/fallback_mark_scan.cpp
===================================================================
--- gc_gen/src/mark_compact/fallback_mark_scan.cpp	(revision 567407)
+++ gc_gen/src/mark_compact/fallback_mark_scan.cpp	(working copy)
@@ -40,6 +40,14 @@
   Partial_Reveal_Object *p_obj = read_slot(p_ref);
   assert(p_obj);
   assert((((POINTER_SIZE_INT)p_obj) % GC_OBJECT_ALIGNMENT) == 0);
+
+  Partial_Reveal_VTable *vtable = uncompress_vt(obj_get_vt(p_obj));
+  if(VTABLE_TRACING)
+    if(!(vtable->vtmark & VT_FALLBACK_MARKED)) {
+      vtable->vtmark |= VT_FALLBACK_MARKED; //fallback compaction needs its own mark bit
+      collector_tracestack_push(collector, &(vtable->jlC));
+      //64-bit consideration needed: vtable->jlC is an uncompressed reference
+    }
 
   if(obj_belongs_to_nos(p_obj) && obj_is_fw_in_oi(p_obj)){
     assert(obj_get_vt(p_obj) == obj_get_vt(obj_get_fw_in_oi(p_obj)));
@@ -226,3 +234,4 @@
 #endif
 
+

Index: gc_gen/src/mark_compact/mspace_extend_compact.cpp
===================================================================
--- gc_gen/src/mark_compact/mspace_extend_compact.cpp	(revision 567407)
+++ gc_gen/src/mark_compact/mspace_extend_compact.cpp	(working copy)
@@ -244,6 +244,7 @@
 #ifndef BUILD_IN_REFERENT
   gc_update_finref_repointed_refs(gc);
 #endif
+  gc_reupdate_repointed_sets(gc, gc->metadata->weak_roots_pool, start_address, end_address, addr_diff);
 
   update_rootset_interior_pointer();
 }

Index: gc_gen/src/mark_compact/mspace_move_compact.cpp
===================================================================
--- gc_gen/src/mark_compact/mspace_move_compact.cpp	(revision 567407)
+++ gc_gen/src/mark_compact/mspace_move_compact.cpp	(working copy)
@@ -204,6 +204,7 @@
     gc_update_weakref_ignore_finref(gc);
   }
 #endif
+  identify_dead_weak_roots(gc, gc->metadata->weak_roots_pool);
 
   /* let other collectors go */
   num_marking_collectors++;
@@ -293,8 +294,8 @@
     mspace_reset_after_compaction(mspace);
     fspace_reset_for_allocation(fspace);
-
     gc_set_pool_clear(gc->metadata->gc_rootset_pool);
+    gc_set_pool_clear(gc->metadata->weak_roots_pool);
 
     TRACE2("gc.process", "GC: collector[0] finished");
     return;
Index: gc_gen/src/mark_compact/mspace_slide_compact.cpp
===================================================================
--- gc_gen/src/mark_compact/mspace_slide_compact.cpp	(revision 567407)
+++ gc_gen/src/mark_compact/mspace_slide_compact.cpp	(working copy)
@@ -453,6 +453,7 @@
     gc_update_weakref_ignore_finref(gc);
   }
 #endif
+  identify_dead_weak_roots(gc, gc->metadata->weak_roots_pool);
 
   if( gc->tuner->kind != TRANS_NOTHING ) gc_compute_space_tune_size_after_marking(gc);
   assert(!(gc->tuner->tuning_size % GC_BLOCK_SIZE_BYTES));
@@ -591,6 +592,7 @@
     mspace_restore_block_chain(mspace);
 
     gc_set_pool_clear(gc->metadata->gc_rootset_pool);
+    gc_set_pool_clear(gc->metadata->weak_roots_pool);
 
     TRACE2("gc.process", "GC: collector[0] finished");
     return;

Index: gc_gen/src/mark_compact/space_tune_mark_scan.cpp
===================================================================
--- gc_gen/src/mark_compact/space_tune_mark_scan.cpp	(revision 567407)
+++ gc_gen/src/mark_compact/space_tune_mark_scan.cpp	(working copy)
@@ -54,6 +54,15 @@
 {
   vm_notify_obj_alive( (void *)p_obj);
   assert((((POINTER_SIZE_INT)p_obj) % GC_OBJECT_ALIGNMENT) == 0);
+
+  Partial_Reveal_VTable *vtable = uncompress_vt(obj_get_vt(p_obj));
+  if(VTABLE_TRACING)
+    if(vtable->vtmark == VT_UNMARKED) {
+      vtable->vtmark = VT_MARKED;
+      if(obj_mark_in_vt(vtable->jlC))
+        collector_tracestack_push(collector, vtable->jlC);
+    }
+
   if( !object_has_ref_field(p_obj) ) return;
 
   REF *p_ref;

Index: gc_gen/src/mark_sweep/sspace_mark.cpp
===================================================================
--- gc_gen/src/mark_sweep/sspace_mark.cpp	(revision 567407)
+++ gc_gen/src/mark_sweep/sspace_mark.cpp	(working copy)
@@ -45,6 +45,15 @@
 static FORCE_INLINE void scan_object(Collector *collector, Partial_Reveal_Object *p_obj)
 {
   assert((((POINTER_SIZE_INT)p_obj) % GC_OBJECT_ALIGNMENT) == 0);
+
+  Partial_Reveal_VTable *vtable = uncompress_vt(obj_get_vt(p_obj));
+  if(VTABLE_TRACING)
+    if(vtable->vtmark == VT_UNMARKED) {
+      vtable->vtmark = VT_MARKED;
+      if(obj_mark_in_table(vtable->jlC))
+        collector_tracestack_push(collector, vtable->jlC);
+    }
+
   if(!object_has_ref_field(p_obj)) return;
 
   REF *p_ref;

Index: gc_gen/src/mark_sweep/sspace_mark_sweep.cpp
===================================================================
--- gc_gen/src/mark_sweep/sspace_mark_sweep.cpp	(revision 567407)
+++ gc_gen/src/mark_sweep/sspace_mark_sweep.cpp	(working copy)
@@ -96,6 +96,7 @@
     gc_update_weakref_ignore_finref(gc);
   }
 #endif
+  identify_dead_weak_roots(gc, gc->metadata->weak_roots_pool);
 
   gc_init_chunk_for_sweep(gc, sspace);
 
   /* let other collectors go */
   num_marking_collectors++;
Index: gc_gen/src/trace_forward/fspace_gen_forward_pool.cpp
===================================================================
--- gc_gen/src/trace_forward/fspace_gen_forward_pool.cpp	(revision 567407)
+++ gc_gen/src/trace_forward/fspace_gen_forward_pool.cpp	(working copy)
@@ -48,9 +48,16 @@
   return;
 }
 
+void trace_forwarded_gen_jlC_from_vtable(Collector* collector, Partial_Reveal_Object *p_obj);
+
 static FORCE_INLINE void scan_object(Collector* collector, Partial_Reveal_Object *p_obj)
 {
   assert((((POINTER_SIZE_INT)p_obj) % GC_OBJECT_ALIGNMENT) == 0);
+  Partial_Reveal_VTable *vtable = uncompress_vt(obj_get_vt(p_obj));
+  if(VTABLE_TRACING)
+    if(vtable->vtmark == VT_UNMARKED) {
+      vtable->vtmark = VT_MARKED;
+      trace_forwarded_gen_jlC_from_vtable(collector, vtable->jlC);
+    }
 
   if (!object_has_ref_field(p_obj)) return;
 
   REF *p_ref;
@@ -164,6 +171,40 @@
   return;
 }
 
+/* Forward vtable->jlC and trace the forwarded object, but do not update vtable->jlC
+   itself; that is left for weak-root updating. We would not need this function if
+   class unloading were not performed in copying collections; in that case every
+   vtable->jlC would be a strong root in this algorithm. */
+void trace_forwarded_gen_jlC_from_vtable(Collector* collector, Partial_Reveal_Object *p_obj)
+{
+  Space* space = collector->collect_space;
+  GC* gc = collector->gc;
+
+  if(!obj_belongs_to_nos(p_obj)) return;
+
+  /* fast path: the object has already been forwarded */
+  if(obj_is_fw_in_oi(p_obj)) return;
+
+  if(NOS_PARTIAL_FORWARD && !fspace_object_to_be_forwarded(p_obj, (Fspace*)space)) {
+    assert(!obj_is_fw_in_oi(p_obj));
+    if(obj_mark_in_oi(p_obj))
+      scan_object(collector, p_obj);
+    return;
+  }
+
+  /* the following is the forwarding logic */
+  Partial_Reveal_Object* p_target_obj = collector_forward_object(collector, p_obj);
+  if( p_target_obj == NULL ){
+    if(collector->result == FALSE ){
+      /* failed to forward; get back to the controller. */
+      vector_stack_clear(collector->trace_stack);
+      return;
+    }
+    assert(obj_get_fw_in_oi(p_obj));
+    return;
+  }
+  scan_object(collector, p_target_obj);
+  return;
+}
+
 static void trace_object(Collector *collector, REF *p_ref)
 {
   forward_object(collector, p_ref);
@@ -309,6 +350,7 @@
     gc_update_weakref_ignore_finref(gc);
   }
 #endif
+  identify_dead_weak_roots(gc, gc->metadata->weak_roots_pool);
 
   gc_fix_rootset(collector);
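trace_forwarded_gen_jlC_from_vtable() keeps the j.l.Class instance alive and forwards it, but deliberately does not write the new address back into vtable->jlC: that slot is registered as a weak root and repointed later by gc_update_weak_roots_pool(), once all forwarding addresses are final. A toy model of "forward the referent, leave the slot stale":

    // Toy stand-ins; fw plays the role of the forwarding pointer in oi.
    struct Toy_Obj { Toy_Obj* fw; };

    static Toy_Obj* toy_forward(Toy_Obj* obj) {   // collector_forward_object() stand-in
        if (!obj->fw) obj->fw = new Toy_Obj{nullptr};
        return obj->fw;
    }

    static void toy_trace_jlC(Toy_Obj** jlC_slot) {
        toy_forward(*jlC_slot);   // the referent survives and moves...
        // ...but *jlC_slot is NOT rewritten here; the slot sits in the
        // weak-roots pool and is fixed up after forwarding is complete.
    }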
Index: gc_gen/src/trace_forward/fspace_nongen_forward_pool.cpp
===================================================================
--- gc_gen/src/trace_forward/fspace_nongen_forward_pool.cpp	(revision 567407)
+++ gc_gen/src/trace_forward/fspace_nongen_forward_pool.cpp	(working copy)
@@ -40,9 +40,16 @@
   return;
 }
 
+void trace_forwarded_nongen_jlC_from_vtable(Collector* collector, Partial_Reveal_Object *p_obj);
+
 static FORCE_INLINE void scan_object(Collector* collector, Partial_Reveal_Object *p_obj)
 {
   assert((((POINTER_SIZE_INT)p_obj) % GC_OBJECT_ALIGNMENT) == 0);
+  Partial_Reveal_VTable *vtable = uncompress_vt(obj_get_vt(p_obj));
+  if(VTABLE_TRACING)
+    if(vtable->vtmark == VT_UNMARKED) {
+      vtable->vtmark = VT_MARKED;
+      trace_forwarded_nongen_jlC_from_vtable(collector, vtable->jlC);
+    }
 
   if (!object_has_ref_field_before_scan(p_obj)) return;
 
   REF *p_ref;
@@ -135,6 +142,30 @@
   return;
 }
 
+/* Forward vtable->jlC and trace the forwarded object, but do not update vtable->jlC
+   itself; that is left for weak-root updating. We would not need this function if
+   class unloading were not performed in copying collections; in that case every
+   vtable->jlC would be a strong root in this algorithm. */
+void trace_forwarded_nongen_jlC_from_vtable(Collector* collector, Partial_Reveal_Object *p_obj)
+{
+  if(!obj_belongs_to_nos(p_obj)){
+    if(obj_mark_in_oi(p_obj))
+      scan_object(collector, p_obj);
+    return;
+  }
+
+  /* the following is the forwarding logic */
+  Partial_Reveal_Object* p_target_obj = collector_forward_object(collector, p_obj);
+  if( p_target_obj == NULL ){
+    if(collector->result == FALSE ){
+      vector_stack_clear(collector->trace_stack);
+      return;
+    }
+    assert(obj_get_fw_in_oi(p_obj));
+    return;
+  }
+  scan_object(collector, p_target_obj);
+  return;
+}
+
 static void trace_object(Collector *collector, REF *p_ref)
 {
   forward_object(collector, p_ref);
@@ -273,6 +304,7 @@
     gc_update_weakref_ignore_finref(gc);
   }
 #endif
+  identify_dead_weak_roots(gc, gc->metadata->weak_roots_pool);
 
   gc_fix_rootset(collector);

Index: include/open/gc.h
===================================================================
--- include/open/gc.h	(revision 567407)
+++ include/open/gc.h	(working copy)
@@ -662,6 +662,8 @@
 VMEXPORT extern void * (*gc_heap_base_address)();
 VMEXPORT extern void * (*gc_heap_ceiling_address)();
 
+extern Boolean (*gc_supports_class_unloading)();
+
 #else // USE_GC_STATIC
 
 //@}
@@ -787,6 +789,7 @@
  */
 GCExport Boolean gc_clear_mutator_block_flag();
+GCExport Boolean gc_supports_class_unloading();
 
 // XXX move this elsewhere -salikh
 #ifdef JNIEXPORT

Index: interpreter/src/interpreter.cpp
===================================================================
--- interpreter/src/interpreter.cpp	(revision 567407)
+++ interpreter/src/interpreter.cpp	(working copy)
@@ -3332,6 +3332,7 @@
 
     // Setup locals and stack on C stack.
     SETUP_LOCALS_AND_STACK(frame, method);
+    frame.This = *(method->get_class()->get_class_handle());
 
     int args = method->get_num_arg_slots();
 
     for(int i = args-1; i >= 0; --i) {

Index: jitrino/src/codegenerator/ia32/Ia32RuntimeInterface.cpp
===================================================================
--- jitrino/src/codegenerator/ia32/Ia32RuntimeInterface.cpp	(revision 567407)
+++ jitrino/src/codegenerator/ia32/Ia32RuntimeInterface.cpp	(working copy)
@@ -67,7 +67,7 @@
 #else
     stackInfo.read(methodDesc, *context->p_eip, isFirst);
     assert(isFirst || (uint32)context->p_eip+4 == context->esp);
-    assert(stackInfo.getStackDepth()==0 || !isFirst);
+    //assert(stackInfo.getStackDepth()==0 || !isFirst);
     return (void *)(context->esp + stackInfo.getStackDepth() + stackInfo.getOffsetOfThis());
 #endif
 }
@@ -143,3 +143,4 @@
 }}; //namespace Ia32
 
+

Index: vmcore/include/classloader.h
===================================================================
--- vmcore/include/classloader.h	(revision 567407)
+++ vmcore/include/classloader.h	(working copy)
@@ -227,9 +227,10 @@
     static ClassLoader* FindByObject( ManagedObject* loader );
     // ppervov: NOTE: LookupLoader has side effect of adding 'loader' to the collection
    VMEXPORT static ClassLoader* LookupLoader( ManagedObject* loader );
-    static void UnloadClassLoader( ManagedObject* loader );
+    static void UnloadClassLoader( ClassLoader* loader );
     static void gc_enumerate();
     static void ClearMarkBits();
+    static void StartUnloading();
     static unsigned GetClassLoaderNumber() { return m_nextEntry; }
     static ClassLoader** GetClassLoaderTable() { return m_table; }
     static void DeleteClassLoaderTable(){

Index: vmcore/include/unloading.h
===================================================================
--- vmcore/include/unloading.h	(revision 567407)
+++ vmcore/include/unloading.h	(working copy)
@@ -23,5 +23,7 @@
 #define __UNLOADING_H__
 
 void class_unloading_clear_mark_bits();
+void class_unloading_start();
 
 #endif
+
Index: vmcore/include/vtable.h
===================================================================
--- vmcore/include/vtable.h	(revision 567407)
+++ vmcore/include/vtable.h	(working copy)
@@ -56,6 +56,8 @@
 
 typedef struct VTable {
     Byte _gc_private_information[GC_BYTES_IN_VTABLE];
+    ManagedObject* jlC;
+    unsigned int vtmark;
     Class* clss;
 
     // See the masks in vm_for_gc.h.

Index: vmcore/src/class_support/classloader.cpp
===================================================================
--- vmcore/src/class_support/classloader.cpp	(revision 567407)
+++ vmcore/src/class_support/classloader.cpp	(working copy)
@@ -464,14 +464,14 @@
 }
 
-void ClassLoader::UnloadClassLoader( ManagedObject* loader )
+void ClassLoader::UnloadClassLoader( ClassLoader* loader )
 {
     LMAutoUnlock aulock( &(ClassLoader::m_tableLock) );
     unsigned i;
     for(i = 0; i < m_nextEntry; i++) {
         ClassLoader* cl = m_table[i];
-        if( loader == cl->m_loader ) break;
+        if( loader == cl ) break;
     }
     if (i == m_nextEntry) return;
     ClassLoader* cl = m_table[i];
@@ -486,11 +486,19 @@
 {
     TRACE2("enumeration", "enumerating classes");
     LMAutoUnlock aulock( &(ClassLoader::m_tableLock) );
-
+
+    Boolean do_class_unloading = gc_supports_class_unloading();
     for(unsigned int i = 0; i < m_nextEntry; i++) {
-        if(m_table[i]->m_loader != NULL) {
+        //assert (m_table[i]->m_loader);
+        if (m_table[i]->m_markBit) {
             vm_enumerate_root_reference((void**)(&(m_table[i]->m_loader)), FALSE);
         }
+        else {
+            if(do_class_unloading)
+                vm_enumerate_weak_root_reference((void**)(&(m_table[i]->m_loader)), FALSE);
+            else
+                vm_enumerate_root_reference((void**)(&(m_table[i]->m_loader)), FALSE);
+        }
     }
 }
@@ -505,11 +513,69 @@
             << m_table[i] << " (" << m_table[i]->m_loader << " : "
             << m_table[i]->m_loader->vt_unsafe()->clss->get_name()->bytes << ") and its classes");
         m_table[i]->m_markBit = 0;
+        Class* c;
+        ReportedClasses::iterator itc;
+        ReportedClasses* RepClasses = m_table[i]->GetReportedClasses();
+        for (itc = RepClasses->begin(); itc != RepClasses->end(); itc++)
+        {
+            c = itc->second;
+            if (c && c->get_vtable()) c->get_vtable()->vtmark = 0;
+        }
+        ClassTable* ct = m_table[i]->GetLoadedClasses();
+        ClassTable::iterator it;
+        for (it = ct->begin(); it != ct->end(); it++)
+        {
+            c = it->second;
+            if (c && c->get_vtable()) c->get_vtable()->vtmark = 0;
+        }
     }
     TRACE2("classloader.unloading.clear", "Finished clearing mark bits");
 }
 
+void ClassLoader::StartUnloading()
+{
+    TRACE2("classloader.unloading.marking", "Finished marking loaders");
+    TRACE2("classloader.unloading.do", "Start checking loaders ready to be unloaded");
+    LMAutoUnlock aulock( &(ClassLoader::m_tableLock) );
+    std::vector<ClassLoader*> unloadinglist;
+    unsigned i;
+    for(i = 0; i < m_nextEntry; i++) {
+        if(m_table[i]->m_loader) {
+            TRACE2("classloader.unloading.debug", "  Skipping live classloader "
+                << m_table[i] << " (" << m_table[i]->m_loader << " : "
+                << ((VTable*)(*(unsigned**)(m_table[i]->m_loader)))->clss->get_name()->bytes << ")");
+            continue;
+        }
+        TRACE2("classloader.unloading.stats", "  Unloading classloader "
+            << m_table[i] << " (" << m_table[i] << ")");
+#ifdef _DEBUG // check that all j.l.Classes inside this j.l.ClassLoader are also unloaded
+        ClassTable* ct = m_table[i]->GetLoadedClasses();
+        ClassTable::iterator it;
+        for (it = ct->begin(); it != ct->end(); it++)
+        {
+            Class* c = it->second;
+            if (*c->get_class_handle())
+            {
+                DIE("FAILED on unloading classloader: \n" << (void*)m_table[i] <<
+                    "live j.l.Class of unloaded class is detected: " << c->get_name()->bytes);
+                assert (false);
+            }
+        }
+#endif
+        unloadinglist.push_back(m_table[i]);
+    }
+    // safely remove classloaders from m_table
+    std::vector<ClassLoader*>::iterator it;
+    for (it = unloadinglist.begin(); it != unloadinglist.end(); it++)
+    {
+        UnloadClassLoader(*it);
+    }
+
+    TRACE2("classloader.unloading.do", "Finished checking loaders");
+}
+
+
 ClassLoader* ClassLoader::AddClassLoader( ManagedObject* loader )
 {
     SuspendDisabledChecker sdc;
@@ -1031,6 +1097,11 @@
 void class_unloading_clear_mark_bits() {
     ClassLoader::ClearMarkBits();
 }
+
+void class_unloading_start() {
+    ClassLoader::StartUnloading();
+}
+
 
 inline void
 BootstrapClassLoader::SetClasspathFromString(char* bcp, apr_pool_t* tmp_pool)
@@ -1849,3 +1920,4 @@
 }
 
+
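With the enumeration changes in this patch, the per-collection lifecycle on the VM side becomes: clear the vtable and loader mark bits before enumeration; enumerate marked loaders strongly and unmarked ones weakly; let the GC null the weak roots whose referents died; then sweep the loader table. A condensed restatement using the entry points this patch adds (include paths assumed; the elided steps are only sketched in comments):

    #include "open/gc.h"     // gc_supports_class_unloading()
    #include "unloading.h"   // class_unloading_clear_mark_bits(), class_unloading_start()

    // Stop-the-world sequence as wired up in stop_the_world_root_set_enum.cpp.
    static void stw_collection_model() {
        if (gc_supports_class_unloading())
            class_unloading_clear_mark_bits();  // reset vtmark + loader mark bits
        // ... enumerate roots: ClassLoader::gc_enumerate() reports unmarked
        //     loaders via vm_enumerate_weak_root_reference() ...
        // ... GC marks, nulls dead weak roots, moves objects ...
        if (gc_supports_class_unloading())
            class_unloading_start();  // sweep loaders whose m_loader is now NULL
    }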
Index: vmcore/src/class_support/Prepare.cpp
===================================================================
--- vmcore/src/class_support/Prepare.cpp	(revision 567407)
+++ vmcore/src/class_support/Prepare.cpp	(working copy)
@@ -905,6 +905,20 @@
     {
         m_is_suitable_for_fast_instanceof = 1;
     }
+
+    Global_Env* env = VM_Global_State::loader_env;
+    if (!env->InBootstrap()){
+        tmn_suspend_disable();
+        vtable->jlC = *get_class_handle();
+        tmn_suspend_enable();
+    }
+    else {
+        // in bootstrap mode jlC is set later, in create_instance_for_class;
+        // the class handle is still NULL at this point
+        assert (!get_class_handle());
+        vtable->jlC = NULL;
+    }
 
     m_vtable = vtable;
 } // Class::create_vtable

Index: vmcore/src/gc/dll_gc.cpp
===================================================================
--- vmcore/src/gc/dll_gc.cpp	(revision 567407)
+++ vmcore/src/gc/dll_gc.cpp	(working copy)
@@ -70,8 +70,8 @@
 static void default_gc_add_weak_root_set_entry(
     Managed_Object_Handle*, Boolean, Boolean);
+static Boolean default_gc_supports_class_unloading();
-
 Boolean (*gc_supports_compressed_references)() = 0;
 void (*gc_add_root_set_entry)(Managed_Object_Handle *ref, Boolean is_pinned) = 0;
 void (*gc_add_weak_root_set_entry)(Managed_Object_Handle *ref, Boolean is_pinned,Boolean is_short_weak) = 0;
@@ -133,6 +133,8 @@
 void (*gc_set_mutator_block_flag)() = 0;
 Boolean (*gc_clear_mutator_block_flag)() = 0;
 
+Boolean (*gc_supports_class_unloading)() = 0;
+
 static apr_dso_handle_sym_t getFunction(apr_dso_handle_t *handle, const char *name, const char *dllName)
 {
     apr_dso_handle_sym_t fn;
@@ -316,6 +318,11 @@
                                 (apr_dso_handle_sym_t)default_gc_write_barrier);
     gc_test_safepoint = (void (*)()) getFunctionOptional(handle, "gc_test_safepoint", dllName,
                                 (apr_dso_handle_sym_t)default_gc_test_safepoint);
+    gc_supports_class_unloading = (Boolean (*)())
+                    getFunctionOptional(handle,
+                                "gc_supports_class_unloading",
+                                dllName,
+                                (apr_dso_handle_sym_t)default_gc_supports_class_unloading);
 } //vm_add_gc
@@ -541,4 +548,9 @@
     // default to strong reference semantics
     gc_add_root_set_entry(root, pinned);
 }
+
+static Boolean default_gc_supports_class_unloading()
+{
+    return TRUE;
+} //default_gc_supports_class_unloading
 #endif // !USE_GC_STATIC
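vtable->jlC is initialized along two paths because bootstrap classes get their vtable before their java.lang.Class instance exists: for ordinary classes, Prepare.cpp stores the class handle while creating the vtable; for bootstrap classes, the field stays NULL until create_instance_for_class() in vm_init.cpp (below) fills it right after AllocateAndReportInstance(). In schematic form (a sketch of the two call paths, not literal code):

    // Path A: ordinary class preparation (Prepare.cpp, Class::create_vtable):
    //     vtable->jlC = *get_class_handle();        // the j.l.Class already exists
    //
    // Path B: bootstrap classes (vm_init.cpp, create_instance_for_class):
    //     AllocateAndReportInstance(env, clss);     // creates the j.l.Class
    //     clss->get_vtable()->jlC = *clss->get_class_handle();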
Index: vmcore/src/gc/root_set_enum_common.cpp
===================================================================
--- vmcore/src/gc/root_set_enum_common.cpp	(revision 567407)
+++ vmcore/src/gc/root_set_enum_common.cpp	(working copy)
@@ -75,6 +75,8 @@
     else {
         vm_enumerate_weak_root_reference((void**)c->get_class_handle(), FALSE);
     }
+    if(c->get_vtable())
+        vm_enumerate_weak_root_reference((void**)&(c->get_vtable()->jlC), FALSE);
 }
 
 static void vm_enumerate_class_static(Class* c)
@@ -110,6 +112,7 @@
 {
     TRACE2("enumeration", "vm_enumerate_static_fields()");
     GlobalClassLoaderIterator ClIterator;
+    Boolean do_class_unloading = gc_supports_class_unloading();
     ClassLoader *cl = ClIterator.first();
     while(cl) {
         GlobalClassLoaderIterator::ClassIterator itc;
@@ -130,7 +133,7 @@
             assert(c);
             if (!cl->IsBootstrap()) {
-                vm_enumerate_jlc(c, true/*enum as weak root*/);
+                vm_enumerate_jlc(c, do_class_unloading/*enumerate as a weak root if the gc supports that*/);
                 vm_enumerate_class_static(c);
             }
         }

Index: vmcore/src/gc/stop_the_world_root_set_enum.cpp
===================================================================
--- vmcore/src/gc/stop_the_world_root_set_enum.cpp	(revision 567407)
+++ vmcore/src/gc/stop_the_world_root_set_enum.cpp	(working copy)
@@ -98,7 +98,7 @@
 
     jvmti_send_gc_start_event();
 
-    class_unloading_clear_mark_bits();
+    if(gc_supports_class_unloading()) class_unloading_clear_mark_bits();
 
     current_vm_thread = p_TLS_vmthread;
     // Run through list of active threads and enumerate each one of them.
@@ -150,6 +150,8 @@
 {
     TRACE2("vm.gc", "vm_resume_threads_after()");
 
+    if(gc_supports_class_unloading()) class_unloading_start();
+
     jvmti_send_gc_finish_event();
 
     jvmti_clean_reclaimed_object_tags();
@@ -222,3 +224,4 @@
 //////////////////////////////////////////////////////////////////////////////
 
+

Index: vmcore/src/init/vm_init.cpp
===================================================================
--- vmcore/src/init/vm_init.cpp	(revision 567407)
+++ vmcore/src/init/vm_init.cpp	(working copy)
@@ -110,6 +110,13 @@
 void create_instance_for_class(Global_Env * vm_env, Class *clss)
 {
     clss->get_class_loader()->AllocateAndReportInstance(vm_env, clss);
+    // set jlC in the vtable; for non-bootstrap classes jlC is set in create_vtable
+    if (clss->get_vtable()) // vtable == NULL for interfaces
+    {
+        assert (!clss->get_vtable()->jlC); // this path is used for bootstrap classes only
+        clss->get_vtable()->jlC = *clss->get_class_handle();
+        assert (!clss->get_class_loader()->GetLoader());
+    }
 } //create_instance_for_class
 
 /**