diff -BburN svn/gc_gen/src/common/gc_for_class.cpp vm.new/gc_gen/src/common/gc_for_class.cpp
--- svn/gc_gen/src/common/gc_for_class.cpp	2007-07-24 01:11:59.000000000 +0800
+++ vm.new/gc_gen/src/common/gc_for_class.cpp	2007-07-24 00:58:56.000000000 +0800
@@ -138,7 +138,7 @@
   }
 
   gcvt_size = (gcvt_size + GCVT_ALIGN_MASK) & ~GCVT_ALIGN_MASK;
-  gcvt = (GC_VTable_Info*) malloc(gcvt_size);
+  gcvt = (GC_VTable_Info*) class_alloc_via_classloader(ch, gcvt_size);
   assert(gcvt);
   assert(!((POINTER_SIZE_INT)gcvt % GCVT_ALIGNMENT));

diff -BburN svn/gc_gen/src/common/gc_for_class.h vm.new/gc_gen/src/common/gc_for_class.h
--- svn/gc_gen/src/common/gc_for_class.h	2007-07-24 01:11:59.000000000 +0800
+++ vm.new/gc_gen/src/common/gc_for_class.h	2007-07-24 00:58:56.000000000 +0800
@@ -116,9 +116,18 @@
 } GC_VTable_Info;
 
+enum VT_Mark_Status {
+  VT_UNMARKED = 0,
+  VT_MARKED = 0x1,
+  VT_FALLBACK_MARKED = 0x2
+};
+
+struct Partial_Reveal_Object;
+
 typedef struct Partial_Reveal_VTable {
   //--Fixme: emt64
   GC_VTable_Info *gcvt;
+  Partial_Reveal_Object* jlC;
+  unsigned int vtmark;
 } Partial_Reveal_VTable;
 
 typedef struct Partial_Reveal_Object {

diff -BburN svn/gc_gen/src/common/mark_scan_pool.cpp vm.new/gc_gen/src/common/mark_scan_pool.cpp
--- svn/gc_gen/src/common/mark_scan_pool.cpp	2007-07-24 01:11:59.000000000 +0800
+++ vm.new/gc_gen/src/common/mark_scan_pool.cpp	2007-07-24 00:58:56.000000000 +0800
@@ -37,6 +37,14 @@
 static FORCE_INLINE void scan_object(Collector* collector, Partial_Reveal_Object *p_obj)
 {
   vm_notify_obj_alive( (void *)p_obj);
+
+  Partial_Reveal_VTable *vtable = obj_get_vt(p_obj);
+  if(vtable->vtmark == VT_UNMARKED) {
+    vtable->vtmark = VT_MARKED;
+    if(obj_mark_in_vt(vtable->jlC))
+      collector_tracestack_push(collector, vtable->jlC);
+  }
+
   if( !object_has_ref_field(p_obj) ) return;
 
   REF *p_ref;

diff -BburN svn/gc_gen/src/mark_compact/fallback_mark_scan.cpp vm.new/gc_gen/src/mark_compact/fallback_mark_scan.cpp
--- svn/gc_gen/src/mark_compact/fallback_mark_scan.cpp	2007-07-24 01:11:59.000000000 +0800
+++ vm.new/gc_gen/src/mark_compact/fallback_mark_scan.cpp	2007-07-24 00:58:56.000000000 +0800
@@ -36,6 +36,13 @@
   Partial_Reveal_Object *p_obj = read_slot(p_ref);
   assert(p_obj);
 
+  Partial_Reveal_VTable *vtable = obj_get_vt(p_obj);
+  if(!(vtable->vtmark & VT_FALLBACK_MARKED)) {
+    vtable->vtmark |= VT_FALLBACK_MARKED;  // fallback compaction needs its own mark bit
+    collector_tracestack_push(collector, &(vtable->jlC));
+    // 64-bit consideration is needed here: vtable->jlC is an uncompressed reference
+  }
+
   if(obj_belongs_to_nos(p_obj) && obj_is_fw_in_oi(p_obj)){
     assert(obj_get_vt(p_obj) == obj_get_vt(obj_get_fw_in_oi(p_obj)));
     p_obj = obj_get_fw_in_oi(p_obj);

diff -BburN svn/gc_gen/src/mark_compact/los_extention_mark_scan.cpp vm.new/gc_gen/src/mark_compact/los_extention_mark_scan.cpp
--- svn/gc_gen/src/mark_compact/los_extention_mark_scan.cpp	2007-07-24 01:11:59.000000000 +0800
+++ vm.new/gc_gen/src/mark_compact/los_extention_mark_scan.cpp	2007-07-24 00:58:56.000000000 +0800
@@ -44,6 +44,13 @@
 static FORCE_INLINE void scan_object(Collector* collector, Partial_Reveal_Object *p_obj)
 {
   vm_notify_obj_alive( (void *)p_obj);
+
+  Partial_Reveal_VTable *vtable = obj_get_vt(p_obj);
+  if(vtable->vtmark == VT_UNMARKED) {
+    vtable->vtmark = VT_MARKED;
+    if(obj_mark_in_vt(vtable->jlC))
+      collector_tracestack_push(collector, vtable->jlC);
+  }
+
   if( !object_has_ref_field(p_obj) ) return;
 
   REF *p_ref;
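The scan_object()/trace_object() hunks above all repeat one pattern: on the first visit to a vtable in this collection, set vtmark and treat the class's java.lang.Class instance (vtable->jlC) as a reachable object, so a class object stays live as long as any instance of its class is live. The following is a minimal self-contained sketch of that pattern; the types and helpers are simplified stand-ins for the gc_gen ones, not the real API.

/* Model of the scan-time vtable-marking pattern: every live object marks
 * its class's java.lang.Class instance exactly once per collection. */
#include <vector>

struct Object;

struct VTable {
  Object*  jlC;     /* the java.lang.Class instance for this class   */
  unsigned vtmark;  /* VT_UNMARKED / VT_MARKED, reset every GC cycle */
};

struct Object {
  VTable* vt;
  bool    marked;
};

enum { VT_UNMARKED = 0, VT_MARKED = 1 };

static void scan_object(std::vector<Object*>& trace_stack, Object* obj)
{
  VTable* vt = obj->vt;
  if(vt->vtmark == VT_UNMARKED) {
    vt->vtmark = VT_MARKED;
    if(!vt->jlC->marked) {            /* plays the role of obj_mark_in_vt() */
      vt->jlC->marked = true;
      trace_stack.push_back(vt->jlC); /* plays the role of collector_tracestack_push() */
    }
  }
  /* ...then scan the object's reference fields as before... */
}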
diff -BburN svn/gc_gen/src/mark_sweep/sspace_mark.cpp vm.new/gc_gen/src/mark_sweep/sspace_mark.cpp
--- svn/gc_gen/src/mark_sweep/sspace_mark.cpp	2007-07-24 01:11:59.000000000 +0800
+++ vm.new/gc_gen/src/mark_sweep/sspace_mark.cpp	2007-07-24 00:58:56.000000000 +0800
@@ -32,6 +32,12 @@
 static FORCE_INLINE void scan_object(Collector *collector, Partial_Reveal_Object *p_obj)
 {
   if(!object_has_ref_field(p_obj)) return;
 
+  Partial_Reveal_VTable *vtable = obj_get_vt(p_obj);
+  if(vtable->vtmark == VT_UNMARKED) {
+    vtable->vtmark = VT_MARKED;
+    if(obj_mark_in_table(vtable->jlC))
+      collector_tracestack_push(collector, vtable->jlC);
+  }
 
   REF *p_ref;

diff -BburN svn/gc_gen/src/trace_forward/fspace_gen_forward_pool.cpp vm.new/gc_gen/src/trace_forward/fspace_gen_forward_pool.cpp
--- svn/gc_gen/src/trace_forward/fspace_gen_forward_pool.cpp	2007-07-24 01:11:59.000000000 +0800
+++ vm.new/gc_gen/src/trace_forward/fspace_gen_forward_pool.cpp	2007-07-24 00:58:56.000000000 +0800
@@ -44,8 +44,14 @@
   return;
 }
 
+static void trace_forwarded_jlC_from_vtable(Collector* collector, Partial_Reveal_Object *p_obj);
+
 static FORCE_INLINE void scan_object(Collector* collector, Partial_Reveal_Object *p_obj)
 {
+  Partial_Reveal_VTable *vtable = obj_get_vt(p_obj);
+  if(vtable->vtmark == VT_UNMARKED) {
+    vtable->vtmark = VT_MARKED;
+    trace_forwarded_jlC_from_vtable(collector, vtable->jlC);
+  }
+
   if (!object_has_ref_field(p_obj)) return;
 
   REF *p_ref;
@@ -147,6 +153,41 @@
   return;
 }
 
+/* Forward vtable->jlC and trace the forwarded copy, but do not update
+ * vtable->jlC itself; that is deferred to weak-root updating.
+ * This function would be unnecessary if class unloading were not performed
+ * in copying collections: every vtable->jlC could then be treated as a
+ * strong root in this algorithm. */
+static FORCE_INLINE void trace_forwarded_jlC_from_vtable(Collector* collector, Partial_Reveal_Object *p_obj)
+{
+  Space* space = collector->collect_space;
+
+  if(!obj_belongs_to_nos(p_obj)) return;
+
+  /* fast path: the object has already been forwarded */
+  if(obj_is_fw_in_oi(p_obj)) return;
+
+  if(NOS_PARTIAL_FORWARD && !fspace_object_to_be_forwarded(p_obj, (Fspace*)space)) {
+    assert(!obj_is_fw_in_oi(p_obj));
+    if(obj_mark_in_oi(p_obj))
+      scan_object(collector, p_obj);
+    return;
+  }
+
+  /* the forwarding logic follows */
+  Partial_Reveal_Object* p_target_obj = collector_forward_object(collector, p_obj);
+  if( p_target_obj == NULL ){
+    if(collector->result == FALSE ){
+      /* forwarding failed; return to the collection controller */
+      vector_stack_clear(collector->trace_stack);
+      return;
+    }
+    assert(obj_get_fw_in_oi(p_obj));
+    return;
+  }
+  scan_object(collector, p_target_obj);
+  return;
+}
+
 static void trace_object(Collector *collector, REF *p_ref)
 {
   forward_object(collector, p_ref);
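Note that trace_forwarded_jlC_from_vtable() above forwards the java.lang.Class object but deliberately leaves the stale from-space address in vtable->jlC, deferring the fixup to weak-root updating. The sketch below models what such a deferred fixup pass could look like; it assumes a forwarding pointer reachable from the old copy, and the names are illustrative, not the gc_gen API.

/* Model of a deferred weak-root fixup for vtable->jlC. During tracing the
 * class object was copied and a forwarding pointer left in the old copy;
 * only after all forwarding decisions are final is each slot rewritten. */
#include <vector>

struct Object;
struct VTable { Object* jlC; unsigned vtmark; };
struct Object { VTable* vt; Object* fw; /* forwarding pointer, or NULL */ };

static void update_jlC_weak_roots(std::vector<VTable*>& all_vtables)
{
  for(VTable* vt : all_vtables) {
    if(vt->jlC && vt->jlC->fw)
      vt->jlC = vt->jlC->fw;  /* swing the slot to the to-space copy */
  }
}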
diff -BburN svn/gc_gen/src/trace_forward/fspace_nongen_forward_pool.cpp vm.new/gc_gen/src/trace_forward/fspace_nongen_forward_pool.cpp
--- svn/gc_gen/src/trace_forward/fspace_nongen_forward_pool.cpp	2007-07-24 01:11:59.000000000 +0800
+++ vm.new/gc_gen/src/trace_forward/fspace_nongen_forward_pool.cpp	2007-07-24 00:58:56.000000000 +0800
@@ -35,8 +35,15 @@
   return;
 }
 
+static void trace_forwarded_jlC_from_vtable(Collector* collector, Partial_Reveal_Object *p_obj);
+
 static FORCE_INLINE void scan_object(Collector* collector, Partial_Reveal_Object *p_obj)
 {
+  Partial_Reveal_VTable *vtable = obj_get_vt(p_obj);
+  if(vtable->vtmark == VT_UNMARKED) {
+    vtable->vtmark = VT_MARKED;
+    trace_forwarded_jlC_from_vtable(collector, vtable->jlC);
+  }
+
   if (!object_has_ref_field_before_scan(p_obj)) return;
 
   REF *p_ref;
@@ -118,6 +125,30 @@
   return;
 }
 
+/* Forward vtable->jlC and trace the forwarded copy, but do not update
+ * vtable->jlC itself; that is deferred to weak-root updating.
+ * This function would be unnecessary if class unloading were not performed
+ * in copying collections: every vtable->jlC could then be treated as a
+ * strong root in this algorithm. */
+static FORCE_INLINE void trace_forwarded_jlC_from_vtable(Collector* collector, Partial_Reveal_Object *p_obj)
+{
+  if(!obj_belongs_to_nos(p_obj)){
+    if(obj_mark_in_oi(p_obj))
+      scan_object(collector, p_obj);
+    return;
+  }
+
+  /* the forwarding logic follows */
+  Partial_Reveal_Object* p_target_obj = collector_forward_object(collector, p_obj);
+  if( p_target_obj == NULL ){
+    if(collector->result == FALSE ){
+      /* forwarding failed; return to the collection controller */
+      vector_stack_clear(collector->trace_stack);
+      return;
+    }
+    assert(obj_get_fw_in_oi(p_obj));
+    return;
+  }
+  scan_object(collector, p_target_obj);
+  return;
+}
+
 static void trace_object(Collector *collector, REF *p_ref)
 {
   forward_object(collector, p_ref);
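The patch shows only the marking half of the scheme. Presumably, after the mark phase something must treat vtables left VT_UNMARKED (no reachable instances) as unload candidates and reset vtmark before the next collection; the gc_for_class.cpp hunk, which allocates the GCVT via class_alloc_via_classloader() instead of malloc(), apparently ties GCVT storage to the class loader's lifetime for the same reason, so it can be reclaimed on unload. A hypothetical post-mark sweep, again a model rather than code from this patch:

/* Vtables never marked in this cycle belong to classes with no reachable
 * instances: unload candidates, subject to the defining loader also being
 * dead. vtmark is reset either way, ready for the next collection. */
#include <vector>

struct Object;
struct VTable { Object* jlC; unsigned vtmark; };
enum { VT_UNMARKED = 0 };

static void sweep_class_vtables(std::vector<VTable*>& all_vtables,
                                std::vector<VTable*>& unload_candidates)
{
  for(VTable* vt : all_vtables) {
    if(vt->vtmark == VT_UNMARKED)
      unload_candidates.push_back(vt);
    vt->vtmark = VT_UNMARKED;  /* reset for the next GC cycle */
  }
}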