diff --git a/gcv4/src/gc_header.h b/gcv4/src/gc_header.h index f2e91c4..70a34a9 100644 --- a/gcv4/src/gc_header.h +++ b/gcv4/src/gc_header.h @@ -80,6 +80,7 @@ public: return (struct Partial_Reveal_VTable *) (vt_offset + vtable_base); } struct Partial_Reveal_VTable *vt() { assert(vt_offset); return (struct Partial_Reveal_VTable *) (vt_offset + vtable_base); } + struct Partial_Reveal_Object *vtobject() { return (Partial_Reveal_Object *)(vt_offset + heap_base); } void set_vtable(Allocation_Handle ah) { // vtables are allocated from a fixed-size pool in the VM @@ -142,6 +143,7 @@ public: struct Partial_Reveal_VTable *vtraw() { return vt_raw; } struct Partial_Reveal_VTable *vt() { ASSERT(vt_raw, "incorrect object at " << this); return vt_raw; } + struct Partial_Reveal_Object *vtobject() { return (Partial_Reveal_Object *)((POINTER_SIZE_INT)vt_raw - get_header_size()); } void set_vtable(Allocation_Handle ah) { vt_raw = (struct Partial_Reveal_VTable *)ah; } struct Partial_Reveal_Object *get_forwarding_pointer() { @@ -176,6 +178,7 @@ #endif // !USE_COMPRESSED_VTABLE_POINTER static POINTER_SIZE_INT vtable_base; static POINTER_SIZE_INT heap_base; + static unsigned get_header_size(){ return sizeof(Partial_Reveal_Object);} public: inline const char* class_name(); diff --git a/gcv4/src/mark_scan.cpp b/gcv4/src/mark_scan.cpp index 5da0672..cf037dd 100644 --- a/gcv4/src/mark_scan.cpp +++ b/gcv4/src/mark_scan.cpp @@ -98,6 +98,12 @@ scan_object(Partial_Reveal_Object *p_obj // Object had better be marked. 
assert(is_object_marked(p_obj) == true); + // Scan VTable object + Slot slot(NULL); + Partial_Reveal_Object* p_vtobj = p_obj->vtobject(); + slot.set((void*)&p_vtobj, false); + scan_slot(slot, gc_thread); + if (!p_obj->has_slots()) { gc_trace(p_obj, " scan_object(): object doesn't contain slots"); return; @@ -110,7 +116,6 @@ scan_object(Partial_Reveal_Object *p_obj } int *offset_scanner = init_strong_object_scanner(p_obj); - Slot slot(NULL); while (true) { void *ref = p_get_ref(offset_scanner, p_obj); diff --git a/include/jit_import_rt.h b/include/jit_import_rt.h index cc8ee35..4b1b104 100644 --- a/include/jit_import_rt.h +++ b/include/jit_import_rt.h @@ -46,6 +46,7 @@ #endif // collection. VMEXPORT void vm_enumerate_root_reference(Managed_Object_Handle *ref, Boolean is_pinned); +VMEXPORT void vm_enumerate_weakroot_reference(Managed_Object_Handle *ref, Boolean is_pinned); // Resembles vm_enumerate_root_reference() but is passed the address of // a slot containing a compressed reference. VMEXPORT void vm_enumerate_compressed_root_reference(uint32 *ref, Boolean is_pinned); diff --git a/interpreter/src/interpreter.cpp b/interpreter/src/interpreter.cpp index 21971ac..095fda7 100644 --- a/interpreter/src/interpreter.cpp +++ b/interpreter/src/interpreter.cpp @@ -3265,7 +3265,8 @@ interpreterInvokeStatic(StackFrame& prev // Setup locals and stack on C stack. SETUP_LOCALS_AND_STACK(frame, method); - + + frame.This = *(method->get_class()->class_handle); int args = method->get_num_arg_bytes() >> 2; for(int i = args-1; i >= 0; --i) { diff --git a/vmcore/include/Class.h b/vmcore/include/Class.h index ecdcb7a..3293a04 100644 --- a/vmcore/include/Class.h +++ b/vmcore/include/Class.h @@ -383,7 +383,14 @@ #endif // POINTER64 #define GC_BYTES_IN_VTABLE (sizeof(void *)) - +#if defined _IPF_ || defined _EM64T_ // CHECK ON THAT MODE!!! 
+ #define VTABLE_FIELDS_NUMBER 12 // number of 32bit fields in struct VTable - 11.5 jlong fields actually +#else // _IA32_ + #define VTABLE_FIELDS_NUMBER 12 // number of 32bit fields in struct VTable +#endif +#define JLC_REF_NUMBER 3 //order of jlc field inside vtable class + +// update VTABLE_FIELDS_NUMBER when you change struct VTable!!! typedef struct VTable { Byte _gc_private_information[GC_BYTES_IN_VTABLE]; @@ -395,6 +402,7 @@ typedef struct VTable { uint32 class_properties; + ManagedObject *jlC; // reference inside object is ManagedObject * physically // Offset from the top by CLASS_ALLOCATED_SIZE_OFFSET // The number of bytes allocated for this object. It is the same as // instance_data_size with the constraint bit cleared. This includes @@ -414,7 +422,11 @@ typedef struct VTable { #define VTABLE_OVERHEAD (sizeof(VTable) - sizeof(void *)) // The "- sizeof(void *)" part subtracts out the "unsigned char *methods[1]" contribution. -VTable *create_vtable(Class *p_class, unsigned n_vtable_entries); +VTable *create_vtable(Class *p_class, void* vt_ptr = NULL); +void *create_vtable_sequence(Class *p_class); + +unsigned get_vtable_fields_number(Class* clss); +size_t get_vtable_size(Class* clss); } // extern "C" @@ -506,6 +518,8 @@ typedef struct Class { ClassLoader* class_loader; + ManagedObject* vtObj; // TEMPORARY - used for enumeration + getting vtObj from class, change to (vtable-8) + // // Does it represent a primitive type? // @@ -538,6 +552,10 @@ typedef struct Class { unsigned is_verified : 2; // + // Is this class sythetic VTable class + unsigned is_VT_class : 1; + + // // Can instances of this class be allocated using a fast inline sequence containing // no calls to other routines. 
// @@ -753,9 +771,6 @@ #endif // Required for fast enumeration of error objects Const_Pool* m_failedResolution; - // struct Class accessibility - unsigned m_markBit:1; - // verify data void *verify_data; @@ -841,7 +856,7 @@ const String* class_extract_name(Global_ // // preparation phase of class loading // -bool class_prepare(Global_Env* env, Class *clss); +bool class_prepare(Global_Env* env, Class *clss, void* p_vt_container = NULL); diff --git a/vmcore/include/classloader.h b/vmcore/include/classloader.h index 331be8c..46aee23 100644 --- a/vmcore/include/classloader.h +++ b/vmcore/include/classloader.h @@ -160,7 +160,7 @@ public: private: class FailedClasses : public MapEx {}; class LoadingClasses : public MapEx {}; - class ReportedClasses : public MapEx {}; + class ReportedClasses : public MapEx {}; class JavaTypes : public MapEx {}; @@ -169,7 +169,7 @@ public: ClassLoader() : m_loader(NULL), m_parent(NULL), m_package_table(NULL), m_loadedClasses(NULL), m_failedClasses(NULL), m_loadingClasses(NULL), m_reportedClasses(NULL), m_javaTypes(NULL), m_nativeLibraries(NULL), - m_markBit(0), m_unloading(false), m_fullSize(0), m_verifyData(NULL) + m_markBit(false), m_fullSize(0), m_verifyData(NULL) { apr_pool_create(&pool, 0); } @@ -185,16 +185,20 @@ public: Class** klass = m_loadedClasses->Lookup(name); return klass?*klass:NULL; } - void InsertClass(Class* clss) { - LMAutoUnlock aulock(&m_lock); - m_loadedClasses->Insert(clss->name, clss); - m_initiatedClasses->Insert(clss->name, clss); + void RemoveFromReported(const String* name){ + Class** clss = m_reportedClasses->Lookup(name); + if(clss) { + m_reportedClasses->Remove(name); + } } - Class* AllocateAndReportInstance(const Global_Env* env, Class* klass); + + bool InsertClass(Class* clss); + Class* AllocateAndReportInstance(const Global_Env* env, Class* klass, bool b_report_class_handle = true); Class* NewClass(const Global_Env* env, const String* name); Class* DefineClass(Global_Env* env, const char* class_name, 
uint8* bytecode, unsigned offset, unsigned length, const String** res_name = NULL); Class* LoadClass( Global_Env* UNREF env, const String* UNREF name); + Class* CreateVTableClass(String*, int); Class* LoadVerifyAndPrepareClass( Global_Env* env, const String* name); virtual void ReportException(const char* exn_name, std::stringstream& message_stream); virtual void ReportFailedClass(Class* klass, const char* exnclass, std::stringstream& exnmsg); @@ -228,8 +232,15 @@ protected: void FailedLoadingClass(const String* className); public: - bool IsBootstrap() { return m_loader == NULL; } - void Mark() { m_markBit = 1; } + bool IsBootstrap() { +#ifdef _DEBUG + if (m_loader){ + assert (this != (const ClassLoader*)VM_Global_State::loader_env->bootstrap_class_loader); + } +#endif + return m_loader == NULL; + } + void Mark() { m_markBit = true; } bool NotMarked() { return (m_markBit == 0); } unsigned GetFullSize(); ManagedObject* GetLoader() { return m_loader; } @@ -247,7 +258,7 @@ public: static ClassLoader* FindByObject( ManagedObject* loader ); // ppervov: NOTE: LookupLoader has side effect of adding 'loader' to the collection VMEXPORT static ClassLoader* LookupLoader( ManagedObject* loader ); - static void UnloadClassLoader( ManagedObject* loader ); + static void UnloadClassLoader( ClassLoader* loader ); static void gc_enumerate(); static void ClearMarkBits(); static void StartUnloading(); @@ -273,9 +284,6 @@ public: inline void* CodeAlloc(size_t size, size_t alignment, Code_Allocation_Action action) { return CodeMemoryManager->alloc(size, alignment, action); } - inline void* VTableAlloc(size_t size, size_t alignment, Code_Allocation_Action action) { - return VM_Global_State::loader_env->VTableMemoryManager->alloc(size, alignment, action); - } private: static Lock_Manager m_tableLock; @@ -295,13 +303,14 @@ protected: ClassTable* m_initiatedClasses; FailedClasses* m_failedClasses; LoadingClasses* m_loadingClasses; - ReportedClasses* m_reportedClasses; + // m_reportedClasses 
used to store classes for the moment jlc is created for some class and before it is placed to m_loaded_classes + ReportedClasses* m_reportedClasses; JavaTypes* m_javaTypes; NativeLibraryList m_nativeLibraries; Lock_Manager m_lock; Lock_Manager m_types_cache_lock; - unsigned m_markBit:1; - unsigned m_unloading; + // m_markBit=true forces strong j.l.Classloader enumeration, m_markBit=false -- weak enumeration + bool m_markBit; unsigned m_fullSize; void* m_verifyData; apr_pool_t* pool; diff --git a/vmcore/include/environment.h b/vmcore/include/environment.h index d6ff8fc..44b15b3 100644 --- a/vmcore/include/environment.h +++ b/vmcore/include/environment.h @@ -52,8 +52,6 @@ struct Global_Env { DynamicCode* dcList; Assertion_Registry* assert_reg; PoolManager* GlobalCodeMemoryManager; - PoolManager* VTableMemoryManager; - Method_Lookup_Table* vm_methods; hythread_library_t hythread_lib; String_Pool string_pool; // string table @@ -88,6 +86,7 @@ struct Global_Env { String* VoidBooleanDescriptor_String; String* Clonable_String; String* Serializable_String; + String* VTableBaseObject_String; String* JavaLangReflectMethod_String; String* JavaLangNullPointerException_String; @@ -134,6 +133,7 @@ struct Global_Env { Class* JavaLangObject_Class; Class* JavaLangString_Class; Class* JavaLangClass_Class; + Class* VTableVTable_Class; Class* java_lang_Throwable_Class; Class* java_lang_StackTraceElement_Class; diff --git a/vmcore/include/object_layout.h b/vmcore/include/object_layout.h index c620670..531fa8a 100644 --- a/vmcore/include/object_layout.h +++ b/vmcore/include/object_layout.h @@ -57,9 +57,16 @@ typedef struct ManagedObjectCompressedVt VTable *vt() { assert(vt_offset); return vt_unsafe(); } uint32 get_obj_info() { return obj_info; } void set_obj_info(uint32 value) { obj_info = value; } + void set_vtable_unsafe(VTable* vt_ptr) { + vt_offset = (uint32)((POINTER_SIZE_INT)vt_ptr - vm_get_vtable_base()); + } static VTable *allocation_handle_to_vtable(Allocation_Handle ah) { 
return (VTable *) ((POINTER_SIZE_INT)ah + vm_get_vtable_base()); } + static Allocation_Handle vtable_to_allocation_handle(VTable *vt) { + assert(vt); + return (Allocation_Handle) ((POINTER_SIZE_INT)vt - vm_get_vtable_base()); + } static unsigned header_offset() { return sizeof(uint32); } static size_t get_size() { return sizeof(ManagedObjectCompressedVtablePtr); } static bool are_vtable_pointers_compressed() { return TRUE; } @@ -72,15 +79,40 @@ typedef struct ManagedObjectUncompressed VTable *vt() { assert(vt_raw); return vt_unsafe(); } uint32 get_obj_info() { return (uint32) obj_info; } void set_obj_info(uint32 value) { obj_info = (uint32) value; } + void set_vtable_unsafe(VTable* vt_ptr) { + assert(vt_ptr); + vt_raw = vt_ptr; + } static VTable *allocation_handle_to_vtable(Allocation_Handle ah) { + assert (ah); return (VTable *) ah; } + static Allocation_Handle vtable_to_allocation_handle(VTable *vt) { + assert(vt); + return (Allocation_Handle)vt; + } static unsigned header_offset() { return sizeof(VTable *); } static size_t get_size() { return sizeof(ManagedObjectUncompressedVtablePtr); } static bool are_vtable_pointers_compressed() { return FALSE; } } ManagedObjectUncompressedVtablePtr; typedef struct ManagedObject { + static unsigned get_header_size(){ + if (are_vtable_pointers_compressed()) + return sizeof(ManagedObjectCompressedVtablePtr); + else + return sizeof(ManagedObjectUncompressedVtablePtr); + } + // get pointer to object data area + void *get_data_ptr() { + return (VTable *)((POINTER_SIZE_INT)this + get_header_size()); + } + void set_vtable_unsafe(VTable* vt_ptr) { + if (are_vtable_pointers_compressed()) + ((ManagedObjectCompressedVtablePtr *)this)->set_vtable_unsafe(vt_ptr); + else + ((ManagedObjectUncompressedVtablePtr *)this)->set_vtable_unsafe(vt_ptr); + } VTable *vt_unsafe() { if (are_vtable_pointers_compressed()) return ((ManagedObjectCompressedVtablePtr *)this)->vt_unsafe(); @@ -114,6 +146,10 @@ typedef struct ManagedObject { return 
are_vtable_pointers_compressed() ? ManagedObjectCompressedVtablePtr::allocation_handle_to_vtable(ah) : ManagedObjectUncompressedVtablePtr::allocation_handle_to_vtable(ah); } + static Allocation_Handle vtable_to_allocation_handle(VTable *vt) { + return are_vtable_pointers_compressed() ? + ManagedObjectCompressedVtablePtr::vtable_to_allocation_handle(vt) : ManagedObjectUncompressedVtablePtr::vtable_to_allocation_handle(vt); + } static unsigned header_offset() { return are_vtable_pointers_compressed() ? ManagedObjectCompressedVtablePtr::header_offset() : ManagedObjectUncompressedVtablePtr::header_offset(); diff --git a/vmcore/src/class_support/C_Interface.cpp b/vmcore/src/class_support/C_Interface.cpp index 5038469..091cd29 100644 --- a/vmcore/src/class_support/C_Interface.cpp +++ b/vmcore/src/class_support/C_Interface.cpp @@ -2388,16 +2388,7 @@ Boolean vm_vtable_pointers_are_compresse Class_Handle allocation_handle_get_class(Allocation_Handle ah) { assert(ah); - VTable *vt; - - if (vm_vtable_pointers_are_compressed()) - { - vt = (VTable *) ((POINTER_SIZE_INT)ah + vm_get_vtable_base()); - } - else - { - vt = (VTable *) ah; - } + VTable *vt = ManagedObject::allocation_handle_to_vtable(ah); return (Class_Handle) vt->clss; } diff --git a/vmcore/src/class_support/Class.cpp b/vmcore/src/class_support/Class.cpp index 85e97ef..2fd8f6c 100644 --- a/vmcore/src/class_support/Class.cpp +++ b/vmcore/src/class_support/Class.cpp @@ -31,7 +31,10 @@ #include "exceptions.h" #include "compile.h" #include "open/gc.h" #include "nogc.h" +#include "heap.h" #include "classloader.h" +#include "method_lookup.h" +#include "interpreter_imports.h" // 20020923 Total number of allocations and total number of bytes for class-related data structures. // This includes any rounding added to make each item aligned (current alignment is to the next 16 byte boundary). 
@@ -626,25 +629,134 @@ void* code_alloc(Class* p_class, size_t return p_class->class_loader->CodeAlloc(size, alignment, action); } +#define VTABLE_TAIL_STRING "$$$VT" +/* + * create_vtable_sequence precreates vtable for class + * p_class parameter is class for which vtable is created +*/ +void* create_vtable_sequence(Class *p_class) +{ + Global_Env* env = VM_Global_State::loader_env; + // create local handles to enumerate vtable_for_VT and VT_for_object objects + // before we set them to clss->allocation_handle and enumerate them via class + tmn_suspend_disable(); + ObjectHandle vtVTableHandle = oh_allocate_local_handle(); + assert (vtVTableHandle); + // create VTable for VT + // VTable for VT class is equial to env->VTableVTable_Class as having the same size + vtVTableHandle->object = (ManagedObject*)gc_alloc(env->VTableVTable_Class->instance_data_size, + env->VTableVTable_Class->allocation_handle, vm_get_gc_thread_local()); + tmn_suspend_enable(); -VTable *create_vtable(Class *p_class, unsigned n_vtable_entries) -{ - unsigned vtable_size = VTABLE_OVERHEAD + n_vtable_entries * sizeof(void *); + if (vtVTableHandle->object == NULL) + { + assert (false); + REPORT_FAILED_CLASS_CLASS(env->VTableVTable_Class->class_loader, env->VTableVTable_Class, + "java/lang/OutOfMemoryError", + "Failed to create vtable_sequence for class " << env->VTableVTable_Class->name->bytes); + return NULL; + } + gc_pin_object((Managed_Object_Handle*)vtVTableHandle); + memset(vtVTableHandle->object->get_data_ptr(), 0, VTABLE_OVERHEAD); + + // Create VT for class + unsigned vt_clss_name_len = p_class->name->len + strlen(VTABLE_TAIL_STRING); + char* buf = (char*) STD_ALLOCA(vt_clss_name_len + 1); + sprintf(buf, "%s%s", p_class->name->bytes, VTABLE_TAIL_STRING); + buf[vt_clss_name_len] = 0; + String* vt_clss_name = env->string_pool.lookup(buf, vt_clss_name_len); + Class* VT_clss = p_class->class_loader->CreateVTableClass(vt_clss_name, get_vtable_fields_number(p_class)); + if (VT_clss == NULL){ + 
assert (false); + REPORT_FAILED_CLASS_CLASS(VT_clss->class_loader, VT_clss, + "java/lang/OutOfMemoryError", + "Failed to create vtable class " << vt_clss_name->bytes); + return NULL; + } + if(!class_prepare(env, VT_clss, vtVTableHandle->object->get_data_ptr())) { + assert (false); + return NULL; + } + + int vt_size = get_vtable_size(p_class); + assert(vt_size + ManagedObject::get_header_size() == VT_clss->instance_data_size); + + tmn_suspend_disable(); + ObjectHandle vtVTHandle = oh_allocate_local_handle(); + assert (vtVTHandle); + vtVTHandle->object = (ManagedObject*)gc_alloc(VT_clss->instance_data_size, + VT_clss->allocation_handle, vm_get_gc_thread_local()); + tmn_suspend_enable(); - // Always allocate vtable data from vtable_data_pool - void *p_gc_hdr = p_class->class_loader->VTableAlloc(vtable_size, 16, CAA_Allocate); + if (vtVTHandle->object == NULL) + { + assert (false); + REPORT_FAILED_CLASS_CLASS(VT_clss->class_loader, VT_clss, + "java/lang/OutOfMemoryError", + "Failed to create vtable_sequence for class " << VT_clss->name->bytes); + return NULL; + } + gc_pin_object((Managed_Object_Handle*)vtVTHandle); + + memset(vtVTHandle->object->get_data_ptr(), 0, vt_size); + p_class->vtObj = vtVTHandle->object; // TEMPORARY - use allocation_handle instead!!! 
will be set later into allocation_handle + + // SET reference to VTable object in jlc: jlc.classVTInstance = vtVT + // skip BS classses as VTable is always enumerated for them + if (!p_class->class_loader->IsBootstrap()) + { + assert (env->JavaLangClass_Class); + assert (p_class->class_handle); + JNIEnv *jenv = get_jni_native_intf(); + tmn_suspend_disable(); + Field *field = class_lookup_field_recursive(env->JavaLangClass_Class, "classVTInstance", "Ljava/lang/Object;"); + assert (field); + Byte *java_ref = (Byte *)*p_class->class_handle; + ManagedObject **field_addr = (ManagedObject **)(java_ref + field->get_offset()); + STORE_REFERENCE((ManagedObject *)java_ref, field_addr, vtVTHandle->object); + tmn_suspend_enable(); + } #ifdef VM_STATS - // For allocation statistics, include any rounding added to make each item aligned (current alignment is to the next 16 byte boundary). - unsigned num_bytes = (vtable_size + 15) & ~15; - // 20020923 Total number of allocations and total number of bytes for class-related data structures. 
- Class::num_vtable_allocations++; - Class::total_vtable_bytes += num_bytes; + unsigned vtables_allocated_size_total = vt_size + VTABLE_OVERHEAD + 2*ManagedObject::get_header_size(); + Class::num_vtable_allocations += 2; + Class::total_vtable_bytes += vtables_allocated_size_total; #endif - assert(p_gc_hdr); - memset(p_gc_hdr, 0, vtable_size); + return vtVTHandle->object->get_data_ptr(); +} + +/* + * get_vtable_fields_number get number of methods of class which are placed in methods array in VTable +*/ +unsigned get_vtable_fields_number(Class* clss) +{ + return (clss->n_virtual_method_entries + clss->n_intfc_method_entries); +} + +/* + * get_vtable_size returns size of VTable in bytes for corresponding class + * the fuction must correspond to VTABLE_FIELDS_NUMBER + * --- SYSTEM DEPENDENT --- jint for IA32, jlong for _IPF_ and _EM64T_ +*/ +size_t get_vtable_size(Class* clss) +{ + size_t sz; +#if defined _IPF_ || defined _EM64T_ + sz = get_vtable_fields_number(clss)*sizeof(jlong) + VTABLE_OVERHEAD; +#else // _IA32_ + sz = get_vtable_fields_number(clss)*sizeof(jint) + VTABLE_OVERHEAD; +#endif + return sz; +} + + +VTable *create_vtable(Class *p_class, void* p_gc_hdr) +{ + assert (p_gc_hdr); + assert (p_class); + Global_Env* env = VM_Global_State::loader_env; VTable *vtable = (VTable *)p_gc_hdr; if(p_class->super_class) { @@ -666,6 +778,40 @@ #endif { p_class->is_suitable_for_fast_instanceof = 1; } + + if (!env->InBootstrap()){ + tmn_suspend_disable(); + vtable->jlC = *p_class->class_handle; + tmn_suspend_enable(); + } + else { + // for BootStrap mode jlC is set in create_instance_for_class + // class_handle is NULL in bootstrap mode + assert (!p_class->class_handle); + vtable->jlC = NULL; + } + +#ifdef _DEBUG + // check of VTable structure consistency: + // test that jlC field in struct VTable is reflected to jlc ref field in VT object + // skip BS classes which do not have jlc on prepare + if (p_class->class_handle) + { + tmn_suspend_disable(); + ManagedObject* VT_obj 
= (ManagedObject*)((POINTER_SIZE_INT)vtable - ManagedObject::get_header_size()); + assert (vtable->jlC); + JNIEnv *jenv = get_jni_native_intf(); + ObjectHandle vt_handle = oh_allocate_local_handle(); + vt_handle->object= VT_obj; + Field *field = class_lookup_field_recursive(VT_obj->vt()->clss, "vt_jlc", "Ljava/lang/Class;"); + assert (field); + assert (!field->is_static()); + tmn_suspend_enable(); + jobject res = GetObjectField(jenv, (jobject)vt_handle/*&VT_obj*/, (jfieldID)field/*referentField*/); + assert (res->object == *p_class->class_handle); + } +#endif + return vtable; } //create_vtable diff --git a/vmcore/src/class_support/Environment.cpp b/vmcore/src/class_support/Environment.cpp index 5d2ffef..4c0e636 100644 --- a/vmcore/src/class_support/Environment.cpp +++ b/vmcore/src/class_support/Environment.cpp @@ -80,6 +80,7 @@ ready_for_exceptions(false) EnqueueName_String = string_pool.lookup("enqueue"); Clonable_String = string_pool.lookup("java/lang/Cloneable"); Serializable_String = string_pool.lookup("java/io/Serializable"); + VTableBaseObject_String = string_pool.lookup("VTableBase"); Length_String = string_pool.lookup("length"); LoadClass_String = string_pool.lookup("loadClass"); @@ -115,14 +116,6 @@ #endif // !_IPF_ GlobalCodeMemoryManager = new PoolManager(DEFAULT_COMMOT_JIT_CODE_POOL_SIZE, system_page_size, use_large_pages, true/*is_code*/, true/*is_resize_allowed*/); - if (vm_vtable_pointers_are_compressed()) { - VTableMemoryManager = new PoolManager(DEFAULT_COMMOT_VTABLE_POOL_SIZE_NO_RESIZE, system_page_size, use_large_pages, - false/*is_code*/, false/*is_resize_allowed*/); - } - else { - VTableMemoryManager = new PoolManager(DEFAULT_VTABLE_POOL_SIZE, system_page_size, use_large_pages, - false/*is_code*/, true/*is_resize_allowed*/); - } verify_all = false; pin_interned_strings = false; @@ -149,6 +142,7 @@ #endif // !_IPF_ JavaLangObject_Class = NULL; JavaLangString_Class = NULL; JavaLangClass_Class = NULL; + VTableVTable_Class = NULL; 
java_lang_Throwable_Class = NULL; java_lang_Error_Class = NULL; java_lang_ThreadDeathError_Class = NULL; diff --git a/vmcore/src/class_support/Prepare.cpp b/vmcore/src/class_support/Prepare.cpp index d604613..048df55 100644 --- a/vmcore/src/class_support/Prepare.cpp +++ b/vmcore/src/class_support/Prepare.cpp @@ -289,7 +289,8 @@ #else // not POINTER64 #endif // not POINTER64 if ((clss->static_data_size%8)!=0) { clss->static_data_size += 4; - assert((clss->static_data_size%8)==0); + //assert((clss->static_data_size%8)==0); // remove to enable compact_fields field - do we need this if we + // align to 4 byte biundary?? probably - NO! } } @@ -363,6 +364,14 @@ void assign_offsets_to_class_fields(Clas assert(clss->state != ST_InstanceSizeComputed); bool do_field_compaction = Class::compact_fields; bool do_field_sorting = Class::sort_fields; + // force (do_field_compaction = true & do_field_sorting = false) for VTable for VTable class + // to reflect struct VTable to VT object data + Global_Env* env = VM_Global_State::loader_env; + if (clss->is_VT_class) + { + do_field_sorting = false; + do_field_compaction = true; + } // Create a temporary array of pointers to the class's fields. We do this to support sorting the fields // by size if the command line option "-sort_fields" is given, and because elements of the clss->fields array @@ -523,9 +532,10 @@ void build_gc_class_ref_map(Global_Env* //extern bool bootstrapped; //// gloss over bootstrap inconsistency //if (bootstrapped == true) { - if(env->InBootstrap()) { - assert(clss->n_instance_refs == 0); - } + // inconsistent check!!! - redo! 
+ //if(env->InBootstrap()) { + // assert(clss->n_instance_refs == 0); + //} } assert (current_index <= clss->n_instance_refs); @@ -1276,7 +1286,7 @@ static void initialize_regular_class_dat // // -bool class_prepare(Global_Env* env, Class *clss) +bool class_prepare(Global_Env* env, Class *clss, void* p_vt_mem) { ASSERT_RAISE_AREA; // fast path @@ -1473,20 +1483,21 @@ #endif // code address (a stub or jitted code) // // - clss->vtable = create_vtable(clss, n_vtable_entries); + if (!p_vt_mem) + { + p_vt_mem = create_vtable_sequence(clss); + if (p_vt_mem == NULL){ + assert(false); + return false; + } + } + clss->vtable = create_vtable(clss, p_vt_mem); for (i = 0; i < n_vtable_entries; i++) { // need to populate with pointers to stubs or compiled code clss->vtable->methods[i] = NULL; // for now } - if (vm_vtable_pointers_are_compressed()) - { - clss->allocation_handle = (Allocation_Handle) ((POINTER_SIZE_INT)clss->vtable - vm_get_vtable_base()); - } - else - { - clss->allocation_handle = (Allocation_Handle) clss->vtable; - } clss->vtable->clss = clss; + clss->allocation_handle = ManagedObject::vtable_to_allocation_handle(clss->vtable); // Set the vtable entries to point to the code address (a stub or jitted code) point_class_vtable_entries_to_stubs(clss); diff --git a/vmcore/src/class_support/classloader.cpp b/vmcore/src/class_support/classloader.cpp index e07e0bd..1de6005 100644 --- a/vmcore/src/class_support/classloader.cpp +++ b/vmcore/src/class_support/classloader.cpp @@ -24,6 +24,7 @@ #include "cxxlog.h" #include "vm_log.h" #include +#include #include "classloader.h" #include "object_layout.h" @@ -74,7 +75,7 @@ void mark_classloader(ClassLoader* cl) if(cl->GetLoader() && cl->NotMarked()) { TRACE2("classloader.unloading.markloader", " Marking loader " << cl << " (" << (void*)cl->GetLoader() << " : " - << ((VTable*)(*(unsigned**)(cl->GetLoader())))->clss->name->bytes << ")"); + << cl->GetLoader()->vt_unsafe()->clss->name->bytes << ")"); cl->Mark(); } } @@ -345,7 
+346,11 @@ Class* ClassLoader::DefineClass(Global_E clss->package = ProvidePackage(env, className, NULL); - InsertClass(clss); + if (!InsertClass(clss)){ + assert (false); + FailedLoadingClass(className); + return NULL; + } SuccessLoadingClass(className); //bool doNotNotifyBaseClasses = // false if class is either j/l/Object, j/io/Serializable, or j/l/Class @@ -566,6 +571,8 @@ void ClassLoader::SuccessLoadingClass(co ClassLoader* ClassLoader::FindByObject(ManagedObject* loader) { LMAutoUnlock aulock( &(ClassLoader::m_tableLock) ); + //check that loader != NULL because cl->m_loader could be == NULL in case that class loader is to be unloaded + assert(loader); ClassLoader* cl; for(unsigned i = 0; i < m_nextEntry; i++) { @@ -587,14 +594,15 @@ ClassLoader* ClassLoader::LookupLoader( } -void ClassLoader::UnloadClassLoader( ManagedObject* loader ) +void ClassLoader::UnloadClassLoader( ClassLoader* loader ) { + TRACE2("unloading", "Unloading class loader: " << (void*)loader); LMAutoUnlock aulock( &(ClassLoader::m_tableLock) ); unsigned i; for(i = 0; i < m_nextEntry; i++) { ClassLoader* cl = m_table[i]; - if( loader == cl->m_loader ) break; + if( loader == cl ) break; } if (i == m_nextEntry) return; ClassLoader* cl = m_table[i]; @@ -618,14 +626,18 @@ void ClassLoader::gc_enumerate() } for(unsigned int i = 0; i < m_nextEntry; i++) { - if(m_table[i]->m_loader != NULL) { + assert (m_table[i]->m_loader); + if (m_table[i]->m_markBit) { vm_enumerate_root_reference((void**)(&(m_table[i]->m_loader)), FALSE); - // should enumerate errors for classes - for(fci = m_table[i]->m_failedClasses->begin(); - fci != m_table[i]->m_failedClasses->end(); fci++) - { - vm_enumerate_root_reference((void**)(&(fci->second.m_exception)), FALSE); - } + } + else { + vm_enumerate_weakroot_reference((void**)(&(m_table[i]->m_loader)), FALSE); + } + // should enumerate errors for classes + for(fci = m_table[i]->m_failedClasses->begin(); + fci != m_table[i]->m_failedClasses->end(); fci++) + { + 
vm_enumerate_root_reference((void**)(&(fci->second.m_exception)), FALSE); } } } @@ -638,25 +650,12 @@ void ClassLoader::ClearMarkBits() ClassTable::iterator cti; unsigned i; for(i = 0; i < m_nextEntry; i++) { - if(m_table[i]->m_unloading) { - TRACE2("classloader.unloading.debug", " Skipping \"unloaded\" classloader " - << m_table[i] << " (" << m_table[i]->m_loader << " : " - << ((VTable*)(*(unsigned**)(m_table[i]->m_loader)))->clss->name->bytes << ")"); - continue; - } TRACE2("classloader.unloading.debug", " Clearing mark bits in classloader " << m_table[i] << " (" << m_table[i]->m_loader << " : " - << ((VTable*)(*(unsigned**)(m_table[i]->m_loader)))->clss->name->bytes << ") and its classes"); + << m_table[i]->m_loader->vt_unsafe()->clss->name->bytes << ") and its classes"); // clear mark bits in loader and classes m_table[i]->m_markBit = 0; - for(cti = m_table[i]->m_loadedClasses->begin(); - cti != m_table[i]->m_loadedClasses->end(); cti++) - { - if(cti->second->class_loader == m_table[i]) { - cti->second->m_markBit = 0; - } - } - } + } TRACE2("classloader.unloading.clear", "Finished clearing mark bits"); TRACE2("classloader.unloading.marking", "Starting mark loaders"); } @@ -667,27 +666,44 @@ void ClassLoader::StartUnloading() TRACE2("classloader.unloading.marking", "Finished marking loaders"); TRACE2("classloader.unloading.do", "Start checking loaders ready to be unloaded"); LMAutoUnlock aulock( &(ClassLoader::m_tableLock) ); + std::list unloadinglist; unsigned i; for(i = 0; i < m_nextEntry; i++) { - if(m_table[i]->m_unloading) { - TRACE2("classloader.unloading.debug", " Skipping \"unloaded\" classloader " + if(m_table[i]->m_loader) { + TRACE2("classloader.unloading.debug", " Skipping live classloader " << m_table[i] << " (" << m_table[i]->m_loader << " : " - << ((VTable*)(*(unsigned**)(m_table[i]->m_loader)))->clss->name->bytes << ")"); + << m_table[i]->m_loader->vt_unsafe()->clss->name->bytes << ")"); continue; } - TRACE2("classloader.unloading.debug", " 
Scanning loader " - << m_table[i] << " (" << m_table[i]->m_loader << " : " - << ((VTable*)(*(unsigned**)(m_table[i]->m_loader)))->clss->name->bytes << ")"); - if(!m_table[i]->m_markBit) { - TRACE2("classloader.unloading.stats", " (!) Ready to unload classloader " - << m_table[i] << " (" << m_table[i]->m_loader << " : " - << ((VTable*)(*(unsigned**)(m_table[i]->m_loader)))->clss->name->bytes << ")"); - TRACE2("classloader.unloading.stats", " (!) This will free " - << m_table[i]->GetFullSize() << " bytes in C heap"); - m_table[i]->m_unloading = true; - m_unloadedBytes += m_table[i]->GetFullSize(); + TRACE2("classloader.unloading.stats", " Unloading classloader " + << m_table[i] << " (" << m_table[i] << ")"); + TRACE2("classloader.unloading.stats", " (!) This will free " + << m_table[i]->GetFullSize() << " bytes in C heap"); +#ifdef _DEBUG // check that all j.l.Classes inside j.l.ClassLoader are also unloaded + ClassTable* ct = m_table[i]->GetLoadedClasses(); + ClassTable::iterator it; + for (it = ct->begin(); it != ct->end(); it++) + { + Class* c = it->second; + if (*c->class_handle) + { + DIE("FAILED on unloading classloader: \n" << (void*)m_table[i] << + "live j.l.Class of unloaded class is detected: " << c->name->bytes); + assert (false); + } } +#endif + unloadinglist.push_back(m_table[i]); + m_unloadedBytes += m_table[i]->GetFullSize(); } + + // safely remove classloaders from m_table + std::list::iterator it; + for (it = unloadinglist.begin(); it != unloadinglist.end(); it++) + { + UnloadClassLoader(*it); + } + TRACE2("classloader.unloading.do", "Finished checking loaders"); } @@ -698,10 +714,9 @@ void ClassLoader::PrintUnloadingStats() TRACE2("classloader.unloading.stats", "----------------------------------------------"); TRACE2("classloader.unloading.stats", "Class unloading statistics:"); for(i = 0; i < m_nextEntry; i++) { - if(m_table[i]->m_unloading) { + if(!m_table[i]->m_loader) { TRACE2("classloader.unloading.stats", " Class loader " - << m_table[i] << " 
(" << m_table[i]->m_loader << " : " - << ((VTable*)(*(unsigned**)(m_table[i]->m_loader)))->clss->name->bytes + << m_table[i] << " (" << m_table[i]->m_loader << ") contains " << m_table[i]->GetFullSize() << " bytes in C heap"); } } @@ -716,7 +731,6 @@ void vm_classloader_iterate_objects(void // skip the object iteration if it is not needed // (logging is not enabled and // class unloading is not yet implemented). - if (!is_info_enabled("class_unload")) return; Managed_Object_Handle obj; int nobjects = 0; @@ -733,11 +747,12 @@ ClassLoader* ClassLoader::AddClassLoader ClassLoader* cl = new UserDefinedClassLoader(); TRACE2("classloader.unloading.add", "Adding class loader " << cl << " (" << loader << " : " - << ((VTable*)(*(unsigned**)(loader)))->clss->name->bytes << ")"); + << loader->vt_unsafe()->clss->name->bytes << ")"); cl->Initialize( loader ); if( m_capacity <= m_nextEntry ) ReallocateTable( m_capacity?(2*m_capacity):32 ); m_table[m_nextEntry++] = cl; + return cl; } @@ -1100,7 +1115,11 @@ Class* ClassLoader::SetupAsArray(Global_ return NULL; } - InsertClass(klass); + if (!InsertClass(klass)){ + assert (false); + FailedLoadingClass(classNameString); + return NULL; + } SuccessLoadingClass(classNameString); return klass; @@ -1109,7 +1128,7 @@ Class* ClassLoader::SetupAsArray(Global_ /** Adds Class* pointer to m_reportedClasses HashTable. * clss->name must not be NULL. 
*/ -Class* ClassLoader::AllocateAndReportInstance(const Global_Env* env, Class* clss) +Class* ClassLoader::AllocateAndReportInstance(const Global_Env* env, Class* clss, bool b_report_class_handle) { const String* name = clss->name; assert(name); @@ -1135,7 +1154,10 @@ Class* ClassLoader::AllocateAndReportIns } // add newly created java_lang_Class to reportable collection LMAutoUnlock aulock(&m_lock); - clss->class_handle = m_reportedClasses->Insert(name, new_java_lang_Class); + clss->class_handle = (ManagedObject**)Alloc(sizeof(ManagedObject**)); + *clss->class_handle = new_java_lang_Class; + if (b_report_class_handle) // skip for bs classes as it is done when they are already in m_loaded_classes + m_reportedClasses->Insert(name, clss); aulock.ForceUnlock(); TRACE("NewClass inserting class \"" << name->bytes << "\" with key " << name << " and object " << new_java_lang_Class); @@ -1887,6 +1909,69 @@ Class* UserDefinedClassLoader::DoLoadCla return clss; } // UserDefinedClassLoader::DoLoadClass +bool ClassLoader::InsertClass(Class* clss) { + if (!IsBootstrap()) // skip BS classes + { + Global_Env* env = VM_Global_State::loader_env; + jvalue args[3]; + ManagedObject* jstr; + + tmn_suspend_disable(); + if (env->compress_references) { + jstr = uncompress_compressed_reference(clss->name->intern.compressed_ref); + } else { + jstr = clss->name->intern.raw_ref; + } + ObjectHandle h = oh_allocate_local_handle(); + if (jstr != NULL) { + h->object = jstr; + } else { + h->object = vm_instantiate_cp_string_resolved((String*)clss->name); + } + args[1].l = h; + + if (exn_raised()) { + TRACE2("classloader", "OutOfMemoryError on class registering " << clss->name->bytes); + assert (false); + tmn_suspend_enable(); + return false; + } + + // this parameter + ObjectHandle hl = oh_allocate_local_handle(); + hl->object = m_loader; + args[0].l = hl; + + // jlc parameter + ObjectHandle chl = oh_allocate_local_handle(); + chl->object = *clss->class_handle; + args[2].l = chl; + + static 
String* acr_func_name = env->string_pool.lookup("addToClassRegistry"); + static String* acr_func_desc = env->string_pool.lookup("(Ljava/lang/String;Ljava/lang/Class;)V"); + + Method* method = class_lookup_method_recursive(m_loader->vt()->clss, acr_func_name, acr_func_desc); + assert(method); + + jvalue res; + vm_execute_java_method_array((jmethodID) method, &res, args); + + if(exn_raised()) { + assert (false); + tmn_suspend_enable(); + return false; + } + tmn_suspend_enable(); + } + + LMAutoUnlock aulock(&m_lock); + m_loadedClasses->Insert(clss->name, clss); + m_initiatedClasses->Insert(clss->name, clss); + RemoveFromReported(clss->name); + return true; +} + + void BootstrapClassLoader::ReportAndExit(const char* exnclass, std::stringstream& exnmsg) { std::stringstream ss; @@ -2079,4 +2164,71 @@ void BootstrapClassLoader::ReportExcepti ClassLoader::ReportException(exn_name, message_stream); } +/* + * CreateVTableClass function creates synthetic class vtable + * Arguments: + * clss_name - name of created class + * vtable_methods_number - number of methods inside vtable (methods array size) +*/ +Class* ClassLoader::CreateVTableClass(String* clss_name, int vtable_methods_number) +{ + Global_Env* env = VM_Global_State::loader_env; + + Class *clss = NewClass(env, clss_name); + clss->class_loader = this; + clss->access_flags = ACC_FINAL | ACC_SYNTHETIC; + clss->is_verified = 2; + /* FIELDS CONSTRUCTION*/ + // Current approach: + // fill VT class with fields according to struct VTable layout + // --- PLATFORM DEPENDENT --- + // jint fields for _IA32_, jlong fields for _IPF_ and _EM64T_ + // consider 32bit pointer as jint, and 64bit pointers as jlong + + clss->n_fields = vtable_methods_number + VTABLE_FIELDS_NUMBER; + clss->fields = new Field[clss->n_fields]; + assert(clss->fields); + for (unsigned i = 0; i < clss->n_fields; i++) + { + // jlc ref field + if (i == JLC_REF_NUMBER) + { + static String* vt_jlc_name = env->string_pool.lookup("vt_jlc"); + static String* 
vt_jlc_desc = env->string_pool.lookup("Ljava/lang/Class;"); + clss->fields[i].set(clss, vt_jlc_name, vt_jlc_desc, ACC_FINAL | ACC_SYNTHETIC); + clss->fields[i].set_field_type_desc( + type_desc_create_from_java_descriptor("Ljava/lang/Class;", this)); + clss->fields[i].set_injected(); + continue; + } + + char vt_field_name[10] = {0}; + sprintf (vt_field_name, "vt_%d", i); + +#if defined _IPF_ || defined _EM64T_ + static String* LongDescriptor_String = env->string_pool.lookup("J"); + clss->fields[i].set(clss, env->string_pool.lookup(vt_field_name), + LongDescriptor_String, ACC_FINAL | ACC_SYNTHETIC); + clss->fields[i].set_field_type_desc( + type_desc_create_from_java_descriptor("J", NULL)); + clss->fields[i].set_injected(); +#else // IA32 + static String* IntDescriptor_String = env->string_pool.lookup("I"); + clss->fields[i].set(clss, env->string_pool.lookup(vt_field_name), + IntDescriptor_String, ACC_FINAL | ACC_SYNTHETIC); + clss->fields[i].set_field_type_desc( + type_desc_create_from_java_descriptor("I", NULL)); + clss->fields[i].set_injected(); +#endif + } + + clss->is_VT_class = true; + if (!InsertClass(clss)){ + assert (false); + FailedLoadingClass(clss_name); + return NULL; + } + + return clss; +} diff --git a/vmcore/src/gc/dll_gc.cpp b/vmcore/src/gc/dll_gc.cpp index e8facb4..6794cd7 100644 --- a/vmcore/src/gc/dll_gc.cpp +++ b/vmcore/src/gc/dll_gc.cpp @@ -173,6 +173,7 @@ void vm_add_gc(const char *dllName) (apr_dso_handle_sym_t)default_gc_supports_compressed_references); gc_add_root_set_entry = (void (*)(Managed_Object_Handle *ref, Boolean is_pinned)) getFunction(handle, "gc_add_root_set_entry", dllName); + gc_add_weak_root_set_entry = (void (*)(Managed_Object_Handle *ref, Boolean is_pinned, Boolean short_ref)) getFunction(handle, "gc_add_weak_root_set_entry", dllName); gc_add_compressed_root_set_entry = (void (*)(uint32 *ref, Boolean is_pinned)) getFunctionOptional(handle, "gc_add_compressed_root_set_entry", diff --git a/vmcore/src/gc/root_set_enum_common.cpp 
b/vmcore/src/gc/root_set_enum_common.cpp index e7f7f97..f0dcb57 100644 --- a/vmcore/src/gc/root_set_enum_common.cpp +++ b/vmcore/src/gc/root_set_enum_common.cpp @@ -64,56 +64,100 @@ vm_enumerate_interned_strings() // Enumerate all globally visible classes and their static fields. +static void vm_enumerate_jlc(Class* c, bool b_weak = false) +{ + assert (VM_Global_State::loader_env->system_class_loader); + assert (*c->class_handle); + + if (!b_weak) { + vm_enumerate_root_reference((void**)c->class_handle, FALSE); + } + else { + vm_enumerate_weakroot_reference((void**)c->class_handle, FALSE); + } +} + +static void vm_enumerate_static_fields_process(Class* c) +{ + if (c->p_error) { + vm_enumerate_root_reference( + (void**)&c->p_error ,FALSE); + } + + Const_Pool* cp = c->m_failedResolution; + while(cp) { + vm_enumerate_root_reference((void**)(&(cp->error.cause)), FALSE); + cp = cp->error.next; + } + // Finally enumerate the static fields of the class + unsigned n_fields = c->n_fields; + if((c->state == ST_Prepared) + || (c->state == ST_Initializing) + || (c->state == ST_Initialized)) { + // Class has been prepared, so we can iterate over all its fields. + for(unsigned i = 0; i < n_fields; i++) { + Field *f = &c->fields[i]; + if(f->is_static()) { + char desc0 = f->get_descriptor()->bytes[0]; + if(desc0 == 'L' || desc0 == '[') { + // The field is static and it is a reference. + if (VM_Global_State::loader_env->compress_references) { + vm_enumerate_compressed_root_reference((uint32 *)f->get_address(), FALSE); + } else { + vm_enumerate_root_reference((void **)f->get_address(), FALSE); + } + } + } + } + } +} static void vm_enumerate_static_fields() { TRACE2("enumeration", "vm_enumerate_static_fields()"); Global_Env *global_env = VM_Global_State::loader_env; - ManagedObject** ppc; GlobalClassLoaderIterator ClIterator; ClassLoader *cl = ClIterator.first(); while(cl) { + // 1. enum all reported classes as strong roots + // 2. 
enum all loaded classes as weak roots GlobalClassLoaderIterator::ClassIterator itc; GlobalClassLoaderIterator::ReportedClasses RepClasses = cl->GetReportedClasses(); for (itc = RepClasses->begin(); itc != RepClasses->end(); itc++) { - ppc = &itc->second; - assert(*ppc); - Class* c = jclass_to_struct_Class((jclass)ppc); - - if (c->p_error) { - vm_enumerate_root_reference( - (void**)&c->p_error ,FALSE); - } - vm_enumerate_root_reference((void**)ppc, FALSE); - Const_Pool* cp = c->m_failedResolution; - while(cp) { - vm_enumerate_root_reference((void**)(&(cp->error.cause)), FALSE); - cp = cp->error.next; + Class* c = itc->second; + { + vm_enumerate_jlc(c); + if (c->vtable) + { + vm_enumerate_root_reference((void**)&c->vtObj, FALSE); + } + vm_enumerate_static_fields_process(c); } - // Finally enumerate the static fields of the class - unsigned n_fields = c->n_fields; - if((c->state == ST_Prepared) - || (c->state == ST_Initializing) - || (c->state == ST_Initialized)) { - // Class has been prepared, so we can iterate over all its fields. - for(unsigned i = 0; i < n_fields; i++) { - Field *f = &c->fields[i]; - if(f->is_static()) { - char desc0 = f->get_descriptor()->bytes[0]; - if(desc0 == 'L' || desc0 == '[') { - // The field is static and it is a reference. 
- if (global_env->compress_references) { - vm_enumerate_compressed_root_reference((uint32 *)f->get_address(), FALSE); - } else { - vm_enumerate_root_reference((void **)f->get_address(), FALSE); - } - } + + } + + ClassTable* ct = cl->GetLoadedClasses(); + ClassTable::iterator it; + for (it = ct->begin(); it != ct->end(); it++) + { + Class* c = it->second; + { + if (cl->IsBootstrap()) + { + vm_enumerate_jlc(c); + if (c->vtable) + vm_enumerate_root_reference((void**)&c->vtObj, FALSE); + } + else + { + vm_enumerate_jlc(c, true/*weak*/); } + vm_enumerate_static_fields_process(c); } } + cl = ClIterator.next(); } } //vm_enumerate_static_fields @@ -148,6 +192,28 @@ #endif // _DEBUG } //vm_enumerate_root_reference +void +vm_enumerate_weakroot_reference(void **ref, Boolean is_pinned) +{ + if (get_global_safepoint_status() == enumerate_the_universe) { + +#if _DEBUG + if (VM_Global_State::loader_env->compress_references) { + // 20030324 DEBUG: verify the slot whose reference is being passed. + ManagedObject **p_obj = (ManagedObject **)ref; + ManagedObject* obj = *p_obj; + assert(obj != NULL); // See the comment at the top of the procedure. + if ((void *)obj != Class::heap_base) { + assert(((POINTER_SIZE_INT)Class::heap_base <= (POINTER_SIZE_INT)obj) && ((POINTER_SIZE_INT)obj <= (POINTER_SIZE_INT)Class::heap_end)); + (obj->vt())->clss->name->bytes; + } + } +#endif // _DEBUG + + gc_add_weak_root_set_entry((Managed_Object_Handle *)ref, is_pinned, FALSE); + } +} //vm_enumerate_weakroot_reference + // Resembles vm_enumerate_root_reference() but is passed the address of a uint32 slot containing a compressed reference. VMEXPORT void vm_enumerate_compressed_root_reference(uint32 *ref, Boolean is_pinned) @@ -262,6 +328,22 @@ #endif << "." 
<< method_get_name(cci->get_method()) << method_get_descriptor(cci->get_method())); cci->get_jit()->get_root_set_from_stack_frame(cci->get_method(), 0, si_get_jit_context(si)); + ClassLoader* cl = cci->get_method()->get_class()->class_loader; + assert (cl); + mark_classloader(cl); + if (cci->has_inline_info()) { + JIT *jit = cci->get_jit(); + NativeCodePtr ip = si_get_ip(si); + uint32 inlined_depth = si_get_inline_depth(si); + uint32 offset = (POINTER_SIZE_INT)ip - (POINTER_SIZE_INT)cci->get_code_block_addr(); + for (uint32 i = 0; i < inlined_depth; i++) { + Method* m = jit->get_inlined_method(cci->get_inline_info(), offset, i); + assert (m); + cl = m->get_class()->class_loader; + assert (cl); + mark_classloader(cl); + } + } TRACE2("enumeration", "enumerated eip=" << (void *) si_get_ip(si) << " is_first=" << !si_get_jit_context(si)->is_ip_past << " " << class_get_name(method_get_class(cci->get_method())) @@ -275,6 +357,12 @@ #endif << (m2n_get_method(si_get_m2n(si)) ? method_get_name(m2n_get_method(si_get_m2n(si))) : "") << (m2n_get_method(si_get_m2n(si)) ? method_get_descriptor(m2n_get_method(si_get_m2n(si))) : "")); oh_enumerate_handles(m2n_get_local_handles(si_get_m2n(si))); + Method* m = m2n_get_method(si_get_m2n(si)); + if (m) { + ClassLoader* cl = m->get_class()->class_loader; + assert (cl); + mark_classloader(cl); + } } si_goto_previous(si, false); } diff --git a/vmcore/src/init/vm_init.cpp b/vmcore/src/init/vm_init.cpp index 941dba0..58a063c 100644 --- a/vmcore/src/init/vm_init.cpp +++ b/vmcore/src/init/vm_init.cpp @@ -119,7 +119,63 @@ #endif // and set its "vm_class" field to point back to that structure. 
void create_instance_for_class(Global_Env * vm_env, Class *clss) { - clss->class_loader->AllocateAndReportInstance(vm_env, clss); + clss->class_loader->AllocateAndReportInstance(vm_env, clss, false); + // set jlC to vtable - for non BS classes jlc is set in create_vtable + if (clss->vtable && clss != vm_env->VTableVTable_Class) // vtable = NULL for interfaces // skip vtable base - already set + { + assert (!clss->vtable->jlC); // used for BS classes only + clss->vtable->jlC = *clss->class_handle; + assert (!clss->class_loader->GetLoader()); + } +} + +/* + * Create VTable for VTables + * Function creates VTable object which vtable references to this object + * Required by automatic class unloading design +*/ +static void create_vtable_for_vtables(Global_Env *env) +{ + env->VTableVTable_Class = env->bootstrap_class_loader->CreateVTableClass(env->VTableBaseObject_String, 0); + if (!env->VTableVTable_Class){ + DIE("Could not create Vtable for Vtables class" << env->VTableBaseObject_String->bytes); + } +#ifdef _DEBUG + // integrity check: + // VTableforVTable has no methods => sizeof VTableforVTable->VTable = VTABLE_OVERHEAD + unsigned vt_size = get_vtable_size(env->VTableVTable_Class); + assert (vt_size == VTABLE_OVERHEAD); +#endif + + void* vt_buf = STD_ALLOCA(VTABLE_OVERHEAD); + assert(vt_buf); + memset(vt_buf, 0, VTABLE_OVERHEAD); + if(!class_prepare(env, env->VTableVTable_Class, vt_buf)) { + assert (false); + return; + } + + tmn_suspend_disable(); + ObjectHandle VTableVTableHandle = oh_allocate_local_handle(); + assert (VTableVTableHandle); + VTableVTableHandle->object = (ManagedObject*)gc_alloc(env->VTableVTable_Class->instance_data_size, + env->VTableVTable_Class->allocation_handle, vm_get_gc_thread_local()); + assert (VTableVTableHandle->object); + gc_pin_object((Managed_Object_Handle*)&VTableVTableHandle->object); + + // cycle VTable to itself + memcpy(VTableVTableHandle->object->get_data_ptr(), env->VTableVTable_Class->vtable, VTABLE_OVERHEAD); + 
env->VTableVTable_Class->vtable = (VTable*)VTableVTableHandle->object->get_data_ptr(); + env->VTableVTable_Class->allocation_handle = ManagedObject::vtable_to_allocation_handle(env->VTableVTable_Class->vtable); + VTableVTableHandle->object->set_vtable_unsafe(env->VTableVTable_Class->vtable); + + env->VTableVTable_Class->vtObj = VTableVTableHandle->object; + tmn_suspend_enable(); +#ifdef VM_STATS + unsigned vtables_allocated_size_total = VTABLE_OVERHEAD + ManagedObject::get_header_size(); + Class::num_vtable_allocations++; + Class::total_vtable_bytes += vtables_allocated_size_total; +#endif } /** @@ -270,6 +326,7 @@ static void bootstrap_initial_java_class * j.l.reflect.AnnotatedElement, GenericDeclaration and Type as per Java 5 */ vm_env->StartVMBootstrap(); + create_vtable_for_vtables(vm_env); vm_env->JavaLangObject_Class = preload_class(vm_env, vm_env->JavaLangObject_String); vm_env->java_io_Serializable_Class = preload_class(vm_env, vm_env->Serializable_String); vm_env->JavaLangClass_Class = preload_class(vm_env, vm_env->JavaLangClass_String); diff --git a/vmcore/src/kernel_classes/javasrc/java/lang/Class.java b/vmcore/src/kernel_classes/javasrc/java/lang/Class.java index 071a7b7..77590dd 100644 --- a/vmcore/src/kernel_classes/javasrc/java/lang/Class.java +++ b/vmcore/src/kernel_classes/javasrc/java/lang/Class.java @@ -398,6 +398,9 @@ public final class Class implements S .copyConstructor(getDeclaredConstructorInternal(argumentTypes)); } + public ClassLoader definingLoader; + public Object classVTInstance; + /** * @com.intel.drl.spec_ref */ diff --git a/vmcore/src/kernel_classes/javasrc/java/lang/ClassLoader.java b/vmcore/src/kernel_classes/javasrc/java/lang/ClassLoader.java index 3b45e24..21df88c 100644 --- a/vmcore/src/kernel_classes/javasrc/java/lang/ClassLoader.java +++ b/vmcore/src/kernel_classes/javasrc/java/lang/ClassLoader.java @@ -106,6 +106,12 @@ public abstract class ClassLoader { */ private final HashMap definedPackages; + /* + * The following 
mapping is used , where binaryClassName - class name, + * clazz - corresponding class. + */ + Hashtable loadedClasses = new Hashtable(); + /** * The following mapping is used , where binaryClassName - class name, * clazz - corresponding class. @@ -378,6 +384,17 @@ public abstract class ClassLoader { /** * @com.intel.drl.spec_ref */ + public void addToClassRegistry(String name, Class clazz) + { + synchronized (loadedClasses) + { + loadedClasses.put(name, clazz); // unloading + } + } + + /** + * @com.intel.drl.spec_ref + */ protected final Class defineClass(String name, ByteBuffer b, ProtectionDomain protectionDomain) throws ClassFormatError { byte[] data = b.array(); @@ -416,6 +433,7 @@ public abstract class ClassLoader { } Class clazz = VMClassRegistry .defineClass(name, this, data, offset, len); + clazz.definingLoader = this; clazz.setProtectionDomain(domain); if (certs != null) { packageCertificates.put(packageName, certs); diff --git a/vmcore/src/util/mem_alloc.cpp b/vmcore/src/util/mem_alloc.cpp index a1e0f30..3755c55 100644 --- a/vmcore/src/util/mem_alloc.cpp +++ b/vmcore/src/util/mem_alloc.cpp @@ -240,7 +240,5 @@ POINTER_SIZE_INT vm_get_vtable_base() { // Subtract a small number (like 1) from the real base so that // no valid vtable offsets will ever be 0. 
- assert (VM_Global_State::loader_env->VTableMemoryManager); - assert (VM_Global_State::loader_env->VTableMemoryManager->get_pool_base()); - return (POINTER_SIZE_INT) (VM_Global_State::loader_env->VTableMemoryManager->get_pool_base() - 1); + return (POINTER_SIZE_INT) (- 1); } //vm_get_vtable_base diff --git a/vmcore/src/util/natives_support.cpp b/vmcore/src/util/natives_support.cpp index 73b2abc..2591a75 100644 --- a/vmcore/src/util/natives_support.cpp +++ b/vmcore/src/util/natives_support.cpp @@ -307,7 +307,7 @@ natives_unload_library_internal(NativeLi { assert(pfound); - // FIXME find_call_JNI_OnUnload(pfound->handle); + find_call_JNI_OnUnload(pfound->handle); apr_dso_unload(pfound->handle); } // natives_unload_library