< prev index next >

src/hotspot/share/memory/dynamicArchive.cpp

Print this page

 244         if (orig_ref->msotype() == MetaspaceObj::ClassType) {
 245           Klass* k = (Klass*)orig_obj;
 246           assert(k->is_instance_klass() &&
 247                  SystemDictionaryShared::is_excluded_class(InstanceKlass::cast(k)),
 248                  "orig_obj must be excluded Class");
 249         }
 250       }
 251 #endif
 252 
 253       log_debug(cds, dynamic)("Relocating " PTR_FORMAT " %s", p2i(new_obj),
 254                               MetaspaceObj::type_name(orig_ref->msotype()));
 255       if (new_obj != NULL) {
 256         EmbeddedRefUpdater updater(_builder, orig_obj, new_obj);
 257         orig_ref->metaspace_pointers_do(&updater);
 258       }
 259 
 260       return true; // keep recursing until every object is visited exactly once.
 261     }
 262 
 263     virtual void push_special(SpecialRef type, Ref* ref, intptr_t* p) {
 264       assert(type == _method_entry_ref, "only special type allowed for now");

 265       address obj = ref->obj();
 266       address new_obj = _builder->get_new_loc(ref);
 267       size_t offset = pointer_delta(p, obj,  sizeof(u1));
 268       intptr_t* new_p = (intptr_t*)(new_obj + offset);
 269       assert(*p == *new_p, "must be a copy");













 270       ArchivePtrMarker::mark_pointer((address*)new_p);
 271     }
 272   };
 273 
 274   class EmbeddedRefUpdater: public MetaspaceClosure {
 275     DynamicArchiveBuilder* _builder;
 276     address _orig_obj;
 277     address _new_obj;
 278   public:
 279     EmbeddedRefUpdater(DynamicArchiveBuilder* shuffler, address orig_obj, address new_obj) :
 280       _builder(shuffler), _orig_obj(orig_obj), _new_obj(new_obj) {}
 281 
 282     // This method gets called once for each pointer field F of orig_obj.
 283     // We update new_obj->F to point to the new location of orig_obj->F.
 284     //
 285     // Example: Klass*  0x100 is copied to 0x400
 286     //          Symbol* 0x200 is copied to 0x500
 287     //
 288     // Let orig_obj == 0x100; and
 289     //     new_obj  == 0x400; and

 776   }
 777   _header->populate(base_info, os::vm_allocation_granularity());
 778 }
 779 
void DynamicArchiveBuilder::release_header() {
  // We temporarily allocated a dynamic FileMapInfo for dumping, which makes it appear we
  // have mapped a dynamic archive, but we actually have not. We are in a safepoint now.
  // Let's free it so that if class loading happens after we leave the safepoint, nothing
  // bad will happen.
  assert(SafepointSynchronize::is_at_safepoint(), "must be");
  FileMapInfo *mapinfo = FileMapInfo::dynamic_info();
  // _header must be the header owned by the dynamic FileMapInfo we created.
  assert(mapinfo != NULL && _header == mapinfo->dynamic_header(), "must be");
  delete mapinfo;
  // After deletion the VM must no longer believe a dynamic archive is mapped.
  assert(!DynamicArchive::is_mapped(), "must be");
  // _header was owned by mapinfo and is now freed; clear the dangling pointer.
  _header = NULL;
}
 792 
 793 size_t DynamicArchiveBuilder::estimate_trampoline_size() {
 794   size_t total = 0;
 795   size_t each_method_bytes =
 796     align_up(SharedRuntime::trampoline_size(), BytesPerWord) +
 797     align_up(sizeof(AdapterHandlerEntry*), BytesPerWord);
 798 
 799   for (int i = 0; i < _klasses->length(); i++) {
 800     InstanceKlass* ik = _klasses->at(i);
 801     Array<Method*>* methods = ik->methods();
 802     total += each_method_bytes * methods->length();
 803   }
 804   if (total == 0) {
 805     // We have nothing to archive, but let's avoid having an empty region.
 806     total = SharedRuntime::trampoline_size();
 807   }
 808   return total;
 809 }
 810 
 811 void DynamicArchiveBuilder::make_trampolines() {
 812   DumpRegion* mc_space = MetaspaceShared::misc_code_dump_space();
 813   char* p = mc_space->base();
 814   for (int i = 0; i < _klasses->length(); i++) {
 815     InstanceKlass* ik = _klasses->at(i);
 816     Array<Method*>* methods = ik->methods();
 817     for (int j = 0; j < methods->length(); j++) {
 818       Method* m = methods->at(j);


 819       address c2i_entry_trampoline = (address)p;
 820       p += SharedRuntime::trampoline_size();
 821       assert(p >= mc_space->base() && p <= mc_space->top(), "must be");
 822       m->set_from_compiled_entry(to_target(c2i_entry_trampoline));
 823 










 824       AdapterHandlerEntry** adapter_trampoline =(AdapterHandlerEntry**)p;
 825       p += sizeof(AdapterHandlerEntry*);
 826       assert(p >= mc_space->base() && p <= mc_space->top(), "must be");
 827       *adapter_trampoline = NULL;
 828       m->set_adapter_trampoline(to_target(adapter_trampoline));
 829     }
 830   }
 831 
 832   guarantee(p <= mc_space->top(), "Estimate of trampoline size is insufficient");
 833 }
 834 
 835 void DynamicArchiveBuilder::make_klasses_shareable() {
 836   int i, count = _klasses->length();
 837 
 838   InstanceKlass::disable_method_binary_search();
 839   for (i = 0; i < count; i++) {
 840     InstanceKlass* ik = _klasses->at(i);
 841     sort_methods(ik);
 842   }
 843 

 244         if (orig_ref->msotype() == MetaspaceObj::ClassType) {
 245           Klass* k = (Klass*)orig_obj;
 246           assert(k->is_instance_klass() &&
 247                  SystemDictionaryShared::is_excluded_class(InstanceKlass::cast(k)),
 248                  "orig_obj must be excluded Class");
 249         }
 250       }
 251 #endif
 252 
 253       log_debug(cds, dynamic)("Relocating " PTR_FORMAT " %s", p2i(new_obj),
 254                               MetaspaceObj::type_name(orig_ref->msotype()));
 255       if (new_obj != NULL) {
 256         EmbeddedRefUpdater updater(_builder, orig_obj, new_obj);
 257         orig_ref->metaspace_pointers_do(&updater);
 258       }
 259 
 260       return true; // keep recursing until every object is visited exactly once.
 261     }
 262 
 263     virtual void push_special(SpecialRef type, Ref* ref, intptr_t* p) {
 264       // TODO:CDS - JDK-8234693 will consolidate this with an almost identical method in metaspaceShared.cpp
 265       assert_valid(type);
 266       address obj = ref->obj();
 267       address new_obj = _builder->get_new_loc(ref);
 268       size_t offset = pointer_delta(p, obj,  sizeof(u1));
 269       intptr_t* new_p = (intptr_t*)(new_obj + offset);
 270       switch (type) {
 271       case _method_entry_ref:
 272         assert(*p == *new_p, "must be a copy");
 273         break;
 274       case _internal_pointer_ref:
 275         {
 276           size_t off = pointer_delta(*((address*)p), obj, sizeof(u1));
 277           assert(0 <= intx(off) && intx(off) < ref->size() * BytesPerWord, "must point to internal address");
 278           *((address*)new_p) = new_obj + off;
 279         }
 280         break;
 281       default:
 282         ShouldNotReachHere();
 283       }
 284       ArchivePtrMarker::mark_pointer((address*)new_p);
 285     }
 286   };
 287 
 288   class EmbeddedRefUpdater: public MetaspaceClosure {
 289     DynamicArchiveBuilder* _builder;
 290     address _orig_obj;
 291     address _new_obj;
 292   public:
 293     EmbeddedRefUpdater(DynamicArchiveBuilder* shuffler, address orig_obj, address new_obj) :
 294       _builder(shuffler), _orig_obj(orig_obj), _new_obj(new_obj) {}
 295 
 296     // This method gets called once for each pointer field F of orig_obj.
 297     // We update new_obj->F to point to the new location of orig_obj->F.
 298     //
 299     // Example: Klass*  0x100 is copied to 0x400
 300     //          Symbol* 0x200 is copied to 0x500
 301     //
 302     // Let orig_obj == 0x100; and
 303     //     new_obj  == 0x400; and

 790   }
 791   _header->populate(base_info, os::vm_allocation_granularity());
 792 }
 793 
void DynamicArchiveBuilder::release_header() {
  // We temporarily allocated a dynamic FileMapInfo for dumping, which makes it appear we
  // have mapped a dynamic archive, but we actually have not. We are in a safepoint now.
  // Let's free it so that if class loading happens after we leave the safepoint, nothing
  // bad will happen.
  assert(SafepointSynchronize::is_at_safepoint(), "must be");
  FileMapInfo *mapinfo = FileMapInfo::dynamic_info();
  // _header must be the header owned by the dynamic FileMapInfo we created.
  assert(mapinfo != NULL && _header == mapinfo->dynamic_header(), "must be");
  delete mapinfo;
  // After deletion the VM must no longer believe a dynamic archive is mapped.
  assert(!DynamicArchive::is_mapped(), "must be");
  // _header was owned by mapinfo and is now freed; clear the dangling pointer.
  _header = NULL;
}
 806 
 807 size_t DynamicArchiveBuilder::estimate_trampoline_size() {
 808   size_t total = 0;
 809   size_t each_method_bytes =
 810     align_up(SharedRuntime::trampoline_size(), BytesPerWord) * 3 +
 811     align_up(sizeof(AdapterHandlerEntry*), BytesPerWord);
 812 
 813   for (int i = 0; i < _klasses->length(); i++) {
 814     InstanceKlass* ik = _klasses->at(i);
 815     Array<Method*>* methods = ik->methods();
 816     total += each_method_bytes * methods->length();
 817   }
 818   if (total == 0) {
 819     // We have nothing to archive, but let's avoid having an empty region.
 820     total = SharedRuntime::trampoline_size();
 821   }
 822   return total;
 823 }
 824 
// Carve per-method trampolines out of the misc-code dump region. The layout
// (three entry trampolines then one AdapterHandlerEntry* slot per method)
// mirrors the accounting done in estimate_trampoline_size(); the final
// guarantee checks the estimate was sufficient.
void DynamicArchiveBuilder::make_trampolines() {
  DumpRegion* mc_space = MetaspaceShared::misc_code_dump_space();
  char* p = mc_space->base();
  for (int i = 0; i < _klasses->length(); i++) {
    InstanceKlass* ik = _klasses->at(i);
    Array<Method*>* methods = ik->methods();
    for (int j = 0; j < methods->length(); j++) {
      Method* m = methods->at(j);

      // TODO:CDS - JDK-8234693 will consolidate this with Method::unlink()
      // Trampoline for the regular compiled-to-interpreter entry.
      address c2i_entry_trampoline = (address)p;
      p += SharedRuntime::trampoline_size();
      assert(p >= mc_space->base() && p <= mc_space->top(), "must be");
      m->set_from_compiled_entry(to_target(c2i_entry_trampoline));

      // Trampoline for the inline-type read-only (ro) compiled entry.
      address c2i_inline_ro_entry_trampoline = (address)p;
      p += SharedRuntime::trampoline_size();
      assert(p >= mc_space->base() && p <= mc_space->top(), "must be");
      m->set_from_compiled_inline_ro_entry(to_target(c2i_inline_ro_entry_trampoline));

      // Trampoline for the inline-type compiled entry.
      address c2i_inline_entry_trampoline = (address)p;
      p +=  SharedRuntime::trampoline_size();
      assert(p >= mc_space->base() && p <= mc_space->top(), "must be");
      m->set_from_compiled_inline_entry(to_target(c2i_inline_entry_trampoline));

      // Adapter slot; archived as NULL (NOTE(review): presumably filled in
      // lazily at run time — confirm against runtime adapter code).
      AdapterHandlerEntry** adapter_trampoline =(AdapterHandlerEntry**)p;
      p += sizeof(AdapterHandlerEntry*);
      assert(p >= mc_space->base() && p <= mc_space->top(), "must be");
      *adapter_trampoline = NULL;
      m->set_adapter_trampoline(to_target(adapter_trampoline));
    }
  }

  guarantee(p <= mc_space->top(), "Estimate of trampoline size is insufficient");
}
 860 
 861 void DynamicArchiveBuilder::make_klasses_shareable() {
 862   int i, count = _klasses->length();
 863 
 864   InstanceKlass::disable_method_binary_search();
 865   for (i = 0; i < count; i++) {
 866     InstanceKlass* ik = _klasses->at(i);
 867     sort_methods(ik);
 868   }
 869 
< prev index next >