annotate src/hotspot/share/aot/aotCompiledMethod.hpp @ 51020:ef980b9ac191

8203837: Split nmethod unloading from inline cache cleaning Summary: Refactor cleaning inline caches to after GC do_unloading. Reviewed-by: thartmann, eosterlund
author coleenp
date Wed, 02 May 2018 11:28:49 -0400
parents 29b94ed63a09
children f6641fcf7b7e
rev   line source
kvn@42650 1 /*
coleenp@49945 2 * Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
kvn@42650 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
kvn@42650 4 *
kvn@42650 5 * This code is free software; you can redistribute it and/or modify it
kvn@42650 6 * under the terms of the GNU General Public License version 2 only, as
kvn@42650 7 * published by the Free Software Foundation.
kvn@42650 8 *
kvn@42650 9 * This code is distributed in the hope that it will be useful, but WITHOUT
kvn@42650 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
kvn@42650 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
kvn@42650 12 * version 2 for more details (a copy is included in the LICENSE file that
kvn@42650 13 * accompanied this code).
kvn@42650 14 *
kvn@42650 15 * You should have received a copy of the GNU General Public License version
kvn@42650 16 * 2 along with this work; if not, write to the Free Software Foundation,
kvn@42650 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
kvn@42650 18 *
kvn@42650 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
kvn@42650 20 * or visit www.oracle.com if you need additional information or have any
kvn@42650 21 * questions.
kvn@42650 22 */
kvn@42650 23
kvn@42650 24 #ifndef SHARE_VM_AOT_AOTCOMPILEDMETHOD_HPP
kvn@42650 25 #define SHARE_VM_AOT_AOTCOMPILEDMETHOD_HPP
kvn@42650 26
kvn@42650 27 #include "code/codeCache.hpp"
kvn@42650 28 #include "code/compiledIC.hpp"
kvn@42650 29 #include "code/compiledMethod.hpp"
kvn@42650 30 #include "code/pcDesc.hpp"
kvn@42650 31 #include "code/relocInfo.hpp"
kvn@42650 32
kvn@42650 33 class AOTCodeHeap;
kvn@42650 34
// Per-method metadata record stored in the AOT library image.
// The section fields (_pc_desc_begin .. _oopmap_begin, and _size) are byte
// offsets relative to the start of this record, resolved via at_offset();
// the entry/handler/stub fields are byte offsets into the method's machine
// code (callers add them to the code start -- see AOTCompiledMethod below).
// NOTE(review): field order and sizes define the on-disk layout produced by
// the AOT compiler -- do not reorder or resize fields.
class aot_metadata {
 private:
  int _size;                     // total size of this record; the (empty) consts range sits here
  int _code_size;                // size of the method's machine code in bytes
  int _entry;                    // entry point, offset into code
  int _verified_entry;           // verified entry point, offset into code
  int _exception_handler_offset; // exception handler, offset into code
  int _deopt_handler_offset;     // deopt handler, offset into code
  int _stubs_offset;             // start of stubs section, offset into code
  int _frame_size;               // frame size in bytes (frame_size() converts to words)
  // location in frame (offset for sp) that deopt can store the original
  // pc during a deopt.
  int _orig_pc_offset;
  int _unsafe_access;            // non-zero => has_unsafe_access() is true

  // Offsets (from the start of this record) of consecutive data sections;
  // each section ends where the next one begins.
  int _pc_desc_begin;
  int _scopes_begin;
  int _reloc_begin;
  int _exception_table_begin;
  int _oopmap_begin;
  // Resolve a byte offset relative to the start of this record.
  address at_offset(size_t offset) const { return ((address) this) + offset; }
 public:
  int code_size() const { return _code_size; }
  int frame_size() const { return _frame_size / HeapWordSize; }
  // PcDescs occupy [_pc_desc_begin, _scopes_begin).
  PcDesc *scopes_pcs_begin() const { return (PcDesc *) at_offset(_pc_desc_begin); }
  PcDesc *scopes_pcs_end() const { return (PcDesc *) at_offset(_scopes_begin); }
  // Scopes data occupies [_scopes_begin, _reloc_begin).
  address scopes_data_begin() const { return at_offset(_scopes_begin); }
  address scopes_data_end() const { return at_offset(_reloc_begin); }
  // Relocation info occupies [_reloc_begin, _exception_table_begin).
  relocInfo* relocation_begin() const { return (relocInfo*) at_offset(_reloc_begin); }
  relocInfo* relocation_end() const { return (relocInfo*) at_offset(_exception_table_begin); }
  // Exception handler table occupies [_exception_table_begin, _oopmap_begin).
  address handler_table_begin () const { return at_offset(_exception_table_begin); }
  address handler_table_end() const { return at_offset(_oopmap_begin); }

  // AOT code carries no implicit-null-check table: begin == end (empty range).
  address nul_chk_table_begin() const { return at_offset(_oopmap_begin); }
  address nul_chk_table_end() const { return at_offset(_oopmap_begin); }

  ImmutableOopMapSet* oopmap_set() const { return (ImmutableOopMapSet*) at_offset(_oopmap_begin); }

  // No constants section either: an empty range at the end of the record.
  address consts_begin() const { return at_offset(_size); }
  address consts_end() const { return at_offset(_size); }
  int stub_offset() const { return _stubs_offset; }
  int entry_offset() const { return _entry; }
  int verified_entry_offset() const { return _verified_entry; }
  int exception_handler_offset() const { return _exception_handler_offset; }
  int deopt_handler_offset() const { return _deopt_handler_offset; }
  int orig_pc_offset() const { return _orig_pc_offset; }

  int handler_table_size() const { return handler_table_end() - handler_table_begin(); }
  int nul_chk_table_size() const { return nul_chk_table_end() - nul_chk_table_begin(); } // always 0 (see above)
  bool has_unsafe_access() const { return _unsafe_access != 0; }

};
kvn@42650 87
/*
 * Use this for AOTCompiledMethods since many of the fields in CodeBlob get the
 * same value when they come from AOT: code_begin == content_begin, etc.
 */
kvn@42650 91 class AOTCompiledMethodLayout : public CodeBlobLayout {
kvn@42650 92 public:
kvn@42650 93 AOTCompiledMethodLayout(address code_begin, address code_end, address relocation_begin, address relocation_end) :
kvn@42650 94 CodeBlobLayout(
kvn@42650 95 code_begin, // code_begin
kvn@42650 96 code_end, // code_end
kvn@42650 97 code_begin, // content_begin
kvn@42650 98 code_end, // content_end
kvn@42650 99 code_end, // data_end
kvn@42650 100 relocation_begin, // relocation_begin
kvn@42650 101 relocation_end
kvn@42650 102 ) {
kvn@42650 103 }
kvn@42650 104 };
kvn@42650 105
// An AOT-compiled Java method (or AOT runtime stub) loaded from an AOT
// library. The object itself is C-heap allocated (CHeapObj<mtCode>) and only
// describes machine code that stays inside the shared library; state
// transitions are recorded through a jlong cell inside the library image
// (_state_adr) so the loaded code can observe its own state.
class AOTCompiledMethod : public CompiledMethod, public CHeapObj<mtCode> {
 private:
  address _code;            // start of this method's machine code in the AOT library
  aot_metadata* _meta;      // offsets/sections descriptor (see aot_metadata above)
  Metadata** _metadata_got; // table of Metadata* entries for this method
  jlong* _state_adr;        // Address of cell to indicate aot method state (in_use or not_entrant)
  AOTCodeHeap* _heap;       // code heap which has this method
  const char* _name;        // For stub: "AOT Stub<name>" for stub,
                            // For nmethod: "<u2_size>Ljava/lang/ThreadGroup;<u2_size>addUnstarted<u2_size>()V"
  const int _metadata_size; // size of _metadata_got
  const int _aot_id;        // reported as compile_id()
  const int _method_index;  // index of this method (see method_index()); presumably within _heap -- TODO confirm
  oop _oop;                 // method()->method_holder()->klass_holder()

  // Address within a frame where the original pc was saved for deopt
  // (get_original_pc/set_original_pc below read and write through it).
  address* orig_pc_addr(const frame* fr);
  // Shared implementation for make_not_entrant()/make_not_used().
  bool make_not_entrant_helper(int new_state);

 public:
  // Allocate/free in C-heap, not in the JIT code cache.
  using CHeapObj<mtCode>::operator new;
  using CHeapObj<mtCode>::operator delete;

  int method_index() const { return _method_index; }
  void set_oop(oop o) { _oop = o; }

  AOTCompiledMethod(address code, Method* method, aot_metadata* meta, address metadata_got, int metadata_size, jlong* state_adr, AOTCodeHeap* heap, const char* name, int method_index, int aot_id) :
    CompiledMethod(method, name, compiler_jvmci, // AOT code is generated by JVMCI compiler
                   AOTCompiledMethodLayout(code, code + meta->code_size(), (address) meta->relocation_begin(), (address) meta->relocation_end()),
                   0 /* frame_complete_offset */, meta->frame_size() /* frame_size */, meta->oopmap_set(), false /* caller_must_gc_arguments */),
    _code(code),
    _meta(meta),
    _metadata_got((Metadata**) metadata_got),
    _state_adr(state_adr),
    _heap(heap),
    _name(name),
    _metadata_size(metadata_size),
    _method_index(method_index),
    _aot_id(aot_id) {

    // AOT code may live out of near-call range of the code cache; record it
    // if either end of the code range is a far target.
    _is_far_code = CodeCache::is_far_target(code) ||
                   CodeCache::is_far_target(code + meta->code_size());
    _exception_cache = NULL;

    // Cache section boundaries computed from the metadata record.
    _scopes_data_begin = (address) _meta->scopes_data_begin();
    _deopt_handler_begin = (address) _code + _meta->deopt_handler_offset();
    // NOTE(review): 'this' looks like a sentinel for a method-handle deopt
    // handler AOT code does not have -- confirm against frame walking code.
    _deopt_mh_handler_begin = (address) this;

    _pc_desc_container.reset_to(scopes_pcs_begin());

    // Mark the AOTCompiledMethod as in_use
    *_state_adr = nmethod::in_use;
    set_has_unsafe_access(_meta->has_unsafe_access());
    _oop = NULL;
  }

  virtual bool is_aot() const { return true; }
  // An AOTCompiledMethod without a Method* is an AOT runtime stub.
  virtual bool is_runtime_stub() const { return is_aot_runtime_stub(); }

  virtual bool is_compiled() const { return !is_aot_runtime_stub(); }

  virtual bool is_locked_by_vm() const { return false; }

  // Current state, read from the shared cell in the AOT library image.
  int state() const { return *_state_adr; }

  // Non-virtual for speed
  bool _is_alive() const { return state() < zombie; }

  virtual bool is_zombie() const { return state() == zombie; }
  virtual bool is_unloaded() const { return state() == unloaded; }
  virtual bool is_not_entrant() const { return state() == not_entrant ||
                                               state() == not_used; }
  virtual bool is_alive() const { return _is_alive(); }
  virtual bool is_in_use() const { return state() == in_use; }

  address exception_begin() const { return (address) _code + _meta->exception_handler_offset(); }

  virtual const char* name() const { return _name; }

  virtual int compile_id() const { return _aot_id; }

  void print_on(outputStream* st) const;
  void print_on(outputStream* st, const char* msg) const;
  void print() const;

  virtual void print_value_on(outputStream *stream) const;
  virtual void print_block_comment(outputStream *stream, address block_begin) const { }
  virtual void verify() {}

  virtual int comp_level() const { return CompLevel_aot; }
  virtual address verified_entry_point() const { return _code + _meta->verified_entry_offset(); }
  virtual void log_identity(xmlStream* stream) const;
  virtual void log_state_change() const;
  virtual bool make_entrant();
  // State transitions funnel through make_not_entrant_helper().
  virtual bool make_not_entrant() { return make_not_entrant_helper(not_entrant); }
  virtual bool make_not_used() { return make_not_entrant_helper(not_used); }
  virtual address entry_point() const { return _code + _meta->entry_offset(); }
  virtual bool make_zombie() { ShouldNotReachHere(); return false; }
  virtual bool is_osr_method() const { return false; }
  virtual int osr_entry_bci() const { ShouldNotReachHere(); return -1; }
  // AOT compiled methods do not get into zombie state
  virtual bool can_convert_to_zombie() { return false; }

  virtual bool is_evol_dependent_on(Klass* dependee);
  virtual bool is_dependent_on_method(Method* dependee) { return true; }

  virtual void clear_inline_caches();

  virtual void print_pcs() {}

  // Section accessors: all boundaries come from the aot_metadata record.
  virtual address scopes_data_end() const { return _meta->scopes_data_end(); }

  virtual oop oop_at(int index) const;
  virtual Metadata* metadata_at(int index) const;

  virtual PcDesc* scopes_pcs_begin() const { return _meta->scopes_pcs_begin(); }
  virtual PcDesc* scopes_pcs_end() const { return _meta->scopes_pcs_end(); }

  virtual address handler_table_begin() const { return _meta->handler_table_begin(); }
  virtual address handler_table_end() const { return _meta->handler_table_end(); }

  virtual address nul_chk_table_begin() const { return _meta->nul_chk_table_begin(); }
  virtual address nul_chk_table_end() const { return _meta->nul_chk_table_end(); }

  virtual address consts_begin() const { return _meta->consts_begin(); }
  virtual address consts_end() const { return _meta->consts_end(); }

  virtual address stub_begin() const { return code_begin() + _meta->stub_offset(); }
  virtual address stub_end() const { return code_end(); }

  // Indexed oop/metadata address lookup is unsupported for AOT methods.
  virtual oop* oop_addr_at(int index) const { ShouldNotReachHere(); return NULL; }
  virtual Metadata** metadata_addr_at(int index) const { ShouldNotReachHere(); return NULL; }

  // Accessor/mutator for the original pc of a frame before a frame was deopted.
  address get_original_pc(const frame* fr) { return *orig_pc_addr(fr); }
  void set_original_pc(const frame* fr, address pc) { *orig_pc_addr(fr) = pc; }

#ifdef HOTSWAP
  // Flushing and deoptimization in case of evolution
  void flush_evol_dependents_on(InstanceKlass* dependee);
#endif // HOTSWAP

  virtual void metadata_do(void f(Metadata*));

  // True iff p points into this method's metadata table.
  bool metadata_got_contains(Metadata **p) {
    return p >= &_metadata_got[0] && p < &_metadata_got[_metadata_size];
  }

  Metadata** metadata_begin() const { return &_metadata_got[0] ; }
  Metadata** metadata_end() const { return &_metadata_got[_metadata_size] ; }
  const char* compile_kind() const { return "AOT"; }

  int get_state() const {
    return (int) (*_state_adr);
  }

  // inlined and non-virtual for AOTCodeHeap::oops_do
  void do_oops(OopClosure* f) {
    assert(_is_alive(), "");
    if (_oop != NULL) {
      f->do_oop(&_oop);
    }
#if 0
    metadata_oops_do(metadata_begin(), metadata_end(), f);
#endif
  }


 protected:
  // AOT compiled methods are not flushed
  void flush() {};

  // CompiledMethod hooks implemented in the .cpp; calls in AOT code go
  // through PLT stubs (see PltNativeCallWrapper below).
  NativeCallWrapper* call_wrapper_at(address call) const;
  NativeCallWrapper* call_wrapper_before(address return_pc) const;
  address call_instruction_address(address pc) const;

  CompiledStaticCall* compiledStaticCall_at(Relocation* call_site) const;
  CompiledStaticCall* compiledStaticCall_at(address addr) const;
  CompiledStaticCall* compiledStaticCall_before(address addr) const;
 private:
  bool is_aot_runtime_stub() const { return _method == NULL; }

 protected:
  // GC unloading support; there is never JVMCI-side unloading work to do
  // for AOT methods (hence the constant false).
  virtual bool do_unloading_oops(address low_boundary, BoolObjectClosure* is_alive);
  virtual bool do_unloading_jvmci() { return false; }

};
kvn@42650 291
kvn@42650 292 class PltNativeCallWrapper: public NativeCallWrapper {
kvn@42650 293 private:
kvn@42650 294 NativePltCall* _call;
kvn@42650 295
kvn@42650 296 public:
kvn@42650 297 PltNativeCallWrapper(NativePltCall* call) : _call(call) {}
kvn@42650 298
kvn@42650 299 virtual address destination() const { return _call->destination(); }
kvn@42650 300 virtual address instruction_address() const { return _call->instruction_address(); }
kvn@42650 301 virtual address next_instruction_address() const { return _call->next_instruction_address(); }
kvn@42650 302 virtual address return_address() const { return _call->return_address(); }
kvn@42650 303 virtual address get_resolve_call_stub(bool is_optimized) const { return _call->plt_resolve_call(); }
kvn@42650 304 virtual void set_destination_mt_safe(address dest) { _call->set_destination_mt_safe(dest); }
kvn@42650 305 virtual void set_to_interpreted(const methodHandle& method, CompiledICInfo& info);
kvn@42650 306 virtual void verify() const { _call->verify(); }
kvn@42650 307 virtual void verify_resolve_call(address dest) const;
kvn@42650 308
kvn@42650 309 virtual bool is_call_to_interpreted(address dest) const { return (dest == _call->plt_c2i_stub()); }
kvn@42650 310 // TODO: assume for now that patching of aot code (got cell) is safe.
kvn@42650 311 virtual bool is_safe_for_patching() const { return true; }
kvn@42650 312
kvn@42650 313 virtual NativeInstruction* get_load_instruction(virtual_call_Relocation* r) const;
kvn@42650 314
kvn@42650 315 virtual void *get_data(NativeInstruction* instruction) const {
kvn@42650 316 return (void*)((NativeLoadGot*) instruction)->data();
kvn@42650 317 }
kvn@42650 318
kvn@42650 319 virtual void set_data(NativeInstruction* instruction, intptr_t data) {
kvn@42650 320 ((NativeLoadGot*) instruction)->set_data(data);
kvn@42650 321 }
kvn@42650 322 };
kvn@42650 323
kvn@42650 324 #endif //SHARE_VM_AOT_AOTCOMPILEDMETHOD_HPP