
src/hotspot/cpu/x86/interp_masm_x86.cpp

1157   call_VM(noreg, CAST_FROM_FN_PTR(address,
1158           InterpreterRuntime::build_method_counters), method);
1159   movptr(mcs, Address(method, Method::method_counters_offset()));
1160   testptr(mcs, mcs);
1161   jcc(Assembler::zero, skip); // No MethodCounters allocated, OutOfMemory
1162   bind(has_counters);
1163 }
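The out-of-line tail above re-attempts MethodCounters allocation through the runtime, reloads the pointer, and branches to skip when allocation failed, so profiling is simply omitted under OutOfMemory. A self-contained C++ model of the emitted logic (illustrative only; the struct shape and helper names are assumptions for the sketch, not the HotSpot API):

#include <new>  // std::nothrow

struct MethodCounters { /* invocation and backedge counters live here */ };

struct Method {
  MethodCounters* counters = nullptr;  // null until allocated on first use
  void build_counters() {
    // Runtime allocation attempt; may legitimately leave 'counters' null
    // when the VM is out of memory.
    counters = new (std::nothrow) MethodCounters();
  }
};

// Model of the check: return null when allocation failed, in which case the
// caller (the 'skip' branch above) omits the counter update entirely.
MethodCounters* get_or_build_counters(Method* m) {
  if (m->counters == nullptr) {
    m->build_counters();   // call_VM(... build_method_counters ...)
  }
  return m->counters;      // testptr/jcc(zero, skip) on this value
}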
1164 
1165 
1166 // Lock object
1167 //
1168 // Args:
1169 //      rdx, c_rarg1: BasicObjectLock to be used for locking
1170 //
1171 // Kills:
1172 //      rax, rbx
1173 void InterpreterMacroAssembler::lock_object(Register lock_reg) {
1174   assert(lock_reg == LP64_ONLY(c_rarg1) NOT_LP64(rdx),
1175          "The argument is only for looks. It must be c_rarg1");
1176 
1177   TSAN_RUNTIME_ONLY(push_ptr(lock_reg));
1178   if (UseHeavyMonitors) {
1179     call_VM(noreg,
1180             CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),
1181             lock_reg);
1182   } else {
1183     Label done;
1184 
1185     const Register swap_reg = rax; // Must use rax for cmpxchg instruction
1186     const Register tmp_reg = rbx; // Will be passed to biased_locking_enter to avoid a
1187                                   // problematic case where tmp_reg = no_reg.
1188     const Register obj_reg = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // Will contain the oop
1189 
1190     const int obj_offset = BasicObjectLock::obj_offset_in_bytes();
1191     const int lock_offset = BasicObjectLock::lock_offset_in_bytes();
1192     const int mark_offset = lock_offset +
1193                             BasicLock::displaced_header_offset_in_bytes();
1194 
1195     Label slow_case;
1196 
1197     // Load object pointer into obj_reg
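The lines elided here (1198-1235) try biased locking and then attempt to install the address of the BasicLock in the object's header word with cmpxchg; the visible tail below tests whether a failed cmpxchg was actually a recursive enter by the same thread. A self-contained sketch of that stack-locking protocol (illustrative only; the real code additionally masks alignment bits via zero_bits and routes through biased_locking_enter first):

#include <atomic>
#include <cstdint>
#include <cstddef>

struct BasicLock { uintptr_t displaced_header; };

bool fast_lock(std::atomic<uintptr_t>* header, BasicLock* lock,
               uintptr_t sp, size_t page_size) {
  uintptr_t mark = header->load() | 1;  // low bit set = "unlocked" mark
  lock->displaced_header = mark;        // remember the header we displace
  if (header->compare_exchange_strong(mark, (uintptr_t)lock)) {
    return true;  // CAS won: the header now points at our BasicLock
  }
  // CAS failed and 'mark' now holds the observed header. If it already
  // points into our own stack page, this is a recursive enter: record it
  // with a zero displaced header (the "test result" stored below).
  if (mark - sp < page_size) {
    lock->displaced_header = 0;
    return true;
  }
  return false;  // contended or inflated: fall through to the slow case
}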

1236     andptr(swap_reg, zero_bits - os::vm_page_size());
1237 
1238     // Save the test result; for the recursive case, the result is zero
1239     movptr(Address(lock_reg, mark_offset), swap_reg);
1240 
1241     if (PrintBiasedLockingStatistics) {
1242       cond_inc32(Assembler::zero,
1243                  ExternalAddress((address) BiasedLocking::fast_path_entry_count_addr()));
1244     }
1245     jcc(Assembler::zero, done);
1246 
1247     bind(slow_case);
1248 
1249     // Call the runtime routine for slow case
1250     call_VM(noreg,
1251             CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),
1252             lock_reg);
1253 
1254     bind(done);
1255   }
1256 
1257   TSAN_RUNTIME_ONLY(
1258     pop_ptr(lock_reg);
1259     pusha();
1260     call_VM(noreg,
1261             CAST_FROM_FN_PTR(address, SharedRuntime::tsan_interp_lock),
1262             lock_reg);
1263     popa();
1264   );
1265 }
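TSAN_RUNTIME_ONLY is not defined in this file. A minimal sketch of what such a guard presumably expands to in the TSAN port, assuming a ThreadSanitizer VM flag and an INCLUDE_TSAN build switch (both names are assumptions):

#if INCLUDE_TSAN
// Emit the wrapped instrumentation only when the VM runs under TSAN.
#define TSAN_RUNTIME_ONLY(code) \
  do {                          \
    if (ThreadSanitizer) {      \
      code;                     \
    }                           \
  } while (0)
#else
#define TSAN_RUNTIME_ONLY(code)
#endif

Note the ordering of the instrumentation: lock_object reports the acquisition to the TSAN runtime only after the monitor has actually been taken, while unlock_object below reports the release before the monitorexit, which is the order a happens-before-tracking runtime needs to observe. The push_ptr/pop_ptr pair preserves lock_reg across the locking sequence (which kills registers, per the header comment), and pusha/popa saves the remaining general-purpose registers around the call_VM.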
1266 
1267 
1268 // Unlocks an object. Used in monitorexit bytecode and
1269 // remove_activation.  Throws an IllegalMonitorStateException if the
1270 // object is not locked by the current thread.
1271 //
1272 // Args:
1273 //      rdx, c_rarg1: BasicObjectLock for lock
1274 //
1275 // Kills:
1276 //      rax
1277 //      c_rarg0, c_rarg1, c_rarg2, c_rarg3, ... (param regs)
1278 //      rscratch1 (scratch reg)
1279 //      rax, rbx, rcx, rdx
1280 void InterpreterMacroAssembler::unlock_object(Register lock_reg) {
1281   assert(lock_reg == LP64_ONLY(c_rarg1) NOT_LP64(rdx),
1282          "The argument is only for looks. It must be c_rarg1");
1283 
1284   TSAN_RUNTIME_ONLY(
1285     pusha();
1286     call_VM(noreg,
1287             CAST_FROM_FN_PTR(address, SharedRuntime::tsan_interp_unlock),
1288             lock_reg);
1289     popa();
1290   );
1291 
1292   if (UseHeavyMonitors) {
1293     call_VM(noreg,
1294             CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorexit),
1295             lock_reg);
1296   } else {
1297     Label done;
1298 
1299     const Register swap_reg   = rax;  // Must use rax for cmpxchg instruction
1300     const Register header_reg = LP64_ONLY(c_rarg2) NOT_LP64(rbx);  // Will contain the old oopMark
1301     const Register obj_reg    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);  // Will contain the oop
1302 
1303     save_bcp(); // Save in case of exception
1304 
1305     // Convert from BasicObjectLock structure to object and BasicLock
1306     // structure. Store the BasicLock address into %rax
1307     lea(swap_reg, Address(lock_reg, BasicObjectLock::lock_offset_in_bytes()));
1308 
1309     // Load oop into obj_reg(%c_rarg3)
1310     movptr(obj_reg, Address(lock_reg, BasicObjectLock::obj_offset_in_bytes()));
1311 
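The lea/movptr pair above depends on the BasicObjectLock layout: each interpreter monitor slot embeds a BasicLock followed by the oop being locked. A simplified model of that layout (the real classes live in hotspot's basicLock.hpp; the typedefs here are stand-ins for the sketch):

#include <cstddef>  // offsetof
#include <cstdint>

typedef uintptr_t markOop;  // stand-in for the saved mark word
typedef void*     oop;      // stand-in for an object reference

class BasicLock {
 private:
  volatile markOop _displaced_header;  // object header saved while locked
};

class BasicObjectLock {
 private:
  BasicLock _lock;  // embedded lock; the lea above computes &_lock
  oop       _obj;   // the locked object; the movptr above loads _obj
 public:
  static int lock_offset_in_bytes() { return offsetof(BasicObjectLock, _lock); }
  static int obj_offset_in_bytes()  { return offsetof(BasicObjectLock, _obj); }
};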

2016   Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rbx);
2017   if (JvmtiExport::can_post_interpreter_events()) {
2018     Label L;
2019     NOT_LP64(get_thread(rthread);)
2020     movl(rdx, Address(rthread, JavaThread::interp_only_mode_offset()));
2021     testl(rdx, rdx);
2022     jcc(Assembler::zero, L);
2023     call_VM(noreg, CAST_FROM_FN_PTR(address,
2024                                     InterpreterRuntime::post_method_entry));
2025     bind(L);
2026   }
2027 
2028   {
2029     SkipIfEqual skip(this, &DTraceMethodProbes, false);
2030     NOT_LP64(get_thread(rthread);)
2031     get_method(rarg);
2032     call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry),
2033                  rthread, rarg);
2034   }
2035 
2036   TSAN_RUNTIME_ONLY(call_VM(noreg,
2037                             CAST_FROM_FN_PTR(address,
2038                                              SharedRuntime::tsan_interp_method_entry)));
2039 
2040   // RedefineClasses() tracing support for obsolete method entry
2041   if (log_is_enabled(Trace, redefine, class, obsolete)) {
2042     NOT_LP64(get_thread(rthread);)
2043     get_method(rarg);
2044     call_VM_leaf(
2045       CAST_FROM_FN_PTR(address, SharedRuntime::rc_trace_method_entry),
2046       rthread, rarg);
2047   }
2048 }
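SkipIfEqual in the DTrace blocks above is an RAII helper: the constructor emits a compare-and-branch that skips the guarded region when the flag equals the given value, and the destructor binds the branch target at the closing brace. Roughly, modeled on the x86 MacroAssembler helper (a simplified sketch, not the verbatim source):

class SkipIfEqual {
 private:
  MacroAssembler* _masm;
  Label _label;
 public:
  SkipIfEqual(MacroAssembler* masm, const bool* flag_addr, bool value)
      : _masm(masm) {
    // Jump past the guarded region when *flag_addr == value.
    _masm->cmp8(ExternalAddress((address)flag_addr), value);
    _masm->jcc(Assembler::equal, _label);
  }
  ~SkipIfEqual() {
    _masm->bind(_label);  // the guarded region ends here
  }
};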
2049 
2050 
2051 void InterpreterMacroAssembler::notify_method_exit(
2052     TosState state, NotifyMethodExitMode mode) {
2053   // Whenever JVMTI has a thread in interp_only_mode, method entry/exit
2054   // events are sent to track stack depth.  If it is possible to enter
2055   // interp_only_mode, we add the code to check if the event should be sent.
2056   Register rthread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
2057   Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rbx);
2058   if (mode == NotifyJVMTI && JvmtiExport::can_post_interpreter_events()) {
2059     Label L;
2060     // Note: frame::interpreter_frame_result has a dependency on how the
2061     // method result is saved across the call to post_method_exit. If this
2062     // is changed then the interpreter_frame_result implementation will
2063     // need to be updated too.
2064 
2065     // The template interpreter will leave the result on the top of the stack.
2066     push(state);
2067     NOT_LP64(get_thread(rthread);)
2068     movl(rdx, Address(rthread, JavaThread::interp_only_mode_offset()));
2069     testl(rdx, rdx);
2070     jcc(Assembler::zero, L);
2071     call_VM(noreg,
2072             CAST_FROM_FN_PTR(address, InterpreterRuntime::post_method_exit));
2073     bind(L);
2074     pop(state);
2075   }
2076 
2077   TSAN_RUNTIME_ONLY(
2078     push(state);
2079     call_VM_leaf(CAST_FROM_FN_PTR(address,
2080                                   SharedRuntime::tsan_interp_method_exit));
2081     pop(state);
2082   );
2083 
2084   {
2085     SkipIfEqual skip(this, &DTraceMethodProbes, false);
2086     push(state);
2087     NOT_LP64(get_thread(rthread);)
2088     get_method(rarg);
2089     call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit),
2090                  rthread, rarg);
2091     pop(state);
2092   }
2093 }
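The push(state)/pop(state) pairs above spill the method result to the expression stack around each runtime call because the call can clobber the result registers; TosState tells the spill code which kind of value sits at the top of stack. A sketch of the dispatch, modeled on the 64-bit implementation (simplified; subword integer results are already widened into rax, and float/double results travel through XMM registers there):

void InterpreterMacroAssembler::push(TosState state) {
  switch (state) {
    case atos: push_ptr();  break;  // reference result
    case btos:                      // fall through
    case ztos:                      // fall through
    case ctos:                      // fall through
    case stos:                      // fall through
    case itos: push_i();    break;  // int-like result
    case ltos: push_l();    break;  // 64-bit long result
    case ftos: push_f();    break;  // float result
    case dtos: push_d();    break;  // double result
    case vtos: /* no result to save */ break;
    default:   ShouldNotReachHere();
  }
}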