131 case TemplateTable::equal : return Assembler::NE;
132 case TemplateTable::not_equal : return Assembler::EQ;
133 case TemplateTable::less : return Assembler::GE;
134 case TemplateTable::less_equal : return Assembler::GT;
135 case TemplateTable::greater : return Assembler::LE;
136 case TemplateTable::greater_equal: return Assembler::LT;
137 }
138 ShouldNotReachHere();
139 return Assembler::EQ;
140 }
141
142
143 // Miscellaneous helper routines
// Store an oop (or NULL) at the Address described by dst.
// If val == noreg this means store a NULL.
// r10 and r1 are handed to store_heap_oop as temporaries for any GC
// barrier code it may emit; by convention the value, when present, is
// always in r0 (the assert below documents that the Register argument
// is fixed and only there for readability at call sites).
static void do_oop_store(InterpreterMacroAssembler* _masm,
                         Address dst,
                         Register val,
                         DecoratorSet decorators) {
  assert(val == noreg || val == r0, "parameter is just for looks");
  __ store_heap_oop(dst, val, r10, r1, decorators);
}
153
// Load an oop from src into dst, applying the given access decorators.
// r10 and r1 serve as temporaries for any GC barrier code emitted by
// load_heap_oop.
static void do_oop_load(InterpreterMacroAssembler* _masm,
                        Address src,
                        Register dst,
                        DecoratorSet decorators) {
  __ load_heap_oop(dst, src, r10, r1, decorators);
}
160
// Return an Address for the bytecode operand at rbcp + offset.
// Only legal for templates that declare they use the bytecode pointer.
Address TemplateTable::at_bcp(int offset) {
  assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
  return Address(rbcp, offset);
}
165
166 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
167 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
168 int byte_no)
169 {
170 if (!RewriteBytecodes) return;
171 Label L_patch_done;
172
173 switch (bc) {
174 case Bytecodes::_fast_aputfield:
175 case Bytecodes::_fast_bputfield:
176 case Bytecodes::_fast_zputfield:
177 case Bytecodes::_fast_cputfield:
178 case Bytecodes::_fast_dputfield:
179 case Bytecodes::_fast_fputfield:
180 case Bytecodes::_fast_iputfield:
181 case Bytecodes::_fast_lputfield:
182 case Bytecodes::_fast_sputfield:
183 {
184 // We skip bytecode quickening for putfield instructions when
185 // the put_code written to the constant pool cache is zero.
186 // This is required so that every execution of this instruction
187 // calls out to InterpreterRuntime::resolve_get_put to do
188 // additional, required work.
189 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
190 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
191 __ get_cache_and_index_and_bytecode_at_bcp(temp_reg, bc_reg, temp_reg, byte_no, 1);
192 __ movw(bc_reg, bc);
193 __ cbzw(temp_reg, L_patch_done); // don't patch
729 }
730
// Emit a null check of 'array' and a range check of 'index' against
// the array length.  On failure, tail-branches to the shared
// ArrayIndexOutOfBoundsException entry with the aberrant index in r1
// and the array in r3 (the conventions the exception stub expects).
void TemplateTable::index_check(Register array, Register index)
{
  // destroys r1, rscratch1
  // check array: the load of the length below takes an implicit null
  // check at this offset
  __ null_check(array, arrayOopDesc::length_offset_in_bytes());
  // sign extend index for use by indexed load
  // __ movl2ptr(index, index);
  // check index
  Register length = rscratch1;
  __ ldrw(length, Address(array, arrayOopDesc::length_offset_in_bytes()));
  // unsigned compare: a negative index appears as a huge unsigned
  // value, so the single LO branch below covers both bounds
  __ cmpw(index, length);
  if (index != r1) {
    // ??? convention: move aberrant index into r1 for exception message
    assert(r1 != array, "different registers");
    __ mov(r1, index);
  }
  Label ok;
  __ br(Assembler::LO, ok);
  // ??? convention: move array into r3 for exception message
  __ mov(r3, array);
  __ mov(rscratch1, Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
  __ br(rscratch1);  // indirect branch to the exception stub; no return
  __ bind(ok);
}
755
// iaload: load an int from an int array.
// On entry: index in r0 (itos), arrayref on the expression stack.
void TemplateTable::iaload()
{
  transition(itos, itos);
  __ mov(r1, r0);   // index out of the way
  __ pop_ptr(r0);   // arrayref
  // r0: array
  // r1: index
  index_check(r0, r1); // leaves index in r1, kills rscratch1
  // Pre-add the element base offset scaled down by 2 so the uxtw(2)
  // addressing mode below scales base and index together.
  __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2);
  __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
}
767
768 void TemplateTable::laload()
769 {
770 transition(itos, ltos);
771 __ mov(r1, r0);
772 __ pop_ptr(r0);
// daload: load a double from a double array.
// On entry: index in r0 (itos), arrayref on the expression stack.
void TemplateTable::daload()
{
  transition(itos, dtos);
  __ mov(r1, r0);   // index out of the way
  __ pop_ptr(r0);   // arrayref
  // r0: array
  // r1: index
  index_check(r0, r1); // leaves index in r1, kills rscratch1
  // Base offset pre-scaled by >> 3 to match the uxtw(3) scaling below.
  __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
  __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
}
803
// aaload: load an object reference from an object array, going through
// do_oop_load so the GC's load barrier (and any decompression) applies.
void TemplateTable::aaload()
{
  transition(itos, atos);
  __ mov(r1, r0);   // index out of the way
  __ pop_ptr(r0);   // arrayref
  // r0: array
  // r1: index
  index_check(r0, r1); // leaves index in r1, kills rscratch1
  // Base offset pre-scaled by LogBytesPerHeapOop to match the uxtw
  // scaling in the element address below.
  __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
  do_oop_load(_masm,
              Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)),
              r0,
              IS_ARRAY);
}
818
// baload: load a byte (sign-extended) from a byte/boolean array.
void TemplateTable::baload()
{
  transition(itos, itos);
  __ mov(r1, r0);   // index out of the way
  __ pop_ptr(r0);   // arrayref
  // r0: array
  // r1: index
  index_check(r0, r1); // leaves index in r1, kills rscratch1
  // ">> 0" kept for symmetry with the other array loads (scale is 1).
  __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
  __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
}
830
831 void TemplateTable::caload()
832 {
833 transition(itos, itos);
834 __ mov(r1, r0);
835 __ pop_ptr(r0);
836 // r0: array
1093 __ pop_ptr(r3);
1094 // v0: value
1095 // r1: index
1096 // r3: array
1097 index_check(r3, r1); // prefer index in r1
1098 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
1099 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg);
1100 }
1101
// aastore: store an object reference into an object array, with the
// required dynamic array-store (subtype) check.  The three operands
// stay on the expression stack until the very end so they remain
// visible to the GC across the barrier code; they are popped in one
// esp adjustment at 'done'.
void TemplateTable::aastore() {
  Label is_null, ok_is_subtype, done;
  transition(vtos, vtos);
  // stack: ..., array, index, value
  __ ldr(r0, at_tos());    // value
  __ ldr(r2, at_tos_p1()); // index
  __ ldr(r3, at_tos_p2()); // array

  // Element address = array + (r4 << LogBytesPerHeapOop), where r4 is
  // computed below as index + pre-scaled base offset.
  Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));

  index_check(r3, r2);     // kills r1
  __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);

  // do array store check - check for NULL value first: storing NULL
  // never needs a subtype check
  __ cbz(r0, is_null);

  // Move subklass into r1
  __ load_klass(r1, r0);
  // Move superklass (the array's element klass) into r0
  __ load_klass(r0, r3);
  __ ldr(r0, Address(r0,
                     ObjArrayKlass::element_klass_offset()));
  // Compress array + index*oopSize + 12 into a single register. Frees r2.

  // Generate subtype check.  Blows r2, r5
  // Superklass in r0.  Subklass in r1.
  __ gen_subtype_check(r1, ok_is_subtype);

  // Come here on failure
  // object is at TOS
  __ b(Interpreter::_throw_ArrayStoreException_entry);

  // Come here on success
  __ bind(ok_is_subtype);

  // Get the value we will store — reload from TOS since the subtype
  // check clobbered r0
  __ ldr(r0, at_tos());
  // Now store using the appropriate barrier
  do_oop_store(_masm, element_address, r0, IS_ARRAY);
  __ b(done);

  // Have a NULL in r0, r3=array, r2=index.  Store NULL at ary[idx]
  __ bind(is_null);
  __ profile_null_seen(r2);

  // Store a NULL
  do_oop_store(_masm, element_address, noreg, IS_ARRAY);

  // Pop stack arguments
  __ bind(done);
  __ add(esp, esp, 3 * Interpreter::stackElementSize);
}
1154
1155 void TemplateTable::bastore()
1156 {
1157 transition(itos, vtos);
1158 __ pop_i(r1);
1159 __ pop_ptr(r3);
1160 // r0: value
1161 // r1: index
1162 // r3: array
1163 index_check(r3, r1); // prefer index in r1
1164
1165 // Need to check whether array is boolean or byte
1166 // since both types share the bastore bytecode.
1167 __ load_klass(r2, r3);
1168 __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));
1999 __ br(j_not(cc), not_taken);
2000 branch(false, false);
2001 __ bind(not_taken);
2002 __ profile_not_taken_branch(r0);
2003 }
2004
// ifnull / ifnonnull: branch on the reference at TOS being NULL or not.
// The emitted test is inverted: we skip to not_taken when the
// bytecode's condition does NOT hold, and fall into branch() when it
// does.
void TemplateTable::if_nullcmp(Condition cc)
{
  transition(atos, vtos);
  // assume branch is more often taken than not (loops use backward branches)
  Label not_taken;
  if (cc == equal)
    __ cbnz(r0, not_taken);  // ifnull: non-null means not taken
  else
    __ cbz(r0, not_taken);   // ifnonnull: null means not taken
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(r0);
}
2018
// if_acmpeq / if_acmpne: compare the two references on the stack and
// branch.  cmpoop handles comparison of (possibly compressed) oops;
// j_not(cc) inverts the condition so we skip when the branch is not
// taken.
void TemplateTable::if_acmp(Condition cc)
{
  transition(atos, vtos);
  // assume branch is more often taken than not (loops use backward branches)
  Label not_taken;
  __ pop_ptr(r1);          // second operand; first is already in r0
  __ cmpoop(r1, r0);
  __ br(j_not(cc), not_taken);
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(r0);
}
2031
// ret: return from a jsr subroutine.  The return bci is read from the
// local variable named by the bytecode operand, and rbcp is recomputed
// as method->const + codes_offset + bci before dispatching.
void TemplateTable::ret() {
  transition(vtos, vtos);
  // We might be moving to a safepoint.  The thread which calls
  // Interpreter::notice_safepoints() will effectively flush its cache
  // when it makes a system call, but we need to do something to
  // ensure that we see the changed dispatch table.
  __ membar(MacroAssembler::LoadLoad);

  locals_index(r1);
  __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
  __ profile_ret(r1, r2);
  __ ldr(rbcp, Address(rmethod, Method::const_offset()));
  __ lea(rbcp, Address(rbcp, r1));
  __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
  // generate_poll=true: allow a safepoint poll at the dispatch
  __ dispatch_next(vtos, 0, /*generate_poll*/true);
}
2048
2049 void TemplateTable::wide_ret() {
2050 transition(vtos, vtos);
2051 locals_index_wide(r1);
2484 // 8179954: We need to make sure that the code generated for
2485 // volatile accesses forms a sequentially-consistent set of
2486 // operations when combined with STLR and LDAR. Without a leading
2487 // membar it's possible for a simple Dekker test to fail if loads
2488 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
2489 // the stores in one method and we interpret the loads in another.
2490 if (!is_c1_or_interpreter_only()){
2491 Label notVolatile;
2492 __ tbz(raw_flags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2493 __ membar(MacroAssembler::AnyAny);
2494 __ bind(notVolatile);
2495 }
2496
2497 const Address field(obj, off);
2498
2499 Label Done, notByte, notBool, notInt, notShort, notChar,
2500 notLong, notFloat, notObj, notDouble;
2501
2502 // x86 uses a shift and mask or wings it with a shift plus assert
2503 // the mask is not needed. aarch64 just uses bitfield extract
2504 __ ubfxw(flags, raw_flags, ConstantPoolCacheEntry::tos_state_shift,
2505 ConstantPoolCacheEntry::tos_state_bits);
2506
2507 assert(btos == 0, "change code, btos != 0");
2508 __ cbnz(flags, notByte);
2509
2510 // Don't rewrite getstatic, only getfield
2511 if (is_static) rc = may_not_rewrite;
2512
2513 // btos
2514 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
2515 __ push(btos);
2516 // Rewrite bytecode to be faster
2517 if (rc == may_rewrite) {
2518 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2519 }
2520 __ b(Done);
2521
2522 __ bind(notByte);
2523 __ cmp(flags, (u1)ztos);
2524 __ br(Assembler::NE, notBool);
2525
2526 // ztos (same code as btos)
2527 __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2528 __ push(ztos);
2529 // Rewrite bytecode to be faster
2530 if (rc == may_rewrite) {
2531 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2532 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2533 }
2534 __ b(Done);
2535
2536 __ bind(notBool);
2537 __ cmp(flags, (u1)atos);
2538 __ br(Assembler::NE, notObj);
2539 // atos
2540 do_oop_load(_masm, field, r0, IN_HEAP);
2541 __ push(atos);
2542 if (rc == may_rewrite) {
2543 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2544 }
2545 __ b(Done);
2546
2547 __ bind(notObj);
2548 __ cmp(flags, (u1)itos);
2549 __ br(Assembler::NE, notInt);
2550 // itos
2551 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2552 __ push(itos);
2553 // Rewrite bytecode to be faster
2554 if (rc == may_rewrite) {
2555 patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2556 }
2557 __ b(Done);
2558
2559 __ bind(notInt);
2560 __ cmp(flags, (u1)ctos);
2561 __ br(Assembler::NE, notChar);
2562 // ctos
2563 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2564 __ push(ctos);
2565 // Rewrite bytecode to be faster
2695 // c_rarg1: object pointer set up above (NULL if static)
2696 // c_rarg2: cache entry pointer
2697 // c_rarg3: jvalue object on the stack
2698 __ call_VM(noreg,
2699 CAST_FROM_FN_PTR(address,
2700 InterpreterRuntime::post_field_modification),
2701 c_rarg1, c_rarg2, c_rarg3);
2702 __ get_cache_and_index_at_bcp(cache, index, 1);
2703 __ bind(L1);
2704 }
2705 }
2706
2707 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2708 transition(vtos, vtos);
2709
2710 const Register cache = r2;
2711 const Register index = r3;
2712 const Register obj = r2;
2713 const Register off = r19;
2714 const Register flags = r0;
2715 const Register bc = r4;
2716
2717 resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2718 jvmti_post_field_mod(cache, index, is_static);
2719 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2720
2721 Label Done;
2722 __ mov(r5, flags);
2723
2724 {
2725 Label notVolatile;
2726 __ tbz(r5, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2727 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2728 __ bind(notVolatile);
2729 }
2730
2731 // field address
2732 const Address field(obj, off);
2733
2734 Label notByte, notBool, notInt, notShort, notChar,
2735 notLong, notFloat, notObj, notDouble;
2736
2737 // x86 uses a shift and mask or wings it with a shift plus assert
2738 // the mask is not needed. aarch64 just uses bitfield extract
2739 __ ubfxw(flags, flags, ConstantPoolCacheEntry::tos_state_shift, ConstantPoolCacheEntry::tos_state_bits);
2740
2741 assert(btos == 0, "change code, btos != 0");
2742 __ cbnz(flags, notByte);
2743
2744 // Don't rewrite putstatic, only putfield
2745 if (is_static) rc = may_not_rewrite;
2746
2747 // btos
2748 {
2749 __ pop(btos);
2750 if (!is_static) pop_and_check_object(obj);
2751 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
2752 if (rc == may_rewrite) {
2753 patch_bytecode(Bytecodes::_fast_bputfield, bc, r1, true, byte_no);
2754 }
2755 __ b(Done);
2756 }
2759 __ cmp(flags, (u1)ztos);
2760 __ br(Assembler::NE, notBool);
2761
2762 // ztos
2763 {
2764 __ pop(ztos);
2765 if (!is_static) pop_and_check_object(obj);
2766 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
2767 if (rc == may_rewrite) {
2768 patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
2769 }
2770 __ b(Done);
2771 }
2772
2773 __ bind(notBool);
2774 __ cmp(flags, (u1)atos);
2775 __ br(Assembler::NE, notObj);
2776
2777 // atos
2778 {
2779 __ pop(atos);
2780 if (!is_static) pop_and_check_object(obj);
2781 // Store into the field
2782 do_oop_store(_masm, field, r0, IN_HEAP);
2783 if (rc == may_rewrite) {
2784 patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
2785 }
2786 __ b(Done);
2787 }
2788
2789 __ bind(notObj);
2790 __ cmp(flags, (u1)itos);
2791 __ br(Assembler::NE, notInt);
2792
2793 // itos
2794 {
2795 __ pop(itos);
2796 if (!is_static) pop_and_check_object(obj);
2797 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
2798 if (rc == may_rewrite) {
2799 patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
2800 }
2801 __ b(Done);
2802 }
2803
2804 __ bind(notInt);
2805 __ cmp(flags, (u1)ctos);
2806 __ br(Assembler::NE, notChar);
// putstatic: shares the full put machinery with putfield; the boolean
// selects the static path (no receiver, no bytecode rewriting).
void TemplateTable::putstatic(int byte_no) {
  putfield_or_static(byte_no, true);
}
2909
// Emit the JVMTI field-modification notification for the fast
// _fast_Xputfield bytecodes.  If no watch is set (the runtime counter
// is zero) the whole callout is skipped.  The tos value is saved on
// the expression stack as a jvalue, the VM is called, and the tos
// value is restored afterwards, since call_VM clobbers registers.
// Emits nothing at all when the JVMTI capability is absent.
void TemplateTable::jvmti_post_fast_field_mod()
{
  if (JvmtiExport::can_post_field_modification()) {
    // Check to see if a field modification watch has been set before
    // we take the time to call into the VM.
    Label L2;
    __ lea(rscratch1, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
    __ ldrw(c_rarg3, Address(rscratch1));
    __ cbzw(c_rarg3, L2);  // no watches set: skip the callout
    __ pop_ptr(r19);                  // copy the object pointer from tos
    __ verify_oop(r19);
    __ push_ptr(r19);                 // put the object pointer back on tos
    // Save tos values before call_VM() clobbers them. Since we have
    // to do it for every data type, we use the saved values as the
    // jvalue object.
    switch (bytecode()) {          // load values into the jvalue object
    case Bytecodes::_fast_aputfield: __ push_ptr(r0); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ push_i(r0); break;
    case Bytecodes::_fast_dputfield: __ push_d(); break;
    case Bytecodes::_fast_fputfield: __ push_f(); break;
    case Bytecodes::_fast_lputfield: __ push_l(r0); break;

    default:
      ShouldNotReachHere();
    }
    __ mov(c_rarg3, esp);             // points to jvalue on the stack
    // access constant pool cache entry
    __ get_cache_entry_pointer_at_bcp(c_rarg2, r0, 1);
    __ verify_oop(r19);
    // r19: object pointer copied above
    // c_rarg2: cache entry pointer
    // c_rarg3: jvalue object on the stack
    __ call_VM(noreg,
               CAST_FROM_FN_PTR(address,
                                InterpreterRuntime::post_field_modification),
               r19, c_rarg2, c_rarg3);

    switch (bytecode()) {             // restore tos values
    case Bytecodes::_fast_aputfield: __ pop_ptr(r0); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
    case Bytecodes::_fast_dputfield: __ pop_d(); break;
    case Bytecodes::_fast_fputfield: __ pop_f(); break;
    case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
    default: break;
    }
    __ bind(L2);
  }
}
2966
2967 void TemplateTable::fast_storefield(TosState state)
2968 {
2969 transition(state, vtos);
2970
2971 ByteSize base = ConstantPoolCache::base_offset();
2985 // replace index with field offset from cache entry
2986 __ ldr(r1, Address(r2, in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
2987
2988 {
2989 Label notVolatile;
2990 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2991 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2992 __ bind(notVolatile);
2993 }
2994
2995 Label notVolatile;
2996
2997 // Get object from stack
2998 pop_and_check_object(r2);
2999
3000 // field address
3001 const Address field(r2, r1);
3002
3003 // access field
3004 switch (bytecode()) {
3005 case Bytecodes::_fast_aputfield:
3006 do_oop_store(_masm, field, r0, IN_HEAP);
3007 break;
3008 case Bytecodes::_fast_lputfield:
3009 __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg);
3010 break;
3011 case Bytecodes::_fast_iputfield:
3012 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
3013 break;
3014 case Bytecodes::_fast_zputfield:
3015 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
3016 break;
3017 case Bytecodes::_fast_bputfield:
3018 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
3019 break;
3020 case Bytecodes::_fast_sputfield:
3021 __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg);
3022 break;
3023 case Bytecodes::_fast_cputfield:
3024 __ access_store_at(T_CHAR, IN_HEAP, field, r0, noreg, noreg);
3082 // r0: object
3083 __ verify_oop(r0);
3084 __ null_check(r0);
3085 const Address field(r0, r1);
3086
3087 // 8179954: We need to make sure that the code generated for
3088 // volatile accesses forms a sequentially-consistent set of
3089 // operations when combined with STLR and LDAR. Without a leading
3090 // membar it's possible for a simple Dekker test to fail if loads
3091 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3092 // the stores in one method and we interpret the loads in another.
3093 if (!is_c1_or_interpreter_only()) {
3094 Label notVolatile;
3095 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3096 __ membar(MacroAssembler::AnyAny);
3097 __ bind(notVolatile);
3098 }
3099
3100 // access field
3101 switch (bytecode()) {
3102 case Bytecodes::_fast_agetfield:
3103 do_oop_load(_masm, field, r0, IN_HEAP);
3104 __ verify_oop(r0);
3105 break;
3106 case Bytecodes::_fast_lgetfield:
3107 __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
3108 break;
3109 case Bytecodes::_fast_igetfield:
3110 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
3111 break;
3112 case Bytecodes::_fast_bgetfield:
3113 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
3114 break;
3115 case Bytecodes::_fast_sgetfield:
3116 __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
3117 break;
3118 case Bytecodes::_fast_cgetfield:
3119 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
3120 break;
3121 case Bytecodes::_fast_fgetfield:
3637 CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), r0);
3638 __ pop(atos); // restore the return value
3639
3640 }
3641 __ b(done);
3642 }
3643
3644 // slow case
3645 __ bind(slow_case);
3646 __ get_constant_pool(c_rarg1);
3647 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3648 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3649 __ verify_oop(r0);
3650
3651 // continue
3652 __ bind(done);
3653 // Must prevent reordering of stores for object initialization with stores that publish the new object.
3654 __ membar(Assembler::StoreStore);
3655 }
3656
// newarray: allocate a primitive array.  The element type code is the
// single-byte bytecode operand, the length is the int at TOS; the
// actual allocation is done in the runtime.
void TemplateTable::newarray() {
  transition(itos, atos);
  __ load_unsigned_byte(c_rarg1, at_bcp(1));  // element type code
  __ mov(c_rarg2, r0);                        // length
  call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
          c_rarg1, c_rarg2);
  // Must prevent reordering of stores for object initialization with stores that publish the new object.
  __ membar(Assembler::StoreStore);
}
3666
// anewarray: allocate an object array.  The element class is named by
// a two-byte constant pool index operand; the length is the int at
// TOS.  The allocation is done in the runtime.
void TemplateTable::anewarray() {
  transition(itos, atos);
  __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);  // cp index
  __ get_constant_pool(c_rarg1);
  __ mov(c_rarg3, r0);                              // length
  call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
          c_rarg1, c_rarg2, c_rarg3);
  // Must prevent reordering of stores for object initialization with stores that publish the new object.
  __ membar(Assembler::StoreStore);
}
3708 __ bind(quicked);
3709 __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
3710 __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass
3711
3712 __ bind(resolved);
3713 __ load_klass(r19, r3);
3714
3715 // Generate subtype check. Blows r2, r5. Object in r3.
3716 // Superklass in r0. Subklass in r19.
3717 __ gen_subtype_check(r19, ok_is_subtype);
3718
3719 // Come here on failure
3720 __ push(r3);
3721 // object is at TOS
3722 __ b(Interpreter::_throw_ClassCastException_entry);
3723
3724 // Come here on success
3725 __ bind(ok_is_subtype);
3726 __ mov(r0, r3); // Restore object in r3
3727
3728 // Collect counts on whether this test sees NULLs a lot or not.
3729 if (ProfileInterpreter) {
3730 __ b(done);
3731 __ bind(is_null);
3732 __ profile_null_seen(r2);
3733 } else {
3734 __ bind(is_null); // same as 'done'
3735 }
3736 __ bind(done);
3737 }
3738
3739 void TemplateTable::instanceof() {
3740 transition(atos, itos);
3741 Label done, is_null, ok_is_subtype, quicked, resolved;
3742 __ cbz(r0, is_null);
3743
3744 // Get cpool & tags index
3745 __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3746 __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3747 // See if bytecode has already been quicked
3748 __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3749 __ lea(r1, Address(rscratch1, r19));
3750 __ ldarb(r1, r1);
3751 __ cmp(r1, (u1)JVM_CONSTANT_Class);
3752 __ br(Assembler::EQ, quicked);
3753
|
131 case TemplateTable::equal : return Assembler::NE;
132 case TemplateTable::not_equal : return Assembler::EQ;
133 case TemplateTable::less : return Assembler::GE;
134 case TemplateTable::less_equal : return Assembler::GT;
135 case TemplateTable::greater : return Assembler::LE;
136 case TemplateTable::greater_equal: return Assembler::LT;
137 }
138 ShouldNotReachHere();
139 return Assembler::EQ;
140 }
141
142
143 // Miscellaneous helper routines
// Store an oop (or NULL) at the Address described by dst.
// If val == noreg this means store a NULL.
// r10 and r1 are temporaries for any GC barrier code; the extra noreg
// argument fills a third temp slot of this store_heap_oop overload
// (no third temp needed here — confirm against the current
// MacroAssembler signature).  By convention the value, when present,
// is always in r0.
static void do_oop_store(InterpreterMacroAssembler* _masm,
                         Address dst,
                         Register val,
                         DecoratorSet decorators) {
  assert(val == noreg || val == r0, "parameter is just for looks");
  __ store_heap_oop(dst, val, r10, r1, noreg, decorators);
}
153
// Load an oop from src into dst, applying the given access decorators.
// r10 and r1 serve as temporaries for any GC barrier code emitted by
// load_heap_oop.
static void do_oop_load(InterpreterMacroAssembler* _masm,
                        Address src,
                        Register dst,
                        DecoratorSet decorators) {
  __ load_heap_oop(dst, src, r10, r1, decorators);
}
160
// Return an Address for the bytecode operand at rbcp + offset.
// Only legal for templates that declare they use the bytecode pointer.
Address TemplateTable::at_bcp(int offset) {
  assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
  return Address(rbcp, offset);
}
165
166 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
167 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
168 int byte_no)
169 {
170 if (!RewriteBytecodes) return;
171 Label L_patch_done;
172
173 switch (bc) {
174 case Bytecodes::_fast_qputfield:
175 case Bytecodes::_fast_aputfield:
176 case Bytecodes::_fast_bputfield:
177 case Bytecodes::_fast_zputfield:
178 case Bytecodes::_fast_cputfield:
179 case Bytecodes::_fast_dputfield:
180 case Bytecodes::_fast_fputfield:
181 case Bytecodes::_fast_iputfield:
182 case Bytecodes::_fast_lputfield:
183 case Bytecodes::_fast_sputfield:
184 {
185 // We skip bytecode quickening for putfield instructions when
186 // the put_code written to the constant pool cache is zero.
187 // This is required so that every execution of this instruction
188 // calls out to InterpreterRuntime::resolve_get_put to do
189 // additional, required work.
190 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
191 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
192 __ get_cache_and_index_and_bytecode_at_bcp(temp_reg, bc_reg, temp_reg, byte_no, 1);
193 __ movw(bc_reg, bc);
194 __ cbzw(temp_reg, L_patch_done); // don't patch
730 }
731
// Emit a null check of 'array' and a range check of 'index' against
// the array length.  On failure, tail-branches to the shared
// ArrayIndexOutOfBoundsException entry with the aberrant index in r1
// and the array in r3 (the conventions the exception stub expects).
void TemplateTable::index_check(Register array, Register index)
{
  // destroys r1, rscratch1
  // check array: the load of the length below takes an implicit null
  // check at this offset
  __ null_check(array, arrayOopDesc::length_offset_in_bytes());
  // sign extend index for use by indexed load
  // __ movl2ptr(index, index);
  // check index
  Register length = rscratch1;
  __ ldrw(length, Address(array, arrayOopDesc::length_offset_in_bytes()));
  // unsigned compare: a negative index appears as a huge unsigned
  // value, so the single LO branch below covers both bounds
  __ cmpw(index, length);
  if (index != r1) {
    // ??? convention: move aberrant index into r1 for exception message
    assert(r1 != array, "different registers");
    __ mov(r1, index);
  }
  Label ok;
  __ br(Assembler::LO, ok);
  // ??? convention: move array into r3 for exception message
  __ mov(r3, array);
  __ mov(rscratch1, Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
  __ br(rscratch1);  // indirect branch to the exception stub; no return
  __ bind(ok);
}
756
// iaload: load an int from an int array.
// On entry: index in r0 (itos), arrayref on the expression stack.
void TemplateTable::iaload()
{
  transition(itos, itos);
  __ mov(r1, r0);   // index out of the way
  __ pop_ptr(r0);   // arrayref
  // r0: array
  // r1: index
  index_check(r0, r1); // leaves index in r1, kills rscratch1
  // Pre-add the element base offset scaled down by 2 so the uxtw(2)
  // addressing mode below scales base and index together.
  __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2);
  __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
}
768
769 void TemplateTable::laload()
770 {
771 transition(itos, ltos);
772 __ mov(r1, r0);
773 __ pop_ptr(r0);
// daload: load a double from a double array.
// On entry: index in r0 (itos), arrayref on the expression stack.
void TemplateTable::daload()
{
  transition(itos, dtos);
  __ mov(r1, r0);   // index out of the way
  __ pop_ptr(r0);   // arrayref
  // r0: array
  // r1: index
  index_check(r0, r1); // leaves index in r1, kills rscratch1
  // Base offset pre-scaled by >> 3 to match the uxtw(3) scaling below.
  __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
  __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
}
804
// aaload: load an object reference from an object array.
// With UseFlatArray, a flattened (inline-type) array cannot be read
// with a plain oop load; such arrays are detected at runtime and the
// element is materialized by a call into the runtime instead.
void TemplateTable::aaload()
{
  transition(itos, atos);
  __ mov(r1, r0);   // index out of the way
  __ pop_ptr(r0);   // arrayref
  // r0: array
  // r1: index
  index_check(r0, r1); // leaves index in r1, kills rscratch1
  if (UseFlatArray) {
    Label is_flat_array, done;

    __ test_flattened_array_oop(r0, r8 /*temp*/, is_flat_array);
    // Not flat: ordinary oop-array load through the GC-aware loader.
    __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
    do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IS_ARRAY);

    __ b(done);
    __ bind(is_flat_array);
    // Flat: let the runtime buffer the element as an object in r0.
    __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::value_array_load), r0, r1);
    __ bind(done);
  } else {
    __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
    do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IS_ARRAY);
  }
}
829
// baload: load a byte (sign-extended) from a byte/boolean array.
void TemplateTable::baload()
{
  transition(itos, itos);
  __ mov(r1, r0);   // index out of the way
  __ pop_ptr(r0);   // arrayref
  // r0: array
  // r1: index
  index_check(r0, r1); // leaves index in r1, kills rscratch1
  // ">> 0" kept for symmetry with the other array loads (scale is 1).
  __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
  __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
}
841
842 void TemplateTable::caload()
843 {
844 transition(itos, itos);
845 __ mov(r1, r0);
846 __ pop_ptr(r0);
847 // r0: array
1104 __ pop_ptr(r3);
1105 // v0: value
1106 // r1: index
1107 // r3: array
1108 index_check(r3, r1); // prefer index in r1
1109 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
1110 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg);
1111 }
1112
// aastore: store an object reference into an object array, with the
// required dynamic array-store (subtype) check.  Valhalla additions:
// flattened arrays take a runtime-call path with an exact type check,
// and null-free arrays reject a NULL store with NullPointerException.
// The three operands stay on the expression stack until 'done' so the
// GC sees them across the runtime calls and barrier code.
void TemplateTable::aastore() {
  Label is_null, ok_is_subtype, done;
  transition(vtos, vtos);
  // stack: ..., array, index, value
  __ ldr(r0, at_tos());    // value
  __ ldr(r2, at_tos_p1()); // index
  __ ldr(r3, at_tos_p2()); // array

  // Element address = array + (r4 << LogBytesPerHeapOop); r4 is set
  // below to index + pre-scaled base offset.
  Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));

  index_check(r3, r2);     // kills r1

  // FIXME: Could we remove the line below?
  __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);

  // do array store check - check for NULL value first: storing NULL
  // never needs a subtype check
  __ cbz(r0, is_null);

  Label is_flat_array;
  if (UseFlatArray) {
    // Flattened arrays bypass the oop store path entirely; see below.
    __ test_flattened_array_oop(r3, r8 /*temp*/, is_flat_array);
  }

  // Move subklass into r1
  __ load_klass(r1, r0);

  // Move superklass (the array's element klass) into r0
  __ load_klass(r0, r3);
  __ ldr(r0, Address(r0, ObjArrayKlass::element_klass_offset()));
  // Compress array + index*oopSize + 12 into a single register. Frees r2.

  // Generate subtype check.  Blows r2, r5
  // Superklass in r0.  Subklass in r1.

  __ gen_subtype_check(r1, ok_is_subtype);

  // Come here on failure
  // object is at TOS
  __ b(Interpreter::_throw_ArrayStoreException_entry);


  // Come here on success
  __ bind(ok_is_subtype);


  // Get the value we will store — reload from TOS since the subtype
  // check clobbered r0
  __ ldr(r0, at_tos());
  // Now store using the appropriate barrier
  do_oop_store(_masm, element_address, r0, IS_ARRAY);
  __ b(done);

  // Have a NULL in r0, r3=array, r2=index.  Store NULL at ary[idx]
  __ bind(is_null);
  __ profile_null_seen(r2);

  if (EnableValhalla) {
    Label is_null_into_value_array_npe, store_null;

    // No way to store null in flat array — throw NPE instead
    __ test_null_free_array_oop(r3, r8, is_null_into_value_array_npe);
    __ b(store_null);

    __ bind(is_null_into_value_array_npe);
    __ b(ExternalAddress(Interpreter::_throw_NullPointerException_entry));

    __ bind(store_null);
  }

  // Store a NULL
  do_oop_store(_masm, element_address, noreg, IS_ARRAY);
  __ b(done);

  if (EnableValhalla) {
    Label is_type_ok;

    // store non-null value into a flattened array
    __ bind(is_flat_array);

    // Simplistic type check...
    // r0 - value, r2 - index, r3 - array.

    // Profile the not-null value's klass.
    // Load value class
    __ load_klass(r1, r0);
    __ profile_typecheck(r2, r1, r0); // blows r2, and r0

    // flat value array needs exact type match
    // is "r8 == r0" (value subclass == array element superclass)

    // Move element klass into r0

    __ load_klass(r0, r3);

    __ ldr(r0, Address(r0, ArrayKlass::element_klass_offset()));
    __ cmp(r0, r1);
    __ br(Assembler::EQ, is_type_ok);

    __ profile_typecheck_failed(r2);
    __ b(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));

    __ bind(is_type_ok);

    // Reload from TOS to be safe, because of profile_typecheck that blows r2 and r0.
    // FIXME: Should we really do it?
    __ ldr(r1, at_tos());  // value
    __ mov(r2, r3);        // array,  ldr(r2, at_tos_p2());
    __ ldr(r3, at_tos_p1()); // index
    // Delegate the flattened-element store to the runtime.
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::value_array_store), r1, r2, r3);
  }


  // Pop stack arguments
  __ bind(done);
  __ add(esp, esp, 3 * Interpreter::stackElementSize);
}
1228
1229 void TemplateTable::bastore()
1230 {
1231 transition(itos, vtos);
1232 __ pop_i(r1);
1233 __ pop_ptr(r3);
1234 // r0: value
1235 // r1: index
1236 // r3: array
1237 index_check(r3, r1); // prefer index in r1
1238
1239 // Need to check whether array is boolean or byte
1240 // since both types share the bastore bytecode.
1241 __ load_klass(r2, r3);
1242 __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));
2073 __ br(j_not(cc), not_taken);
2074 branch(false, false);
2075 __ bind(not_taken);
2076 __ profile_not_taken_branch(r0);
2077 }
2078
2079 void TemplateTable::if_nullcmp(Condition cc)
2080 {
2081 transition(atos, vtos);
2082 // assume branch is more often taken than not (loops use backward branches)
2083 Label not_taken;
2084 if (cc == equal)
2085 __ cbnz(r0, not_taken);
2086 else
2087 __ cbz(r0, not_taken);
2088 branch(false, false);
2089 __ bind(not_taken);
2090 __ profile_not_taken_branch(r0);
2091 }
2092
2093 void TemplateTable::if_acmp(Condition cc) {
2094 transition(atos, vtos);
2095 // assume branch is more often taken than not (loops use backward branches)
2096 Label taken, not_taken;
2097 __ pop_ptr(r1);
2098
2099 Register is_value_mask = rscratch1;
2100 __ mov(is_value_mask, markWord::always_locked_pattern);
2101
2102 if (EnableValhalla) {
2103 __ cmp(r1, r0);
2104 __ br(Assembler::EQ, (cc == equal) ? taken : not_taken);
2105
2106 // might be substitutable, test if either r0 or r1 is null
2107 __ andr(r2, r0, r1);
2108 __ cbz(r2, (cc == equal) ? not_taken : taken);
2109
2110 // and both are values ?
2111 __ ldr(r2, Address(r1, oopDesc::mark_offset_in_bytes()));
2112 __ andr(r2, r2, is_value_mask);
2113 __ ldr(r4, Address(r0, oopDesc::mark_offset_in_bytes()));
2114 __ andr(r4, r4, is_value_mask);
2115 __ andr(r2, r2, r4);
2116 __ cmp(r2, is_value_mask);
2117 __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2118
2119 // same value klass ?
2120 __ load_metadata(r2, r1);
2121 __ load_metadata(r4, r0);
2122 __ cmp(r2, r4);
2123 __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2124
2125 // Know both are the same type, let's test for substitutability...
2126 if (cc == equal) {
2127 invoke_is_substitutable(r0, r1, taken, not_taken);
2128 } else {
2129 invoke_is_substitutable(r0, r1, not_taken, taken);
2130 }
2131 __ stop("Not reachable");
2132 }
2133
2134 __ cmpoop(r1, r0);
2135 __ br(j_not(cc), not_taken);
2136 __ bind(taken);
2137 branch(false, false);
2138 __ bind(not_taken);
2139 __ profile_not_taken_branch(r0);
2140 }
2141
// Ask the runtime whether two inline-type objects are substitutable and
// dispatch to one of the caller-supplied outcome labels. Callers have
// already established that aobj and bobj are non-null and share the same
// value klass. On return from the VM call, r0 holds the boolean answer.
// Control never falls through: we always branch to is_subst or not_subst.
void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
                                            Label& is_subst, Label& not_subst) {

  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
  // Restored... r0 answer, jmp to outcome...
  __ cbz(r0, not_subst);  // zero answer -> not substitutable
  __ b(is_subst);         // non-zero -> substitutable
}
2150
2151
// Generate the ret bytecode: return from a jsr subroutine by reloading
// the saved bcp from the local variable named by the bytecode operand.
void TemplateTable::ret() {
  transition(vtos, vtos);
  // We might be moving to a safepoint. The thread which calls
  // Interpreter::notice_safepoints() will effectively flush its cache
  // when it makes a system call, but we need to do something to
  // ensure that we see the changed dispatch table.
  __ membar(MacroAssembler::LoadLoad);

  locals_index(r1);
  __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
  __ profile_ret(r1, r2);
  // rbcp = ConstMethod* + bci + codes offset, i.e. the bcp to resume at.
  __ ldr(rbcp, Address(rmethod, Method::const_offset()));
  __ lea(rbcp, Address(rbcp, r1));
  __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
  // Dispatch with a safepoint poll, since this is a backward-branch-like
  // transfer of control.
  __ dispatch_next(vtos, 0, /*generate_poll*/true);
}
2168
2169 void TemplateTable::wide_ret() {
2170 transition(vtos, vtos);
2171 locals_index_wide(r1);
2604 // 8179954: We need to make sure that the code generated for
2605 // volatile accesses forms a sequentially-consistent set of
2606 // operations when combined with STLR and LDAR. Without a leading
2607 // membar it's possible for a simple Dekker test to fail if loads
2608 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
2609 // the stores in one method and we interpret the loads in another.
2610 if (!is_c1_or_interpreter_only()){
2611 Label notVolatile;
2612 __ tbz(raw_flags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2613 __ membar(MacroAssembler::AnyAny);
2614 __ bind(notVolatile);
2615 }
2616
2617 const Address field(obj, off);
2618
2619 Label Done, notByte, notBool, notInt, notShort, notChar,
2620 notLong, notFloat, notObj, notDouble;
2621
2622 // x86 uses a shift and mask or wings it with a shift plus assert
2623 // the mask is not needed. aarch64 just uses bitfield extract
2624 __ ubfxw(flags, raw_flags, ConstantPoolCacheEntry::tos_state_shift, ConstantPoolCacheEntry::tos_state_bits);
2625
2626 assert(btos == 0, "change code, btos != 0");
2627 __ cbnz(flags, notByte);
2628
2629 // Don't rewrite getstatic, only getfield
2630 if (is_static) rc = may_not_rewrite;
2631
2632 // btos
2633 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
2634 __ push(btos);
2635 // Rewrite bytecode to be faster
2636 if (rc == may_rewrite) {
2637 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2638 }
2639 __ b(Done);
2640
2641 __ bind(notByte);
2642 __ cmp(flags, (u1)ztos);
2643 __ br(Assembler::NE, notBool);
2644
2645 // ztos (same code as btos)
2646 __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2647 __ push(ztos);
2648 // Rewrite bytecode to be faster
2649 if (rc == may_rewrite) {
2650 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2651 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2652 }
2653 __ b(Done);
2654
2655 __ bind(notBool);
2656 __ cmp(flags, (u1)atos);
2657 __ br(Assembler::NE, notObj);
2658 // atos
2659 if (!EnableValhalla) {
2660 do_oop_load(_masm, field, r0, IN_HEAP);
2661 __ push(atos);
2662 if (rc == may_rewrite) {
2663 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2664 }
2665 __ b(Done);
2666 } else { // Valhalla
2667
2668 if (is_static) {
2669 __ load_heap_oop(r0, field);
2670 Label is_inline, isUninitialized;
2671 // Issue below if the static field has not been initialized yet
2672 __ test_field_is_inline_type(raw_flags, r8 /*temp*/, is_inline);
2673 // Not inline case
2674 __ push(atos);
2675 __ b(Done);
2676 // Inline case, must not return null even if uninitialized
2677 __ bind(is_inline);
2678 __ cbz(r0, isUninitialized);
2679 __ push(atos);
2680 __ b(Done);
2681 __ bind(isUninitialized);
2682 __ andw(raw_flags, raw_flags, ConstantPoolCacheEntry::field_index_mask);
2683 __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_static_inline_type_field), obj, raw_flags);
2684 __ verify_oop(r0);
2685 __ push(atos);
2686 __ b(Done);
2687 } else {
2688 Label isFlattened, isInitialized, is_inline, rewrite_inline;
2689 __ test_field_is_inline_type(raw_flags, r8 /*temp*/, is_inline);
2690 // Non-inline field case
2691 __ load_heap_oop(r0, field);
2692 __ push(atos);
2693 if (rc == may_rewrite) {
2694 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2695 }
2696 __ b(Done);
2697 __ bind(is_inline);
2698 __ test_field_is_inlined(raw_flags, r8 /* temp */, isFlattened);
2699 // Non-inline field case
2700 __ load_heap_oop(r0, field);
2701 __ cbnz(r0, isInitialized);
2702 __ andw(raw_flags, raw_flags, ConstantPoolCacheEntry::field_index_mask);
2703 __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_instance_inline_type_field), obj, raw_flags);
2704 __ bind(isInitialized);
2705 __ verify_oop(r0);
2706 __ push(atos);
2707 __ b(rewrite_inline);
2708 __ bind(isFlattened);
2709 __ ldr(r10, Address(cache, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f1_offset())));
2710 __ andw(raw_flags, raw_flags, ConstantPoolCacheEntry::field_index_mask);
2711 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_flattened_field), obj, raw_flags, r10);
2712 __ verify_oop(r0);
2713 __ push(atos);
2714 __ bind(rewrite_inline);
2715 if (rc == may_rewrite) {
2716 patch_bytecode(Bytecodes::_fast_qgetfield, bc, r1);
2717 }
2718 __ b(Done);
2719 }
2720 }
2721
2722 __ bind(notObj);
2723 __ cmp(flags, (u1)itos);
2724 __ br(Assembler::NE, notInt);
2725 // itos
2726 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2727 __ push(itos);
2728 // Rewrite bytecode to be faster
2729 if (rc == may_rewrite) {
2730 patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2731 }
2732 __ b(Done);
2733
2734 __ bind(notInt);
2735 __ cmp(flags, (u1)ctos);
2736 __ br(Assembler::NE, notChar);
2737 // ctos
2738 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2739 __ push(ctos);
2740 // Rewrite bytecode to be faster
2870 // c_rarg1: object pointer set up above (NULL if static)
2871 // c_rarg2: cache entry pointer
2872 // c_rarg3: jvalue object on the stack
2873 __ call_VM(noreg,
2874 CAST_FROM_FN_PTR(address,
2875 InterpreterRuntime::post_field_modification),
2876 c_rarg1, c_rarg2, c_rarg3);
2877 __ get_cache_and_index_at_bcp(cache, index, 1);
2878 __ bind(L1);
2879 }
2880 }
2881
2882 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2883 transition(vtos, vtos);
2884
2885 const Register cache = r2;
2886 const Register index = r3;
2887 const Register obj = r2;
2888 const Register off = r19;
2889 const Register flags = r0;
2890 const Register flags2 = r6;
2891 const Register bc = r4;
2892
2893 resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2894 jvmti_post_field_mod(cache, index, is_static);
2895 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2896
2897 Label Done;
2898 __ mov(r5, flags);
2899
2900 {
2901 Label notVolatile;
2902 __ tbz(r5, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2903 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2904 __ bind(notVolatile);
2905 }
2906
2907 // field address
2908 const Address field(obj, off);
2909
2910 Label notByte, notBool, notInt, notShort, notChar,
2911 notLong, notFloat, notObj, notDouble;
2912
2913 __ mov(flags2, flags);
2914
2915 // x86 uses a shift and mask or wings it with a shift plus assert
2916 // the mask is not needed. aarch64 just uses bitfield extract
2917 __ ubfxw(flags, flags, ConstantPoolCacheEntry::tos_state_shift, ConstantPoolCacheEntry::tos_state_bits);
2918
2919 assert(btos == 0, "change code, btos != 0");
2920 __ cbnz(flags, notByte);
2921
2922 // Don't rewrite putstatic, only putfield
2923 if (is_static) rc = may_not_rewrite;
2924
2925 // btos
2926 {
2927 __ pop(btos);
2928 if (!is_static) pop_and_check_object(obj);
2929 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
2930 if (rc == may_rewrite) {
2931 patch_bytecode(Bytecodes::_fast_bputfield, bc, r1, true, byte_no);
2932 }
2933 __ b(Done);
2934 }
2937 __ cmp(flags, (u1)ztos);
2938 __ br(Assembler::NE, notBool);
2939
2940 // ztos
2941 {
2942 __ pop(ztos);
2943 if (!is_static) pop_and_check_object(obj);
2944 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
2945 if (rc == may_rewrite) {
2946 patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
2947 }
2948 __ b(Done);
2949 }
2950
2951 __ bind(notBool);
2952 __ cmp(flags, (u1)atos);
2953 __ br(Assembler::NE, notObj);
2954
2955 // atos
2956 {
2957 if (!EnableValhalla) {
2958 __ pop(atos);
2959 if (!is_static) pop_and_check_object(obj);
2960 // Store into the field
2961 do_oop_store(_masm, field, r0, IN_HEAP);
2962 if (rc == may_rewrite) {
2963 patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
2964 }
2965 __ b(Done);
2966 } else { // Valhalla
2967
2968 __ pop(atos);
2969 if (is_static) {
2970 Label not_inline;
2971 __ test_field_is_not_inline_type(flags2, r8 /* temp */, not_inline);
2972 __ null_check(r0);
2973 __ bind(not_inline);
2974 do_oop_store(_masm, field, r0, IN_HEAP);
2975 __ b(Done);
2976 } else {
2977 Label is_inline, isFlattened, rewrite_not_inline, rewrite_inline;
2978 __ test_field_is_inline_type(flags2, r8 /*temp*/, is_inline);
2979 // Not inline case
2980 pop_and_check_object(obj);
2981 // Store into the field
2982 do_oop_store(_masm, field, r0, IN_HEAP);
2983 __ bind(rewrite_not_inline);
2984 if (rc == may_rewrite) {
2985 patch_bytecode(Bytecodes::_fast_aputfield, bc, r19, true, byte_no);
2986 }
2987 __ b(Done);
2988 // Implementation of the inline semantic
2989 __ bind(is_inline);
2990 __ null_check(r0);
2991 __ test_field_is_inlined(flags2, r8 /*temp*/, isFlattened);
2992 // Not inline case
2993 pop_and_check_object(obj);
2994 // Store into the field
2995 do_oop_store(_masm, field, r0, IN_HEAP);
2996 __ b(rewrite_inline);
2997 __ bind(isFlattened);
2998 pop_and_check_object(obj);
2999 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_flattened_value), r0, off, obj);
3000 __ bind(rewrite_inline);
3001 if (rc == may_rewrite) {
3002 patch_bytecode(Bytecodes::_fast_qputfield, bc, r19, true, byte_no);
3003 }
3004 __ b(Done);
3005 }
3006 } // Valhalla
3007 }
3008
3009 __ bind(notObj);
3010 __ cmp(flags, (u1)itos);
3011 __ br(Assembler::NE, notInt);
3012
3013 // itos
3014 {
3015 __ pop(itos);
3016 if (!is_static) pop_and_check_object(obj);
3017 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
3018 if (rc == may_rewrite) {
3019 patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
3020 }
3021 __ b(Done);
3022 }
3023
3024 __ bind(notInt);
3025 __ cmp(flags, (u1)ctos);
3026 __ br(Assembler::NE, notChar);
// putstatic is putfield with is_static == true (no receiver on the stack).
void TemplateTable::putstatic(int byte_no) {
  putfield_or_static(byte_no, true);
}
3129
// JVMTI support for the fast_*putfield bytecodes: if a field-modification
// watch is set, marshal the to-be-stored value into a jvalue on the
// expression stack and call into the VM before the store takes place.
// Emitted inline ahead of the fast store sequence; a cheap check of the
// global modification count skips all of it when no agent is watching.
void TemplateTable::jvmti_post_fast_field_mod()
{
  if (JvmtiExport::can_post_field_modification()) {
    // Check to see if a field modification watch has been set before
    // we take the time to call into the VM.
    Label L2;
    __ lea(rscratch1, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
    __ ldrw(c_rarg3, Address(rscratch1));
    __ cbzw(c_rarg3, L2);
    __ pop_ptr(r19); // copy the object pointer from tos
    __ verify_oop(r19);
    __ push_ptr(r19); // put the object pointer back on tos
    // Save tos values before call_VM() clobbers them. Since we have
    // to do it for every data type, we use the saved values as the
    // jvalue object.
    switch (bytecode()) { // load values into the jvalue object
    case Bytecodes::_fast_qputfield: //fall through
    case Bytecodes::_fast_aputfield: __ push_ptr(r0); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ push_i(r0); break;
    case Bytecodes::_fast_dputfield: __ push_d(); break;
    case Bytecodes::_fast_fputfield: __ push_f(); break;
    case Bytecodes::_fast_lputfield: __ push_l(r0); break;

    default:
      ShouldNotReachHere();
    }
    __ mov(c_rarg3, esp); // points to jvalue on the stack
    // access constant pool cache entry
    __ get_cache_entry_pointer_at_bcp(c_rarg2, r0, 1);
    __ verify_oop(r19);
    // r19: object pointer copied above
    // c_rarg2: cache entry pointer
    // c_rarg3: jvalue object on the stack
    __ call_VM(noreg,
               CAST_FROM_FN_PTR(address,
                                InterpreterRuntime::post_field_modification),
               r19, c_rarg2, c_rarg3);

    // Undo the pushes above so the stack is back in the shape the fast
    // store sequence expects; must mirror the save switch exactly.
    switch (bytecode()) { // restore tos values
    case Bytecodes::_fast_qputfield: //fall through
    case Bytecodes::_fast_aputfield: __ pop_ptr(r0); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
    case Bytecodes::_fast_dputfield: __ pop_d(); break;
    case Bytecodes::_fast_fputfield: __ pop_f(); break;
    case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
    default: break;
    }
    __ bind(L2);
  }
}
3188
3189 void TemplateTable::fast_storefield(TosState state)
3190 {
3191 transition(state, vtos);
3192
3193 ByteSize base = ConstantPoolCache::base_offset();
3207 // replace index with field offset from cache entry
3208 __ ldr(r1, Address(r2, in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3209
3210 {
3211 Label notVolatile;
3212 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3213 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
3214 __ bind(notVolatile);
3215 }
3216
3217 Label notVolatile;
3218
3219 // Get object from stack
3220 pop_and_check_object(r2);
3221
3222 // field address
3223 const Address field(r2, r1);
3224
3225 // access field
3226 switch (bytecode()) {
3227 case Bytecodes::_fast_qputfield: //fall through
3228 {
3229 Label isFlattened, done;
3230 __ null_check(r0);
3231 __ test_field_is_flattened(r3, r8 /* temp */, isFlattened);
3232 // No Flattened case
3233 do_oop_store(_masm, field, r0, IN_HEAP);
3234 __ b(done);
3235 __ bind(isFlattened);
3236 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_flattened_value), r0, r1, r2);
3237 __ bind(done);
3238 }
3239 break;
3240 case Bytecodes::_fast_aputfield:
3241 do_oop_store(_masm, field, r0, IN_HEAP);
3242 break;
3243 case Bytecodes::_fast_lputfield:
3244 __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg);
3245 break;
3246 case Bytecodes::_fast_iputfield:
3247 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
3248 break;
3249 case Bytecodes::_fast_zputfield:
3250 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
3251 break;
3252 case Bytecodes::_fast_bputfield:
3253 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
3254 break;
3255 case Bytecodes::_fast_sputfield:
3256 __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg);
3257 break;
3258 case Bytecodes::_fast_cputfield:
3259 __ access_store_at(T_CHAR, IN_HEAP, field, r0, noreg, noreg);
3317 // r0: object
3318 __ verify_oop(r0);
3319 __ null_check(r0);
3320 const Address field(r0, r1);
3321
3322 // 8179954: We need to make sure that the code generated for
3323 // volatile accesses forms a sequentially-consistent set of
3324 // operations when combined with STLR and LDAR. Without a leading
3325 // membar it's possible for a simple Dekker test to fail if loads
3326 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3327 // the stores in one method and we interpret the loads in another.
3328 if (!is_c1_or_interpreter_only()) {
3329 Label notVolatile;
3330 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3331 __ membar(MacroAssembler::AnyAny);
3332 __ bind(notVolatile);
3333 }
3334
3335 // access field
3336 switch (bytecode()) {
3337 case Bytecodes::_fast_qgetfield:
3338 {
3339 Label isFlattened, isInitialized, Done;
3340 // FIXME: We don't need to reload registers multiple times, but stay close to x86 code
3341 __ ldrw(r9, Address(r2, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset())));
3342 __ test_field_is_inlined(r9, r8 /* temp */, isFlattened);
3343 // Non-flattened field case
3344 __ mov(r9, r0);
3345 __ load_heap_oop(r0, field);
3346 __ cbnz(r0, isInitialized);
3347 __ mov(r0, r9);
3348 __ ldrw(r9, Address(r2, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset())));
3349 __ andw(r9, r9, ConstantPoolCacheEntry::field_index_mask);
3350 __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_instance_inline_type_field), r0, r9);
3351 __ bind(isInitialized);
3352 __ verify_oop(r0);
3353 __ b(Done);
3354 __ bind(isFlattened);
3355 __ ldrw(r9, Address(r2, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset())));
3356 __ andw(r9, r9, ConstantPoolCacheEntry::field_index_mask);
3357 __ ldr(r3, Address(r2, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f1_offset())));
3358 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_flattened_field), r0, r9, r3);
3359 __ verify_oop(r0);
3360 __ bind(Done);
3361 }
3362 break;
3363 case Bytecodes::_fast_agetfield:
3364 do_oop_load(_masm, field, r0, IN_HEAP);
3365 __ verify_oop(r0);
3366 break;
3367 case Bytecodes::_fast_lgetfield:
3368 __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
3369 break;
3370 case Bytecodes::_fast_igetfield:
3371 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
3372 break;
3373 case Bytecodes::_fast_bgetfield:
3374 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
3375 break;
3376 case Bytecodes::_fast_sgetfield:
3377 __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
3378 break;
3379 case Bytecodes::_fast_cgetfield:
3380 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
3381 break;
3382 case Bytecodes::_fast_fgetfield:
3898 CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), r0);
3899 __ pop(atos); // restore the return value
3900
3901 }
3902 __ b(done);
3903 }
3904
3905 // slow case
3906 __ bind(slow_case);
3907 __ get_constant_pool(c_rarg1);
3908 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3909 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3910 __ verify_oop(r0);
3911
3912 // continue
3913 __ bind(done);
3914 // Must prevent reordering of stores for object initialization with stores that publish the new object.
3915 __ membar(Assembler::StoreStore);
3916 }
3917
3918 void TemplateTable::defaultvalue() {
3919 transition(vtos, atos);
3920 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3921 __ get_constant_pool(c_rarg1);
3922 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::defaultvalue),
3923 c_rarg1, c_rarg2);
3924 __ verify_oop(r0);
3925 // Must prevent reordering of stores for object initialization with stores that publish the new object.
3926 __ membar(Assembler::StoreStore);
3927 }
3928
// withfield: produce a copy of the inline-type instance on the stack with
// one field replaced. All the heavy lifting is delegated to the runtime.
void TemplateTable::withfield() {
  transition(vtos, atos);
  resolve_cache_and_index(f2_byte, c_rarg1 /*cache*/, c_rarg2 /*index*/, sizeof(u2));

  // n.b. unlike x86 cache is now rcpool plus the indexed offset
  // so using rcpool to meet shared code expectations

  // The VM call returns the new buffered value as the oop result (r1);
  // r0 appears to carry the number of stack bytes to pop — NOTE(review):
  // confirm against InterpreterRuntime::withfield's return contract.
  call_VM(r1, CAST_FROM_FN_PTR(address, InterpreterRuntime::withfield), rcpool);
  __ verify_oop(r1);
  __ add(esp, esp, r0);  // pop the operands consumed by the bytecode
  __ mov(r0, r1);        // atos result expected in r0
}
3941
3942 void TemplateTable::newarray() {
3943 transition(itos, atos);
3944 __ load_unsigned_byte(c_rarg1, at_bcp(1));
3945 __ mov(c_rarg2, r0);
3946 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3947 c_rarg1, c_rarg2);
3948 // Must prevent reordering of stores for object initialization with stores that publish the new object.
3949 __ membar(Assembler::StoreStore);
3950 }
3951
3952 void TemplateTable::anewarray() {
3953 transition(itos, atos);
3954 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3955 __ get_constant_pool(c_rarg1);
3956 __ mov(c_rarg3, r0);
3957 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
3958 c_rarg1, c_rarg2, c_rarg3);
3959 // Must prevent reordering of stores for object initialization with stores that publish the new object.
3960 __ membar(Assembler::StoreStore);
3961 }
3993 __ bind(quicked);
3994 __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
3995 __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass
3996
3997 __ bind(resolved);
3998 __ load_klass(r19, r3);
3999
4000 // Generate subtype check. Blows r2, r5. Object in r3.
4001 // Superklass in r0. Subklass in r19.
4002 __ gen_subtype_check(r19, ok_is_subtype);
4003
4004 // Come here on failure
4005 __ push(r3);
4006 // object is at TOS
4007 __ b(Interpreter::_throw_ClassCastException_entry);
4008
4009 // Come here on success
4010 __ bind(ok_is_subtype);
4011 __ mov(r0, r3); // Restore object in r3
4012
4013 __ b(done);
4014 __ bind(is_null);
4015
4016 // Collect counts on whether this test sees NULLs a lot or not.
4017 if (ProfileInterpreter) {
4018 __ profile_null_seen(r2);
4019 }
4020
4021 if (EnableValhalla) {
4022 // Get cpool & tags index
4023 __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
4024 __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
4025 // See if bytecode has already been quicked
4026 __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
4027 __ lea(r1, Address(rscratch1, r19));
4028 __ ldarb(r1, r1);
4029 // See if CP entry is a Q-descriptor
4030 __ andr (r1, r1, JVM_CONSTANT_QDescBit);
4031 __ cmp(r1, (u1) JVM_CONSTANT_QDescBit);
4032 __ br(Assembler::NE, done);
4033 __ b(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
4034 }
4035
4036 __ bind(done);
4037 }
4038
4039 void TemplateTable::instanceof() {
4040 transition(atos, itos);
4041 Label done, is_null, ok_is_subtype, quicked, resolved;
4042 __ cbz(r0, is_null);
4043
4044 // Get cpool & tags index
4045 __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
4046 __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
4047 // See if bytecode has already been quicked
4048 __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
4049 __ lea(r1, Address(rscratch1, r19));
4050 __ ldarb(r1, r1);
4051 __ cmp(r1, (u1)JVM_CONSTANT_Class);
4052 __ br(Assembler::EQ, quicked);
4053
|