src/hotspot/cpu/x86/templateTable_x86.cpp

 750   // destroys rbx
 751   // check array
 752   __ null_check(array, arrayOopDesc::length_offset_in_bytes());
 753   // sign extend index for use by indexed load
 754   __ movl2ptr(index, index);
 755   // check index
 756   __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes()));
 757   if (index != rbx) {
 758     // ??? convention: move aberrant index into rbx for exception message
 759     assert(rbx != array, "different registers");
 760     __ movl(rbx, index);
 761   }
 762   Label skip;
 763   __ jccb(Assembler::below, skip);
 764   // Pass array to create more detailed exceptions.
 765   __ mov(NOT_LP64(rax) LP64_ONLY(c_rarg1), array);
 766   __ jump(ExternalAddress(Interpreter::_throw_ArrayIndexOutOfBoundsException_entry));
 767   __ bind(skip);
 768 }
 769 
 770 void TemplateTable::iaload() {
 771   transition(itos, itos);
 772   // rax: index
 773   // rdx: array
 774   index_check(rdx, rax); // kills rbx
 775   __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, rax,
 776                     Address(rdx, rax, Address::times_4,
 777                             arrayOopDesc::base_offset_in_bytes(T_INT)),
 778                     noreg, noreg);
 779 }
 780 
 781 void TemplateTable::laload() {
 782   transition(itos, ltos);
 783   // rax: index
 784   // rdx: array
 785   index_check(rdx, rax); // kills rbx
 786   NOT_LP64(__ mov(rbx, rax));
 787   // rbx: index
 788   __ access_load_at(T_LONG, IN_HEAP | IS_ARRAY, noreg /* ltos */,
 789                     Address(rdx, rbx, Address::times_8,
 790                             arrayOopDesc::base_offset_in_bytes(T_LONG)),
 791                     noreg, noreg);
 792 }
 793 
 794 
 795 
 796 void TemplateTable::faload() {
 797   transition(itos, ftos);
 798   // rax: index
 799   // rdx: array
 800   index_check(rdx, rax); // kills rbx
 801   __ access_load_at(T_FLOAT, IN_HEAP | IS_ARRAY, noreg /* ftos */,
 802                     Address(rdx, rax,
 803                             Address::times_4,
 804                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 805                     noreg, noreg);
 806 }
 807 
 808 void TemplateTable::daload() {
 809   transition(itos, dtos);
 810   // rax: index
 811   // rdx: array
 812   index_check(rdx, rax); // kills rbx
 813   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
 814                     Address(rdx, rax,
 815                             Address::times_8,
 816                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 817                     noreg, noreg);
 818 }
 819 
 820 void TemplateTable::aaload() {
 821   transition(itos, atos);
 822   // rax: index
 823   // rdx: array
 824   index_check(rdx, rax); // kills rbx
 825   do_oop_load(_masm,
 826               Address(rdx, rax,
 827                       UseCompressedOops ? Address::times_4 : Address::times_ptr,
 828                       arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 829               rax,
 830               IS_ARRAY);
 831 }
 832 
 833 void TemplateTable::baload() {
 834   transition(itos, itos);
 835   // rax: index
 836   // rdx: array
 837   index_check(rdx, rax); // kills rbx
 838   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
 839                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 840                     noreg, noreg);
 841 }
 842 
 843 void TemplateTable::caload() {
 844   transition(itos, itos);
 845   // rax: index
 846   // rdx: array
 847   index_check(rdx, rax); // kills rbx
 848   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 849                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 850                     noreg, noreg);
 851 }
 852 
 853 // iload followed by caload frequent pair
 854 void TemplateTable::fast_icaload() {
 855   transition(vtos, itos);
 856   // load index out of locals
 857   locals_index(rbx);
 858   __ movl(rax, iaddress(rbx));
 859 
 860   // rax: index
 861   // rdx: array
 862   index_check(rdx, rax); // kills rbx
 863   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 864                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 865                     noreg, noreg);
 866 }
 867 
 868 
 869 void TemplateTable::saload() {
 870   transition(itos, itos);
 871   // rax: index
 872   // rdx: array
 873   index_check(rdx, rax); // kills rbx
 874   __ access_load_at(T_SHORT, IN_HEAP | IS_ARRAY, rax,
 875                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)),
 876                     noreg, noreg);
 877 }
 878 
 879 void TemplateTable::iload(int n) {
 880   transition(vtos, itos);
 881   __ movl(rax, iaddress(n));
 882 }
 883 
 884 void TemplateTable::lload(int n) {
 885   transition(vtos, ltos);
 886   __ movptr(rax, laddress(n));
 887   NOT_LP64(__ movptr(rdx, haddress(n)));
 888 }
 889 
 890 void TemplateTable::fload(int n) {
 891   transition(vtos, ftos);
 892   __ load_float(faddress(n));
 893 }
 894 
 895 void TemplateTable::dload(int n) {
 896   transition(vtos, dtos);

1048   __ movdbl(daddress(rbx), xmm0);
1049 #else
1050   wide_lstore();
1051 #endif
1052 }
1053 
1054 void TemplateTable::wide_astore() {
1055   transition(vtos, vtos);
1056   __ pop_ptr(rax);
1057   locals_index_wide(rbx);
1058   __ movptr(aaddress(rbx), rax);
1059 }
1060 
1061 void TemplateTable::iastore() {
1062   transition(itos, vtos);
1063   __ pop_i(rbx);
1064   // rax: value
1065   // rbx: index
1066   // rdx: array
1067   index_check(rdx, rbx); // prefer index in rbx
1068   __ access_store_at(T_INT, IN_HEAP | IS_ARRAY,
1069                      Address(rdx, rbx, Address::times_4,
1070                              arrayOopDesc::base_offset_in_bytes(T_INT)),
1071                      rax, noreg, noreg);
1072 }
1073 
1074 void TemplateTable::lastore() {
1075   transition(ltos, vtos);
1076   __ pop_i(rbx);
1077   // rax: low(value)
1078   // rcx: array
1079   // rdx: high(value)
1080   index_check(rcx, rbx);  // prefer index in rbx
1081   // rbx: index
1082   __ access_store_at(T_LONG, IN_HEAP | IS_ARRAY,
1083                      Address(rcx, rbx, Address::times_8,
1084                              arrayOopDesc::base_offset_in_bytes(T_LONG)),
1085                      noreg /* ltos */, noreg, noreg);
1086 }
1087 
1088 
1089 void TemplateTable::fastore() {
1090   transition(ftos, vtos);
1091   __ pop_i(rbx);
1092   // value is in UseSSE >= 1 ? xmm0 : ST(0)
1093   // rbx:  index
1094   // rdx:  array
1095   index_check(rdx, rbx); // prefer index in rbx
1096   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1097                      Address(rdx, rbx, Address::times_4,
1098                              arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1099                      noreg /* ftos */, noreg, noreg);
1100 }
1101 
1102 void TemplateTable::dastore() {
1103   transition(dtos, vtos);
1104   __ pop_i(rbx);
1105   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1106   // rbx:  index
1107   // rdx:  array
1108   index_check(rdx, rbx); // prefer index in rbx
1109   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1110                      Address(rdx, rbx, Address::times_8,
1111                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1112                      noreg /* dtos */, noreg, noreg);
1113 }
1114 
1115 void TemplateTable::aastore() {
1116   Label is_null, ok_is_subtype, done;
1117   transition(vtos, vtos);
1118   // stack: ..., array, index, value
1119   __ movptr(rax, at_tos());    // value
1120   __ movl(rcx, at_tos_p1()); // index
1121   __ movptr(rdx, at_tos_p2()); // array
1122 
1123   Address element_address(rdx, rcx,
1124                           UseCompressedOops ? Address::times_4 : Address::times_ptr,
1125                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1126 
1127   index_check_without_pop(rdx, rcx);     // kills rbx
1128   __ testptr(rax, rax);
1129   __ jcc(Assembler::zero, is_null);
1130 
1131   // Move subklass into rbx
1132   __ load_klass(rbx, rax);
1133   // Move superklass into rax
1134   __ load_klass(rax, rdx);
1135   __ movptr(rax, Address(rax,
1136                          ObjArrayKlass::element_klass_offset()));
1137 
1138   // Generate subtype check.  Blows rcx, rdi
1139   // Superklass in rax.  Subklass in rbx.
1140   __ gen_subtype_check(rbx, ok_is_subtype);
1141 
1142   // Come here on failure
1143   // object is at TOS
1144   __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1145 
1146   // Come here on success

1165   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1166 }
1167 
1168 void TemplateTable::bastore() {
1169   transition(itos, vtos);
1170   __ pop_i(rbx);
1171   // rax: value
1172   // rbx: index
1173   // rdx: array
1174   index_check(rdx, rbx); // prefer index in rbx
1175   // Need to check whether array is boolean or byte
1176   // since both types share the bastore bytecode.
1177   __ load_klass(rcx, rdx);
1178   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1179   int diffbit = Klass::layout_helper_boolean_diffbit();
1180   __ testl(rcx, diffbit);
1181   Label L_skip;
1182   __ jccb(Assembler::zero, L_skip);
1183   __ andl(rax, 1);  // if it is a T_BOOLEAN array, mask the stored value to 0/1
1184   __ bind(L_skip);
1185   __ access_store_at(T_BYTE, IN_HEAP | IS_ARRAY,
1186                      Address(rdx, rbx, Address::times_1,
1187                              arrayOopDesc::base_offset_in_bytes(T_BYTE)),
1188                      rax, noreg, noreg);
1189 }
1190 
1191 void TemplateTable::castore() {
1192   transition(itos, vtos);
1193   __ pop_i(rbx);
1194   // rax: value
1195   // rbx: index
1196   // rdx: array
1197   index_check(rdx, rbx);  // prefer index in rbx
1198   __ access_store_at(T_CHAR, IN_HEAP | IS_ARRAY,
1199                      Address(rdx, rbx, Address::times_2,
1200                              arrayOopDesc::base_offset_in_bytes(T_CHAR)),
1201                      rax, noreg, noreg);
1202 }
1203 
1204 
1205 void TemplateTable::sastore() {
1206   castore();
1207 }
1208 
1209 void TemplateTable::istore(int n) {
1210   transition(itos, vtos);
1211   __ movl(iaddress(n), rax);
1212 }
1213 
1214 void TemplateTable::lstore(int n) {
1215   transition(ltos, vtos);
1216   __ movptr(laddress(n), rax);
1217   NOT_LP64(__ movptr(haddress(n), rdx));
1218 }
1219 
1220 void TemplateTable::fstore(int n) {
1221   transition(ftos, vtos);

2774   assert_different_registers(cache, index, flags, off);
2775 
2776   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2777   // Field offset
2778   __ movptr(off, Address(cache, index, Address::times_ptr,
2779                          in_bytes(cp_base_offset +
2780                                   ConstantPoolCacheEntry::f2_offset())));
2781   // Flags
2782   __ movl(flags, Address(cache, index, Address::times_ptr,
2783                          in_bytes(cp_base_offset +
2784                                   ConstantPoolCacheEntry::flags_offset())));
2785 
2786   // klass overwrite register
2787   if (is_static) {
2788     __ movptr(obj, Address(cache, index, Address::times_ptr,
2789                            in_bytes(cp_base_offset +
2790                                     ConstantPoolCacheEntry::f1_offset())));
2791     const int mirror_offset = in_bytes(Klass::java_mirror_offset());
2792     __ movptr(obj, Address(obj, mirror_offset));
2793     __ resolve_oop_handle(obj);
2794   }
2795 }
2796 
2797 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
2798                                                Register method,
2799                                                Register itable_index,
2800                                                Register flags,
2801                                                bool is_invokevirtual,
2802                                                bool is_invokevfinal, /*unused*/
2803                                                bool is_invokedynamic) {
2804   // setup registers
2805   const Register cache = rcx;
2806   const Register index = rdx;
2807   assert_different_registers(method, flags);
2808   assert_different_registers(method, cache, index);
2809   assert_different_registers(itable_index, flags);
2810   assert_different_registers(itable_index, cache, index);
2811   // determine constant pool cache field offsets
2812   assert(is_invokevirtual == (byte_no == f2_byte), "is_invokevirtual flag redundant");
2813   const int flags_offset = in_bytes(ConstantPoolCache::base_offset() +

2869 }
2870 
2871 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2872   transition(vtos, vtos);
2873 
2874   const Register cache = rcx;
2875   const Register index = rdx;
2876   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2877   const Register off   = rbx;
2878   const Register flags = rax;
2879   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2880 
2881   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2882   jvmti_post_field_access(cache, index, is_static, false);
2883   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2884 
2885   if (!is_static) pop_and_check_object(obj);
2886 
2887   const Address field(obj, off, Address::times_1, 0*wordSize);
2888 
2889   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;
2890 
2891   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2892   // Make sure we don't need to mask edx after the above shift
2893   assert(btos == 0, "change code, btos != 0");
2894 
2895   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2896 
2897   __ jcc(Assembler::notZero, notByte);
2898   // btos
2899   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
2900   __ push(btos);
2901   // Rewrite bytecode to be faster
2902   if (!is_static && rc == may_rewrite) {
2903     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2904   }
2905   __ jmp(Done);
2906 
2907   __ bind(notByte);
2908   __ cmpl(flags, ztos);
2909   __ jcc(Assembler::notEqual, notBool);
2910 
2911   // ztos (same code as btos)
2912   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
2913   __ push(ztos);
2914   // Rewrite bytecode to be faster
2915   if (!is_static && rc == may_rewrite) {
2916     // use btos rewriting, no truncating to t/f bit is needed for getfield.
2917     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2918   }
2919   __ jmp(Done);
2920 
2921   __ bind(notBool);
2922   __ cmpl(flags, atos);
2923   __ jcc(Assembler::notEqual, notObj);
2924   // atos
2925   do_oop_load(_masm, field, rax);
2926   __ push(atos);
2927   if (!is_static && rc == may_rewrite) {
2928     patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2929   }
2930   __ jmp(Done);
2931 
2932   __ bind(notObj);
2933   __ cmpl(flags, itos);
2934   __ jcc(Assembler::notEqual, notInt);
2935   // itos
2936   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
2937   __ push(itos);
2938   // Rewrite bytecode to be faster
2939   if (!is_static && rc == may_rewrite) {
2940     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2941   }
2942   __ jmp(Done);
2943 
2944   __ bind(notInt);
2945   __ cmpl(flags, ctos);
2946   __ jcc(Assembler::notEqual, notChar);
2947   // ctos
2948   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
2949   __ push(ctos);
2950   // Rewrite bytecode to be faster
2951   if (!is_static && rc == may_rewrite) {
2952     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
2953   }
2954   __ jmp(Done);
2955 
2956   __ bind(notChar);
2957   __ cmpl(flags, stos);
2958   __ jcc(Assembler::notEqual, notShort);
2959   // stos
2960   __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
2961   __ push(stos);
2962   // Rewrite bytecode to be faster
2963   if (!is_static && rc == may_rewrite) {
2964     patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
2965   }
2966   __ jmp(Done);
2967 
2968   __ bind(notShort);
2969   __ cmpl(flags, ltos);
2970   __ jcc(Assembler::notEqual, notLong);
2971   // ltos
2972   // Generate code as if volatile (x86_32).  There just aren't enough registers to
2973   // save that information and this code is faster than the test.
2974   __ access_load_at(T_LONG, IN_HEAP | MO_RELAXED, noreg /* ltos */, field, noreg, noreg);
2975   __ push(ltos);
2976   // Rewrite bytecode to be faster
2977   LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
2978   __ jmp(Done);
2979 
2980   __ bind(notLong);
2981   __ cmpl(flags, ftos);
2982   __ jcc(Assembler::notEqual, notFloat);
2983   // ftos
2984 
2985   __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
2986   __ push(ftos);
2987   // Rewrite bytecode to be faster
2988   if (!is_static && rc == may_rewrite) {
2989     patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
2990   }
2991   __ jmp(Done);
2992 
2993   __ bind(notFloat);
2994 #ifdef ASSERT
2995   Label notDouble;
2996   __ cmpl(flags, dtos);
2997   __ jcc(Assembler::notEqual, notDouble);
2998 #endif
2999   // dtos
3000   // MO_RELAXED: even for a volatile field this adds no extra work in the underlying implementation
3001   __ access_load_at(T_DOUBLE, IN_HEAP | MO_RELAXED, noreg /* dtos */, field, noreg, noreg);
3002   __ push(dtos);
3003   // Rewrite bytecode to be faster
3004   if (!is_static && rc == may_rewrite) {
3005     patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
3006   }
3007 #ifdef ASSERT
3008   __ jmp(Done);
3009 
3010   __ bind(notDouble);
3011   __ stop("Bad state");
3012 #endif
3013 
3014   __ bind(Done);
3015   // [jk] not needed currently
3016   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3017   //                                              Assembler::LoadStore));
3018 }
3019 
3020 void TemplateTable::getfield(int byte_no) {
3021   getfield_or_static(byte_no, false);
3022 }

3118 
3119 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3120   transition(vtos, vtos);
3121 
3122   const Register cache = rcx;
3123   const Register index = rdx;
3124   const Register obj   = rcx;
3125   const Register off   = rbx;
3126   const Register flags = rax;
3127 
3128   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
3129   jvmti_post_field_mod(cache, index, is_static);
3130   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
3131 
3132   // [jk] not needed currently
3133   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3134   //                                              Assembler::StoreStore));
3135 
3136   Label notVolatile, Done;
3137   __ movl(rdx, flags);
3138   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3139   __ andl(rdx, 0x1);
3140 
3141   // Check for volatile store
3142   __ testl(rdx, rdx);
3143   __ jcc(Assembler::zero, notVolatile);
3144 
3145   putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags);
3146   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3147                                                Assembler::StoreStore));
3148   __ jmp(Done);
3149   __ bind(notVolatile);
3150 
3151   putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags);
3152 
3153   __ bind(Done);
3154 }
3155 
3156 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3157                                               Register obj, Register off, Register flags) {
3158 
3159   // field addresses
3160   const Address field(obj, off, Address::times_1, 0*wordSize);
3161   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3162 
3163   Label notByte, notBool, notInt, notShort, notChar,
3164         notLong, notFloat, notObj;
3165   Label Done;
3166 
3167   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3168 
3169   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3170 
3171   assert(btos == 0, "change code, btos != 0");
3172   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3173   __ jcc(Assembler::notZero, notByte);
3174 
3175   // btos
3176   {
3177     __ pop(btos);
3178     if (!is_static) pop_and_check_object(obj);
3179     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3180     if (!is_static && rc == may_rewrite) {
3181       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3182     }
3183     __ jmp(Done);
3184   }
3185 
3186   __ bind(notByte);
3187   __ cmpl(flags, ztos);
3188   __ jcc(Assembler::notEqual, notBool);
3189 
3190   // ztos
3191   {
3192     __ pop(ztos);
3193     if (!is_static) pop_and_check_object(obj);
3194     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
3195     if (!is_static && rc == may_rewrite) {
3196       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3197     }
3198     __ jmp(Done);
3199   }
3200 
3201   __ bind(notBool);
3202   __ cmpl(flags, atos);
3203   __ jcc(Assembler::notEqual, notObj);
3204 
3205   // atos
3206   {
3207     __ pop(atos);
3208     if (!is_static) pop_and_check_object(obj);
3209     // Store into the field
3210     do_oop_store(_masm, field, rax);
3211     if (!is_static && rc == may_rewrite) {
3212       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3213     }
3214     __ jmp(Done);
3215   }
3216 
3217   __ bind(notObj);
3218   __ cmpl(flags, itos);
3219   __ jcc(Assembler::notEqual, notInt);
3220 
3221   // itos
3222   {
3223     __ pop(itos);
3224     if (!is_static) pop_and_check_object(obj);
3225     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3226     if (!is_static && rc == may_rewrite) {
3227       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3228     }
3229     __ jmp(Done);
3230   }
3231 
3232   __ bind(notInt);
3233   __ cmpl(flags, ctos);
3234   __ jcc(Assembler::notEqual, notChar);
3235 
3236   // ctos
3237   {
3238     __ pop(ctos);
3239     if (!is_static) pop_and_check_object(obj);
3240     __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg);
3241     if (!is_static && rc == may_rewrite) {
3242       patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
3243     }
3244     __ jmp(Done);
3245   }
3246 
3247   __ bind(notChar);
3248   __ cmpl(flags, stos);
3249   __ jcc(Assembler::notEqual, notShort);
3250 
3251   // stos
3252   {
3253     __ pop(stos);
3254     if (!is_static) pop_and_check_object(obj);
3255     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
3256     if (!is_static && rc == may_rewrite) {
3257       patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
3258     }
3259     __ jmp(Done);
3260   }
3261 
3262   __ bind(notShort);
3263   __ cmpl(flags, ltos);
3264   __ jcc(Assembler::notEqual, notLong);
3265 
3266   // ltos
3267   {
3268     __ pop(ltos);
3269     if (!is_static) pop_and_check_object(obj);
3270     // MO_RELAXED: an atomic store is still generated, which covers the volatile case (important on x86_32)
3271     __ access_store_at(T_LONG, IN_HEAP | MO_RELAXED, field, noreg /* ltos*/, noreg, noreg);
3272 #ifdef _LP64
3273     if (!is_static && rc == may_rewrite) {
3274       patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
3275     }
3276 #endif // _LP64
3277     __ jmp(Done);
3278   }
3279 
3280   __ bind(notLong);
3281   __ cmpl(flags, ftos);
3282   __ jcc(Assembler::notEqual, notFloat);
3283 
3284   // ftos
3285   {
3286     __ pop(ftos);
3287     if (!is_static) pop_and_check_object(obj);
3288     __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
3289     if (!is_static && rc == may_rewrite) {
3290       patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
3291     }
3292     __ jmp(Done);
3293   }
3294 
3295   __ bind(notFloat);
3296 #ifdef ASSERT
3297   Label notDouble;
3298   __ cmpl(flags, dtos);
3299   __ jcc(Assembler::notEqual, notDouble);
3300 #endif
3301 
3302   // dtos
3303   {
3304     __ pop(dtos);
3305     if (!is_static) pop_and_check_object(obj);
3306     // MO_RELAXED: even for a volatile field this adds no extra work in the underlying implementation
3307     __ access_store_at(T_DOUBLE, IN_HEAP | MO_RELAXED, field, noreg /* dtos */, noreg, noreg);
3308     if (!is_static && rc == may_rewrite) {
3309       patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
3310     }
3311   }
3312 
3313 #ifdef ASSERT
3314   __ jmp(Done);
3315 
3316   __ bind(notDouble);
3317   __ stop("Bad state");
3318 #endif
3319 
3320   __ bind(Done);
3321 }
3322 
3323 void TemplateTable::putfield(int byte_no) {
3324   putfield_or_static(byte_no, false);
3325 }

4109     } else {
4110       __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
4111                 (intptr_t)markWord::prototype().value()); // header
4112       __ pop(rcx);   // get saved klass back in the register.
4113     }
4114 #ifdef _LP64
4115     __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
4116     __ store_klass_gap(rax, rsi);  // zero klass gap for compressed oops
4117 #endif
4118     __ store_klass(rax, rcx);  // klass
4119 
4120     {
4121       SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0);
4122       // Trigger dtrace event for fastpath
4123       __ push(atos);
4124       __ call_VM_leaf(
4125            CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), rax);
4126       __ pop(atos);
4127     }
4128 
4129     __ jmp(done);
4130   }
4131 
4132   // slow case
4133   __ bind(slow_case);
4134   __ pop(rcx);   // restore stack pointer to what it was when we came in.
4135   __ bind(slow_case_no_pop);
4136 
4137   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4138   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4139 
4140   __ get_constant_pool(rarg1);
4141   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4142   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4143   __ verify_oop(rax);
4144 
4145   // continue
4146   __ bind(done);
4147 }
4148 

 750   // destroys rbx
 751   // check array
 752   __ null_check(array, arrayOopDesc::length_offset_in_bytes());
 753   // sign extend index for use by indexed load
 754   __ movl2ptr(index, index);
 755   // check index
 756   __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes()));
 757   if (index != rbx) {
 758     // ??? convention: move aberrant index into rbx for exception message
 759     assert(rbx != array, "different registers");
 760     __ movl(rbx, index);
 761   }
 762   Label skip;
 763   __ jccb(Assembler::below, skip);
 764   // Pass array to create more detailed exceptions.
 765   __ mov(NOT_LP64(rax) LP64_ONLY(c_rarg1), array);
 766   __ jump(ExternalAddress(Interpreter::_throw_ArrayIndexOutOfBoundsException_entry));
 767   __ bind(skip);
 768 }
 769 
 770 #if INCLUDE_TSAN
 771 
 772 TemplateTable::TsanMemoryReleaseAcquireFunction TemplateTable::tsan_release_acquire_method(
 773     TsanMemoryReadWriteFunction tsan_function) {
 774   if (tsan_function == SharedRuntime::tsan_read1
 775       || tsan_function == SharedRuntime::tsan_read2
 776       || tsan_function == SharedRuntime::tsan_read4
 777       || tsan_function == SharedRuntime::tsan_read8) {
 778     return SharedRuntime::tsan_acquire;
 779   } else if (tsan_function == SharedRuntime::tsan_write1
 780       || tsan_function == SharedRuntime::tsan_write2
 781       || tsan_function == SharedRuntime::tsan_write4
 782       || tsan_function == SharedRuntime::tsan_write8) {
 783     return SharedRuntime::tsan_release;
 784   }
 785   ShouldNotReachHere();
 786   return NULL;
 787 }
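
A note on the mapping (an editorial sketch, not part of the patch): every sized read entry point reports an acquire and every sized write a release. The happens-before edge this models is the ordinary release/acquire pairing, shown here with std::atomic and illustrative names:

  #include <atomic>
  #include <cassert>

  std::atomic<int> guard{0};   // stands in for a volatile Java field
  int payload = 0;

  void writer() {              // a volatile store reports tsan_release(&guard)
    payload = 42;
    guard.store(1, std::memory_order_release);
  }

  void reader() {              // a volatile load reports tsan_acquire(&guard)
    if (guard.load(std::memory_order_acquire) == 1) {
      assert(payload == 42);   // ordered by the release/acquire pair
    }
  }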
 788 
 789 void TemplateTable::tsan_observe_get_or_put(
 790     const Address &field,
 791     Register flags,
 792     TsanMemoryReadWriteFunction tsan_function,
 793     TosState tos) {
 794   assert(flags == rdx, "flags should be in rdx register");
 795   assert(ThreadSanitizer, "ThreadSanitizer should be set");
 796 
 797   TsanMemoryReleaseAcquireFunction releaseAcquireFunction =
 798       tsan_release_acquire_method(tsan_function);
 799 
 800   Label done, notAcquireRelease;
 801 
 802   // We could save some instructions by only saving the registers we need.
 803   __ pusha();
 804   // pusha() doesn't save xmm0, which tsan_function clobbers and the
 805   // interpreter still needs.
 806   // This really only needs to be done for some of the float/double accesses,
 807   // but it's here because it's cleaner.
 808   __ push_d(xmm0);
 809   DEBUG_ONLY(
 810     __ pusha();
 811     __ movptr(c_rarg0, field.base());
 812     __ leaq(c_rarg1, field);
 813     __ subq(c_rarg1, field.base());
 814     __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::verify_oop_index),
 815                     c_rarg0 /* oop */, c_rarg1 /* index */);
 816     __ popa();
 817   );
 818   // For volatile reads/writes use an acquire/release.
 819   // If a reference is annotated to be ignored, assume it's safe to
 820   // access the object it's referring to and create a happens-before relation
 821   // between the accesses to this reference.
 822   int32_t acquire_release_mask = 1 << ConstantPoolCacheEntry::is_volatile_shift |
 823       ((tos == atos) ? 1 << ConstantPoolCacheEntry::is_tsan_ignore_shift : 0);
 824   __ testl(flags, acquire_release_mask);
 825   __ jcc(Assembler::zero, notAcquireRelease);
 826 
 827   __ leaq(c_rarg0, field);
 828   __ call_VM_leaf(CAST_FROM_FN_PTR(address, releaseAcquireFunction), c_rarg0);
 829   if (ThreadSanitizerJavaMemory) {
 830     __ jmp(done);
 831 
 832     __ bind(notAcquireRelease);
 833     // Ignore reads/writes to final fields. They can't be racy.
 834     int32_t ignore_mask = 1 << ConstantPoolCacheEntry::is_final_shift |
 835         1 << ConstantPoolCacheEntry::is_tsan_ignore_shift;
 836     __ testl(flags, ignore_mask);
 837     __ jcc(Assembler::notZero, done);
 838 
 839     __ leaq(c_rarg0, field);
 840     __ get_method(c_rarg1);
 841     __ call_VM_leaf(CAST_FROM_FN_PTR(address, tsan_function),
 842                     c_rarg0 /* addr */, c_rarg1 /* method */, rbcp /* bcp */);
 843 
 844     __ bind(done);
 845   } else {
 846     __ bind(notAcquireRelease);
 847   }
 848   __ pop_d(xmm0);
 849   __ popa();
 850 }
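
The two bit tests above, restated as a sketch (not part of the patch; the shift values below are placeholders, the real ones live in ConstantPoolCacheEntry):

  #include <cstdint>

  enum { is_volatile_shift = 21, is_final_shift = 22, is_tsan_ignore_shift = 27 }; // assumed values

  bool use_acquire_release(uint32_t flags, bool is_reference /* tos == atos */) {
    uint32_t mask = 1u << is_volatile_shift;
    if (is_reference) mask |= 1u << is_tsan_ignore_shift; // ignored refs still get an edge
    return (flags & mask) != 0;
  }

  bool skip_plain_report(uint32_t flags) {
    // Final fields can't be racy once safely published; ignored fields opt out.
    return (flags & ((1u << is_final_shift) | (1u << is_tsan_ignore_shift))) != 0;
  }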
 851 
 852 void TemplateTable::tsan_observe_load_or_store(
 853     const Address& field, TsanMemoryReadWriteFunction tsan_function) {
 854   assert(ThreadSanitizer, "ThreadSanitizer should be set");
 855   if (!ThreadSanitizerJavaMemory) {
 856     return;
 857   }
 858   // We could save some instructions by only saving the registers we need.
 859   __ pusha();
 860   // pusha() doesn't save xmm0, which tsan_function clobbers and the
 861   // interpreter still needs.
 862   // This really only needs to be done for some of the float/double accesses,
 863   // but it's here because it's cleaner.
 864   __ push_d(xmm0);
 865   DEBUG_ONLY(
 866     __ pusha();
 867     __ movptr(c_rarg0, field.base());
 868     __ leaq(c_rarg1, field);
 869     __ subq(c_rarg1, field.base());
 870     __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::verify_oop_index),
 871                     c_rarg0 /* oop */, c_rarg1 /* index */);
 872     __ popa();
 873   );
 874   __ leaq(c_rarg0, field);
 875   __ get_method(c_rarg1);
 876   __ call_VM_leaf(CAST_FROM_FN_PTR(address, tsan_function),
 877                   c_rarg0 /* addr */, c_rarg1 /* method */, rbcp /* bcp */);
 878   __ pop_d(xmm0);
 879   __ popa();
 880 }
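
Every instrumented access below follows the same shape: compute the Address once, report it together with the method and bcp for attribution, then perform the real load or store. A rough sketch for a 4-byte array read (simplified types; the real entry point takes the address, Method* and bcp, as the call above shows):

  extern "C" void tsan_read4(void* addr, void* method, void* bcp); // assumed C-style shape

  void iaload_shape(int* array_base, int index, void* method, void* bcp) {
    int* addr = &array_base[index];  // leaq(c_rarg0, field)
    tsan_read4(addr, method, bcp);   // call_VM_leaf(..., addr, method, rbcp)
    int value = *addr;               // the actual access_load_at(T_INT, ...)
    (void)value;
  }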
 881 
 882 #endif  // INCLUDE_TSAN
 883 
 884 void TemplateTable::iaload() {
 885   transition(itos, itos);
 886   // rax: index
 887   // rdx: array
 888   index_check(rdx, rax); // kills rbx
 889   Address addr(rdx, rax, Address::times_4,
 890                arrayOopDesc::base_offset_in_bytes(T_INT));
 891   TSAN_RUNTIME_ONLY(tsan_observe_load_or_store(addr, SharedRuntime::tsan_read4));
 892   __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, rax, addr, noreg, noreg);
 893 }
 894 
 895 void TemplateTable::laload() {
 896   transition(itos, ltos);
 897   // rax: index
 898   // rdx: array
 899   index_check(rdx, rax); // kills rbx
 900   NOT_LP64(__ mov(rbx, rax));
 901   // rbx: index
 902   Address addr(rdx, rbx, Address::times_8,
 903                arrayOopDesc::base_offset_in_bytes(T_LONG));
 904   TSAN_RUNTIME_ONLY(tsan_observe_load_or_store(addr, SharedRuntime::tsan_read8));
 905   __ access_load_at(T_LONG, IN_HEAP | IS_ARRAY, noreg /* ltos */, addr, noreg,
 906                     noreg);
 907 }
 908 
 909 
 910 
 911 void TemplateTable::faload() {
 912   transition(itos, ftos);
 913   // rax: index
 914   // rdx: array
 915   index_check(rdx, rax); // kills rbx
 916   Address addr(rdx, rax, Address::times_4,
 917                arrayOopDesc::base_offset_in_bytes(T_FLOAT));
 918   TSAN_RUNTIME_ONLY(tsan_observe_load_or_store(addr, SharedRuntime::tsan_read4));
 919   __ access_load_at(T_FLOAT, IN_HEAP | IS_ARRAY, noreg /* ftos */, addr, noreg,
 920                     noreg);
 921 }
 922 
 923 void TemplateTable::daload() {
 924   transition(itos, dtos);
 925   // rax: index
 926   // rdx: array
 927   index_check(rdx, rax); // kills rbx
 928   Address addr(rdx, rax, Address::times_8,
 929                arrayOopDesc::base_offset_in_bytes(T_DOUBLE));
 930   TSAN_RUNTIME_ONLY(tsan_observe_load_or_store(addr, SharedRuntime::tsan_read8));
 931   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */, addr, noreg,
 932                     noreg);
 933 }
 934 
 935 void TemplateTable::aaload() {
 936   transition(itos, atos);
 937   // rax: index
 938   // rdx: array
 939   index_check(rdx, rax); // kills rbx
 940   Address addr(rdx, rax,
 941                UseCompressedOops ? Address::times_4 : Address::times_ptr,
 942                arrayOopDesc::base_offset_in_bytes(T_OBJECT));
 943   TSAN_RUNTIME_ONLY(tsan_observe_load_or_store(
 944       addr, UseCompressedOops ? SharedRuntime::tsan_read4
 945                               : SharedRuntime::tsan_read8));
 946   do_oop_load(_masm, addr, rax, IS_ARRAY);
 947 }
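
aaload is the one array load whose width depends on VM mode, so both the address scale and the reported read size are chosen by UseCompressedOops. The element-offset arithmetic, as a sketch:

  #include <cstddef>
  #include <cstdint>

  size_t oop_element_offset(size_t base_offset, size_t index, bool compressed_oops) {
    size_t scale = compressed_oops ? sizeof(uint32_t)  // narrowOop, times_4
                                   : sizeof(void*);    // full oop, times_ptr
    return base_offset + index * scale;                // Address(rdx, rax, scale, base)
  }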
 948 
 949 void TemplateTable::baload() {
 950   transition(itos, itos);
 951   // rax: index
 952   // rdx: array
 953   index_check(rdx, rax); // kills rbx
 954   Address addr(rdx, rax, Address::times_1,
 955                arrayOopDesc::base_offset_in_bytes(T_BYTE));
 956   TSAN_RUNTIME_ONLY(tsan_observe_load_or_store(addr, SharedRuntime::tsan_read1));
 957   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax, addr, noreg, noreg);
 958 }
 959 
 960 void TemplateTable::caload() {
 961   transition(itos, itos);
 962   // rax: index
 963   // rdx: array
 964   index_check(rdx, rax); // kills rbx
 965   Address addr(rdx, rax, Address::times_2,
 966                arrayOopDesc::base_offset_in_bytes(T_CHAR));
 967   TSAN_RUNTIME_ONLY(tsan_observe_load_or_store(addr, SharedRuntime::tsan_read2));
 968   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax, addr, noreg, noreg);
 969 }
 970 
 971 // iload followed by caload frequent pair
 972 void TemplateTable::fast_icaload() {
 973   transition(vtos, itos);
 974   // load index out of locals
 975   locals_index(rbx);
 976   __ movl(rax, iaddress(rbx));
 977 
 978   // rax: index
 979   // rdx: array
 980   index_check(rdx, rax); // kills rbx
 981   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 982                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 983                     noreg, noreg);
 984 }
 985 
 986 
 987 void TemplateTable::saload() {
 988   transition(itos, itos);
 989   // rax: index
 990   // rdx: array
 991   index_check(rdx, rax); // kills rbx
 992   Address addr(rdx, rax, Address::times_2,
 993                arrayOopDesc::base_offset_in_bytes(T_SHORT));
 994   TSAN_RUNTIME_ONLY(tsan_observe_load_or_store(addr, SharedRuntime::tsan_read2));
 995   __ access_load_at(T_SHORT, IN_HEAP | IS_ARRAY, rax, addr, noreg, noreg);
 996 }
 997 
 998 void TemplateTable::iload(int n) {
 999   transition(vtos, itos);
1000   __ movl(rax, iaddress(n));
1001 }
1002 
1003 void TemplateTable::lload(int n) {
1004   transition(vtos, ltos);
1005   __ movptr(rax, laddress(n));
1006   NOT_LP64(__ movptr(rdx, haddress(n)));
1007 }
1008 
1009 void TemplateTable::fload(int n) {
1010   transition(vtos, ftos);
1011   __ load_float(faddress(n));
1012 }
1013 
1014 void TemplateTable::dload(int n) {
1015   transition(vtos, dtos);

1167   __ movdbl(daddress(rbx), xmm0);
1168 #else
1169   wide_lstore();
1170 #endif
1171 }
1172 
1173 void TemplateTable::wide_astore() {
1174   transition(vtos, vtos);
1175   __ pop_ptr(rax);
1176   locals_index_wide(rbx);
1177   __ movptr(aaddress(rbx), rax);
1178 }
1179 
1180 void TemplateTable::iastore() {
1181   transition(itos, vtos);
1182   __ pop_i(rbx);
1183   // rax: value
1184   // rbx: index
1185   // rdx: array
1186   index_check(rdx, rbx); // prefer index in rbx
1187   Address addr(rdx, rbx, Address::times_4,
1188                arrayOopDesc::base_offset_in_bytes(T_INT));
1189   TSAN_RUNTIME_ONLY(tsan_observe_load_or_store(addr, SharedRuntime::tsan_write4));
1190   __ access_store_at(T_INT, IN_HEAP | IS_ARRAY, addr, rax, noreg, noreg);
1191 }
1192 
1193 void TemplateTable::lastore() {
1194   transition(ltos, vtos);
1195   __ pop_i(rbx);
1196   // rax: low(value)
1197   // rcx: array
1198   // rdx: high(value)
1199   index_check(rcx, rbx);  // prefer index in rbx
1200   // rbx: index
1201   Address addr(rcx, rbx, Address::times_8,
1202                arrayOopDesc::base_offset_in_bytes(T_LONG));
1203   TSAN_RUNTIME_ONLY(tsan_observe_load_or_store(addr, SharedRuntime::tsan_write8));
1204   __ access_store_at(T_LONG, IN_HEAP | IS_ARRAY, addr, noreg /* ltos */, noreg,
1205                      noreg);
1206 }
1207 
1208 
1209 void TemplateTable::fastore() {
1210   transition(ftos, vtos);
1211   __ pop_i(rbx);
1212   // value is in UseSSE >= 1 ? xmm0 : ST(0)
1213   // rbx:  index
1214   // rdx:  array
1215   index_check(rdx, rbx); // prefer index in rbx
1216   Address addr(rdx, rbx, Address::times_4,
1217                arrayOopDesc::base_offset_in_bytes(T_FLOAT));
1218   TSAN_RUNTIME_ONLY(tsan_observe_load_or_store(addr, SharedRuntime::tsan_write4));
1219   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY, addr, noreg /* ftos */, noreg,
1220                      noreg);
1221 }
1222 
1223 void TemplateTable::dastore() {
1224   transition(dtos, vtos);
1225   __ pop_i(rbx);
1226   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1227   // rbx:  index
1228   // rdx:  array
1229   index_check(rdx, rbx); // prefer index in rbx
1230   Address addr(rdx, rbx, Address::times_8,
1231                arrayOopDesc::base_offset_in_bytes(T_DOUBLE));
1232   TSAN_RUNTIME_ONLY(tsan_observe_load_or_store(addr, SharedRuntime::tsan_write8));
1233   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, addr, noreg /* dtos */,
1234                      noreg, noreg);
1235 }
1236 
1237 void TemplateTable::aastore() {
1238   Label is_null, ok_is_subtype, done;
1239   transition(vtos, vtos);
1240   // stack: ..., array, index, value
1241   __ movptr(rax, at_tos());    // value
1242   __ movl(rcx, at_tos_p1()); // index
1243   __ movptr(rdx, at_tos_p2()); // array
1244 
1245   Address element_address(rdx, rcx,
1246                           UseCompressedOops ? Address::times_4 : Address::times_ptr,
1247                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1248 
1249   TSAN_RUNTIME_ONLY(tsan_observe_load_or_store(
1250       element_address, UseCompressedOops ? SharedRuntime::tsan_write4
1251                                          : SharedRuntime::tsan_write8));
1252 
1253   index_check_without_pop(rdx, rcx);     // kills rbx
1254   __ testptr(rax, rax);
1255   __ jcc(Assembler::zero, is_null);
1256 
1257   // Move subklass into rbx
1258   __ load_klass(rbx, rax);
1259   // Move superklass into rax
1260   __ load_klass(rax, rdx);
1261   __ movptr(rax, Address(rax,
1262                          ObjArrayKlass::element_klass_offset()));
1263 
1264   // Generate subtype check.  Blows rcx, rdi
1265   // Superklass in rax.  Subklass in rbx.
1266   __ gen_subtype_check(rbx, ok_is_subtype);
1267 
1268   // Come here on failure
1269   // object is at TOS
1270   __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1271 
1272   // Come here on success

1291   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1292 }
1293 
1294 void TemplateTable::bastore() {
1295   transition(itos, vtos);
1296   __ pop_i(rbx);
1297   // rax: value
1298   // rbx: index
1299   // rdx: array
1300   index_check(rdx, rbx); // prefer index in rbx
1301   // Need to check whether array is boolean or byte
1302   // since both types share the bastore bytecode.
1303   __ load_klass(rcx, rdx);
1304   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1305   int diffbit = Klass::layout_helper_boolean_diffbit();
1306   __ testl(rcx, diffbit);
1307   Label L_skip;
1308   __ jccb(Assembler::zero, L_skip);
1309   __ andl(rax, 1);  // if it is a T_BOOLEAN array, mask the stored value to 0/1
1310   __ bind(L_skip);
1311   Address addr(rdx, rbx, Address::times_1,
1312                arrayOopDesc::base_offset_in_bytes(T_BYTE));
1313   TSAN_RUNTIME_ONLY(tsan_observe_load_or_store(addr, SharedRuntime::tsan_write1));
1314   __ access_store_at(T_BYTE, IN_HEAP | IS_ARRAY, addr, rax, noreg, noreg);
1315 }
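
bastore serves both byte[] and boolean[], so the layout-helper diffbit decides whether the value must be normalized to 0/1 before the store. The same logic as a plain sketch (diffbit comes from Klass::layout_helper_boolean_diffbit()):

  #include <cstdint>

  int8_t bastore_value(int32_t layout_helper, int32_t diffbit, int32_t value) {
    if (layout_helper & diffbit) {  // testl(rcx, diffbit) / jccb(zero, L_skip)
      value &= 1;                   // andl(rax, 1): boolean arrays hold only 0 or 1
    }
    return (int8_t)value;
  }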
1316 
1317 void TemplateTable::castore() {
1318   transition(itos, vtos);
1319   __ pop_i(rbx);
1320   // rax: value
1321   // rbx: index
1322   // rdx: array
1323   index_check(rdx, rbx);  // prefer index in rbx
1324   Address addr(rdx, rbx, Address::times_2,
1325                arrayOopDesc::base_offset_in_bytes(T_CHAR));
1326   TSAN_RUNTIME_ONLY(tsan_observe_load_or_store(addr, SharedRuntime::tsan_write2));
1327   __ access_store_at(T_CHAR, IN_HEAP | IS_ARRAY, addr, rax, noreg, noreg);
1328 }
1329 
1330 
1331 void TemplateTable::sastore() {
1332   castore();
1333 }
1334 
1335 void TemplateTable::istore(int n) {
1336   transition(itos, vtos);
1337   __ movl(iaddress(n), rax);
1338 }
1339 
1340 void TemplateTable::lstore(int n) {
1341   transition(ltos, vtos);
1342   __ movptr(laddress(n), rax);
1343   NOT_LP64(__ movptr(haddress(n), rdx));
1344 }
1345 
1346 void TemplateTable::fstore(int n) {
1347   transition(ftos, vtos);

2900   assert_different_registers(cache, index, flags, off);
2901 
2902   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2903   // Field offset
2904   __ movptr(off, Address(cache, index, Address::times_ptr,
2905                          in_bytes(cp_base_offset +
2906                                   ConstantPoolCacheEntry::f2_offset())));
2907   // Flags
2908   __ movl(flags, Address(cache, index, Address::times_ptr,
2909                          in_bytes(cp_base_offset +
2910                                   ConstantPoolCacheEntry::flags_offset())));
2911 
2912   // klass overwrite register
2913   if (is_static) {
2914     __ movptr(obj, Address(cache, index, Address::times_ptr,
2915                            in_bytes(cp_base_offset +
2916                                     ConstantPoolCacheEntry::f1_offset())));
2917     const int mirror_offset = in_bytes(Klass::java_mirror_offset());
2918     __ movptr(obj, Address(obj, mirror_offset));
2919     __ resolve_oop_handle(obj);
2920     TSAN_RUNTIME_ONLY(
2921       // Draw a happens-before edge from the class's static initializer to
2922       // this lookup.
2923 
2924       // java_lang_Class::_init_lock_offset may not have been initialized
2925       // when generating code. It will be initialized at runtime though.
2926       // So calculate its address and read from it at runtime.
2927       __ pusha();
2928       __ movq(c_rarg0, obj);
2929       AddressLiteral init_lock_offset_address(
2930           (address) java_lang_Class::init_lock_offset_addr(),
2931           relocInfo::none);
2932       __ lea(rax, init_lock_offset_address);
2933       __ movl(rax, Address(rax, 0));
2934       __ addq(c_rarg0, rax);
2935       __ call_VM_leaf(CAST_FROM_FN_PTR(address,
2936                                        SharedRuntime::tsan_acquire),
2937                       c_rarg0);
2938       __ popa();
2939     );
2940   }
2941 }
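
The acquire drawn above on mirror + init_lock_offset pairs with a release performed by the initializing thread when <clinit> finishes, so static accesses are ordered after the initializer's writes. In ordinary synchronization terms (a sketch; the atomic flag stands in for the init-lock slot):

  #include <atomic>

  std::atomic<bool> init_done{false};  // plays the slot at mirror + init_lock_offset
  int a_static_field = 0;

  void clinit_thread() {
    a_static_field = 7;                                // <clinit> writes the statics
    init_done.store(true, std::memory_order_release);  // release at end of init
  }

  void getstatic_thread() {
    while (!init_done.load(std::memory_order_acquire)) { /* wait */ }  // the tsan_acquire
    int v = a_static_field;  // ordered after the initializer's writes
    (void)v;
  }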
2942 
2943 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
2944                                                Register method,
2945                                                Register itable_index,
2946                                                Register flags,
2947                                                bool is_invokevirtual,
2948                                                bool is_invokevfinal, /*unused*/
2949                                                bool is_invokedynamic) {
2950   // setup registers
2951   const Register cache = rcx;
2952   const Register index = rdx;
2953   assert_different_registers(method, flags);
2954   assert_different_registers(method, cache, index);
2955   assert_different_registers(itable_index, flags);
2956   assert_different_registers(itable_index, cache, index);
2957   // determine constant pool cache field offsets
2958   assert(is_invokevirtual == (byte_no == f2_byte), "is_invokevirtual flag redundant");
2959   const int flags_offset = in_bytes(ConstantPoolCache::base_offset() +

3015 }
3016 
3017 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3018   transition(vtos, vtos);
3019 
3020   const Register cache = rcx;
3021   const Register index = rdx;
3022   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3023   const Register off   = rbx;
3024   const Register flags = rax;
3025   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
3026 
3027   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
3028   jvmti_post_field_access(cache, index, is_static, false);
3029   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
3030 
3031   if (!is_static) pop_and_check_object(obj);
3032 
3033   const Address field(obj, off, Address::times_1, 0*wordSize);
3034 
3035   // During a TSAN-instrumented run, move flags into rdx so we can later
3036   // examine whether the field is volatile or has been annotated to be
3037   // ignored by TSAN.
3038   TSAN_RUNTIME_ONLY(__ movl(rdx, flags));
3039 
3040   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;
3041 
3042   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3043   // Make sure we don't need to mask edx after the above shift
3044   assert(btos == 0, "change code, btos != 0");
3045 
3046   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3047 
3048   __ jcc(Assembler::notZero, notByte);
3049   // btos
3050   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3051   __ push(btos);
3052   TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3053       field, rdx, SharedRuntime::tsan_read1, btos));
3054   // Rewrite bytecode to be faster
3055   if (!is_static && rc == may_rewrite) {
3056     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3057   }
3058   __ jmp(Done);
3059 
3060   __ bind(notByte);
3061   __ cmpl(flags, ztos);
3062   __ jcc(Assembler::notEqual, notBool);
3063 
3064   // ztos (same code as btos)
3065   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
3066   __ push(ztos);
3067   TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3068       field, rdx, SharedRuntime::tsan_read1, ztos));
3069   // Rewrite bytecode to be faster
3070   if (!is_static && rc == may_rewrite) {
3071     // use btos rewriting, no truncating to t/f bit is needed for getfield.
3072     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3073   }
3074   __ jmp(Done);
3075 
3076   __ bind(notBool);
3077   __ cmpl(flags, atos);
3078   __ jcc(Assembler::notEqual, notObj);
3079   // atos
3080   do_oop_load(_masm, field, rax);
3081   __ push(atos);
3082   TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3083       field, rdx, UseCompressedOops ? SharedRuntime::tsan_read4
3084                                     : SharedRuntime::tsan_read8,
3085       atos));
3086   if (!is_static && rc == may_rewrite) {
3087     patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3088   }
3089   __ jmp(Done);
3090 
3091   __ bind(notObj);
3092   __ cmpl(flags, itos);
3093   __ jcc(Assembler::notEqual, notInt);
3094   // itos
3095   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3096   __ push(itos);
3097   TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3098       field, rdx, SharedRuntime::tsan_read4, itos));
3099   // Rewrite bytecode to be faster
3100   if (!is_static && rc == may_rewrite) {
3101     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
3102   }
3103   __ jmp(Done);
3104 
3105   __ bind(notInt);
3106   __ cmpl(flags, ctos);
3107   __ jcc(Assembler::notEqual, notChar);
3108   // ctos
3109   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3110   __ push(ctos);
3111   TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3112       field, rdx, SharedRuntime::tsan_read2, ctos));
3113   // Rewrite bytecode to be faster
3114   if (!is_static && rc == may_rewrite) {
3115     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
3116   }
3117   __ jmp(Done);
3118 
3119   __ bind(notChar);
3120   __ cmpl(flags, stos);
3121   __ jcc(Assembler::notEqual, notShort);
3122   // stos
3123   __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3124   __ push(stos);
3125   TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3126       field, rdx, SharedRuntime::tsan_read2, stos));
3127   // Rewrite bytecode to be faster
3128   if (!is_static && rc == may_rewrite) {
3129     patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
3130   }
3131   __ jmp(Done);
3132 
3133   __ bind(notShort);
3134   __ cmpl(flags, ltos);
3135   __ jcc(Assembler::notEqual, notLong);
3136   // ltos
3137   // Generate code as if volatile (x86_32).  There just aren't enough registers to
3138   // save that information and this code is faster than the test.
3139   __ access_load_at(T_LONG, IN_HEAP | MO_RELAXED, noreg /* ltos */, field, noreg, noreg);
3140   __ push(ltos);
3141   TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3142       field, rdx, SharedRuntime::tsan_read8, ltos));
3143   // Rewrite bytecode to be faster
3144   LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
3145   __ jmp(Done);
3146 
3147   __ bind(notLong);
3148   __ cmpl(flags, ftos);
3149   __ jcc(Assembler::notEqual, notFloat);
3150   // ftos
3151 
3152   __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
3153   __ push(ftos);
3154   TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3155       field, rdx, SharedRuntime::tsan_read4, ftos));
3156   // Rewrite bytecode to be faster
3157   if (!is_static && rc == may_rewrite) {
3158     patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
3159   }
3160   __ jmp(Done);
3161 
3162   __ bind(notFloat);
3163 #ifdef ASSERT
3164   Label notDouble;
3165   __ cmpl(flags, dtos);
3166   __ jcc(Assembler::notEqual, notDouble);
3167 #endif
3168   // dtos
3169   // MO_RELAXED: even for a volatile field this adds no extra work in the underlying implementation
3170   __ access_load_at(T_DOUBLE, IN_HEAP | MO_RELAXED, noreg /* dtos */, field, noreg, noreg);
3171   __ push(dtos);
3172   TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3173       field, rdx, SharedRuntime::tsan_read8, dtos));
3174   // Rewrite bytecode to be faster
3175   if (!is_static && rc == may_rewrite) {
3176     patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
3177   }
3178 #ifdef ASSERT
3179   __ jmp(Done);
3180 
3181   __ bind(notDouble);
3182   __ stop("Bad state");
3183 #endif
3184 
3185   __ bind(Done);
3186   // [jk] not needed currently
3187   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3188   //                                              Assembler::LoadStore));
3189 }
3190 
3191 void TemplateTable::getfield(int byte_no) {
3192   getfield_or_static(byte_no, false);
3193 }

3289 
3290 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3291   transition(vtos, vtos);
3292 
3293   const Register cache = rcx;
3294   const Register index = rdx;
3295   const Register obj   = rcx;
3296   const Register off   = rbx;
3297   const Register flags = rax;
3298 
3299   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
3300   jvmti_post_field_mod(cache, index, is_static);
3301   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
3302 
3303   // [jk] not needed currently
3304   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3305   //                                              Assembler::StoreStore));
3306 
3307   Label notVolatile, Done;
3308   __ movl(rdx, flags);
3309 
3310   // Check for volatile store
3311   __ testl(rdx, rdx);
3312   __ jcc(Assembler::zero, notVolatile);
3313 
3314   putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags);
3315   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3316                                                Assembler::StoreStore));
3317   __ jmp(Done);
3318   __ bind(notVolatile);
3319 
3320   putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags);
3321 
3322   __ bind(Done);
3323 }
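
Only the volatile path pays for a fence: the store itself is emitted either way, and the StoreLoad|StoreStore barrier after it is what keeps a volatile write ordered before any later load. Roughly, in std::atomic terms (a sketch; on x86 only the StoreLoad part costs an instruction):

  #include <atomic>

  std::atomic<int> vol_field{0};

  void put_volatile(int v) {
    vol_field.store(v, std::memory_order_release);        // the store itself
    std::atomic_thread_fence(std::memory_order_seq_cst);  // ~ StoreLoad|StoreStore
  }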
3324 
3325 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3326                                               Register obj, Register off, Register flags) {
3327 
3328   // field addresses
3329   const Address field(obj, off, Address::times_1, 0*wordSize);
3330   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3331 
3332   Label notByte, notBool, notInt, notShort, notChar,
3333         notLong, notFloat, notObj;
3334   Label Done;
3335 
3336   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3337 
3338   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3339 
3340   assert(btos == 0, "change code, btos != 0");
3341   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3342   __ jcc(Assembler::notZero, notByte);
3343 
3344   // btos
3345   {
3346     __ pop(btos);
3347     if (!is_static) pop_and_check_object(obj);
3348     TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3349         field, rdx, SharedRuntime::tsan_write1, btos));
3350     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3351     if (!is_static && rc == may_rewrite) {
3352       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3353     }
3354     __ jmp(Done);
3355   }
3356 
3357   __ bind(notByte);
3358   __ cmpl(flags, ztos);
3359   __ jcc(Assembler::notEqual, notBool);
3360 
3361   // ztos
3362   {
3363     __ pop(ztos);
3364     if (!is_static) pop_and_check_object(obj);
3365     TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3366         field, rdx, SharedRuntime::tsan_write1, ztos));
3367     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
3368     if (!is_static && rc == may_rewrite) {
3369       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3370     }
3371     __ jmp(Done);
3372   }
3373 
3374   __ bind(notBool);
3375   __ cmpl(flags, atos);
3376   __ jcc(Assembler::notEqual, notObj);
3377 
3378   // atos
3379   {
3380     __ pop(atos);
3381     if (!is_static) pop_and_check_object(obj);
3382     TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(field, rdx,
3383         UseCompressedOops ? SharedRuntime::tsan_write4
3384                           : SharedRuntime::tsan_write8,
3385         atos));
3386     // Store into the field
3387     do_oop_store(_masm, field, rax);
3388     if (!is_static && rc == may_rewrite) {
3389       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3390     }
3391     __ jmp(Done);
3392   }
3393 
3394   __ bind(notObj);
3395   __ cmpl(flags, itos);
3396   __ jcc(Assembler::notEqual, notInt);
3397 
3398   // itos
3399   {
3400     __ pop(itos);
3401     if (!is_static) pop_and_check_object(obj);
3402     TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3403         field, rdx, SharedRuntime::tsan_write4, itos));
3404     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3405     if (!is_static && rc == may_rewrite) {
3406       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3407     }
3408     __ jmp(Done);
3409   }
3410 
3411   __ bind(notInt);
3412   __ cmpl(flags, ctos);
3413   __ jcc(Assembler::notEqual, notChar);
3414 
3415   // ctos
3416   {
3417     __ pop(ctos);
3418     if (!is_static) pop_and_check_object(obj);
3419     TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3420         field, rdx, SharedRuntime::tsan_write2, ctos));
3421     __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg);
3422     if (!is_static && rc == may_rewrite) {
3423       patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
3424     }
3425     __ jmp(Done);
3426   }
3427 
3428   __ bind(notChar);
3429   __ cmpl(flags, stos);
3430   __ jcc(Assembler::notEqual, notShort);
3431 
3432   // stos
3433   {
3434     __ pop(stos);
3435     if (!is_static) pop_and_check_object(obj);
3436     TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3437         field, rdx, SharedRuntime::tsan_write2, stos));
3438     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
3439     if (!is_static && rc == may_rewrite) {
3440       patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
3441     }
3442     __ jmp(Done);
3443   }
3444 
3445   __ bind(notShort);
3446   __ cmpl(flags, ltos);
3447   __ jcc(Assembler::notEqual, notLong);
3448 
3449   // ltos
3450   {
3451     __ pop(ltos);
3452     if (!is_static) pop_and_check_object(obj);
3453     TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3454         field, rdx, SharedRuntime::tsan_write8, ltos));
3455     // MO_RELAXED: an atomic store is still generated, which covers the volatile case (important on x86_32)
3456     __ access_store_at(T_LONG, IN_HEAP | MO_RELAXED, field, noreg /* ltos*/, noreg, noreg);
3457 #ifdef _LP64
3458     if (!is_static && rc == may_rewrite) {
3459       patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
3460     }
3461 #endif // _LP64
3462     __ jmp(Done);
3463   }
3464 
3465   __ bind(notLong);
3466   __ cmpl(flags, ftos);
3467   __ jcc(Assembler::notEqual, notFloat);
3468 
3469   // ftos
3470   {
3471     __ pop(ftos);
3472     if (!is_static) pop_and_check_object(obj);
3473     TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3474         field, rdx, SharedRuntime::tsan_write4, ftos));
3475     __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
3476     if (!is_static && rc == may_rewrite) {
3477       patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
3478     }
3479     __ jmp(Done);
3480   }
3481 
3482   __ bind(notFloat);
3483 #ifdef ASSERT
3484   Label notDouble;
3485   __ cmpl(flags, dtos);
3486   __ jcc(Assembler::notEqual, notDouble);
3487 #endif
3488 
3489   // dtos
3490   {
3491     __ pop(dtos);
3492     if (!is_static) pop_and_check_object(obj);
3493     TSAN_RUNTIME_ONLY(tsan_observe_get_or_put(
3494         field, rdx, SharedRuntime::tsan_write8, dtos));
3495     // MO_RELAXED: even for a volatile field this adds no extra work in the underlying implementation
3496     __ access_store_at(T_DOUBLE, IN_HEAP | MO_RELAXED, field, noreg /* dtos */, noreg, noreg);
3497     if (!is_static && rc == may_rewrite) {
3498       patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
3499     }
3500   }
3501 
3502 #ifdef ASSERT
3503   __ jmp(Done);
3504 
3505   __ bind(notDouble);
3506   __ stop("Bad state");
3507 #endif
3508 
3509   __ bind(Done);
3510 }
3511 
3512 void TemplateTable::putfield(int byte_no) {
3513   putfield_or_static(byte_no, false);
3514 }

4298     } else {
4299       __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
4300                 (intptr_t)markWord::prototype().value()); // header
4301       __ pop(rcx);   // get saved klass back in the register.
4302     }
4303 #ifdef _LP64
4304     __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
4305     __ store_klass_gap(rax, rsi);  // zero klass gap for compressed oops
4306 #endif
4307     __ store_klass(rax, rcx);  // klass
4308 
4309     {
4310       SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0);
4311       // Trigger dtrace event for fastpath
4312       __ push(atos);
4313       __ call_VM_leaf(
4314            CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), rax);
4315       __ pop(atos);
4316     }
4317 
4318     TSAN_RUNTIME_ONLY(
4319       // return value of new oop is in rax.
4320       __ push(atos);
4321       __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::tsan_track_obj),
4322                       rax);
4323       __ pop(atos);
4324     );
4325 
4326     __ jmp(done);
4327   }
4328 
4329   // slow case
4330   __ bind(slow_case);
4331   __ pop(rcx);   // restore stack pointer to what it was when we came in.
4332   __ bind(slow_case_no_pop);
4333 
4334   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4335   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4336 
4337   __ get_constant_pool(rarg1);
4338   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4339   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4340   __ verify_oop(rax);
4341 
4342   // continue
4343   __ bind(done);
4344 }
4345 