src/cpu/x86/vm/templateTable_x86.cpp (old version)
 332   }
 333 }
 334 
 335 void TemplateTable::lconst(int value) {
 336   transition(vtos, ltos);
 337   if (value == 0) {
 338     __ xorl(rax, rax);
 339   } else {
 340     __ movl(rax, value);
 341   }
 342 #ifndef _LP64
 343   assert(value >= 0, "check this code");
 344   __ xorptr(rdx, rdx);
 345 #endif
 346 }
 347 
 348 
 349 
 350 void TemplateTable::fconst(int value) {
 351   transition(vtos, ftos);
 352 #ifdef _LP64
 353   static float one = 1.0f, two = 2.0f;
 354   switch (value) {
 355   case 0:
 356     __ xorps(xmm0, xmm0);
 357     break;
 358   case 1:
 359     __ movflt(xmm0, ExternalAddress((address) &one));
 360     break;
 361   case 2:
 362     __ movflt(xmm0, ExternalAddress((address) &two));
 363     break;
 364   default:
 365     ShouldNotReachHere();
 366     break;
 367   }
 368 #else
 369          if (value == 0) { __ fldz();
 370   } else if (value == 1) { __ fld1();
 371   } else if (value == 2) { __ fld1(); __ fld1(); __ faddp(); // should find a better solution here
 372   } else                 { ShouldNotReachHere();
 373   }
 374 #endif

 375 }
 376 
 377 void TemplateTable::dconst(int value) {
 378   transition(vtos, dtos);
 379 #ifdef _LP64
 380   static double one = 1.0;
 381   switch (value) {
 382   case 0:
 383     __ xorpd(xmm0, xmm0);
 384     break;
 385   case 1:
 386     __ movdbl(xmm0, ExternalAddress((address) &one));
 387     break;
 388   default:
 389     ShouldNotReachHere();
 390     break;
 391   }
 392 
 393 #else
 394          if (value == 0) { __ fldz();
 395   } else if (value == 1) { __ fld1();
 396   } else                 { ShouldNotReachHere();
 397   }
 398 #endif

 399 }
 400 
 401 void TemplateTable::bipush() {
 402   transition(vtos, itos);
 403   __ load_signed_byte(rax, at_bcp(1));
 404 }
 405 
 406 void TemplateTable::sipush() {
 407   transition(vtos, itos);
 408   __ load_unsigned_short(rax, at_bcp(1));
 409   __ bswapl(rax);
 410   __ sarl(rax, 16);
 411 }
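
A note on the operand decode above (the same idiom reappears in locals_index_wide, wide_iinc, and branch): the 16-bit bytecode operand is stored big-endian, but x86 loads are little-endian, so the code loads the two bytes, byte-swaps the full 32-bit register to move the operand into the top half, and then shifts right by 16 with sign extension. A minimal C++ sketch of what the sequence computes, assuming a GCC/Clang-style __builtin_bswap32 (illustration only, not part of this file):

  #include <cstdint>

  int32_t decode_sipush_operand(const uint8_t* bcp) {
    // load_unsigned_short at bcp+1: a little-endian load of a big-endian operand
    uint32_t v = (uint32_t)bcp[1] | ((uint32_t)bcp[2] << 8);
    v = __builtin_bswap32(v);    // bswapl: operand now occupies bits 31..16
    return (int32_t)v >> 16;     // sarl 16: shift down and sign-extend
  }
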
 412 
 413 void TemplateTable::ldc(bool wide) {
 414   transition(vtos, vtos);
 415   Register rarg = NOT_LP64(rcx) LP64_ONLY(c_rarg1);
 416   Label call_ldc, notFloat, notClass, Done;
 417 
 418   if (wide) {


 437   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClassInError);
 438   __ jccb(Assembler::equal, call_ldc);
 439 
 440   // resolved class - need to call vm to get java mirror of the class
 441   __ cmpl(rdx, JVM_CONSTANT_Class);
 442   __ jcc(Assembler::notEqual, notClass);
 443 
 444   __ bind(call_ldc);
 445 
 446   __ movl(rarg, wide);
 447   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), rarg);
 448 
 449   __ push(atos);
 450   __ jmp(Done);
 451 
 452   __ bind(notClass);
 453   __ cmpl(rdx, JVM_CONSTANT_Float);
 454   __ jccb(Assembler::notEqual, notFloat);
 455 
 456   // ftos
 457   LP64_ONLY(__ movflt(xmm0, Address(rcx, rbx, Address::times_8, base_offset)));
 458   NOT_LP64(__ fld_s(    Address(rcx, rbx, Address::times_ptr, base_offset)));
 459   __ push(ftos);
 460   __ jmp(Done);
 461 
 462   __ bind(notFloat);
 463 #ifdef ASSERT
 464   {
 465     Label L;
 466     __ cmpl(rdx, JVM_CONSTANT_Integer);
 467     __ jcc(Assembler::equal, L);
 468     // String and Object are rewritten to fast_aldc
 469     __ stop("unexpected tag type in ldc");
 470     __ bind(L);
 471   }
 472 #endif
 473   // itos JVM_CONSTANT_Integer only
 474   __ movl(rax, Address(rcx, rbx, Address::times_ptr, base_offset));
 475   __ push(itos);
 476   __ bind(Done);
 477 }
 478 


 505   if (VerifyOops) {
 506     __ verify_oop(result);
 507   }
 508 }
 509 
 510 void TemplateTable::ldc2_w() {
 511   transition(vtos, vtos);
 512   Label Long, Done;
 513   __ get_unsigned_2_byte_index_at_bcp(rbx, 1);
 514 
 515   __ get_cpool_and_tags(rcx, rax);
 516   const int base_offset = ConstantPool::header_size() * wordSize;
 517   const int tags_offset = Array<u1>::base_offset_in_bytes();
 518 
 519   // get type
 520   __ cmpb(Address(rax, rbx, Address::times_1, tags_offset),
 521           JVM_CONSTANT_Double);
 522   __ jccb(Assembler::notEqual, Long);
 523 
 524   // dtos
 525   LP64_ONLY(__ movdbl(xmm0, Address(rcx, rbx, Address::times_8, base_offset)));
 526   NOT_LP64(__ fld_d(    Address(rcx, rbx, Address::times_ptr, base_offset)));
 527   __ push(dtos);
 528 
 529   __ jmpb(Done);
 530   __ bind(Long);
 531 
 532   // ltos
 533   __ movptr(rax, Address(rcx, rbx, Address::times_ptr, base_offset + 0 * wordSize));
 534   NOT_LP64(__ movptr(rdx, Address(rcx, rbx, Address::times_ptr, base_offset + 1 * wordSize)));
 535   __ push(ltos);
 536 
 537   __ bind(Done);
 538 }
 539 
 540 void TemplateTable::locals_index(Register reg, int offset) {
 541   __ load_unsigned_byte(reg, at_bcp(offset));
 542   __ negptr(reg);
 543 }
 544 
 545 void TemplateTable::iload() {
 546   iload_internal();


 600   locals_index(rbx, 3);
 601   __ movl(rax, iaddress(rbx));
 602 }
 603 
 604 void TemplateTable::fast_iload() {
 605   transition(vtos, itos);
 606   locals_index(rbx);
 607   __ movl(rax, iaddress(rbx));
 608 }
 609 
 610 void TemplateTable::lload() {
 611   transition(vtos, ltos);
 612   locals_index(rbx);
 613   __ movptr(rax, laddress(rbx));
 614   NOT_LP64(__ movl(rdx, haddress(rbx)));
 615 }
 616 
 617 void TemplateTable::fload() {
 618   transition(vtos, ftos);
 619   locals_index(rbx);
 620   LP64_ONLY(__ movflt(xmm0, faddress(rbx)));
 621   NOT_LP64(__ fld_s(faddress(rbx)));
 622 }
 623 
 624 void TemplateTable::dload() {
 625   transition(vtos, dtos);
 626   locals_index(rbx);
 627   LP64_ONLY(__ movdbl(xmm0, daddress(rbx)));
 628   NOT_LP64(__ fld_d(daddress(rbx)));
 629 }
 630 
 631 void TemplateTable::aload() {
 632   transition(vtos, atos);
 633   locals_index(rbx);
 634   __ movptr(rax, aaddress(rbx));
 635 }
 636 
 637 void TemplateTable::locals_index_wide(Register reg) {
 638   __ load_unsigned_short(reg, at_bcp(2));
 639   __ bswapl(reg);
 640   __ shrl(reg, 16);
 641   __ negptr(reg);
 642 }
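
locals_index_wide applies the same byte-swap idiom to the wide 16-bit index, but with a logical shift (shrl) since the index is unsigned, and then negates the result: both locals_index helpers yield negative values because local-variable slots are addressed at negative offsets from the locals base register. A sketch under the same assumptions as the sipush example above:

  #include <cstdint>

  intptr_t decode_wide_local_index(const uint8_t* bcp) {
    uint32_t v = (uint32_t)bcp[2] | ((uint32_t)bcp[3] << 8); // load_unsigned_short at bcp+2
    v = __builtin_bswap32(v) >> 16;  // shrl 16: zero-extend, the index is unsigned
    return -(intptr_t)v;             // negptr: slots grow toward lower addresses
  }
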
 643 
 644 void TemplateTable::wide_iload() {
 645   transition(vtos, itos);
 646   locals_index_wide(rbx);
 647   __ movl(rax, iaddress(rbx));
 648 }
 649 
 650 void TemplateTable::wide_lload() {
 651   transition(vtos, ltos);
 652   locals_index_wide(rbx);
 653   __ movptr(rax, laddress(rbx));
 654   NOT_LP64(__ movl(rdx, haddress(rbx)));
 655 }
 656 
 657 void TemplateTable::wide_fload() {
 658   transition(vtos, ftos);
 659   locals_index_wide(rbx);
 660   LP64_ONLY(__ movflt(xmm0, faddress(rbx)));
 661   NOT_LP64(__ fld_s(faddress(rbx)));
 662 }
 663 
 664 void TemplateTable::wide_dload() {
 665   transition(vtos, dtos);
 666   locals_index_wide(rbx);
 667   LP64_ONLY(__ movdbl(xmm0, daddress(rbx)));
 668   NOT_LP64(__ fld_d(daddress(rbx)));
 669 }
 670 
 671 void TemplateTable::wide_aload() {
 672   transition(vtos, atos);
 673   locals_index_wide(rbx);
 674   __ movptr(rax, aaddress(rbx));
 675 }
 676 
 677 void TemplateTable::index_check(Register array, Register index) {
 678   // Pop ptr into array
 679   __ pop_ptr(array);
 680   index_check_without_pop(array, index);
 681 }
 682 
 683 void TemplateTable::index_check_without_pop(Register array, Register index) {
 684   // destroys rbx
 685   // check array
 686   __ null_check(array, arrayOopDesc::length_offset_in_bytes());
 687   // sign extend index for use by indexed load
 688   __ movl2ptr(index, index);


 709 }
 710 
 711 void TemplateTable::laload() {
 712   transition(itos, ltos);
 713   // rax: index
 714   // rdx: array
 715   index_check(rdx, rax); // kills rbx
 716   NOT_LP64(__ mov(rbx, rax));
 717   // rbx,: index
 718   __ movptr(rax, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize));
 719   NOT_LP64(__ movl(rdx, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize)));
 720 }
 721 
 722 
 723 
 724 void TemplateTable::faload() {
 725   transition(itos, ftos);
 726   // rax: index
 727   // rdx: array
 728   index_check(rdx, rax); // kills rbx
 729   LP64_ONLY(__ movflt(xmm0, Address(rdx, rax,
 730                          Address::times_4,
 731                          arrayOopDesc::base_offset_in_bytes(T_FLOAT))));
 732   NOT_LP64(__ fld_s(Address(rdx, rax, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_FLOAT))));
 733 }
 734 
 735 void TemplateTable::daload() {
 736   transition(itos, dtos);
 737   // rax: index
 738   // rdx: array
 739   index_check(rdx, rax); // kills rbx
 740   LP64_ONLY(__ movdbl(xmm0, Address(rdx, rax,
 741                           Address::times_8,
 742                           arrayOopDesc::base_offset_in_bytes(T_DOUBLE))));
 743   NOT_LP64(__ fld_d(Address(rdx, rax, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE))));
 744 }
 745 
 746 void TemplateTable::aaload() {
 747   transition(itos, atos);
 748   // rax: index
 749   // rdx: array
 750   index_check(rdx, rax); // kills rbx
 751   __ load_heap_oop(rax, Address(rdx, rax,
 752                                 UseCompressedOops ? Address::times_4 : Address::times_ptr,
 753                                 arrayOopDesc::base_offset_in_bytes(T_OBJECT)));
 754 }
 755 
 756 void TemplateTable::baload() {
 757   transition(itos, itos);
 758   // rax: index
 759   // rdx: array
 760   index_check(rdx, rax); // kills rbx
 761   __ load_signed_byte(rax, Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)));
 762 }
 763 


 790   transition(itos, itos);
 791   // rax: index
 792   // rdx: array
 793   index_check(rdx, rax); // kills rbx
 794   __ load_signed_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)));
 795 }
 796 
 797 void TemplateTable::iload(int n) {
 798   transition(vtos, itos);
 799   __ movl(rax, iaddress(n));
 800 }
 801 
 802 void TemplateTable::lload(int n) {
 803   transition(vtos, ltos);
 804   __ movptr(rax, laddress(n));
 805   NOT_LP64(__ movptr(rdx, haddress(n)));
 806 }
 807 
 808 void TemplateTable::fload(int n) {
 809   transition(vtos, ftos);
 810   LP64_ONLY(__ movflt(xmm0, faddress(n)));
 811   NOT_LP64(__ fld_s(faddress(n)));
 812 }
 813 
 814 void TemplateTable::dload(int n) {
 815   transition(vtos, dtos);
 816   LP64_ONLY(__ movdbl(xmm0, daddress(n)));
 817   NOT_LP64(__ fld_d(daddress(n)));
 818 }
 819 
 820 void TemplateTable::aload(int n) {
 821   transition(vtos, atos);
 822   __ movptr(rax, aaddress(n));
 823 }
 824 
 825 void TemplateTable::aload_0() {
 826   aload_0_internal();
 827 }
 828 
 829 void TemplateTable::nofast_aload_0() {
 830   aload_0_internal(may_not_rewrite);
 831 }
 832 
 833 void TemplateTable::aload_0_internal(RewriteControl rc) {
 834   transition(vtos, atos);
 835   // According to bytecode histograms, the pairs:
 836   //
 837   // _aload_0, _fast_igetfield


 902   }
 903 }
 904 
 905 void TemplateTable::istore() {
 906   transition(itos, vtos);
 907   locals_index(rbx);
 908   __ movl(iaddress(rbx), rax);
 909 }
 910 
 911 
 912 void TemplateTable::lstore() {
 913   transition(ltos, vtos);
 914   locals_index(rbx);
 915   __ movptr(laddress(rbx), rax);
 916   NOT_LP64(__ movptr(haddress(rbx), rdx));
 917 }
 918 
 919 void TemplateTable::fstore() {
 920   transition(ftos, vtos);
 921   locals_index(rbx);
 922   LP64_ONLY(__ movflt(faddress(rbx), xmm0));
 923   NOT_LP64(__ fstp_s(faddress(rbx)));
 924 }
 925 
 926 void TemplateTable::dstore() {
 927   transition(dtos, vtos);
 928   locals_index(rbx);
 929   LP64_ONLY(__ movdbl(daddress(rbx), xmm0));
 930   NOT_LP64(__ fstp_d(daddress(rbx)));
 931 }
 932 
 933 void TemplateTable::astore() {
 934   transition(vtos, vtos);
 935   __ pop_ptr(rax);
 936   locals_index(rbx);
 937   __ movptr(aaddress(rbx), rax);
 938 }
 939 
 940 void TemplateTable::wide_istore() {
 941   transition(vtos, vtos);
 942   __ pop_i();
 943   locals_index_wide(rbx);
 944   __ movl(iaddress(rbx), rax);
 945 }
 946 
 947 void TemplateTable::wide_lstore() {
 948   transition(vtos, vtos);
 949   NOT_LP64(__ pop_l(rax, rdx));
 950   LP64_ONLY(__ pop_l());
 951   locals_index_wide(rbx);
 952   __ movptr(laddress(rbx), rax);
 953   NOT_LP64(__ movl(haddress(rbx), rdx));
 954 }
 955 
 956 void TemplateTable::wide_fstore() {
 957 #ifdef _LP64
 958   transition(vtos, vtos);
 959   __ pop_f();
 960   locals_index_wide(rbx);
 961   __ movflt(faddress(rbx), xmm0);
 962 #else
 963   wide_istore();
 964 #endif
 965 }
 966 
 967 void TemplateTable::wide_dstore() {
 968 #ifdef _LP64
 969   transition(vtos, vtos);
 970   __ pop_d();
 971   locals_index_wide(rbx);
 972   __ movdbl(daddress(rbx), xmm0);
 973 #else
 974   wide_lstore();
 975 #endif
 976 }
 977 
 978 void TemplateTable::wide_astore() {
 979   transition(vtos, vtos);
 980   __ pop_ptr(rax);
 981   locals_index_wide(rbx);
 982   __ movptr(aaddress(rbx), rax);
 983 }
 984 
 985 void TemplateTable::iastore() {
 986   transition(itos, vtos);
 987   __ pop_i(rbx);
 988   // rax: value
 989   // rbx: index
 990   // rdx: array


 994                   arrayOopDesc::base_offset_in_bytes(T_INT)),
 995           rax);
 996 }
 997 
 998 void TemplateTable::lastore() {
 999   transition(ltos, vtos);
1000   __ pop_i(rbx);
1001   // rax,: low(value)
1002   // rcx: array
1003   // rdx: high(value)
1004   index_check(rcx, rbx);  // prefer index in rbx,
1005   // rbx,: index
1006   __ movptr(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize), rax);
1007   NOT_LP64(__ movl(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize), rdx));
1008 }
1009 
1010 
1011 void TemplateTable::fastore() {
1012   transition(ftos, vtos);
1013   __ pop_i(rbx);
1014   // xmm0: value
1015   // rbx:  index
1016   // rdx:  array
1017   index_check(rdx, rbx); // prefer index in rbx
1018   LP64_ONLY(__ movflt(Address(rdx, rbx,
1019                    Address::times_4,
1020                    arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1021            xmm0));
1022   NOT_LP64(__ fstp_s(Address(rdx, rbx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_FLOAT))));
1023 }
1024 
1025 void TemplateTable::dastore() {
1026   transition(dtos, vtos);
1027   __ pop_i(rbx);
1028   // xmm0: value
1029   // rbx:  index
1030   // rdx:  array
1031   index_check(rdx, rbx); // prefer index in rbx
1032   LP64_ONLY(__ movdbl(Address(rdx, rbx,
1033                    Address::times_8,
1034                    arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1035            xmm0));
1036   NOT_LP64(__ fstp_d(Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE))));
1037 }
1038 
1039 void TemplateTable::aastore() {
1040   Label is_null, ok_is_subtype, done;
1041   transition(vtos, vtos);
1042   // stack: ..., array, index, value
1043   __ movptr(rax, at_tos());    // value
1044   __ movl(rcx, at_tos_p1()); // index
1045   __ movptr(rdx, at_tos_p2()); // array
1046 
1047   Address element_address(rdx, rcx,
1048                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1049                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1050 
1051   index_check_without_pop(rdx, rcx);     // kills rbx
1052   __ testptr(rax, rax);
1053   __ jcc(Assembler::zero, is_null);
1054 
1055   // Move subklass into rbx
1056   __ load_klass(rbx, rax);


1117 }
1118 
1119 
1120 void TemplateTable::sastore() {
1121   castore();
1122 }
1123 
1124 void TemplateTable::istore(int n) {
1125   transition(itos, vtos);
1126   __ movl(iaddress(n), rax);
1127 }
1128 
1129 void TemplateTable::lstore(int n) {
1130   transition(ltos, vtos);
1131   __ movptr(laddress(n), rax);
1132   NOT_LP64(__ movptr(haddress(n), rdx));
1133 }
1134 
1135 void TemplateTable::fstore(int n) {
1136   transition(ftos, vtos);
1137   LP64_ONLY(__ movflt(faddress(n), xmm0));
1138   NOT_LP64(__ fstp_s(faddress(n)));
1139 }
1140 
1141 void TemplateTable::dstore(int n) {
1142   transition(dtos, vtos);
1143   LP64_ONLY(__ movdbl(daddress(n), xmm0));
1144   NOT_LP64(__ fstp_d(daddress(n)));
1145 }
1146 
1147 
1148 void TemplateTable::astore(int n) {
1149   transition(vtos, vtos);
1150   __ pop_ptr(rax);
1151   __ movptr(aaddress(n), rax);
1152 }
1153 
1154 void TemplateTable::pop() {
1155   transition(vtos, vtos);
1156   __ addptr(rsp, Interpreter::stackElementSize);
1157 }
1158 
1159 void TemplateTable::pop2() {
1160   transition(vtos, vtos);
1161   __ addptr(rsp, 2 * Interpreter::stackElementSize);
1162 }
1163 
1164 


1408   __ pop_l(rax, rdx);                            // get shift value
1409   __ lshr(rdx, rax, true);
1410 #endif
1411 }
1412 
1413 void TemplateTable::lushr() {
1414   transition(itos, ltos);
1415 #ifdef _LP64
1416   __ movl(rcx, rax);                             // get shift count
1417   __ pop_l(rax);                                 // get shift value
1418   __ shrq(rax);
1419 #else
1420   __ mov(rcx, rax);                              // get shift count
1421   __ pop_l(rax, rdx);                            // get shift value
1422   __ lshr(rdx, rax);
1423 #endif
1424 }
1425 
1426 void TemplateTable::fop2(Operation op) {
1427   transition(ftos, ftos);
1428 #ifdef _LP64

1429   switch (op) {
1430   case add:
1431     __ addss(xmm0, at_rsp());
1432     __ addptr(rsp, Interpreter::stackElementSize);
1433     break;
1434   case sub:
1435     __ movflt(xmm1, xmm0);
1436     __ pop_f(xmm0);
1437     __ subss(xmm0, xmm1);
1438     break;
1439   case mul:
1440     __ mulss(xmm0, at_rsp());
1441     __ addptr(rsp, Interpreter::stackElementSize);
1442     break;
1443   case div:
1444     __ movflt(xmm1, xmm0);
1445     __ pop_f(xmm0);
1446     __ divss(xmm0, xmm1);
1447     break;
1448   case rem:
1449     __ movflt(xmm1, xmm0);
1450     __ pop_f(xmm0);
1451     __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::frem), 2);
1452     break;
1453   default:
1454     ShouldNotReachHere();
1455     break;
1456   }
1457 #else
1458   switch (op) {
1459     case add: __ fadd_s (at_rsp());                break;
1460     case sub: __ fsubr_s(at_rsp());                break;
1461     case mul: __ fmul_s (at_rsp());                break;
1462     case div: __ fdivr_s(at_rsp());                break;
1463     case rem: __ fld_s  (at_rsp()); __ fremr(rax); break;
1464     default : ShouldNotReachHere();
1465   }
1466   __ f2ieee();
1467   __ pop(rax);  // pop float thing off
1468 #endif

1469 }
1470 
1471 void TemplateTable::dop2(Operation op) {
1472   transition(dtos, dtos);
1473 #ifdef _LP64
1474   switch (op) {
1475   case add:
1476     __ addsd(xmm0, at_rsp());
1477     __ addptr(rsp, 2 * Interpreter::stackElementSize);
1478     break;
1479   case sub:
1480     __ movdbl(xmm1, xmm0);
1481     __ pop_d(xmm0);
1482     __ subsd(xmm0, xmm1);
1483     break;
1484   case mul:
1485     __ mulsd(xmm0, at_rsp());
1486     __ addptr(rsp, 2 * Interpreter::stackElementSize);
1487     break;
1488   case div:
1489     __ movdbl(xmm1, xmm0);
1490     __ pop_d(xmm0);
1491     __ divsd(xmm0, xmm1);
1492     break;
1493   case rem:
1494     __ movdbl(xmm1, xmm0);
1495     __ pop_d(xmm0);
1496     __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::drem), 2);
1497     break;
1498   default:
1499     ShouldNotReachHere();
1500     break;
1501   }
1502 #else
1503   switch (op) {
1504     case add: __ fadd_d (at_rsp());                break;
1505     case sub: __ fsubr_d(at_rsp());                break;
1506     case mul: {
1507       Label L_strict;
1508       Label L_join;
1509       const Address access_flags      (rcx, Method::access_flags_offset());
1510       __ get_method(rcx);
1511       __ movl(rcx, access_flags);
1512       __ testl(rcx, JVM_ACC_STRICT);
1513       __ jccb(Assembler::notZero, L_strict);
1514       __ fmul_d (at_rsp());
1515       __ jmpb(L_join);
1516       __ bind(L_strict);
1517       __ fld_x(ExternalAddress(StubRoutines::addr_fpu_subnormal_bias1()));
1518       __ fmulp();
1519       __ fmul_d (at_rsp());
1520       __ fld_x(ExternalAddress(StubRoutines::addr_fpu_subnormal_bias2()));
1521       __ fmulp();


1532       __ jccb(Assembler::notZero, L_strict);
1533       __ fdivr_d(at_rsp());
1534       __ jmp(L_join);
1535       __ bind(L_strict);
1536       __ fld_x(ExternalAddress(StubRoutines::addr_fpu_subnormal_bias1()));
1537       __ fmul_d (at_rsp());
1538       __ fdivrp();
1539       __ fld_x(ExternalAddress(StubRoutines::addr_fpu_subnormal_bias2()));
1540       __ fmulp();
1541       __ bind(L_join);
1542       break;
1543     }
1544     case rem: __ fld_d  (at_rsp()); __ fremr(rax); break;
1545     default : ShouldNotReachHere();
1546   }
1547   __ d2ieee();
1548   // Pop double precision number from rsp.
1549   __ pop(rax);
1550   __ pop(rdx);
1551 #endif

1552 }
1553 
1554 void TemplateTable::ineg() {
1555   transition(itos, itos);
1556   __ negl(rax);
1557 }
1558 
1559 void TemplateTable::lneg() {
1560   transition(ltos, ltos);
1561   LP64_ONLY(__ negq(rax));
1562   NOT_LP64(__ lneg(rdx, rax));
1563 }
1564 
1565 #ifdef _LP64
1566 // Note: 'double' and 'long long' have 32-bits alignment on x86.
1567 static jlong* double_quadword(jlong *adr, jlong lo, jlong hi) {
1568   // Use the expression (adr)&(~0xF) to provide 128-bits aligned address
1569   // of 128-bits operands for SSE instructions.
1570   jlong *operand = (jlong*)(((intptr_t)adr)&((intptr_t)(~0xF)));
1571   // Store the value to a 128-bits operand.
1572   operand[0] = lo;
1573   operand[1] = hi;
1574   return operand;
1575 }
1576 
1577 // Buffer for 128-bits masks used by SSE instructions.
1578 static jlong float_signflip_pool[2*2];
1579 static jlong double_signflip_pool[2*2];
1580 #endif
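
double_quadword carves a 16-byte-aligned 128-bit slot out of a pool that is deliberately twice the required size: masking the address with ~0xF rounds down to the previous 16-byte boundary, and the extra slack guarantees the rounded-down slot still lies entirely inside the pool. A standalone sketch of the trick (hypothetical names):

  #include <cstdint>

  static int64_t pool[2 * 2];  // twice the needed size, so an aligned window always fits

  int64_t* aligned_quadword(int64_t* adr, int64_t lo, int64_t hi) {
    int64_t* p = (int64_t*)((intptr_t)adr & ~(intptr_t)0xF); // round down to 16 bytes
    p[0] = lo;  // low  64 bits of the 128-bit SSE operand
    p[1] = hi;  // high 64 bits
    return p;
  }
  // Mirroring the calls below: aligned_quadword(&pool[1], m, m) always stays in pool[0..3].
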
1581 
1582 void TemplateTable::fneg() {
1583   transition(ftos, ftos);
1584 #ifdef _LP64
1585   static jlong *float_signflip  = double_quadword(&float_signflip_pool[1], 0x8000000080000000, 0x8000000080000000);
1586   __ xorps(xmm0, ExternalAddress((address) float_signflip));
1587 #else
1588   __ fchs();
1589 #endif

1590 }
1591 
1592 void TemplateTable::dneg() {
1593   transition(dtos, dtos);
1594 #ifdef _LP64
1595   static jlong *double_signflip  = double_quadword(&double_signflip_pool[1], 0x8000000000000000, 0x8000000000000000);
1596   __ xorpd(xmm0, ExternalAddress((address) double_signflip));
1597 #else
1598   __ fchs();
1599 #endif

1600 }
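
fneg and dneg negate by XOR-ing the IEEE-754 sign bit with one of the 128-bit masks built above rather than using an arithmetic negate; this flips the sign of every value, including zeros and NaNs, without touching any other bits. The scalar equivalent, as a sketch:

  #include <cstdint>
  #include <cstring>

  float fneg(float f) {
    uint32_t bits;
    std::memcpy(&bits, &f, sizeof bits);
    bits ^= 0x80000000u;  // flip the sign bit, exactly what xorps does per lane
    std::memcpy(&f, &bits, sizeof bits);
    return f;
  }
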
1601 
1602 void TemplateTable::iinc() {
1603   transition(vtos, vtos);
1604   __ load_signed_byte(rdx, at_bcp(2)); // get constant
1605   locals_index(rbx);
1606   __ addl(iaddress(rbx), rdx);
1607 }
1608 
1609 void TemplateTable::wide_iinc() {
1610   transition(vtos, vtos);
1611   __ movl(rdx, at_bcp(4)); // get constant
1612   locals_index_wide(rbx);
1613   __ bswapl(rdx); // swap bytes & sign-extend constant
1614   __ sarl(rdx, 16);
1615   __ addl(iaddress(rbx), rdx);
1616   // Note: should probably use only one movl to get both
1617   //       the index and the constant -> fix this
1618 }
1619 


1781       case Bytecodes::_d2l: tos_out = ltos; break;
1782       case Bytecodes::_i2f: // fall through
1783       case Bytecodes::_l2f: // fall through
1784       case Bytecodes::_d2f: tos_out = ftos; break;
1785       case Bytecodes::_i2d: // fall through
1786       case Bytecodes::_l2d: // fall through
1787       case Bytecodes::_f2d: tos_out = dtos; break;
1788       default             : ShouldNotReachHere();
1789     }
1790     transition(tos_in, tos_out);
1791   }
1792 #endif // ASSERT
1793 
1794   // Conversion
1795   // (Note: use push(rcx)/pop(rcx) for 1/2-word stack-ptr manipulation)
1796   switch (bytecode()) {
1797     case Bytecodes::_i2l:
1798       __ extend_sign(rdx, rax);
1799       break;
1800     case Bytecodes::_i2f:
1801       __ push(rax);          // store int on tos
1802       __ fild_s(at_rsp());   // load int to ST0
1803       __ f2ieee();           // truncate to float size
1804       __ pop(rcx);           // adjust rsp

1805       break;
1806     case Bytecodes::_i2d:
1807       __ push(rax);          // add one slot for d2ieee()
1808       __ push(rax);          // store int on tos
1809       __ fild_s(at_rsp());   // load int to ST0
1810       __ d2ieee();           // truncate to double size
1811       __ pop(rcx);           // adjust rsp
1812       __ pop(rcx);

1813       break;
1814     case Bytecodes::_i2b:
1815       __ shll(rax, 24);      // truncate upper 24 bits
1816       __ sarl(rax, 24);      // and sign-extend byte
1817       LP64_ONLY(__ movsbl(rax, rax));
1818       break;
1819     case Bytecodes::_i2c:
1820       __ andl(rax, 0xFFFF);  // truncate upper 16 bits
1821       LP64_ONLY(__ movzwl(rax, rax));
1822       break;
1823     case Bytecodes::_i2s:
1824       __ shll(rax, 16);      // truncate upper 16 bits
1825       __ sarl(rax, 16);      // and sign-extend short
1826       LP64_ONLY(__ movswl(rax, rax));
1827       break;
1828     case Bytecodes::_l2i:
1829       /* nothing to do */
1830       break;
1831     case Bytecodes::_l2f:
1832       __ push(rdx);          // store long on tos
1833       __ push(rax);
1834       __ fild_d(at_rsp());   // load long to ST0
1835       __ f2ieee();           // truncate to float size
1836       __ pop(rcx);           // adjust rsp
1837       __ pop(rcx);
1838       break;
1839     case Bytecodes::_l2d:
1840       __ push(rdx);          // store long on tos
1841       __ push(rax);
1842       __ fild_d(at_rsp());   // load long to ST0
1843       __ d2ieee();           // truncate to double size
1844       __ pop(rcx);           // adjust rsp
1845       __ pop(rcx);
1846       break;
1847     case Bytecodes::_f2i:
1848       __ push(rcx);          // reserve space for argument
1849       __ fstp_s(at_rsp());   // pass float argument on stack

1850       __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::f2i), 1);
1851       break;
1852     case Bytecodes::_f2l:
1853       __ push(rcx);          // reserve space for argument
1854       __ fstp_s(at_rsp());   // pass float argument on stack

1855       __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::f2l), 1);
1856       break;
1857     case Bytecodes::_f2d:

1858       /* nothing to do */
1859       break;
1860     case Bytecodes::_d2i:
1861       __ push(rcx);          // reserve space for argument
1862       __ push(rcx);
1863       __ fstp_d(at_rsp());   // pass double argument on stack

1864       __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::d2i), 2);
1865       break;
1866     case Bytecodes::_d2l:
1867       __ push(rcx);          // reserve space for argument
1868       __ push(rcx);
1869       __ fstp_d(at_rsp());   // pass double argument on stack

1870       __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::d2l), 2);
1871       break;
1872     case Bytecodes::_d2f:

1873       __ push(rcx);          // reserve space for f2ieee()
1874       __ f2ieee();           // truncate to float size
1875       __ pop(rcx);           // adjust rsp
1876       break;
1877     default             :
1878       ShouldNotReachHere();
1879   }
1880 #endif
1881 }
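
The narrowing cases above (i2b, i2c, i2s) use the classic shift-pair idiom: shift left to discard the upper bits, then arithmetic-shift right to sign-extend; i2c masks instead, since char is unsigned. Equivalent C++ for reference (the unsigned cast sidesteps the formally implementation-defined left shift of a negative value; the result is exactly what the generated shll/sarl pair computes):

  #include <cstdint>

  int32_t i2b(int32_t v) { return (int32_t)((uint32_t)v << 24) >> 24; } // shll 24; sarl 24
  int32_t i2c(int32_t v) { return v & 0xFFFF; }                         // andl 0xFFFF
  int32_t i2s(int32_t v) { return (int32_t)((uint32_t)v << 16) >> 16; } // shll 16; sarl 16
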
1882 
1883 void TemplateTable::lcmp() {
1884   transition(ltos, itos);
1885 #ifdef _LP64
1886   Label done;
1887   __ pop_l(rdx);
1888   __ cmpq(rdx, rax);
1889   __ movl(rax, -1);
1890   __ jccb(Assembler::less, done);
1891   __ setb(Assembler::notEqual, rax);
1892   __ movzbl(rax, rax);
1893   __ bind(done);
1894 #else
1895 
1896   // y = rdx:rax
1897   __ pop_l(rbx, rcx);             // get x = rcx:rbx
1898   __ lcmp2int(rcx, rbx, rdx, rax);// rcx := cmp(x, y)
1899   __ mov(rax, rcx);
1900 #endif
1901 }
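
The 64-bit lcmp sequence computes the three-way result with only one short branch: rax starts at -1 and is kept when x < y; otherwise setb(notEqual) leaves 0 for equal and 1 for greater. In C++ terms (x is the value popped into rdx, y the incoming tos value), a sketch:

  #include <cstdint>

  int lcmp(int64_t x, int64_t y) {
    if (x < y) return -1;    // movl(rax, -1); jccb(less, done)
    return x != y ? 1 : 0;   // setb(notEqual, rax); movzbl(rax, rax)
  }
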
1902 
1903 void TemplateTable::float_cmp(bool is_float, int unordered_result) {
1904 #ifdef _LP64

1905   Label done;
1906   if (is_float) {
1907     // XXX get rid of pop here, use ... reg, mem32
1908     __ pop_f(xmm1);
1909     __ ucomiss(xmm1, xmm0);
1910   } else {
1911     // XXX get rid of pop here, use ... reg, mem64
1912     __ pop_d(xmm1);
1913     __ ucomisd(xmm1, xmm0);
1914   }
1915   if (unordered_result < 0) {
1916     __ movl(rax, -1);
1917     __ jccb(Assembler::parity, done);
1918     __ jccb(Assembler::below, done);
1919     __ setb(Assembler::notEqual, rdx);
1920     __ movzbl(rax, rdx);
1921   } else {
1922     __ movl(rax, 1);
1923     __ jccb(Assembler::parity, done);
1924     __ jccb(Assembler::above, done);
1925     __ movl(rax, 0);
1926     __ jccb(Assembler::equal, done);
1927     __ decrementl(rax);
1928   }
1929   __ bind(done);
1930 #else
1931   if (is_float) {
1932     __ fld_s(at_rsp());
1933   } else {
1934     __ fld_d(at_rsp());
1935     __ pop(rdx);
1936   }
1937   __ pop(rcx);
1938   __ fcmp2int(rax, unordered_result < 0);
1939 #endif

1940 }
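
float_cmp serves both compare flavors the JVM defines: fcmpl/dcmpl (unordered_result < 0, a NaN operand yields -1) and fcmpg/dcmpg (NaN yields +1). After ucomiss/ucomisd the parity flag signals exactly that unordered case, which is why both paths test parity first. The logic, sketched in C++:

  int float_cmp(float x, float y, int unordered_result) {
    if (x != x || y != y)                    // unordered: the parity-flag case
      return unordered_result < 0 ? -1 : 1;
    if (x < y) return -1;
    if (x > y) return  1;
    return 0;
  }
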
1941 
1942 void TemplateTable::branch(bool is_jsr, bool is_wide) {
1943   __ get_method(rcx); // rcx holds method
1944   __ profile_taken_branch(rax, rbx); // rax holds updated MDP, rbx
1945                                      // holds bumped taken count
1946 
1947   const ByteSize be_offset = MethodCounters::backedge_counter_offset() +
1948                              InvocationCounter::counter_offset();
1949   const ByteSize inv_offset = MethodCounters::invocation_counter_offset() +
1950                               InvocationCounter::counter_offset();
1951 
1952   // Load up edx with the branch displacement
1953   if (is_wide) {
1954     __ movl(rdx, at_bcp(1));
1955   } else {
1956     __ load_signed_short(rdx, at_bcp(1));
1957   }
1958   __ bswapl(rdx);
1959 


2730   // save that information and this code is faster than the test.
2731   __ fild_d(field);                // Must load atomically
2732   __ subptr(rsp,2*wordSize);    // Make space for store
2733   __ fistp_d(Address(rsp,0));
2734   __ pop(rax);
2735   __ pop(rdx);
2736 #else
2737   __ movq(rax, field);
2738 #endif
2739 
2740   __ push(ltos);
2741   // Rewrite bytecode to be faster
2742   LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
2743   __ jmp(Done);
2744 
2745   __ bind(notLong);
2746   __ cmpl(flags, ftos);
2747   __ jcc(Assembler::notEqual, notFloat);
2748   // ftos
2749 
2750   LP64_ONLY(__ movflt(xmm0, field));
2751   NOT_LP64(__ fld_s(field));
2752   __ push(ftos);
2753   // Rewrite bytecode to be faster
2754   if (!is_static && rc == may_rewrite) {
2755     patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
2756   }
2757   __ jmp(Done);
2758 
2759   __ bind(notFloat);
2760 #ifdef ASSERT
2761   __ cmpl(flags, dtos);
2762   __ jcc(Assembler::notEqual, notDouble);
2763 #endif
2764   // dtos
2765   LP64_ONLY(__ movdbl(xmm0, field));
2766   NOT_LP64(__ fld_d(field));
2767   __ push(dtos);
2768   // Rewrite bytecode to be faster
2769   if (!is_static && rc == may_rewrite) {
2770     patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
2771   }
2772 #ifdef ASSERT
2773   __ jmp(Done);
2774 
2775 
2776   __ bind(notDouble);
2777   __ stop("Bad state");
2778 #endif
2779 
2780   __ bind(Done);
2781   // [jk] not needed currently
2782   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
2783   //                                              Assembler::LoadStore));
2784 }
2785 
2786 void TemplateTable::getfield(int byte_no) {


3028 
3029     __ bind(notVolatileLong);
3030 
3031     __ pop(ltos);  // overwrites rdx
3032     if (!is_static) pop_and_check_object(obj);
3033     __ movptr(hi, rdx);
3034     __ movptr(field, rax);
3035     // Don't rewrite to _fast_lputfield for potential volatile case.
3036     __ jmp(notVolatile);
3037   }
3038 #endif // _LP64
3039 
3040   __ bind(notLong);
3041   __ cmpl(flags, ftos);
3042   __ jcc(Assembler::notEqual, notFloat);
3043 
3044   // ftos
3045   {
3046     __ pop(ftos);
3047     if (!is_static) pop_and_check_object(obj);
3048     NOT_LP64( __ fstp_s(field);)
3049     LP64_ONLY( __ movflt(field, xmm0);)
3050     if (!is_static && rc == may_rewrite) {
3051       patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
3052     }
3053     __ jmp(Done);
3054   }
3055 
3056   __ bind(notFloat);
3057 #ifdef ASSERT
3058   __ cmpl(flags, dtos);
3059   __ jcc(Assembler::notEqual, notDouble);
3060 #endif
3061 
3062   // dtos
3063   {
3064     __ pop(dtos);
3065     if (!is_static) pop_and_check_object(obj);
3066     NOT_LP64( __ fstp_d(field);)
3067     LP64_ONLY( __ movdbl(field, xmm0);)
3068     if (!is_static && rc == may_rewrite) {
3069       patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
3070     }
3071   }
3072 
3073 #ifdef ASSERT
3074   __ jmp(Done);
3075 
3076   __ bind(notDouble);
3077   __ stop("Bad state");
3078 #endif
3079 
3080   __ bind(Done);
3081 
3082   // Check for volatile store
3083   __ testl(rdx, rdx);
3084   __ jcc(Assembler::zero, notVolatile);
3085   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3086                                                Assembler::StoreStore));
3087   __ bind(notVolatile);
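
The barrier after a volatile store is the one ordering x86 does not give for free: StoreLoad requires a serializing instruction, while the other orderings are implicit under x86-TSO. What the interpreter builds by hand here corresponds, roughly, to a sequentially consistent store in C++:

  #include <atomic>

  std::atomic<int> field;  // stand-in for a Java volatile int field

  void put_volatile(int v) {
    // On x86 a seq_cst store compiles to a plain mov followed by mfence,
    // or to a single xchg: a store plus the StoreLoad barrier above.
    field.store(v, std::memory_order_seq_cst);
  }
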


3105 
3106   if (JvmtiExport::can_post_field_modification()) {
3107     // Check to see if a field modification watch has been set before
3108     // we take the time to call into the VM.
3109     Label L2;
3110     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3111     __ testl(scratch, scratch);
3112     __ jcc(Assembler::zero, L2);
3113     __ pop_ptr(rbx);                  // copy the object pointer from tos
3114     __ verify_oop(rbx);
3115     __ push_ptr(rbx);                 // put the object pointer back on tos
3116     // Save tos values before call_VM() clobbers them. Since we have
3117     // to do it for every data type, we use the saved values as the
3118     // jvalue object.
3119     switch (bytecode()) {          // load values into the jvalue object
3120     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3121     case Bytecodes::_fast_bputfield: // fall through
3122     case Bytecodes::_fast_sputfield: // fall through
3123     case Bytecodes::_fast_cputfield: // fall through
3124     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3125     case Bytecodes::_fast_dputfield: __ push_d(); break;
3126     case Bytecodes::_fast_fputfield: __ push_f(); break;
3127     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3128 
3129     default:
3130       ShouldNotReachHere();
3131     }
3132     __ mov(scratch, rsp);             // points to jvalue on the stack
3133     // access constant pool cache entry
3134     LP64_ONLY(__ get_cache_entry_pointer_at_bcp(c_rarg2, rax, 1));
3135     NOT_LP64(__ get_cache_entry_pointer_at_bcp(rax, rdx, 1));
3136     __ verify_oop(rbx);
3137     // rbx: object pointer copied above
3138     // c_rarg2: cache entry pointer
3139     // c_rarg3: jvalue object on the stack
3140     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3141     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3142 
3143     switch (bytecode()) {             // restore tos values
3144     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3145     case Bytecodes::_fast_bputfield: // fall through
3146     case Bytecodes::_fast_sputfield: // fall through
3147     case Bytecodes::_fast_cputfield: // fall through
3148     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3149     case Bytecodes::_fast_dputfield: __ pop_d(); break;
3150     case Bytecodes::_fast_fputfield: __ pop_f(); break;
3151     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3152     }
3153     __ bind(L2);
3154   }
3155 }
3156 
3157 void TemplateTable::fast_storefield(TosState state) {
3158   transition(state, vtos);
3159 
3160   ByteSize base = ConstantPoolCache::base_offset();
3161 
3162   jvmti_post_fast_field_mod();
3163 
3164   // access constant pool cache
3165   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3166 
3167   // test for volatile with rdx but rdx is tos register for lputfield.
3168   __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3169                        in_bytes(base +
3170                                 ConstantPoolCacheEntry::flags_offset())));


3194     break;
3195   case Bytecodes::_fast_lputfield:
3196 #ifdef _LP64
3197   __ movq(field, rax);
3198 #else
3199   __ stop("should not be rewritten");
3200 #endif
3201     break;
3202   case Bytecodes::_fast_iputfield:
3203     __ movl(field, rax);
3204     break;
3205   case Bytecodes::_fast_bputfield:
3206     __ movb(field, rax);
3207     break;
3208   case Bytecodes::_fast_sputfield:
3209     // fall through
3210   case Bytecodes::_fast_cputfield:
3211     __ movw(field, rax);
3212     break;
3213   case Bytecodes::_fast_fputfield:
3214     NOT_LP64( __ fstp_s(field); )
3215     LP64_ONLY( __ movflt(field, xmm0);)
3216     break;
3217   case Bytecodes::_fast_dputfield:
3218     NOT_LP64( __ fstp_d(field); )
3219     LP64_ONLY( __ movdbl(field, xmm0);)
3220     break;
3221   default:
3222     ShouldNotReachHere();
3223   }
3224 
3225   // Check for volatile store
3226   __ testl(rdx, rdx);
3227   __ jcc(Assembler::zero, notVolatile);
3228   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3229                                                Assembler::StoreStore));
3230   __ bind(notVolatile);
3231 }
3232 
3233 void TemplateTable::fast_accessfield(TosState state) {
3234   transition(atos, state);
3235 
3236   // Do the JVMTI work here to avoid disturbing the register state below
3237   if (JvmtiExport::can_post_field_access()) {
3238     // Check to see if a field access watch has been set before we
3239     // take the time to call into the VM.


3284   case Bytecodes::_fast_lgetfield:
3285 #ifdef _LP64
3286   __ movq(rax, field);
3287 #else
3288   __ stop("should not be rewritten");
3289 #endif
3290     break;
3291   case Bytecodes::_fast_igetfield:
3292     __ movl(rax, field);
3293     break;
3294   case Bytecodes::_fast_bgetfield:
3295     __ movsbl(rax, field);
3296     break;
3297   case Bytecodes::_fast_sgetfield:
3298     __ load_signed_short(rax, field);
3299     break;
3300   case Bytecodes::_fast_cgetfield:
3301     __ load_unsigned_short(rax, field);
3302     break;
3303   case Bytecodes::_fast_fgetfield:
3304     LP64_ONLY(__ movflt(xmm0, field));
3305     NOT_LP64(__ fld_s(field));
3306     break;
3307   case Bytecodes::_fast_dgetfield:
3308     LP64_ONLY(__ movdbl(xmm0, field));
3309     NOT_LP64(__ fld_d(field));
3310     break;
3311   default:
3312     ShouldNotReachHere();
3313   }
3314   // [jk] not needed currently
3315   // if (os::is_MP()) {
3316   //   Label notVolatile;
3317   //   __ testl(rdx, rdx);
3318   //   __ jcc(Assembler::zero, notVolatile);
3319   //   __ membar(Assembler::LoadLoad);
3320   //   __ bind(notVolatile);
3321   //};
3322 }
3323 
3324 void TemplateTable::fast_xaccess(TosState state) {
3325   transition(vtos, state);
3326 
3327   // get receiver
3328   __ movptr(rax, aaddress(0));
3329   // access constant pool cache
3330   __ get_cache_and_index_at_bcp(rcx, rdx, 2);
3331   __ movptr(rbx,
3332             Address(rcx, rdx, Address::times_ptr,
3333                     in_bytes(ConstantPoolCache::base_offset() +
3334                              ConstantPoolCacheEntry::f2_offset())));
3335   // make sure exception is reported in correct bcp range (getfield is
3336   // next instruction)
3337   __ increment(rbcp);
3338   __ null_check(rax);
3339   const Address field = Address(rax, rbx, Address::times_1, 0*wordSize);
3340   switch (state) {
3341   case itos:
3342     __ movl(rax, field);
3343     break;
3344   case atos:
3345     __ load_heap_oop(rax, field);
3346     __ verify_oop(rax);
3347     break;
3348   case ftos:
3349     LP64_ONLY(__ movflt(xmm0, field));
3350     NOT_LP64(__ fld_s(field));
3351     break;
3352   default:
3353     ShouldNotReachHere();
3354   }
3355 
3356   // [jk] not needed currently
3357   // if (os::is_MP()) {
3358   //   Label notVolatile;
3359   //   __ movl(rdx, Address(rcx, rdx, Address::times_8,
3360   //                        in_bytes(ConstantPoolCache::base_offset() +
3361   //                                 ConstantPoolCacheEntry::flags_offset())));
3362   //   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3363   //   __ testl(rdx, 0x1);
3364   //   __ jcc(Assembler::zero, notVolatile);
3365   //   __ membar(Assembler::LoadLoad);
3366   //   __ bind(notVolatile);
3367   // }
3368 
3369   __ decrement(rbcp);
3370 }

=== src/cpu/x86/vm/templateTable_x86.cpp (new version) ===

 332   }
 333 }
 334 
 335 void TemplateTable::lconst(int value) {
 336   transition(vtos, ltos);
 337   if (value == 0) {
 338     __ xorl(rax, rax);
 339   } else {
 340     __ movl(rax, value);
 341   }
 342 #ifndef _LP64
 343   assert(value >= 0, "check this code");
 344   __ xorptr(rdx, rdx);
 345 #endif
 346 }
 347 
 348 
 349 
 350 void TemplateTable::fconst(int value) {
 351   transition(vtos, ftos);
 352   if (UseSSE >= 1) {
 353     static float one = 1.0f, two = 2.0f;
 354     switch (value) {
 355     case 0:
 356       __ xorps(xmm0, xmm0);
 357       break;
 358     case 1:
 359       __ movflt(xmm0, ExternalAddress((address) &one));
 360       break;
 361     case 2:
 362       __ movflt(xmm0, ExternalAddress((address) &two));
 363       break;
 364     default:
 365       ShouldNotReachHere();
 366       break;
 367     }
 368   } else {
 369 #ifdef _LP64
 370     ShouldNotReachHere();
 371 #else
 372            if (value == 0) { __ fldz();
 373     } else if (value == 1) { __ fld1();
 374     } else if (value == 2) { __ fld1(); __ fld1(); __ faddp(); // should find a better solution here
 375     } else                 { ShouldNotReachHere();
 376     }
 377 #endif // _LP64
 378   }
 379 }
 380 
 381 void TemplateTable::dconst(int value) {
 382   transition(vtos, dtos);
 383   if (UseSSE >= 2) {
 384     static double one = 1.0;
 385     switch (value) {
 386     case 0:
 387       __ xorpd(xmm0, xmm0);
 388       break;
 389     case 1:
 390       __ movdbl(xmm0, ExternalAddress((address) &one));
 391       break;
 392     default:
 393       ShouldNotReachHere();
 394       break;
 395     }
 396   } else {
 397 #ifdef _LP64
 398     ShouldNotReachHere();
 399 #else
 400            if (value == 0) { __ fldz();
 401     } else if (value == 1) { __ fld1();
 402     } else                 { ShouldNotReachHere();
 403     }
 404 #endif
 405   }
 406 }
 407 
 408 void TemplateTable::bipush() {
 409   transition(vtos, itos);
 410   __ load_signed_byte(rax, at_bcp(1));
 411 }
 412 
 413 void TemplateTable::sipush() {
 414   transition(vtos, itos);
 415   __ load_unsigned_short(rax, at_bcp(1));
 416   __ bswapl(rax);
 417   __ sarl(rax, 16);
 418 }
 419 
 420 void TemplateTable::ldc(bool wide) {
 421   transition(vtos, vtos);
 422   Register rarg = NOT_LP64(rcx) LP64_ONLY(c_rarg1);
 423   Label call_ldc, notFloat, notClass, Done;
 424 
 425   if (wide) {


 444   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClassInError);
 445   __ jccb(Assembler::equal, call_ldc);
 446 
 447   // resolved class - need to call vm to get java mirror of the class
 448   __ cmpl(rdx, JVM_CONSTANT_Class);
 449   __ jcc(Assembler::notEqual, notClass);
 450 
 451   __ bind(call_ldc);
 452 
 453   __ movl(rarg, wide);
 454   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), rarg);
 455 
 456   __ push(atos);
 457   __ jmp(Done);
 458 
 459   __ bind(notClass);
 460   __ cmpl(rdx, JVM_CONSTANT_Float);
 461   __ jccb(Assembler::notEqual, notFloat);
 462 
 463   // ftos
 464   __ load_float(Address(rcx, rbx, Address::times_ptr, base_offset));

 465   __ push(ftos);
 466   __ jmp(Done);
 467 
 468   __ bind(notFloat);
 469 #ifdef ASSERT
 470   {
 471     Label L;
 472     __ cmpl(rdx, JVM_CONSTANT_Integer);
 473     __ jcc(Assembler::equal, L);
 474     // String and Object are rewritten to fast_aldc
 475     __ stop("unexpected tag type in ldc");
 476     __ bind(L);
 477   }
 478 #endif
 479   // itos JVM_CONSTANT_Integer only
 480   __ movl(rax, Address(rcx, rbx, Address::times_ptr, base_offset));
 481   __ push(itos);
 482   __ bind(Done);
 483 }
 484 


 511   if (VerifyOops) {
 512     __ verify_oop(result);
 513   }
 514 }
 515 
 516 void TemplateTable::ldc2_w() {
 517   transition(vtos, vtos);
 518   Label Long, Done;
 519   __ get_unsigned_2_byte_index_at_bcp(rbx, 1);
 520 
 521   __ get_cpool_and_tags(rcx, rax);
 522   const int base_offset = ConstantPool::header_size() * wordSize;
 523   const int tags_offset = Array<u1>::base_offset_in_bytes();
 524 
 525   // get type
 526   __ cmpb(Address(rax, rbx, Address::times_1, tags_offset),
 527           JVM_CONSTANT_Double);
 528   __ jccb(Assembler::notEqual, Long);
 529 
 530   // dtos
 531   __ load_double(Address(rcx, rbx, Address::times_ptr, base_offset));

 532   __ push(dtos);
 533 
 534   __ jmpb(Done);
 535   __ bind(Long);
 536 
 537   // ltos
 538   __ movptr(rax, Address(rcx, rbx, Address::times_ptr, base_offset + 0 * wordSize));
 539   NOT_LP64(__ movptr(rdx, Address(rcx, rbx, Address::times_ptr, base_offset + 1 * wordSize)));
 540   __ push(ltos);
 541 
 542   __ bind(Done);
 543 }
 544 
 545 void TemplateTable::locals_index(Register reg, int offset) {
 546   __ load_unsigned_byte(reg, at_bcp(offset));
 547   __ negptr(reg);
 548 }
 549 
 550 void TemplateTable::iload() {
 551   iload_internal();


 605   locals_index(rbx, 3);
 606   __ movl(rax, iaddress(rbx));
 607 }
 608 
 609 void TemplateTable::fast_iload() {
 610   transition(vtos, itos);
 611   locals_index(rbx);
 612   __ movl(rax, iaddress(rbx));
 613 }
 614 
 615 void TemplateTable::lload() {
 616   transition(vtos, ltos);
 617   locals_index(rbx);
 618   __ movptr(rax, laddress(rbx));
 619   NOT_LP64(__ movl(rdx, haddress(rbx)));
 620 }
 621 
 622 void TemplateTable::fload() {
 623   transition(vtos, ftos);
 624   locals_index(rbx);
 625   __ load_float(faddress(rbx));

 626 }
 627 
 628 void TemplateTable::dload() {
 629   transition(vtos, dtos);
 630   locals_index(rbx);
 631   __ load_double(daddress(rbx));

 632 }
 633 
 634 void TemplateTable::aload() {
 635   transition(vtos, atos);
 636   locals_index(rbx);
 637   __ movptr(rax, aaddress(rbx));
 638 }
 639 
 640 void TemplateTable::locals_index_wide(Register reg) {
 641   __ load_unsigned_short(reg, at_bcp(2));
 642   __ bswapl(reg);
 643   __ shrl(reg, 16);
 644   __ negptr(reg);
 645 }
 646 
 647 void TemplateTable::wide_iload() {
 648   transition(vtos, itos);
 649   locals_index_wide(rbx);
 650   __ movl(rax, iaddress(rbx));
 651 }
 652 
 653 void TemplateTable::wide_lload() {
 654   transition(vtos, ltos);
 655   locals_index_wide(rbx);
 656   __ movptr(rax, laddress(rbx));
 657   NOT_LP64(__ movl(rdx, haddress(rbx)));
 658 }
 659 
 660 void TemplateTable::wide_fload() {
 661   transition(vtos, ftos);
 662   locals_index_wide(rbx);
 663   __ load_float(faddress(rbx));

 664 }
 665 
 666 void TemplateTable::wide_dload() {
 667   transition(vtos, dtos);
 668   locals_index_wide(rbx);
 669   __ load_double(daddress(rbx));

 670 }
 671 
 672 void TemplateTable::wide_aload() {
 673   transition(vtos, atos);
 674   locals_index_wide(rbx);
 675   __ movptr(rax, aaddress(rbx));
 676 }
 677 
 678 void TemplateTable::index_check(Register array, Register index) {
 679   // Pop ptr into array
 680   __ pop_ptr(array);
 681   index_check_without_pop(array, index);
 682 }
 683 
 684 void TemplateTable::index_check_without_pop(Register array, Register index) {
 685   // destroys rbx
 686   // check array
 687   __ null_check(array, arrayOopDesc::length_offset_in_bytes());
 688   // sign extend index for use by indexed load
 689   __ movl2ptr(index, index);


 710 }
 711 
 712 void TemplateTable::laload() {
 713   transition(itos, ltos);
 714   // rax: index
 715   // rdx: array
 716   index_check(rdx, rax); // kills rbx
 717   NOT_LP64(__ mov(rbx, rax));
 718   // rbx,: index
 719   __ movptr(rax, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize));
 720   NOT_LP64(__ movl(rdx, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize)));
 721 }
 722 
 723 
 724 
 725 void TemplateTable::faload() {
 726   transition(itos, ftos);
 727   // rax: index
 728   // rdx: array
 729   index_check(rdx, rax); // kills rbx
 730   __ load_float(Address(rdx, rax,
 731                         Address::times_4,
 732                         arrayOopDesc::base_offset_in_bytes(T_FLOAT)));

 733 }
 734 
 735 void TemplateTable::daload() {
 736   transition(itos, dtos);
 737   // rax: index
 738   // rdx: array
 739   index_check(rdx, rax); // kills rbx
 740   __ load_double(Address(rdx, rax,
 741                          Address::times_8,
 742                          arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));

 743 }
 744 
 745 void TemplateTable::aaload() {
 746   transition(itos, atos);
 747   // rax: index
 748   // rdx: array
 749   index_check(rdx, rax); // kills rbx
 750   __ load_heap_oop(rax, Address(rdx, rax,
 751                                 UseCompressedOops ? Address::times_4 : Address::times_ptr,
 752                                 arrayOopDesc::base_offset_in_bytes(T_OBJECT)));
 753 }
 754 
 755 void TemplateTable::baload() {
 756   transition(itos, itos);
 757   // rax: index
 758   // rdx: array
 759   index_check(rdx, rax); // kills rbx
 760   __ load_signed_byte(rax, Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)));
 761 }
 762 


 789   transition(itos, itos);
 790   // rax: index
 791   // rdx: array
 792   index_check(rdx, rax); // kills rbx
 793   __ load_signed_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)));
 794 }
 795 
 796 void TemplateTable::iload(int n) {
 797   transition(vtos, itos);
 798   __ movl(rax, iaddress(n));
 799 }
 800 
 801 void TemplateTable::lload(int n) {
 802   transition(vtos, ltos);
 803   __ movptr(rax, laddress(n));
 804   NOT_LP64(__ movptr(rdx, haddress(n)));
 805 }
 806 
 807 void TemplateTable::fload(int n) {
 808   transition(vtos, ftos);
 809   __ load_float(faddress(n));

 810 }
 811 
 812 void TemplateTable::dload(int n) {
 813   transition(vtos, dtos);
 814   __ load_double(daddress(n));

 815 }
 816 
 817 void TemplateTable::aload(int n) {
 818   transition(vtos, atos);
 819   __ movptr(rax, aaddress(n));
 820 }
 821 
 822 void TemplateTable::aload_0() {
 823   aload_0_internal();
 824 }
 825 
 826 void TemplateTable::nofast_aload_0() {
 827   aload_0_internal(may_not_rewrite);
 828 }
 829 
 830 void TemplateTable::aload_0_internal(RewriteControl rc) {
 831   transition(vtos, atos);
 832   // According to bytecode histograms, the pairs:
 833   //
 834   // _aload_0, _fast_igetfield


 899   }
 900 }
 901 
 902 void TemplateTable::istore() {
 903   transition(itos, vtos);
 904   locals_index(rbx);
 905   __ movl(iaddress(rbx), rax);
 906 }
 907 
 908 
 909 void TemplateTable::lstore() {
 910   transition(ltos, vtos);
 911   locals_index(rbx);
 912   __ movptr(laddress(rbx), rax);
 913   NOT_LP64(__ movptr(haddress(rbx), rdx));
 914 }
 915 
 916 void TemplateTable::fstore() {
 917   transition(ftos, vtos);
 918   locals_index(rbx);
 919   __ store_float(faddress(rbx));

 920 }
 921 
 922 void TemplateTable::dstore() {
 923   transition(dtos, vtos);
 924   locals_index(rbx);
 925   __ store_double(daddress(rbx));

 926 }
 927 
 928 void TemplateTable::astore() {
 929   transition(vtos, vtos);
 930   __ pop_ptr(rax);
 931   locals_index(rbx);
 932   __ movptr(aaddress(rbx), rax);
 933 }
 934 
 935 void TemplateTable::wide_istore() {
 936   transition(vtos, vtos);
 937   __ pop_i();
 938   locals_index_wide(rbx);
 939   __ movl(iaddress(rbx), rax);
 940 }
 941 
 942 void TemplateTable::wide_lstore() {
 943   transition(vtos, vtos);
 944   NOT_LP64(__ pop_l(rax, rdx));
 945   LP64_ONLY(__ pop_l());
 946   locals_index_wide(rbx);
 947   __ movptr(laddress(rbx), rax);
 948   NOT_LP64(__ movl(haddress(rbx), rdx));
 949 }
 950 
 951 void TemplateTable::wide_fstore() {
 952 #ifdef _LP64
 953   transition(vtos, vtos);
 954   __ pop_f(xmm0);
 955   locals_index_wide(rbx);
 956   __ movflt(faddress(rbx), xmm0);
 957 #else
 958   wide_istore();
 959 #endif
 960 }
 961 
 962 void TemplateTable::wide_dstore() {
 963 #ifdef _LP64
 964   transition(vtos, vtos);
 965   __ pop_d(xmm0);
 966   locals_index_wide(rbx);
 967   __ movdbl(daddress(rbx), xmm0);
 968 #else
 969   wide_lstore();
 970 #endif
 971 }
 972 
 973 void TemplateTable::wide_astore() {
 974   transition(vtos, vtos);
 975   __ pop_ptr(rax);
 976   locals_index_wide(rbx);
 977   __ movptr(aaddress(rbx), rax);
 978 }
 979 
 980 void TemplateTable::iastore() {
 981   transition(itos, vtos);
 982   __ pop_i(rbx);
 983   // rax: value
 984   // rbx: index
 985   // rdx: array


 989                   arrayOopDesc::base_offset_in_bytes(T_INT)),
 990           rax);
 991 }
 992 
 993 void TemplateTable::lastore() {
 994   transition(ltos, vtos);
 995   __ pop_i(rbx);
 996   // rax,: low(value)
 997   // rcx: array
 998   // rdx: high(value)
 999   index_check(rcx, rbx);  // prefer index in rbx,
1000   // rbx,: index
1001   __ movptr(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize), rax);
1002   NOT_LP64(__ movl(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize), rdx));
1003 }
1004 
1005 
1006 void TemplateTable::fastore() {
1007   transition(ftos, vtos);
1008   __ pop_i(rbx);
1009   // value is in UseSSE >= 1 ? xmm0 : ST(0)
1010   // rbx:  index
1011   // rdx:  array
1012   index_check(rdx, rbx); // prefer index in rbx
1013   __ store_float(Address(rdx, rbx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
1014 }
1015 
1016 void TemplateTable::dastore() {
1017   transition(dtos, vtos);
1018   __ pop_i(rbx);
1019   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1020   // rbx:  index
1021   // rdx:  array
1022   index_check(rdx, rbx); // prefer index in rbx
1023   __ store_double(Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
1024 }
1025 
1026 void TemplateTable::aastore() {
1027   Label is_null, ok_is_subtype, done;
1028   transition(vtos, vtos);
1029   // stack: ..., array, index, value
1030   __ movptr(rax, at_tos());    // value
1031   __ movl(rcx, at_tos_p1()); // index
1032   __ movptr(rdx, at_tos_p2()); // array
1033 
1034   Address element_address(rdx, rcx,
1035                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1036                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1037 
1038   index_check_without_pop(rdx, rcx);     // kills rbx
1039   __ testptr(rax, rax);
1040   __ jcc(Assembler::zero, is_null);
1041 
1042   // Move subklass into rbx
1043   __ load_klass(rbx, rax);


1104 }
1105 
1106 
1107 void TemplateTable::sastore() {
1108   castore();
1109 }
1110 
1111 void TemplateTable::istore(int n) {
1112   transition(itos, vtos);
1113   __ movl(iaddress(n), rax);
1114 }
1115 
1116 void TemplateTable::lstore(int n) {
1117   transition(ltos, vtos);
1118   __ movptr(laddress(n), rax);
1119   NOT_LP64(__ movptr(haddress(n), rdx));
1120 }
1121 
1122 void TemplateTable::fstore(int n) {
1123   transition(ftos, vtos);
1124   __ store_float(faddress(n));

1125 }
1126 
1127 void TemplateTable::dstore(int n) {
1128   transition(dtos, vtos);
1129   __ store_double(daddress(n));

1130 }
1131 
1132 
1133 void TemplateTable::astore(int n) {
1134   transition(vtos, vtos);
1135   __ pop_ptr(rax);
1136   __ movptr(aaddress(n), rax);
1137 }
1138 
1139 void TemplateTable::pop() {
1140   transition(vtos, vtos);
1141   __ addptr(rsp, Interpreter::stackElementSize);
1142 }
1143 
1144 void TemplateTable::pop2() {
1145   transition(vtos, vtos);
1146   __ addptr(rsp, 2 * Interpreter::stackElementSize);
1147 }
1148 
1149 


1393   __ pop_l(rax, rdx);                            // get shift value
1394   __ lshr(rdx, rax, true);
1395 #endif
1396 }
1397 
1398 void TemplateTable::lushr() {
1399   transition(itos, ltos);
1400 #ifdef _LP64
1401   __ movl(rcx, rax);                             // get shift count
1402   __ pop_l(rax);                                 // get shift value
1403   __ shrq(rax);
1404 #else
1405   __ mov(rcx, rax);                              // get shift count
1406   __ pop_l(rax, rdx);                            // get shift value
1407   __ lshr(rdx, rax);
1408 #endif
1409 }
1410 
1411 void TemplateTable::fop2(Operation op) {
1412   transition(ftos, ftos);
1413 
1414   if (UseSSE >= 1) {
1415     switch (op) {
1416     case add:
1417       __ addss(xmm0, at_rsp());
1418       __ addptr(rsp, Interpreter::stackElementSize);
1419       break;
1420     case sub:
1421       __ movflt(xmm1, xmm0);
1422       __ pop_f(xmm0);
1423       __ subss(xmm0, xmm1);
1424       break;
1425     case mul:
1426       __ mulss(xmm0, at_rsp());
1427       __ addptr(rsp, Interpreter::stackElementSize);
1428       break;
1429     case div:
1430       __ movflt(xmm1, xmm0);
1431       __ pop_f(xmm0);
1432       __ divss(xmm0, xmm1);
1433       break;
1434     case rem:
1435       // On x86_64 platforms the SharedRuntime::frem method is called to perform the
1436       // modulo operation. The frem method calls the function
1437       // double fmod(double x, double y) in math.h. The documentation of fmod states:
1438       // "If x or y is a NaN, a NaN is returned." without specifying what type of NaN
1439       // "If x or y is a NaN, a NaN is returned," without specifying which type of NaN
1440       //
1441       // On x86_32 platforms the FPU is used to perform the modulo operation. The
1442       // reason is that on 32-bit Windows the sign of modulo operations diverges from
1443       // what is considered the standard (e.g., -0.0f % -3.14f is 0.0f and not -0.0f).
1444       // The fprem instruction used on x86_32 is functionally equivalent to
1445       // SharedRuntime::frem in that it also returns a NaN for NaN inputs.
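      // A minimal C-level illustration of the behavior described above
      // (assuming C99 fmod semantics, where the result takes the sign of x):
      //
      //   std::fmod(-0.0, -3.14);   // expected -0.0; buggy 32-bit Windows
      //                             // runtimes return +0.0
      //   std::fmod(NAN, 1.0);      // some NaN; sNaN vs. qNaN unspecified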
1446 #ifdef _LP64
1447       __ movflt(xmm1, xmm0);
1448       __ pop_f(xmm0);
1449       __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::frem), 2);
1450 #else
1451       __ push_f(xmm0);
1452       __ pop_f();
1453       __ fld_s(at_rsp());
1454       __ fremr(rax);
1455       __ f2ieee();
1456       __ pop(rax);  // pop second operand off the stack
1457       __ push_f();
1458       __ pop_f(xmm0);
1459 #endif
1460       break;
1461     default:
1462       ShouldNotReachHere();
1463       break;
1464     }
1465   } else {
1466 #ifdef _LP64
1467     ShouldNotReachHere();
1468 #else
1469     switch (op) {
1470     case add: __ fadd_s (at_rsp());                break;
1471     case sub: __ fsubr_s(at_rsp());                break;
1472     case mul: __ fmul_s (at_rsp());                break;
1473     case div: __ fdivr_s(at_rsp());                break;
1474     case rem: __ fld_s  (at_rsp()); __ fremr(rax); break;
1475     default : ShouldNotReachHere();
1476     }
1477     __ f2ieee();
1478     __ pop(rax);  // pop second operand off the stack
1479 #endif // _LP64
1480   }
1481 }
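// Note on the SSE operand order in fop2() above (and dop2() below): the
// value on top of the Java expression stack is the right-hand operand, so
// the non-commutative cases (sub, div, rem) first save xmm0 to xmm1,
// reload the left operand from the stack into xmm0, and only then apply
// subss/divss (or call the rem runtime) with xmm1 as the second operand.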
1482 
1483 void TemplateTable::dop2(Operation op) {
1484   transition(dtos, dtos);
1485   if (UseSSE >= 2) {
1486     switch (op) {
1487     case add:
1488       __ addsd(xmm0, at_rsp());
1489       __ addptr(rsp, 2 * Interpreter::stackElementSize);
1490       break;
1491     case sub:
1492       __ movdbl(xmm1, xmm0);
1493       __ pop_d(xmm0);
1494       __ subsd(xmm0, xmm1);
1495       break;
1496     case mul:
1497       __ mulsd(xmm0, at_rsp());
1498       __ addptr(rsp, 2 * Interpreter::stackElementSize);
1499       break;
1500     case div:
1501       __ movdbl(xmm1, xmm0);
1502       __ pop_d(xmm0);
1503       __ divsd(xmm0, xmm1);
1504       break;
1505     case rem:
1506       // Similar to fop2(), the modulo operation is performed using the
1507       // SharedRuntime::drem method (on x86_64 platforms) or using the
1508       // FPU (on x86_32 platforms) for the same reasons as mentioned in fop2().
1509 #ifdef _LP64
1510       __ movdbl(xmm1, xmm0);
1511       __ pop_d(xmm0);
1512       __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::drem), 2);
1513 #else
1514       __ push_d(xmm0);
1515       __ pop_d();
1516       __ fld_d(at_rsp());
1517       __ fremr(rax);
1518       __ d2ieee();
1519       __ pop(rax);
1520       __ pop(rdx);
1521       __ push_d();
1522       __ pop_d(xmm0);
1523 #endif
1524       break;
1525     default:
1526       ShouldNotReachHere();
1527       break;
1528     }
1529   } else {
1530 #ifdef _LP64
1531     ShouldNotReachHere();
1532 #else
1533     switch (op) {
1534     case add: __ fadd_d (at_rsp());                break;
1535     case sub: __ fsubr_d(at_rsp());                break;
1536     case mul: {
1537       Label L_strict;
1538       Label L_join;
1539       const Address access_flags      (rcx, Method::access_flags_offset());
1540       __ get_method(rcx);
1541       __ movl(rcx, access_flags);
1542       __ testl(rcx, JVM_ACC_STRICT);
1543       __ jccb(Assembler::notZero, L_strict);
1544       __ fmul_d (at_rsp());
1545       __ jmpb(L_join);
1546       __ bind(L_strict);
1547       __ fld_x(ExternalAddress(StubRoutines::addr_fpu_subnormal_bias1()));
1548       __ fmulp();
1549       __ fmul_d (at_rsp());
1550       __ fld_x(ExternalAddress(StubRoutines::addr_fpu_subnormal_bias2()));
1551       __ fmulp();


1562       __ jccb(Assembler::notZero, L_strict);
1563       __ fdivr_d(at_rsp());
1564       __ jmp(L_join);
1565       __ bind(L_strict);
1566       __ fld_x(ExternalAddress(StubRoutines::addr_fpu_subnormal_bias1()));
1567       __ fmul_d (at_rsp());
1568       __ fdivrp();
1569       __ fld_x(ExternalAddress(StubRoutines::addr_fpu_subnormal_bias2()));
1570       __ fmulp();
1571       __ bind(L_join);
1572       break;
1573     }
1574     case rem: __ fld_d  (at_rsp()); __ fremr(rax); break;
1575     default : ShouldNotReachHere();
1576     }
1577     __ d2ieee();
1578     // Pop double precision number from rsp.
1579     __ pop(rax);
1580     __ pop(rdx);
1581 #endif
1582   }
1583 }
1584 
1585 void TemplateTable::ineg() {
1586   transition(itos, itos);
1587   __ negl(rax);
1588 }
1589 
1590 void TemplateTable::lneg() {
1591   transition(ltos, ltos);
1592   LP64_ONLY(__ negq(rax));
1593   NOT_LP64(__ lneg(rdx, rax));
1594 }
1595 

1596 // Note: 'double' and 'long long' have 32-bit alignment on x86.
1597 static jlong* double_quadword(jlong *adr, jlong lo, jlong hi) {
1598   // Use the expression (adr)&(~0xF) to provide a 128-bit-aligned address
1599   // for the 128-bit operands of SSE instructions.
1600   jlong *operand = (jlong*)(((intptr_t)adr)&((intptr_t)(~0xF)));
1601   // Store the value to a 128-bit operand.
1602   operand[0] = lo;
1603   operand[1] = hi;
1604   return operand;
1605 }
1606 
1607 // Buffers for the 128-bit masks used by SSE instructions.
1608 static jlong float_signflip_pool[2*2];
1609 static jlong double_signflip_pool[2*2];
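// A worked example of the alignment arithmetic above (illustrative): if
// &pool[1] were 0x1008, then 0x1008 & ~0xF == 0x1000, which is 16-byte
// aligned and still inside the pool. Each pool is 2*2 jlongs (32 bytes) so
// that, wherever the masked 16-byte operand lands, operand[0] and
// operand[1] both stay within the buffer.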

1610 
1611 void TemplateTable::fneg() {
1612   transition(ftos, ftos);
1613   if (UseSSE >= 1) {
1614     static jlong *float_signflip  = double_quadword(&float_signflip_pool[1], 0x8000000080000000, 0x8000000080000000);
1615     __ xorps(xmm0, ExternalAddress((address) float_signflip));
1616   } else {
1617     LP64_ONLY(ShouldNotReachHere());
1618     NOT_LP64(__ fchs());
1619   }
1620 }
1621 
1622 void TemplateTable::dneg() {
1623   transition(dtos, dtos);
1624   if (UseSSE >= 2) {
1625     static jlong *double_signflip  = double_quadword(&double_signflip_pool[1], 0x8000000000000000, 0x8000000000000000);
1626     __ xorpd(xmm0, ExternalAddress((address) double_signflip));
1627   } else {
1628 #ifdef _LP64
1629     ShouldNotReachHere();
1630 #else
1631     __ fchs();
1632 #endif
1633   }
1634 }
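// The SSE negations above flip only the IEEE sign bit: for fneg,
// 0x3F800000 (1.0f) XOR 0x80000000 == 0xBF800000 (-1.0f). No arithmetic is
// performed, so NaNs and infinities are negated without raising exceptions.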
1635 
1636 void TemplateTable::iinc() {
1637   transition(vtos, vtos);
1638   __ load_signed_byte(rdx, at_bcp(2)); // get constant
1639   locals_index(rbx);
1640   __ addl(iaddress(rbx), rdx);
1641 }
1642 
1643 void TemplateTable::wide_iinc() {
1644   transition(vtos, vtos);
1645   __ movl(rdx, at_bcp(4)); // get constant
1646   locals_index_wide(rbx);
1647   __ bswapl(rdx); // swap bytes & sign-extend constant
1648   __ sarl(rdx, 16);
1649   __ addl(iaddress(rbx), rdx);
1650   // Note: should probably use only one movl to get both
1651   //       the index and the constant -> fix this
1652 }
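// A worked example of the movl/bswapl/sarl idiom above (illustrative): for
// constant bytes 0xFF 0xFE at bcp+4 (big-endian -2), the little-endian movl
// yields 0x????FEFF, bswapl gives 0xFFFE????, and sarl(rdx, 16) produces
// 0xFFFFFFFE, the sign-extended 16-bit constant.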
1653 


1815       case Bytecodes::_d2l: tos_out = ltos; break;
1816       case Bytecodes::_i2f: // fall through
1817       case Bytecodes::_l2f: // fall through
1818       case Bytecodes::_d2f: tos_out = ftos; break;
1819       case Bytecodes::_i2d: // fall through
1820       case Bytecodes::_l2d: // fall through
1821       case Bytecodes::_f2d: tos_out = dtos; break;
1822       default             : ShouldNotReachHere();
1823     }
1824     transition(tos_in, tos_out);
1825   }
1826 #endif // ASSERT
1827 
1828   // Conversion
1829   // (Note: use push(rcx)/pop(rcx) for 1/2-word stack-ptr manipulation)
1830   switch (bytecode()) {
1831     case Bytecodes::_i2l:
1832       __ extend_sign(rdx, rax);
1833       break;
1834     case Bytecodes::_i2f:
1835       if (UseSSE >= 1) {
1836         __ cvtsi2ssl(xmm0, rax);
1837       } else {
1838         __ push(rax);          // store int on tos
1839         __ fild_s(at_rsp());   // load int to ST0
1840         __ f2ieee();           // truncate to float size
1841         __ pop(rcx);           // adjust rsp
1842       }
1843       break;
1844     case Bytecodes::_i2d:
1845       if (UseSSE >= 2) {
1846         __ cvtsi2sdl(xmm0, rax);
1847       } else {
1848         __ push(rax);          // add one slot for d2ieee()
1849         __ push(rax);          // store int on tos
1850         __ fild_s(at_rsp());   // load int to ST0
1851         __ d2ieee();           // truncate to double size
1852         __ pop(rcx);           // adjust rsp
1853         __ pop(rcx);
1854       }
1855       break;
1856     case Bytecodes::_i2b:
1857       __ shll(rax, 24);      // truncate upper 24 bits
1858       __ sarl(rax, 24);      // and sign-extend byte
1859       LP64_ONLY(__ movsbl(rax, rax));
1860       break;
1861     case Bytecodes::_i2c:
1862       __ andl(rax, 0xFFFF);  // truncate upper 16 bits
1863       LP64_ONLY(__ movzwl(rax, rax));
1864       break;
1865     case Bytecodes::_i2s:
1866       __ shll(rax, 16);      // truncate upper 16 bits
1867       __ sarl(rax, 16);      // and sign-extend short
1868       LP64_ONLY(__ movswl(rax, rax));
1869       break;
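    // A worked example of the shll/sarl idiom above (illustrative): for i2s
    // with rax == 0x0001FFFF, shll(rax, 16) gives 0xFFFF0000 and
    // sarl(rax, 16) gives 0xFFFFFFFF, i.e. the low 16 bits sign-extended
    // to -1.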
1870     case Bytecodes::_l2i:
1871       /* nothing to do */
1872       break;
1873     case Bytecodes::_l2f:
1874       // On 64-bit platforms, the cvtsi2ssq instruction is used to convert
1875       // 64-bit long values to floats. On 32-bit platforms it is not possible
1876       // to use that instruction with 64-bit operands; therefore the FPU is
1877       // used to perform the conversion.
1878       __ push(rdx);          // store long on tos
1879       __ push(rax);
1880       __ fild_d(at_rsp());   // load long to ST0
1881       __ f2ieee();           // truncate to float size
1882       __ pop(rcx);           // adjust rsp
1883       __ pop(rcx);
1884       if (UseSSE >= 1) {
1885         __ push_f();
1886         __ pop_f(xmm0);
1887       }
1888       break;
1889     case Bytecodes::_l2d:
1890       // On 32-bit platforms the FPU is used for the conversion, because
1891       // it is not possible to use the cvtsi2sdq instruction with 64-bit
1892       // operands there.
1893       __ push(rdx);          // store long on tos
1894       __ push(rax);
1895       __ fild_d(at_rsp());   // load long to ST0
1896       __ d2ieee();           // truncate to double size
1897       __ pop(rcx);           // adjust rsp
1898       __ pop(rcx);
1899       if (UseSSE >= 2) {
1900         __ push_d();
1901         __ pop_d(xmm0);
1902       }
1903       break;
1904     case Bytecodes::_f2i:
1905       // SharedRuntime::f2i does not differentiate between sNaNs and qNaNs
1906       // as it returns 0 for any NaN.
1907       if (UseSSE >= 1) {
1908         __ push_f(xmm0);
1909       } else {
1910         __ push(rcx);          // reserve space for argument
1911         __ fstp_s(at_rsp());   // pass float argument on stack
1912       }
1913       __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::f2i), 1);
1914       break;
1915     case Bytecodes::_f2l:
1916       // SharedRuntime::f2l does not differentiate between sNaNs and qNaNs
1917       // as it returns 0 for any NaN.
1918       if (UseSSE >= 1) {
1919         __ push_f(xmm0);
1920       } else {
1921         __ push(rcx);          // reserve space for argument
1922         __ fstp_s(at_rsp());   // pass float argument on stack
1923       }
1924       __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::f2l), 1);
1925       break;
1926     case Bytecodes::_f2d:
1927       if (UseSSE < 1) {
1928         /* nothing to do */
1929       } else if (UseSSE == 1) {
1930         __ push_f(xmm0);
1931         __ pop_f();
1932       } else { // UseSSE >= 2
1933         __ cvtss2sd(xmm0, xmm0);
1934       }
1935       break;
1936     case Bytecodes::_d2i:
1937       if (UseSSE >= 2) {
1938         __ push_d(xmm0);
1939       } else {
1940         __ push(rcx);          // reserve space for argument
1941         __ push(rcx);
1942         __ fstp_d(at_rsp());   // pass double argument on stack
1943       }
1944       __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::d2i), 2);
1945       break;
1946     case Bytecodes::_d2l:
1947       if (UseSSE >= 2) {
1948         __ push_d(xmm0);
1949       } else {
1950         __ push(rcx);          // reserve space for argument
1951         __ push(rcx);
1952         __ fstp_d(at_rsp());   // pass double argument on stack
1953       }
1954       __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::d2l), 2);
1955       break;
1956     case Bytecodes::_d2f:
1957       if (UseSSE <= 1) {
1958         __ push(rcx);          // reserve space for f2ieee()
1959         __ f2ieee();           // truncate to float size
1960         __ pop(rcx);           // adjust rsp
1961         if (UseSSE == 1) {
1962           // The cvtsd2ss instruction is not available if UseSSE==1, therefore
1963           // the conversion is performed using the FPU in this case.
1964           __ push_f();
1965           __ pop_f(xmm0);
1966         }
1967       } else { // UseSSE >= 2
1968         __ cvtsd2ss(xmm0, xmm0);
1969       }
1970       break;
1971     default             :
1972       ShouldNotReachHere();
1973   }
1974 #endif
1975 }
1976 
1977 void TemplateTable::lcmp() {
1978   transition(ltos, itos);
1979 #ifdef _LP64
1980   Label done;
1981   __ pop_l(rdx);
1982   __ cmpq(rdx, rax);
1983   __ movl(rax, -1);
1984   __ jccb(Assembler::less, done);
1985   __ setb(Assembler::notEqual, rax);
1986   __ movzbl(rax, rax);
1987   __ bind(done);
1988 #else
1989 
1990   // y = rdx:rax
1991   __ pop_l(rbx, rcx);             // get x = rcx:rbx
1992   __ lcmp2int(rcx, rbx, rdx, rax);// rcx := cmp(x, y)
1993   __ mov(rax, rcx);
1994 #endif
1995 }
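// A sketch of the 64-bit lcmp() sequence above: after cmpq(rdx /* x */,
// rax /* y */), rax is preset to -1 and kept if x < y; otherwise
// setb(notEqual) yields 1 when x != y (i.e. x > y) and 0 when x == y, and
// movzbl clears the upper bits, matching lcmp's -1/0/+1 contract.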
1996 
1997 void TemplateTable::float_cmp(bool is_float, int unordered_result) {
1998   if ((is_float && UseSSE >= 1) ||
1999       (!is_float && UseSSE >= 2)) {
2000     Label done;
2001     if (is_float) {
2002       // XXX get rid of pop here, use ... reg, mem32
2003       __ pop_f(xmm1);
2004       __ ucomiss(xmm1, xmm0);
2005     } else {
2006       // XXX get rid of pop here, use ... reg, mem64
2007       __ pop_d(xmm1);
2008       __ ucomisd(xmm1, xmm0);
2009     }
2010     if (unordered_result < 0) {
2011       __ movl(rax, -1);
2012       __ jccb(Assembler::parity, done);
2013       __ jccb(Assembler::below, done);
2014       __ setb(Assembler::notEqual, rdx);
2015       __ movzbl(rax, rdx);
2016     } else {
2017       __ movl(rax, 1);
2018       __ jccb(Assembler::parity, done);
2019       __ jccb(Assembler::above, done);
2020       __ movl(rax, 0);
2021       __ jccb(Assembler::equal, done);
2022       __ decrementl(rax);
2023     }
2024     __ bind(done);
2025   } else {
2026 #ifdef _LP64
2027     ShouldNotReachHere();
2028 #else
2029     if (is_float) {
2030       __ fld_s(at_rsp());
2031     } else {
2032       __ fld_d(at_rsp());
2033       __ pop(rdx);
2034     }
2035     __ pop(rcx);
2036     __ fcmp2int(rax, unordered_result < 0);
2037 #endif // _LP64
2038   }
2039 }
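// Note on the SSE path in float_cmp() above: ucomiss/ucomisd set PF
// (parity) when either operand is NaN, the "unordered" case, so the parity
// jumps select the bytecode-mandated unordered_result without a separate
// NaN check. Illustrative outcomes for fcmpl (unordered_result < 0):
//   x < y -> -1, x == y -> 0, x > y -> +1, NaN operand -> -1.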
2040 
2041 void TemplateTable::branch(bool is_jsr, bool is_wide) {
2042   __ get_method(rcx); // rcx holds method
2043   __ profile_taken_branch(rax, rbx); // rax holds updated MDP, rbx
2044                                      // holds bumped taken count
2045 
2046   const ByteSize be_offset = MethodCounters::backedge_counter_offset() +
2047                              InvocationCounter::counter_offset();
2048   const ByteSize inv_offset = MethodCounters::invocation_counter_offset() +
2049                               InvocationCounter::counter_offset();
2050 
2051   // Load up edx with the branch displacement
2052   if (is_wide) {
2053     __ movl(rdx, at_bcp(1));
2054   } else {
2055     __ load_signed_short(rdx, at_bcp(1));
2056   }
2057   __ bswapl(rdx);
2058 


2829   // save that information and this code is faster than the test.
2830   __ fild_d(field);                // Must load atomically
2831   __ subptr(rsp,2*wordSize);    // Make space for store
2832   __ fistp_d(Address(rsp,0));
2833   __ pop(rax);
2834   __ pop(rdx);
2835 #else
2836   __ movq(rax, field);
2837 #endif
2838 
2839   __ push(ltos);
2840   // Rewrite bytecode to be faster
2841   LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
2842   __ jmp(Done);
2843 
2844   __ bind(notLong);
2845   __ cmpl(flags, ftos);
2846   __ jcc(Assembler::notEqual, notFloat);
2847   // ftos
2848 
2849   __ load_float(field);

2850   __ push(ftos);
2851   // Rewrite bytecode to be faster
2852   if (!is_static && rc == may_rewrite) {
2853     patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
2854   }
2855   __ jmp(Done);
2856 
2857   __ bind(notFloat);
2858 #ifdef ASSERT
2859   __ cmpl(flags, dtos);
2860   __ jcc(Assembler::notEqual, notDouble);
2861 #endif
2862   // dtos
2863   __ load_double(field);

2864   __ push(dtos);
2865   // Rewrite bytecode to be faster
2866   if (!is_static && rc == may_rewrite) {
2867     patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
2868   }
2869 #ifdef ASSERT
2870   __ jmp(Done);
2871 
2872 
2873   __ bind(notDouble);
2874   __ stop("Bad state");
2875 #endif
2876 
2877   __ bind(Done);
2878   // [jk] not needed currently
2879   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
2880   //                                              Assembler::LoadStore));
2881 }
2882 
2883 void TemplateTable::getfield(int byte_no) {


3125 
3126     __ bind(notVolatileLong);
3127 
3128     __ pop(ltos);  // overwrites rdx
3129     if (!is_static) pop_and_check_object(obj);
3130     __ movptr(hi, rdx);
3131     __ movptr(field, rax);
3132     // Don't rewrite to _fast_lputfield for potential volatile case.
3133     __ jmp(notVolatile);
3134   }
3135 #endif // _LP64
3136 
3137   __ bind(notLong);
3138   __ cmpl(flags, ftos);
3139   __ jcc(Assembler::notEqual, notFloat);
3140 
3141   // ftos
3142   {
3143     __ pop(ftos);
3144     if (!is_static) pop_and_check_object(obj);
3145     __ store_float(field);

3146     if (!is_static && rc == may_rewrite) {
3147       patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
3148     }
3149     __ jmp(Done);
3150   }
3151 
3152   __ bind(notFloat);
3153 #ifdef ASSERT
3154   __ cmpl(flags, dtos);
3155   __ jcc(Assembler::notEqual, notDouble);
3156 #endif
3157 
3158   // dtos
3159   {
3160     __ pop(dtos);
3161     if (!is_static) pop_and_check_object(obj);
3162     __ store_double(field);

3163     if (!is_static && rc == may_rewrite) {
3164       patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
3165     }
3166   }
3167 
3168 #ifdef ASSERT
3169   __ jmp(Done);
3170 
3171   __ bind(notDouble);
3172   __ stop("Bad state");
3173 #endif
3174 
3175   __ bind(Done);
3176 
3177   // Check for volatile store
3178   __ testl(rdx, rdx);
3179   __ jcc(Assembler::zero, notVolatile);
3180   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3181                                                Assembler::StoreStore));
3182   __ bind(notVolatile);
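  // (A note on the barrier above: x86 is a TSO architecture, so the only
  // reordering a Java volatile store must prevent is store->load; the
  // StoreLoad|StoreStore membar is typically realized as a single locked
  // instruction or mfence after the store.)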


3200 
3201   if (JvmtiExport::can_post_field_modification()) {
3202     // Check to see if a field modification watch has been set before
3203     // we take the time to call into the VM.
3204     Label L2;
3205     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3206     __ testl(scratch, scratch);
3207     __ jcc(Assembler::zero, L2);
3208     __ pop_ptr(rbx);                  // copy the object pointer from tos
3209     __ verify_oop(rbx);
3210     __ push_ptr(rbx);                 // put the object pointer back on tos
3211     // Save tos values before call_VM() clobbers them. Since we have
3212     // to do it for every data type, we use the saved values as the
3213     // jvalue object.
3214     switch (bytecode()) {          // load values into the jvalue object
3215     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3216     case Bytecodes::_fast_bputfield: // fall through
3217     case Bytecodes::_fast_sputfield: // fall through
3218     case Bytecodes::_fast_cputfield: // fall through
3219     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3220     case Bytecodes::_fast_dputfield: __ push(dtos); break;
3221     case Bytecodes::_fast_fputfield: __ push(ftos); break;
3222     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3223 
3224     default:
3225       ShouldNotReachHere();
3226     }
3227     __ mov(scratch, rsp);             // points to jvalue on the stack
3228     // access constant pool cache entry
3229     LP64_ONLY(__ get_cache_entry_pointer_at_bcp(c_rarg2, rax, 1));
3230     NOT_LP64(__ get_cache_entry_pointer_at_bcp(rax, rdx, 1));
3231     __ verify_oop(rbx);
3232     // rbx: object pointer copied above
3233     // c_rarg2: cache entry pointer
3234     // c_rarg3: jvalue object on the stack
3235     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3236     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3237 
3238     switch (bytecode()) {             // restore tos values
3239     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3240     case Bytecodes::_fast_bputfield: // fall through
3241     case Bytecodes::_fast_sputfield: // fall through
3242     case Bytecodes::_fast_cputfield: // fall through
3243     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3244     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3245     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3246     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3247     }
3248     __ bind(L2);
3249   }
3250 }
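// (Design note on the push/pop pairs above: the saved tos value doubles as
// the jvalue argument, i.e. c_rarg3/rcx simply points at the stack slot, so
// InterpreterRuntime::post_field_modification can read the new value for
// any field type without extra marshalling.)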
3251 
3252 void TemplateTable::fast_storefield(TosState state) {
3253   transition(state, vtos);
3254 
3255   ByteSize base = ConstantPoolCache::base_offset();
3256 
3257   jvmti_post_fast_field_mod();
3258 
3259   // access constant pool cache
3260   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3261 
3262   // Test for volatile with rdx; note that rdx is the tos register for lputfield.
3263   __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3264                        in_bytes(base +
3265                                 ConstantPoolCacheEntry::flags_offset())));


3289     break;
3290   case Bytecodes::_fast_lputfield:
3291 #ifdef _LP64
3292     __ movq(field, rax);
3293 #else
3294     __ stop("should not be rewritten");
3295 #endif
3296     break;
3297   case Bytecodes::_fast_iputfield:
3298     __ movl(field, rax);
3299     break;
3300   case Bytecodes::_fast_bputfield:
3301     __ movb(field, rax);
3302     break;
3303   case Bytecodes::_fast_sputfield:
3304     // fall through
3305   case Bytecodes::_fast_cputfield:
3306     __ movw(field, rax);
3307     break;
3308   case Bytecodes::_fast_fputfield:
3309     __ store_float(field);

3310     break;
3311   case Bytecodes::_fast_dputfield:
3312     __ store_double(field);

3313     break;
3314   default:
3315     ShouldNotReachHere();
3316   }
3317 
3318   // Check for volatile store
3319   __ testl(rdx, rdx);
3320   __ jcc(Assembler::zero, notVolatile);
3321   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3322                                                Assembler::StoreStore));
3323   __ bind(notVolatile);
3324 }
3325 
3326 void TemplateTable::fast_accessfield(TosState state) {
3327   transition(atos, state);
3328 
3329   // Do the JVMTI work here to avoid disturbing the register state below
3330   if (JvmtiExport::can_post_field_access()) {
3331     // Check to see if a field access watch has been set before we
3332     // take the time to call into the VM.


3377   case Bytecodes::_fast_lgetfield:
3378 #ifdef _LP64
3379     __ movq(rax, field);
3380 #else
3381     __ stop("should not be rewritten");
3382 #endif
3383     break;
3384   case Bytecodes::_fast_igetfield:
3385     __ movl(rax, field);
3386     break;
3387   case Bytecodes::_fast_bgetfield:
3388     __ movsbl(rax, field);
3389     break;
3390   case Bytecodes::_fast_sgetfield:
3391     __ load_signed_short(rax, field);
3392     break;
3393   case Bytecodes::_fast_cgetfield:
3394     __ load_unsigned_short(rax, field);
3395     break;
3396   case Bytecodes::_fast_fgetfield:
3397     __ load_float(field);

3398     break;
3399   case Bytecodes::_fast_dgetfield:
3400     __ load_double(field);

3401     break;
3402   default:
3403     ShouldNotReachHere();
3404   }
3405   // [jk] not needed currently
3406   // if (os::is_MP()) {
3407   //   Label notVolatile;
3408   //   __ testl(rdx, rdx);
3409   //   __ jcc(Assembler::zero, notVolatile);
3410   //   __ membar(Assembler::LoadLoad);
3411   //   __ bind(notVolatile);
3412   //};
3413 }
3414 
3415 void TemplateTable::fast_xaccess(TosState state) {
3416   transition(vtos, state);
3417 
3418   // get receiver
3419   __ movptr(rax, aaddress(0));
3420   // access constant pool cache
3421   __ get_cache_and_index_at_bcp(rcx, rdx, 2);
3422   __ movptr(rbx,
3423             Address(rcx, rdx, Address::times_ptr,
3424                     in_bytes(ConstantPoolCache::base_offset() +
3425                              ConstantPoolCacheEntry::f2_offset())));
3426   // make sure exception is reported in correct bcp range (getfield is
3427   // next instruction)
3428   __ increment(rbcp);
3429   __ null_check(rax);
3430   const Address field = Address(rax, rbx, Address::times_1, 0*wordSize);
3431   switch (state) {
3432   case itos:
3433     __ movl(rax, field);
3434     break;
3435   case atos:
3436     __ load_heap_oop(rax, field);
3437     __ verify_oop(rax);
3438     break;
3439   case ftos:
3440     __ load_float(field);

3441     break;
3442   default:
3443     ShouldNotReachHere();
3444   }
3445 
3446   // [jk] not needed currently
3447   // if (os::is_MP()) {
3448   //   Label notVolatile;
3449   //   __ movl(rdx, Address(rcx, rdx, Address::times_8,
3450   //                        in_bytes(ConstantPoolCache::base_offset() +
3451   //                                 ConstantPoolCacheEntry::flags_offset())));
3452   //   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3453   //   __ testl(rdx, 0x1);
3454   //   __ jcc(Assembler::zero, notVolatile);
3455   //   __ membar(Assembler::LoadLoad);
3456   //   __ bind(notVolatile);
3457   // }
3458 
3459   __ decrement(rbcp);
3460 }
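// (fast_xaccess() implements the fused aload_0 + fast getfield bytecodes;
// the rbcp increment/decrement above temporarily makes the bcp point at the
// getfield so that a NullPointerException from null_check is reported in
// that bytecode's range, and then restores it for normal dispatch.)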

