src/cpu/sparc/vm/templateTable_sparc.cpp

Print this page




1321 
1322 
// Emits interpreter code for the 'ineg' bytecode: arithmetic negation of
// the int cached in the top-of-stack register Otos_i.
1323 void TemplateTable::ineg() {
1324   transition(itos, itos);  // int TOS in, int TOS out (value stays cached in Otos_i)
1325   __ neg(Otos_i);          // Otos_i = -Otos_i
1326 }
1327 
1328 
// Emits interpreter code for the 'lneg' bytecode: negate the long on TOS.
1329 void TemplateTable::lneg() {
1330   transition(ltos, ltos);
1331 #ifdef _LP64
1332   __ sub(G0, Otos_l, Otos_l);  // 64-bit: Otos_l = 0 - Otos_l (G0 is the hard-wired zero register)
1333 #else
1334   __ lneg(Otos_l1, Otos_l2);   // 32-bit: negate the two-register pair holding the long
1335 #endif
1336 }
1337 
1338 
// Emits interpreter code for the 'fneg' bytecode: negate the float on TOS.
1339 void TemplateTable::fneg() {
1340   transition(ftos, ftos);
1341   __ fneg(FloatRegisterImpl::S, Ftos_f);  // single-precision negate; one-operand (in-place) form
1342 }
1343 
1344 
// Emits interpreter code for the 'dneg' bytecode: negate the double on TOS.
1345 void TemplateTable::dneg() {
1346   transition(dtos, dtos);
1347   // v8 has fnegd if source and dest are the same
1348   __ fneg(FloatRegisterImpl::D, Ftos_f);  // double-precision negate, in place on the TOS float register
1349 }
1350 
1351 
// Emits interpreter code for the 'iinc' bytecode: increment a local int
// variable by the signed 8-bit constant embedded in the bytecode stream.
1352 void TemplateTable::iinc() {
1353   transition(vtos, vtos);
1354   locals_index(G3_scratch);                // decode the local-variable index into G3_scratch
1355   __ ldsb(Lbcp, 2, O2);  // load constant  -- signed byte at bcp+2 is the increment
1356   __ access_local_int(G3_scratch, Otos_i); // load the local's int value into Otos_i
1357   __ add(Otos_i, O2, Otos_i);              // Otos_i += increment
1358   __ st(Otos_i, G3_scratch, 0);    // access_local_int puts E.A. in G3_scratch
1359 }
1360 
1361 
1362 void TemplateTable::wide_iinc() {
1363   transition(vtos, vtos);
1364   locals_index_wide(G3_scratch);
1365   __ get_2_byte_integer_at_bcp( 4,  O2, O3, InterpreterMacroAssembler::Signed);
1366   __ access_local_int(G3_scratch, Otos_i);
1367   __ add(Otos_i, O3, Otos_i);
1368   __ st(Otos_i, G3_scratch, 0);    // access_local_int puts E.A. in G3_scratch


1453 
1454    case Bytecodes::_i2s:
1455     __ sll(Otos_i, 16, Otos_i);
1456     __ sra(Otos_i, 16, Otos_i);
1457     break;
1458 
1459    case Bytecodes::_l2i:
1460 #ifndef _LP64
1461     __ mov(Otos_l2, Otos_i);
1462 #else
1463     // Sign-extend into the high 32 bits
1464     __ sra(Otos_l, 0, Otos_i);
1465 #endif
1466     break;
1467 
1468    case Bytecodes::_l2f:
1469    case Bytecodes::_l2d:
1470     __ st_long(Otos_l, __ d_tmp);
1471     __ ldf(FloatRegisterImpl::D, __ d_tmp, Ftos_d);
1472 
1473     if (VM_Version::v9_instructions_work()) {
1474       if (bytecode() == Bytecodes::_l2f) {
1475         __ fxtof(FloatRegisterImpl::S, Ftos_d, Ftos_f);
1476       } else {
1477         __ fxtof(FloatRegisterImpl::D, Ftos_d, Ftos_d);
1478       }
1479     } else {
1480       __ call_VM_leaf(
1481         Lscratch,
1482         bytecode() == Bytecodes::_l2f
1483           ? CAST_FROM_FN_PTR(address, SharedRuntime::l2f)
1484           : CAST_FROM_FN_PTR(address, SharedRuntime::l2d)
1485       );
1486     }
1487     break;
1488 
1489   case Bytecodes::_f2i:  {
1490       Label isNaN;
1491       // result must be 0 if value is NaN; test by comparing value to itself
1492       __ fcmp(FloatRegisterImpl::S, Assembler::fcc0, Ftos_f, Ftos_f);
1493       // According to the v8 manual, you have to have a non-fp instruction
1494       // between fcmp and fb.
1495       if (!VM_Version::v9_instructions_work()) {
1496         __ nop();
1497       }
1498       __ fb(Assembler::f_unordered, true, Assembler::pn, isNaN);
1499       __ delayed()->clr(Otos_i);                                     // NaN
1500       __ ftoi(FloatRegisterImpl::S, Ftos_f, F30);
1501       __ stf(FloatRegisterImpl::S, F30, __ d_tmp);
1502       __ ld(__ d_tmp, Otos_i);
1503       __ bind(isNaN);
1504     }
1505     break;
1506 
1507    case Bytecodes::_f2l:
1508     // must uncache tos
1509     __ push_f();
1510 #ifdef _LP64
1511     __ pop_f(F1);
1512 #else
1513     __ pop_i(O0);
1514 #endif
1515     __ call_VM_leaf(Lscratch, CAST_FROM_FN_PTR(address, SharedRuntime::f2l));
1516     break;
1517 


1520     break;
1521 
1522    case Bytecodes::_d2i:
1523    case Bytecodes::_d2l:
1524     // must uncache tos
1525     __ push_d();
1526 #ifdef _LP64
1527     // LP64 calling conventions pass first double arg in D0
1528     __ pop_d( Ftos_d );
1529 #else
1530     __ pop_i( O0 );
1531     __ pop_i( O1 );
1532 #endif
1533     __ call_VM_leaf(Lscratch,
1534         bytecode() == Bytecodes::_d2i
1535           ? CAST_FROM_FN_PTR(address, SharedRuntime::d2i)
1536           : CAST_FROM_FN_PTR(address, SharedRuntime::d2l));
1537     break;
1538 
1539     case Bytecodes::_d2f:
1540     if (VM_Version::v9_instructions_work()) {
1541       __ ftof( FloatRegisterImpl::D, FloatRegisterImpl::S, Ftos_d, Ftos_f);
1542     }
1543     else {
1544       // must uncache tos
1545       __ push_d();
1546       __ pop_i(O0);
1547       __ pop_i(O1);
1548       __ call_VM_leaf(Lscratch, CAST_FROM_FN_PTR(address, SharedRuntime::d2f));
1549     }
1550     break;
1551 
1552     default: ShouldNotReachHere();
1553   }
1554   __ bind(done);
1555 }
1556 
1557 
// Emits interpreter code for the 'lcmp' bytecode: pop one long, compare it
// with the long on TOS, and leave an int comparison result in Otos_i.
1558 void TemplateTable::lcmp() {
1559   transition(ltos, itos);
1560 
1561 #ifdef _LP64
1562   __ pop_l(O1); // pop off value 1, value 2 is in O0
1563   __ lcmp( O1, Otos_l, Otos_i );               // 64-bit: single-register compare
1564 #else
1565   __ pop_l(O2); // cmp O2,3 to O0,1
1566   __ lcmp( O2, O3, Otos_l1, Otos_l2, Otos_i ); // 32-bit: compare the two register pairs
1567 #endif
1568 }
1569 


1939   // and start
1940   Label entry;
1941   __ ba(entry);
1942   __ delayed()->ld( Rarray, -BytesPerInt, Rj);
1943   // (Rj is already in the native byte-ordering.)
1944 
1945   // binary search loop
1946   { Label loop;
1947     __ bind( loop );
1948     // int h = (i + j) >> 1;
1949     __ sra( Rh, 1, Rh );
1950     // if (key < array[h].fast_match()) {
1951     //   j = h;
1952     // } else {
1953     //   i = h;
1954     // }
1955     __ sll( Rh, log_entry_size, Rscratch );
1956     __ ld( Rarray, Rscratch, Rscratch );
1957     // (Rscratch is already in the native byte-ordering.)
1958     __ cmp( Rkey, Rscratch );
1959     if ( VM_Version::v9_instructions_work() ) {
1960       __ movcc( Assembler::less,         false, Assembler::icc, Rh, Rj );  // j = h if (key <  array[h].fast_match())
1961       __ movcc( Assembler::greaterEqual, false, Assembler::icc, Rh, Ri );  // i = h if (key >= array[h].fast_match())
1962     }
1963     else {
1964       Label end_of_if;
1965       __ br( Assembler::less, true, Assembler::pt, end_of_if );
1966       __ delayed()->mov( Rh, Rj ); // if (<) Rj = Rh
1967       __ mov( Rh, Ri );            // else i = h
1968       __ bind(end_of_if);          // }
1969     }
1970 
1971     // while (i+1 < j)
1972     __ bind( entry );
1973     __ add( Ri, 1, Rscratch );
1974     __ cmp(Rscratch, Rj);
1975     __ br( Assembler::less, true, Assembler::pt, loop );
1976     __ delayed()->add( Ri, Rj, Rh ); // start h = i + j  >> 1;
1977   }
1978 
1979   // end of binary search, result index is i (must check again!)
1980   Label default_case;
1981   Label continue_execution;
1982   if (ProfileInterpreter) {
1983     __ mov( Ri, Rh );              // Save index in i for profiling
1984   }
1985   __ sll( Ri, log_entry_size, Ri );
1986   __ ld( Rarray, Ri, Rscratch );
1987   // (Rscratch is already in the native byte-ordering.)
1988   __ cmp( Rkey, Rscratch );
1989   __ br( Assembler::notEqual, true, Assembler::pn, default_case );


3401   // Allocation in the shared Eden
3402   if (allow_shared_alloc) {
3403     Register RoldTopValue = G1_scratch;
3404     Register RtopAddr = G3_scratch;
3405     Register RnewTopValue = RallocatedObject;
3406     Register RendValue = Rscratch;
3407 
3408     __ set((intptr_t)Universe::heap()->top_addr(), RtopAddr);
3409 
3410     Label retry;
3411     __ bind(retry);
3412     __ set((intptr_t)Universe::heap()->end_addr(), RendValue);
3413     __ ld_ptr(RendValue, 0, RendValue);
3414     __ ld_ptr(RtopAddr, 0, RoldTopValue);
3415     __ add(RoldTopValue, Roffset, RnewTopValue);
3416 
3417     // RnewTopValue contains the top address after the new object
3418     // has been allocated.
3419     __ cmp_and_brx_short(RnewTopValue, RendValue, Assembler::greaterUnsigned, Assembler::pn, slow_case);
3420 
3421     __ casx_under_lock(RtopAddr, RoldTopValue, RnewTopValue,
3422       VM_Version::v9_instructions_work() ? NULL :
3423       (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr());
3424 
3425     // if someone beat us on the allocation, try again, otherwise continue
3426     __ cmp_and_brx_short(RoldTopValue, RnewTopValue, Assembler::notEqual, Assembler::pn, retry);
3427 
3428     // bump total bytes allocated by this thread
3429     // RoldTopValue and RtopAddr are dead, so can use G1 and G3
3430     __ incr_allocated_bytes(Roffset, G1_scratch, G3_scratch);
3431   }
3432 
3433   if (UseTLAB || Universe::heap()->supports_inline_contig_alloc()) {
3434     // clear object fields
3435     __ bind(initialize_object);
3436     __ deccc(Roffset, sizeof(oopDesc));
3437     __ br(Assembler::zero, false, Assembler::pt, initialize_header);
3438     __ delayed()->add(RallocatedObject, sizeof(oopDesc), G3_scratch);
3439 
3440     // initialize remaining object fields
3441     if (UseBlockZeroing) {
3442       // Use BIS for zeroing
3443       __ bis_zeroing(G3_scratch, Roffset, G1_scratch, initialize_header);


3684 
3685   assert(O0 == Otos_i, "Be sure where the object to lock is");
3686 
3687   // find a free slot in the monitor block
3688 
3689 
3690   // initialize entry pointer
3691   __ clr(O1); // points to free slot or NULL
3692 
3693   {
3694     Label entry, loop, exit;
3695     __ add( __ top_most_monitor(), O2 ); // last one to check
3696     __ ba( entry );
3697     __ delayed()->mov( Lmonitors, O3 ); // first one to check
3698 
3699 
3700     __ bind( loop );
3701 
3702     __ verify_oop(O4);          // verify each monitor's oop
3703     __ tst(O4); // is this entry unused?
3704     if (VM_Version::v9_instructions_work())
3705       __ movcc( Assembler::zero, false, Assembler::ptr_cc, O3, O1);
3706     else {
3707       Label L;
3708       __ br( Assembler::zero, true, Assembler::pn, L );
3709       __ delayed()->mov(O3, O1); // rememeber this one if match
3710       __ bind(L);
3711     }
3712 
3713     __ cmp(O4, O0); // check if current entry is for same object
3714     __ brx( Assembler::equal, false, Assembler::pn, exit );
3715     __ delayed()->inc( O3, frame::interpreter_frame_monitor_size() * wordSize ); // check next one
3716 
3717     __ bind( entry );
3718 
3719     __ cmp( O3, O2 );
3720     __ brx( Assembler::lessEqualUnsigned, true, Assembler::pt, loop );
3721     __ delayed()->ld_ptr(O3, BasicObjectLock::obj_offset_in_bytes(), O4);
3722 
3723     __ bind( exit );
3724   }
3725 
3726   { Label allocated;
3727 
3728     // found free slot?
3729     __ br_notnull_short(O1, Assembler::pn, allocated);
3730 
3731     __ add_monitor_to_stack( false, O2, O3 );




1321 
1322 
// Emits interpreter code for the 'ineg' bytecode: arithmetic negation of
// the int cached in the top-of-stack register Otos_i.
1323 void TemplateTable::ineg() {
1324   transition(itos, itos);  // int TOS in, int TOS out (value stays cached in Otos_i)
1325   __ neg(Otos_i);          // Otos_i = -Otos_i
1326 }
1327 
1328 
// Emits interpreter code for the 'lneg' bytecode: negate the long on TOS.
1329 void TemplateTable::lneg() {
1330   transition(ltos, ltos);
1331 #ifdef _LP64
1332   __ sub(G0, Otos_l, Otos_l);  // 64-bit: Otos_l = 0 - Otos_l (G0 is the hard-wired zero register)
1333 #else
1334   __ lneg(Otos_l1, Otos_l2);   // 32-bit: negate the two-register pair holding the long
1335 #endif
1336 }
1337 
1338 
// Emits interpreter code for the 'fneg' bytecode: negate the float on TOS.
// Note: this version uses the explicit two-operand (src, dst) form of fneg
// with src == dst, unlike the older one-operand variant.
1339 void TemplateTable::fneg() {
1340   transition(ftos, ftos);
1341   __ fneg(FloatRegisterImpl::S, Ftos_f, Ftos_f);  // single-precision negate, src == dst
1342 }
1343 
1344 
// Emits interpreter code for the 'dneg' bytecode: negate the double on TOS.
// Uses the two-operand (src, dst) fneg form with src == dst.
1345 void TemplateTable::dneg() {
1346   transition(dtos, dtos);
1347   __ fneg(FloatRegisterImpl::D, Ftos_f, Ftos_f);  // double-precision negate, src == dst

1348 }
1349 
1350 
// Emits interpreter code for the 'iinc' bytecode: increment a local int
// variable by the signed 8-bit constant embedded in the bytecode stream.
1351 void TemplateTable::iinc() {
1352   transition(vtos, vtos);
1353   locals_index(G3_scratch);                // decode the local-variable index into G3_scratch
1354   __ ldsb(Lbcp, 2, O2);  // load constant  -- signed byte at bcp+2 is the increment
1355   __ access_local_int(G3_scratch, Otos_i); // load the local's int value into Otos_i
1356   __ add(Otos_i, O2, Otos_i);              // Otos_i += increment
1357   __ st(Otos_i, G3_scratch, 0);    // access_local_int puts E.A. in G3_scratch
1358 }
1359 
1360 
1361 void TemplateTable::wide_iinc() {
1362   transition(vtos, vtos);
1363   locals_index_wide(G3_scratch);
1364   __ get_2_byte_integer_at_bcp( 4,  O2, O3, InterpreterMacroAssembler::Signed);
1365   __ access_local_int(G3_scratch, Otos_i);
1366   __ add(Otos_i, O3, Otos_i);
1367   __ st(Otos_i, G3_scratch, 0);    // access_local_int puts E.A. in G3_scratch


1452 
1453    case Bytecodes::_i2s:
1454     __ sll(Otos_i, 16, Otos_i);
1455     __ sra(Otos_i, 16, Otos_i);
1456     break;
1457 
1458    case Bytecodes::_l2i:
1459 #ifndef _LP64
1460     __ mov(Otos_l2, Otos_i);
1461 #else
1462     // Sign-extend into the high 32 bits
1463     __ sra(Otos_l, 0, Otos_i);
1464 #endif
1465     break;
1466 
1467    case Bytecodes::_l2f:
1468    case Bytecodes::_l2d:
1469     __ st_long(Otos_l, __ d_tmp);
1470     __ ldf(FloatRegisterImpl::D, __ d_tmp, Ftos_d);
1471 

1472     if (bytecode() == Bytecodes::_l2f) {
1473       __ fxtof(FloatRegisterImpl::S, Ftos_d, Ftos_f);
1474     } else {
1475       __ fxtof(FloatRegisterImpl::D, Ftos_d, Ftos_d);
1476     }








1477     break;
1478 
1479   case Bytecodes::_f2i:  {
1480       Label isNaN;
1481       // result must be 0 if value is NaN; test by comparing value to itself
1482       __ fcmp(FloatRegisterImpl::S, Assembler::fcc0, Ftos_f, Ftos_f);





1483       __ fb(Assembler::f_unordered, true, Assembler::pn, isNaN);
1484       __ delayed()->clr(Otos_i);                                     // NaN
1485       __ ftoi(FloatRegisterImpl::S, Ftos_f, F30);
1486       __ stf(FloatRegisterImpl::S, F30, __ d_tmp);
1487       __ ld(__ d_tmp, Otos_i);
1488       __ bind(isNaN);
1489     }
1490     break;
1491 
1492    case Bytecodes::_f2l:
1493     // must uncache tos
1494     __ push_f();
1495 #ifdef _LP64
1496     __ pop_f(F1);
1497 #else
1498     __ pop_i(O0);
1499 #endif
1500     __ call_VM_leaf(Lscratch, CAST_FROM_FN_PTR(address, SharedRuntime::f2l));
1501     break;
1502 


1505     break;
1506 
1507    case Bytecodes::_d2i:
1508    case Bytecodes::_d2l:
1509     // must uncache tos
1510     __ push_d();
1511 #ifdef _LP64
1512     // LP64 calling conventions pass first double arg in D0
1513     __ pop_d( Ftos_d );
1514 #else
1515     __ pop_i( O0 );
1516     __ pop_i( O1 );
1517 #endif
1518     __ call_VM_leaf(Lscratch,
1519         bytecode() == Bytecodes::_d2i
1520           ? CAST_FROM_FN_PTR(address, SharedRuntime::d2i)
1521           : CAST_FROM_FN_PTR(address, SharedRuntime::d2l));
1522     break;
1523 
1524     case Bytecodes::_d2f:

1525       __ ftof( FloatRegisterImpl::D, FloatRegisterImpl::S, Ftos_d, Ftos_f);








1526     break;
1527 
1528     default: ShouldNotReachHere();
1529   }
1530   __ bind(done);
1531 }
1532 
1533 
// Emits interpreter code for the 'lcmp' bytecode: pop one long, compare it
// with the long on TOS, and leave an int comparison result in Otos_i.
1534 void TemplateTable::lcmp() {
1535   transition(ltos, itos);
1536 
1537 #ifdef _LP64
1538   __ pop_l(O1); // pop off value 1, value 2 is in O0
1539   __ lcmp( O1, Otos_l, Otos_i );               // 64-bit: single-register compare
1540 #else
1541   __ pop_l(O2); // cmp O2,3 to O0,1
1542   __ lcmp( O2, O3, Otos_l1, Otos_l2, Otos_i ); // 32-bit: compare the two register pairs
1543 #endif
1544 }
1545 


1915   // and start
1916   Label entry;
1917   __ ba(entry);
1918   __ delayed()->ld( Rarray, -BytesPerInt, Rj);
1919   // (Rj is already in the native byte-ordering.)
1920 
1921   // binary search loop
1922   { Label loop;
1923     __ bind( loop );
1924     // int h = (i + j) >> 1;
1925     __ sra( Rh, 1, Rh );
1926     // if (key < array[h].fast_match()) {
1927     //   j = h;
1928     // } else {
1929     //   i = h;
1930     // }
1931     __ sll( Rh, log_entry_size, Rscratch );
1932     __ ld( Rarray, Rscratch, Rscratch );
1933     // (Rscratch is already in the native byte-ordering.)
1934     __ cmp( Rkey, Rscratch );

1935     __ movcc( Assembler::less,         false, Assembler::icc, Rh, Rj );  // j = h if (key <  array[h].fast_match())
1936     __ movcc( Assembler::greaterEqual, false, Assembler::icc, Rh, Ri );  // i = h if (key >= array[h].fast_match())








1937 
1938     // while (i+1 < j)
1939     __ bind( entry );
1940     __ add( Ri, 1, Rscratch );
1941     __ cmp(Rscratch, Rj);
1942     __ br( Assembler::less, true, Assembler::pt, loop );
1943     __ delayed()->add( Ri, Rj, Rh ); // start h = i + j  >> 1;
1944   }
1945 
1946   // end of binary search, result index is i (must check again!)
1947   Label default_case;
1948   Label continue_execution;
1949   if (ProfileInterpreter) {
1950     __ mov( Ri, Rh );              // Save index in i for profiling
1951   }
1952   __ sll( Ri, log_entry_size, Ri );
1953   __ ld( Rarray, Ri, Rscratch );
1954   // (Rscratch is already in the native byte-ordering.)
1955   __ cmp( Rkey, Rscratch );
1956   __ br( Assembler::notEqual, true, Assembler::pn, default_case );


3368   // Allocation in the shared Eden
3369   if (allow_shared_alloc) {
3370     Register RoldTopValue = G1_scratch;
3371     Register RtopAddr = G3_scratch;
3372     Register RnewTopValue = RallocatedObject;
3373     Register RendValue = Rscratch;
3374 
3375     __ set((intptr_t)Universe::heap()->top_addr(), RtopAddr);
3376 
3377     Label retry;
3378     __ bind(retry);
3379     __ set((intptr_t)Universe::heap()->end_addr(), RendValue);
3380     __ ld_ptr(RendValue, 0, RendValue);
3381     __ ld_ptr(RtopAddr, 0, RoldTopValue);
3382     __ add(RoldTopValue, Roffset, RnewTopValue);
3383 
3384     // RnewTopValue contains the top address after the new object
3385     // has been allocated.
3386     __ cmp_and_brx_short(RnewTopValue, RendValue, Assembler::greaterUnsigned, Assembler::pn, slow_case);
3387 
3388     __ cas_ptr(RtopAddr, RoldTopValue, RnewTopValue);


3389 
3390     // if someone beat us on the allocation, try again, otherwise continue
3391     __ cmp_and_brx_short(RoldTopValue, RnewTopValue, Assembler::notEqual, Assembler::pn, retry);
3392 
3393     // bump total bytes allocated by this thread
3394     // RoldTopValue and RtopAddr are dead, so can use G1 and G3
3395     __ incr_allocated_bytes(Roffset, G1_scratch, G3_scratch);
3396   }
3397 
3398   if (UseTLAB || Universe::heap()->supports_inline_contig_alloc()) {
3399     // clear object fields
3400     __ bind(initialize_object);
3401     __ deccc(Roffset, sizeof(oopDesc));
3402     __ br(Assembler::zero, false, Assembler::pt, initialize_header);
3403     __ delayed()->add(RallocatedObject, sizeof(oopDesc), G3_scratch);
3404 
3405     // initialize remaining object fields
3406     if (UseBlockZeroing) {
3407       // Use BIS for zeroing
3408       __ bis_zeroing(G3_scratch, Roffset, G1_scratch, initialize_header);


3649 
3650   assert(O0 == Otos_i, "Be sure where the object to lock is");
3651 
3652   // find a free slot in the monitor block
3653 
3654 
3655   // initialize entry pointer
3656   __ clr(O1); // points to free slot or NULL
3657 
3658   {
3659     Label entry, loop, exit;
3660     __ add( __ top_most_monitor(), O2 ); // last one to check
3661     __ ba( entry );
3662     __ delayed()->mov( Lmonitors, O3 ); // first one to check
3663 
3664 
3665     __ bind( loop );
3666 
3667     __ verify_oop(O4);          // verify each monitor's oop
3668     __ tst(O4); // is this entry unused?

3669     __ movcc( Assembler::zero, false, Assembler::ptr_cc, O3, O1);






3670 
3671     __ cmp(O4, O0); // check if current entry is for same object
3672     __ brx( Assembler::equal, false, Assembler::pn, exit );
3673     __ delayed()->inc( O3, frame::interpreter_frame_monitor_size() * wordSize ); // check next one
3674 
3675     __ bind( entry );
3676 
3677     __ cmp( O3, O2 );
3678     __ brx( Assembler::lessEqualUnsigned, true, Assembler::pt, loop );
3679     __ delayed()->ld_ptr(O3, BasicObjectLock::obj_offset_in_bytes(), O4);
3680 
3681     __ bind( exit );
3682   }
3683 
3684   { Label allocated;
3685 
3686     // found free slot?
3687     __ br_notnull_short(O1, Assembler::pn, allocated);
3688 
3689     __ add_monitor_to_stack( false, O2, O3 );