src/cpu/x86/vm/templateTable_x86_64.cpp

Print this page
rev 5510 : 8027252: Crash in interpreter because get_unsigned_2_byte_index_at_bcp reads 4 bytes
Summary: Use 2-byte loads to read indexes from the bytecode stream, avoiding out-of-bounds reads.
Reviewed-by:


 551 
// fload: push the float local named by a one-byte index onto the stack.
// transition(vtos, ftos) records the tos-state change for the template
// interpreter; the value ends up in xmm0 (the float tos register).
 552 void TemplateTable::fload() {
 553   transition(vtos, ftos);
       // locals_index presumably loads the negated 1-byte local index into
       // rbx (mirroring locals_index_wide below) -- confirm in its definition.
 554   locals_index(rbx);
 555   __ movflt(xmm0, faddress(rbx));
 556 }
 557 
// dload: push the double local named by a one-byte index onto the stack.
 558 void TemplateTable::dload() {
 559   transition(vtos, dtos);
       // locals_index presumably loads the negated 1-byte local index into
       // rbx -- confirm against its definition (not visible here).
 560   locals_index(rbx);
       // Leave the 64-bit double in xmm0, the interpreter's dtos register.
 561   __ movdbl(xmm0, daddress(rbx));
 562 }
 563 
// aload: push the object-reference local onto the stack (atos lives in rax).
 564 void TemplateTable::aload() {
 565   transition(vtos, atos);
 566   locals_index(rbx);
       // movptr: full pointer-width load of the reference into rax.
 567   __ movptr(rax, aaddress(rbx));
 568 }
 569 
 570 void TemplateTable::locals_index_wide(Register reg) {
 571   __ movl(reg, at_bcp(2));
 572   __ bswapl(reg);
 573   __ shrl(reg, 16);
 574   __ negptr(reg);
 575 }
 576 
// wide_iload: iload with a 2-byte (wide) local index.
 577 void TemplateTable::wide_iload() {
 578   transition(vtos, itos);
       // locals_index_wide leaves the negated 2-byte index in rbx.
 579   locals_index_wide(rbx);
 580   __ movl(rax, iaddress(rbx));
 581 }
 582 
// wide_lload: lload with a 2-byte (wide) local index; 64-bit value in rax.
 583 void TemplateTable::wide_lload() {
 584   transition(vtos, ltos);
 585   locals_index_wide(rbx);
 586   __ movq(rax, laddress(rbx));
 587 }
 588 
 589 void TemplateTable::wide_fload() {
 590   transition(vtos, ftos);
 591   locals_index_wide(rbx);


1558     __ jccb(Assembler::parity, done);
1559     __ jccb(Assembler::above, done);
1560     __ movl(rax, 0);
1561     __ jccb(Assembler::equal, done);
1562     __ decrementl(rax);
1563   }
1564   __ bind(done);
1565 }
1566 
1567 void TemplateTable::branch(bool is_jsr, bool is_wide) {
1568   __ get_method(rcx); // rcx holds method
1569   __ profile_taken_branch(rax, rbx); // rax holds updated MDP, rbx
1570                                      // holds bumped taken count
1571 
1572   const ByteSize be_offset = MethodCounters::backedge_counter_offset() +
1573                              InvocationCounter::counter_offset();
1574   const ByteSize inv_offset = MethodCounters::invocation_counter_offset() +
1575                               InvocationCounter::counter_offset();
1576 
1577   // Load up edx with the branch displacement

1578   __ movl(rdx, at_bcp(1));



1579   __ bswapl(rdx);
1580 
1581   if (!is_wide) {
1582     __ sarl(rdx, 16);
1583   }
1584   __ movl2ptr(rdx, rdx);
1585 
1586   // Handle all the JSR stuff here, then exit.
1587   // It's much shorter and cleaner than intermingling with the non-JSR
1588   // normal-branch stuff occurring below.
1589   if (is_jsr) {
1590     // Pre-load the next target bytecode into rbx
1591     __ load_unsigned_byte(rbx, Address(r13, rdx, Address::times_1, 0));
1592 
1593     // compute return address as bci in rax
1594     __ lea(rax, at_bcp((is_wide ? 5 : 3) -
1595                         in_bytes(ConstMethod::codes_offset())));
1596     __ subptr(rax, Address(rcx, Method::const_offset()));
1597     // Adjust the bcp in r13 by the displacement in rdx
1598     __ addptr(r13, rdx);




 551 
// fload: push the float local named by a one-byte index onto the stack.
// transition(vtos, ftos) records the tos-state change for the template
// interpreter; the value ends up in xmm0 (the float tos register).
 552 void TemplateTable::fload() {
 553   transition(vtos, ftos);
       // locals_index presumably loads the negated 1-byte local index into
       // rbx (mirroring locals_index_wide below) -- confirm in its definition.
 554   locals_index(rbx);
 555   __ movflt(xmm0, faddress(rbx));
 556 }
 557 
// dload: push the double local named by a one-byte index onto the stack.
 558 void TemplateTable::dload() {
 559   transition(vtos, dtos);
       // locals_index presumably loads the negated 1-byte local index into
       // rbx -- confirm against its definition (not visible here).
 560   locals_index(rbx);
       // Leave the 64-bit double in xmm0, the interpreter's dtos register.
 561   __ movdbl(xmm0, daddress(rbx));
 562 }
 563 
// aload: push the object-reference local onto the stack (atos lives in rax).
 564 void TemplateTable::aload() {
 565   transition(vtos, atos);
 566   locals_index(rbx);
       // movptr: full pointer-width load of the reference into rax.
 567   __ movptr(rax, aaddress(rbx));
 568 }
 569 
// Compute the negated wide (2-byte) local-variable index in 'reg'.
 570 void TemplateTable::locals_index_wide(Register reg) {
       // Load only the two index bytes at bcp + 2, zero-extended; a wider
       // load could read past the end of the bytecode stream (8027252).
 571   __ load_unsigned_short(reg, at_bcp(2));
       // The index is stored big-endian in the stream: bswapl + shrl(16)
       // byte-swaps the zero-extended 16-bit value back into the low bits.
 572   __ bswapl(reg);
 573   __ shrl(reg, 16);
       // Negate so callers can use reg as a negative scaled displacement
       // when forming local-slot addresses (iaddress/laddress/...).
 574   __ negptr(reg);
 575 }
 576 
// wide_iload: iload with a 2-byte (wide) local index.
 577 void TemplateTable::wide_iload() {
 578   transition(vtos, itos);
       // locals_index_wide leaves the negated 2-byte index in rbx.
 579   locals_index_wide(rbx);
 580   __ movl(rax, iaddress(rbx));
 581 }
 582 
// wide_lload: lload with a 2-byte (wide) local index; 64-bit value in rax.
 583 void TemplateTable::wide_lload() {
 584   transition(vtos, ltos);
 585   locals_index_wide(rbx);
 586   __ movq(rax, laddress(rbx));
 587 }
 588 
 589 void TemplateTable::wide_fload() {
 590   transition(vtos, ftos);
 591   locals_index_wide(rbx);


1558     __ jccb(Assembler::parity, done);
1559     __ jccb(Assembler::above, done);
1560     __ movl(rax, 0);
1561     __ jccb(Assembler::equal, done);
1562     __ decrementl(rax);
1563   }
1564   __ bind(done);
1565 }
1566 
1567 void TemplateTable::branch(bool is_jsr, bool is_wide) {
1568   __ get_method(rcx); // rcx holds method
1569   __ profile_taken_branch(rax, rbx); // rax holds updated MDP, rbx
1570                                      // holds bumped taken count
1571 
1572   const ByteSize be_offset = MethodCounters::backedge_counter_offset() +
1573                              InvocationCounter::counter_offset();
1574   const ByteSize inv_offset = MethodCounters::invocation_counter_offset() +
1575                               InvocationCounter::counter_offset();
1576 
1577   // Load up edx with the branch displacement
1578   if (is_wide) {
1579     __ movl(rdx, at_bcp(1));
1580   } else {
1581     __ load_signed_short(rdx, at_bcp(1));
1582   }
1583   __ bswapl(rdx);
1584 
1585   if (!is_wide) {
1586     __ sarl(rdx, 16);
1587   }
1588   __ movl2ptr(rdx, rdx);
1589 
1590   // Handle all the JSR stuff here, then exit.
1591   // It's much shorter and cleaner than intermingling with the non-JSR
1592   // normal-branch stuff occurring below.
1593   if (is_jsr) {
1594     // Pre-load the next target bytecode into rbx
1595     __ load_unsigned_byte(rbx, Address(r13, rdx, Address::times_1, 0));
1596 
1597     // compute return address as bci in rax
1598     __ lea(rax, at_bcp((is_wide ? 5 : 3) -
1599                         in_bytes(ConstMethod::codes_offset())));
1600     __ subptr(rax, Address(rcx, Method::const_offset()));
1601     // Adjust the bcp in r13 by the displacement in rdx
1602     __ addptr(r13, rdx);