631 }
632
633 // See if we've got enough room on the stack for locals plus overhead
634 // below JavaThread::stack_overflow_limit(). If not, throw a StackOverflowError
635 // without going through the signal handler, i.e., reserved and yellow zones
636 // will not be made usable. The shadow zone must suffice to handle the
637 // overflow.
638 // The expression stack grows down incrementally, so the normal guard
639 // page mechanism will work for that.
640 //
641 // NOTE: The additional locals are also always pushed (this wasn't
642 // obvious in generate_method_entry), so the guard should work for
643 // them too.
644 //
645 // Args:
646 // r3: number of additional locals this frame needs (what we must check)
647 // rmethod: Method*
648 //
649 // Kills:
650 // r0
651 void TemplateInterpreterGenerator::generate_stack_overflow_check(void) {
652
653 // monitor entry size: see picture of stack set
654 // (generate_method_entry) and frame_amd64.hpp
655 const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
656
657 // total overhead size: entry_size + (saved rbp through expr stack
658 // bottom). be sure to change this if you add/subtract anything
659 // to/from the overhead area
660 const int overhead_size =
661 -(frame::interpreter_frame_initial_sp_offset * wordSize) + entry_size;
662
663 const int page_size = os::vm_page_size();
664
665 Label after_frame_check;
666
667 // see if the frame is greater than one page in size. If so,
668 // then we need to verify there is enough stack space remaining
669 // for the additional locals.
670 //
671 // Note that we use SUBS rather than CMP here because the immediate
|
631 }
632
633 // See if we've got enough room on the stack for locals plus overhead
634 // below JavaThread::stack_overflow_limit(). If not, throw a StackOverflowError
635 // without going through the signal handler, i.e., reserved and yellow zones
636 // will not be made usable. The shadow zone must suffice to handle the
637 // overflow.
638 // The expression stack grows down incrementally, so the normal guard
639 // page mechanism will work for that.
640 //
641 // NOTE: The additional locals are also always pushed (this wasn't
642 // obvious in generate_method_entry), so the guard should work for
643 // them too.
644 //
645 // Args:
646 // r3: number of additional locals this frame needs (what we must check)
647 // rmethod: Method*
648 //
649 // Kills:
650 // r0
651 void TemplateInterpreterGenerator::generate_stack_overflow_check(Register unused1, Register unused2, Register unused3) {
652 assert(unused1 == noreg && unused2 == noreg && unused3 == noreg, "Not used on aarch");
653
654 // monitor entry size: see picture of stack set
655 // (generate_method_entry) and frame_amd64.hpp
656 const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
657
658 // total overhead size: entry_size + (saved rbp through expr stack
659 // bottom). be sure to change this if you add/subtract anything
660 // to/from the overhead area
661 const int overhead_size =
662 -(frame::interpreter_frame_initial_sp_offset * wordSize) + entry_size;
663
664 const int page_size = os::vm_page_size();
665
666 Label after_frame_check;
667
668 // see if the frame is greater than one page in size. If so,
669 // then we need to verify there is enough stack space remaining
670 // for the additional locals.
671 //
672 // Note that we use SUBS rather than CMP here because the immediate
|