src/share/vm/asm/assembler.cpp
Index Unified diffs Context diffs Sdiffs Wdiffs Patch New Old Previous File Next File hs25_8011661 Sdiff src/share/vm/asm

src/share/vm/asm/assembler.cpp

Print this page




  27 #include "asm/macroAssembler.inline.hpp"
  28 #include "asm/codeBuffer.hpp"
  29 #include "runtime/atomic.hpp"
  30 #include "runtime/atomic.inline.hpp"
  31 #include "runtime/icache.hpp"
  32 #include "runtime/os.hpp"
  33 
  34 
  35 // Implementation of AbstractAssembler
  36 //
  37 // The AbstractAssembler generates code into a CodeBuffer. To make code generation faster,
  38 // the assembler keeps a copy of the code buffer's boundaries & modifies them when
  39 // emitting bytes rather than using the code buffer's accessor functions all the time.
  40 // The code buffer is updated via set_code_end(...) after emitting a whole instruction.
  41 
  // Bind this assembler to the instruction section of the given CodeBuffer.
  // A NULL CodeBuffer leaves the assembler's fields unset (caller opted out).
  42 AbstractAssembler::AbstractAssembler(CodeBuffer* code) {
  43   if (code == NULL)  return;
  44   CodeSection* cs = code->insts();
  45   cs->clear_mark();   // new assembler kills old mark
  // A NULL start() means the section got no backing storage: the CodeCache
  // could not provide room, so abort the VM with an out-of-memory error.
  // NOTE(review): size argument 0 and no VMErrorType here — the updated
  // version of this file passes OOM_MMAP_ERROR as well.
  46   if (cs->start() == NULL)  {
  47     vm_exit_out_of_memory(0, err_msg("CodeCache: no room for %s",
  48                                      code->name()));
  49   }
  50   _code_section = cs;
  51   _oop_recorder= code->oop_recorder();
  // _short_branch_delta only exists in debug builds (used for branch-size checks).
  52   DEBUG_ONLY( _short_branch_delta = 0; )
  53 }
  54 
  // Redirect subsequent emission into a different, already-allocated section
  // of the same CodeBuffer (e.g. switching between insts and stubs).
  55 void AbstractAssembler::set_code_section(CodeSection* cs) {
  // Both sections must belong to the same enclosing CodeBuffer.
  56   assert(cs->outer() == code_section()->outer(), "sanity");
  57   assert(cs->is_allocated(), "need to pre-allocate this section");
  58   cs->clear_mark();  // new assembly into this section kills old mark
  59   _code_section = cs;
  60 }
  61 
  62 // Inform CodeBuffer that incoming code and relocation will be for stubs
  63 address AbstractAssembler::start_a_stub(int required_space) {
  64   CodeBuffer*  cb = code();
  65   CodeSection* cs = cb->stubs();
  66   assert(_code_section == cb->insts(), "not in insts?");
  67   if (cs->maybe_expand_to_ensure_remaining(required_space)




  27 #include "asm/macroAssembler.inline.hpp"
  28 #include "asm/codeBuffer.hpp"
  29 #include "runtime/atomic.hpp"
  30 #include "runtime/atomic.inline.hpp"
  31 #include "runtime/icache.hpp"
  32 #include "runtime/os.hpp"
  33 
  34 
  35 // Implementation of AbstractAssembler
  36 //
  37 // The AbstractAssembler generates code into a CodeBuffer. To make code generation faster,
  38 // the assembler keeps a copy of the code buffer's boundaries & modifies them when
  39 // emitting bytes rather than using the code buffer's accessor functions all the time.
  40 // The code buffer is updated via set_code_end(...) after emitting a whole instruction.
  41 
  // Bind this assembler to the instruction section of the given CodeBuffer.
  // A NULL CodeBuffer leaves the assembler's fields unset (caller opted out).
  42 AbstractAssembler::AbstractAssembler(CodeBuffer* code) {
  43   if (code == NULL)  return;
  44   CodeSection* cs = code->insts();
  45   cs->clear_mark();   // new assembler kills old mark
  // A NULL start() means the section got no backing storage: the CodeCache
  // could not provide room, so abort the VM with an out-of-memory error,
  // classified as OOM_MMAP_ERROR for error reporting.
  46   if (cs->start() == NULL)  {
  47     vm_exit_out_of_memory(0, OOM_MMAP_ERROR, err_msg("CodeCache: no room for %s",
  48                                      code->name()));
  49   }
  50   _code_section = cs;
  51   _oop_recorder= code->oop_recorder();
  // _short_branch_delta only exists in debug builds (used for branch-size checks).
  52   DEBUG_ONLY( _short_branch_delta = 0; )
  53 }
  54 
  // Redirect subsequent emission into a different, already-allocated section
  // of the same CodeBuffer (e.g. switching between insts and stubs).
  55 void AbstractAssembler::set_code_section(CodeSection* cs) {
  // Both sections must belong to the same enclosing CodeBuffer.
  56   assert(cs->outer() == code_section()->outer(), "sanity");
  57   assert(cs->is_allocated(), "need to pre-allocate this section");
  58   cs->clear_mark();  // new assembly into this section kills old mark
  59   _code_section = cs;
  60 }
  61 
  62 // Inform CodeBuffer that incoming code and relocation will be for stubs
  63 address AbstractAssembler::start_a_stub(int required_space) {
  64   CodeBuffer*  cb = code();
  65   CodeSection* cs = cb->stubs();
  66   assert(_code_section == cb->insts(), "not in insts?");
  67   if (cs->maybe_expand_to_ensure_remaining(required_space)


src/share/vm/asm/assembler.cpp
Index Unified diffs Context diffs Sdiffs Wdiffs Patch New Old Previous File Next File