src/share/vm/c1/c1_LIRGenerator.hpp

 278 #ifdef CARDTABLEMODREF_POST_BARRIER_HELPER
 279   void CardTableModRef_post_barrier_helper(LIR_OprDesc* addr, LIR_Const* card_table_base);
 280 #endif
 281 
 282 
 283   static LIR_Opr result_register_for(ValueType* type, bool callee = false);
 284 
 285   ciObject* get_jobject_constant(Value value);
 286 
 287   LIRItemList* invoke_visit_arguments(Invoke* x);
 288   void invoke_load_arguments(Invoke* x, LIRItemList* args, const LIR_OprList* arg_list);
 289 
 290   void trace_block_entry(BlockBegin* block);
 291 
 292   // volatile field operations are never patchable because a klass
 293   // must be loaded to know it's volatile, which means that the offset
 294   // is always known as well.
 295   void volatile_field_store(LIR_Opr value, LIR_Address* address, CodeEmitInfo* info);
 296   void volatile_field_load(LIR_Address* address, LIR_Opr result, CodeEmitInfo* info);
 297 


 298   void put_Object_unsafe(LIR_Opr src, LIR_Opr offset, LIR_Opr data, BasicType type, bool is_volatile);
 299   void get_Object_unsafe(LIR_Opr dest, LIR_Opr src, LIR_Opr offset, BasicType type, bool is_volatile);
 300 
 301   void arithmetic_call_op (Bytecodes::Code code, LIR_Opr result, LIR_OprList* args);
 302 
 303   void increment_counter(address counter, BasicType type, int step = 1);
 304   void increment_counter(LIR_Address* addr, int step = 1);
 305 
 306   // is_strictfp is only needed for mul and div (and only generates different code on i486)
 307   void arithmetic_op(Bytecodes::Code code, LIR_Opr result, LIR_Opr left, LIR_Opr right, bool is_strictfp, LIR_Opr tmp, CodeEmitInfo* info = NULL);
 308   // machine dependent.  returns true if it emitted code for the multiply
 309   bool strength_reduce_multiply(LIR_Opr left, int constant, LIR_Opr result, LIR_Opr tmp);
 310 
 311   void store_stack_parameter (LIR_Opr opr, ByteSize offset_from_sp_in_bytes);
 312 
 313   void klass2reg_with_patching(LIR_Opr r, ciMetadata* obj, CodeEmitInfo* info, bool need_resolve = false);
 314 
 315   // this loads the length and compares against the index
 316   void array_range_check          (LIR_Opr array, LIR_Opr index, CodeEmitInfo* null_check_info, CodeEmitInfo* range_check_info);
 317   // For java.nio.Buffer.checkIndex




 278 #ifdef CARDTABLEMODREF_POST_BARRIER_HELPER
 279   void CardTableModRef_post_barrier_helper(LIR_OprDesc* addr, LIR_Const* card_table_base);
 280 #endif
 281 
 282 
 283   static LIR_Opr result_register_for(ValueType* type, bool callee = false);
 284 
 285   ciObject* get_jobject_constant(Value value);
 286 
 287   LIRItemList* invoke_visit_arguments(Invoke* x);
 288   void invoke_load_arguments(Invoke* x, LIRItemList* args, const LIR_OprList* arg_list);
 289 
 290   void trace_block_entry(BlockBegin* block);
 291 
 292   // volatile field operations are never patchable because a klass
 293   // must be loaded to know it's volatile, which means that the offset
 294   // is always known as well.
 295   void volatile_field_store(LIR_Opr value, LIR_Address* address, CodeEmitInfo* info);
 296   void volatile_field_load(LIR_Address* address, LIR_Opr result, CodeEmitInfo* info);
 297 
 298   LIR_Opr unpack_offset(LIR_Opr src, LIR_Opr offset);
 299 
 300   void put_Object_unsafe(LIR_Opr src, LIR_Opr offset, LIR_Opr data, BasicType type, bool is_volatile);
 301   void get_Object_unsafe(LIR_Opr dest, LIR_Opr src, LIR_Opr offset, BasicType type, bool is_volatile);
 302 
 303   void arithmetic_call_op (Bytecodes::Code code, LIR_Opr result, LIR_OprList* args);
 304 
 305   void increment_counter(address counter, BasicType type, int step = 1);
 306   void increment_counter(LIR_Address* addr, int step = 1);
 307 
 308   // is_strictfp is only needed for mul and div (and only generates different code on i486)
 309   void arithmetic_op(Bytecodes::Code code, LIR_Opr result, LIR_Opr left, LIR_Opr right, bool is_strictfp, LIR_Opr tmp, CodeEmitInfo* info = NULL);
 310   // machine dependent.  returns true if it emitted code for the multiply
 311   bool strength_reduce_multiply(LIR_Opr left, int constant, LIR_Opr result, LIR_Opr tmp);
 312 
 313   void store_stack_parameter (LIR_Opr opr, ByteSize offset_from_sp_in_bytes);
 314 
 315   void klass2reg_with_patching(LIR_Opr r, ciMetadata* obj, CodeEmitInfo* info, bool need_resolve = false);
 316 
 317   // this loads the length and compares against the index
 318   void array_range_check          (LIR_Opr array, LIR_Opr index, CodeEmitInfo* null_check_info, CodeEmitInfo* range_check_info);
 319   // For java.nio.Buffer.checkIndex

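As context for the CardTableModRef_post_barrier_helper declaration above: a minimal standalone C++ sketch of the card-marking idea behind a card-table post-write barrier, assuming the classic layout of one byte per 512-byte card with 0 as the dirty value. The helper itself emits LIR for this pattern rather than executing code like the following directly.

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <cstdio>

    // Assumed layout, mirroring the classic card-table scheme: one byte per
    // 512-byte "card" of heap, with 0 meaning "dirty" (must be rescanned).
    static const int     kCardShift = 9;
    static const uint8_t kDirtyCard = 0;
    static const uint8_t kCleanCard = 0xff;

    struct FakeHeap {
      uint8_t heap[1 << 16];                      // 64 KB of pretend heap
      uint8_t cards[(1 << 16) >> kCardShift];     // one card byte per 512 heap bytes
      FakeHeap() { std::memset(cards, kCleanCard, sizeof(cards)); }

      // The post-barrier step: after a reference store into 'field', dirty the
      // card covering it so the next GC scans that 512-byte region.
      void post_write_barrier(void* field) {
        std::size_t offset = (uint8_t*)field - heap;   // heap-relative address
        cards[offset >> kCardShift] = kDirtyCard;
      }
    };

    int main() {
      FakeHeap h;
      void* field = &h.heap[1234];        // pretend this slot holds an oop field
      h.post_write_barrier(field);        // a reference store just happened here
      std::printf("card %zu is %s\n", (std::size_t)(1234 >> kCardShift),
                  h.cards[1234 >> kCardShift] == kDirtyCard ? "dirty" : "clean");
      return 0;
    }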

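As context for the strength_reduce_multiply declaration above: a minimal standalone sketch of the strength-reduction idea, assuming the common special cases of constants of the form 2^n, 2^n + 1 and 2^n - 1. The machine-dependent hook emits LIR (shift/add sequences) for such constants and returns true, letting the caller fall back to an ordinary multiply otherwise; the sketch computes values instead of emitting code.

    #include <cstdint>
    #include <cstdio>

    // Exact log2 of a power of two (v > 0 with exactly one bit set).
    static int log2_exact(uint64_t v) {
      int n = 0;
      while ((v >>= 1) != 0) n++;
      return n;
    }

    // Rewrite left * constant as shifts and adds when the constant is
    // 2^n, 2^n + 1 or 2^n - 1; report whether a cheaper form was produced.
    static bool strength_reduce_multiply(int64_t left, int64_t constant, int64_t& result) {
      if (constant > 0 && (constant & (constant - 1)) == 0) {          // 2^n
        result = left << log2_exact((uint64_t)constant);
        return true;
      }
      if (constant > 1 && ((constant - 1) & (constant - 2)) == 0) {    // 2^n + 1
        int n = log2_exact((uint64_t)(constant - 1));
        result = (left << n) + left;
        return true;
      }
      if (constant > 0 && ((constant + 1) & constant) == 0) {          // 2^n - 1
        int n = log2_exact((uint64_t)(constant + 1));
        result = (left << n) - left;
        return true;
      }
      return false;   // caller falls back to a plain multiply instruction
    }

    int main() {
      int64_t r;
      if (strength_reduce_multiply(7, 8, r))   std::printf("7 * 8  -> %lld\n", (long long)r);
      if (strength_reduce_multiply(7, 9, r))   std::printf("7 * 9  -> %lld\n", (long long)r);
      if (!strength_reduce_multiply(7, 10, r)) std::printf("7 * 10 -> plain multiply\n");
      return 0;
    }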