src/hotspot/share/c1/c1_LIRGenerator.cpp

1970       __ null_check(obj, new CodeEmitInfo(info));
1971     }
1972   }
1973   __ load(new LIR_Address(array.result(), arrayOopDesc::length_offset_in_bytes(), T_INT), reg, info, lir_patch_none);
1974 }
1975 
1976 
1977 void LIRGenerator::do_LoadIndexed(LoadIndexed* x) {
1978   bool use_length = x->length() != NULL;
1979   LIRItem array(x->array(), this);
1980   LIRItem index(x->index(), this);
1981   LIRItem length(this);
1982   bool needs_range_check = x->compute_needs_range_check();
1983 
1984   if (use_length && needs_range_check) {
1985     length.set_instruction(x->length());
1986     length.load_item();
1987   }
1988 
1989   array.load_item();
1990   if (index.is_constant() && can_inline_as_constant(x->index())) {
1991     // let it be a constant
1992     index.dont_load_item();
1993   } else {
1994     index.load_item();
1995   }
1996 
1997   CodeEmitInfo* range_check_info = state_for(x);
1998   CodeEmitInfo* null_check_info = NULL;
1999   if (x->needs_null_check()) {
2000     NullCheck* nc = x->explicit_null_check();
2001     if (nc != NULL) {
2002       null_check_info = state_for(nc);
2003     } else {
2004       null_check_info = range_check_info;
2005     }
2006     if (StressLoopInvariantCodeMotion && null_check_info->deoptimize_on_exception()) {
2007       LIR_Opr obj = new_register(T_OBJECT);
2008       __ move(LIR_OprFact::oopConst(NULL), obj);
2009       __ null_check(obj, new CodeEmitInfo(null_check_info));
2010     }
2011   }
2012 
2013   if (GenerateRangeChecks && needs_range_check) {
2014     if (StressLoopInvariantCodeMotion && range_check_info->deoptimize_on_exception()) {
2015       __ branch(lir_cond_always, T_ILLEGAL, new RangeCheckStub(range_check_info, index.result(), array.result()));
2016     } else if (use_length) {
2017       // TODO: use a (modified) version of array_range_check that does not require a
2018       //       constant length to be loaded to a register
2019       __ cmp(lir_cond_belowEqual, length.result(), index.result());
2020       __ branch(lir_cond_belowEqual, T_INT, new RangeCheckStub(range_check_info, index.result(), array.result()));
2021     } else {
2022       array_range_check(array.result(), index.result(), null_check_info, range_check_info);
2023       // The range check performs the null check, so clear it out for the load
2024       null_check_info = NULL;
2025     }
2026   }
2027 
2028   if (x->array()->is_flattened_array()) {
2029     if (x->array()->declared_type()->is_loaded()) {
2030       // Find the destination address (of the NewValueTypeInstance)
2031       LIR_Opr obj = x->vt()->operand();
2032       LIRItem obj_item(x->vt(), this);
2033 
2034       access_flattened_array(true, array, index, obj_item);
2035       set_no_result(x);
2036       return;
2037     } else {
2038       // If the array is indeed flattened, deopt. Otherwise access it as a normal object array.
2039       CodeEmitInfo* deopt_info = state_for(x, x->state_before());
2040       maybe_deopt_value_array_access(array, null_check_info, deopt_info);
2041     }
2042   }
2043   DecoratorSet decorators = IN_HEAP | IS_ARRAY;
2044   LIR_Opr result = rlock_result(x, x->elt_type());
2045   access_load_at(decorators, x->elt_type(),
2046                  array, index.result(), result,
2047                  NULL, null_check_info);
2048 }
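
Editor's note on the range check above: the generated cmp/branch on lir_cond_belowEqual compares length against index as unsigned values, so a single test rejects both an index that is too large and a negative index. The standalone sketch below shows that idea only; the helper name needs_range_check_stub is invented for the example and is not taken from the patch.

#include <cstdint>
#include <cstdio>

// Branch to the RangeCheckStub when length <= index, compared as unsigned
// values: a negative index reinterpreted as unsigned becomes huge, so one
// compare covers both "index too large" and "index negative".
bool needs_range_check_stub(int32_t length, int32_t index) {
  return static_cast<uint32_t>(length) <= static_cast<uint32_t>(index);
}

int main() {
  std::printf("%d\n", needs_range_check_stub(10, 3));   // 0: in range
  std::printf("%d\n", needs_range_check_stub(10, 10));  // 1: index == length is out of range
  std::printf("%d\n", needs_range_check_stub(10, -1));  // 1: negative index caught by the unsigned compare
}

In the LIR above the branch target is a RangeCheckStub, so the in-range fast path pays only this one conditional jump.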
2049 
2050 
2051 void LIRGenerator::do_NullCheck(NullCheck* x) {
2052   if (x->can_trap()) {
2053     LIRItem value(x->obj(), this);
2054     value.load_item();
2055     CodeEmitInfo* info = state_for(x);
2056     __ null_check(value.result(), info);
2057   }
2058 }
2059 
2060 
2061 void LIRGenerator::do_TypeCast(TypeCast* x) {
2062   LIRItem value(x->obj(), this);
2063   value.load_item();
2064   // the result is the same as from the node we are casting
2065   set_result(x, value.result());
2066 }
2067 




1970       __ null_check(obj, new CodeEmitInfo(info));
1971     }
1972   }
1973   __ load(new LIR_Address(array.result(), arrayOopDesc::length_offset_in_bytes(), T_INT), reg, info, lir_patch_none);
1974 }
1975 
1976 
1977 void LIRGenerator::do_LoadIndexed(LoadIndexed* x) {
1978   bool use_length = x->length() != NULL;
1979   LIRItem array(x->array(), this);
1980   LIRItem index(x->index(), this);
1981   LIRItem length(this);
1982   bool needs_range_check = x->compute_needs_range_check();
1983 
1984   if (use_length && needs_range_check) {
1985     length.set_instruction(x->length());
1986     length.load_item();
1987   }
1988 
1989   array.load_item();
1990   if (index.is_constant() && can_inline_as_constant(x->index())
1991       && !x->array()->maybe_flattened_array()) {
1992     // let it be a constant
1993     index.dont_load_item();
1994   } else {
1995     index.load_item();
1996   }
1997 
1998   CodeEmitInfo* range_check_info = state_for(x);
1999   CodeEmitInfo* null_check_info = NULL;
2000   if (x->needs_null_check()) {
2001     NullCheck* nc = x->explicit_null_check();
2002     if (nc != NULL) {
2003       null_check_info = state_for(nc);
2004     } else {
2005       null_check_info = range_check_info;
2006     }
2007     if (StressLoopInvariantCodeMotion && null_check_info->deoptimize_on_exception()) {
2008       LIR_Opr obj = new_register(T_OBJECT);
2009       __ move(LIR_OprFact::oopConst(NULL), obj);
2010       __ null_check(obj, new CodeEmitInfo(null_check_info));
2011     }
2012   }
2013 
2014   if (GenerateRangeChecks && needs_range_check) {
2015     if (StressLoopInvariantCodeMotion && range_check_info->deoptimize_on_exception()) {
2016       __ branch(lir_cond_always, T_ILLEGAL, new RangeCheckStub(range_check_info, index.result(), array.result()));
2017     } else if (use_length) {
2018       // TODO: use a (modified) version of array_range_check that does not require a
2019       //       constant length to be loaded to a register
2020       __ cmp(lir_cond_belowEqual, length.result(), index.result());
2021       __ branch(lir_cond_belowEqual, T_INT, new RangeCheckStub(range_check_info, index.result(), array.result()));
2022     } else {
2023       array_range_check(array.result(), index.result(), null_check_info, range_check_info);
2024       // The range check performs the null check, so clear it out for the load
2025       null_check_info = NULL;
2026     }
2027   }
2028 
2029   if (x->array()->is_loaded_flattened_array()) {

2030     // Find the destination address (of the NewValueTypeInstance)
2031     LIR_Opr obj = x->vt()->operand();
2032     LIRItem obj_item(x->vt(), this);
2033 
2034     access_flattened_array(true, array, index, obj_item);
2035     set_no_result(x);

2036   } else {
2037     LIR_Opr result = rlock_result(x, x->elt_type());
2038     LoadFlattenedArrayStub* slow_path = NULL;
2039
2040     if (x->array()->maybe_flattened_array()) {
2041       // Check if we indeed have a flattened array
2042       slow_path = new LoadFlattenedArrayStub(array.result(), index.result(), result, state_for(x));
2043       LIR_Opr array_klass_reg = new_register(T_METADATA);
2044 
2045       __ move(new LIR_Address(array.result(), oopDesc::klass_offset_in_bytes(), T_ADDRESS), array_klass_reg);
2046       LIR_Opr layout = new_register(T_INT);
2047       __ move(new LIR_Address(array_klass_reg, in_bytes(Klass::layout_helper_offset()), T_INT), layout);
2048       __ shift_right(layout, Klass::_lh_array_tag_shift, layout);
2049       __ cmp(lir_cond_equal, layout, LIR_OprFact::intConst(Klass::_lh_array_tag_vt_value));
2050       __ branch(lir_cond_equal, T_ILLEGAL, slow_path);
2051     }
2052 
2053     DecoratorSet decorators = IN_HEAP | IS_ARRAY;

2054     access_load_at(decorators, x->elt_type(),
2055                    array, index.result(), result,
2056                    NULL, null_check_info);
2057 
2058     if (slow_path != NULL) {
2059       __ branch_destination(slow_path->continuation());
2060     }
2061   }
2062 }
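
Editor's note on the flattened-array handling above: where the old version deoptimized when an array with an unloaded declared type turned out to be flattened, the new version emits an inline test of the array klass's layout helper and only branches to a LoadFlattenedArrayStub slow path when the tag identifies a flattened value-type array; ordinary object arrays stay on the access_load_at fast path. The standalone sketch below mimics that tag test under an assumed encoding; the shift amount, the tag value, the FakeKlass type, and the function name are placeholders for illustration, not the real Klass constants.

#include <cstdint>
#include <cstdio>

// Placeholder layout-helper encoding: the array tag sits in the top bits of a
// 32-bit value and is recovered by an arithmetic right shift. The shift amount
// and tag value are invented for this sketch, not the real Klass constants.
constexpr int     kArrayTagShift = 29;
constexpr int32_t kValueArrayTag = ~0x02;   // tag assumed to mark a flattened value-type array

struct FakeKlass {
  int32_t layout_helper;   // stands in for Klass::_layout_helper
};

// Mirrors the emitted LIR: read layout_helper, shift out the tag, and take the
// LoadFlattenedArrayStub slow path only when the tag marks a flattened array.
bool take_flattened_slow_path(const FakeKlass* array_klass) {
  int32_t tag = array_klass->layout_helper >> kArrayTagShift;
  return tag == kValueArrayTag;
}

int main() {
  FakeKlass obj_array   { static_cast<int32_t>(0xC0000000u) };  // tag bits 110 -> ~0x01, ordinary object array
  FakeKlass value_array { static_cast<int32_t>(0xA0000000u) };  // tag bits 101 -> ~0x02, flattened array
  std::printf("object array -> slow path? %d\n", take_flattened_slow_path(&obj_array));
  std::printf("value array  -> slow path? %d\n", take_flattened_slow_path(&value_array));
}

The fast path thus costs a klass load, a layout-helper load, a shift, and a compare. This is also presumably why the index can no longer be inlined as a constant when the array may be flattened (the added maybe_flattened_array() condition at line 1991): the slow-path stub takes the index as an operand.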
2063 
2064 
2065 void LIRGenerator::do_NullCheck(NullCheck* x) {
2066   if (x->can_trap()) {
2067     LIRItem value(x->obj(), this);
2068     value.load_item();
2069     CodeEmitInfo* info = state_for(x);
2070     __ null_check(value.result(), info);
2071   }
2072 }
2073 
2074 
2075 void LIRGenerator::do_TypeCast(TypeCast* x) {
2076   LIRItem value(x->obj(), this);
2077   value.load_item();
2078   // the result is the same as from the node we are casting
2079   set_result(x, value.result());
2080 }
2081 

