src/hotspot/share/c1/c1_LIRGenerator.cpp (old version)



1526       value.load_byte_item();
1527     } else {
1528       value.load_item();
1529     }
1530   } else {
1531     value.load_for_store(field_type);
1532   }
1533 
1534   set_no_result(x);
1535 
1536 #ifndef PRODUCT
1537   if (PrintNotLoaded && needs_patching) {
1538     tty->print_cr("   ###class not loaded at store_%s bci %d",
1539                   x->is_static() ?  "static" : "field", x->printable_bci());
1540   }
1541 #endif
1542 
1543   if (x->needs_null_check() &&
1544       (needs_patching ||
1545        MacroAssembler::needs_explicit_null_check(x->offset()))) {
1546     if (needs_patching && x->field()->signature()->starts_with("Q", 1)) {
1547       // We are storing a field of type "QT;", but T is not yet loaded, so we don't
1548       // know whether this field is flattened or not. Let's deoptimize and recompile.
1549       CodeStub* stub = new DeoptimizeStub(new CodeEmitInfo(info),
1550                                           Deoptimization::Reason_unloaded,
1551                                           Deoptimization::Action_make_not_entrant);
1552       __ branch(lir_cond_always, T_ILLEGAL, stub);
1553     } else {
1554       // Emit an explicit null check because the offset is too large.
1555       // If the class is not loaded and the object is NULL, we need to deoptimize to throw a
1556       // NoClassDefFoundError in the interpreter instead of an implicit NPE from compiled code.
1557       __ null_check(object.result(), new CodeEmitInfo(info), /* deoptimize */ needs_patching);
1558     }
1559   }
1560 
1561   DecoratorSet decorators = IN_HEAP;
1562   if (is_volatile) {
1563     decorators |= MO_SEQ_CST;
1564   }
1565   if (needs_patching) {
1566     decorators |= C1_NEEDS_PATCHING;
1567   }
1568 


1851   decorators |= ACCESS_WRITE;
1852   // Atomic operations are SEQ_CST by default
1853   decorators |= ((decorators & MO_DECORATOR_MASK) == 0) ? MO_SEQ_CST : 0;
1854   LIRAccess access(this, decorators, base, offset, type);
1855   if (access.is_raw()) {
1856     return _barrier_set->BarrierSetC1::atomic_add_at(access, value);
1857   } else {
1858     return _barrier_set->atomic_add_at(access, value);
1859   }
1860 }
1861 
1862 LIR_Opr LIRGenerator::access_resolve(DecoratorSet decorators, LIR_Opr obj) {
1863   // Use stronger ACCESS_WRITE|ACCESS_READ by default.
1864   if ((decorators & (ACCESS_READ | ACCESS_WRITE)) == 0) {
1865     decorators |= ACCESS_READ | ACCESS_WRITE;
1866   }
1867 
1868   return _barrier_set->resolve(this, decorators, obj);
1869 }
1870 
1871 void LIRGenerator::do_LoadField(LoadField* x) {
1872   bool needs_patching = x->needs_patching();
1873   bool is_volatile = x->field()->is_volatile();
1874   BasicType field_type = x->field_type();
1875 
1876   CodeEmitInfo* info = NULL;
1877   if (needs_patching) {
1878     assert(x->explicit_null_check() == NULL, "can't fold null check into patching field access");
1879     info = state_for(x, x->state_before());
1880   } else if (x->needs_null_check()) {
1881     NullCheck* nc = x->explicit_null_check();
1882     if (nc == NULL) {
1883       info = state_for(x);
1884     } else {
1885       info = state_for(nc);
1886     }
1887   }
1888 
1889   LIRItem object(x->obj(), this);
1890 
1891   object.load_item();
1892 
1893 #ifndef PRODUCT
1894   if (PrintNotLoaded && needs_patching) {
1895     tty->print_cr("   ###class not loaded at load_%s bci %d",
1896                   x->is_static() ?  "static" : "field", x->printable_bci());
1897   }
1898 #endif
1899 
1900   bool stress_deopt = StressLoopInvariantCodeMotion && info && info->deoptimize_on_exception();
1901   if (x->needs_null_check() &&
1902       (needs_patching ||
1903        MacroAssembler::needs_explicit_null_check(x->offset()) ||
1904        stress_deopt)) {
1905     if (needs_patching && x->field()->signature()->starts_with("Q", 1)) {
1906       // We are loading a field of type "QT;", but class T is not yet loaded. We don't know
1907       // whether this field is flattened or not. Let's deoptimize and recompile.
1908       CodeStub* stub = new DeoptimizeStub(new CodeEmitInfo(info),
1909                                           Deoptimization::Reason_unloaded,
1910                                           Deoptimization::Action_make_not_entrant);
1911       __ branch(lir_cond_always, T_ILLEGAL, stub);
1912     } else {
1913       LIR_Opr obj = object.result();
1914       if (stress_deopt) {
1915         obj = new_register(T_OBJECT);
1916         __ move(LIR_OprFact::oopConst(NULL), obj);
1917       }
1918       // Emit an explicit null check because the offset is too large.
1919       // If the class is not loaded and the object is NULL, we need to deoptimize to throw a
1920       // NoClassDefFoundError in the interpreter instead of an implicit NPE from compiled code.
1921       __ null_check(obj, new CodeEmitInfo(info), /* deoptimize */ needs_patching);
1922     }
1923   } else if (x->value_klass() != NULL && x->default_value() == NULL) {
1924     assert(x->is_static() && !x->value_klass()->is_loaded(), "must be");
1925     assert(needs_patching, "must be");
1926     // The value klass was not loaded, so we don't know what its default value should be.
1927     CodeStub* stub = new DeoptimizeStub(new CodeEmitInfo(info),
1928                                         Deoptimization::Reason_unloaded,
1929                                         Deoptimization::Action_make_not_entrant);
1930     __ branch(lir_cond_always, T_ILLEGAL, stub);
1931   }
1932 
1933   DecoratorSet decorators = IN_HEAP;
1934   if (is_volatile) {
1935     decorators |= MO_SEQ_CST;
1936   }
1937   if (needs_patching) {
1938     decorators |= C1_NEEDS_PATCHING;
1939   }
1940 
1941   LIR_Opr result = rlock_result(x, field_type);
1942   access_load_at(decorators, field_type,
1943                  object, LIR_OprFact::intConst(x->offset()), result,
1944                  info ? new CodeEmitInfo(info) : NULL, info);
1945 
1946   if (x->value_klass() != NULL && x->default_value() != NULL) {
1947     LabelObj* L_end = new LabelObj();
1948     __ cmp(lir_cond_notEqual, result, LIR_OprFact::oopConst(NULL));
1949     __ branch(lir_cond_notEqual, T_OBJECT, L_end->label());
1950 
1951     LIRItem default_value(x->default_value(), this);
1952     default_value.load_item();
1953     __ move(default_value.result(), result);
1954 
1955     __ branch_destination(L_end->label());
1956   }
1957 }
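
For reference, the special cases in this version of do_LoadField condense to two deopt triggers plus the explicit null check; a sketch in equivalent boolean form (not part of the source):

    // Unloaded "QT;" field while patching: flattened-or-not is unknown.
    bool deopt_unloaded_q_field =
        needs_patching && x->field()->signature()->starts_with("Q", 1);
    // Static field of an unloaded value klass: its default value is unknown
    // (x->default_value() is only NULL in that case).
    bool deopt_unknown_default =
        x->value_klass() != NULL && x->default_value() == NULL;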
1958 
1959 
1960 //------------------------java.nio.Buffer.checkIndex------------------------
1961 
1962 // int java.nio.Buffer.checkIndex(int)
1963 void LIRGenerator::do_NIOCheckIndex(Intrinsic* x) {
1964   // NOTE: by the time we are in checkIndex() we are guaranteed that
1965   // the buffer is non-null (because checkIndex is package-private and
1966   // only called from within other methods in the buffer).
1967   assert(x->number_of_arguments() == 2, "wrong type");
1968   LIRItem buf  (x->argument_at(0), this);
1969   LIRItem index(x->argument_at(1), this);
1970   buf.load_item();
1971   index.load_item();
1972 
1973   LIR_Opr result = rlock_result(x);

src/hotspot/share/c1/c1_LIRGenerator.cpp (new version)

1526       value.load_byte_item();
1527     } else {
1528       value.load_item();
1529     }
1530   } else {
1531     value.load_for_store(field_type);
1532   }
1533 
1534   set_no_result(x);
1535 
1536 #ifndef PRODUCT
1537   if (PrintNotLoaded && needs_patching) {
1538     tty->print_cr("   ###class not loaded at store_%s bci %d",
1539                   x->is_static() ?  "static" : "field", x->printable_bci());
1540   }
1541 #endif
1542 
1543   if (x->needs_null_check() &&
1544       (needs_patching ||
1545        MacroAssembler::needs_explicit_null_check(x->offset()))) {
1546     if (needs_patching && x->field()->is_flattenable()) {
1547       // We are storing a field of type "QT;" into holder class H, but H is not yet
1548       // loaded. (If H had been loaded, then T must also have already been loaded
1549       // due to the "Q" signature, and needs_patching would be false).
1550       assert(!x->field()->holder()->is_loaded(), "must be");
1551       // We don't know the offset of this field. Let's deopt and recompile.
1552       CodeStub* stub = new DeoptimizeStub(new CodeEmitInfo(info),
1553                                           Deoptimization::Reason_unloaded,
1554                                           Deoptimization::Action_make_not_entrant);
1555       __ branch(lir_cond_always, T_ILLEGAL, stub);
1556     } else {
1557       // Emit an explicit null check because the offset is too large.
1558       // If the class is not loaded and the object is NULL, we need to deoptimize to throw a
1559       // NoClassDefFoundError in the interpreter instead of an implicit NPE from compiled code.
1560       __ null_check(object.result(), new CodeEmitInfo(info), /* deoptimize */ needs_patching);
1561     }
1562   }
1563 
1564   DecoratorSet decorators = IN_HEAP;
1565   if (is_volatile) {
1566     decorators |= MO_SEQ_CST;
1567   }
1568   if (needs_patching) {
1569     decorators |= C1_NEEDS_PATCHING;
1570   }
1571 


1854   decorators |= ACCESS_WRITE;
1855   // Atomic operations are SEQ_CST by default
1856   decorators |= ((decorators & MO_DECORATOR_MASK) == 0) ? MO_SEQ_CST : 0;
1857   LIRAccess access(this, decorators, base, offset, type);
1858   if (access.is_raw()) {
1859     return _barrier_set->BarrierSetC1::atomic_add_at(access, value);
1860   } else {
1861     return _barrier_set->atomic_add_at(access, value);
1862   }
1863 }
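
The SEQ_CST default above applies only when the caller specified no memory-ordering decorator; a minimal sketch (decorator names from oops/accessDecorators.hpp; the d1/d2 values are illustrative):

    DecoratorSet d1 = IN_HEAP;                                // no MO_* bit set
    d1 |= ((d1 & MO_DECORATOR_MASK) == 0) ? MO_SEQ_CST : 0;   // adds MO_SEQ_CST

    DecoratorSet d2 = IN_HEAP | MO_RELAXED;                   // explicit ordering
    d2 |= ((d2 & MO_DECORATOR_MASK) == 0) ? MO_SEQ_CST : 0;   // left unchanged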
1864 
1865 LIR_Opr LIRGenerator::access_resolve(DecoratorSet decorators, LIR_Opr obj) {
1866   // Use stronger ACCESS_WRITE|ACCESS_READ by default.
1867   if ((decorators & (ACCESS_READ | ACCESS_WRITE)) == 0) {
1868     decorators |= ACCESS_READ | ACCESS_WRITE;
1869   }
1870 
1871   return _barrier_set->resolve(this, decorators, obj);
1872 }
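
access_resolve() likewise widens a caller that passes neither ACCESS_READ nor ACCESS_WRITE; a sketch of an illustrative call site inside LIRGenerator (not taken from this patch):

    // Both calls resolve with ACCESS_READ | ACCESS_WRITE in effect.
    LIR_Opr r1 = access_resolve(IN_HEAP, obj);
    LIR_Opr r2 = access_resolve(IN_HEAP | ACCESS_READ | ACCESS_WRITE, obj);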
1873 
1874 Value LIRGenerator::flattenable_load_field_prolog(LoadField* x, CodeEmitInfo* info) {
1875   ciField* field = x->field();
1876   ciInstanceKlass* holder = field->holder();
1877   Value default_value = NULL;
1878 
1879   // Unloaded "QV;" klasses are represented by a ciInstanceKlass
1880   bool field_type_unloaded = field->type()->is_instance_klass() && !field->type()->as_instance_klass()->is_loaded();
1881 
1882   // Check for edge cases (1), (2) and (3) for getstatic and getfield
1883   bool deopt = false;
1884   bool need_default = false;
1885   if (field->is_static()) {
1886     // (1) holder is unloaded -- no problem: it will be loaded by patching, and the field offset will be determined.
1887     // No check needed here.
1888 
1889     if (field_type_unloaded) {
1890       // (2) field type is unloaded -- problem: we don't know what the default value is. Let's deopt.
1891       //                               FIXME: consider getting the default value in patching code.
1892       deopt = true;
1893     } else {
1894       need_default = true;
1895     }
1896 
1897     // (3) field is not flattened -- we don't care: static fields are never flattened.
1898     // No check needed here.
1899   } else {
1900     if (!holder->is_loaded()) {
1901       // (1) holder is unloaded -- problem: we needed the field offset back in GraphBuilder::access_field()
1902       //                           FIXME: consider getting field offset in patching code (but only if the field
1903       //                           type was loaded at compilation time).
1904       deopt = true;
1905     } else if (field_type_unloaded) {
1906       // (2) field type is unloaded -- problem: we don't know whether it's flattened or not. Let's deopt.
1907       deopt = true;
1908     } else if (!field->is_flattened()) {
1909       // (3) field is not flattened -- we need a default value in case the field is uninitialized.
1910       need_default = true;
1911     }
1912   }
1913 
1914   if (deopt) {
1915     assert(!need_default, "deopt and need_default cannot both be true");
1916     assert(x->needs_patching(), "must be");
1917     assert(info != NULL, "must be");
1918     CodeStub* stub = new DeoptimizeStub(new CodeEmitInfo(info),
1919                                         Deoptimization::Reason_unloaded,
1920                                         Deoptimization::Action_make_not_entrant);
1921     __ branch(lir_cond_always, T_ILLEGAL, stub);
1922   } else if (need_default) {
1923     assert(!field_type_unloaded, "must be");
1924     assert(field->type()->is_valuetype(), "must be");
1925     ciValueKlass* value_klass = field->type()->as_value_klass();
1926     assert(value_klass->is_loaded(), "must be");
1927 
1928     if (field->is_static() && holder->is_loaded()) {
1929       ciInstance* mirror = field->holder()->java_mirror();
1930       ciObject* val = mirror->field_value(field).as_object();
1931       if (val->is_null_object()) {
1932         // This is a non-nullable static field, but it has not been initialized.
1933         // We need to do a null check, and replace a NULL result with the default value.
1934       } else {
1935         // No need to perform null check on this static field
1936         need_default = false;
1937       }
1938     }
1939 
1940     if (need_default) {
1941       default_value = new Constant(new InstanceConstant(value_klass->default_value_instance()));
1942     }
1943   }
1944 
1945   return default_value;
1946 }
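
The first-phase classification above condenses to two boolean expressions; a standalone model for reference (the struct and function are hypothetical, written only to restate the logic):

    struct FieldView {
      bool is_static;
      bool holder_loaded;   // holder class H resolved?
      bool type_loaded;     // "QT;" field type T resolved?
      bool is_flattened;
    };

    // deopt: the field type is unloaded, or an instance field's holder is
    // unloaded (offset unknown). need_default: static fields and non-flattened
    // instance fields may legitimately read as NULL.
    void classify(const FieldView& f, bool& deopt, bool& need_default) {
      deopt        = !f.type_loaded || (!f.is_static && !f.holder_loaded);
      need_default = !deopt && (f.is_static || !f.is_flattened);
    }

For statics, the second phase may still clear need_default when the holder is loaded and its mirror already holds a non-null value.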
1947 
1948 void LIRGenerator::do_LoadField(LoadField* x) {
1949   bool needs_patching = x->needs_patching();
1950   bool is_volatile = x->field()->is_volatile();
1951   BasicType field_type = x->field_type();
1952 
1953   CodeEmitInfo* info = NULL;
1954   if (needs_patching) {
1955     assert(x->explicit_null_check() == NULL, "can't fold null check into patching field access");
1956     info = state_for(x, x->state_before());
1957   } else if (x->needs_null_check()) {
1958     NullCheck* nc = x->explicit_null_check();
1959     if (nc == NULL) {
1960       info = state_for(x);
1961     } else {
1962       info = state_for(nc);
1963     }
1964   }
1965 
1966   LIRItem object(x->obj(), this);
1967 
1968   object.load_item();
1969 
1970 #ifndef PRODUCT
1971   if (PrintNotLoaded && needs_patching) {
1972     tty->print_cr("   ###class not loaded at load_%s bci %d",
1973                   x->is_static() ?  "static" : "field", x->printable_bci());
1974   }
1975 #endif
1976 
1977   Value default_value = NULL;
1978   if (x->field()->is_flattenable()) {
1979     default_value = flattenable_load_field_prolog(x, info);
1980   }
1981 
1982   bool stress_deopt = StressLoopInvariantCodeMotion && info && info->deoptimize_on_exception();
1983   if (x->needs_null_check() &&
1984       (needs_patching ||
1985        MacroAssembler::needs_explicit_null_check(x->offset()) ||
1986        stress_deopt)) {
1987     LIR_Opr obj = object.result();
1988     if (stress_deopt) {
1989       obj = new_register(T_OBJECT);
1990       __ move(LIR_OprFact::oopConst(NULL), obj);
1991     }
1992     // Emit an explicit null check because the offset is too large.
1993     // If the class is not loaded and the object is NULL, we need to deoptimize to throw a
1994     // NoClassDefFoundError in the interpreter instead of an implicit NPE from compiled code.
1995     __ null_check(obj, new CodeEmitInfo(info), /* deoptimize */ needs_patching);
1996   }
1997 
1998   DecoratorSet decorators = IN_HEAP;
1999   if (is_volatile) {
2000     decorators |= MO_SEQ_CST;
2001   }
2002   if (needs_patching) {
2003     decorators |= C1_NEEDS_PATCHING;
2004   }
2005 
2006   LIR_Opr result = rlock_result(x, field_type);
2007   access_load_at(decorators, field_type,
2008                  object, LIR_OprFact::intConst(x->offset()), result,
2009                  info ? new CodeEmitInfo(info) : NULL, info);
2010 
2011   if (default_value != NULL) {
2012     LabelObj* L_end = new LabelObj();
2013     __ cmp(lir_cond_notEqual, result, LIR_OprFact::oopConst(NULL));
2014     __ branch(lir_cond_notEqual, T_OBJECT, L_end->label());
2015 
2016     LIRItem dv(default_value, this);
2017     dv.load_item();
2018     __ move(dv.result(), result);
2019 
2020     __ branch_destination(L_end->label());
2021   }
2022 }
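
The fix-up block above has the source-level semantics of a null-to-default substitution; a standalone sketch (the helper and the Instance stand-in are illustrative):

    struct Instance;  // stand-in for the JVM's oop type

    const Instance* load_with_default(const Instance* loaded,
                                      const Instance* dflt) {
      // An uninitialized flattenable field reads as NULL; hand back the
      // value klass' default instance so callers never observe NULL.
      return loaded != nullptr ? loaded : dflt;
    }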
2023 
2024 
2025 //------------------------java.nio.Buffer.checkIndex------------------------
2026 
2027 // int java.nio.Buffer.checkIndex(int)
2028 void LIRGenerator::do_NIOCheckIndex(Intrinsic* x) {
2029   // NOTE: by the time we are in checkIndex() we are guaranteed that
2030   // the buffer is non-null (because checkIndex is package-private and
2031   // only called from within other methods in the buffer).
2032   assert(x->number_of_arguments() == 2, "wrong type");
2033   LIRItem buf  (x->argument_at(0), this);
2034   LIRItem index(x->argument_at(1), this);
2035   buf.load_item();
2036   index.load_item();
2037 
2038   LIR_Opr result = rlock_result(x);