src/share/vm/c1/c1_Compiler.cpp

  82 }
  83 
  84 BufferBlob* Compiler::init_buffer_blob() {
  85   // Allocate buffer blob once at startup since allocation for each
  86   // compilation seems to be too expensive (at least on Intel win32).
  87   assert (CompilerThread::current()->get_buffer_blob() == NULL, "Should initialize only once");
  88 
  89   // Set up the CodeBuffer.  Preallocate a BufferBlob of size
  90   // NMethodSizeLimit plus some extra space for constants.
  91   int code_buffer_size = Compilation::desired_max_code_buffer_size() +
  92     Compilation::desired_max_constant_size();
  93 
  94   BufferBlob* buffer_blob = BufferBlob::create("C1 temporary CodeBuffer", code_buffer_size);
  95   if (buffer_blob != NULL) {
  96     CompilerThread::current()->set_buffer_blob(buffer_blob);
  97   }
  98 
  99   return buffer_blob;
 100 }
 101 
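
For context, a minimal sketch of how a compiler thread might rely on this initializer. The wrapper name start_c1_thread_sketch is hypothetical and not part of this change; in the VM the allocation is driven from the compiler-thread startup path, and BufferBlob, Compiler and CompilerThread are the HotSpot types used in the listing above.

    // Illustrative only -- this wrapper is hypothetical and not part of the change.
    void start_c1_thread_sketch(Compiler* c1) {
      // One blob per compiler thread, allocated once and reused for every
      // compilation on that thread.
      BufferBlob* blob = c1->init_buffer_blob();
      if (blob == NULL) {
        // Allocation failed (e.g. the code cache is full); the thread
        // cannot compile until space becomes available.
        return;
      }
      assert(CompilerThread::current()->get_buffer_blob() == blob, "cached on the thread");
    }
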
 102 bool Compiler::is_intrinsic_supported(methodHandle method) {
 103   vmIntrinsics::ID id = method->intrinsic_id();
 104   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
 105 
 106   if (method->is_synchronized()) {
 107     // C1 does not support intrinsification of synchronized methods.
 108     return false;
 109   }
 110 
 111   switch (id) {
 112   case vmIntrinsics::_compareAndSwapLong:
 113     if (!VM_Version::supports_cx8()) return false;
 114     break;
 115   case vmIntrinsics::_getAndAddInt:
 116     if (!VM_Version::supports_atomic_getadd4()) return false;
 117     break;
 118   case vmIntrinsics::_getAndAddLong:
 119     if (!VM_Version::supports_atomic_getadd8()) return false;
 120     break;
 121   case vmIntrinsics::_getAndSetInt:
 122     if (!VM_Version::supports_atomic_getset4()) return false;
 123     break;
 124   case vmIntrinsics::_getAndSetLong:
 125     if (!VM_Version::supports_atomic_getset8()) return false;
 126     break;
 127   case vmIntrinsics::_getAndSetObject:
 128 #ifdef _LP64
 129     if (!UseCompressedOops && !VM_Version::supports_atomic_getset8()) return false;
 130     if (UseCompressedOops && !VM_Version::supports_atomic_getset4()) return false;
 131 #else
 132     if (!VM_Version::supports_atomic_getset4()) return false;
 133 #endif
 134     break;
 135   case vmIntrinsics::_arraycopy:
 136   case vmIntrinsics::_currentTimeMillis:
 137   case vmIntrinsics::_nanoTime:
 138   case vmIntrinsics::_Reference_get:
 139     // Use the intrinsic version of Reference.get() so that the value in
 140     // the referent field can be registered by the G1 pre-barrier code.
 141     // It also prevents reads of this field from being commoned across
 142     // safepoints, since the GC can change its value.
 143   case vmIntrinsics::_loadFence:
 144   case vmIntrinsics::_storeFence:
 145   case vmIntrinsics::_fullFence:
 146   case vmIntrinsics::_floatToRawIntBits:
 147   case vmIntrinsics::_intBitsToFloat:
 148   case vmIntrinsics::_doubleToRawLongBits:
 149   case vmIntrinsics::_longBitsToDouble:
 150   case vmIntrinsics::_getClass:
 151   case vmIntrinsics::_isInstance:
 152   case vmIntrinsics::_currentThread:
 153   case vmIntrinsics::_dabs:
 154   case vmIntrinsics::_dsqrt:
 155   case vmIntrinsics::_dsin:
 156   case vmIntrinsics::_dcos:
 157   case vmIntrinsics::_dtan:
 158   case vmIntrinsics::_dlog:
 159   case vmIntrinsics::_dlog10:
 160   case vmIntrinsics::_dexp:
 161   case vmIntrinsics::_dpow:
 162   case vmIntrinsics::_getObject:
 163   case vmIntrinsics::_getBoolean:
 164   case vmIntrinsics::_getByte:
 165   case vmIntrinsics::_getShort:
 166   case vmIntrinsics::_getChar:
 167   case vmIntrinsics::_getInt:
 168   case vmIntrinsics::_getLong:
 169   case vmIntrinsics::_getFloat:
 170   case vmIntrinsics::_getDouble:
 171   case vmIntrinsics::_putObject:
 172   case vmIntrinsics::_putBoolean:
 173   case vmIntrinsics::_putByte:
 174   case vmIntrinsics::_putShort:
 175   case vmIntrinsics::_putChar:
 176   case vmIntrinsics::_putInt:
 177   case vmIntrinsics::_putLong:
 178   case vmIntrinsics::_putFloat:
 179   case vmIntrinsics::_putDouble:
 180   case vmIntrinsics::_getObjectVolatile:
 181   case vmIntrinsics::_getBooleanVolatile:
 182   case vmIntrinsics::_getByteVolatile:
 183   case vmIntrinsics::_getShortVolatile:
 184   case vmIntrinsics::_getCharVolatile:
 185   case vmIntrinsics::_getIntVolatile:
 186   case vmIntrinsics::_getLongVolatile:
 187   case vmIntrinsics::_getFloatVolatile:
 188   case vmIntrinsics::_getDoubleVolatile:
 189   case vmIntrinsics::_putObjectVolatile:
 190   case vmIntrinsics::_putBooleanVolatile:
 191   case vmIntrinsics::_putByteVolatile:
 192   case vmIntrinsics::_putShortVolatile:
 193   case vmIntrinsics::_putCharVolatile:
 194   case vmIntrinsics::_putIntVolatile:
 195   case vmIntrinsics::_putLongVolatile:
 196   case vmIntrinsics::_putFloatVolatile:
 197   case vmIntrinsics::_putDoubleVolatile:
 198   case vmIntrinsics::_getByte_raw:
 199   case vmIntrinsics::_getShort_raw:
 200   case vmIntrinsics::_getChar_raw:
 201   case vmIntrinsics::_getInt_raw:
 202   case vmIntrinsics::_getLong_raw:
 203   case vmIntrinsics::_getFloat_raw:
 204   case vmIntrinsics::_getDouble_raw:
 205   case vmIntrinsics::_putByte_raw:
 206   case vmIntrinsics::_putShort_raw:
 207   case vmIntrinsics::_putChar_raw:
 208   case vmIntrinsics::_putInt_raw:
 209   case vmIntrinsics::_putLong_raw:
 210   case vmIntrinsics::_putFloat_raw:
 211   case vmIntrinsics::_putDouble_raw:
 212   case vmIntrinsics::_putOrderedObject:
 213   case vmIntrinsics::_putOrderedInt:
 214   case vmIntrinsics::_putOrderedLong:
 215   case vmIntrinsics::_getShortUnaligned:
 216   case vmIntrinsics::_getCharUnaligned:
 217   case vmIntrinsics::_getIntUnaligned:
 218   case vmIntrinsics::_getLongUnaligned:
 219   case vmIntrinsics::_putShortUnaligned:
 220   case vmIntrinsics::_putCharUnaligned:
 221   case vmIntrinsics::_putIntUnaligned:
 222   case vmIntrinsics::_putLongUnaligned:
 223   case vmIntrinsics::_checkIndex:
 224   case vmIntrinsics::_updateCRC32:
 225   case vmIntrinsics::_updateBytesCRC32:
 226   case vmIntrinsics::_updateByteBufferCRC32:
 227   case vmIntrinsics::_compareAndSwapInt:
 228   case vmIntrinsics::_compareAndSwapObject:
 229 #ifdef TRACE_HAVE_INTRINSICS
 230   case vmIntrinsics::_classID:
 231   case vmIntrinsics::_threadID:
 232   case vmIntrinsics::_counterTime:
 233 #endif
 234     break;
 235   default:
 236     return false; // Intrinsics not on the previous list are not available.
 237   }
 238 
 239   return true;
 240 }
 241 
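Two details of the switch above are worth spelling out (this is a restatement of the code, not part of the change). The capability checks at the top map onto hardware features: supports_cx8() is an 8-byte compare-and-swap, and the getadd/getset variants are 4- and 8-byte atomic fetch-and-add and exchange; when the platform lacks one of them, the corresponding Unsafe intrinsic is simply not inlined and the call is compiled as a regular invocation. The _getAndSetObject case follows from the oop size: with -XX:+UseCompressedOops a reference field is a 32-bit narrowOop, so a 4-byte atomic exchange suffices, while uncompressed oops on a 64-bit VM need the 8-byte variant.
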
 242 bool Compiler::is_intrinsic_disabled_by_flag(methodHandle method) {
 243   vmIntrinsics::ID id = method->intrinsic_id();
 244   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
 245 
 246   if (!InlineNatives && id != vmIntrinsics::_Reference_get) {
 247     return true;
 248   }
 249 
 250   if (!InlineClassNatives && id == vmIntrinsics::_getClass) {
 251     return true;
 252   }
 253 
 254   return false;
 255 }
 256 
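As a concrete example of this flag check, following directly from the code above: running with -XX:-InlineNatives leaves only the Reference.get() intrinsic eligible here, and -XX:-InlineClassNatives additionally rejects Object.getClass(). The remaining, compiler-independent flag handling is done by vmIntrinsics::is_disabled_by_flags(), which is_intrinsic_available() below consults as well.
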
 257 void Compiler::compile_method(ciEnv* env, ciMethod* method, int entry_bci) {
 258   BufferBlob* buffer_blob = CompilerThread::current()->get_buffer_blob();
 259   assert(buffer_blob != NULL, "Must exist");
 260   // invoke compilation
 261   {
 262     // We are nested here because we need the destructor of
 263     // Compilation to run before we release any competing
 264     // compiler thread
 265     ResourceMark rm;
 266     Compilation c(this, env, method, entry_bci, buffer_blob);
 267   }
 268 }
 269 
 270 
 271 void Compiler::print_timers() {
 272   Compilation::print_timers();
 273 }
 274 
 275 bool Compiler::is_intrinsic_available(methodHandle method) {
 276   return is_intrinsic_supported(method) &&
 277          !vmIntrinsics::is_disabled_by_flags(method->intrinsic_id()) &&
 278          !is_intrinsic_disabled_by_flag(method);
 279 }
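
To illustrate how the new predicate is meant to be consumed, here is a hedged sketch of an inlining decision. The helper name try_inline_intrinsic_sketch is hypothetical; the real call sites in the VM look different, and only Compiler::is_intrinsic_available(), methodHandle and vmIntrinsics::_none are taken from the listing above.

    // Illustrative only -- try_inline_intrinsic_sketch is a hypothetical helper,
    // not part of this change.
    bool try_inline_intrinsic_sketch(Compiler* c1, methodHandle callee) {
      if (callee->intrinsic_id() == vmIntrinsics::_none) {
        return false;  // not an intrinsic at all
      }
      if (!c1->is_intrinsic_available(callee)) {
        return false;  // unsupported on this platform or disabled by a flag
      }
      // ... emit the intrinsic IR instead of compiling a regular call ...
      return true;
    }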