1883 } else {
1884 CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception); // slow path: let the runtime acquire the monitor
1885 }
1886 }
1887 }
1888 UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1); // lock acquired: pop the objectref, advance past the 1-byte opcode
1889 } else {
1890 istate->set_msg(more_monitors); // no free monitor slot: ask the caller to grow the monitor area
1891 UPDATE_PC_AND_RETURN(0); // Re-execute
1892 }
1893 }
1894
1895 CASE(_monitorexit): { // release the monitor for the objectref on top of stack
1896 oop lockee = STACK_OBJECT(-1);
1897 CHECK_NULL(lockee);
1898 // derefing's lockee ought to provoke implicit null check
1899 // find our monitor slot
1900 BasicObjectLock* limit = istate->monitor_base();
1901 BasicObjectLock* most_recent = (BasicObjectLock*) istate->stack_base();
1902 while (most_recent != limit ) { // scan this frame's lock slots, most recent first
1903 if ((most_recent)->obj() == lockee) {
1904 BasicLock* lock = most_recent->lock();
1905 markOop header = lock->displaced_header(); // NULL displaced header marks a recursive (re-entrant) lock
1906 most_recent->set_obj(NULL); // tentatively free the slot; restored below if the slow path is taken
1907 if (!lockee->mark()->has_bias_pattern()) { // biased locks need no header swap on exit
1908 bool call_vm = UseHeavyMonitors; // flag forces every exit through the runtime
1909 // If it isn't recursive we either must swap old header or call the runtime
1910 if (header != NULL || call_vm) {
1911 if (call_vm || Atomic::cmpxchg_ptr(header, lockee->mark_addr(), lock) != lock) { // CAS failed: lock was inflated/contended
1912 // restore object for the slow case
1913 most_recent->set_obj(lockee);
1914 CALL_VM(InterpreterRuntime::monitorexit(THREAD, most_recent), handle_exception);
1915 }
1916 }
1917 }
1918 UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1); // done: pop the objectref, advance past the 1-byte opcode
1919 }
1920 most_recent++;
1921 }
1922 // Need to throw illegal monitor state exception
1923 CALL_VM(InterpreterRuntime::throw_illegal_monitor_state_exception(THREAD), handle_exception); // no matching slot: lockee was never locked in this frame
2135 } else if (tos_type == ftos) { // float-typed field
2136 obj->float_field_put(field_offset, STACK_FLOAT(-1));
2137 } else {
2138 obj->double_field_put(field_offset, STACK_DOUBLE(-1)); // remaining case: double-typed field
2139 }
2140 }
2141
2142 UPDATE_PC_AND_TOS_AND_CONTINUE(3, count); // skip opcode + 2-byte index; pop 'count' stack slots
2143 }
2144
2145 CASE(_new): { // allocate a new instance of the class at CP index (fast path when possible)
2146 u2 index = Bytes::get_Java_u2(pc+1); // 2-byte constant-pool index follows the opcode
2147 ConstantPool* constants = istate->method()->constants();
2148 if (!constants->tag_at(index).is_unresolved_klass()) { // unresolved klass falls through to the slow path (not shown here)
2149 // Make sure klass is initialized and doesn't have a finalizer
2150 Klass* entry = constants->slot_at(index).get_klass();
2151 assert(entry->is_klass(), "Should be resolved klass");
2152 Klass* k_entry = (Klass*) entry;
2153 assert(k_entry->oop_is_instance(), "Should be InstanceKlass");
2154 InstanceKlass* ik = (InstanceKlass*) k_entry;
2155 if ( ik->is_initialized() && ik->can_be_fastpath_allocated() ) { // fast-path allocation only for initialized, finalizer-free classes
2156 size_t obj_size = ik->size_helper(); // instance size in words
2157 oop result = NULL;
2158 // If the TLAB isn't pre-zeroed then we'll have to do it
2159 bool need_zero = !ZeroTLAB;
2160 if (UseTLAB) {
2161 result = (oop) THREAD->tlab().allocate(obj_size); // thread-local bump allocation; NULL if TLAB is exhausted
2162 }
2163 // Disable non-TLAB-based fast-path, because profiling requires that all
2164 // allocations go through InterpreterRuntime::_new() if THREAD->tlab().allocate
2165 // returns NULL.
2166 #ifndef CC_INTERP_PROFILE
2167 if (result == NULL) {
2168 need_zero = true; // shared eden is never pre-zeroed
2169 // Try allocate in shared eden
2170 retry:
2171 HeapWord* compare_to = *Universe::heap()->top_addr();
2172 HeapWord* new_top = compare_to + obj_size;
2173 if (new_top <= *Universe::heap()->end_addr()) {
2174 if (Atomic::cmpxchg_ptr(new_top, Universe::heap()->top_addr(), compare_to) != compare_to) { // another thread moved top: retry the bump
2175 goto retry;
|
1883 } else {
1884 CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception); // slow path: let the runtime acquire the monitor
1885 }
1886 }
1887 }
1888 UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1); // lock acquired: pop the objectref, advance past the 1-byte opcode
1889 } else {
1890 istate->set_msg(more_monitors); // no free monitor slot: ask the caller to grow the monitor area
1891 UPDATE_PC_AND_RETURN(0); // Re-execute
1892 }
1893 }
1894
1895 CASE(_monitorexit): { // release the monitor for the objectref on top of stack
1896 oop lockee = STACK_OBJECT(-1);
1897 CHECK_NULL(lockee);
1898 // derefing's lockee ought to provoke implicit null check
1899 // find our monitor slot
1900 BasicObjectLock* limit = istate->monitor_base();
1901 BasicObjectLock* most_recent = (BasicObjectLock*) istate->stack_base();
1902 while (most_recent != limit ) { // scan this frame's lock slots, most recent first
1903 if (most_recent->obj() == lockee) {
1904 BasicLock* lock = most_recent->lock();
1905 markOop header = lock->displaced_header(); // NULL displaced header marks a recursive (re-entrant) lock
1906 most_recent->set_obj(NULL); // tentatively free the slot; restored below if the slow path is taken
1907 if (!lockee->mark()->has_bias_pattern()) { // biased locks need no header swap on exit
1908 bool call_vm = UseHeavyMonitors; // flag forces every exit through the runtime
1909 // If it isn't recursive we either must swap old header or call the runtime
1910 if (header != NULL || call_vm) {
1911 if (call_vm || Atomic::cmpxchg_ptr(header, lockee->mark_addr(), lock) != lock) { // CAS failed: lock was inflated/contended
1912 // restore object for the slow case
1913 most_recent->set_obj(lockee);
1914 CALL_VM(InterpreterRuntime::monitorexit(THREAD, most_recent), handle_exception);
1915 }
1916 }
1917 }
1918 UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1); // done: pop the objectref, advance past the 1-byte opcode
1919 }
1920 most_recent++;
1921 }
1922 // Need to throw illegal monitor state exception
1923 CALL_VM(InterpreterRuntime::throw_illegal_monitor_state_exception(THREAD), handle_exception); // no matching slot: lockee was never locked in this frame
2135 } else if (tos_type == ftos) { // float-typed field
2136 obj->float_field_put(field_offset, STACK_FLOAT(-1));
2137 } else {
2138 obj->double_field_put(field_offset, STACK_DOUBLE(-1)); // remaining case: double-typed field
2139 }
2140 }
2141
2142 UPDATE_PC_AND_TOS_AND_CONTINUE(3, count); // skip opcode + 2-byte index; pop 'count' stack slots
2143 }
2144
2145 CASE(_new): { // allocate a new instance of the class at CP index (fast path when possible)
2146 u2 index = Bytes::get_Java_u2(pc+1); // 2-byte constant-pool index follows the opcode
2147 ConstantPool* constants = istate->method()->constants();
2148 if (!constants->tag_at(index).is_unresolved_klass()) { // unresolved klass falls through to the slow path (not shown here)
2149 // Make sure klass is initialized and doesn't have a finalizer
2150 Klass* entry = constants->slot_at(index).get_klass();
2151 assert(entry->is_klass(), "Should be resolved klass");
2152 Klass* k_entry = (Klass*) entry;
2153 assert(k_entry->oop_is_instance(), "Should be InstanceKlass");
2154 InstanceKlass* ik = (InstanceKlass*) k_entry;
2155 if (ik->is_initialized() && ik->can_be_fastpath_allocated() ) { // fast-path allocation only for initialized, finalizer-free classes
2156 size_t obj_size = ik->size_helper(); // instance size in words
2157 oop result = NULL;
2158 // If the TLAB isn't pre-zeroed then we'll have to do it
2159 bool need_zero = !ZeroTLAB;
2160 if (UseTLAB) {
2161 result = (oop) THREAD->tlab().allocate(obj_size); // thread-local bump allocation; NULL if TLAB is exhausted
2162 }
2163 // Disable non-TLAB-based fast-path, because profiling requires that all
2164 // allocations go through InterpreterRuntime::_new() if THREAD->tlab().allocate
2165 // returns NULL.
2166 #ifndef CC_INTERP_PROFILE
2167 if (result == NULL) {
2168 need_zero = true; // shared eden is never pre-zeroed
2169 // Try allocate in shared eden
2170 retry:
2171 HeapWord* compare_to = *Universe::heap()->top_addr();
2172 HeapWord* new_top = compare_to + obj_size;
2173 if (new_top <= *Universe::heap()->end_addr()) {
2174 if (Atomic::cmpxchg_ptr(new_top, Universe::heap()->top_addr(), compare_to) != compare_to) { // another thread moved top: retry the bump
2175 goto retry;
|