
src/hotspot/share/oops/markOop.hpp

rev 54621 : imported patch 8221734-v1


 160          no_lock_in_place         = unlocked_value
 161   };
 162 
 163   enum { max_age                  = age_mask };
 164 
 165   enum { max_bias_epoch           = epoch_mask };
 166 
 167   // Biased Locking accessors.
 168   // These must be checked by all code which calls into the
 169   // ObjectSynchronizer and other code. The biasing is not understood
 170   // by the lower-level CAS-based locking code, although the runtime
 171   // fixes up biased locks to be compatible with it when a bias is
 172   // revoked.
 173   bool has_bias_pattern() const {
 174     return (mask_bits(value(), biased_lock_mask_in_place) == biased_lock_pattern);
 175   }
 176   JavaThread* biased_locker() const {
 177     assert(has_bias_pattern(), "should not call this otherwise");
 178     return (JavaThread*) ((intptr_t) (mask_bits(value(), ~(biased_lock_mask_in_place | age_mask_in_place | epoch_mask_in_place))));
 179   }
 180   bool biased_locker_is(JavaThread* thread) const {
 181     if (!has_bias_pattern()) {
 182       return false;
 183     }
 184     // If the current thread is not the bias owner, the object can be unbiased at any time.
 185     JavaThread* jt = (JavaThread*) ((intptr_t) (mask_bits(value(), ~(biased_lock_mask_in_place | age_mask_in_place | epoch_mask_in_place))));
 186     return jt == thread;
 187   }
 188 
 189   // Indicates that the mark has the bias bit set but that it has not
 190   // yet been biased toward a particular thread
 191   bool is_biased_anonymously() const {
 192     return (has_bias_pattern() && (biased_locker() == NULL));
 193   }
 194   // Indicates the epoch in which this bias was acquired. If the epoch
 195   // changes due to too many bias revocations occurring, the biases
 196   // from the previous epochs are all considered invalid.
 197   int bias_epoch() const {
 198     assert(has_bias_pattern(), "should not call this otherwise");
 199     return (mask_bits(value(), epoch_mask_in_place) >> epoch_shift);
 200   }
 201   markOop set_bias_epoch(int epoch) {
 202     assert(has_bias_pattern(), "should not call this otherwise");
 203     assert((epoch & (~epoch_mask)) == 0, "epoch overflow");
 204     return markOop(mask_bits(value(), ~epoch_mask_in_place) | (epoch << epoch_shift));
 205   }
 206   markOop incr_bias_epoch() {
 207     return set_bias_epoch((1 + bias_epoch()) & epoch_mask);
 208   }
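The accessors above pack the bias owner, epoch and age into the bits of a single mark word. The following standalone sketch (not part of this patch) models what has_bias_pattern(), biased_locker() and the new biased_locker_is() compute; the masks, shifts and the thread address are assumptions chosen to mirror the usual biased mark word layout defined earlier in markOop.hpp.

#include <cassert>
#include <cstdint>

int main() {
  // Assumed layout, mirroring the usual biased mark word:
  // [JavaThread* | epoch:2 | age:4 | biased_lock:1 | lock:2]
  const uintptr_t biased_lock_mask_in_place = 0x7;        // lock + biased_lock bits
  const uintptr_t biased_lock_pattern       = 0x5;        // 101: biased and unlocked
  const uintptr_t age_mask_in_place         = 0xf << 3;   // four age bits
  const uintptr_t epoch_mask_in_place       = 0x3 << 7;   // two epoch bits

  uintptr_t thread = 0x2000;                               // stand-in for a JavaThread* address
  uintptr_t mark   = thread | (1u << 7) | (2u << 3) | biased_lock_pattern;

  // has_bias_pattern(): the low bits must equal the bias pattern.
  bool has_bias = (mark & biased_lock_mask_in_place) == biased_lock_pattern;

  // biased_locker() / biased_locker_is(): clear the pattern, age and epoch
  // bits to recover the owning thread, then compare against a candidate.
  uintptr_t owner = mark & ~(biased_lock_mask_in_place | age_mask_in_place | epoch_mask_in_place);
  assert(has_bias && owner == thread);
  return 0;
}

As the comment on biased_locker_is() notes, the answer is only a snapshot of the mark word that was read: another thread may revoke the bias immediately afterwards.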


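The epoch accessors use the same masking idiom; in particular incr_bias_epoch() masks the incremented value so it wraps back into range instead of spilling into neighbouring bits. A minimal sketch of that arithmetic, assuming the usual 2-bit epoch field at an assumed shift of 7 (again an illustration, not code from this patch):

#include <cassert>
#include <cstdint>

int main() {
  const uintptr_t epoch_bits          = 2;                     // assumed width of the epoch field
  const uintptr_t epoch_shift         = 7;                     // assumed position of the epoch field
  const uintptr_t epoch_mask          = (1u << epoch_bits) - 1;
  const uintptr_t epoch_mask_in_place = epoch_mask << epoch_shift;

  uintptr_t mark = 3u << epoch_shift;                          // pretend the current epoch is 3, the maximum

  // incr_bias_epoch(): bump the epoch and wrap it back into range,
  // i.e. set_bias_epoch((1 + bias_epoch()) & epoch_mask).
  uintptr_t old_epoch = (mark & epoch_mask_in_place) >> epoch_shift;
  uintptr_t new_epoch = (1 + old_epoch) & epoch_mask;          // 3 + 1 wraps to 0
  mark = (mark & ~epoch_mask_in_place) | (new_epoch << epoch_shift);

  assert(((mark & epoch_mask_in_place) >> epoch_shift) == 0);
  return 0;
}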