
src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp

rev 9232 : 8248851: CMS: Missing memory fences between free chunk check and klass read
Reviewed-by: aph, kbarrett, dholmes
Contributed-by: wangshuai94@huawei.com

*** 1,7 ****
  /*
!  * Copyright (c) 2001, 2014, Oracle and/or its affiliates. All rights reserved.
   * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   *
   * This code is free software; you can redistribute it and/or modify it
   * under the terms of the GNU General Public License version 2 only, as
   * published by the Free Software Foundation.
--- 1,7 ----
  /*
!  * Copyright (c) 2001, 2020, Oracle and/or its affiliates. All rights reserved.
   * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   *
   * This code is free software; you can redistribute it and/or modify it
   * under the terms of the GNU General Public License version 2 only, as
   * published by the Free Software Foundation.
*** 992,1001 ****
--- 992,1005 ----
        if (FreeChunk::indicatesFreeChunk(p)) {
          assert(res != 0, "Block size should not be 0");
          return res;
        }
      } else {
+       // The barrier is required to prevent reordering of the free chunk check
+       // and the klass read.
+       OrderAccess::loadload();
+
        // must read from what 'p' points to in each loop.
        Klass* k = ((volatile oopDesc*)p)->klass_or_null();
        if (k != NULL) {
          assert(k->is_klass(), "Should really be klass oop.");
          oop o = (oop)p;
*** 1047,1056 ****
--- 1051,1064 ----
          assert(res != 0, "Block size should not be 0");
          assert(loops == 0, "Should be 0");
          return res;
        }
      } else {
+       // The barrier is required to prevent reordering of the free chunk check
+       // and the klass read.
+       OrderAccess::loadload();
+
        // must read from what 'p' points to in each loop.
        Klass* k = ((volatile oopDesc*)p)->klass_or_null();
        // We trust the size of any object that has a non-NULL
        // klass and (for those in the perm gen) is parsable
        // -- irrespective of its conc_safe-ty.
*** 1109,1118 ****
--- 1117,1131 ----
    // and those objects (if garbage) may have been modified to hold
    // live range information.
    // assert(CollectedHeap::use_parallel_gc_threads() || _bt.block_start(p) == p,
    //        "Should be a block boundary");
    if (FreeChunk::indicatesFreeChunk(p)) return false;
+
+   // The barrier is required to prevent reordering of the free chunk check
+   // and the klass read.
+   OrderAccess::loadload();
+
    Klass* k = oop(p)->klass_or_null();
    if (k != NULL) {
      // Ignore mark word because it may have been used to
      // chain together promoted objects (the last one
      // would have a null value).
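
For readers unfamiliar with the OrderAccess API, the stand-alone sketch below illustrates the race the patch closes: the load that classifies a block as free must not be reordered with the subsequent load of the klass word that only the allocating thread publishes. This is not HotSpot code; the Block struct, the block_is_obj/allocate names, and the use of std::atomic_thread_fence as a stand-in for OrderAccess::loadload() are illustrative assumptions (an acquire fence is at least as strong as a loadload barrier).

#include <atomic>

// Hypothetical block layout, loosely modeled on a CMS heap block: a
// "free" marker plus a klass-like pointer that the allocating thread
// fills in before clearing the marker.
struct Block {
  std::atomic<bool>  is_free;   // stand-in for FreeChunk::indicatesFreeChunk(p)
  std::atomic<void*> klass;     // stand-in for oop(p)->klass_or_null()
};

// Reader side, mirroring the patched pattern: check the free bit first,
// then read the klass. Without the fence between the two loads, a weakly
// ordered CPU may perform the klass load early and observe a stale
// NULL/garbage klass for a block that is no longer free.
bool block_is_obj(const Block* b) {
  if (b->is_free.load(std::memory_order_relaxed)) {
    return false;
  }
  // Stand-in for OrderAccess::loadload(): keep the two loads ordered.
  std::atomic_thread_fence(std::memory_order_acquire);
  return b->klass.load(std::memory_order_relaxed) != nullptr;
}

// Writer side (allocator): publish the klass first, then clear the free
// bit with release semantics, so a reader whose loads are ordered as
// above sees a valid klass whenever it sees is_free == false.
void allocate(Block* b, void* k) {
  b->klass.store(k, std::memory_order_relaxed);
  b->is_free.store(false, std::memory_order_release);
}

On strongly ordered hardware the loadload barrier typically reduces to a compiler-only barrier, which is why the unfenced check-then-read code can appear to work there; the reordering only becomes observable on weakly ordered architectures.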