9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "classfile/classLoader.hpp"
27 #include "classfile/javaClasses.hpp"
28 #include "gc/shared/allocTracer.hpp"
29 #include "gc/shared/gcId.hpp"
30 #include "gc/shared/gcLocker.inline.hpp"
31 #include "gc/shared/genCollectedHeap.hpp"
32 #include "gc/shared/vmGCOperations.hpp"
33 #include "memory/oopFactory.hpp"
34 #include "oops/instanceKlass.hpp"
35 #include "oops/instanceRefKlass.hpp"
36 #include "runtime/handles.inline.hpp"
37 #include "runtime/init.hpp"
38 #include "runtime/interfaceSupport.hpp"
39 #include "utilities/dtrace.hpp"
40 #include "utilities/macros.hpp"
41 #include "utilities/preserveException.hpp"
42 #if INCLUDE_ALL_GCS
43 #include "gc/g1/g1CollectedHeap.inline.hpp"
44 #endif // INCLUDE_ALL_GCS
45
46 VM_GC_Operation::~VM_GC_Operation() {
47 CollectedHeap* ch = Universe::heap();
48 ch->collector_policy()->set_all_soft_refs_clear(false);
49 }
181 }
182 }
183
184 void VM_GenCollectFull::doit() {
185 SvcGCMarker sgcm(SvcGCMarker::FULL);
186
187 GenCollectedHeap* gch = GenCollectedHeap::heap();
188 GCCauseSetter gccs(gch, _gc_cause);
189 gch->do_full_collection(gch->must_clear_all_soft_refs(), _max_generation);
190 }
191
192 VM_CollectForMetadataAllocation::VM_CollectForMetadataAllocation(ClassLoaderData* loader_data,
193 size_t size,
194 Metaspace::MetadataType mdtype,
195 uint gc_count_before,
196 uint full_gc_count_before,
197 GCCause::Cause gc_cause)
198 : VM_GC_Operation(gc_count_before, gc_cause, full_gc_count_before, true),
199 _loader_data(loader_data), _size(size), _mdtype(mdtype), _result(NULL) {
200 assert(_size != 0, "An allocation should always be requested with this operation.");
201 AllocTracer::send_allocation_requiring_gc_event(_size * HeapWordSize, GCId::peek());
202 }
203
204 // Returns true iff concurrent GCs unloads metadata.
205 bool VM_CollectForMetadataAllocation::initiate_concurrent_GC() {
206 #if INCLUDE_ALL_GCS
207 if (UseConcMarkSweepGC && CMSClassUnloadingEnabled) {
208 MetaspaceGC::set_should_concurrent_collect(true);
209 return true;
210 }
211
212 if (UseG1GC && ClassUnloadingWithConcurrentMark) {
213 G1CollectedHeap* g1h = G1CollectedHeap::heap();
214 g1h->g1_policy()->collector_state()->set_initiate_conc_mark_if_possible(true);
215
216 GCCauseSetter x(g1h, _gc_cause);
217
218 // At this point we are supposed to start a concurrent cycle. We
219 // will do so if one is not already in progress.
220 bool should_start = g1h->g1_policy()->force_initial_mark_if_outside_cycle(_gc_cause);
221
293 heap->collect_as_vm_thread(GCCause::_last_ditch_collection);
294 _result = _loader_data->metaspace_non_null()->allocate(_size, _mdtype);
295 if (_result != NULL) {
296 return;
297 }
298
299 if (Verbose && PrintGCDetails) {
300 gclog_or_tty->print_cr("\nAfter Metaspace GC failed to allocate size "
301 SIZE_FORMAT, _size);
302 }
303
304 if (GC_locker::is_active_and_needs_gc()) {
305 set_gc_locked();
306 }
307 }
308
309 VM_CollectForAllocation::VM_CollectForAllocation(size_t word_size, uint gc_count_before, GCCause::Cause cause)
310 : VM_GC_Operation(gc_count_before, cause), _result(NULL), _word_size(word_size) {
311 // Only report if operation was really caused by an allocation.
312 if (_word_size != 0) {
313 AllocTracer::send_allocation_requiring_gc_event(_word_size * HeapWordSize, GCId::peek());
314 }
315 }
|
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "classfile/classLoader.hpp"
27 #include "classfile/javaClasses.hpp"
28 #include "gc/shared/allocTracer.hpp"
29 #include "gc/shared/gcLocker.inline.hpp"
30 #include "gc/shared/genCollectedHeap.hpp"
31 #include "gc/shared/vmGCOperations.hpp"
32 #include "memory/oopFactory.hpp"
33 #include "oops/instanceKlass.hpp"
34 #include "oops/instanceRefKlass.hpp"
35 #include "runtime/handles.inline.hpp"
36 #include "runtime/init.hpp"
37 #include "runtime/interfaceSupport.hpp"
38 #include "utilities/dtrace.hpp"
39 #include "utilities/macros.hpp"
40 #include "utilities/preserveException.hpp"
41 #if INCLUDE_ALL_GCS
42 #include "gc/g1/g1CollectedHeap.inline.hpp"
43 #endif // INCLUDE_ALL_GCS
44
// On completion of any GC operation, reset the collector policy's
// "clear all soft refs" flag: that decision applied only to the GC
// that just ran, not to future collections.
VM_GC_Operation::~VM_GC_Operation() {
  CollectedHeap* ch = Universe::heap();
  ch->collector_policy()->set_all_soft_refs_clear(false);
}
180 }
181 }
182
// VM operation entry point: run a stop-the-world full collection of the
// generational heap, collecting up through _max_generation.
void VM_GenCollectFull::doit() {
  // RAII marker flagging this scope as a FULL GC for serviceability support.
  SvcGCMarker sgcm(SvcGCMarker::FULL);

  GenCollectedHeap* gch = GenCollectedHeap::heap();
  // Record the GC cause on the heap for the duration of the collection.
  GCCauseSetter gccs(gch, _gc_cause);
  // Soft references are cleared only if the collector policy demands it.
  gch->do_full_collection(gch->must_clear_all_soft_refs(), _max_generation);
}
190
// Construct a VM operation that performs a GC in order to satisfy a
// metaspace allocation of 'size' words of metadata of kind 'mdtype' on
// behalf of 'loader_data'. Always treated as a full collection (last
// argument to the VM_GC_Operation base constructor is true).
VM_CollectForMetadataAllocation::VM_CollectForMetadataAllocation(ClassLoaderData* loader_data,
                                                                 size_t size,
                                                                 Metaspace::MetadataType mdtype,
                                                                 uint gc_count_before,
                                                                 uint full_gc_count_before,
                                                                 GCCause::Cause gc_cause)
  : VM_GC_Operation(gc_count_before, gc_cause, full_gc_count_before, true),
    _loader_data(loader_data), _size(size), _mdtype(mdtype), _result(NULL) {
  assert(_size != 0, "An allocation should always be requested with this operation.");
  // Report the allocation that is forcing this GC to the allocation tracer.
  AllocTracer::send_allocation_requiring_gc_event(_size * HeapWordSize);
}
202
203 // Returns true iff concurrent GCs unloads metadata.
204 bool VM_CollectForMetadataAllocation::initiate_concurrent_GC() {
205 #if INCLUDE_ALL_GCS
206 if (UseConcMarkSweepGC && CMSClassUnloadingEnabled) {
207 MetaspaceGC::set_should_concurrent_collect(true);
208 return true;
209 }
210
211 if (UseG1GC && ClassUnloadingWithConcurrentMark) {
212 G1CollectedHeap* g1h = G1CollectedHeap::heap();
213 g1h->g1_policy()->collector_state()->set_initiate_conc_mark_if_possible(true);
214
215 GCCauseSetter x(g1h, _gc_cause);
216
217 // At this point we are supposed to start a concurrent cycle. We
218 // will do so if one is not already in progress.
219 bool should_start = g1h->g1_policy()->force_initial_mark_if_outside_cycle(_gc_cause);
220
292 heap->collect_as_vm_thread(GCCause::_last_ditch_collection);
293 _result = _loader_data->metaspace_non_null()->allocate(_size, _mdtype);
294 if (_result != NULL) {
295 return;
296 }
297
298 if (Verbose && PrintGCDetails) {
299 gclog_or_tty->print_cr("\nAfter Metaspace GC failed to allocate size "
300 SIZE_FORMAT, _size);
301 }
302
303 if (GC_locker::is_active_and_needs_gc()) {
304 set_gc_locked();
305 }
306 }
307
// Base constructor for GC operations caused by an allocation request of
// 'word_size' heap words. A word_size of 0 indicates the operation was not
// triggered by a specific allocation.
VM_CollectForAllocation::VM_CollectForAllocation(size_t word_size, uint gc_count_before, GCCause::Cause cause)
  : VM_GC_Operation(gc_count_before, cause), _result(NULL), _word_size(word_size) {
  // Only report if operation was really caused by an allocation.
  if (_word_size != 0) {
    AllocTracer::send_allocation_requiring_gc_event(_word_size * HeapWordSize);
  }
}
|