// Slow-path allocation for the generational heap: first a lock-free attempt
// in the young generation, then a locked slow-path attempt across the
// generations, looping until the request succeeds or definitively fails.
// Sets *gc_overhead_limit_was_exceeded when the GC-time limit has been hit.
// NOTE(review): this listing carries embedded original line numbers and is
// an excerpt -- original lines 640-688 are elided below (between the
// heap-expansion attempt and the overhead-limit check); the VM operation
// `op` referenced further down is declared in that missing region.
585 HeapWord* GenCollectorPolicy::mem_allocate_work(size_t size,
586 bool is_tlab,
587 bool* gc_overhead_limit_was_exceeded) {
588 GenCollectedHeap *gch = GenCollectedHeap::heap();
589
590 debug_only(gch->check_for_valid_allocation_state());
591 assert(gch->no_gc_in_progress(), "Allocation during gc not allowed");
592
593 // In general gc_overhead_limit_was_exceeded should be false so
594 // set it so here and reset it to true only if the gc time
595 // limit is being exceeded as checked below.
596 *gc_overhead_limit_was_exceeded = false;
597
598 HeapWord* result = NULL;
599
600 // Loop until the allocation is satisfied, or unsatisfied after GC.
601 for (int try_count = 1, gclocker_stalled_count = 0; /* return or throw */; try_count += 1) {
602 HandleMark hm; // Discard any handles allocated in each iteration.
603
604 // First allocation attempt is lock-free.
605 Generation *young = gch->get_gen(0);
606 assert(young->supports_inline_contig_alloc(),
607 "Otherwise, must do alloc within heap lock");
608 if (young->should_allocate(size, is_tlab)) {
609 result = young->par_allocate(size, is_tlab);
610 if (result != NULL) {
611 assert(gch->is_in_reserved(result), "result not in heap");
612 return result;
613 }
614 }
// Lock-free attempt failed: take the Heap_lock and try the slower,
// locked allocation path.
615 unsigned int gc_count_before; // Read inside the Heap_lock locked region.
616 {
617 MutexLocker ml(Heap_lock);
618 if (PrintGC && Verbose) {
619 gclog_or_tty->print_cr("TwoGenerationCollectorPolicy::mem_allocate_work:"
620 " attempting locked slow path allocation");
621 }
622 // Note that only large objects get a shot at being
623 // allocated in later generations.
624 bool first_only = ! should_try_older_generation_allocation(size);
625
626 result = gch->attempt_allocation(size, is_tlab, first_only);
627 if (result != NULL) {
628 assert(gch->is_in_reserved(result), "result not in heap");
629 return result;
630 }
631
// A JNI critical section is active, so no collection can run now.
632 if (GC_locker::is_active_and_needs_gc()) {
633 if (is_tlab) {
634 return NULL; // Caller will retry allocating individual object.
635 }
636 if (!gch->is_maximal_no_gc()) {
637 // Try and expand heap to satisfy request.
638 result = expand_heap_and_allocate(size, is_tlab);
639 // Result could be null if we are out of space.
// NOTE(review): original lines 640-688 are elided here; the code below
// apparently runs after a collection has been attempted (it reads the
// result of a VM operation `op` declared in the missing region).
689
690 const bool limit_exceeded = size_policy()->gc_overhead_limit_exceeded();
691 const bool softrefs_clear = all_soft_refs_clear();
692
// The overhead limit only counts once soft references have already been
// cleared; report it to the caller, reset the flag, and fail the
// allocation, filling any partial result with a dummy object
// (presumably to keep the heap parseable -- confirm against full source).
693 if (limit_exceeded && softrefs_clear) {
694 *gc_overhead_limit_was_exceeded = true;
695 size_policy()->set_gc_overhead_limit_exceeded(false);
696 if (op.result() != NULL) {
697 CollectedHeap::fill_with_object(op.result(), size);
698 }
699 return NULL;
700 }
701 assert(result == NULL || gch->is_in_reserved(result),
702 "result not in heap");
703 return result;
704 }
705
706 // Give a warning if we seem to be looping forever.
707 if ((QueuedAllocationWarningCount > 0) &&
708 (try_count % QueuedAllocationWarningCount == 0)) {
709 warning("TwoGenerationCollectorPolicy::mem_allocate_work retries %d times \n\t"
710 " size=" SIZE_FORMAT " %s", try_count, size, is_tlab ? "(TLAB)" : "");
711 }
712 }
713 }
714
715 HeapWord* GenCollectorPolicy::expand_heap_and_allocate(size_t size,
716 bool is_tlab) {
717 GenCollectedHeap *gch = GenCollectedHeap::heap();
718 HeapWord* result = NULL;
719 for (int i = number_of_generations() - 1; i >= 0 && result == NULL; i--) {
720 Generation *gen = gch->get_gen(i);
721 if (gen->should_allocate(size, is_tlab)) {
722 result = gen->expand_and_allocate(size, is_tlab);
723 }
724 }
725 assert(result == NULL || gch->is_in_reserved(result), "result not in heap");
726 return result;
727 }
728
// Last-ditch allocation after a normal attempt has failed: runs with the
// GC cause set to _allocation_failure. When the GC locker is held (a JNI
// critical section is active) no collection may run, so the code falls
// back to expanding the heap instead.
// NOTE(review): this listing is an excerpt with embedded original line
// numbers -- original lines 743-874 are elided below. The trailing lines
// (which reference `op`, `loop_count` and `word_size`, and whose warning
// names satisfy_failed_metadata_allocation) appear to belong to a
// different, partially elided function -- verify against the full source.
729 HeapWord* GenCollectorPolicy::satisfy_failed_allocation(size_t size,
730 bool is_tlab) {
731 GenCollectedHeap *gch = GenCollectedHeap::heap();
732 GCCauseSetter x(gch, GCCause::_allocation_failure);
733 HeapWord* result = NULL;
734
735 assert(size != 0, "Precondition violated");
736 if (GC_locker::is_active_and_needs_gc()) {
737 // GC locker is active; instead of a collection we will attempt
738 // to expand the heap, if there's room for expansion.
739 if (!gch->is_maximal_no_gc()) {
740 result = expand_heap_and_allocate(size, is_tlab);
741 }
742 return result; // Could be null if we are out of space.
// NOTE(review): original lines 743-874 are elided here.
875 return op.result();
876 }
877 loop_count++;
878 if ((QueuedAllocationWarningCount > 0) &&
879 (loop_count % QueuedAllocationWarningCount == 0)) {
880 warning("satisfy_failed_metadata_allocation() retries %d times \n\t"
881 " size=" SIZE_FORMAT, loop_count, word_size);
882 }
883 } while (true); // Until a GC is done
884 }
885
886 // Return true if any of the following is true:
887 // . the allocation won't fit into the current young gen heap
888 // . gc locker is occupied (jni critical section)
889 // . heap memory is tight -- the most recent previous collection
890 // was a full collection because a partial collection (would
891 // have) failed and is likely to fail again
892 bool GenCollectorPolicy::should_try_older_generation_allocation(
893 size_t word_size) const {
894 GenCollectedHeap* gch = GenCollectedHeap::heap();
895 size_t young_capacity = gch->get_gen(0)->capacity_before_gc();
896 return (word_size > heap_word_size(young_capacity))
897 || GC_locker::is_active_and_needs_gc()
898 || gch->incremental_collection_failed();
899 }
900
901
//
// MarkSweepPolicy methods
//
905
906 void MarkSweepPolicy::initialize_alignments() {
907 _space_alignment = _gen_alignment = (uintx)Generation::GenGrain;
908 _heap_alignment = compute_heap_alignment();
909 }
910
911 void MarkSweepPolicy::initialize_generations() {
912 _generations = NEW_C_HEAP_ARRAY3(GenerationSpecPtr, number_of_generations(), mtGC, CURRENT_PC,
913 AllocFailStrategy::RETURN_NULL);
914 if (_generations == NULL) {
915 vm_exit_during_initialization("Unable to allocate gen spec");
|
// Slow-path allocation for the generational heap: first a lock-free attempt
// in the young generation, then a locked slow-path attempt across the
// generations, looping until the request succeeds or definitively fails.
// Sets *gc_overhead_limit_was_exceeded when the GC-time limit has been hit.
// This copy uses the young_gen() accessor (vs. get_gen(0) in the earlier
// copy of this listing).
// NOTE(review): this listing carries embedded original line numbers and is
// an excerpt -- original lines 640-688 are elided below; the VM operation
// `op` referenced further down is declared in that missing region.
585 HeapWord* GenCollectorPolicy::mem_allocate_work(size_t size,
586 bool is_tlab,
587 bool* gc_overhead_limit_was_exceeded) {
588 GenCollectedHeap *gch = GenCollectedHeap::heap();
589
590 debug_only(gch->check_for_valid_allocation_state());
591 assert(gch->no_gc_in_progress(), "Allocation during gc not allowed");
592
593 // In general gc_overhead_limit_was_exceeded should be false so
594 // set it so here and reset it to true only if the gc time
595 // limit is being exceeded as checked below.
596 *gc_overhead_limit_was_exceeded = false;
597
598 HeapWord* result = NULL;
599
600 // Loop until the allocation is satisfied, or unsatisfied after GC.
601 for (int try_count = 1, gclocker_stalled_count = 0; /* return or throw */; try_count += 1) {
602 HandleMark hm; // Discard any handles allocated in each iteration.
603
604 // First allocation attempt is lock-free.
605 Generation *young = gch->young_gen();
606 assert(young->supports_inline_contig_alloc(),
607 "Otherwise, must do alloc within heap lock");
608 if (young->should_allocate(size, is_tlab)) {
609 result = young->par_allocate(size, is_tlab);
610 if (result != NULL) {
611 assert(gch->is_in_reserved(result), "result not in heap");
612 return result;
613 }
614 }
// Lock-free attempt failed: take the Heap_lock and try the slower,
// locked allocation path.
615 unsigned int gc_count_before; // Read inside the Heap_lock locked region.
616 {
617 MutexLocker ml(Heap_lock);
618 if (PrintGC && Verbose) {
619 gclog_or_tty->print_cr("GenCollectorPolicy::mem_allocate_work:"
620 " attempting locked slow path allocation");
621 }
622 // Note that only large objects get a shot at being
623 // allocated in later generations.
624 bool first_only = ! should_try_older_generation_allocation(size);
625
626 result = gch->attempt_allocation(size, is_tlab, first_only);
627 if (result != NULL) {
628 assert(gch->is_in_reserved(result), "result not in heap");
629 return result;
630 }
631
// A JNI critical section is active, so no collection can run now.
632 if (GC_locker::is_active_and_needs_gc()) {
633 if (is_tlab) {
634 return NULL; // Caller will retry allocating individual object.
635 }
636 if (!gch->is_maximal_no_gc()) {
637 // Try and expand heap to satisfy request.
638 result = expand_heap_and_allocate(size, is_tlab);
639 // Result could be null if we are out of space.
// NOTE(review): original lines 640-688 are elided here; the code below
// apparently runs after a collection has been attempted (it reads the
// result of a VM operation `op` declared in the missing region).
689
690 const bool limit_exceeded = size_policy()->gc_overhead_limit_exceeded();
691 const bool softrefs_clear = all_soft_refs_clear();
692
// The overhead limit only counts once soft references have already been
// cleared; report it to the caller, reset the flag, and fail the
// allocation, filling any partial result with a dummy object
// (presumably to keep the heap parseable -- confirm against full source).
693 if (limit_exceeded && softrefs_clear) {
694 *gc_overhead_limit_was_exceeded = true;
695 size_policy()->set_gc_overhead_limit_exceeded(false);
696 if (op.result() != NULL) {
697 CollectedHeap::fill_with_object(op.result(), size);
698 }
699 return NULL;
700 }
701 assert(result == NULL || gch->is_in_reserved(result),
702 "result not in heap");
703 return result;
704 }
705
706 // Give a warning if we seem to be looping forever.
707 if ((QueuedAllocationWarningCount > 0) &&
708 (try_count % QueuedAllocationWarningCount == 0)) {
709 warning("GenCollectorPolicy::mem_allocate_work retries %d times \n\t"
710 " size=" SIZE_FORMAT " %s", try_count, size, is_tlab ? "(TLAB)" : "");
711 }
712 }
713 }
714
715 HeapWord* GenCollectorPolicy::expand_heap_and_allocate(size_t size,
716 bool is_tlab) {
717 GenCollectedHeap *gch = GenCollectedHeap::heap();
718 HeapWord* result = NULL;
719 Generation *old = gch->old_gen();
720 if (old->should_allocate(size, is_tlab)) {
721 result = old->expand_and_allocate(size, is_tlab);
722 }
723 if (result == NULL) {
724 Generation *young = gch->young_gen();
725 if (young->should_allocate(size, is_tlab)) {
726 result = young->expand_and_allocate(size, is_tlab);
727 }
728 }
729 assert(result == NULL || gch->is_in_reserved(result), "result not in heap");
730 return result;
731 }
732
// Last-ditch allocation after a normal attempt has failed: runs with the
// GC cause set to _allocation_failure. When the GC locker is held (a JNI
// critical section is active) no collection may run, so the code falls
// back to expanding the heap instead.
// NOTE(review): this listing is an excerpt with embedded original line
// numbers -- original lines 747-878 are elided below. The trailing lines
// (which reference `op`, `loop_count` and `word_size`, and whose warning
// names satisfy_failed_metadata_allocation) appear to belong to a
// different, partially elided function -- verify against the full source.
733 HeapWord* GenCollectorPolicy::satisfy_failed_allocation(size_t size,
734 bool is_tlab) {
735 GenCollectedHeap *gch = GenCollectedHeap::heap();
736 GCCauseSetter x(gch, GCCause::_allocation_failure);
737 HeapWord* result = NULL;
738
739 assert(size != 0, "Precondition violated");
740 if (GC_locker::is_active_and_needs_gc()) {
741 // GC locker is active; instead of a collection we will attempt
742 // to expand the heap, if there's room for expansion.
743 if (!gch->is_maximal_no_gc()) {
744 result = expand_heap_and_allocate(size, is_tlab);
745 }
746 return result; // Could be null if we are out of space.
// NOTE(review): original lines 747-878 are elided here.
879 return op.result();
880 }
881 loop_count++;
882 if ((QueuedAllocationWarningCount > 0) &&
883 (loop_count % QueuedAllocationWarningCount == 0)) {
884 warning("satisfy_failed_metadata_allocation() retries %d times \n\t"
885 " size=" SIZE_FORMAT, loop_count, word_size);
886 }
887 } while (true); // Until a GC is done
888 }
889
890 // Return true if any of the following is true:
891 // . the allocation won't fit into the current young gen heap
892 // . gc locker is occupied (jni critical section)
893 // . heap memory is tight -- the most recent previous collection
894 // was a full collection because a partial collection (would
895 // have) failed and is likely to fail again
896 bool GenCollectorPolicy::should_try_older_generation_allocation(
897 size_t word_size) const {
898 GenCollectedHeap* gch = GenCollectedHeap::heap();
899 size_t young_capacity = gch->young_gen()->capacity_before_gc();
900 return (word_size > heap_word_size(young_capacity))
901 || GC_locker::is_active_and_needs_gc()
902 || gch->incremental_collection_failed();
903 }
904
905
//
// MarkSweepPolicy methods
//
909
910 void MarkSweepPolicy::initialize_alignments() {
911 _space_alignment = _gen_alignment = (uintx)Generation::GenGrain;
912 _heap_alignment = compute_heap_alignment();
913 }
914
915 void MarkSweepPolicy::initialize_generations() {
916 _generations = NEW_C_HEAP_ARRAY3(GenerationSpecPtr, number_of_generations(), mtGC, CURRENT_PC,
917 AllocFailStrategy::RETURN_NULL);
918 if (_generations == NULL) {
919 vm_exit_during_initialization("Unable to allocate gen spec");
|