36 #include "services/memTracker.hpp"
37 #include "utilities/macros.hpp"
38 #ifdef COMPILER1
39 #include "c1/c1_LIR.hpp"
40 #include "c1/c1_LIRGenerator.hpp"
41 #endif
42
43 // This kind of "BarrierSet" allows a "CollectedHeap" to detect and
44 // enumerate ref fields that have been modified (since the last
45 // enumeration).
46
47 size_t CardTableModRefBS::compute_byte_map_size()
48 {
49 assert(_guard_index == cards_required(_whole_heap.word_size()) - 1,
50 "uninitialized, check declaration order");
51 assert(_page_size != 0, "uninitialized, check declaration order");
52 const size_t granularity = os::vm_allocation_granularity();
53 return align_size_up(_guard_index + 1, MAX2(_page_size, granularity));
54 }
55
56 CardTableModRefBS::CardTableModRefBS(MemRegion whole_heap, BarrierSet::Name kind) :
57 ModRefBarrierSet(kind),
58 _whole_heap(whole_heap),
59 _guard_index(0),
60 _guard_region(),
61 _last_valid_index(0),
62 _page_size(os::vm_page_size()),
63 _byte_map_size(0),
64 _covered(NULL),
65 _committed(NULL),
66 _cur_covered_regions(0),
67 _byte_map(NULL),
68 byte_map_base(NULL),
69 // LNC functionality
70 _lowest_non_clean(NULL),
71 _lowest_non_clean_chunk_size(NULL),
72 _lowest_non_clean_base_chunk_index(NULL),
73 _last_LNC_resizing_collection(NULL)
74 {
75 assert((uintptr_t(_whole_heap.start()) & (card_size - 1)) == 0, "heap must start at card boundary");
76 assert((uintptr_t(_whole_heap.end()) & (card_size - 1)) == 0, "heap must end at card boundary");
77
|
36 #include "services/memTracker.hpp"
37 #include "utilities/macros.hpp"
38 #ifdef COMPILER1
39 #include "c1/c1_LIR.hpp"
40 #include "c1/c1_LIRGenerator.hpp"
41 #endif
42
43 // This kind of "BarrierSet" allows a "CollectedHeap" to detect and
44 // enumerate ref fields that have been modified (since the last
45 // enumeration).
46
47 size_t CardTableModRefBS::compute_byte_map_size()
48 {
49 assert(_guard_index == cards_required(_whole_heap.word_size()) - 1,
50 "uninitialized, check declaration order");
51 assert(_page_size != 0, "uninitialized, check declaration order");
52 const size_t granularity = os::vm_allocation_granularity();
53 return align_size_up(_guard_index + 1, MAX2(_page_size, granularity));
54 }
55
56 CardTableModRefBS::CardTableModRefBS(
57 MemRegion whole_heap,
58 const BarrierSet::FakeRtti& fake_rtti) :
59 ModRefBarrierSet(fake_rtti.add_tag(BarrierSet::CardTableModRef)),
60 _whole_heap(whole_heap),
61 _guard_index(0),
62 _guard_region(),
63 _last_valid_index(0),
64 _page_size(os::vm_page_size()),
65 _byte_map_size(0),
66 _covered(NULL),
67 _committed(NULL),
68 _cur_covered_regions(0),
69 _byte_map(NULL),
70 byte_map_base(NULL),
71 // LNC functionality
72 _lowest_non_clean(NULL),
73 _lowest_non_clean_chunk_size(NULL),
74 _lowest_non_clean_base_chunk_index(NULL),
75 _last_LNC_resizing_collection(NULL)
76 {
77 assert((uintptr_t(_whole_heap.start()) & (card_size - 1)) == 0, "heap must start at card boundary");
78 assert((uintptr_t(_whole_heap.end()) & (card_size - 1)) == 0, "heap must end at card boundary");
79
|