
src/hotspot/share/gc/shared/referenceProcessor.cpp

rev 49944 : imported patch 8201492-properly-implement-non-contiguous-reference-processing
rev 49945 : imported patch 8201492-stefanj-review
rev 49949 : imported patch 8202021-cleanup-referenceprocessor
rev 49950 : imported patch 8202021-stefanj-review
rev 49951 : imported patch 8202017-reference-processor-remove-enqueue
rev 49952 : imported patch 8202017-kim-review
rev 49953 : imported patch 8201491-precleaning


 577                     DiscoveredList                refs_lists[],
 578                     bool                          clear_referent,
 579                     bool                          marks_oops_alive,
 580                     ReferenceProcessorPhaseTimes* phase_times)
 581     : ProcessTask(ref_processor, refs_lists, marks_oops_alive, phase_times),
 582       _clear_referent(clear_referent)
 583   { }
 584   virtual void work(unsigned int i, BoolObjectClosure& is_alive,
 585                     OopClosure& keep_alive,
 586                     VoidClosure& complete_gc)
 587   {
 588     RefProcWorkerTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase3, _phase_times, i);
 589 
 590     _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
 591                                   &is_alive, &keep_alive, &complete_gc);
 592   }
 593 private:
 594   bool _clear_referent;
 595 };
 596 
 597 #ifndef PRODUCT
 598 void ReferenceProcessor::log_reflist_counts(DiscoveredList ref_lists[], uint active_length, size_t total_refs) {
 599   if (!log_is_enabled(Trace, gc, ref)) {
 600     return;
 601   }
 602 
 603   stringStream st;
 604   for (uint i = 0; i < active_length; ++i) {
 605     st.print(SIZE_FORMAT " ", ref_lists[i].length());
 606   }
 607   log_develop_trace(gc, ref)("%s= " SIZE_FORMAT, st.as_string(), total_refs);
 608 #ifdef ASSERT
 609   for (uint i = active_length; i < _max_num_queues; i++) {
 610     assert(ref_lists[i].length() == 0, SIZE_FORMAT " unexpected References in %u",
 611            ref_lists[i].length(), i);
 612   }
 613 #endif
 614 }
 615 #endif
 616 
 617 void ReferenceProcessor::set_active_mt_degree(uint v) {
 618   _num_queues = v;
 619   _next_id = 0;
 620 }
 621 
 622 // Balances reference queues.
 623 // Move entries from all queues[0, 1, ..., _max_num_queues-1] to
 624 // queues[0, 1, ..., _num_queues-1] because only the first _num_queues
 625 // queues, corresponding to the active workers, will be processed.
 626 void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
 627 {
 628   // calculate total length
 629   size_t total_refs = 0;
 630   log_develop_trace(gc, ref)("Balance ref_lists ");
 631 
 632   for (uint i = 0; i < _max_num_queues; ++i) {
 633     total_refs += ref_lists[i].length();
 634   }
 635   log_reflist_counts(ref_lists, _max_num_queues, total_refs);
 636   size_t avg_refs = total_refs / _num_queues + 1;
 637   uint to_idx = 0;
 638   for (uint from_idx = 0; from_idx < _max_num_queues; from_idx++) {
 639     bool move_all = false;
 640     if (from_idx >= _num_queues) {
 641       move_all = ref_lists[from_idx].length() > 0;
 642     }
 643     while ((ref_lists[from_idx].length() > avg_refs) ||
 644            move_all) {
 645       assert(to_idx < _num_queues, "Sanity Check!");
 646       if (ref_lists[to_idx].length() < avg_refs) {
 647         // move superfluous refs
 648         size_t refs_to_move;
 649         // Move all the refs if the from queue will not be processed.
 650         if (move_all) {
 651           refs_to_move = MIN2(ref_lists[from_idx].length(),
 652                               avg_refs - ref_lists[to_idx].length());
 653         } else {
 654           refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs,
 655                               avg_refs - ref_lists[to_idx].length());

 676         ref_lists[to_idx].set_head(move_head);
 677         ref_lists[to_idx].inc_length(refs_to_move);
 678 
 679         // Remove the chain from the from list.
 680         if (move_tail == new_head) {
 681           // We found the end of the from list.
 682           ref_lists[from_idx].set_head(NULL);
 683         } else {
 684           ref_lists[from_idx].set_head(new_head);
 685         }
 686         ref_lists[from_idx].dec_length(refs_to_move);
 687         if (ref_lists[from_idx].length() == 0) {
 688           break;
 689         }
 690       } else {
 691         to_idx = (to_idx + 1) % _num_queues;
 692       }
 693     }
 694   }
 695 #ifdef ASSERT
 696   size_t balanced_total_refs = 0;
 697   for (uint i = 0; i < _num_queues; ++i) {
 698     balanced_total_refs += ref_lists[i].length();
 699   }
 700   log_reflist_counts(ref_lists, _num_queues, balanced_total_refs);
 701   assert(total_refs == balanced_total_refs, "Balancing was incomplete");
 702 #endif
 703 }
 704 
 705 void ReferenceProcessor::process_discovered_reflist(
 706   DiscoveredList                refs_lists[],
 707   ReferencePolicy*              policy,
 708   bool                          clear_referent,
 709   BoolObjectClosure*            is_alive,
 710   OopClosure*                   keep_alive,
 711   VoidClosure*                  complete_gc,
 712   AbstractRefProcTaskExecutor*  task_executor,
 713   ReferenceProcessorPhaseTimes* phase_times)
 714 {
 715   bool mt_processing = task_executor != NULL && _processing_is_mt;
 716 
 717   phase_times->set_processing_is_mt(mt_processing);
 718 
 719   if (mt_processing && ParallelRefProcBalancingEnabled) {
 720     RefProcBalanceQueuesTimeTracker tt(phase_times);

 994     RawAccess<>::oop_store(discovered_addr, next_discovered);
 995     list->set_head(obj);
 996     list->inc_length(1);
 997 
 998     log_develop_trace(gc, ref)("Discovered reference (" INTPTR_FORMAT ": %s)", p2i(obj), obj->klass()->internal_name());
 999   }
1000   assert(oopDesc::is_oop(obj), "Discovered a bad reference");
1001   verify_referent(obj);
1002   return true;
1003 }
1004 
1005 bool ReferenceProcessor::has_discovered_references() {
1006   for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
1007     if (!_discovered_refs[i].is_empty()) {
1008       return true;
1009     }
1010   }
1011   return false;
1012 }
1013 
1014 // Preclean the discovered references by removing those
1015 // whose referents are alive, and by marking from those that
1016 // are not active. These lists can be handled here
1017 // in any order and, indeed, concurrently.
1018 void ReferenceProcessor::preclean_discovered_references(
1019   BoolObjectClosure* is_alive,
1020   OopClosure* keep_alive,
1021   VoidClosure* complete_gc,
1022   YieldClosure* yield,
1023   GCTimer* gc_timer) {
1024 
1025   // Soft references
1026   {
1027     GCTraceTime(Debug, gc, ref) tm("Preclean SoftReferences", gc_timer);
1028     for (uint i = 0; i < _max_num_queues; i++) {
1029       if (yield->should_return()) {
1030         return;
1031       }
1032       preclean_discovered_reflist(_discoveredSoftRefs[i], is_alive,
1033                                   keep_alive, complete_gc, yield);
1034     }
1035   }
1036 
1037   // Weak references
1038   {
1039     GCTraceTime(Debug, gc, ref) tm("Preclean WeakReferences", gc_timer);
1040     for (uint i = 0; i < _max_num_queues; i++) {
1041       if (yield->should_return()) {
1042         return;
1043       }
1044       preclean_discovered_reflist(_discoveredWeakRefs[i], is_alive,
1045                                   keep_alive, complete_gc, yield);
1046     }
1047   }
1048 
1049   // Final references
1050   {
1051     GCTraceTime(Debug, gc, ref) tm("Preclean FinalReferences", gc_timer);
1052     for (uint i = 0; i < _max_num_queues; i++) {
1053       if (yield->should_return()) {
1054         return;
1055       }
1056       preclean_discovered_reflist(_discoveredFinalRefs[i], is_alive,
1057                                   keep_alive, complete_gc, yield);
1058     }
1059   }
1060 
1061   // Phantom references
1062   {
1063     GCTraceTime(Debug, gc, ref) tm("Preclean PhantomReferences", gc_timer);
1064     for (uint i = 0; i < _max_num_queues; i++) {
1065       if (yield->should_return()) {
1066         return;
1067       }
1068       preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
1069                                   keep_alive, complete_gc, yield);
1070     }
1071   }
1072 }
1073 
1074 // Walk the given discovered ref list, and remove all reference objects
 1075 // whose referents are still alive, whose referents are NULL, or which
1076 // are not active (have a non-NULL next field). NOTE: When we are
1077 // thus precleaning the ref lists (which happens single-threaded today),
1078 // we do not disable refs discovery to honor the correct semantics of
1079 // java.lang.Reference. As a result, we need to be careful below
1080 // that ref removal steps interleave safely with ref discovery steps
1081 // (in this thread).
1082 void
1083 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList&    refs_list,
1084                                                 BoolObjectClosure* is_alive,
1085                                                 OopClosure*        keep_alive,
1086                                                 VoidClosure*       complete_gc,
1087                                                 YieldClosure*      yield) {
1088   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
1089   while (iter.has_next()) {
1090     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
1091     oop obj = iter.obj();
1092     oop next = java_lang_ref_Reference::next(obj);
1093     if (iter.referent() == NULL || iter.is_referent_alive() ||
1094         next != NULL) {
1095       // The referent has been cleared, or is alive, or the Reference is not
1096       // active; we need to trace and mark its cohort.
1097       log_develop_trace(gc, ref)("Precleaning Reference (" INTPTR_FORMAT ": %s)",
1098                                  p2i(iter.obj()), iter.obj()->klass()->internal_name());
1099       // Remove Reference object from list
1100       iter.remove();
1101       // Keep alive its cohort.
1102       iter.make_referent_alive();
1103       if (UseCompressedOops) {
1104         narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr_raw(obj);
1105         keep_alive->do_oop(next_addr);
1106       } else {
1107         oop* next_addr = (oop*)java_lang_ref_Reference::next_addr_raw(obj);
1108         keep_alive->do_oop(next_addr);
1109       }
1110       iter.move_to_next();
1111     } else {
1112       iter.next();
1113     }
1114   }
1115   // Close the reachable set
1116   complete_gc->do_void();
1117 
1118   NOT_PRODUCT(
1119     if (iter.processed() > 0) {
1120       log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " Refs out of " SIZE_FORMAT " Refs in discovered list " INTPTR_FORMAT,
1121         iter.removed(), iter.processed(), p2i(&refs_list));
1122     }
1123   )
1124 }
1125 
1126 const char* ReferenceProcessor::list_name(uint i) {
 1127    assert(i < _max_num_queues * number_of_subclasses_of_ref(),
1128           "Out of bounds index");
1129 
1130    int j = i / _max_num_queues;
1131    switch (j) {
1132      case 0: return "SoftRef";
1133      case 1: return "WeakRef";
1134      case 2: return "FinalRef";
1135      case 3: return "PhantomRef";
1136    }
1137    ShouldNotReachHere();
1138    return NULL;
1139 }

 577                     DiscoveredList                refs_lists[],
 578                     bool                          clear_referent,
 579                     bool                          marks_oops_alive,
 580                     ReferenceProcessorPhaseTimes* phase_times)
 581     : ProcessTask(ref_processor, refs_lists, marks_oops_alive, phase_times),
 582       _clear_referent(clear_referent)
 583   { }
 584   virtual void work(unsigned int i, BoolObjectClosure& is_alive,
 585                     OopClosure& keep_alive,
 586                     VoidClosure& complete_gc)
 587   {
 588     RefProcWorkerTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase3, _phase_times, i);
 589 
 590     _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
 591                                   &is_alive, &keep_alive, &complete_gc);
 592   }
 593 private:
 594   bool _clear_referent;
 595 };
 596 
 597 void ReferenceProcessor::log_reflist(const char* prefix, DiscoveredList list[], uint num_active_queues) {
 598   LogTarget(Trace, gc, ref) lt;
 599 
 600   if (!lt.is_enabled()) {
 601     return;
 602   }
 603 
 604   size_t total = 0;
 605 
 606   LogStream ls(lt);
 607   ls.print("%s", prefix);
 608   for (uint i = 0; i < num_active_queues; i++) {
 609     ls.print(SIZE_FORMAT " ", list[i].length());
 610     total += list[i].length();
 611   }
 612   ls.print_cr("(" SIZE_FORMAT ")", total);
 613 }
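
With illustrative lengths, a call such as log_reflist("SoftRef before: ", _discoveredSoftRefs, 4) emits one trace line: the prefix, each active queue's length separated by spaces, then the overall total in parentheses:

  SoftRef before: 3 0 5 2 (10)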
 614 
 615 #ifndef PRODUCT
 616 void ReferenceProcessor::log_reflist_counts(DiscoveredList ref_lists[], uint num_active_queues) {
 617   if (!log_is_enabled(Trace, gc, ref)) {
 618     return;
 619   }
 620 
 621   log_reflist("", ref_lists, num_active_queues);
 622 #ifdef ASSERT
 623   for (uint i = num_active_queues; i < _max_num_queues; i++) {
 624     assert(ref_lists[i].length() == 0, SIZE_FORMAT " unexpected References in %u",
 625            ref_lists[i].length(), i);
 626   }
 627 #endif
 628 }
 629 #endif
 630 
 631 void ReferenceProcessor::set_active_mt_degree(uint v) {
 632   _num_queues = v;
 633   _next_id = 0;
 634 }
 635 
 636 // Balances reference queues.
 637 // Move entries from all queues[0, 1, ..., _max_num_queues-1] to
 638 // queues[0, 1, ..., _num_queues-1] because only the first _num_queues
 639 // queues, corresponding to the active workers, will be processed.
 640 void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
 641 {
 642   // calculate total length
 643   size_t total_refs = 0;
 644   log_develop_trace(gc, ref)("Balance ref_lists ");
 645 
 646   log_reflist_counts(ref_lists, _max_num_queues);
 647 
 648   for (uint i = 0; i < _max_num_queues; ++i) {
 649     total_refs += ref_lists[i].length();
 650   }
 651   size_t avg_refs = total_refs / _num_queues + 1;
 652   uint to_idx = 0;
 653   for (uint from_idx = 0; from_idx < _max_num_queues; from_idx++) {
 654     bool move_all = false;
 655     if (from_idx >= _num_queues) {
 656       move_all = ref_lists[from_idx].length() > 0;
 657     }
 658     while ((ref_lists[from_idx].length() > avg_refs) ||
 659            move_all) {
 660       assert(to_idx < _num_queues, "Sanity Check!");
 661       if (ref_lists[to_idx].length() < avg_refs) {
 662         // move superfluous refs
 663         size_t refs_to_move;
 664         // Move all the refs if the from queue will not be processed.
 665         if (move_all) {
 666           refs_to_move = MIN2(ref_lists[from_idx].length(),
 667                               avg_refs - ref_lists[to_idx].length());
 668         } else {
 669           refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs,
 670                               avg_refs - ref_lists[to_idx].length());

 691         ref_lists[to_idx].set_head(move_head);
 692         ref_lists[to_idx].inc_length(refs_to_move);
 693 
 694         // Remove the chain from the from list.
 695         if (move_tail == new_head) {
 696           // We found the end of the from list.
 697           ref_lists[from_idx].set_head(NULL);
 698         } else {
 699           ref_lists[from_idx].set_head(new_head);
 700         }
 701         ref_lists[from_idx].dec_length(refs_to_move);
 702         if (ref_lists[from_idx].length() == 0) {
 703           break;
 704         }
 705       } else {
 706         to_idx = (to_idx + 1) % _num_queues;
 707       }
 708     }
 709   }
 710 #ifdef ASSERT
 711   log_reflist_counts(ref_lists, _num_queues);
 712   size_t balanced_total_refs = 0;
 713   for (uint i = 0; i < _num_queues; ++i) {
 714     balanced_total_refs += ref_lists[i].length();
 715   }
 716   assert(total_refs == balanced_total_refs, "Balancing was incomplete");
 717 #endif
 718 }
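
To make the balancing target concrete, here is a minimal standalone sketch (editor's illustration, not HotSpot code; all names are made up) that balances plain per-queue counts the same way balance_queues moves entries between DiscoveredLists:

#include <algorithm>
#include <cstdio>
#include <vector>

// Balance counts so the first num_active entries each hold at most
// avg = total / num_active + 1, and the rest are drained completely.
static void balance_counts(std::vector<size_t>& lengths, size_t num_active) {
  size_t total = 0;
  for (size_t len : lengths) {
    total += len;
  }
  const size_t avg = total / num_active + 1;  // same target as avg_refs

  size_t to_idx = 0;
  for (size_t from_idx = 0; from_idx < lengths.size(); from_idx++) {
    bool move_all = false;
    if (from_idx >= num_active) {
      // Queues beyond the active range must be drained completely.
      move_all = lengths[from_idx] > 0;
    }
    while (lengths[from_idx] > avg || move_all) {
      if (lengths[to_idx] < avg) {
        size_t surplus = move_all ? lengths[from_idx]
                                  : lengths[from_idx] - avg;
        size_t n = std::min(surplus, avg - lengths[to_idx]);
        lengths[from_idx] -= n;
        lengths[to_idx] += n;
        if (lengths[from_idx] == 0) {
          break;
        }
      } else {
        to_idx = (to_idx + 1) % num_active;
      }
    }
  }
}

int main() {
  std::vector<size_t> lengths = {10, 0, 7, 3};  // _max_num_queues == 4
  balance_counts(lengths, 2);                   // _num_queues == 2
  for (size_t len : lengths) {
    std::printf("%zu ", len);                   // prints: 11 9 0 0
  }
  std::printf("\n");
  return 0;
}

The +1 in the average is what makes the inner loop safe: num_active * (total / num_active + 1) is strictly greater than total, so the active queues can always absorb everything being moved toward them.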
 719 
 720 void ReferenceProcessor::process_discovered_reflist(
 721   DiscoveredList                refs_lists[],
 722   ReferencePolicy*              policy,
 723   bool                          clear_referent,
 724   BoolObjectClosure*            is_alive,
 725   OopClosure*                   keep_alive,
 726   VoidClosure*                  complete_gc,
 727   AbstractRefProcTaskExecutor*  task_executor,
 728   ReferenceProcessorPhaseTimes* phase_times)
 729 {
 730   bool mt_processing = task_executor != NULL && _processing_is_mt;
 731 
 732   phase_times->set_processing_is_mt(mt_processing);
 733 
 734   if (mt_processing && ParallelRefProcBalancingEnabled) {
 735     RefProcBalanceQueuesTimeTracker tt(phase_times);

1009     RawAccess<>::oop_store(discovered_addr, next_discovered);
1010     list->set_head(obj);
1011     list->inc_length(1);
1012 
1013     log_develop_trace(gc, ref)("Discovered reference (" INTPTR_FORMAT ": %s)", p2i(obj), obj->klass()->internal_name());
1014   }
1015   assert(oopDesc::is_oop(obj), "Discovered a bad reference");
1016   verify_referent(obj);
1017   return true;
1018 }
1019 
1020 bool ReferenceProcessor::has_discovered_references() {
1021   for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
1022     if (!_discovered_refs[i].is_empty()) {
1023       return true;
1024     }
1025   }
1026   return false;
1027 }
1028 
1029 void ReferenceProcessor::preclean_discovered_references(BoolObjectClosure* is_alive,
1030                                                         OopClosure* keep_alive,
1031                                                         VoidClosure* complete_gc,
1032                                                         YieldClosure* yield,
1033                                                         GCTimer* gc_timer) {
1034   // These lists can be handled here in any order and, indeed, concurrently.
 1035
1036   // Soft references
1037   {
1038     GCTraceTime(Debug, gc, ref) tm("Preclean SoftReferences", gc_timer);
1039     log_reflist("SoftRef before: ", _discoveredSoftRefs, _max_num_queues);
1040     for (uint i = 0; i < _max_num_queues; i++) {
1041       if (yield->should_return()) {
1042         return;
 1043       }
1044       if (preclean_discovered_reflist(_discoveredSoftRefs[i], is_alive,
1045                                       keep_alive, complete_gc, yield)) {
1046         log_reflist("SoftRef abort: ", _discoveredSoftRefs, _max_num_queues);
1047         return;
1048       }
1049     }
1050     log_reflist("SoftRef after: ", _discoveredSoftRefs, _max_num_queues);
1051   }
1052 
1053   // Weak references
1054   {
1055     GCTraceTime(Debug, gc, ref) tm("Preclean WeakReferences", gc_timer);
1056     log_reflist("WeakRef before: ", _discoveredWeakRefs, _max_num_queues);
1057     for (uint i = 0; i < _max_num_queues; i++) {
1058       if (yield->should_return()) {
1059         return;
1060       }
1061       if (preclean_discovered_reflist(_discoveredWeakRefs[i], is_alive,
1062                                       keep_alive, complete_gc, yield)) {
1063         log_reflist("WeakRef abort: ", _discoveredWeakRefs, _max_num_queues);
1064         return;
1065       }
1066     }
1067     log_reflist("WeakRef after: ", _discoveredWeakRefs, _max_num_queues);
1068   }
1069 
1070   // Final references
1071   {
1072     GCTraceTime(Debug, gc, ref) tm("Preclean FinalReferences", gc_timer);
1073     log_reflist("FinalRef before: ", _discoveredFinalRefs, _max_num_queues);
1074     for (uint i = 0; i < _max_num_queues; i++) {
1075       if (yield->should_return()) {
1076         return;
1077       }
1078       if (preclean_discovered_reflist(_discoveredFinalRefs[i], is_alive,
1079                                       keep_alive, complete_gc, yield)) {
1080         log_reflist("FinalRef abort: ", _discoveredFinalRefs, _max_num_queues);
1081         return;
1082       }
1083     }
1084     log_reflist("FinalRef after: ", _discoveredFinalRefs, _max_num_queues);
1085   }
1086 
1087   // Phantom references
1088   {
1089     GCTraceTime(Debug, gc, ref) tm("Preclean PhantomReferences", gc_timer);
1090     log_reflist("PhantomRef before: ", _discoveredPhantomRefs, _max_num_queues);
1091     for (uint i = 0; i < _max_num_queues; i++) {
1092       if (yield->should_return()) {
1093         return;
1094       }
1095       if (preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
1096                                       keep_alive, complete_gc, yield)) {
1097         log_reflist("PhantomRef abort: ", _discoveredPhantomRefs, _max_num_queues);
1098         return;
1099       }
1100     }
1101     log_reflist("PhantomRef after: ", _discoveredPhantomRefs, _max_num_queues);
1102   }
1103 }
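
Note the two abort paths above: yield->should_return(), checked between lists, returns without further logging, while a true result from preclean_discovered_reflist (a fine-grain yield taken inside a single list, see below) first logs the per-subclass "abort" line so the trace shows how far precleaning got.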
1104 
1105 // Walk the given discovered ref list, and remove all reference objects
 1106 // whose referents are still alive, whose referents are NULL, or which
1107 // are not active (have a non-NULL next field). NOTE: When we are
1108 // thus precleaning the ref lists (which happens single-threaded today),
1109 // we do not disable refs discovery to honor the correct semantics of
1110 // java.lang.Reference. As a result, we need to be careful below
1111 // that ref removal steps interleave safely with ref discovery steps
1112 // (in this thread).
1113 bool ReferenceProcessor::preclean_discovered_reflist(DiscoveredList&    refs_list,
1114                                                      BoolObjectClosure* is_alive,
1115                                                      OopClosure*        keep_alive,
1116                                                      VoidClosure*       complete_gc,
1117                                                      YieldClosure*      yield) {
1118   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
1119   while (iter.has_next()) {
1120     if (yield->should_return_fine_grain()) {
1121       return true;
1122     }
1123     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
1124     oop obj = iter.obj();
1125     oop next = java_lang_ref_Reference::next(obj);
1126     if (iter.referent() == NULL || iter.is_referent_alive() || next != NULL) {
1127       // The referent has been cleared, or is alive, or the Reference is not
1128       // active; we need to trace and mark its cohort.
1129       log_develop_trace(gc, ref)("Precleaning Reference (" INTPTR_FORMAT ": %s)",
1130                                  p2i(iter.obj()), iter.obj()->klass()->internal_name());
1131       // Remove Reference object from list
1132       iter.remove();
1133       // Keep alive its cohort.
1134       iter.make_referent_alive();
1135       if (UseCompressedOops) {
1136         narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr_raw(obj);
1137         keep_alive->do_oop(next_addr);
1138       } else {
1139         oop* next_addr = (oop*)java_lang_ref_Reference::next_addr_raw(obj);
1140         keep_alive->do_oop(next_addr);
1141       }
1142       iter.move_to_next();
1143     } else {
1144       iter.next();
1145     }
1146   }
1147   // Close the reachable set
1148   complete_gc->do_void();
1149 
1150   NOT_PRODUCT(
1151     if (iter.processed() > 0) {
1152       log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " Refs out of " SIZE_FORMAT " Refs in discovered list " INTPTR_FORMAT,
1153         iter.removed(), iter.processed(), p2i(&refs_list));
1154     }
1155   )
1156   return false;
1157 }
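
The drop decision in the loop above is easy to lose in the iterator plumbing, so here is a compressed restatement (editor's sketch; RefView and its fields are hypothetical stand-ins for the DiscoveredListIterator accessors):

// A Reference is dropped from the discovered list during precleaning when
// any of these hold; its referent and next field are then kept alive.
struct RefView {
  bool referent_is_null;   // referent was already cleared
  bool referent_is_alive;  // the is_alive closure says the referent survives
  bool next_is_non_null;   // next != NULL, i.e. the Reference is inactive
};

static bool preclean_would_drop(const RefView& r) {
  return r.referent_is_null || r.referent_is_alive || r.next_is_non_null;
}

In all three cases full reference processing would have nothing left to decide for this Reference, so dropping it early shrinks the lists left for the pause.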
1158 
1159 const char* ReferenceProcessor::list_name(uint i) {
 1160    assert(i < _max_num_queues * number_of_subclasses_of_ref(),
1161           "Out of bounds index");
1162 
1163    int j = i / _max_num_queues;
1164    switch (j) {
1165      case 0: return "SoftRef";
1166      case 1: return "WeakRef";
1167      case 2: return "FinalRef";
1168      case 3: return "PhantomRef";
1169    }
1170    ShouldNotReachHere();
1171    return NULL;
1172 }