370 static T load_at(oop base, ptrdiff_t offset) {
371 return load<T>(field_addr(base, offset));
372 }
373
  // Atomically compare-and-exchange the value at (base + offset);
  // returns the value previously stored at that location.
  template <typename T>
  static T atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
    return atomic_cmpxchg(new_value, field_addr(base, offset), compare_value);
  }

  // Atomically exchange new_value with the value at (base + offset);
  // returns the previous value.
  template <typename T>
  static T atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
    return atomic_xchg(new_value, field_addr(base, offset));
  }

  // Element-wise oop array copy; declaration only, defined out of line.
  template <typename T>
  static bool oop_arraycopy(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length);

  // Copy the contents of src into dst; declaration only, defined out of line.
  static void clone(oop src, oop dst, size_t size);

  // Identity resolve: returns the object unchanged at this level.
  static oop resolve(oop obj) { return obj; }
390 };
391
392 #endif // SHARE_VM_RUNTIME_ACCESSBACKEND_HPP
|
383 static T load_at(oop base, ptrdiff_t offset) {
384 return load<T>(field_addr(base, offset));
385 }
386
  // Atomically compare-and-exchange the value at (base + offset);
  // returns the value previously stored at that location.
  template <typename T>
  static T atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
    return atomic_cmpxchg(new_value, field_addr(base, offset), compare_value);
  }

  // Atomically exchange new_value with the value at (base + offset);
  // returns the previous value.
  template <typename T>
  static T atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
    return atomic_xchg(new_value, field_addr(base, offset));
  }

  // Element-wise oop array copy; declaration only, defined out of line.
  template <typename T>
  static bool oop_arraycopy(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length);

  // Copy the contents of src into dst; declaration only, defined out of line.
  static void clone(oop src, oop dst, size_t size);

  // Identity resolve: returns the object unchanged at this level.
  static oop resolve(oop obj) { return obj; }

  // Reference equality of the two oops.
  static bool equals(oop o1, oop o2) { return o1 == o2; }
405 };
406
407 // Below is the implementation of the first 4 steps of the template pipeline:
408 // * Step 1: Set default decorators and decay types. This step gets rid of CV qualifiers
409 // and sets default decorators to sensible values.
410 // * Step 2: Reduce types. This step makes sure there is only a single T type and not
411 // multiple types. The P type of the address and T type of the value must
412 // match.
413 // * Step 3: Pre-runtime dispatch. This step checks whether a runtime call can be
414 // avoided, and in that case avoids it (calling raw accesses or
415 // primitive accesses in a build that does not require primitive GC barriers)
416 // * Step 4: Runtime-dispatch. This step performs a runtime dispatch to the corresponding
417 // BarrierSet::AccessBarrier accessor that attaches GC-required barriers
418 // to the access.
419
420 namespace AccessInternal {
// Helper metafunction: maps narrowOop to narrowOop and every other (oop-like)
// type to oop. Used by OopOrNarrowOop below.
template <typename T>
struct OopOrNarrowOopInternal: AllStatic {
  typedef oop type;
};

template <>
struct OopOrNarrowOopInternal<narrowOop>: AllStatic {
  typedef narrowOop type;
};
430
// This metafunction returns a canonicalized oop/narrowOop type for a passed
// in oop-like types passed in from oop_* overloads where the user has sworn
// that the passed in values should be oop-like (e.g. oop, oopDesc*, arrayOop,
// narrowOop, instanceOopDesc*, and random other things).
// In the oop_* overloads, it must hold that if the passed in type T is not
// narrowOop, then it by contract has to be one of many oop-like types implicitly
// convertible to oop, and hence returns oop as the canonical oop type.
// If it turns out it was not, then the implicit conversion to oop will fail
// to compile, as desired.
template <typename T>
struct OopOrNarrowOop: AllStatic {
  // Strip CV qualifiers/references first, then canonicalize.
  typedef typename OopOrNarrowOopInternal<typename Decay<T>::type>::type type;
};
444
445 inline void* field_addr(oop base, ptrdiff_t byte_offset) {
446 return reinterpret_cast<void*>(reinterpret_cast<intptr_t>((void*)base) + byte_offset);
447 }
448 // Step 4: Runtime dispatch
449 // The RuntimeDispatch class is responsible for performing a runtime dispatch of the
450 // accessor. This is required when the access either depends on whether compressed oops
451 // is being used, or it depends on which GC implementation was chosen (e.g. requires GC
452 // barriers). The way it works is that a function pointer initially pointing to an
453 // accessor resolution function gets called for each access. Upon first invocation,
454 // it resolves which accessor to be used in future invocations and patches the
455 // function pointer to this new accessor.
456
// Primary template; only the per-barrier-type specializations below are used.
template <DecoratorSet decorators, typename T, BarrierType type>
struct RuntimeDispatch: AllStatic {};

// Runtime dispatch for store(addr, value) accesses.
template <DecoratorSet decorators, typename T>
struct RuntimeDispatch<decorators, T, BARRIER_STORE>: AllStatic {
  typedef typename AccessFunction<decorators, T, BARRIER_STORE>::type func_t;
  // Initially points at store_init (see definitions below), which resolves
  // the concrete accessor on first use and patches this pointer.
  static func_t _store_func;

  static void store_init(void* addr, T value);

  static inline void store(void* addr, T value) {
    _store_func(addr, value);
  }
};

// Runtime dispatch for store_at(base, offset, value) accesses.
template <DecoratorSet decorators, typename T>
struct RuntimeDispatch<decorators, T, BARRIER_STORE_AT>: AllStatic {
  typedef typename AccessFunction<decorators, T, BARRIER_STORE_AT>::type func_t;
  static func_t _store_at_func;

  static void store_at_init(oop base, ptrdiff_t offset, T value);

  static inline void store_at(oop base, ptrdiff_t offset, T value) {
    _store_at_func(base, offset, value);
  }
};

// Runtime dispatch for load(addr) accesses.
template <DecoratorSet decorators, typename T>
struct RuntimeDispatch<decorators, T, BARRIER_LOAD>: AllStatic {
  typedef typename AccessFunction<decorators, T, BARRIER_LOAD>::type func_t;
  static func_t _load_func;

  static T load_init(void* addr);

  static inline T load(void* addr) {
    return _load_func(addr);
  }
};

// Runtime dispatch for load_at(base, offset) accesses.
template <DecoratorSet decorators, typename T>
struct RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>: AllStatic {
  typedef typename AccessFunction<decorators, T, BARRIER_LOAD_AT>::type func_t;
  static func_t _load_at_func;

  static T load_at_init(oop base, ptrdiff_t offset);

  static inline T load_at(oop base, ptrdiff_t offset) {
    return _load_at_func(base, offset);
  }
};

// Runtime dispatch for atomic_cmpxchg(new_value, addr, compare_value) accesses.
template <DecoratorSet decorators, typename T>
struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>: AllStatic {
  typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG>::type func_t;
  static func_t _atomic_cmpxchg_func;

  static T atomic_cmpxchg_init(T new_value, void* addr, T compare_value);

  static inline T atomic_cmpxchg(T new_value, void* addr, T compare_value) {
    return _atomic_cmpxchg_func(new_value, addr, compare_value);
  }
};

// Runtime dispatch for atomic_cmpxchg_at(new_value, base, offset, compare_value).
template <DecoratorSet decorators, typename T>
struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>: AllStatic {
  typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::type func_t;
  static func_t _atomic_cmpxchg_at_func;

  static T atomic_cmpxchg_at_init(T new_value, oop base, ptrdiff_t offset, T compare_value);

  static inline T atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
    return _atomic_cmpxchg_at_func(new_value, base, offset, compare_value);
  }
};

// Runtime dispatch for atomic_xchg(new_value, addr) accesses.
template <DecoratorSet decorators, typename T>
struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>: AllStatic {
  typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG>::type func_t;
  static func_t _atomic_xchg_func;

  static T atomic_xchg_init(T new_value, void* addr);

  static inline T atomic_xchg(T new_value, void* addr) {
    return _atomic_xchg_func(new_value, addr);
  }
};

// Runtime dispatch for atomic_xchg_at(new_value, base, offset) accesses.
template <DecoratorSet decorators, typename T>
struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>: AllStatic {
  typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type func_t;
  static func_t _atomic_xchg_at_func;

  static T atomic_xchg_at_init(T new_value, oop base, ptrdiff_t offset);

  static inline T atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
    return _atomic_xchg_at_func(new_value, base, offset);
  }
};

// Runtime dispatch for arraycopy accesses.
template <DecoratorSet decorators, typename T>
struct RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>: AllStatic {
  typedef typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type func_t;
  static func_t _arraycopy_func;

  static bool arraycopy_init(arrayOop src_obj, arrayOop dst_obj, T *src, T* dst, size_t length);

  static inline bool arraycopy(arrayOop src_obj, arrayOop dst_obj, T *src, T* dst, size_t length) {
    return _arraycopy_func(src_obj, dst_obj, src, dst, length);
  }
};

// Runtime dispatch for clone accesses.
template <DecoratorSet decorators, typename T>
struct RuntimeDispatch<decorators, T, BARRIER_CLONE>: AllStatic {
  typedef typename AccessFunction<decorators, T, BARRIER_CLONE>::type func_t;
  static func_t _clone_func;

  static void clone_init(oop src, oop dst, size_t size);

  static inline void clone(oop src, oop dst, size_t size) {
    _clone_func(src, dst, size);
  }
};

// Runtime dispatch for resolve accesses.
template <DecoratorSet decorators, typename T>
struct RuntimeDispatch<decorators, T, BARRIER_RESOLVE>: AllStatic {
  typedef typename AccessFunction<decorators, T, BARRIER_RESOLVE>::type func_t;
  static func_t _resolve_func;

  static oop resolve_init(oop obj);

  static inline oop resolve(oop obj) {
    return _resolve_func(obj);
  }
};

// Runtime dispatch for oop equality checks.
template <DecoratorSet decorators, typename T>
struct RuntimeDispatch<decorators, T, BARRIER_EQUALS>: AllStatic {
  typedef typename AccessFunction<decorators, T, BARRIER_EQUALS>::type func_t;
  static func_t _equals_func;

  static bool equals_init(oop o1, oop o2);

  static inline bool equals(oop o1, oop o2) {
    return _equals_func(o1, o2);
  }
};
603
// Initialize the function pointers to point to the resolving function.
// Each *_init resolver selects the proper accessor on the first call and
// patches the corresponding function pointer so later calls go there directly.
template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_STORE>::type
RuntimeDispatch<decorators, T, BARRIER_STORE>::_store_func = &store_init;

template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_STORE_AT>::type
RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::_store_at_func = &store_at_init;

template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_LOAD>::type
RuntimeDispatch<decorators, T, BARRIER_LOAD>::_load_func = &load_init;

template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_LOAD_AT>::type
RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::_load_at_func = &load_at_init;

template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG>::type
RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::_atomic_cmpxchg_func = &atomic_cmpxchg_init;

template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::type
RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::_atomic_cmpxchg_at_func = &atomic_cmpxchg_at_init;

template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG>::type
RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::_atomic_xchg_func = &atomic_xchg_init;

template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type
RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::_atomic_xchg_at_func = &atomic_xchg_at_init;

template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type
RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::_arraycopy_func = &arraycopy_init;

template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_CLONE>::type
RuntimeDispatch<decorators, T, BARRIER_CLONE>::_clone_func = &clone_init;

template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_RESOLVE>::type
RuntimeDispatch<decorators, T, BARRIER_RESOLVE>::_resolve_func = &resolve_init;

template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_EQUALS>::type
RuntimeDispatch<decorators, T, BARRIER_EQUALS>::_equals_func = &equals_init;
652
653 // Step 3: Pre-runtime dispatching.
654 // The PreRuntimeDispatch class is responsible for filtering the barrier strength
655 // decorators. That is, for AS_RAW, it hardwires the accesses without a runtime
656 // dispatch point. Otherwise it goes through a runtime check if hardwiring was
657 // not possible.
658 struct PreRuntimeDispatch: AllStatic {
659 template<DecoratorSet decorators>
660 struct CanHardwireRaw: public IntegralConstant<
661 bool,
662 !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // primitive access
663 !HasDecorator<decorators, INTERNAL_CONVERT_COMPRESSED_OOP>::value || // don't care about compressed oops (oop* address)
664 HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value> // we can infer we use compressed oops (narrowOop* address)
665 {};
666
667 static const DecoratorSet convert_compressed_oops = INTERNAL_RT_USE_COMPRESSED_OOPS | INTERNAL_CONVERT_COMPRESSED_OOP;
668
669 template<DecoratorSet decorators>
670 static bool is_hardwired_primitive() {
671 return !HasDecorator<decorators, INTERNAL_BT_BARRIER_ON_PRIMITIVES>::value &&
672 !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value;
673 }
674
675 template <DecoratorSet decorators, typename T>
676 inline static typename EnableIf<
677 HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value>::type
678 store(void* addr, T value) {
679 typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
680 if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
681 Raw::oop_store(addr, value);
682 } else {
683 Raw::store(addr, value);
684 }
685 }
686
687 template <DecoratorSet decorators, typename T>
688 inline static typename EnableIf<
689 HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value>::type
690 store(void* addr, T value) {
691 if (UseCompressedOops) {
692 const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
693 PreRuntimeDispatch::store<expanded_decorators>(addr, value);
694 } else {
695 const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
696 PreRuntimeDispatch::store<expanded_decorators>(addr, value);
697 }
698 }
699
700 template <DecoratorSet decorators, typename T>
701 inline static typename EnableIf<
702 !HasDecorator<decorators, AS_RAW>::value>::type
703 store(void* addr, T value) {
704 if (is_hardwired_primitive<decorators>()) {
705 const DecoratorSet expanded_decorators = decorators | AS_RAW;
706 PreRuntimeDispatch::store<expanded_decorators>(addr, value);
707 } else {
708 RuntimeDispatch<decorators, T, BARRIER_STORE>::store(addr, value);
709 }
710 }
711
712 template <DecoratorSet decorators, typename T>
713 inline static typename EnableIf<
714 HasDecorator<decorators, AS_RAW>::value>::type
715 store_at(oop base, ptrdiff_t offset, T value) {
716 store<decorators>(field_addr(base, offset), value);
717 }
718
719 template <DecoratorSet decorators, typename T>
720 inline static typename EnableIf<
721 !HasDecorator<decorators, AS_RAW>::value>::type
722 store_at(oop base, ptrdiff_t offset, T value) {
723 if (is_hardwired_primitive<decorators>()) {
724 const DecoratorSet expanded_decorators = decorators | AS_RAW;
725 PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, value);
726 } else {
727 RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::store_at(base, offset, value);
728 }
729 }
730
731 template <DecoratorSet decorators, typename T>
732 inline static typename EnableIf<
733 HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
734 load(void* addr) {
735 typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
736 if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
737 return Raw::template oop_load<T>(addr);
738 } else {
739 return Raw::template load<T>(addr);
740 }
741 }
742
743 template <DecoratorSet decorators, typename T>
744 inline static typename EnableIf<
745 HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
746 load(void* addr) {
747 if (UseCompressedOops) {
748 const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
749 return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
750 } else {
751 const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
752 return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
753 }
754 }
755
756 template <DecoratorSet decorators, typename T>
757 inline static typename EnableIf<
758 !HasDecorator<decorators, AS_RAW>::value, T>::type
759 load(void* addr) {
760 if (is_hardwired_primitive<decorators>()) {
761 const DecoratorSet expanded_decorators = decorators | AS_RAW;
762 return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
763 } else {
764 return RuntimeDispatch<decorators, T, BARRIER_LOAD>::load(addr);
765 }
766 }
767
768 template <DecoratorSet decorators, typename T>
769 inline static typename EnableIf<
770 HasDecorator<decorators, AS_RAW>::value, T>::type
771 load_at(oop base, ptrdiff_t offset) {
772 return load<decorators, T>(field_addr(base, offset));
773 }
774
775 template <DecoratorSet decorators, typename T>
776 inline static typename EnableIf<
777 !HasDecorator<decorators, AS_RAW>::value, T>::type
778 load_at(oop base, ptrdiff_t offset) {
779 if (is_hardwired_primitive<decorators>()) {
780 const DecoratorSet expanded_decorators = decorators | AS_RAW;
781 return PreRuntimeDispatch::load_at<expanded_decorators, T>(base, offset);
782 } else {
783 return RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::load_at(base, offset);
784 }
785 }
786
787 template <DecoratorSet decorators, typename T>
788 inline static typename EnableIf<
789 HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
790 atomic_cmpxchg(T new_value, void* addr, T compare_value) {
791 typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
792 if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
793 return Raw::oop_atomic_cmpxchg(new_value, addr, compare_value);
794 } else {
795 return Raw::atomic_cmpxchg(new_value, addr, compare_value);
796 }
797 }
798
799 template <DecoratorSet decorators, typename T>
800 inline static typename EnableIf<
801 HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
802 atomic_cmpxchg(T new_value, void* addr, T compare_value) {
803 if (UseCompressedOops) {
804 const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
805 return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
806 } else {
807 const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
808 return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
809 }
810 }
811
812 template <DecoratorSet decorators, typename T>
813 inline static typename EnableIf<
814 !HasDecorator<decorators, AS_RAW>::value, T>::type
815 atomic_cmpxchg(T new_value, void* addr, T compare_value) {
816 if (is_hardwired_primitive<decorators>()) {
817 const DecoratorSet expanded_decorators = decorators | AS_RAW;
818 return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
819 } else {
820 return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::atomic_cmpxchg(new_value, addr, compare_value);
821 }
822 }
823
824 template <DecoratorSet decorators, typename T>
825 inline static typename EnableIf<
826 HasDecorator<decorators, AS_RAW>::value, T>::type
827 atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
828 return atomic_cmpxchg<decorators>(new_value, field_addr(base, offset), compare_value);
829 }
830
831 template <DecoratorSet decorators, typename T>
832 inline static typename EnableIf<
833 !HasDecorator<decorators, AS_RAW>::value, T>::type
834 atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
835 if (is_hardwired_primitive<decorators>()) {
836 const DecoratorSet expanded_decorators = decorators | AS_RAW;
837 return PreRuntimeDispatch::atomic_cmpxchg_at<expanded_decorators>(new_value, base, offset, compare_value);
838 } else {
839 return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::atomic_cmpxchg_at(new_value, base, offset, compare_value);
840 }
841 }
842
843 template <DecoratorSet decorators, typename T>
844 inline static typename EnableIf<
845 HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
846 atomic_xchg(T new_value, void* addr) {
847 typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
848 if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
849 return Raw::oop_atomic_xchg(new_value, addr);
850 } else {
851 return Raw::atomic_xchg(new_value, addr);
852 }
853 }
854
855 template <DecoratorSet decorators, typename T>
856 inline static typename EnableIf<
857 HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
858 atomic_xchg(T new_value, void* addr) {
859 if (UseCompressedOops) {
860 const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
861 return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
862 } else {
863 const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
864 return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
865 }
866 }
867
868 template <DecoratorSet decorators, typename T>
869 inline static typename EnableIf<
870 !HasDecorator<decorators, AS_RAW>::value, T>::type
871 atomic_xchg(T new_value, void* addr) {
872 if (is_hardwired_primitive<decorators>()) {
873 const DecoratorSet expanded_decorators = decorators | AS_RAW;
874 return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
875 } else {
876 return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::atomic_xchg(new_value, addr);
877 }
878 }
879
880 template <DecoratorSet decorators, typename T>
881 inline static typename EnableIf<
882 HasDecorator<decorators, AS_RAW>::value, T>::type
883 atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
884 return atomic_xchg<decorators>(new_value, field_addr(base, offset));
885 }
886
887 template <DecoratorSet decorators, typename T>
888 inline static typename EnableIf<
889 !HasDecorator<decorators, AS_RAW>::value, T>::type
890 atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
891 if (is_hardwired_primitive<decorators>()) {
892 const DecoratorSet expanded_decorators = decorators | AS_RAW;
893 return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, base, offset);
894 } else {
895 return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::atomic_xchg_at(new_value, base, offset);
896 }
897 }
898
899 template <DecoratorSet decorators, typename T>
900 inline static typename EnableIf<
901 HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, bool>::type
902 arraycopy(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length) {
903 typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
904 if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
905 return Raw::oop_arraycopy(src_obj, dst_obj, src, dst, length);
906 } else {
907 return Raw::arraycopy(src_obj, dst_obj, src, dst, length);
908 }
909 }
910
911 template <DecoratorSet decorators, typename T>
912 inline static typename EnableIf<
913 HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, bool>::type
914 arraycopy(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length) {
915 if (UseCompressedOops) {
916 const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
917 return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, dst_obj, src, dst, length);
918 } else {
919 const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
920 return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, dst_obj, src, dst, length);
921 }
922 }
923
924 template <DecoratorSet decorators, typename T>
925 inline static typename EnableIf<
926 !HasDecorator<decorators, AS_RAW>::value, bool>::type
927 arraycopy(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length) {
928 if (is_hardwired_primitive<decorators>()) {
929 const DecoratorSet expanded_decorators = decorators | AS_RAW;
930 return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, dst_obj, src, dst, length);
931 } else {
932 return RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::arraycopy(src_obj, dst_obj, src, dst, length);
933 }
934 }
935
936 template <DecoratorSet decorators>
937 inline static typename EnableIf<
938 HasDecorator<decorators, AS_RAW>::value>::type
939 clone(oop src, oop dst, size_t size) {
940 typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
941 Raw::clone(src, dst, size);
942 }
943
944 template <DecoratorSet decorators>
945 inline static typename EnableIf<
946 !HasDecorator<decorators, AS_RAW>::value>::type
947 clone(oop src, oop dst, size_t size) {
948 RuntimeDispatch<decorators, oop, BARRIER_CLONE>::clone(src, dst, size);
949 }
950
951 template <DecoratorSet decorators>
952 inline static typename EnableIf<
953 HasDecorator<decorators, INTERNAL_BT_TO_SPACE_INVARIANT>::value, oop>::type
954 resolve(oop obj) {
955 typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
956 return Raw::resolve(obj);
957 }
958
959 template <DecoratorSet decorators>
960 inline static typename EnableIf<
961 !HasDecorator<decorators, INTERNAL_BT_TO_SPACE_INVARIANT>::value, oop>::type
962 resolve(oop obj) {
963 return RuntimeDispatch<decorators, oop, BARRIER_RESOLVE>::resolve(obj);
964 }
965
966 template <DecoratorSet decorators>
967 inline static typename EnableIf<
968 HasDecorator<decorators, INTERNAL_BT_TO_SPACE_INVARIANT>::value, bool>::type
969 equals(oop o1, oop o2) {
970 typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
971 return Raw::equals(o1, o2);
972 }
973
974 template <DecoratorSet decorators>
975 inline static typename EnableIf<
976 !HasDecorator<decorators, INTERNAL_BT_TO_SPACE_INVARIANT>::value, bool>::type
977 equals(oop o1, oop o2) {
978 return RuntimeDispatch<decorators, oop, BARRIER_EQUALS>::equals(o1, o2);
979 }
980 };
981
// This class adds implied decorators that follow according to decorator rules.
// For example adding default reference strength and default memory ordering
// semantics. Each step builds on the previous one.
template <DecoratorSet input_decorators>
struct DecoratorFixup: AllStatic {
  // If no reference strength has been picked, then strong will be picked
  static const DecoratorSet ref_strength_default = input_decorators |
    (((ON_DECORATOR_MASK & input_decorators) == 0 && (INTERNAL_VALUE_IS_OOP & input_decorators) != 0) ?
     ON_STRONG_OOP_REF : INTERNAL_EMPTY);
  // If no memory ordering has been picked, unordered will be picked
  static const DecoratorSet memory_ordering_default = ref_strength_default |
    ((MO_DECORATOR_MASK & ref_strength_default) == 0 ? MO_UNORDERED : INTERNAL_EMPTY);
  // If no barrier strength has been picked, normal will be used
  static const DecoratorSet barrier_strength_default = memory_ordering_default |
    ((AS_DECORATOR_MASK & memory_ordering_default) == 0 ? AS_NORMAL : INTERNAL_EMPTY);
  // Heap array accesses imply it is a heap access
  static const DecoratorSet heap_array_is_in_heap = barrier_strength_default |
    ((IN_HEAP_ARRAY & barrier_strength_default) != 0 ? IN_HEAP : INTERNAL_EMPTY);
  // Concurrent-root accesses imply a root access
  static const DecoratorSet conc_root_is_root = heap_array_is_in_heap |
    ((IN_CONCURRENT_ROOT & heap_array_is_in_heap) != 0 ? IN_ROOT : INTERNAL_EMPTY);
  // Archive-root accesses imply a root access
  static const DecoratorSet archive_root_is_root = conc_root_is_root |
    ((IN_ARCHIVE_ROOT & conc_root_is_root) != 0 ? IN_ROOT : INTERNAL_EMPTY);
  // The fully expanded decorator set, including build-time decorators.
  static const DecoratorSet value = archive_root_is_root | BT_BUILDTIME_DECORATORS;
};
1006
1007 // Step 2: Reduce types.
1008 // Enforce that for non-oop types, T and P have to be strictly the same.
1009 // P is the type of the address and T is the type of the values.
// As for oop types, it is allowed to pass T in {narrowOop, oop} and
1011 // P in {narrowOop, oop, HeapWord*}. The following rules apply according to
1012 // the subsequent table. (columns are P, rows are T)
1013 // | | HeapWord | oop | narrowOop |
1014 // | oop | rt-comp | hw-none | hw-comp |
1015 // | narrowOop | x | x | hw-none |
1016 //
1017 // x means not allowed
1018 // rt-comp means it must be checked at runtime whether the oop is compressed.
1019 // hw-none means it is statically known the oop will not be compressed.
1020 // hw-comp means it is statically known the oop will be compressed.
1021
// Step 2 reduction for stores: the generic (T*, T) overload passes through
// unchanged; the overloads below canonicalize oop-like (address, value) pairs
// and record what is statically known about compressed-oop encoding.
template <DecoratorSet decorators, typename T>
inline void store_reduce_types(T* addr, T value) {
  PreRuntimeDispatch::store<decorators>(addr, value);
}

// narrowOop* address: statically known to use compressed oops ("hw-comp").
template <DecoratorSet decorators>
inline void store_reduce_types(narrowOop* addr, oop value) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                           INTERNAL_RT_USE_COMPRESSED_OOPS;
  PreRuntimeDispatch::store<expanded_decorators>(addr, value);
}

template <DecoratorSet decorators>
inline void store_reduce_types(narrowOop* addr, narrowOop value) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                           INTERNAL_RT_USE_COMPRESSED_OOPS;
  PreRuntimeDispatch::store<expanded_decorators>(addr, value);
}

// HeapWord* address: compression must be checked at runtime ("rt-comp").
template <DecoratorSet decorators>
inline void store_reduce_types(HeapWord* addr, oop value) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
  PreRuntimeDispatch::store<expanded_decorators>(addr, value);
}
1046
// Step 2 reduction for compare-and-exchange: same scheme as the store
// reductions above.
template <DecoratorSet decorators, typename T>
inline T atomic_cmpxchg_reduce_types(T new_value, T* addr, T compare_value) {
  return PreRuntimeDispatch::atomic_cmpxchg<decorators>(new_value, addr, compare_value);
}

// narrowOop* address: statically known to use compressed oops ("hw-comp").
template <DecoratorSet decorators>
inline oop atomic_cmpxchg_reduce_types(oop new_value, narrowOop* addr, oop compare_value) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                           INTERNAL_RT_USE_COMPRESSED_OOPS;
  return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
}

template <DecoratorSet decorators>
inline narrowOop atomic_cmpxchg_reduce_types(narrowOop new_value, narrowOop* addr, narrowOop compare_value) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                           INTERNAL_RT_USE_COMPRESSED_OOPS;
  return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
}

// HeapWord* address: compression must be checked at runtime ("rt-comp").
template <DecoratorSet decorators>
inline oop atomic_cmpxchg_reduce_types(oop new_value,
                                       HeapWord* addr,
                                       oop compare_value) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
  return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
}
1073
1074 template <DecoratorSet decorators, typename T>
1075 inline T atomic_xchg_reduce_types(T new_value, T* addr) {
1076 const DecoratorSet expanded_decorators = decorators;
1077 return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
1078 }
1079
// narrowOop* address: statically known to use compressed oops ("hw-comp").
template <DecoratorSet decorators>
inline oop atomic_xchg_reduce_types(oop new_value, narrowOop* addr) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                           INTERNAL_RT_USE_COMPRESSED_OOPS;
  return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
}

template <DecoratorSet decorators>
inline narrowOop atomic_xchg_reduce_types(narrowOop new_value, narrowOop* addr) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                           INTERNAL_RT_USE_COMPRESSED_OOPS;
  return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
}

// HeapWord* address: compression must be checked at runtime ("rt-comp").
template <DecoratorSet decorators>
inline oop atomic_xchg_reduce_types(oop new_value, HeapWord* addr) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
  return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
}
1099
1100 template <DecoratorSet decorators, typename T>
1101 inline T load_reduce_types(T* addr) {
1102 return PreRuntimeDispatch::load<decorators, T>(addr);
1103 }
1104
1105 template <DecoratorSet decorators, typename T>
1106 inline typename OopOrNarrowOop<T>::type load_reduce_types(narrowOop* addr) {
1107 const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
1108 INTERNAL_RT_USE_COMPRESSED_OOPS;
1109 return PreRuntimeDispatch::load<expanded_decorators, typename OopOrNarrowOop<T>::type>(addr);
1110 }
1111
1112 template <DecoratorSet decorators, typename T>
1113 inline oop load_reduce_types(HeapWord* addr) {
1114 const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
1115 return PreRuntimeDispatch::load<expanded_decorators, oop>(addr);
1116 }
1117
1118 template <DecoratorSet decorators, typename T>
1119 inline bool arraycopy_reduce_types(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length) {
1120 return PreRuntimeDispatch::arraycopy<decorators>(src_obj, dst_obj, src, dst, length);
1121 }
1122
1123 template <DecoratorSet decorators>
1124 inline bool arraycopy_reduce_types(arrayOop src_obj, arrayOop dst_obj, HeapWord* src, HeapWord* dst, size_t length) {
1125 const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
1126 return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, dst_obj, src, dst, length);
1127 }
1128
1129 template <DecoratorSet decorators>
1130 inline bool arraycopy_reduce_types(arrayOop src_obj, arrayOop dst_obj, narrowOop* src, narrowOop* dst, size_t length) {
1131 const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
1132 INTERNAL_RT_USE_COMPRESSED_OOPS;
1133 return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, dst_obj, src, dst, length);
1134 }
1135
1136 // Step 1: Set default decorators. This step remembers if a type was volatile
1137 // and then sets the MO_VOLATILE decorator by default. Otherwise, a default
1138 // memory ordering is set for the access, and the implied decorator rules
1139 // are applied to select sensible defaults for decorators that have not been
1140 // explicitly set. For example, default object referent strength is set to strong.
1141 // This step also decays the types passed in (e.g. getting rid of CV qualifiers
1142 // and references from the types). This step also performs some type verification
1143 // that the passed in types make sense.
1144
1145 template <DecoratorSet decorators, typename T>
1146 static void verify_types(){
1147 // If this fails to compile, then you have sent in something that is
1148 // not recognized as a valid primitive type to a primitive Access function.
1149 STATIC_ASSERT((HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // oops have already been validated
1150 (IsPointer<T>::value || IsIntegral<T>::value) ||
1151 IsFloatingPoint<T>::value)); // not allowed primitive type
1152 }
1153
1154 template <DecoratorSet decorators, typename P, typename T>
1155 inline void store(P* addr, T value) {
1156 verify_types<decorators, T>();
1157 typedef typename Decay<P>::type DecayedP;
1158 typedef typename Decay<T>::type DecayedT;
1159 DecayedT decayed_value = value;
1160 // If a volatile address is passed in but no memory ordering decorator,
1161 // set the memory ordering to MO_VOLATILE by default.
1162 const DecoratorSet expanded_decorators = DecoratorFixup<
1163 (IsVolatile<P>::value && !HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
1164 (MO_VOLATILE | decorators) : decorators>::value;
1165 store_reduce_types<expanded_decorators>(const_cast<DecayedP*>(addr), decayed_value);
1166 }
1167
1168 template <DecoratorSet decorators, typename T>
1169 inline void store_at(oop base, ptrdiff_t offset, T value) {
1170 verify_types<decorators, T>();
1171 typedef typename Decay<T>::type DecayedT;
1172 DecayedT decayed_value = value;
1173 const DecoratorSet expanded_decorators = DecoratorFixup<decorators |
1174 (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
1175 INTERNAL_CONVERT_COMPRESSED_OOP : INTERNAL_EMPTY)>::value;
1176 PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, decayed_value);
1177 }
1178
1179 template <DecoratorSet decorators, typename P, typename T>
1180 inline T load(P* addr) {
1181 verify_types<decorators, T>();
1182 typedef typename Decay<P>::type DecayedP;
1183 typedef typename Conditional<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
1184 typename OopOrNarrowOop<T>::type,
1185 typename Decay<T>::type>::type DecayedT;
1186 // If a volatile address is passed in but no memory ordering decorator,
1187 // set the memory ordering to MO_VOLATILE by default.
1188 const DecoratorSet expanded_decorators = DecoratorFixup<
1189 (IsVolatile<P>::value && !HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
1190 (MO_VOLATILE | decorators) : decorators>::value;
1191 return load_reduce_types<expanded_decorators, DecayedT>(const_cast<DecayedP*>(addr));
1192 }
1193
1194 template <DecoratorSet decorators, typename T>
1195 inline T load_at(oop base, ptrdiff_t offset) {
1196 verify_types<decorators, T>();
1197 typedef typename Conditional<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
1198 typename OopOrNarrowOop<T>::type,
1199 typename Decay<T>::type>::type DecayedT;
1200 // Expand the decorators (figure out sensible defaults)
1201 // Potentially remember if we need compressed oop awareness
1202 const DecoratorSet expanded_decorators = DecoratorFixup<decorators |
1203 (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
1204 INTERNAL_CONVERT_COMPRESSED_OOP : INTERNAL_EMPTY)>::value;
1205 return PreRuntimeDispatch::load_at<expanded_decorators, DecayedT>(base, offset);
1206 }
1207
1208 template <DecoratorSet decorators, typename P, typename T>
1209 inline T atomic_cmpxchg(T new_value, P* addr, T compare_value) {
1210 verify_types<decorators, T>();
1211 typedef typename Decay<P>::type DecayedP;
1212 typedef typename Decay<T>::type DecayedT;
1213 DecayedT new_decayed_value = new_value;
1214 DecayedT compare_decayed_value = compare_value;
1215 const DecoratorSet expanded_decorators = DecoratorFixup<
1216 (!HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
1217 (MO_SEQ_CST | decorators) : decorators>::value;
1218 return atomic_cmpxchg_reduce_types<expanded_decorators>(new_decayed_value,
1219 const_cast<DecayedP*>(addr),
1220 compare_decayed_value);
1221 }
1222
1223 template <DecoratorSet decorators, typename T>
1224 inline T atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
1225 verify_types<decorators, T>();
1226 typedef typename Decay<T>::type DecayedT;
1227 DecayedT new_decayed_value = new_value;
1228 DecayedT compare_decayed_value = compare_value;
1229 // Determine default memory ordering
1230 const DecoratorSet expanded_decorators = DecoratorFixup<
1231 (!HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
1232 (MO_SEQ_CST | decorators) : decorators>::value;
1233 // Potentially remember that we need compressed oop awareness
1234 const DecoratorSet final_decorators = expanded_decorators |
1235 (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
1236 INTERNAL_CONVERT_COMPRESSED_OOP : INTERNAL_EMPTY);
1237 return PreRuntimeDispatch::atomic_cmpxchg_at<final_decorators>(new_decayed_value, base,
1238 offset, compare_decayed_value);
1239 }
1240
1241 template <DecoratorSet decorators, typename P, typename T>
1242 inline T atomic_xchg(T new_value, P* addr) {
1243 verify_types<decorators, T>();
1244 typedef typename Decay<P>::type DecayedP;
1245 typedef typename Decay<T>::type DecayedT;
1246 DecayedT new_decayed_value = new_value;
1247 // atomic_xchg is only available in SEQ_CST flavour.
1248 const DecoratorSet expanded_decorators = DecoratorFixup<decorators | MO_SEQ_CST>::value;
1249 return atomic_xchg_reduce_types<expanded_decorators>(new_decayed_value,
1250 const_cast<DecayedP*>(addr));
1251 }
1252
1253 template <DecoratorSet decorators, typename T>
1254 inline T atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
1255 verify_types<decorators, T>();
1256 typedef typename Decay<T>::type DecayedT;
1257 DecayedT new_decayed_value = new_value;
1258 // atomic_xchg is only available in SEQ_CST flavour.
1259 const DecoratorSet expanded_decorators = DecoratorFixup<decorators | MO_SEQ_CST |
1260 (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
1261 INTERNAL_CONVERT_COMPRESSED_OOP : INTERNAL_EMPTY)>::value;
1262 return PreRuntimeDispatch::atomic_xchg_at<expanded_decorators>(new_decayed_value, base, offset);
1263 }
1264
1265 template <DecoratorSet decorators, typename T>
1266 inline bool arraycopy(arrayOop src_obj, arrayOop dst_obj, T* src, T* dst, size_t length) {
1267 STATIC_ASSERT((HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ||
1268 (IsSame<T, void>::value || IsIntegral<T>::value) ||
1269 IsFloatingPoint<T>::value)); // arraycopy allows type erased void elements
1270 typedef typename Decay<T>::type DecayedT;
1271 const DecoratorSet expanded_decorators = DecoratorFixup<decorators | IN_HEAP_ARRAY | IN_HEAP>::value;
1272 return arraycopy_reduce_types<expanded_decorators>(src_obj, dst_obj,
1273 const_cast<DecayedT*>(src),
1274 const_cast<DecayedT*>(dst),
1275 length);
1276 }
1277
1278 template <DecoratorSet decorators>
1279 inline void clone(oop src, oop dst, size_t size) {
1280 const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
1281 PreRuntimeDispatch::clone<expanded_decorators>(src, dst, size);
1282 }
1283
1284 template <DecoratorSet decorators>
1285 inline oop resolve(oop obj) {
1286 const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
1287 return PreRuntimeDispatch::resolve<expanded_decorators>(obj);
1288 }
1289
1290 template <DecoratorSet decorators>
1291 inline bool equals(oop o1, oop o2) {
1292 const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
1293 return PreRuntimeDispatch::equals<expanded_decorators>(o1, o2);
1294 }
1295 }
1296
1297 #endif // SHARE_OOPS_ACCESSBACKEND_HPP
|