src/cpu/sparc/vm/assembler_sparc.cpp
Sdiff from the c1-coops webrev: old version first, new version below it.

Old version:

 892     mov(G3, L3);                // avoid clobbering G3
 893     mov(G4, L4);                // avoid clobbering G4
 894     mov(G5_method, L5);         // avoid clobbering G5_method
 895 #endif /* CC_INTERP */
 896 #if defined(COMPILER2) && !defined(_LP64)
 897     // Save & restore possible 64-bit Long arguments in G-regs
 898     srlx(G1,32,L0);
 899     srlx(G4,32,L6);
 900 #endif
 901     call(CAST_FROM_FN_PTR(address,verify_thread_subroutine), relocInfo::runtime_call_type);
 902     delayed()->mov(G2_thread, O0);
 903 
 904     mov(L1, G1);                // Restore G1
 905     // G2 restored below
 906     mov(L3, G3);                // restore G3
 907     mov(L4, G4);                // restore G4
 908     mov(L5, G5_method);         // restore G5_method
 909 #if defined(COMPILER2) && !defined(_LP64)
 910     // Save & restore possible 64-bit Long arguments in G-regs
 911     sllx(L0,32,G2);             // Move old high G1 bits high in G2
 912     sllx(G1, 0,G1);             // Clear current high G1 bits
 913     or3 (G1,G2,G1);             // Recover 64-bit G1
 914     sllx(L6,32,G2);             // Move old high G4 bits high in G2
 915     sllx(G4, 0,G4);             // Clear current high G4 bits
 916     or3 (G4,G2,G4);             // Recover 64-bit G4
 917 #endif
 918     restore(O0, 0, G2_thread);
 919   }
 920 }
 921 
 922 
 923 void MacroAssembler::save_thread(const Register thread_cache) {
 924   verify_thread();
 925   if (thread_cache->is_valid()) {
 926     assert(thread_cache->is_local() || thread_cache->is_in(), "bad volatile");
 927     mov(G2_thread, thread_cache);
 928   }
 929   if (VerifyThread) {
 930     // smash G2_thread, as if the VM were about to anyway
 931     set(0x67676767, G2_thread);
 932   }
 933 }
 934
 935

New version:

 892     mov(G3, L3);                // avoid clobbering G3
 893     mov(G4, L4);                // avoid clobbering G4
 894     mov(G5_method, L5);         // avoid clobbering G5_method
 895 #endif /* CC_INTERP */
 896 #if defined(COMPILER2) && !defined(_LP64)
 897     // Save & restore possible 64-bit Long arguments in G-regs
 898     srlx(G1,32,L0);
 899     srlx(G4,32,L6);
 900 #endif
 901     call(CAST_FROM_FN_PTR(address,verify_thread_subroutine), relocInfo::runtime_call_type);
 902     delayed()->mov(G2_thread, O0);
 903 
 904     mov(L1, G1);                // Restore G1
 905     // G2 restored below
 906     mov(L3, G3);                // restore G3
 907     mov(L4, G4);                // restore G4
 908     mov(L5, G5_method);         // restore G5_method
 909 #if defined(COMPILER2) && !defined(_LP64)
 910     // Save & restore possible 64-bit Long arguments in G-regs
 911     sllx(L0,32,G2);             // Move old high G1 bits high in G2
 912     srl(G1, 0,G1);              // Clear current high G1 bits
 913     or3 (G1,G2,G1);             // Recover 64-bit G1
 914     sllx(L6,32,G2);             // Move old high G4 bits high in G2
 915     srl(G4, 0,G4);              // Clear current high G4 bits
 916     or3 (G4,G2,G4);             // Recover 64-bit G4
 917 #endif
 918     restore(O0, 0, G2_thread);
 919   }
 920 }
 921 
 922 
 923 void MacroAssembler::save_thread(const Register thread_cache) {
 924   verify_thread();
 925   if (thread_cache->is_valid()) {
 926     assert(thread_cache->is_local() || thread_cache->is_in(), "bad volatile");
 927     mov(G2_thread, thread_cache);
 928   }
 929   if (VerifyThread) {
 930     // smash G2_thread, as if the VM were about to anyway
 931     set(0x67676767, G2_thread);
 932   }
 933 }
 934 
 935 
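
The only change between the two versions is `srl` in place of `sllx` at lines 912 and 915. On SPARC V9, `sllx(reg, 0, reg)` is a 64-bit shift by zero and leaves the register untouched, so the old code never actually cleared the upper 32 bits of G1/G4 before OR-ing in the high halves saved earlier by `srlx(G1,32,L0)` and `srlx(G4,32,L6)`. `srl(reg, 0, reg)` is the 32-bit logical shift, which zero-extends the low word and therefore does clear bits 63:32, matching the "Clear current high ... bits" comments. Below is a minimal C++ sketch of the recombination the fixed sequence performs; plain uint64_t values stand in for the G and L registers, and the "junk" value only simulates a call that preserves just the low word. It is an illustration, not HotSpot code.

    #include <cstdint>
    #include <cassert>

    int main() {
      uint64_t G1 = 0x1122334455667788ULL;   // 64-bit long argument living in G1

      // Before the call: save the high word, as srlx(G1, 32, L0) does.
      uint64_t L0 = G1 >> 32;

      // Simulate a call after which only the low 32 bits of G1 are trustworthy.
      uint64_t G1_after_call = 0xDEADBEEF55667788ULL;   // junk in the high word

      // After the call, rebuild the 64-bit value:
      uint64_t G2 = L0 << 32;                // sllx(L0, 32, G2): old high bits, shifted high
      G1 = G1_after_call & 0xFFFFFFFFULL;    // srl(G1, 0, G1): zero-extend the low word
                                             // (sllx(G1, 0, G1) would have left the junk in place)
      G1 = G1 | G2;                          // or3(G1, G2, G1): recover the 64-bit long

      assert(G1 == 0x1122334455667788ULL);
      return 0;
    }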

