src/cpu/sparc/vm/assembler_sparc.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File c1-coops Sdiff src/cpu/sparc/vm

src/cpu/sparc/vm/assembler_sparc.cpp

Print this page




 876     mov(G3, L3);                // avoid clobbering G3
 877     mov(G4, L4);                // avoid clobbering G4
 878     mov(G5_method, L5);         // avoid clobbering G5_method
 879 #endif /* CC_INTERP */
 880 #if defined(COMPILER2) && !defined(_LP64)
 881     // Save & restore possible 64-bit Long arguments in G-regs
 882     srlx(G1,32,L0);
 883     srlx(G4,32,L6);
 884 #endif
 885     call(CAST_FROM_FN_PTR(address,verify_thread_subroutine), relocInfo::runtime_call_type);
 886     delayed()->mov(G2_thread, O0);
 887 
 888     mov(L1, G1);                // Restore G1
 889     // G2 restored below
 890     mov(L3, G3);                // restore G3
 891     mov(L4, G4);                // restore G4
 892     mov(L5, G5_method);         // restore G5_method
 893 #if defined(COMPILER2) && !defined(_LP64)
 894     // Save & restore possible 64-bit Long arguments in G-regs
 895     sllx(L0,32,G2);             // Move old high G1 bits high in G2
 896     sllx(G1, 0,G1);             // Clear current high G1 bits
 897     or3 (G1,G2,G1);             // Recover 64-bit G1
 898     sllx(L6,32,G2);             // Move old high G4 bits high in G2
 899     sllx(G4, 0,G4);             // Clear current high G4 bits
 900     or3 (G4,G2,G4);             // Recover 64-bit G4
 901 #endif
 902     restore(O0, 0, G2_thread);
 903   }
 904 }
 905 
 906 
 907 void MacroAssembler::save_thread(const Register thread_cache) {
 908   verify_thread();
 909   if (thread_cache->is_valid()) {
 910     assert(thread_cache->is_local() || thread_cache->is_in(), "bad volatile");
 911     mov(G2_thread, thread_cache);
 912   }
 913   if (VerifyThread) {
 914     // smash G2_thread, as if the VM were about to anyway
 915     set(0x67676767, G2_thread);
 916   }
 917 }
 918 
 919 




 876     mov(G3, L3);                // avoid clobbering G3
 877     mov(G4, L4);                // avoid clobbering G4
 878     mov(G5_method, L5);         // avoid clobbering G5_method
 879 #endif /* CC_INTERP */
 880 #if defined(COMPILER2) && !defined(_LP64)
 881     // Save & restore possible 64-bit Long arguments in G-regs
 882     srlx(G1,32,L0);
 883     srlx(G4,32,L6);
 884 #endif
 885     call(CAST_FROM_FN_PTR(address,verify_thread_subroutine), relocInfo::runtime_call_type);
 886     delayed()->mov(G2_thread, O0);
 887 
 888     mov(L1, G1);                // Restore G1
 889     // G2 restored below
 890     mov(L3, G3);                // restore G3
 891     mov(L4, G4);                // restore G4
 892     mov(L5, G5_method);         // restore G5_method
 893 #if defined(COMPILER2) && !defined(_LP64)
 894     // Save & restore possible 64-bit Long arguments in G-regs
 895     sllx(L0,32,G2);             // Move old high G1 bits high in G2
 896     srl(G1, 0,G1);              // Clear current high G1 bits
 897     or3 (G1,G2,G1);             // Recover 64-bit G1
 898     sllx(L6,32,G2);             // Move old high G4 bits high in G2
 899     srl(G4, 0,G4);              // Clear current high G4 bits
 900     or3 (G4,G2,G4);             // Recover 64-bit G4
 901 #endif
 902     restore(O0, 0, G2_thread);
 903   }
 904 }
 905 
 906 
 907 void MacroAssembler::save_thread(const Register thread_cache) {
 908   verify_thread();
 909   if (thread_cache->is_valid()) {
 910     assert(thread_cache->is_local() || thread_cache->is_in(), "bad volatile");
 911     mov(G2_thread, thread_cache);
 912   }
 913   if (VerifyThread) {
 914     // smash G2_thread, as if the VM were about to anyway
 915     set(0x67676767, G2_thread);
 916   }
 917 }
 918 
 919 


src/cpu/sparc/vm/assembler_sparc.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File