1 /*
2 * Copyright (c) 1999, 2019, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
68 #endif // AMD64
69 }
70
// Reverse the byte order of a 32-bit value (big-endian <-> little-endian).
inline u4 Bytes::swap_u4(u4 x) {
#ifdef AMD64
  // 64-bit build: use the library/compiler byte-swap intrinsic.
  return bswap_32(x);
#else
  // 32-bit x86 build: issue the BSWAP instruction directly.
  u4 ret;
  __asm__ __volatile__ (
    "bswap %0"
    :"=r" (ret)      // output : register 0 => ret
    :"0"  (x)        // input  : x => register 0
    :"0"             // clobbered register
    );
  return ret;
#endif // AMD64
}
85
// Reverse the byte order of a 64-bit value (big-endian <-> little-endian).
#ifdef AMD64
inline u8 Bytes::swap_u8(u8 x) {
#ifdef SPARC_WORKS
  // workaround for SunStudio12 CR6615391: emit BSWAPQ by hand instead of
  // calling bswap_64(), which that compiler miscompiled.
  __asm__ __volatile__ (
    "bswapq %0"
    :"=r" (x)        // output : register 0 => x
    :"0"  (x)        // input  : x => register 0
    :"0"             // clobbered register
    );
  return x;
#else
  // Normal 64-bit path: library/compiler byte-swap intrinsic.
  return bswap_64(x);
#endif
}
#else
// Helper function for swap_u8: swap each 32-bit half and exchange the
// halves, i.e. result = byteswap(x) placed in the high word, byteswap(y)
// in the low word.
inline u8 Bytes::swap_u8_base(u4 x, u4 y) {
  return (((u8)swap_u4(x))<<32) | swap_u4(y);
}

inline u8 Bytes::swap_u8(u8 x) {
  // 32-bit build: split x into its two 32-bit words via pointer punning.
  // On little-endian x86 the first word read is the low half, so after the
  // swap it correctly lands in the high half of the result.
  // NOTE(review): the (u4*) cast relies on x86 layout and technically
  // violates strict aliasing; kept as-is since this header is x86-only.
  return swap_u8_base(*(u4*)&x, *(((u4*)&x)+1));
}
#endif // !AMD64
111
112 #endif // OS_CPU_BSD_X86_BYTES_BSD_X86_INLINE_HPP
|
1 /*
2 * Copyright (c) 1999, 2020, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
68 #endif // AMD64
69 }
70
// Reverse the byte order of a 32-bit value (big-endian <-> little-endian).
inline u4 Bytes::swap_u4(u4 x) {
#ifdef AMD64
  // 64-bit build: use the library/compiler byte-swap intrinsic.
  return bswap_32(x);
#else
  // 32-bit x86 build: issue the BSWAP instruction directly.
  u4 ret;
  __asm__ __volatile__ (
    "bswap %0"
    :"=r" (ret)      // output : register 0 => ret
    :"0"  (x)        // input  : x => register 0
    :"0"             // clobbered register
    );
  return ret;
#endif // AMD64
}
85
// Reverse the byte order of a 64-bit value (big-endian <-> little-endian).
#ifdef AMD64
inline u8 Bytes::swap_u8(u8 x) {
  // 64-bit build: library/compiler byte-swap intrinsic.
  return bswap_64(x);
}
#else
// Helper function for swap_u8: swap each 32-bit half and exchange the
// halves, i.e. result = byteswap(x) placed in the high word, byteswap(y)
// in the low word.
inline u8 Bytes::swap_u8_base(u4 x, u4 y) {
  return (((u8)swap_u4(x))<<32) | swap_u4(y);
}

inline u8 Bytes::swap_u8(u8 x) {
  // 32-bit build: split x into its two 32-bit words via pointer punning.
  // On little-endian x86 the first word read is the low half, so after the
  // swap it correctly lands in the high half of the result.
  // NOTE(review): the (u4*) cast relies on x86 layout and technically
  // violates strict aliasing; kept as-is since this header is x86-only.
  return swap_u8_base(*(u4*)&x, *(((u4*)&x)+1));
}
#endif // !AMD64
100
101 #endif // OS_CPU_BSD_X86_BYTES_BSD_X86_INLINE_HPP
|