/*
 * Copyright (c) 1999, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

// ... (earlier portions of the file elided) ...
  return exchange_value;
}

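// Pointer flavor of xchg: delegates to the intptr_t overload, whose
// tail appears above.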
inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) {
  return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
}

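// Atomically compare *dest with compare_value and, if they match, store
// exchange_value; the value observed at *dest is returned either way.
// CMPXCHG takes its comparand in EAX and writes the old value back there,
// hence the "a" constraints; the lock prefix is emitted only on MP systems.
// Typical CAS-loop usage (a sketch; `counter` is a hypothetical
// volatile jint*):
//   jint old;
//   do {
//     old = *counter;
//   } while (Atomic::cmpxchg(old + 1, counter, old) != old);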
inline jint Atomic::cmpxchg(jint exchange_value, volatile jint* dest, jint compare_value) {
  int mp = os::is_MP();
  __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgl %1,(%3)"
                    : "=a" (exchange_value)
                    : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                    : "cc", "memory");
  return exchange_value;
}

#ifdef AMD64
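// On AMD64, naturally aligned 64-bit stores are atomic, so jlong/julong
// stores compile to plain moves; no lock prefix is needed (unlike 32-bit
// x86, where a 64-bit store requires special handling). The julong
// overload is only expected on JFR-enabled builds, hence the assert.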
inline void Atomic::store(jlong store_value, jlong* dest) { *dest = store_value; }
inline void Atomic::store(jlong store_value, volatile jlong* dest) { *dest = store_value; }
inline void Atomic::store(julong store_value, julong* dest) {
  assert(EnableJFR, "sanity check");
  *dest = store_value;
}

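// Atomic fetch-and-add on a pointer-sized value. XADD leaves the previous
// contents of *dest in the addend register, so addend + add_value is the
// updated value that callers expect back.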
inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  intptr_t addend = add_value;
  bool mp = os::is_MP();
  __asm__ __volatile__ (LOCK_IF_MP(%3) "xaddq %0,(%2)"
                        : "=r" (addend)
                        : "0" (addend), "r" (dest), "r" (mp)
                        : "cc", "memory");
  return addend + add_value;
}

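// void* flavor of add_ptr: the same operation via the intptr_t overload.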
inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) {
  return (void*)add_ptr(add_value, (volatile intptr_t*)dest);
}

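// Atomic increment with no return value: since the old value is not
// needed, a locked add suffices and avoids XADD's register round trip.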
inline void Atomic::inc_ptr(volatile intptr_t* dest) {
  bool mp = os::is_MP();
  __asm__ __volatile__ (LOCK_IF_MP(%1) "addq $1,(%0)"
                        :
                        : "r" (dest), "r" (mp)
                        : "cc", "memory");
}