< prev index next >

src/os_cpu/windows_x86/vm/os_windows_x86.cpp

Print this page
rev 13267 : Atomic polishing — migrate the atomic bootstrap signatures from JNI j-types (jint/jbyte/jlong) to fixed-width standard types (int32_t/int8_t/int64_t).
   1 /*
   2  * Copyright (c) 1999, 2016, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *


 200   // This structure describes the covered dynamic code area.
 201   // Addresses are relative to the beginning on the code cache area
 202   prt = &pDCD->rt;
 203   prt->BeginAddress = 0;
 204   prt->EndAddress = (ULONG)(high - low);
 205   prt->UnwindData = ((char *)punwind - low);
 206 
 207   guarantee(RtlAddFunctionTable(prt, 1, (ULONGLONG)low),
 208             "Failed to register Dynamic Code Exception Handler with RtlAddFunctionTable");
 209 
 210 #endif // AMD64
 211   return true;
 212 }
 213 
// Platform-specific per-thread initialization hook.
// Windows x86 requires no extra setup, so this is a no-op.
void os::initialize_thread(Thread* thr) {
// Nothing to do.
}
 217 
// Atomics and Stub Functions

// Function signatures of the generated assembly stubs for atomic
// operations.  The *_bootstrap functions below look these entry points
// up in StubRoutines and fall back to plain memory operations until the
// stubs have been generated.
typedef jint      xchg_func_t            (jint,     volatile jint*);
typedef intptr_t  xchg_ptr_func_t        (intptr_t, volatile intptr_t*);
typedef jint      cmpxchg_func_t         (jint,     volatile jint*,  jint);
typedef jbyte     cmpxchg_byte_func_t    (jbyte,    volatile jbyte*, jbyte);
typedef jlong     cmpxchg_long_func_t    (jlong,    volatile jlong*, jlong);
typedef jint      add_func_t             (jint,     volatile jint*);
typedef intptr_t  add_ptr_func_t         (intptr_t, volatile intptr_t*);
 227 
 228 #ifdef AMD64
 229 
 230 jint os::atomic_xchg_bootstrap(jint exchange_value, volatile jint* dest) {
 231   // try to use the stub:
 232   xchg_func_t* func = CAST_TO_FN_PTR(xchg_func_t*, StubRoutines::atomic_xchg_entry());
 233 
 234   if (func != NULL) {
 235     os::atomic_xchg_func = func;
 236     return (*func)(exchange_value, dest);
 237   }
 238   assert(Threads::number_of_threads() == 0, "for bootstrap only");
 239 
 240   jint old_value = *dest;
 241   *dest = exchange_value;
 242   return old_value;
 243 }
 244 
 245 intptr_t os::atomic_xchg_ptr_bootstrap(intptr_t exchange_value, volatile intptr_t* dest) {
 246   // try to use the stub:
 247   xchg_ptr_func_t* func = CAST_TO_FN_PTR(xchg_ptr_func_t*, StubRoutines::atomic_xchg_ptr_entry());
 248 
 249   if (func != NULL) {
 250     os::atomic_xchg_ptr_func = func;
 251     return (*func)(exchange_value, dest);
 252   }
 253   assert(Threads::number_of_threads() == 0, "for bootstrap only");
 254 
 255   intptr_t old_value = *dest;
 256   *dest = exchange_value;
 257   return old_value;
 258 }
 259 
 260 
 261 jint os::atomic_cmpxchg_bootstrap(jint exchange_value, volatile jint* dest, jint compare_value) {
 262   // try to use the stub:
 263   cmpxchg_func_t* func = CAST_TO_FN_PTR(cmpxchg_func_t*, StubRoutines::atomic_cmpxchg_entry());
 264 
 265   if (func != NULL) {
 266     os::atomic_cmpxchg_func = func;
 267     return (*func)(exchange_value, dest, compare_value);
 268   }
 269   assert(Threads::number_of_threads() == 0, "for bootstrap only");
 270 
 271   jint old_value = *dest;
 272   if (old_value == compare_value)
 273     *dest = exchange_value;
 274   return old_value;
 275 }
 276 
 277 jbyte os::atomic_cmpxchg_byte_bootstrap(jbyte exchange_value, volatile jbyte* dest, jbyte compare_value) {
 278   // try to use the stub:
 279   cmpxchg_byte_func_t* func = CAST_TO_FN_PTR(cmpxchg_byte_func_t*, StubRoutines::atomic_cmpxchg_byte_entry());
 280 
 281   if (func != NULL) {
 282     os::atomic_cmpxchg_byte_func = func;
 283     return (*func)(exchange_value, dest, compare_value);
 284   }
 285   assert(Threads::number_of_threads() == 0, "for bootstrap only");
 286 
 287   jbyte old_value = *dest;
 288   if (old_value == compare_value)
 289     *dest = exchange_value;
 290   return old_value;
 291 }
 292 
 293 #endif // AMD64
 294 
 295 jlong os::atomic_cmpxchg_long_bootstrap(jlong exchange_value, volatile jlong* dest, jlong compare_value) {
 296   // try to use the stub:
 297   cmpxchg_long_func_t* func = CAST_TO_FN_PTR(cmpxchg_long_func_t*, StubRoutines::atomic_cmpxchg_long_entry());
 298 
 299   if (func != NULL) {
 300     os::atomic_cmpxchg_long_func = func;
 301     return (*func)(exchange_value, dest, compare_value);
 302   }
 303   assert(Threads::number_of_threads() == 0, "for bootstrap only");
 304 
 305   jlong old_value = *dest;
 306   if (old_value == compare_value)
 307     *dest = exchange_value;
 308   return old_value;
 309 }
 310 
 311 #ifdef AMD64
 312 
 313 jint os::atomic_add_bootstrap(jint add_value, volatile jint* dest) {
 314   // try to use the stub:
 315   add_func_t* func = CAST_TO_FN_PTR(add_func_t*, StubRoutines::atomic_add_entry());
 316 
 317   if (func != NULL) {
 318     os::atomic_add_func = func;
 319     return (*func)(add_value, dest);
 320   }
 321   assert(Threads::number_of_threads() == 0, "for bootstrap only");
 322 
 323   return (*dest) += add_value;
 324 }
 325 
 326 intptr_t os::atomic_add_ptr_bootstrap(intptr_t add_value, volatile intptr_t* dest) {
 327   // try to use the stub:
 328   add_ptr_func_t* func = CAST_TO_FN_PTR(add_ptr_func_t*, StubRoutines::atomic_add_ptr_entry());
 329 
 330   if (func != NULL) {
 331     os::atomic_add_ptr_func = func;
 332     return (*func)(add_value, dest);
 333   }


   1 /*
   2  * Copyright (c) 1999, 2017, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *


 200   // This structure describes the covered dynamic code area.
 201   // Addresses are relative to the beginning on the code cache area
 202   prt = &pDCD->rt;
 203   prt->BeginAddress = 0;
 204   prt->EndAddress = (ULONG)(high - low);
 205   prt->UnwindData = ((char *)punwind - low);
 206 
 207   guarantee(RtlAddFunctionTable(prt, 1, (ULONGLONG)low),
 208             "Failed to register Dynamic Code Exception Handler with RtlAddFunctionTable");
 209 
 210 #endif // AMD64
 211   return true;
 212 }
 213 
// Platform-specific per-thread initialization hook.
// Windows x86 requires no extra setup, so this is a no-op.
void os::initialize_thread(Thread* thr) {
// Nothing to do.
}
 217 
// Atomics and Stub Functions

// Function signatures of the generated assembly stubs for atomic
// operations, expressed in fixed-width <stdint.h> types.  The
// *_bootstrap functions below look these entry points up in
// StubRoutines and fall back to plain memory operations until the
// stubs have been generated.
typedef int32_t  xchg_func_t            (int32_t,  volatile int32_t*);
typedef intptr_t xchg_ptr_func_t        (intptr_t, volatile intptr_t*);
typedef int32_t  cmpxchg_func_t         (int32_t,  volatile int32_t*, int32_t);
typedef int8_t   cmpxchg_byte_func_t    (int8_t,   volatile int8_t*,  int8_t);
typedef int64_t  cmpxchg_long_func_t    (int64_t,  volatile int64_t*, int64_t);
typedef int32_t  add_func_t             (int32_t,  volatile int32_t*);
typedef intptr_t add_ptr_func_t         (intptr_t, volatile intptr_t*);
 227 
 228 #ifdef AMD64
 229 
 230 int32_t os::atomic_xchg_bootstrap(int32_t exchange_value, volatile int32_t* dest) {
 231   // try to use the stub:
 232   xchg_func_t* func = CAST_TO_FN_PTR(xchg_func_t*, StubRoutines::atomic_xchg_entry());
 233 
 234   if (func != NULL) {
 235     os::atomic_xchg_func = func;
 236     return (*func)(exchange_value, dest);
 237   }
 238   assert(Threads::number_of_threads() == 0, "for bootstrap only");
 239 
 240   int32_t old_value = *dest;
 241   *dest = exchange_value;
 242   return old_value;
 243 }
 244 
 245 intptr_t os::atomic_xchg_ptr_bootstrap(intptr_t exchange_value, volatile intptr_t* dest) {
 246   // try to use the stub:
 247   xchg_ptr_func_t* func = CAST_TO_FN_PTR(xchg_ptr_func_t*, StubRoutines::atomic_xchg_ptr_entry());
 248 
 249   if (func != NULL) {
 250     os::atomic_xchg_ptr_func = func;
 251     return (*func)(exchange_value, dest);
 252   }
 253   assert(Threads::number_of_threads() == 0, "for bootstrap only");
 254 
 255   intptr_t old_value = *dest;
 256   *dest = exchange_value;
 257   return old_value;
 258 }
 259 
 260 
 261 int32_t os::atomic_cmpxchg_bootstrap(int32_t exchange_value, volatile int32_t* dest, int32_t compare_value) {
 262   // try to use the stub:
 263   cmpxchg_func_t* func = CAST_TO_FN_PTR(cmpxchg_func_t*, StubRoutines::atomic_cmpxchg_entry());
 264 
 265   if (func != NULL) {
 266     os::atomic_cmpxchg_func = func;
 267     return (*func)(exchange_value, dest, compare_value);
 268   }
 269   assert(Threads::number_of_threads() == 0, "for bootstrap only");
 270 
 271   int32_t old_value = *dest;
 272   if (old_value == compare_value)
 273     *dest = exchange_value;
 274   return old_value;
 275 }
 276 
 277 int8_t os::atomic_cmpxchg_byte_bootstrap(int8_t exchange_value, volatile int8_t* dest, int8_t compare_value) {
 278   // try to use the stub:
 279   cmpxchg_byte_func_t* func = CAST_TO_FN_PTR(cmpxchg_byte_func_t*, StubRoutines::atomic_cmpxchg_byte_entry());
 280 
 281   if (func != NULL) {
 282     os::atomic_cmpxchg_byte_func = func;
 283     return (*func)(exchange_value, dest, compare_value);
 284   }
 285   assert(Threads::number_of_threads() == 0, "for bootstrap only");
 286 
 287   int8_t old_value = *dest;
 288   if (old_value == compare_value)
 289     *dest = exchange_value;
 290   return old_value;
 291 }
 292 
 293 #endif // AMD64
 294 
 295 int64_t os::atomic_cmpxchg_long_bootstrap(int64_t exchange_value, volatile int64_t* dest, int64_t compare_value) {
 296   // try to use the stub:
 297   cmpxchg_long_func_t* func = CAST_TO_FN_PTR(cmpxchg_long_func_t*, StubRoutines::atomic_cmpxchg_long_entry());
 298 
 299   if (func != NULL) {
 300     os::atomic_cmpxchg_long_func = func;
 301     return (*func)(exchange_value, dest, compare_value);
 302   }
 303   assert(Threads::number_of_threads() == 0, "for bootstrap only");
 304 
 305   int64_t old_value = *dest;
 306   if (old_value == compare_value)
 307     *dest = exchange_value;
 308   return old_value;
 309 }
 310 
 311 #ifdef AMD64
 312 
 313 int32_t os::atomic_add_bootstrap(int32_t add_value, volatile int32_t* dest) {
 314   // try to use the stub:
 315   add_func_t* func = CAST_TO_FN_PTR(add_func_t*, StubRoutines::atomic_add_entry());
 316 
 317   if (func != NULL) {
 318     os::atomic_add_func = func;
 319     return (*func)(add_value, dest);
 320   }
 321   assert(Threads::number_of_threads() == 0, "for bootstrap only");
 322 
 323   return (*dest) += add_value;
 324 }
 325 
 326 intptr_t os::atomic_add_ptr_bootstrap(intptr_t add_value, volatile intptr_t* dest) {
 327   // try to use the stub:
 328   add_ptr_func_t* func = CAST_TO_FN_PTR(add_ptr_func_t*, StubRoutines::atomic_add_ptr_entry());
 329 
 330   if (func != NULL) {
 331     os::atomic_add_ptr_func = func;
 332     return (*func)(add_value, dest);
 333   }


< prev index next >