--- old/src/os_cpu/windows_x86/vm/os_windows_x86.cpp 2017-07-27 17:46:45.043146635 +0200
+++ new/src/os_cpu/windows_x86/vm/os_windows_x86.cpp 2017-07-27 17:46:44.891146640 +0200
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1999, 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1999, 2017, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -217,17 +217,17 @@
 
 // Atomics and Stub Functions
 
-typedef jint     xchg_func_t         (jint,     volatile jint*);
-typedef intptr_t xchg_ptr_func_t     (intptr_t, volatile intptr_t*);
-typedef jint     cmpxchg_func_t      (jint,     volatile jint*,  jint);
-typedef jbyte    cmpxchg_byte_func_t (jbyte,    volatile jbyte*, jbyte);
-typedef jlong    cmpxchg_long_func_t (jlong,    volatile jlong*, jlong);
-typedef jint     add_func_t          (jint,     volatile jint*);
-typedef intptr_t add_ptr_func_t      (intptr_t, volatile intptr_t*);
+typedef int32_t  xchg_func_t         (int32_t,  volatile int32_t*);
+typedef intptr_t xchg_ptr_func_t     (intptr_t, volatile intptr_t*);
+typedef int32_t  cmpxchg_func_t      (int32_t,  volatile int32_t*,  int32_t);
+typedef int8_t   cmpxchg_byte_func_t (int8_t,   volatile int8_t*,   int8_t);
+typedef int64_t  cmpxchg_long_func_t (int64_t,  volatile int64_t*,  int64_t);
+typedef int32_t  add_func_t          (int32_t,  volatile int32_t*);
+typedef intptr_t add_ptr_func_t      (intptr_t, volatile intptr_t*);
 
 #ifdef AMD64
 
-jint os::atomic_xchg_bootstrap(jint exchange_value, volatile jint* dest) {
+int32_t os::atomic_xchg_bootstrap(int32_t exchange_value, volatile int32_t* dest) {
   // try to use the stub:
   xchg_func_t* func = CAST_TO_FN_PTR(xchg_func_t*, StubRoutines::atomic_xchg_entry());
 
@@ -237,7 +237,7 @@
   }
   assert(Threads::number_of_threads() == 0, "for bootstrap only");
 
-  jint old_value = *dest;
+  int32_t old_value = *dest;
   *dest = exchange_value;
   return old_value;
 }
@@ -258,7 +258,7 @@
 }
 
 
-jint os::atomic_cmpxchg_bootstrap(jint exchange_value, volatile jint* dest, jint compare_value) {
+int32_t os::atomic_cmpxchg_bootstrap(int32_t exchange_value, volatile int32_t* dest, int32_t compare_value) {
   // try to use the stub:
   cmpxchg_func_t* func = CAST_TO_FN_PTR(cmpxchg_func_t*, StubRoutines::atomic_cmpxchg_entry());
 
@@ -268,13 +268,13 @@
   }
   assert(Threads::number_of_threads() == 0, "for bootstrap only");
 
-  jint old_value = *dest;
+  int32_t old_value = *dest;
   if (old_value == compare_value)
     *dest = exchange_value;
   return old_value;
 }
 
-jbyte os::atomic_cmpxchg_byte_bootstrap(jbyte exchange_value, volatile jbyte* dest, jbyte compare_value) {
+int8_t os::atomic_cmpxchg_byte_bootstrap(int8_t exchange_value, volatile int8_t* dest, int8_t compare_value) {
   // try to use the stub:
   cmpxchg_byte_func_t* func = CAST_TO_FN_PTR(cmpxchg_byte_func_t*, StubRoutines::atomic_cmpxchg_byte_entry());
 
@@ -284,7 +284,7 @@
   }
   assert(Threads::number_of_threads() == 0, "for bootstrap only");
 
-  jbyte old_value = *dest;
+  int8_t old_value = *dest;
   if (old_value == compare_value)
     *dest = exchange_value;
   return old_value;
@@ -292,7 +292,7 @@
 
 #endif // AMD64
 
-jlong os::atomic_cmpxchg_long_bootstrap(jlong exchange_value, volatile jlong* dest, jlong compare_value) {
+int64_t os::atomic_cmpxchg_long_bootstrap(int64_t exchange_value, volatile int64_t* dest, int64_t compare_value) {
   // try to use the stub:
   cmpxchg_long_func_t* func = CAST_TO_FN_PTR(cmpxchg_long_func_t*, StubRoutines::atomic_cmpxchg_long_entry());
 
@@ -302,7 +302,7 @@
   }
   assert(Threads::number_of_threads() == 0, "for bootstrap only");
 
-  jlong old_value = *dest;
+  int64_t old_value = *dest;
   if (old_value == compare_value)
     *dest = exchange_value;
   return old_value;
@@ -310,7 +310,7 @@
 
 #ifdef AMD64
 
-jint os::atomic_add_bootstrap(jint add_value, volatile jint* dest) {
+int32_t os::atomic_add_bootstrap(int32_t add_value, volatile int32_t* dest) {
   // try to use the stub:
   add_func_t* func = CAST_TO_FN_PTR(add_func_t*, StubRoutines::atomic_add_entry());