< prev index next >
src/java.desktop/windows/native/libawt/windows/awt_Robot.cpp
Print this page
*** 314,326 ****
--- 314,342 ----
UINT scancode;
JNIEnv * env = (JNIEnv *)JNU_GetEnv(jvm, JNI_VERSION_1_2);
// convert Java key into Windows key (and modifiers too)
AwtComponent::JavaKeyToWindowsKey(jkey, &vkey, &modifiers);
+
if (vkey == 0) {
+ /* vkey would be 0 for all non-ascii inputs. If non-ascii
+ then we assume they are unicode characters and will
+ supply such input to SendInput() which can handle unicode
+ characters as well as ascii chars. Windows provides api's to
+ handle ascii and unicode characters. All ascii characters
+ (both OEM and standard ascii) would be supplied to keybd_event().
+
+ HandleUnicodeKeys() returns the status of the SendInput()
+ call, which returns 0 on failure and non-zero on success.
+ A status of 0 means that SendInput() was unable to interpret
+ the supplied input, in which case an
+ IllegalArgumentException is raised.
+ */
+ if(!HandleUnicodeKeys(jkey, dwFlags)) {
// no equivalent Windows key found for given Java keycode
JNU_ThrowIllegalArgumentException(env, "Invalid key code");
+ }
} else {
// get the scancode from the virtual key
scancode = ::MapVirtualKey(vkey, 0);
if (vkey == VK_RMENU ||
vkey == VK_DELETE ||
*** 337,346 ****
--- 353,373 ----
}
keybd_event(vkey, scancode, dwFlags, 0);
}
}
+ // Injects a single UTF-16 code unit as a keystroke via SendInput() with
+ // KEYEVENTF_UNICODE, so that characters with no virtual-key mapping can
+ // still be synthesized.
+ //
+ // key     - the character to inject (used as the UTF-16 scan value)
+ // dwFlags - key event flags (e.g. KEYEVENTF_KEYUP) OR-ed with the
+ //           unicode flag before dispatch
+ //
+ // Returns the number of events SendInput() inserted: 0 on failure,
+ // non-zero on success.
+ UINT AwtRobot::HandleUnicodeKeys(jint key, DWORD dwFlags)
+ {
+     NSWinInput::INPUT ip;
+     // Zero the whole structure first: ki.time and any padding must not be
+     // left uninitialized; time == 0 lets the system provide the timestamp.
+     ::ZeroMemory(&ip, sizeof(ip));
+     ip.type = 1;                          // INPUT_KEYBOARD
+     ip.ki.wVk = 0;                        // no virtual key: wScan carries the char
+     ip.ki.wScan = (WORD)key;              // UTF-16 code unit to inject
+     ip.ki.dwFlags = (DWORD)(dwFlags | 4); // KEYEVENTF_UNICODE(4)
+     ip.ki.dwExtraInfo = 0;
+     // cbSize must be sizeof(INPUT) per the SendInput() contract, not the
+     // size of the local NSWinInput wrapper type.
+     return ::SendInput(1, (LPINPUT)&ip, sizeof(INPUT));
+ }
+
//
// utility function to get the C++ object from the Java one
//
// (static)
AwtRobot * AwtRobot::GetRobot( jobject self )
< prev index next >