
src/hotspot/cpu/x86/templateTable_x86.cpp

rev 47594 : imported patch Interpreter-Poll-7
rev 47595 : imported patch Interpreter-Poll-Wide_Ret-8
rev 47597 : imported patch Interpreter-Poll-Switch-10


2356   // load lo & hi
2357   __ movl(rcx, Address(rbx, BytesPerInt));
2358   __ movl(rdx, Address(rbx, 2 * BytesPerInt));
2359   __ bswapl(rcx);
2360   __ bswapl(rdx);
2361   // check against lo & hi
2362   __ cmpl(rax, rcx);
2363   __ jcc(Assembler::less, default_case);
2364   __ cmpl(rax, rdx);
2365   __ jcc(Assembler::greater, default_case);
2366   // lookup dispatch offset
2367   __ subl(rax, rcx);
2368   __ movl(rdx, Address(rbx, rax, Address::times_4, 3 * BytesPerInt));
2369   __ profile_switch_case(rax, rbx, rcx);
2370   // continue execution
2371   __ bind(continue_execution);
2372   __ bswapl(rdx);
2373   LP64_ONLY(__ movl2ptr(rdx, rdx));
2374   __ load_unsigned_byte(rbx, Address(rbcp, rdx, Address::times_1));
2375   __ addptr(rbcp, rdx);
2376   __ dispatch_only(vtos);
2377   // handle default
2378   __ bind(default_case);
2379   __ profile_switch_default(rax);
2380   __ movl(rdx, Address(rbx, 0));
2381   __ jmp(continue_execution);
2382 }
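
The code above is evidently the tail of TemplateTable::tableswitch(): the key in rax is range-checked against the byte-swapped lo/hi bounds read from the 4-byte-aligned jump table at the bcp, the selected 32-bit branch offset is picked out of the table, and that offset is added to rbcp before dispatching. A minimal C++ sketch of the same lookup, assuming 'table' points at the aligned payload; read_s4_be and tableswitch_offset are illustrative names, not HotSpot functions:

    #include <cstdint>

    // Hedged sketch of the tableswitch dispatch above; not HotSpot code.
    // 'table' is assumed to point at the 4-byte-aligned payload after the
    // tableswitch opcode: default offset, lo, hi, then (hi - lo + 1) branch
    // offsets, all stored big-endian (hence the bswapl instructions above).
    static int32_t read_s4_be(const uint8_t* p) {          // illustrative helper
      return (int32_t)((uint32_t)p[0] << 24 | (uint32_t)p[1] << 16 |
                       (uint32_t)p[2] << 8  | (uint32_t)p[3]);
    }

    int32_t tableswitch_offset(const uint8_t* table, int32_t key) {
      int32_t deflt = read_s4_be(table);                   // default offset
      int32_t lo    = read_s4_be(table + 4);
      int32_t hi    = read_s4_be(table + 8);
      if (key < lo || key > hi) return deflt;              // default_case path
      return read_s4_be(table + (3 + (key - lo)) * 4);     // dispatch offset
    }
    // The generated code then adds the offset to rbcp and dispatches the
    // bytecode found there.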
2383 
2384 void TemplateTable::lookupswitch() {
2385   transition(itos, itos);
2386   __ stop("lookupswitch bytecode should have been rewritten");
2387 }
2388 
2389 void TemplateTable::fast_linearswitch() {
2390   transition(itos, vtos);
2391   Label loop_entry, loop, found, continue_execution;
2392   // bswap rax so we can avoid bswapping the table entries
2393   __ bswapl(rax);
2394   // align r13
2395   __ lea(rbx, at_bcp(BytesPerInt)); // btw: should be able to get rid of
2396                                     // this instruction (change offsets


2404   __ bind(loop);
2405   __ cmpl(rax, Address(rbx, rcx, Address::times_8, 2 * BytesPerInt));
2406   __ jcc(Assembler::equal, found);
2407   __ bind(loop_entry);
2408   __ decrementl(rcx);
2409   __ jcc(Assembler::greaterEqual, loop);
2410   // default case
2411   __ profile_switch_default(rax);
2412   __ movl(rdx, Address(rbx, 0));
2413   __ jmp(continue_execution);
2414   // entry found -> get offset
2415   __ bind(found);
2416   __ movl(rdx, Address(rbx, rcx, Address::times_8, 3 * BytesPerInt));
2417   __ profile_switch_case(rcx, rax, rbx);
2418   // continue execution
2419   __ bind(continue_execution);
2420   __ bswapl(rdx);
2421   __ movl2ptr(rdx, rdx);
2422   __ load_unsigned_byte(rbx, Address(rbcp, rdx, Address::times_1));
2423   __ addptr(rbcp, rdx);
2424   __ dispatch_only(vtos);
2425 }
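
fast_linearswitch is the rewritten form of lookupswitch used for small pair counts: rather than byte-swapping every stored match word, the generated code swaps the key in rax once (the bswapl(rax) above) and scans the npairs (match, offset) pairs backwards against the raw big-endian entries. A hedged C++ sketch of the equivalent lookup, decoding every word for readability; the names are illustrative, not HotSpot functions:

    #include <cstdint>

    // Hedged sketch of fast_linearswitch above; not HotSpot code. The payload
    // is: default offset, npairs, then npairs (match, offset) pairs, all
    // big-endian in the bytecode stream.
    static int32_t read_s4_be(const uint8_t* p) {          // illustrative helper
      return (int32_t)((uint32_t)p[0] << 24 | (uint32_t)p[1] << 16 |
                       (uint32_t)p[2] << 8  | (uint32_t)p[3]);
    }

    int32_t linearswitch_offset(const uint8_t* table, int32_t key) {
      int32_t npairs = read_s4_be(table + 4);
      for (int i = npairs - 1; i >= 0; i--) {              // decrementl(rcx) loop
        if (read_s4_be(table + 8 + 8 * i) == key) {        // match of pair i
          return read_s4_be(table + 12 + 8 * i);           // its branch offset
        }
      }
      return read_s4_be(table);                            // default offset
    }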
2426 
2427 void TemplateTable::fast_binaryswitch() {
2428   transition(itos, vtos);
2429   // Implementation using the following core algorithm:
2430   //
2431   // int binary_search(int key, LookupswitchPair* array, int n) {
2432   //   // Binary search according to "Methodik des Programmierens" by
2433   //   // Edsger W. Dijkstra and W.H.J. Feijen, Addison Wesley Germany 1985.
2434   //   int i = 0;
2435   //   int j = n;
2436   //   while (i+1 < j) {
2437   //     // invariant P: 0 <= i < j <= n and (a[i] <= key < a[j] or Q)
2438   //     // with      Q: for all i: 0 <= i < n: key < a[i]
2439   //     // where a stands for the array and assuming that the (inexisting)
2440   //     // element a[n] is infinitely big.
2441   //     int h = (i + j) >> 1;
2442   //     // i < h < j
2443   //     if (key < array[h].fast_match()) {
2444   //       j = h;


2508 
2509   // end of binary search, result index is i (must check again!)
2510   Label default_case;
2511   // Convert array[i].match to native byte-ordering before compare
2512   __ movl(temp, Address(array, i, Address::times_8));
2513   __ bswapl(temp);
2514   __ cmpl(key, temp);
2515   __ jcc(Assembler::notEqual, default_case);
2516 
2517   // entry found -> j = offset
2518   __ movl(j , Address(array, i, Address::times_8, BytesPerInt));
2519   __ profile_switch_case(i, key, array);
2520   __ bswapl(j);
2521   LP64_ONLY(__ movslq(j, j));
2522 
2523   NOT_LP64(__ restore_bcp());
2524   NOT_LP64(__ restore_locals());                           // restore rdi
2525 
2526   __ load_unsigned_byte(rbx, Address(rbcp, j, Address::times_1));
2527   __ addptr(rbcp, j);
2528   __ dispatch_only(vtos);
2529 
2530   // default case -> j = default offset
2531   __ bind(default_case);
2532   __ profile_switch_default(i);
2533   __ movl(j, Address(array, -2 * BytesPerInt));
2534   __ bswapl(j);
2535   LP64_ONLY(__ movslq(j, j));
2536 
2537   NOT_LP64(__ restore_bcp());
2538   NOT_LP64(__ restore_locals());
2539 
2540   __ load_unsigned_byte(rbx, Address(rbcp, j, Address::times_1));
2541   __ addptr(rbcp, j);
2542   __ dispatch_only(vtos);
2543 }
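
The binary search works over the same lookupswitch payload viewed as an array of 8-byte (match, offset) pairs, which is why the accesses above scale the index with Address::times_8, and the default offset sits two ints before the first pair (the -2 * BytesPerInt load). A hedged, self-contained C++ sketch of the search and of the final "must check again" test; the names are illustrative and n is assumed to be at least 1:

    #include <cstdint>

    // Hedged sketch of fast_binaryswitch above; not HotSpot code. 'array' is
    // assumed to point at the first 8-byte (match, offset) pair; all words
    // are big-endian.
    static int32_t read_s4_be(const uint8_t* p) {          // illustrative helper
      return (int32_t)((uint32_t)p[0] << 24 | (uint32_t)p[1] << 16 |
                       (uint32_t)p[2] << 8  | (uint32_t)p[3]);
    }

    int32_t binaryswitch_offset(const uint8_t* array, int n, int32_t key) {
      int i = 0, j = n;
      while (i + 1 < j) {                                  // invariant as in the comment
        int h = (i + j) >> 1;                              // i < h < j
        if (key < read_s4_be(array + 8 * h)) j = h; else i = h;
      }
      if (read_s4_be(array + 8 * i) != key) {              // "must check again!"
        return read_s4_be(array - 8);                      // default offset (-2 * BytesPerInt)
      }
      return read_s4_be(array + 8 * i + 4);                // matched pair's branch offset
    }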
2544 
2545 void TemplateTable::_return(TosState state) {
2546   transition(state, state);
2547 
2548   assert(_desc->calls_vm(),
2549          "inconsistent calls_vm information"); // call in remove_activation
2550 
2551   if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2552     assert(state == vtos, "only valid state");
2553     Register robj = LP64_ONLY(c_rarg1) NOT_LP64(rax);
2554     __ movptr(robj, aaddress(0));
2555     __ load_klass(rdi, robj);
2556     __ movl(rdi, Address(rdi, Klass::access_flags_offset()));
2557     __ testl(rdi, JVM_ACC_HAS_FINALIZER);
2558     Label skip_register_finalizer;
2559     __ jcc(Assembler::zero, skip_register_finalizer);
2560 
2561     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), robj);
2562 




2356   // load lo & hi
2357   __ movl(rcx, Address(rbx, BytesPerInt));
2358   __ movl(rdx, Address(rbx, 2 * BytesPerInt));
2359   __ bswapl(rcx);
2360   __ bswapl(rdx);
2361   // check against lo & hi
2362   __ cmpl(rax, rcx);
2363   __ jcc(Assembler::less, default_case);
2364   __ cmpl(rax, rdx);
2365   __ jcc(Assembler::greater, default_case);
2366   // lookup dispatch offset
2367   __ subl(rax, rcx);
2368   __ movl(rdx, Address(rbx, rax, Address::times_4, 3 * BytesPerInt));
2369   __ profile_switch_case(rax, rbx, rcx);
2370   // continue execution
2371   __ bind(continue_execution);
2372   __ bswapl(rdx);
2373   LP64_ONLY(__ movl2ptr(rdx, rdx));
2374   __ load_unsigned_byte(rbx, Address(rbcp, rdx, Address::times_1));
2375   __ addptr(rbcp, rdx);
2376   __ dispatch_only(vtos, true);
2377   // handle default
2378   __ bind(default_case);
2379   __ profile_switch_default(rax);
2380   __ movl(rdx, Address(rbx, 0));
2381   __ jmp(continue_execution);
2382 }
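
This second listing is the patched file; relative to the listing above it, the switch and return templates differ only in passing true as a second argument to dispatch_only, which, judging by the Interpreter-Poll patch names in the header, presumably asks for a safepoint poll to be emitted at these non-sequential dispatch points. A conceptual, self-contained C++ sketch of that idea; every name below is a stand-in, not the HotSpot API:

    #include <atomic>
    #include <cstdint>

    // Conceptual sketch only: before jumping to the next bytecode handler,
    // test whether a safepoint/handshake is pending, so that switch and
    // return bytecodes become polling points too.
    static std::atomic<bool> poll_armed{false};            // stand-in for the poll word/page
    static void handle_safepoint() { /* block for the safepoint; stand-in */ }
    static void jump_to_handler(uint8_t bc) { (void)bc; /* normal table dispatch */ }

    void dispatch_with_poll(uint8_t next_bytecode) {
      if (poll_armed.load(std::memory_order_relaxed)) {    // the emitted poll
        handle_safepoint();
      }
      jump_to_handler(next_bytecode);                      // unchanged dispatch path
    }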
2383 
2384 void TemplateTable::lookupswitch() {
2385   transition(itos, itos);
2386   __ stop("lookupswitch bytecode should have been rewritten");
2387 }
2388 
2389 void TemplateTable::fast_linearswitch() {
2390   transition(itos, vtos);
2391   Label loop_entry, loop, found, continue_execution;
2392   // bswap rax so we can avoid bswapping the table entries
2393   __ bswapl(rax);
2394   // align r13
2395   __ lea(rbx, at_bcp(BytesPerInt)); // btw: should be able to get rid of
2396                                     // this instruction (change offsets


2404   __ bind(loop);
2405   __ cmpl(rax, Address(rbx, rcx, Address::times_8, 2 * BytesPerInt));
2406   __ jcc(Assembler::equal, found);
2407   __ bind(loop_entry);
2408   __ decrementl(rcx);
2409   __ jcc(Assembler::greaterEqual, loop);
2410   // default case
2411   __ profile_switch_default(rax);
2412   __ movl(rdx, Address(rbx, 0));
2413   __ jmp(continue_execution);
2414   // entry found -> get offset
2415   __ bind(found);
2416   __ movl(rdx, Address(rbx, rcx, Address::times_8, 3 * BytesPerInt));
2417   __ profile_switch_case(rcx, rax, rbx);
2418   // continue execution
2419   __ bind(continue_execution);
2420   __ bswapl(rdx);
2421   __ movl2ptr(rdx, rdx);
2422   __ load_unsigned_byte(rbx, Address(rbcp, rdx, Address::times_1));
2423   __ addptr(rbcp, rdx);
2424   __ dispatch_only(vtos, true);
2425 }
2426 
2427 void TemplateTable::fast_binaryswitch() {
2428   transition(itos, vtos);
2429   // Implementation using the following core algorithm:
2430   //
2431   // int binary_search(int key, LookupswitchPair* array, int n) {
2432   //   // Binary search according to "Methodik des Programmierens" by
2433   //   // Edsger W. Dijkstra and W.H.J. Feijen, Addison Wesley Germany 1985.
2434   //   int i = 0;
2435   //   int j = n;
2436   //   while (i+1 < j) {
2437   //     // invariant P: 0 <= i < j <= n and (a[i] <= key < a[j] or Q)
2438   //     // with      Q: for all i: 0 <= i < n: key < a[i]
2439   //     // where a stands for the array and assuming that the (inexisting)
2440   //     // element a[n] is infinitely big.
2441   //     int h = (i + j) >> 1;
2442   //     // i < h < j
2443   //     if (key < array[h].fast_match()) {
2444   //       j = h;


2508 
2509   // end of binary search, result index is i (must check again!)
2510   Label default_case;
2511   // Convert array[i].match to native byte-ordering before compare
2512   __ movl(temp, Address(array, i, Address::times_8));
2513   __ bswapl(temp);
2514   __ cmpl(key, temp);
2515   __ jcc(Assembler::notEqual, default_case);
2516 
2517   // entry found -> j = offset
2518   __ movl(j , Address(array, i, Address::times_8, BytesPerInt));
2519   __ profile_switch_case(i, key, array);
2520   __ bswapl(j);
2521   LP64_ONLY(__ movslq(j, j));
2522 
2523   NOT_LP64(__ restore_bcp());
2524   NOT_LP64(__ restore_locals());                           // restore rdi
2525 
2526   __ load_unsigned_byte(rbx, Address(rbcp, j, Address::times_1));
2527   __ addptr(rbcp, j);
2528   __ dispatch_only(vtos, true);
2529 
2530   // default case -> j = default offset
2531   __ bind(default_case);
2532   __ profile_switch_default(i);
2533   __ movl(j, Address(array, -2 * BytesPerInt));
2534   __ bswapl(j);
2535   LP64_ONLY(__ movslq(j, j));
2536 
2537   NOT_LP64(__ restore_bcp());
2538   NOT_LP64(__ restore_locals());
2539 
2540   __ load_unsigned_byte(rbx, Address(rbcp, j, Address::times_1));
2541   __ addptr(rbcp, j);
2542   __ dispatch_only(vtos, true);
2543 }
2544 
2545 void TemplateTable::_return(TosState state) {
2546   transition(state, state);
2547 
2548   assert(_desc->calls_vm(),
2549          "inconsistent calls_vm information"); // call in remove_activation
2550 
2551   if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2552     assert(state == vtos, "only valid state");
2553     Register robj = LP64_ONLY(c_rarg1) NOT_LP64(rax);
2554     __ movptr(robj, aaddress(0));
2555     __ load_klass(rdi, robj);
2556     __ movl(rdi, Address(rdi, Klass::access_flags_offset()));
2557     __ testl(rdi, JVM_ACC_HAS_FINALIZER);
2558     Label skip_register_finalizer;
2559     __ jcc(Assembler::zero, skip_register_finalizer);
2560 
2561     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), robj);
2562 


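The excerpt breaks off inside TemplateTable::_return(); the visible portion handles Bytecodes::_return_register_finalizer, the rewritten return used in java.lang.Object.<init>: load the receiver from local 0, load its klass, test JVM_ACC_HAS_FINALIZER in the access flags, and only then call InterpreterRuntime::register_finalizer through call_VM. A hedged C++ sketch with stand-in types (not HotSpot's):

    // Hedged, self-contained sketch of the finalizer check visible above:
    // test the receiver klass's flags and, only if it declares a finalizer,
    // call into the runtime before taking the normal return path.
    struct KlassStub { bool has_finalizer; };              // stand-in for Klass + access flags
    struct OopStub   { KlassStub* klass; };                // stand-in for an object reference

    static void runtime_register_finalizer(OopStub* /*obj*/) {
      // stand-in for InterpreterRuntime::register_finalizer, reached via call_VM
    }

    void return_register_finalizer(OopStub* receiver) {    // receiver = local 0 (aaddress(0))
      if (receiver->klass->has_finalizer) {                // testl(rdi, JVM_ACC_HAS_FINALIZER)
        runtime_register_finalizer(receiver);              // the call_VM above
      }
      // ...followed by the ordinary return path (remove_activation), not shown
    }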