< prev index next >

src/cpu/x86/vm/sharedRuntime_x86_64.cpp

Print this page




 348     map->set_callee_saved(STACK_OFFSET( r13H_off ), r13->as_VMReg()->next());
 349     map->set_callee_saved(STACK_OFFSET( r14H_off ), r14->as_VMReg()->next());
 350     map->set_callee_saved(STACK_OFFSET( r15H_off ), r15->as_VMReg()->next());
 351     map->set_callee_saved(STACK_OFFSET(xmm0H_off ), xmm0->as_VMReg()->next());
 352     map->set_callee_saved(STACK_OFFSET(xmm1H_off ), xmm1->as_VMReg()->next());
 353     map->set_callee_saved(STACK_OFFSET(xmm2H_off ), xmm2->as_VMReg()->next());
 354     map->set_callee_saved(STACK_OFFSET(xmm3H_off ), xmm3->as_VMReg()->next());
 355     map->set_callee_saved(STACK_OFFSET(xmm4H_off ), xmm4->as_VMReg()->next());
 356     map->set_callee_saved(STACK_OFFSET(xmm5H_off ), xmm5->as_VMReg()->next());
 357     map->set_callee_saved(STACK_OFFSET(xmm6H_off ), xmm6->as_VMReg()->next());
 358     map->set_callee_saved(STACK_OFFSET(xmm7H_off ), xmm7->as_VMReg()->next());
 359     map->set_callee_saved(STACK_OFFSET(xmm8H_off ), xmm8->as_VMReg()->next());
 360     map->set_callee_saved(STACK_OFFSET(xmm9H_off ), xmm9->as_VMReg()->next());
 361     map->set_callee_saved(STACK_OFFSET(xmm10H_off), xmm10->as_VMReg()->next());
 362     map->set_callee_saved(STACK_OFFSET(xmm11H_off), xmm11->as_VMReg()->next());
 363     map->set_callee_saved(STACK_OFFSET(xmm12H_off), xmm12->as_VMReg()->next());
 364     map->set_callee_saved(STACK_OFFSET(xmm13H_off), xmm13->as_VMReg()->next());
 365     map->set_callee_saved(STACK_OFFSET(xmm14H_off), xmm14->as_VMReg()->next());
 366     map->set_callee_saved(STACK_OFFSET(xmm15H_off), xmm15->as_VMReg()->next());
 367     if (UseAVX > 2) {
           // The xmm*H_off slots describe the HIGH half of each saved XMM
           // register.  Exactly as for xmm0-xmm15 above, they must be mapped
           // to the second VMReg of the register (as_VMReg()->next()).
           // Using plain as_VMReg() here would alias the low-half slot and
           // produce a corrupt oop map for xmm16-xmm31.
 368       map->set_callee_saved(STACK_OFFSET(xmm16H_off), xmm16->as_VMReg()->next());
 369       map->set_callee_saved(STACK_OFFSET(xmm17H_off), xmm17->as_VMReg()->next());
 370       map->set_callee_saved(STACK_OFFSET(xmm18H_off), xmm18->as_VMReg()->next());
 371       map->set_callee_saved(STACK_OFFSET(xmm19H_off), xmm19->as_VMReg()->next());
 372       map->set_callee_saved(STACK_OFFSET(xmm20H_off), xmm20->as_VMReg()->next());
 373       map->set_callee_saved(STACK_OFFSET(xmm21H_off), xmm21->as_VMReg()->next());
 374       map->set_callee_saved(STACK_OFFSET(xmm22H_off), xmm22->as_VMReg()->next());
 375       map->set_callee_saved(STACK_OFFSET(xmm23H_off), xmm23->as_VMReg()->next());
 376       map->set_callee_saved(STACK_OFFSET(xmm24H_off), xmm24->as_VMReg()->next());
 377       map->set_callee_saved(STACK_OFFSET(xmm25H_off), xmm25->as_VMReg()->next());
 378       map->set_callee_saved(STACK_OFFSET(xmm26H_off), xmm26->as_VMReg()->next());
 379       map->set_callee_saved(STACK_OFFSET(xmm27H_off), xmm27->as_VMReg()->next());
 380       map->set_callee_saved(STACK_OFFSET(xmm28H_off), xmm28->as_VMReg()->next());
 381       map->set_callee_saved(STACK_OFFSET(xmm29H_off), xmm29->as_VMReg()->next());
 382       map->set_callee_saved(STACK_OFFSET(xmm30H_off), xmm30->as_VMReg()->next());
 383       map->set_callee_saved(STACK_OFFSET(xmm31H_off), xmm31->as_VMReg()->next());
 384     }
 385   }
 386 
 387   return map;
 388 }
 389 
 390 // Restore the registers saved by save_live_registers and pop the save
 391 // areas off the stack.  When restore_vectors is true (C2 only, requires
 390 // MaxVectorSize == 64) the upper halves of the YMM/ZMM registers are
 390 // reloaded from their dedicated save area as well.
 390 void RegisterSaver::restore_live_registers(MacroAssembler* masm, bool restore_vectors) {
 391   if (frame::arg_reg_save_area_bytes != 0) {
 392     // Pop arg register save area
 393     __ addptr(rsp, frame::arg_reg_save_area_bytes);
 394   }
 395 #ifdef COMPILER2
 396   if (restore_vectors) {
 397     // Restore upper half of YMM registers (0..15)
 398     assert(UseAVX > 0, "512bit vectors are supported only with AVX");
 399     assert(MaxVectorSize == 64, "only 512bit vectors are supported now");
 400     __ vinsertf128h(xmm0, Address(rsp,  0));
 401     __ vinsertf128h(xmm1, Address(rsp, 16));
 402     __ vinsertf128h(xmm2, Address(rsp, 32));
 403     __ vinsertf128h(xmm3, Address(rsp, 48));


 449       __ vinsertf64x4h(xmm12, Address(rsp, 384));
 450       __ vinsertf64x4h(xmm13, Address(rsp, 416));
 451       __ vinsertf64x4h(xmm14, Address(rsp, 448));
 452       __ vinsertf64x4h(xmm15, Address(rsp, 480));
 453       __ vinsertf64x4h(xmm16, Address(rsp, 512));
 454       __ vinsertf64x4h(xmm17, Address(rsp, 544));
 455       __ vinsertf64x4h(xmm18, Address(rsp, 576));
 456       __ vinsertf64x4h(xmm19, Address(rsp, 608));
 457       __ vinsertf64x4h(xmm20, Address(rsp, 640));
 458       __ vinsertf64x4h(xmm21, Address(rsp, 672));
 459       __ vinsertf64x4h(xmm22, Address(rsp, 704));
 460       __ vinsertf64x4h(xmm23, Address(rsp, 736));
 461       __ vinsertf64x4h(xmm24, Address(rsp, 768));
 462       __ vinsertf64x4h(xmm25, Address(rsp, 800));
 463       __ vinsertf64x4h(xmm26, Address(rsp, 832));
 464       __ vinsertf64x4h(xmm27, Address(rsp, 864));
 465       __ vinsertf64x4h(xmm28, Address(rsp, 896));
 466       __ vinsertf64x4h(xmm29, Address(rsp, 928));
 467       __ vinsertf64x4h(xmm30, Address(rsp, 960));
 468       __ vinsertf64x4h(xmm31, Address(rsp, 992));
 469       // Bug fix: pop (addptr), not push (subptr), the 1024-byte ZMM
 469       // upper-half save area; subptr would grow the stack and leave
 469       // rsp pointing below the save area, unbalancing the frame.
 469       __ addptr(rsp, 1024);
 470     }
 471   }
 472 #else
 473   assert(!restore_vectors, "vectors are generated only by C2");
 474 #endif
 475   // Recover CPU state
 476   __ pop_CPU_state();
 477   // Get the rbp described implicitly by the calling convention (no oopMap)
 478   __ pop(rbp);
 479 }
 480 
 481 void RegisterSaver::restore_result_registers(MacroAssembler* masm) {
 482 
 483   // Just restore result register. Only used by deoptimization. By
 484   // now any callee save register that needs to be restored to a c2
 485   // caller of the deoptee has been extracted into the vframeArray
 486   // and will be stuffed into the c2i adapter we create for later
 487   // restoration so only result registers need to be restored here.
 488 
 489   // Restore fp result register




 348     map->set_callee_saved(STACK_OFFSET( r13H_off ), r13->as_VMReg()->next());
 349     map->set_callee_saved(STACK_OFFSET( r14H_off ), r14->as_VMReg()->next());
 350     map->set_callee_saved(STACK_OFFSET( r15H_off ), r15->as_VMReg()->next());
 351     map->set_callee_saved(STACK_OFFSET(xmm0H_off ), xmm0->as_VMReg()->next());
 352     map->set_callee_saved(STACK_OFFSET(xmm1H_off ), xmm1->as_VMReg()->next());
 353     map->set_callee_saved(STACK_OFFSET(xmm2H_off ), xmm2->as_VMReg()->next());
 354     map->set_callee_saved(STACK_OFFSET(xmm3H_off ), xmm3->as_VMReg()->next());
 355     map->set_callee_saved(STACK_OFFSET(xmm4H_off ), xmm4->as_VMReg()->next());
 356     map->set_callee_saved(STACK_OFFSET(xmm5H_off ), xmm5->as_VMReg()->next());
 357     map->set_callee_saved(STACK_OFFSET(xmm6H_off ), xmm6->as_VMReg()->next());
 358     map->set_callee_saved(STACK_OFFSET(xmm7H_off ), xmm7->as_VMReg()->next());
 359     map->set_callee_saved(STACK_OFFSET(xmm8H_off ), xmm8->as_VMReg()->next());
 360     map->set_callee_saved(STACK_OFFSET(xmm9H_off ), xmm9->as_VMReg()->next());
 361     map->set_callee_saved(STACK_OFFSET(xmm10H_off), xmm10->as_VMReg()->next());
 362     map->set_callee_saved(STACK_OFFSET(xmm11H_off), xmm11->as_VMReg()->next());
 363     map->set_callee_saved(STACK_OFFSET(xmm12H_off), xmm12->as_VMReg()->next());
 364     map->set_callee_saved(STACK_OFFSET(xmm13H_off), xmm13->as_VMReg()->next());
 365     map->set_callee_saved(STACK_OFFSET(xmm14H_off), xmm14->as_VMReg()->next());
 366     map->set_callee_saved(STACK_OFFSET(xmm15H_off), xmm15->as_VMReg()->next());
 367     if (UseAVX > 2) {
           // High-half (xmm*H_off) slots map to the second VMReg of each
           // register via ->next(), consistent with xmm0-xmm15 above.
 368       map->set_callee_saved(STACK_OFFSET(xmm16H_off), xmm16->as_VMReg()->next());
 369       map->set_callee_saved(STACK_OFFSET(xmm17H_off), xmm17->as_VMReg()->next());
 370       map->set_callee_saved(STACK_OFFSET(xmm18H_off), xmm18->as_VMReg()->next());
 371       map->set_callee_saved(STACK_OFFSET(xmm19H_off), xmm19->as_VMReg()->next());
 372       map->set_callee_saved(STACK_OFFSET(xmm20H_off), xmm20->as_VMReg()->next());
 373       map->set_callee_saved(STACK_OFFSET(xmm21H_off), xmm21->as_VMReg()->next());
 374       map->set_callee_saved(STACK_OFFSET(xmm22H_off), xmm22->as_VMReg()->next());
 375       map->set_callee_saved(STACK_OFFSET(xmm23H_off), xmm23->as_VMReg()->next());
 376       map->set_callee_saved(STACK_OFFSET(xmm24H_off), xmm24->as_VMReg()->next());
 377       map->set_callee_saved(STACK_OFFSET(xmm25H_off), xmm25->as_VMReg()->next());
 378       map->set_callee_saved(STACK_OFFSET(xmm26H_off), xmm26->as_VMReg()->next());
 379       map->set_callee_saved(STACK_OFFSET(xmm27H_off), xmm27->as_VMReg()->next());
 380       map->set_callee_saved(STACK_OFFSET(xmm28H_off), xmm28->as_VMReg()->next());
 381       map->set_callee_saved(STACK_OFFSET(xmm29H_off), xmm29->as_VMReg()->next());
 382       map->set_callee_saved(STACK_OFFSET(xmm30H_off), xmm30->as_VMReg()->next());
 383       map->set_callee_saved(STACK_OFFSET(xmm31H_off), xmm31->as_VMReg()->next());
 384     }
 385   }
 386 
 387   return map;
 388 }
 389 
 390 // Restores registers saved by save_live_registers and pops the save
 390 // areas.  With restore_vectors (C2 only, MaxVectorSize == 64) the YMM
 390 // and ZMM upper halves are reloaded from their save areas as well.
 390 void RegisterSaver::restore_live_registers(MacroAssembler* masm, bool restore_vectors) {
 391   if (frame::arg_reg_save_area_bytes != 0) {
 392     // Pop arg register save area
 393     __ addptr(rsp, frame::arg_reg_save_area_bytes);
 394   }
 395 #ifdef COMPILER2
 396   if (restore_vectors) {
 397     // Restore upper half of YMM registers (0..15)
 398     assert(UseAVX > 0, "512bit vectors are supported only with AVX");
 399     assert(MaxVectorSize == 64, "only 512bit vectors are supported now");
 400     __ vinsertf128h(xmm0, Address(rsp,  0));
 401     __ vinsertf128h(xmm1, Address(rsp, 16));
 402     __ vinsertf128h(xmm2, Address(rsp, 32));
 403     __ vinsertf128h(xmm3, Address(rsp, 48));


 449       __ vinsertf64x4h(xmm12, Address(rsp, 384));
 450       __ vinsertf64x4h(xmm13, Address(rsp, 416));
 451       __ vinsertf64x4h(xmm14, Address(rsp, 448));
 452       __ vinsertf64x4h(xmm15, Address(rsp, 480));
 453       __ vinsertf64x4h(xmm16, Address(rsp, 512));
 454       __ vinsertf64x4h(xmm17, Address(rsp, 544));
 455       __ vinsertf64x4h(xmm18, Address(rsp, 576));
 456       __ vinsertf64x4h(xmm19, Address(rsp, 608));
 457       __ vinsertf64x4h(xmm20, Address(rsp, 640));
 458       __ vinsertf64x4h(xmm21, Address(rsp, 672));
 459       __ vinsertf64x4h(xmm22, Address(rsp, 704));
 460       __ vinsertf64x4h(xmm23, Address(rsp, 736));
 461       __ vinsertf64x4h(xmm24, Address(rsp, 768));
 462       __ vinsertf64x4h(xmm25, Address(rsp, 800));
 463       __ vinsertf64x4h(xmm26, Address(rsp, 832));
 464       __ vinsertf64x4h(xmm27, Address(rsp, 864));
 465       __ vinsertf64x4h(xmm28, Address(rsp, 896));
 466       __ vinsertf64x4h(xmm29, Address(rsp, 928));
 467       __ vinsertf64x4h(xmm30, Address(rsp, 960));
 468       // Pop the 1024-byte ZMM upper-half save area (32 regs * 32 bytes).
 468       __ vinsertf64x4h(xmm31, Address(rsp, 992));
 469       __ addptr(rsp, 1024);
 470     }
 471   }
 472 #else
 473   assert(!restore_vectors, "vectors are generated only by C2");
 474 #endif
 475   // Recover CPU state
 476   __ pop_CPU_state();
 477   // Get the rbp described implicitly by the calling convention (no oopMap)
 478   __ pop(rbp);
 479 }
 480 
 481 void RegisterSaver::restore_result_registers(MacroAssembler* masm) {
 482 
 483   // Just restore result register. Only used by deoptimization. By
 484   // now any callee save register that needs to be restored to a c2
 485   // caller of the deoptee has been extracted into the vframeArray
 486   // and will be stuffed into the c2i adapter we create for later
 487   // restoration so only result registers need to be restored here.
 488 
 489   // Restore fp result register


< prev index next >