390 oop_maps = generate_handle_exception(id, sasm);
391 }
392 break;
393
394 case new_instance_id:
395 case fast_new_instance_id:
396 case fast_new_instance_init_check_id:
397 {
398 Register G5_klass = G5; // Incoming
399 Register O0_obj = O0; // Outgoing
400
401 if (id == new_instance_id) {
402 __ set_info("new_instance", dont_gc_arguments);
403 } else if (id == fast_new_instance_id) {
404 __ set_info("fast new_instance", dont_gc_arguments);
405 } else {
406 assert(id == fast_new_instance_init_check_id, "bad StubID");
407 __ set_info("fast new_instance init check", dont_gc_arguments);
408 }
409
410 // If TLAB is disabled, see if there is support for inlining contiguous
411 // allocations.
412 // Otherwise, just go to the slow path.
413 if ((id == fast_new_instance_id || id == fast_new_instance_init_check_id) &&
414 !UseTLAB && Universe::heap()->supports_inline_contig_alloc()) {
415 Label slow_path;
416 Register G1_obj_size = G1;
417 Register G3_t1 = G3;
418 Register G4_t2 = G4;
419 assert_different_registers(G5_klass, G1_obj_size, G3_t1, G4_t2);
420
421 // Push a frame since we may do dtrace notification for the
422 // allocation which requires calling out and we don't want
423 // to stomp the real return address.
424 __ save_frame(0);
425
426 if (id == fast_new_instance_init_check_id) {
427 // make sure the klass is initialized
428 __ ldub(G5_klass, in_bytes(InstanceKlass::init_state_offset()), G3_t1);
429 __ cmp(G3_t1, InstanceKlass::fully_initialized);
430 __ br(Assembler::notEqual, false, Assembler::pn, slow_path);
431 __ delayed()->nop();
432 }
433 #ifdef ASSERT
434 // assert object can be fast path allocated
|
390 oop_maps = generate_handle_exception(id, sasm);
391 }
392 break;
393
394 case new_instance_id:
395 case fast_new_instance_id:
396 case fast_new_instance_init_check_id:
397 {
398 Register G5_klass = G5; // Incoming
399 Register O0_obj = O0; // Outgoing
400
401 if (id == new_instance_id) {
402 __ set_info("new_instance", dont_gc_arguments);
403 } else if (id == fast_new_instance_id) {
404 __ set_info("fast new_instance", dont_gc_arguments);
405 } else {
406 assert(id == fast_new_instance_init_check_id, "bad StubID");
407 __ set_info("fast new_instance init check", dont_gc_arguments);
408 }
409
410 // If TLAB is disabled, see if there is support for inlining contiguous
411 // allocations.
412 // Otherwise, just go to the slow path.
413 if ((id == fast_new_instance_id || id == fast_new_instance_init_check_id) &&
414 !UseTLAB && Universe::heap()->supports_inline_contig_alloc()) { // inline alloc fast path only when TLABs are off
415 Label slow_path;
416 Register G1_obj_size = G1;
417 Register G3_t1 = G3;
418 Register G4_t2 = G4;
419 assert_different_registers(G5_klass, G1_obj_size, G3_t1, G4_t2); // klass must survive alongside the temps
420
421 // Push a frame since we may do dtrace notification for the
422 // allocation which requires calling out and we don't want
423 // to stomp the real return address.
424 __ save_frame(0);
425
426 if (id == fast_new_instance_init_check_id) {
427 // make sure the klass is initialized
428 __ ldub(G5_klass, in_bytes(InstanceKlass::init_state_offset()), G3_t1); // init_state is read as a single byte
429 __ cmp(G3_t1, InstanceKlass::fully_initialized);
430 __ br(Assembler::notEqual, false, Assembler::pn, slow_path); // pn: predict not taken (init check rarely fails)
431 __ delayed()->nop(); // fill the SPARC branch delay slot
432 }
433 #ifdef ASSERT
434 // assert object can be fast path allocated
|