  transform_later(result_region);
  transform_later(result_phi_rawoop);
  transform_later(result_phi_rawmem);
  transform_later(result_phi_i_o);
  // This completes all paths into the result merge point
}


// Helper for PhaseMacroExpand::expand_allocate_common.
// Initializes the newly-allocated storage.
Node*
PhaseMacroExpand::initialize_object(AllocateNode* alloc,
                                    Node* control, Node* rawmem, Node* object,
                                    Node* klass_node, Node* length,
                                    Node* size_in_bytes) {
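  // (Sketch of the contract, inferred from the uses below: 'control' and
  // 'rawmem' are the control and raw-memory state to which the header stores
  // are appended, 'object' is the raw address of the fresh allocation,
  // 'length' is non-NULL only for arrays, and 'size_in_bytes' bounds the
  // clearing of the object body.)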
  InitializeNode* init = alloc->initialization();
  // Store the klass & mark bits
  Node* mark_node = NULL;
  // For now only enable fast locking for non-array types
  if (UseBiasedLocking && (length == NULL)) {
    mark_node = make_load(control, rawmem, klass_node, Klass::prototype_header_offset_in_bytes(), TypeRawPtr::BOTTOM, T_ADDRESS);
  } else {
    mark_node = makecon(TypeRawPtr::make((address)markOopDesc::prototype()));
  }
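  // With biased locking enabled, a fresh non-array object takes its initial
  // mark word from its klass's prototype header, which may already carry the
  // biasable pattern; otherwise the default unbiased prototype mark constant
  // is used.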
  rawmem = make_store(control, rawmem, object, oopDesc::mark_offset_in_bytes(), mark_node, T_ADDRESS);

  rawmem = make_store(control, rawmem, object, oopDesc::klass_offset_in_bytes(), klass_node, T_OBJECT);
  int header_size = alloc->minimum_header_size();  // conservatively small

  // Array length
  if (length != NULL) {         // Arrays need length field
    rawmem = make_store(control, rawmem, object, arrayOopDesc::length_offset_in_bytes(), length, T_INT);
    // conservatively small header size:
    header_size = arrayOopDesc::base_offset_in_bytes(T_BYTE);
    ciKlass* k = _igvn.type(klass_node)->is_klassptr()->klass();
    if (k->is_array_klass())    // we know the exact header size in most cases:
      header_size = Klass::layout_helper_header_size(k->layout_helper());
  }
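  // When the exact array klass is known, its layout helper encodes the real
  // header size, letting the body clearing below start right after the true
  // header rather than at the conservative T_BYTE base offset.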

  // Clear the object body, if necessary.
  if (init == NULL) {

// ... (file lines 1494-1940 elided; the excerpt resumes mid-statement in the
// biased-locking fast path that PhaseMacroExpand::expand_lock_node builds) ...

                         markOopDesc::biased_lock_pattern, true);
    // fast_lock_region->in(1) is set to slow path.
    fast_lock_mem_phi->init_req(1, mem);

    // Now check that the lock is biased to the current thread and has
    // the same epoch and bias as Klass::_prototype_header.

    // Special-case a fresh allocation to avoid building nodes:
    Node* klass_node = AllocateNode::Ideal_klass(obj, &_igvn);
    if (klass_node == NULL) {
      Node* k_adr = basic_plus_adr(obj, oopDesc::klass_offset_in_bytes());
      klass_node = transform_later( LoadKlassNode::make(_igvn, mem, k_adr, _igvn.type(k_adr)->is_ptr()) );
#ifdef _LP64
      if (UseCompressedOops && klass_node->is_DecodeN()) {
        assert(klass_node->in(1)->Opcode() == Op_LoadNKlass, "sanity");
        klass_node->in(1)->init_req(0, ctrl);
      } else
#endif
      klass_node->init_req(0, ctrl);
    }
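    // Setting the control input pins the klass load (or the narrow-klass
    // load feeding the DecodeN) at this point, so it cannot float above the
    // surrounding locking control flow.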
    Node* proto_node = make_load(ctrl, mem, klass_node, Klass::prototype_header_offset_in_bytes(), TypeX_X, TypeX_X->basic_type());

    Node* thread = transform_later(new (C, 1) ThreadLocalNode());
    Node* cast_thread = transform_later(new (C, 2) CastP2XNode(ctrl, thread));
    Node* o_node = transform_later(new (C, 3) OrXNode(cast_thread, proto_node));
    Node* x_node = transform_later(new (C, 3) XorXNode(o_node, mark_node));
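
    // In plain bit arithmetic, the graph built above computes
    //   x_node = (thread | proto) ^ mark
    // If the object is biased to the current thread with the prototype's
    // epoch, everything except the age bits cancels in the xor, so the tests
    // below only have to mask the age bits out.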

    // Get slow path - mark word does NOT match the value.
    Node* not_biased_ctrl = opt_bits_test(ctrl, region, 3, x_node,
                                          (~markOopDesc::age_mask_in_place), 0);
    // region->in(3) is set to fast path - the object is biased to the current thread.
    mem_phi->init_req(3, mem);

    // Mark word does NOT match the value (thread | Klass::_prototype_header).

    // First, check biased pattern.
    // Get fast path - _prototype_header has the same biased lock pattern.
    ctrl = opt_bits_test(not_biased_ctrl, fast_lock_region, 2, x_node,
                         markOopDesc::biased_lock_mask_in_place, 0, true);
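
    // For reference (paraphrasing the helper in macro.cpp): opt_bits_test
    // emits the test "(word & mask) != bits" and splits control on it;
    // region->in(edge) is wired to one outcome and the control projection of
    // the other outcome is returned, with the trailing bool flipping which
    // side is which.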