// G1 SATB pre-barrier emission (IdealKit "__" graph construction).
// Builds IR that, while concurrent marking is active, records the value
// about to be overwritten (pre_val) into the thread-local SATB queue.
// NOTE(review): fragment — the enclosing function and the closing braces of
// the if_then scopes opened below lie outside this excerpt.
//
// The runtime stores the queue's _active flag as either a 4-byte or 1-byte
// field; choose a load type that matches its true width.
3598 BasicType active_type = in_bytes(PtrQueue::byte_width_of_active()) == 4 ? T_INT : T_BYTE;
3599 assert(in_bytes(PtrQueue::byte_width_of_active()) == 4 || in_bytes(PtrQueue::byte_width_of_active()) == 1, "flag width");
3600
3601 // Offsets into the thread
// (The trailing "// 648"-style comments look like sample offsets for one
//  platform — TODO confirm; the in_bytes() expressions are authoritative.)
3602 const int marking_offset = in_bytes(JavaThread::satb_mark_queue_offset() + // 648
3603 PtrQueue::byte_offset_of_active());
3604 const int index_offset = in_bytes(JavaThread::satb_mark_queue_offset() + // 656
3605 PtrQueue::byte_offset_of_index());
3606 const int buffer_offset = in_bytes(JavaThread::satb_mark_queue_offset() + // 652
3607 PtrQueue::byte_offset_of_buf());
3608
3609 // Now the actual pointers into the thread
// Each address is tls + constant offset as a raw AddP; tls is presumably the
// current JavaThread* (offsets come from JavaThread::satb_mark_queue_offset()).
3610 Node* marking_adr = __ AddP(no_base, tls, __ ConX(marking_offset));
3611 Node* buffer_adr = __ AddP(no_base, tls, __ ConX(buffer_offset));
3612 Node* index_adr = __ AddP(no_base, tls, __ ConX(index_offset));
3613
3614 // Now some of the values
// Raw-memory (AliasIdxRaw) load of the "is SATB marking active?" flag.
3615 Node* marking = __ load(__ ctrl(), marking_adr, TypeInt::INT, active_type, Compile::AliasIdxRaw);
3616
3617 // if (!marking)
// Despite the comment above, the guarded body below runs when marking != 0,
// i.e. only while concurrent marking is in progress.
3618 __ if_then(marking, BoolTest::ne, zero); {
// _index is a size_t; load it at pointer width (TypeX_X), as the assert checks.
3619 BasicType index_bt = TypeX_X->basic_type();
3620 assert(sizeof(size_t) == type2aelembytes(index_bt), "Loading G1 PtrQueue::_index with wrong size.");
3621 Node* index = __ load(__ ctrl(), index_adr, TypeX_X, index_bt, Compile::AliasIdxRaw);
3622
3623 if (do_load) {
3624 // load original value
3625 // alias_idx correct??
// The pre-barrier needs the value currently stored at adr (the one being
// overwritten); callers that already hold it pass do_load == false.
3626 pre_val = __ load(__ ctrl(), adr, val_type, bt, alias_idx);
3627 }
3628
3629 // if (pre_val != NULL)
// A NULL previous value need not be enqueued.
3630 __ if_then(pre_val, BoolTest::ne, null()); {
3631 Node* buffer = __ load(__ ctrl(), buffer_adr, TypeRawPtr::NOTNULL, T_ADDRESS, Compile::AliasIdxRaw);
3632
3633 // is the queue for this thread full?
// index counts down toward zero (see the SubX below); nonzero means there is
// still room in the buffer, which is the likely case.
3634 __ if_then(index, BoolTest::ne, zeroX, likely); {
3635
3636 // decrement the index
// index appears to be a byte offset: one queue slot is sizeof(intptr_t) bytes.
3637 Node* next_index = _gvn.transform(new (C) SubXNode(index, __ ConX(sizeof(intptr_t))));
3638
|
// G1 SATB pre-barrier emission (IdealKit "__" graph construction) — revised
// copy of the region above; the only code change is the added `unlikely`
// hint on the marking test at 3618.
// NOTE(review): fragment — the enclosing function and the closing braces of
// the if_then scopes opened below lie outside this excerpt.
//
// The runtime stores the queue's _active flag as either a 4-byte or 1-byte
// field; choose a load type that matches its true width.
3598 BasicType active_type = in_bytes(PtrQueue::byte_width_of_active()) == 4 ? T_INT : T_BYTE;
3599 assert(in_bytes(PtrQueue::byte_width_of_active()) == 4 || in_bytes(PtrQueue::byte_width_of_active()) == 1, "flag width");
3600
3601 // Offsets into the thread
// (The trailing "// 648"-style comments look like sample offsets for one
//  platform — TODO confirm; the in_bytes() expressions are authoritative.)
3602 const int marking_offset = in_bytes(JavaThread::satb_mark_queue_offset() + // 648
3603 PtrQueue::byte_offset_of_active());
3604 const int index_offset = in_bytes(JavaThread::satb_mark_queue_offset() + // 656
3605 PtrQueue::byte_offset_of_index());
3606 const int buffer_offset = in_bytes(JavaThread::satb_mark_queue_offset() + // 652
3607 PtrQueue::byte_offset_of_buf());
3608
3609 // Now the actual pointers into the thread
// Each address is tls + constant offset as a raw AddP; tls is presumably the
// current JavaThread* (offsets come from JavaThread::satb_mark_queue_offset()).
3610 Node* marking_adr = __ AddP(no_base, tls, __ ConX(marking_offset));
3611 Node* buffer_adr = __ AddP(no_base, tls, __ ConX(buffer_offset));
3612 Node* index_adr = __ AddP(no_base, tls, __ ConX(index_offset));
3613
3614 // Now some of the values
// Raw-memory (AliasIdxRaw) load of the "is SATB marking active?" flag.
3615 Node* marking = __ load(__ ctrl(), marking_adr, TypeInt::INT, active_type, Compile::AliasIdxRaw);
3616
3617 // if (!marking)
// The guarded body runs when marking != 0; the branch is hinted `unlikely`
// because concurrent marking is inactive most of the time, keeping the
// no-barrier fast path as the predicted fall-through.
3618 __ if_then(marking, BoolTest::ne, zero, unlikely); {
// _index is a size_t; load it at pointer width (TypeX_X), as the assert checks.
3619 BasicType index_bt = TypeX_X->basic_type();
3620 assert(sizeof(size_t) == type2aelembytes(index_bt), "Loading G1 PtrQueue::_index with wrong size.");
3621 Node* index = __ load(__ ctrl(), index_adr, TypeX_X, index_bt, Compile::AliasIdxRaw);
3622
3623 if (do_load) {
3624 // load original value
3625 // alias_idx correct??
// The pre-barrier needs the value currently stored at adr (the one being
// overwritten); callers that already hold it pass do_load == false.
3626 pre_val = __ load(__ ctrl(), adr, val_type, bt, alias_idx);
3627 }
3628
3629 // if (pre_val != NULL)
// A NULL previous value need not be enqueued.
3630 __ if_then(pre_val, BoolTest::ne, null()); {
3631 Node* buffer = __ load(__ ctrl(), buffer_adr, TypeRawPtr::NOTNULL, T_ADDRESS, Compile::AliasIdxRaw);
3632
3633 // is the queue for this thread full?
// index counts down toward zero (see the SubX below); nonzero means there is
// still room in the buffer, which is the likely case.
3634 __ if_then(index, BoolTest::ne, zeroX, likely); {
3635
3636 // decrement the index
// index appears to be a byte offset: one queue slot is sizeof(intptr_t) bytes.
3637 Node* next_index = _gvn.transform(new (C) SubXNode(index, __ ConX(sizeof(intptr_t))));
3638
|