44 }
45
// Release the id table allocated via safe_Realloc in Add().  Only the
// array itself is freed; the AwtObject entries are not owned by this
// list and are presumably destroyed elsewhere -- TODO confirm.
AwtCmdIDList::~AwtCmdIDList()
{
    free(m_array);
}
50
51
52 // Build a new free list from a newly allocated memory. This only
53 // happens after malloc/realloc, and new free entries are contiguous
54 // from first_index to m_capacity-1
// Build a new free list from a newly allocated memory. This only
// happens after malloc/realloc, and new free entries are contiguous
// from first_index to m_capacity-1.  Precondition: the existing free
// list is empty (m_first_free == -1) and first_index < m_capacity.
INLINE void AwtCmdIDList::BuildFreeList(UINT first_index)
{
    DASSERT(m_first_free == -1);
    // Chain each fresh slot to its successor.
    for (UINT i = first_index; i < m_capacity-1; ++i)
        m_array[i].next_free_index = i+1;
    // Tail sentinel: -1 stored into a UINT field wraps to UINT_MAX -- the
    // value Add()/Lookup() treat as "no next entry".
    m_array[m_capacity-1].next_free_index = -1; // nil
    m_first_free = first_index; // head of the free list
}
63
64 // Assign an id to the object. Recycle the first free entry from the
65 // head of the free list or allocate more memory for a new free list.
66 UINT AwtCmdIDList::Add(AwtObject* obj)
67 {
68 CriticalSection::Lock l(m_lock);
69
70 if (m_first_free == -1) { // out of free ids
71 if (m_capacity == ARRAY_MAXIMUM_SIZE) {
72 // Really bad - out of ids. Since we hardly can have *so*
73 // many items simultaneously in existence, we have an id
74 // leak somewhere.
75 DASSERT(FALSE);
76 return 0;
77 }
78 else { // snarf a bigger arena
79 UINT old_capacity = m_capacity; // will be the first free entry
80 m_capacity += ARRAY_SIZE_INCREMENT;
81 if (m_capacity > ARRAY_MAXIMUM_SIZE)
82 m_capacity = ARRAY_MAXIMUM_SIZE;
83 m_array = (CmdIDEntry *)SAFE_SIZE_ARRAY_REALLOC(safe_Realloc, m_array,
84 m_capacity, sizeof(CmdIDEntry*));
85 BuildFreeList(old_capacity);
86 }
87 }
88
89 DASSERT(m_first_free != -1);
90 UINT newid = m_first_free; // use the entry from the head of the list
91 m_first_free = m_array[newid].next_free_index; // advance free pointer
92 m_array[newid].obj = obj;
93
94 return newid;
95 }
96
97 // Return the object associated with this id..
98 AwtObject* AwtCmdIDList::Lookup(UINT id)
99 {
100 CriticalSection::Lock l(m_lock);
101 DASSERT(id < m_capacity);
102 if (m_array[id].next_free_index <= ARRAY_MAXIMUM_SIZE) {
103 return NULL;
104 }
105 return m_array[id].obj;
106 }
|
44 }
45
// Free the id array (allocated/grown through safe_Realloc in Add()).
// The stored AwtObject pointers are not owned here and are not deleted.
AwtCmdIDList::~AwtCmdIDList()
{
    free(m_array);
}
50
51
52 // Build a new free list from a newly allocated memory. This only
53 // happens after malloc/realloc, and new free entries are contiguous
54 // from first_index to m_capacity-1
55 INLINE void AwtCmdIDList::BuildFreeList(UINT first_index)
56 {
57 DASSERT(m_first_free == -1);
58 for (UINT i = first_index; i < m_capacity-1; ++i)
59 m_array[i].next_free_index = i+1;
60 m_array[m_capacity-1].next_free_index = -1; // nil
61 m_first_free = first_index; // head of the free list
62 }
63
64
65 jboolean AwtCmdIDList::isFreeIDAvailable() {
66 CriticalSection::Lock l(m_lock);
67
68 if (m_first_free == -1) { // out of free ids
69 if (m_capacity == ARRAY_MAXIMUM_SIZE) {
70 return JNI_FALSE;
71 }
72 }
73 return JNI_TRUE;
74 }
75
76 // Assign an id to the object. Recycle the first free entry from the
77 // head of the free list or allocate more memory for a new free list.
78 UINT AwtCmdIDList::Add(AwtObject* obj)
79 {
80 CriticalSection::Lock l(m_lock);
81 if (!isFreeIDAvailable()) {
82 throw std::bad_alloc(); // fatal error
83 }
84
85 if (m_first_free == -1) { // out of free ids
86 // snarf a bigger arena
87 UINT old_capacity = m_capacity; // will be the first free entry
88 m_capacity += ARRAY_SIZE_INCREMENT;
89 if (m_capacity > ARRAY_MAXIMUM_SIZE)
90 m_capacity = ARRAY_MAXIMUM_SIZE;
91 m_array = (CmdIDEntry *)SAFE_SIZE_ARRAY_REALLOC(safe_Realloc, m_array,
92 m_capacity, sizeof(CmdIDEntry*));
93 BuildFreeList(old_capacity);
94 }
95
96 DASSERT(m_first_free != -1);
97 UINT newid = m_first_free; // use the entry from the head of the list
98 m_first_free = m_array[newid].next_free_index; // advance free pointer
99 m_array[newid].obj = obj;
100
101 return newid;
102 }
103
104 // Return the object associated with this id..
105 AwtObject* AwtCmdIDList::Lookup(UINT id)
106 {
107 CriticalSection::Lock l(m_lock);
108 DASSERT(id < m_capacity);
109 if (m_array[id].next_free_index <= ARRAY_MAXIMUM_SIZE) {
110 return NULL;
111 }
112 return m_array[id].obj;
113 }
|