express: make a thread safe weak pointer implementation (#321)

To access a WeakPointerTo in a thread-safe way, use something like this:

    if (auto ptr = weak_ptr.lock()) {
      // ...use ptr as a regular PointerTo
    }
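
The PointerTo returned by lock() holds a strong reference for as long as it stays in scope.  As a slightly fuller sketch of the pattern (Texture is just an arbitrary ReferenceCount subclass picked for illustration, and tex is assumed to be an existing PT(Texture)):

    // Taking a weak reference does not keep the object alive.
    WPT(Texture) weak_tex = tex;

    // Later, possibly from another thread:
    PT(Texture) strong_tex = weak_tex.lock();
    if (strong_tex != nullptr) {
      // The object cannot be destructed while strong_tex exists.
      strong_tex->set_name("still-alive");
    } else {
      // The object was already destructed (or the weak pointer was null).
    }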

The new implementation no longer needs the WeakReferenceList to store a reference to every outstanding weak pointer; a simple count of weak references is sufficient.  As a consequence, callbacks theoretically no longer require a WeakPointerTo to be constructed.
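
For reference, a minimal sketch of how the callback path looks under the new scheme; WeakPointerCallback, add_callback() and wp_callback() are the names used in this change, while the observer class itself is hypothetical:

    // Hypothetical observer that wants to know when a watched RenderState dies.
    class StateObserver : public WeakPointerCallback {
    public:
      void watch(const RenderState *state) {
        _state = state;
        // Registers on the shared WeakReferenceList; the callback fires
        // immediately if the object is already gone.
        _state.add_callback(this);
      }

      virtual void wp_callback(void *data) {
        // data is the raw pointer that was being watched; it may no longer be
        // dereferenced here.
        _state_gone = true;
      }

    private:
      WCPT(RenderState) _state;
      bool _state_gone = false;
    };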

The WeakPointerTo class is not actually atomic; it could be made so, but I don't believe it's worth it at this time.
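
Concretely, concurrent lock() calls through separate copies of a weak pointer are safe, but reading and assigning one and the same WeakPointerTo object from several threads still needs external synchronization.  A sketch of one way to do that, assuming Panda's LightMutex/LightMutexHolder (the cache itself is hypothetical):

    // Shared between threads; the weak pointer object itself is not atomic.
    static LightMutex cache_lock;
    static WPT(Texture) cached_tex;

    void store_in_cache(Texture *tex) {
      LightMutexHolder holder(cache_lock);
      cached_tex = tex;
    }

    PT(Texture) fetch_from_cache() {
      LightMutexHolder holder(cache_lock);
      return cached_tex.lock();
    }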
rdb 2018-05-15 13:34:42 +02:00
parent 47f7d3f297
commit 0bb81a43c9
35 changed files with 434 additions and 208 deletions

View File

@@ -30,10 +30,13 @@ dec(TVOLATILE AtomicAdjustDummyImpl::Integer &var) {
 /**
  * Atomically computes var += delta. It is legal for delta to be negative.
+ * Returns the result of the addition.
  */
-ALWAYS_INLINE void AtomicAdjustDummyImpl::
+ALWAYS_INLINE AtomicAdjustDummyImpl::Integer AtomicAdjustDummyImpl::
 add(TVOLATILE AtomicAdjustDummyImpl::Integer &var, AtomicAdjustDummyImpl::Integer delta) {
-  var += delta;
+  Integer new_value = var + delta;
+  var = new_value;
+  return new_value;
 }
 
 /**

View File

@@ -31,7 +31,7 @@ public:
   ALWAYS_INLINE static void inc(TVOLATILE Integer &var);
   ALWAYS_INLINE static bool dec(TVOLATILE Integer &var);
-  ALWAYS_INLINE static void add(TVOLATILE Integer &var, Integer delta);
+  ALWAYS_INLINE static Integer add(TVOLATILE Integer &var, Integer delta);
   ALWAYS_INLINE static Integer set(TVOLATILE Integer &var, Integer new_value);
   ALWAYS_INLINE static Integer get(const TVOLATILE Integer &var);

View File

@@ -30,11 +30,12 @@ dec(TVOLATILE AtomicAdjustGccImpl::Integer &var) {
 /**
  * Atomically computes var += delta. It is legal for delta to be negative.
+ * Returns the result of the addition.
  */
-INLINE void AtomicAdjustGccImpl::
+INLINE AtomicAdjustGccImpl::Integer AtomicAdjustGccImpl::
 add(TVOLATILE AtomicAdjustGccImpl::Integer &var,
     AtomicAdjustGccImpl::Integer delta) {
-  __atomic_fetch_add(&var, delta, __ATOMIC_SEQ_CST);
+  return __atomic_add_fetch(&var, delta, __ATOMIC_SEQ_CST);
 }
 
 /**

View File

@@ -35,7 +35,7 @@ public:
   INLINE static void inc(TVOLATILE Integer &var);
   INLINE static bool dec(TVOLATILE Integer &var);
-  INLINE static void add(TVOLATILE Integer &var, Integer delta);
+  INLINE static Integer add(TVOLATILE Integer &var, Integer delta);
   INLINE static Integer set(TVOLATILE Integer &var, Integer new_value);
   INLINE static Integer get(const TVOLATILE Integer &var);

View File

@@ -59,14 +59,18 @@ dec(TVOLATILE AtomicAdjustI386Impl::Integer &var) {
 /**
  * Atomically computes var += delta. It is legal for delta to be negative.
+ * Returns the result of the addition.
  */
-INLINE void AtomicAdjustI386Impl::
+INLINE AtomicAdjustI386Impl::Integer AtomicAdjustI386Impl::
 add(TVOLATILE AtomicAdjustI386Impl::Integer &var, AtomicAdjustI386Impl::Integer delta) {
   assert((((size_t)&var) & (sizeof(Integer) - 1)) == 0);
   Integer orig_value = var;
-  while (compare_and_exchange(var, orig_value, orig_value + delta) != orig_value) {
+  Integer new_value = orig_value + delta;
+  while (compare_and_exchange(var, orig_value, new_value) != orig_value) {
     orig_value = var;
+    new_value = orig_value + delta;
   }
+  return new_value;
 }
 
 /**

View File

@@ -34,7 +34,7 @@ public:
   INLINE static void inc(TVOLATILE Integer &var);
   INLINE static bool dec(TVOLATILE Integer &var);
-  INLINE static void add(TVOLATILE Integer &var, Integer delta);
+  INLINE static Integer add(TVOLATILE Integer &var, Integer delta);
   INLINE static Integer set(TVOLATILE Integer &var, Integer new_value);
   INLINE static Integer get(const TVOLATILE Integer &var);

View File

@@ -35,13 +35,16 @@ dec(TVOLATILE AtomicAdjustPosixImpl::Integer &var) {
 /**
  * Atomically computes var += delta. It is legal for delta to be negative.
+ * Returns the result of the addition.
  */
-INLINE void AtomicAdjustPosixImpl::
+INLINE AtomicAdjustPosixImpl::Integer AtomicAdjustPosixImpl::
 add(TVOLATILE AtomicAdjustPosixImpl::Integer &var,
     AtomicAdjustPosixImpl::Integer delta) {
   pthread_mutex_lock(&_mutex);
-  var += delta;
+  Integer new_value = var + delta;
+  var = new_value;
   pthread_mutex_unlock(&_mutex);
+  return new_value;
 }
 
 /**

View File

@@ -35,7 +35,7 @@ public:
   INLINE static void inc(TVOLATILE Integer &var);
   INLINE static bool dec(TVOLATILE Integer &var);
-  INLINE static void add(TVOLATILE Integer &var, Integer delta);
+  INLINE static Integer add(TVOLATILE Integer &var, Integer delta);
   INLINE static Integer set(TVOLATILE Integer &var, Integer new_value);
   INLINE static Integer get(const TVOLATILE Integer &var);

View File

@@ -40,17 +40,21 @@ dec(TVOLATILE AtomicAdjustWin32Impl::Integer &var) {
 /**
  * Atomically computes var += delta. It is legal for delta to be negative.
+ * Returns the result of the addition.
  */
-INLINE void AtomicAdjustWin32Impl::
+INLINE AtomicAdjustWin32Impl::Integer AtomicAdjustWin32Impl::
 add(TVOLATILE AtomicAdjustWin32Impl::Integer &var, AtomicAdjustWin32Impl::Integer delta) {
   assert((((size_t)&var) & (sizeof(Integer) - 1)) == 0);
 #ifdef _WIN64
-  InterlockedAdd64(&var, delta);
+  return InterlockedAdd64(&var, delta);
 #else
-  AtomicAdjustWin32Impl::Integer orig_value = var;
-  while (compare_and_exchange(var, orig_value, orig_value + delta) != orig_value) {
+  Integer orig_value = var;
+  Integer new_value = orig_value + delta;
+  while (compare_and_exchange(var, orig_value, new_value) != orig_value) {
     orig_value = var;
+    new_value = orig_value + delta;
   }
+  return new_value;
 #endif // _WIN64
 }

View File

@@ -44,7 +44,7 @@ public:
   ALWAYS_INLINE static void inc(TVOLATILE Integer &var);
   ALWAYS_INLINE static bool dec(TVOLATILE Integer &var);
-  INLINE static void add(TVOLATILE Integer &var, Integer delta);
+  INLINE static Integer add(TVOLATILE Integer &var, Integer delta);
   ALWAYS_INLINE static Integer set(TVOLATILE Integer &var, Integer new_value);
   ALWAYS_INLINE static Integer get(const TVOLATILE Integer &var);

View File

@@ -153,7 +153,7 @@ apply_transform(const TransformState *transform) {
     if ((*ati).first.is_valid_pointer() &&
         (*ati).second.is_valid_pointer()) {
       // Here's our cached result.
-      return (*ati).second.p();
+      return (*ati).second.lock();
     }
   }

View File

@@ -23,11 +23,7 @@ CharacterJointEffect() {
  * Returns the Character that will get update() called on it when this node's
  * relative transform is queried, or NULL if there is no such character.
  */
-INLINE Character *CharacterJointEffect::
+INLINE PT(Character) CharacterJointEffect::
 get_character() const {
-  if (_character.is_valid_pointer()) {
-    return _character;
-  } else {
-    return NULL;
-  }
+  return _character.lock();
 }

View File

@@ -89,8 +89,9 @@ safe_to_combine() const {
 void CharacterJointEffect::
 output(ostream &out) const {
   out << get_type();
-  if (_character.is_valid_pointer()) {
-    out << "(" << _character->get_name() << ")";
+  PT(Character) character = get_character();
+  if (character != nullptr) {
+    out << "(" << character->get_name() << ")";
   } else {
     out << "(**invalid**)";
   }
@@ -122,8 +123,8 @@ void CharacterJointEffect::
 cull_callback(CullTraverser *trav, CullTraverserData &data,
               CPT(TransformState) &node_transform,
               CPT(RenderState) &) const {
-  if (_character.is_valid_pointer()) {
-    _character->update();
+  if (auto character = _character.lock()) {
+    character->update();
   }
   node_transform = data.node()->get_transform();
 }
@@ -150,8 +151,8 @@ void CharacterJointEffect::
 adjust_transform(CPT(TransformState) &net_transform,
                  CPT(TransformState) &node_transform,
                  const PandaNode *node) const {
-  if (_character.is_valid_pointer()) {
-    _character->update();
+  if (auto character = _character.lock()) {
+    character->update();
   }
   node_transform = node->get_transform();
 }
@@ -202,11 +203,8 @@ void CharacterJointEffect::
 write_datagram(BamWriter *manager, Datagram &dg) {
   RenderEffect::write_datagram(manager, dg);
 
-  if (_character.is_valid_pointer()) {
-    manager->write_pointer(dg, _character);
-  } else {
-    manager->write_pointer(dg, NULL);
-  }
+  PT(Character) character = get_character();
+  manager->write_pointer(dg, character);
 }
 
 /**

View File

@@ -38,7 +38,7 @@ private:
 PUBLISHED:
   static CPT(RenderEffect) make(Character *character);
 
-  INLINE Character *get_character() const;
+  INLINE PT(Character) get_character() const;
 
 public:
   virtual bool safe_to_transform() const;

View File

@@ -112,9 +112,9 @@ ReferenceCount::
   nassertv(_ref_count == 0 || _ref_count == local_ref_count);
 
   // Tell our weak reference holders that we're going away now.
-  if (_weak_list != (WeakReferenceList *)NULL) {
-    delete (WeakReferenceList *)_weak_list;
-    _weak_list = (WeakReferenceList *)NULL;
+  if (_weak_list != nullptr) {
+    ((WeakReferenceList *)_weak_list)->mark_deleted();
+    _weak_list = nullptr;
   }
 
 #ifndef NDEBUG
@@ -253,6 +253,9 @@ has_weak_list() const {
  * Returns the WeakReferenceList associated with this ReferenceCount object.
  * If there has never been a WeakReferenceList associated with this object,
  * creates one now.
+ *
+ * The returned object will be deleted automatically when all weak and strong
+ * references to the object have gone.
  */
 INLINE WeakReferenceList *ReferenceCount::
 get_weak_list() const {
@@ -264,14 +267,21 @@ get_weak_list() const {
 /**
  * Adds the indicated PointerToVoid as a weak reference to this object.
+ * Returns an object that will persist as long as any reference (strong or
+ * weak) exists, for calling unref() or checking whether the object still
+ * exists.
  */
-INLINE void ReferenceCount::
-weak_ref(WeakPointerToVoid *ptv) {
+INLINE WeakReferenceList *ReferenceCount::
+weak_ref() {
   TAU_PROFILE("void ReferenceCount::weak_ref()", " ", TAU_USER);
 #ifdef _DEBUG
-  nassertv(test_ref_count_integrity());
+  nassertr(test_ref_count_integrity(), nullptr);
+#else
+  nassertr(_ref_count != deleted_ref_count, nullptr);
 #endif
 
-  get_weak_list()->add_reference(ptv);
+  WeakReferenceList *weak_ref = get_weak_list();
+  weak_ref->ref();
+  return weak_ref;
 }
 
 /**
@@ -279,13 +289,15 @@ weak_ref(WeakPointerToVoid *ptv) {
  * must have previously been added via a call to weak_ref().
  */
 INLINE void ReferenceCount::
-weak_unref(WeakPointerToVoid *ptv) {
+weak_unref() {
   TAU_PROFILE("void ReferenceCount::weak_unref()", " ", TAU_USER);
 #ifdef _DEBUG
   nassertv(test_ref_count_integrity());
 #endif
-  nassertv(has_weak_list());
-  ((WeakReferenceList *)_weak_list)->clear_reference(ptv);
+  WeakReferenceList *weak_list = (WeakReferenceList *)_weak_list;
+  nassertv(weak_list != nullptr);
+  bool nonzero = weak_list->unref();
+  nassertv(nonzero);
 }
 
 /**

View File

@@ -60,8 +60,8 @@ public:
   INLINE bool has_weak_list() const;
   INLINE WeakReferenceList *get_weak_list() const;
 
-  INLINE void weak_ref(WeakPointerToVoid *ptv);
-  INLINE void weak_unref(WeakPointerToVoid *ptv);
+  INLINE WeakReferenceList *weak_ref();
+  INLINE void weak_unref();
 
 protected:
   bool do_test_ref_count_integrity() const;

View File

@@ -72,6 +72,26 @@ operator T * () const {
   return (To *)WeakPointerToBase<T>::_void_ptr;
 }
 
+/**
+ * A thread-safe way to access the underlying pointer; will silently return
+ * null if the underlying pointer was deleted or null.
+ */
+template<class T>
+INLINE PointerTo<T> WeakPointerTo<T>::
+lock() const {
+  WeakReferenceList *weak_ref = this->_weak_ref;
+  if (weak_ref != nullptr) {
+    PointerTo<T> ptr;
+    weak_ref->_lock.acquire();
+    if (!weak_ref->was_deleted()) {
+      ptr = (To *)WeakPointerToBase<T>::_void_ptr;
+    }
+    weak_ref->_lock.release();
+    return ptr;
+  }
+  return nullptr;
+}
+
 /**
  * Returns an ordinary pointer instead of a WeakPointerTo. Useful to work
  * around compiler problems, particularly for implicit upcasts.
@@ -207,6 +227,26 @@ operator const T * () const {
   return (To *)WeakPointerToBase<T>::_void_ptr;
 }
 
+/**
+ * A thread-safe way to access the underlying pointer; will silently return
+ * null if the underlying pointer was deleted or null.
+ */
+template<class T>
+INLINE ConstPointerTo<T> WeakConstPointerTo<T>::
+lock() const {
+  WeakReferenceList *weak_ref = this->_weak_ref;
+  if (weak_ref != nullptr) {
+    ConstPointerTo<T> ptr;
+    weak_ref->_lock.acquire();
+    if (!weak_ref->was_deleted()) {
+      ptr = (const To *)WeakPointerToBase<T>::_void_ptr;
+    }
+    weak_ref->_lock.release();
+    return ptr;
+  }
+  return nullptr;
+}
+
 /**
  * Returns an ordinary pointer instead of a WeakConstPointerTo. Useful to
  * work around compiler problems, particularly for implicit upcasts.

View File

@@ -41,6 +41,7 @@
   INLINE operator T *() const;
 
 PUBLISHED:
+  INLINE PointerTo<T> lock() const;
   INLINE To *p() const;
   INLINE To *get_orig() const;
 
@@ -77,6 +78,7 @@
   INLINE operator const T *() const;
 
 PUBLISHED:
+  INLINE ConstPointerTo<T> lock() const;
   INLINE const To *p() const;
   INLINE const To *get_orig() const;

View File

@@ -17,7 +17,13 @@
 template<class T>
 INLINE WeakPointerToBase<T>::
 WeakPointerToBase(To *ptr) {
-  reassign(ptr);
+  _void_ptr = (To *)ptr;
+  if (ptr != nullptr) {
+    _weak_ref = ptr->weak_ref();
+#ifdef DO_MEMORY_USAGE
+    update_type(ptr);
+#endif
+  }
 }
 
 /**
@@ -26,7 +32,13 @@ WeakPointerToBase(To *ptr) {
 template<class T>
 INLINE WeakPointerToBase<T>::
 WeakPointerToBase(const PointerToBase<T> &copy) {
-  reassign(copy);
+  // This double-casting is a bit of a cheat to get around the inheritance
+  // issue--it's difficult to declare a template class to be a friend.
+  To *ptr = (To *)((const WeakPointerToBase<To> *)&copy)->_void_ptr;
+  _void_ptr = ptr;
+  if (ptr != nullptr) {
+    _weak_ref = ptr->weak_ref();
+  }
 }
 
 /**
@@ -36,11 +48,35 @@ template<class T>
 INLINE WeakPointerToBase<T>::
 WeakPointerToBase(const WeakPointerToBase<T> &copy) {
   _void_ptr = copy._void_ptr;
-  _ptr_was_deleted = copy._ptr_was_deleted;
-  if (is_valid_pointer()) {
-    To *ptr = (To *)_void_ptr;
-    ptr->weak_ref(this);
+
+  // Don't bother increasing the weak reference count if the object was
+  // already deleted.
+  WeakReferenceList *weak_ref = copy._weak_ref;
+  if (weak_ref != nullptr && !weak_ref->was_deleted()) {
+    _weak_ref = copy._weak_ref;
+    _weak_ref->ref();
+  }
+}
+
+/**
+ *
+ */
+template<class T>
+INLINE WeakPointerToBase<T>::
+WeakPointerToBase(WeakPointerToBase<T> &&from) NOEXCEPT {
+  // Protect against self-move-assignment.
+  if (from._void_ptr != this->_void_ptr) {
+    WeakReferenceList *old_ref = (To *)this->_weak_ref;
+    this->_void_ptr = from._void_ptr;
+    this->_weak_ref = from._weak_ref;
+    from._void_ptr = nullptr;
+    from._weak_ref = nullptr;
+
+    // Now delete the old pointer.
+    if (old_ref != nullptr && !old_ref->unref()) {
+      delete old_ref;
+    }
   }
 }
 
@@ -50,7 +86,10 @@ WeakPointerToBase(const WeakPointerToBase<T> &copy) {
 template<class T>
 INLINE WeakPointerToBase<T>::
 ~WeakPointerToBase() {
-  reassign((To *)NULL);
+  WeakReferenceList *old_ref = (WeakReferenceList *)_weak_ref;
+  if (old_ref != nullptr && !old_ref->unref()) {
+    delete old_ref;
+  }
 }
 
 /**
@@ -60,34 +99,23 @@ INLINE WeakPointerToBase<T>::
 template<class T>
 void WeakPointerToBase<T>::
 reassign(To *ptr) {
-  if (ptr != (To *)_void_ptr || _ptr_was_deleted) {
-    To *old_ptr = (To *)_void_ptr;
+  if (ptr != (To *)_void_ptr) {
+    WeakReferenceList *old_ref = (WeakReferenceList *)_weak_ref;
 
     _void_ptr = (void *)ptr;
-    if (ptr != (To *)NULL) {
-      ptr->weak_ref(this);
+    if (ptr != nullptr) {
+      _weak_ref = ptr->weak_ref();
 #ifdef DO_MEMORY_USAGE
-      if (MemoryUsage::get_track_memory_usage()) {
-        // Make sure the MemoryUsage record knows what the TypeHandle is, if
-        // we know it ourselves.
-        TypeHandle type = get_type_handle(To);
-        if (type == TypeHandle::none()) {
-          do_init_type(To);
-          type = get_type_handle(To);
-        }
-        if (type != TypeHandle::none()) {
-          MemoryUsage::update_type(ptr, type);
-        }
-      }
+      update_type(ptr);
 #endif
+    } else {
+      _weak_ref = nullptr;
     }
 
     // Now remove the old reference.
-    if (old_ptr != (To *)NULL && !_ptr_was_deleted) {
-      old_ptr->weak_unref(this);
+    if (old_ref != nullptr && !old_ref->unref()) {
+      delete old_ref;
     }
-    _ptr_was_deleted = false;
   }
 }
@@ -108,8 +136,69 @@ reassign(const PointerToBase<To> &copy) {
 template<class T>
 INLINE void WeakPointerToBase<T>::
 reassign(const WeakPointerToBase<To> &copy) {
-  nassertv(!copy.was_deleted());
-  reassign((To *)copy._void_ptr);
+  void *new_ptr = copy._void_ptr;
+  if (new_ptr != _void_ptr) {
+    WeakReferenceList *old_ref = (WeakReferenceList *)_weak_ref;
+    _void_ptr = new_ptr;
+
+    // Don't bother increasing the weak reference count if the object was
+    // already deleted.
+    WeakReferenceList *weak_ref = copy._weak_ref;
+    if (weak_ref != nullptr && !weak_ref->was_deleted()) {
+      weak_ref->ref();
+      _weak_ref = weak_ref;
+    } else {
+      _weak_ref = nullptr;
+    }
+
+    // Now remove the old reference.
+    if (old_ref != nullptr && !old_ref->unref()) {
+      delete old_ref;
+    }
+  }
+}
+
+/**
+ *
+ */
+template<class T>
+INLINE void WeakPointerToBase<T>::
+reassign(WeakPointerToBase<To> &&from) NOEXCEPT {
+  // Protect against self-move-assignment.
+  if (from._void_ptr != this->_void_ptr) {
+    WeakReferenceList *old_ref = (WeakReferenceList *)this->_weak_ref;
+    this->_void_ptr = from._void_ptr;
+    this->_weak_ref = from._weak_ref;
+    from._void_ptr = nullptr;
+    from._weak_ref = nullptr;
+
+    // Now delete the old pointer.
+    if (old_ref != nullptr && !old_ref->unref()) {
+      delete old_ref;
+    }
+  }
+}
+
+/**
+ * Ensures that the MemoryUsage record for the pointer has the right type of
+ * object, if we know the type ourselves.
+ */
+template<class T>
+INLINE void WeakPointerToBase<T>::
+update_type(To *ptr) {
+#ifdef DO_MEMORY_USAGE
+  if (MemoryUsage::get_track_memory_usage()) {
+    TypeHandle type = get_type_handle(To);
+    if (type == TypeHandle::none()) {
+      do_init_type(To);
+      type = get_type_handle(To);
+    }
+    if (type != TypeHandle::none()) {
+      MemoryUsage::update_type(ptr, type);
+    }
+  }
+#endif // DO_MEMORY_USAGE
 }
 
 #ifndef CPPPARSER
@@ -323,8 +412,6 @@ operator < (const PointerToBase<To> &other) const {
 #endif // CPPPARSER
 
 /**
  * A convenient way to set the PointerTo object to NULL. (Assignment to a NULL
  * pointer also works, of course.)
@@ -332,7 +419,14 @@ operator < (const PointerToBase<To> &other) const {
 template<class T>
 INLINE void WeakPointerToBase<T>::
 clear() {
-  reassign((To *)NULL);
+  WeakReferenceList *old_ref = (WeakReferenceList *)_weak_ref;
+  _void_ptr = nullptr;
+  _weak_ref = nullptr;
+
+  // Now remove the old reference.
+  if (old_ref != nullptr && !old_ref->unref()) {
+    delete old_ref;
+  }
 }
 
 /**
@@ -346,7 +440,9 @@ clear() {
 template<class T>
 INLINE void WeakPointerToBase<T>::
 refresh() const {
-  ((WeakPointerToBase<T> *)this)->reassign((To *)_void_ptr);
+  if (_void_ptr != nullptr) {
+    ((WeakPointerToBase<T> *)this)->reassign((To *)_void_ptr);
+  }
 }
 
 /**

View File

@@ -31,11 +31,15 @@ protected:
   INLINE WeakPointerToBase(To *ptr);
   INLINE WeakPointerToBase(const PointerToBase<T> &copy);
   INLINE WeakPointerToBase(const WeakPointerToBase<T> &copy);
+  INLINE WeakPointerToBase(WeakPointerToBase<T> &&from) NOEXCEPT;
   INLINE ~WeakPointerToBase();
 
   void reassign(To *ptr);
   INLINE void reassign(const PointerToBase<To> &copy);
   INLINE void reassign(const WeakPointerToBase<To> &copy);
+  INLINE void reassign(WeakPointerToBase<To> &&from) NOEXCEPT;
+
+  INLINE void update_type(To *ptr);
 
   // No assignment or retrieval functions are declared in WeakPointerToBase,
   // because we will have to specialize on const vs. non-const later.

View File

@@ -15,46 +15,34 @@
  *
  */
 INLINE WeakPointerToVoid::
-WeakPointerToVoid() :
-  _ptr_was_deleted(false),
-  _callback(NULL) {
+WeakPointerToVoid() : _weak_ref(nullptr) {
 }
 
 /**
- * This is intended only to be called by the WeakPointerList destructor. It
- * indicates that the object that we were pointing to has just been deleted.
- */
-INLINE void WeakPointerToVoid::
-mark_deleted() {
-  nassertv(!_ptr_was_deleted);
-  _ptr_was_deleted = true;
-  if (_callback != (WeakPointerCallback *)NULL) {
-    _callback->wp_callback(_void_ptr);
-  }
-}
-
-/**
- * Sets a callback that will be made when the pointer is deleted. If a
- * previous callback has already been set, it will be replaced.
+ * Sets a callback that will be made when the pointer is deleted. Does
+ * nothing if this is a null pointer.
  *
  * If the pointer has already been deleted, the callback will be made
  * immediately.
  */
 INLINE void WeakPointerToVoid::
-set_callback(WeakPointerCallback *callback) {
-  _callback = callback;
-  if (_ptr_was_deleted && _callback != (WeakPointerCallback *)NULL) {
-    _callback->wp_callback(_void_ptr);
+add_callback(WeakPointerCallback *callback) const {
+  if (_weak_ref != nullptr && !_weak_ref->was_deleted()) {
+    _weak_ref->add_callback(callback, _void_ptr);
+  } else if (_void_ptr != nullptr) {
+    callback->wp_callback(_void_ptr);
+    _weak_ref = nullptr;
   }
 }
 
 /**
- * Returns the callback that will be made when the pointer is deleted, or NULL
- * if no callback has been set.
+ * Removes a previously added callback.
  */
-INLINE WeakPointerCallback *WeakPointerToVoid::
-get_callback() const {
-  return _callback;
+INLINE void WeakPointerToVoid::
+remove_callback(WeakPointerCallback *callback) const {
+  if (_weak_ref != nullptr) {
+    _weak_ref->remove_callback(callback);
+  }
 }
 
 /**
@@ -63,7 +51,7 @@ get_callback() const {
  */
 INLINE bool WeakPointerToVoid::
 was_deleted() const {
-  return _ptr_was_deleted;
+  return _void_ptr != nullptr && (_weak_ref == nullptr || _weak_ref->was_deleted());
 }
 
 /**
@@ -72,5 +60,5 @@ was_deleted() const {
  */
 INLINE bool WeakPointerToVoid::
 is_valid_pointer() const {
-  return (_void_ptr != (void *)NULL) && !_ptr_was_deleted;
+  return _weak_ref != nullptr && !_weak_ref->was_deleted();
 }

View File

@@ -27,18 +27,15 @@ protected:
   INLINE WeakPointerToVoid();
 
 public:
-  INLINE void mark_deleted();
-
-  INLINE void set_callback(WeakPointerCallback *callback);
-  INLINE WeakPointerCallback *get_callback() const;
+  INLINE void add_callback(WeakPointerCallback *callback) const;
+  INLINE void remove_callback(WeakPointerCallback *callback) const;
 
 PUBLISHED:
   INLINE bool was_deleted() const;
   INLINE bool is_valid_pointer() const;
 
 protected:
-  bool _ptr_was_deleted;
-  WeakPointerCallback *_callback;
+  mutable WeakReferenceList *_weak_ref;
 };
 
 #include "weakPointerToVoid.I"

View File

@@ -10,3 +10,30 @@
  * @author drose
  * @date 2004-09-27
  */
+
+/**
+ * Increases the number of weak references.
+ */
+INLINE void WeakReferenceList::
+ref() const {
+  AtomicAdjust::inc(_count);
+}
+
+/**
+ * Decreases the number of weak references. Returns true if, after this,
+ * there are still any weak or strong references remaining, or false if this
+ * structure should be deleted right away.
+ */
+INLINE bool WeakReferenceList::
+unref() const {
+  return AtomicAdjust::dec(_count);
+}
+
+/**
+ * Returns true if the object represented has been deleted, ie. there are only
+ * weak references left pointing to the object.
+ */
+INLINE bool WeakReferenceList::
+was_deleted() const {
+  return AtomicAdjust::get(_count) < _alive_offset;
+}

View File

@@ -19,7 +19,7 @@
  *
  */
 WeakReferenceList::
-WeakReferenceList() {
+WeakReferenceList() : _count(_alive_offset) {
 }
 
 /**
@@ -27,30 +27,33 @@ WeakReferenceList() {
  */
 WeakReferenceList::
 ~WeakReferenceList() {
-  _lock.acquire();
-  Pointers::iterator pi;
-  for (pi = _pointers.begin(); pi != _pointers.end(); ++pi) {
-    (*pi)->mark_deleted();
-  }
-  _lock.release();
+  nassertv(_count == 0);
 }
 
 /**
- * Intended to be called only by WeakPointerTo (or by any class implementing a
- * weak reference-counting pointer), this adds the indicated PointerToVoid
- * structure to the list of such structures that are maintaining a weak
- * pointer to this object.
+ * Adds the callback to the list of callbacks that will be called when the
+ * underlying pointer is deleted. If it has already been deleted, it will
+ * be called immediately.
  *
- * When the WeakReferenceList destructs (presumably because its owning object
- * destructs), the pointer within the PointerToVoid object will be set to
- * NULL.
+ * The data pointer can be an arbitrary pointer and is passed as only argument
+ * to the callback.
  */
 void WeakReferenceList::
-add_reference(WeakPointerToVoid *ptv) {
+add_callback(WeakPointerCallback *callback, void *data) {
+  nassertv(callback != nullptr);
   _lock.acquire();
-  bool inserted = _pointers.insert(ptv).second;
+  // We need to check again whether the object is deleted after grabbing the
+  // lock, despite having already done this in weakPointerTo.I, since it may
+  // have been deleted in the meantime.
+  bool deleted = was_deleted();
+  if (!deleted) {
+    _callbacks.insert(make_pair(callback, data));
+  }
   _lock.release();
-  nassertv(inserted);
+
+  if (deleted) {
+    callback->wp_callback(data);
+  }
 }
 
 /**
@@ -60,13 +63,33 @@ add_reference(WeakPointerToVoid *ptv) {
  * pointer to this object.
  */
 void WeakReferenceList::
-clear_reference(WeakPointerToVoid *ptv) {
+remove_callback(WeakPointerCallback *callback) {
+  nassertv(callback != nullptr);
   _lock.acquire();
-  Pointers::iterator pi = _pointers.find(ptv);
-  bool valid = (pi != _pointers.end());
-  if (valid) {
-    _pointers.erase(pi);
-  }
+  _callbacks.erase(callback);
   _lock.release();
-  nassertv(valid);
+}
+
+/**
+ * Called only by the ReferenceCount pointer to indicate that it has been
+ * deleted.
+ */
+void WeakReferenceList::
+mark_deleted() {
+  _lock.acquire();
+  Callbacks::iterator ci;
+  for (ci = _callbacks.begin(); ci != _callbacks.end(); ++ci) {
+    (*ci).first->wp_callback((*ci).second);
+  }
+  _callbacks.clear();
+
+  // Decrement the special offset added to the weak pointer count to indicate
+  // that it can be deleted when all the weak references have gone.
+  AtomicAdjust::Integer result = AtomicAdjust::add(_count, -_alive_offset);
+  _lock.release();
+  if (result == 0) {
+    // There are no weak references remaining either, so delete this.
+    delete this;
+  }
+  nassertv(result >= 0);
 }

View File

@@ -15,30 +15,48 @@
 #define WEAKREFERENCELIST_H
 
 #include "pandabase.h"
-#include "pset.h"
+#include "pmap.h"
 #include "mutexImpl.h"
 
-class WeakPointerToVoid;
+class WeakPointerCallback;
 
 /**
- * This is a list of WeakPointerTo's that share a reference to a given
- * ReferenceCount object. It is stored in a separate class since it is
- * assumed that most ReferenceCount objects do not need to store this list at
- * all; this avoids bloating every ReferenceCount object in the world with the
- * size of this object.
+ * This is an object shared by all the weak pointers that point to the same
+ * ReferenceCount object. It is created whenever a weak reference to an
+ * object is created, and can outlive the object until all weak references
+ * have disappeared.
 */
 class EXPCL_PANDAEXPRESS WeakReferenceList {
 public:
   WeakReferenceList();
   ~WeakReferenceList();
 
-  void add_reference(WeakPointerToVoid *ptv);
-  void clear_reference(WeakPointerToVoid *ptv);
+  INLINE void ref() const;
+  INLINE bool unref() const;
+  INLINE bool was_deleted() const;
+
+  void add_callback(WeakPointerCallback *callback, void *data);
+  void remove_callback(WeakPointerCallback *callback);
 
 private:
-  typedef pset<WeakPointerToVoid *> Pointers;
-  Pointers _pointers;
+  void mark_deleted();
+
+public:
+  // This lock protects the callbacks below, but it also protects the object
+  // from being deleted during a call to WeakPointerTo::lock().
   MutexImpl _lock;
+
+private:
+  typedef pmap<WeakPointerCallback *, void *> Callbacks;
+  Callbacks _callbacks;
+
+  // This has a very large number added to it if the object is still alive.
+  // It could be 1, but having it be a large number makes it easy to check
+  // whether the object has been deleted or not.
+  static const AtomicAdjust::Integer _alive_offset = (1 << 30);
+  mutable AtomicAdjust::Integer _count;
+
+  friend class ReferenceCount;
 };
 
 #include "weakReferenceList.I"

View File

@@ -436,42 +436,43 @@ set_state_and_transform(const RenderState *target_rs,
     altered |= Shader::SSD_projection;
   }
 
-  if (_state_rs.was_deleted() || _state_rs == (const RenderState *)NULL) {
+  CPT(RenderState) state_rs = _state_rs.lock();
+  if (state_rs == nullptr) {
     // Reset all of the state.
     altered |= Shader::SSD_general;
     _state_rs = target_rs;
 
-  } else if (_state_rs != target_rs) {
+  } else if (state_rs != target_rs) {
     // The state has changed since last time.
-    if (_state_rs->get_attrib(ColorAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(ColorAttrib::get_class_slot()) !=
         target_rs->get_attrib(ColorAttrib::get_class_slot())) {
      altered |= Shader::SSD_color;
    }
-    if (_state_rs->get_attrib(ColorScaleAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(ColorScaleAttrib::get_class_slot()) !=
        target_rs->get_attrib(ColorScaleAttrib::get_class_slot())) {
      altered |= Shader::SSD_colorscale;
    }
-    if (_state_rs->get_attrib(MaterialAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(MaterialAttrib::get_class_slot()) !=
        target_rs->get_attrib(MaterialAttrib::get_class_slot())) {
      altered |= Shader::SSD_material;
    }
-    if (_state_rs->get_attrib(ShaderAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(ShaderAttrib::get_class_slot()) !=
        target_rs->get_attrib(ShaderAttrib::get_class_slot())) {
      altered |= Shader::SSD_shaderinputs;
    }
-    if (_state_rs->get_attrib(FogAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(FogAttrib::get_class_slot()) !=
        target_rs->get_attrib(FogAttrib::get_class_slot())) {
      altered |= Shader::SSD_fog;
    }
-    if (_state_rs->get_attrib(LightAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(LightAttrib::get_class_slot()) !=
        target_rs->get_attrib(LightAttrib::get_class_slot())) {
      altered |= Shader::SSD_light;
    }
-    if (_state_rs->get_attrib(ClipPlaneAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(ClipPlaneAttrib::get_class_slot()) !=
        target_rs->get_attrib(ClipPlaneAttrib::get_class_slot())) {
      altered |= Shader::SSD_clip_planes;
    }
-    if (_state_rs->get_attrib(TexMatrixAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(TexMatrixAttrib::get_class_slot()) !=
        target_rs->get_attrib(TexMatrixAttrib::get_class_slot())) {
      altered |= Shader::SSD_tex_matrix;
    }

View File

@@ -39,8 +39,8 @@ CLP(GeomMunger)(GraphicsStateGuardian *gsg, const RenderState *state) :
     // TexGen object gets deleted.
     _texture = (const TextureAttrib *)state->get_attrib(TextureAttrib::get_class_slot());
     _tex_gen = (const TexGenAttrib *)state->get_attrib(TexGenAttrib::get_class_slot());
 
-    _texture.set_callback(this);
-    _tex_gen.set_callback(this);
+    _texture.add_callback(this);
+    _tex_gen.add_callback(this);
   }
 }
@@ -56,6 +56,11 @@ CLP(GeomMunger)::
     (*gci)->remove_munger(this);
   }
   _geom_contexts.clear();
+
+  if ((_flags & F_parallel_arrays) == 0) {
+    _texture.remove_callback(this);
+    _tex_gen.remove_callback(this);
+  }
 }
 
 /**
@@ -220,15 +225,15 @@ munge_format_impl(const GeomVertexFormat *orig,
   }
 
   // Put only the used texture coordinates into the interleaved array.
-  if (_texture != (TextureAttrib *)NULL) {
+  if (auto texture = _texture.lock()) {
     typedef pset<const InternalName *> UsedStages;
     UsedStages used_stages;
 
-    int num_stages = _texture->get_num_on_stages();
+    int num_stages = texture->get_num_on_stages();
     for (int i = 0; i < num_stages; ++i) {
-      TextureStage *stage = _texture->get_on_stage(i);
-      if (_tex_gen == (TexGenAttrib *)NULL ||
-          !_tex_gen->has_stage(stage)) {
+      TextureStage *stage = texture->get_on_stage(i);
+      CPT(TexGenAttrib) tex_gen = _tex_gen.lock();
+      if (tex_gen == nullptr || !tex_gen->has_stage(stage)) {
         InternalName *name = stage->get_texcoord_name();
         if (used_stages.insert(name).second) {
           // This is the first time we've encountered this texcoord name.
@@ -360,15 +365,15 @@ premunge_format_impl(const GeomVertexFormat *orig) {
   // Put only the used texture coordinates into the interleaved array. The
   // others will be kept around, but in a parallel array.
-  if (_texture != (TextureAttrib *)NULL) {
+  if (auto texture = _texture.lock()) {
     typedef pset<const InternalName *> UsedStages;
     UsedStages used_stages;
 
-    int num_stages = _texture->get_num_on_stages();
+    int num_stages = texture->get_num_on_stages();
     for (int i = 0; i < num_stages; ++i) {
-      TextureStage *stage = _texture->get_on_stage(i);
-      if (_tex_gen == (TexGenAttrib *)NULL ||
-          !_tex_gen->has_stage(stage)) {
+      TextureStage *stage = texture->get_on_stage(i);
+      CPT(TexGenAttrib) tex_gen = _tex_gen.lock();
+      if (tex_gen == nullptr || !tex_gen->has_stage(stage)) {
        InternalName *name = stage->get_texcoord_name();
        if (used_stages.insert(name).second) {
          // This is the first time we've encountered this texcoord name.

View File

@@ -1899,46 +1899,47 @@ set_state_and_transform(const RenderState *target_rs,
     altered |= Shader::SSD_projection;
   }
 
-  if (_state_rs.was_deleted() || _state_rs == (const RenderState *)NULL) {
+  CPT(RenderState) state_rs = _state_rs.lock();
+  if (state_rs == nullptr) {
     // Reset all of the state.
     altered |= Shader::SSD_general;
     _state_rs = target_rs;
 
-  } else if (_state_rs != target_rs) {
+  } else if (state_rs != target_rs) {
    // The state has changed since last time.
-    if (_state_rs->get_attrib(ColorAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(ColorAttrib::get_class_slot()) !=
        target_rs->get_attrib(ColorAttrib::get_class_slot())) {
      altered |= Shader::SSD_color;
    }
-    if (_state_rs->get_attrib(ColorScaleAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(ColorScaleAttrib::get_class_slot()) !=
        target_rs->get_attrib(ColorScaleAttrib::get_class_slot())) {
      altered |= Shader::SSD_colorscale;
    }
-    if (_state_rs->get_attrib(MaterialAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(MaterialAttrib::get_class_slot()) !=
        target_rs->get_attrib(MaterialAttrib::get_class_slot())) {
      altered |= Shader::SSD_material;
    }
-    if (_state_rs->get_attrib(ShaderAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(ShaderAttrib::get_class_slot()) !=
        target_rs->get_attrib(ShaderAttrib::get_class_slot())) {
      altered |= Shader::SSD_shaderinputs;
    }
-    if (_state_rs->get_attrib(FogAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(FogAttrib::get_class_slot()) !=
        target_rs->get_attrib(FogAttrib::get_class_slot())) {
      altered |= Shader::SSD_fog;
    }
-    if (_state_rs->get_attrib(LightAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(LightAttrib::get_class_slot()) !=
        target_rs->get_attrib(LightAttrib::get_class_slot())) {
      altered |= Shader::SSD_light;
    }
-    if (_state_rs->get_attrib(ClipPlaneAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(ClipPlaneAttrib::get_class_slot()) !=
        target_rs->get_attrib(ClipPlaneAttrib::get_class_slot())) {
      altered |= Shader::SSD_clip_planes;
    }
-    if (_state_rs->get_attrib(TexMatrixAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(TexMatrixAttrib::get_class_slot()) !=
        target_rs->get_attrib(TexMatrixAttrib::get_class_slot())) {
      altered |= Shader::SSD_tex_matrix;
    }
-    if (_state_rs->get_attrib(TextureAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(TextureAttrib::get_class_slot()) !=
        target_rs->get_attrib(TextureAttrib::get_class_slot())) {
      altered |= Shader::SSD_texture;
    }

View File

@@ -30,9 +30,9 @@ CLP(TimerQueryContext)::
   // has already shut down, though, too bad. This means we never get to
   // free this index, but presumably the app is already shutting down
   // anyway.
-  if (!_glgsg.was_deleted()) {
-    LightMutexHolder holder(_glgsg->_lock);
-    _glgsg->_deleted_queries.push_back(_index);
+  if (auto glgsg = _glgsg.lock()) {
+    LightMutexHolder holder(glgsg->_lock);
+    glgsg->_deleted_queries.push_back(_index);
     _index = 0;
   }
 }

View File

@@ -32,8 +32,8 @@ munge_state(const RenderState *state) {
   int id = get_gsg()->_id;
   int mi = munged_states.find(id);
   if (mi != -1) {
-    if (!munged_states.get_data(mi).was_deleted()) {
-      return munged_states.get_data(mi).p();
+    if (auto munged_state = munged_states.get_data(mi).lock()) {
+      return munged_state;
     } else {
       munged_states.remove_element(mi);
     }

View File

@@ -74,23 +74,30 @@ was_deleted() const {
 }
 
 /**
- * Returns the NodePath held within this object.
+ * Returns the NodePath held within this object, or an empty NodePath with the
+ * error flag set if the object was deleted.
  */
 INLINE NodePath WeakNodePath::
 get_node_path() const {
-  nassertr_always(!was_deleted(), NodePath::fail());
   NodePath result;
-  result._head = _head;
+  result._head = _head.lock();
+  if (!_head.is_null() && result._head == nullptr) {
+    result._error_type = NodePath::ET_fail;
+  }
   return result;
 }
 
 /**
- * Returns the PandaNode held within this object.
+ * Returns the PandaNode held within this object, or nullptr if the object was
+ * deleted.
 */
-INLINE PandaNode *WeakNodePath::
+INLINE PT(PandaNode) WeakNodePath::
 node() const {
-  nassertr_always(!is_empty(), (PandaNode *)NULL);
-  return _head->get_node();
+  if (auto head = _head.lock()) {
+    return head->get_node();
+  } else {
+    return nullptr;
+  }
 }
 
 /**
@@ -190,10 +197,9 @@ compare_to(const WeakNodePath &other) const {
  */
 INLINE int WeakNodePath::
 get_key() const {
-  if (is_empty() || was_deleted()) {
-    return _backup_key;
+  if (auto head = _head.lock()) {
+    _backup_key = head->get_key();
   }
 
-  ((WeakNodePath *)this)->_backup_key = _head->get_key();
   return _backup_key;
 }

View File

@@ -42,7 +42,7 @@ public:
   INLINE bool was_deleted() const;
 
   INLINE NodePath get_node_path() const;
-  INLINE PandaNode *node() const;
+  INLINE PT(PandaNode) node() const;
 
   INLINE bool operator == (const NodePath &other) const;
   INLINE bool operator != (const NodePath &other) const;
@@ -60,7 +60,7 @@ public:
 private:
   WPT(NodePathComponent) _head;
-  int _backup_key;
+  mutable int _backup_key;
 };
 
 INLINE ostream &operator << (ostream &out, const WeakNodePath &node_path);

View File

@@ -60,14 +60,11 @@ get_thread_sync_name(int index) const {
 /**
  * Returns the Panda Thread object associated with the indicated PStatThread.
 */
-INLINE Thread *PStatClient::
+INLINE PT(Thread) PStatClient::
 get_thread_object(int index) const {
   nassertr(index >= 0 && index < AtomicAdjust::get(_num_threads), NULL);
   InternalThread *thread = get_thread_ptr(index);
-  if (thread->_thread.was_deleted()) {
-    return NULL;
-  }
-  return thread->_thread;
+  return thread->_thread.lock();
 }
 
 /**

View File

@@ -73,7 +73,7 @@ PUBLISHED:
   MAKE_SEQ(get_threads, get_num_threads, get_thread);
   INLINE string get_thread_name(int index) const;
   INLINE string get_thread_sync_name(int index) const;
-  INLINE Thread *get_thread_object(int index) const;
+  INLINE PT(Thread) get_thread_object(int index) const;
   PStatThread get_main_thread() const;
   PStatThread get_current_thread() const;

View File

@@ -337,7 +337,7 @@ remove_element(size_t n) {
       clear_element(i);
       --_num_entries;
     } else {
-      size_t wants_index = get_hash(_table[i]._key);
+      size_t wants_index = get_hash(_table[i]._key.get_orig());
       if (wants_index != i) {
         // This one was a hash conflict; try to put it where it belongs. We
         // can't just put it in n, since maybe it belongs somewhere after n.