express: make a thread safe weak pointer implementation (#321)
To access a WeakPointerTo in a thread-safe way, use something like this:

    if (auto ptr = weak_ptr.lock()) {
      // ..use ptr as regular PointerTo
    }

The new implementation no longer requires the WeakReferenceList to store a
reference to every weak pointer; a mere count of weak pointers is sufficient.
Therefore, callbacks theoretically no longer require a WeakPointerTo to be
constructed.

The WeakPointerTo class is not actually atomic; it could be made so, but I
don't believe it's worth it at this time.
This commit is contained in:
parent 47f7d3f297
commit 0bb81a43c9
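As a concrete illustration of the locking pattern described in the commit
message, here is a minimal usage sketch; MyObject stands in for any
hypothetical ReferenceCount-derived class, and do_something() is likewise
assumed — neither is part of this commit:

    PT(MyObject) strong = new MyObject;      // strong reference
    WeakPointerTo<MyObject> weak = strong;   // weak reference

    // lock() atomically takes out a strong reference, or returns null if
    // the object has already been destroyed.
    if (auto ptr = weak.lock()) {
      ptr->do_something();  // ptr keeps the object alive within this scope
    }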
@@ -30,10 +30,13 @@ dec(TVOLATILE AtomicAdjustDummyImpl::Integer &var) {

 /**
  * Atomically computes var += delta. It is legal for delta to be negative.
+ * Returns the result of the addition.
  */
-ALWAYS_INLINE void AtomicAdjustDummyImpl::
+ALWAYS_INLINE AtomicAdjustDummyImpl::Integer AtomicAdjustDummyImpl::
 add(TVOLATILE AtomicAdjustDummyImpl::Integer &var, AtomicAdjustDummyImpl::Integer delta) {
-  var += delta;
+  Integer new_value = var + delta;
+  var = new_value;
+  return new_value;
 }

 /**
@@ -31,7 +31,7 @@ public:
   ALWAYS_INLINE static void inc(TVOLATILE Integer &var);
   ALWAYS_INLINE static bool dec(TVOLATILE Integer &var);
-  ALWAYS_INLINE static void add(TVOLATILE Integer &var, Integer delta);
+  ALWAYS_INLINE static Integer add(TVOLATILE Integer &var, Integer delta);
   ALWAYS_INLINE static Integer set(TVOLATILE Integer &var, Integer new_value);
   ALWAYS_INLINE static Integer get(const TVOLATILE Integer &var);
@@ -30,11 +30,12 @@ dec(TVOLATILE AtomicAdjustGccImpl::Integer &var) {

 /**
  * Atomically computes var += delta. It is legal for delta to be negative.
+ * Returns the result of the addition.
  */
-INLINE void AtomicAdjustGccImpl::
+INLINE AtomicAdjustGccImpl::Integer AtomicAdjustGccImpl::
 add(TVOLATILE AtomicAdjustGccImpl::Integer &var,
     AtomicAdjustGccImpl::Integer delta) {
-  __atomic_fetch_add(&var, delta, __ATOMIC_SEQ_CST);
+  return __atomic_add_fetch(&var, delta, __ATOMIC_SEQ_CST);
 }

 /**
@@ -35,7 +35,7 @@ public:
   INLINE static void inc(TVOLATILE Integer &var);
   INLINE static bool dec(TVOLATILE Integer &var);
-  INLINE static void add(TVOLATILE Integer &var, Integer delta);
+  INLINE static Integer add(TVOLATILE Integer &var, Integer delta);
   INLINE static Integer set(TVOLATILE Integer &var, Integer new_value);
   INLINE static Integer get(const TVOLATILE Integer &var);
@@ -59,14 +59,18 @@ dec(TVOLATILE AtomicAdjustI386Impl::Integer &var) {

 /**
  * Atomically computes var += delta. It is legal for delta to be negative.
+ * Returns the result of the addition.
  */
-INLINE void AtomicAdjustI386Impl::
+INLINE AtomicAdjustI386Impl::Integer AtomicAdjustI386Impl::
 add(TVOLATILE AtomicAdjustI386Impl::Integer &var, AtomicAdjustI386Impl::Integer delta) {
   assert((((size_t)&var) & (sizeof(Integer) - 1)) == 0);
   Integer orig_value = var;
-  while (compare_and_exchange(var, orig_value, orig_value + delta) != orig_value) {
+  Integer new_value = orig_value + delta;
+  while (compare_and_exchange(var, orig_value, new_value) != orig_value) {
     orig_value = var;
+    new_value = orig_value + delta;
   }
+  return new_value;
 }

 /**
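The hunk above is the classic compare-and-swap retry loop, with the twist
that add() must now return the value that was actually stored. A portable
sketch of the same logic using std::atomic (not part of the commit, shown
only to clarify why new_value is recomputed on every retry):

    #include <atomic>

    int atomic_add(std::atomic<int> &var, int delta) {
      int orig_value = var.load();
      int new_value = orig_value + delta;
      // On failure, compare_exchange_weak reloads orig_value with the
      // current value of var, so the sum is recomputed before retrying.
      while (!var.compare_exchange_weak(orig_value, new_value)) {
        new_value = orig_value + delta;
      }
      return new_value;  // exactly the value this thread stored
    }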
@@ -34,7 +34,7 @@ public:
   INLINE static void inc(TVOLATILE Integer &var);
   INLINE static bool dec(TVOLATILE Integer &var);
-  INLINE static void add(TVOLATILE Integer &var, Integer delta);
+  INLINE static Integer add(TVOLATILE Integer &var, Integer delta);
   INLINE static Integer set(TVOLATILE Integer &var, Integer new_value);
   INLINE static Integer get(const TVOLATILE Integer &var);
@@ -35,13 +35,16 @@ dec(TVOLATILE AtomicAdjustPosixImpl::Integer &var) {

 /**
  * Atomically computes var += delta. It is legal for delta to be negative.
+ * Returns the result of the addition.
  */
-INLINE void AtomicAdjustPosixImpl::
+INLINE AtomicAdjustPosixImpl::Integer AtomicAdjustPosixImpl::
 add(TVOLATILE AtomicAdjustPosixImpl::Integer &var,
     AtomicAdjustPosixImpl::Integer delta) {
   pthread_mutex_lock(&_mutex);
-  var += delta;
+  Integer new_value = var + delta;
+  var = new_value;
   pthread_mutex_unlock(&_mutex);
+  return new_value;
 }

 /**
@@ -35,7 +35,7 @@ public:
   INLINE static void inc(TVOLATILE Integer &var);
   INLINE static bool dec(TVOLATILE Integer &var);
-  INLINE static void add(TVOLATILE Integer &var, Integer delta);
+  INLINE static Integer add(TVOLATILE Integer &var, Integer delta);
   INLINE static Integer set(TVOLATILE Integer &var, Integer new_value);
   INLINE static Integer get(const TVOLATILE Integer &var);
@@ -40,17 +40,21 @@ dec(TVOLATILE AtomicAdjustWin32Impl::Integer &var) {

 /**
  * Atomically computes var += delta. It is legal for delta to be negative.
+ * Returns the result of the addition.
  */
-INLINE void AtomicAdjustWin32Impl::
+INLINE AtomicAdjustWin32Impl::Integer AtomicAdjustWin32Impl::
 add(TVOLATILE AtomicAdjustWin32Impl::Integer &var, AtomicAdjustWin32Impl::Integer delta) {
   assert((((size_t)&var) & (sizeof(Integer) - 1)) == 0);
 #ifdef _WIN64
-  InterlockedAdd64(&var, delta);
+  return InterlockedAdd64(&var, delta);
 #else
-  AtomicAdjustWin32Impl::Integer orig_value = var;
-  while (compare_and_exchange(var, orig_value, orig_value + delta) != orig_value) {
+  Integer orig_value = var;
+  Integer new_value = orig_value + delta;
+  while (compare_and_exchange(var, orig_value, new_value) != orig_value) {
     orig_value = var;
+    new_value = orig_value + delta;
   }
+  return new_value;
 #endif  // _WIN64
 }
@@ -44,7 +44,7 @@ public:
   ALWAYS_INLINE static void inc(TVOLATILE Integer &var);
   ALWAYS_INLINE static bool dec(TVOLATILE Integer &var);
-  INLINE static void add(TVOLATILE Integer &var, Integer delta);
+  INLINE static Integer add(TVOLATILE Integer &var, Integer delta);
   ALWAYS_INLINE static Integer set(TVOLATILE Integer &var, Integer new_value);
   ALWAYS_INLINE static Integer get(const TVOLATILE Integer &var);
@@ -153,7 +153,7 @@ apply_transform(const TransformState *transform) {
     if ((*ati).first.is_valid_pointer() &&
         (*ati).second.is_valid_pointer()) {
       // Here's our cached result.
-      return (*ati).second.p();
+      return (*ati).second.lock();
     }
   }
@@ -23,11 +23,7 @@ CharacterJointEffect() {
  * Returns the Character that will get update() called on it when this node's
  * relative transform is queried, or NULL if there is no such character.
  */
-INLINE Character *CharacterJointEffect::
+INLINE PT(Character) CharacterJointEffect::
 get_character() const {
-  if (_character.is_valid_pointer()) {
-    return _character;
-  } else {
-    return NULL;
-  }
+  return _character.lock();
 }
@@ -89,8 +89,9 @@ safe_to_combine() const {
 void CharacterJointEffect::
 output(ostream &out) const {
   out << get_type();
-  if (_character.is_valid_pointer()) {
-    out << "(" << _character->get_name() << ")";
+  PT(Character) character = get_character();
+  if (character != nullptr) {
+    out << "(" << character->get_name() << ")";
   } else {
     out << "(**invalid**)";
   }
@@ -122,8 +123,8 @@ void CharacterJointEffect::
 cull_callback(CullTraverser *trav, CullTraverserData &data,
               CPT(TransformState) &node_transform,
               CPT(RenderState) &) const {
-  if (_character.is_valid_pointer()) {
-    _character->update();
+  if (auto character = _character.lock()) {
+    character->update();
   }
   node_transform = data.node()->get_transform();
 }
@@ -150,8 +151,8 @@ void CharacterJointEffect::
 adjust_transform(CPT(TransformState) &net_transform,
                  CPT(TransformState) &node_transform,
                  const PandaNode *node) const {
-  if (_character.is_valid_pointer()) {
-    _character->update();
+  if (auto character = _character.lock()) {
+    character->update();
   }
   node_transform = node->get_transform();
 }
@@ -202,11 +203,8 @@ void CharacterJointEffect::
 write_datagram(BamWriter *manager, Datagram &dg) {
   RenderEffect::write_datagram(manager, dg);

-  if (_character.is_valid_pointer()) {
-    manager->write_pointer(dg, _character);
-  } else {
-    manager->write_pointer(dg, NULL);
-  }
+  PT(Character) character = get_character();
+  manager->write_pointer(dg, character);
 }

 /**
@@ -38,7 +38,7 @@ private:
 PUBLISHED:
   static CPT(RenderEffect) make(Character *character);

-  INLINE Character *get_character() const;
+  INLINE PT(Character) get_character() const;

 public:
   virtual bool safe_to_transform() const;
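Caller-side consequence of this signature change, as a hedged sketch
(effect is an assumed pointer to a CharacterJointEffect): the returned
PT(Character) pins the Character for the duration of its use, where the old
raw pointer could dangle if another thread dropped the last reference.

    PT(Character) character = effect->get_character();
    if (character != nullptr) {
      character->update();  // safe: `character` holds a strong reference
    }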
@@ -112,9 +112,9 @@ ReferenceCount::
   nassertv(_ref_count == 0 || _ref_count == local_ref_count);

   // Tell our weak reference holders that we're going away now.
-  if (_weak_list != (WeakReferenceList *)NULL) {
-    delete (WeakReferenceList *)_weak_list;
-    _weak_list = (WeakReferenceList *)NULL;
+  if (_weak_list != nullptr) {
+    ((WeakReferenceList *)_weak_list)->mark_deleted();
+    _weak_list = nullptr;
   }

 #ifndef NDEBUG
@@ -253,6 +253,9 @@ has_weak_list() const {
  * Returns the WeakReferenceList associated with this ReferenceCount object.
  * If there has never been a WeakReferenceList associated with this object,
  * creates one now.
+ *
+ * The returned object will be deleted automatically when all weak and strong
+ * references to the object have gone.
  */
 INLINE WeakReferenceList *ReferenceCount::
 get_weak_list() const {
@@ -264,14 +267,21 @@ get_weak_list() const {

 /**
- * Adds the indicated PointerToVoid as a weak reference to this object.
+ * Returns an object that will persist as long as any reference (strong or
+ * weak) exists, for calling unref() or checking whether the object still
+ * exists.
  */
-INLINE void ReferenceCount::
-weak_ref(WeakPointerToVoid *ptv) {
+INLINE WeakReferenceList *ReferenceCount::
+weak_ref() {
   TAU_PROFILE("void ReferenceCount::weak_ref()", " ", TAU_USER);
 #ifdef _DEBUG
-  nassertv(test_ref_count_integrity());
+  nassertr(test_ref_count_integrity(), nullptr);
+#else
+  nassertr(_ref_count != deleted_ref_count, nullptr);
 #endif
-  get_weak_list()->add_reference(ptv);
+  WeakReferenceList *weak_ref = get_weak_list();
+  weak_ref->ref();
+  return weak_ref;
 }

 /**
@@ -279,13 +289,15 @@ weak_ref(WeakPointerToVoid *ptv) {
  * must have previously been added via a call to weak_ref().
  */
 INLINE void ReferenceCount::
-weak_unref(WeakPointerToVoid *ptv) {
+weak_unref() {
   TAU_PROFILE("void ReferenceCount::weak_unref()", " ", TAU_USER);
 #ifdef _DEBUG
   nassertv(test_ref_count_integrity());
 #endif
-  nassertv(has_weak_list());
-  ((WeakReferenceList *)_weak_list)->clear_reference(ptv);
+  WeakReferenceList *weak_list = (WeakReferenceList *)_weak_list;
+  nassertv(weak_list != nullptr);
+  bool nonzero = weak_list->unref();
+  nassertv(nonzero);
 }

 /**
@@ -60,8 +60,8 @@ public:
   INLINE bool has_weak_list() const;
   INLINE WeakReferenceList *get_weak_list() const;

-  INLINE void weak_ref(WeakPointerToVoid *ptv);
-  INLINE void weak_unref(WeakPointerToVoid *ptv);
+  INLINE WeakReferenceList *weak_ref();
+  INLINE void weak_unref();

 protected:
   bool do_test_ref_count_integrity() const;
@@ -72,6 +72,26 @@ operator T * () const {
   return (To *)WeakPointerToBase<T>::_void_ptr;
 }

+/**
+ * A thread-safe way to access the underlying pointer; will silently return
+ * null if the underlying pointer was deleted or null.
+ */
+template<class T>
+INLINE PointerTo<T> WeakPointerTo<T>::
+lock() const {
+  WeakReferenceList *weak_ref = this->_weak_ref;
+  if (weak_ref != nullptr) {
+    PointerTo<T> ptr;
+    weak_ref->_lock.acquire();
+    if (!weak_ref->was_deleted()) {
+      ptr = (To *)WeakPointerToBase<T>::_void_ptr;
+    }
+    weak_ref->_lock.release();
+    return ptr;
+  }
+  return nullptr;
+}
+
 /**
  * Returns an ordinary pointer instead of a WeakPointerTo. Useful to work
  * around compiler problems, particularly for implicit upcasts.
@@ -207,6 +227,26 @@ operator const T * () const {
   return (To *)WeakPointerToBase<T>::_void_ptr;
 }

+/**
+ * A thread-safe way to access the underlying pointer; will silently return
+ * null if the underlying pointer was deleted or null.
+ */
+template<class T>
+INLINE ConstPointerTo<T> WeakConstPointerTo<T>::
+lock() const {
+  WeakReferenceList *weak_ref = this->_weak_ref;
+  if (weak_ref != nullptr) {
+    ConstPointerTo<T> ptr;
+    weak_ref->_lock.acquire();
+    if (!weak_ref->was_deleted()) {
+      ptr = (const To *)WeakPointerToBase<T>::_void_ptr;
+    }
+    weak_ref->_lock.release();
+    return ptr;
+  }
+  return nullptr;
+}
+
 /**
  * Returns an ordinary pointer instead of a WeakConstPointerTo. Useful to
  * work around compiler problems, particularly for implicit upcasts.
@@ -41,6 +41,7 @@ public:
   INLINE operator T *() const;

 PUBLISHED:
+  INLINE PointerTo<T> lock() const;
   INLINE To *p() const;
   INLINE To *get_orig() const;
@@ -77,6 +78,7 @@ public:
   INLINE operator const T *() const;

 PUBLISHED:
+  INLINE ConstPointerTo<T> lock() const;
   INLINE const To *p() const;
   INLINE const To *get_orig() const;
@@ -17,7 +17,13 @@
 template<class T>
 INLINE WeakPointerToBase<T>::
 WeakPointerToBase(To *ptr) {
-  reassign(ptr);
+  _void_ptr = (To *)ptr;
+  if (ptr != nullptr) {
+    _weak_ref = ptr->weak_ref();
+#ifdef DO_MEMORY_USAGE
+    update_type(ptr);
+#endif
+  }
 }

 /**
@@ -26,7 +32,13 @@ WeakPointerToBase(To *ptr) {
 template<class T>
 INLINE WeakPointerToBase<T>::
 WeakPointerToBase(const PointerToBase<T> &copy) {
-  reassign(copy);
+  // This double-casting is a bit of a cheat to get around the inheritance
+  // issue--it's difficult to declare a template class to be a friend.
+  To *ptr = (To *)((const WeakPointerToBase<To> *)&copy)->_void_ptr;
+  _void_ptr = ptr;
+  if (ptr != nullptr) {
+    _weak_ref = ptr->weak_ref();
+  }
 }

 /**
@@ -36,11 +48,35 @@ template<class T>
 INLINE WeakPointerToBase<T>::
 WeakPointerToBase(const WeakPointerToBase<T> &copy) {
   _void_ptr = copy._void_ptr;
-  _ptr_was_deleted = copy._ptr_was_deleted;
-
-  if (is_valid_pointer()) {
-    To *ptr = (To *)_void_ptr;
-    ptr->weak_ref(this);
+  // Don't bother increasing the weak reference count if the object was
+  // already deleted.
+  WeakReferenceList *weak_ref = copy._weak_ref;
+  if (weak_ref != nullptr && !weak_ref->was_deleted()) {
+    _weak_ref = copy._weak_ref;
+    _weak_ref->ref();
   }
 }

+/**
+ *
+ */
+template<class T>
+INLINE WeakPointerToBase<T>::
+WeakPointerToBase(WeakPointerToBase<T> &&from) NOEXCEPT {
+  // Protect against self-move-assignment.
+  if (from._void_ptr != this->_void_ptr) {
+    WeakReferenceList *old_ref = (WeakReferenceList *)this->_weak_ref;
+
+    this->_void_ptr = from._void_ptr;
+    this->_weak_ref = from._weak_ref;
+    from._void_ptr = nullptr;
+    from._weak_ref = nullptr;
+
+    // Now delete the old pointer.
+    if (old_ref != nullptr && !old_ref->unref()) {
+      delete old_ref;
+    }
+  }
+}

 /**
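Illustrative sketch of the move support added above (MyObject is again a
hypothetical ReferenceCount subclass, and strong_ptr an assumed existing
PT(MyObject)): a move transfers ownership of the weak reference without
touching the count, leaving the source empty.

    WeakPointerTo<MyObject> a(strong_ptr);    // weak count +1
    WeakPointerTo<MyObject> b(std::move(a));  // count unchanged; a is empty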
@@ -50,7 +86,10 @@ WeakPointerToBase(const WeakPointerToBase<T> &copy) {
 template<class T>
 INLINE WeakPointerToBase<T>::
 ~WeakPointerToBase() {
-  reassign((To *)NULL);
+  WeakReferenceList *old_ref = (WeakReferenceList *)_weak_ref;
+  if (old_ref != nullptr && !old_ref->unref()) {
+    delete old_ref;
+  }
 }

 /**
@@ -60,34 +99,23 @@ INLINE WeakPointerToBase<T>::
 template<class T>
 void WeakPointerToBase<T>::
 reassign(To *ptr) {
-  if (ptr != (To *)_void_ptr || _ptr_was_deleted) {
-    To *old_ptr = (To *)_void_ptr;
+  if (ptr != (To *)_void_ptr) {
+    WeakReferenceList *old_ref = (WeakReferenceList *)_weak_ref;

     _void_ptr = (void *)ptr;
-    if (ptr != (To *)NULL) {
-      ptr->weak_ref(this);
+    if (ptr != nullptr) {
+      _weak_ref = ptr->weak_ref();
 #ifdef DO_MEMORY_USAGE
-      if (MemoryUsage::get_track_memory_usage()) {
-        // Make sure the MemoryUsage record knows what the TypeHandle is, if
-        // we know it ourselves.
-        TypeHandle type = get_type_handle(To);
-        if (type == TypeHandle::none()) {
-          do_init_type(To);
-          type = get_type_handle(To);
-        }
-        if (type != TypeHandle::none()) {
-          MemoryUsage::update_type(ptr, type);
-        }
-      }
+      update_type(ptr);
 #endif
+    } else {
+      _weak_ref = nullptr;
     }

     // Now remove the old reference.
-    if (old_ptr != (To *)NULL && !_ptr_was_deleted) {
-      old_ptr->weak_unref(this);
+    if (old_ref != nullptr && !old_ref->unref()) {
+      delete old_ref;
     }
-
-    _ptr_was_deleted = false;
   }
 }
@@ -108,8 +136,69 @@ reassign(const PointerToBase<To> &copy) {
 template<class T>
 INLINE void WeakPointerToBase<T>::
 reassign(const WeakPointerToBase<To> &copy) {
-  nassertv(!copy.was_deleted());
-  reassign((To *)copy._void_ptr);
+  void *new_ptr = copy._void_ptr;
+  if (new_ptr != _void_ptr) {
+    WeakReferenceList *old_ref = (WeakReferenceList *)_weak_ref;
+    _void_ptr = new_ptr;
+
+    // Don't bother increasing the weak reference count if the object was
+    // already deleted.
+    WeakReferenceList *weak_ref = copy._weak_ref;
+    if (weak_ref != nullptr && !weak_ref->was_deleted()) {
+      weak_ref->ref();
+      _weak_ref = weak_ref;
+    } else {
+      _weak_ref = nullptr;
+    }
+
+    // Now remove the old reference.
+    if (old_ref != nullptr && !old_ref->unref()) {
+      delete old_ref;
+    }
+  }
 }
+
+/**
+ *
+ */
+template<class T>
+INLINE void WeakPointerToBase<T>::
+reassign(WeakPointerToBase<To> &&from) NOEXCEPT {
+  // Protect against self-move-assignment.
+  if (from._void_ptr != this->_void_ptr) {
+    WeakReferenceList *old_ref = (WeakReferenceList *)this->_weak_ref;
+
+    this->_void_ptr = from._void_ptr;
+    this->_weak_ref = from._weak_ref;
+    from._void_ptr = nullptr;
+    from._weak_ref = nullptr;
+
+    // Now delete the old pointer.
+    if (old_ref != nullptr && !old_ref->unref()) {
+      delete old_ref;
+    }
+  }
+}
+
+/**
+ * Ensures that the MemoryUsage record for the pointer has the right type of
+ * object, if we know the type ourselves.
+ */
+template<class T>
+INLINE void WeakPointerToBase<T>::
+update_type(To *ptr) {
+#ifdef DO_MEMORY_USAGE
+  if (MemoryUsage::get_track_memory_usage()) {
+    TypeHandle type = get_type_handle(To);
+    if (type == TypeHandle::none()) {
+      do_init_type(To);
+      type = get_type_handle(To);
+    }
+    if (type != TypeHandle::none()) {
+      MemoryUsage::update_type(ptr, type);
+    }
+  }
+#endif  // DO_MEMORY_USAGE
+}

 #ifndef CPPPARSER
@@ -323,8 +412,6 @@ operator < (const PointerToBase<To> &other) const {

 #endif  // CPPPARSER

-
-
 /**
  * A convenient way to set the PointerTo object to NULL. (Assignment to a NULL
  * pointer also works, of course.)
@@ -332,7 +419,14 @@ operator < (const PointerToBase<To> &other) const {
 template<class T>
 INLINE void WeakPointerToBase<T>::
 clear() {
-  reassign((To *)NULL);
+  WeakReferenceList *old_ref = (WeakReferenceList *)_weak_ref;
+  _void_ptr = nullptr;
+  _weak_ref = nullptr;
+
+  // Now remove the old reference.
+  if (old_ref != nullptr && !old_ref->unref()) {
+    delete old_ref;
+  }
 }

 /**
@@ -346,7 +440,9 @@ clear() {
 template<class T>
 INLINE void WeakPointerToBase<T>::
 refresh() const {
-  ((WeakPointerToBase<T> *)this)->reassign((To *)_void_ptr);
+  if (_void_ptr != nullptr) {
+    ((WeakPointerToBase<T> *)this)->reassign((To *)_void_ptr);
+  }
 }

 /**
@@ -31,11 +31,15 @@ protected:
   INLINE WeakPointerToBase(To *ptr);
   INLINE WeakPointerToBase(const PointerToBase<T> &copy);
   INLINE WeakPointerToBase(const WeakPointerToBase<T> &copy);
+  INLINE WeakPointerToBase(WeakPointerToBase<T> &&from) NOEXCEPT;
   INLINE ~WeakPointerToBase();

   void reassign(To *ptr);
   INLINE void reassign(const PointerToBase<To> &copy);
   INLINE void reassign(const WeakPointerToBase<To> &copy);
+  INLINE void reassign(WeakPointerToBase<To> &&from) NOEXCEPT;
+
+  INLINE void update_type(To *ptr);

   // No assignment or retrieval functions are declared in WeakPointerToBase,
   // because we will have to specialize on const vs. non-const later.
@@ -15,46 +15,34 @@
  *
  */
 INLINE WeakPointerToVoid::
-WeakPointerToVoid() :
-  _ptr_was_deleted(false),
-  _callback(NULL) {
+WeakPointerToVoid() : _weak_ref(nullptr) {
 }

 /**
- * This is intended only to be called by the WeakPointerList destructor. It
- * indicates that the object that we were pointing to has just been deleted.
- */
-INLINE void WeakPointerToVoid::
-mark_deleted() {
-  nassertv(!_ptr_was_deleted);
-  _ptr_was_deleted = true;
-  if (_callback != (WeakPointerCallback *)NULL) {
-    _callback->wp_callback(_void_ptr);
-  }
-}
-
-/**
- * Sets a callback that will be made when the pointer is deleted. If a
- * previous callback has already been set, it will be replaced.
+ * Sets a callback that will be made when the pointer is deleted. Does
+ * nothing if this is a null pointer.
  *
  * If the pointer has already been deleted, the callback will be made
  * immediately.
  */
 INLINE void WeakPointerToVoid::
-set_callback(WeakPointerCallback *callback) {
-  _callback = callback;
-  if (_ptr_was_deleted && _callback != (WeakPointerCallback *)NULL) {
-    _callback->wp_callback(_void_ptr);
+add_callback(WeakPointerCallback *callback) const {
+  if (_weak_ref != nullptr && !_weak_ref->was_deleted()) {
+    _weak_ref->add_callback(callback, _void_ptr);
+  } else if (_void_ptr != nullptr) {
+    callback->wp_callback(_void_ptr);
+    _weak_ref = nullptr;
   }
 }

 /**
- * Returns the callback that will be made when the pointer is deleted, or NULL
- * if no callback has been set.
+ * Removes a previously added callback.
  */
-INLINE WeakPointerCallback *WeakPointerToVoid::
-get_callback() const {
-  return _callback;
+INLINE void WeakPointerToVoid::
+remove_callback(WeakPointerCallback *callback) const {
+  if (_weak_ref != nullptr) {
+    _weak_ref->remove_callback(callback);
+  }
 }

 /**
@@ -63,7 +51,7 @@ get_callback() const {
  */
 INLINE bool WeakPointerToVoid::
 was_deleted() const {
-  return _ptr_was_deleted;
+  return _void_ptr != nullptr && (_weak_ref == nullptr || _weak_ref->was_deleted());
 }

 /**
@@ -72,5 +60,5 @@ was_deleted() const {
  */
 INLINE bool WeakPointerToVoid::
 is_valid_pointer() const {
-  return (_void_ptr != (void *)NULL) && !_ptr_was_deleted;
+  return _weak_ref != nullptr && !_weak_ref->was_deleted();
 }
@@ -27,18 +27,15 @@ protected:
   INLINE WeakPointerToVoid();

 public:
-  INLINE void mark_deleted();
-
-  INLINE void set_callback(WeakPointerCallback *callback);
-  INLINE WeakPointerCallback *get_callback() const;
+  INLINE void add_callback(WeakPointerCallback *callback) const;
+  INLINE void remove_callback(WeakPointerCallback *callback) const;

 PUBLISHED:
   INLINE bool was_deleted() const;
   INLINE bool is_valid_pointer() const;

 protected:
-  bool _ptr_was_deleted;
-  WeakPointerCallback *_callback;
+  mutable WeakReferenceList *_weak_ref;
 };

 #include "weakPointerToVoid.I"
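A sketch of how the revised callback interface might be used; TextureWatcher
and its members are assumptions for illustration, but the wp_callback(void *)
signature matches what the diff invokes:

    // Hypothetical observer that wants to know when a watched Texture dies.
    class TextureWatcher : public WeakPointerCallback {
    public:
      void watch(Texture *tex) {
        _texture = tex;
        _texture.add_callback(this);
      }

      virtual void wp_callback(void *pointer) {
        // Called with the raw pointer when the object is destroyed, or
        // immediately if it was already gone when add_callback() ran.
        _alive = false;
      }

    private:
      WeakPointerTo<Texture> _texture;
      bool _alive = true;
    };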
@@ -10,3 +10,30 @@
  * @author drose
  * @date 2004-09-27
  */
+
+/**
+ * Increases the number of weak references.
+ */
+INLINE void WeakReferenceList::
+ref() const {
+  AtomicAdjust::inc(_count);
+}
+
+/**
+ * Decreases the number of weak references. Returns true if, after this,
+ * there are still any weak or strong references remaining, or false if this
+ * structure should be deleted right away.
+ */
+INLINE bool WeakReferenceList::
+unref() const {
+  return AtomicAdjust::dec(_count);
+}
+
+/**
+ * Returns true if the object represented has been deleted, ie. there are only
+ * weak references left pointing to the object.
+ */
+INLINE bool WeakReferenceList::
+was_deleted() const {
+  return AtomicAdjust::get(_count) < _alive_offset;
+}
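These three accessors encode the whole lifetime protocol in a single counter.
A worked example of the bookkeeping, assuming _alive_offset == 1 << 30 as
declared in the header:

    // construction:              _count = 1 << 30        (alive, no weak refs)
    // two weak pointers ref():   _count = (1 << 30) + 2
    // object destroyed:          mark_deleted() adds -_alive_offset,
    //                            so _count = 2; was_deleted() is now true,
    //                            since 2 < (1 << 30)
    // last weak pointer unref(): _count reaches 0, unref() returns false,
    //                            and the caller deletes the WeakReferenceList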
@@ -19,7 +19,7 @@
  *
  */
 WeakReferenceList::
-WeakReferenceList() {
+WeakReferenceList() : _count(_alive_offset) {
 }

 /**
@@ -27,30 +27,33 @@ WeakReferenceList() {
  */
 WeakReferenceList::
 ~WeakReferenceList() {
-  _lock.acquire();
-  Pointers::iterator pi;
-  for (pi = _pointers.begin(); pi != _pointers.end(); ++pi) {
-    (*pi)->mark_deleted();
-  }
-  _lock.release();
+  nassertv(_count == 0);
 }

 /**
- * Intended to be called only by WeakPointerTo (or by any class implementing a
- * weak reference-counting pointer), this adds the indicated PointerToVoid
- * structure to the list of such structures that are maintaining a weak
- * pointer to this object.
+ * Adds the callback to the list of callbacks that will be called when the
+ * underlying pointer is deleted. If it has already been deleted, it will
+ * be called immediately.
  *
- * When the WeakReferenceList destructs (presumably because its owning object
- * destructs), the pointer within the PointerToVoid object will be set to
- * NULL.
+ * The data pointer can be an arbitrary pointer and is passed as only argument
+ * to the callback.
  */
 void WeakReferenceList::
-add_reference(WeakPointerToVoid *ptv) {
+add_callback(WeakPointerCallback *callback, void *data) {
+  nassertv(callback != nullptr);
   _lock.acquire();
-  bool inserted = _pointers.insert(ptv).second;
+  // We need to check again whether the object is deleted after grabbing the
+  // lock, despite having already done this in weakPointerTo.I, since it may
+  // have been deleted in the meantime.
+  bool deleted = was_deleted();
+  if (!deleted) {
+    _callbacks.insert(make_pair(callback, data));
+  }
   _lock.release();
-  nassertv(inserted);
+
+  if (deleted) {
+    callback->wp_callback(data);
+  }
 }

 /**
@@ -60,13 +63,33 @@ add_reference(WeakPointerToVoid *ptv) {
  * pointer to this object.
  */
 void WeakReferenceList::
-clear_reference(WeakPointerToVoid *ptv) {
+remove_callback(WeakPointerCallback *callback) {
+  nassertv(callback != nullptr);
   _lock.acquire();
-  Pointers::iterator pi = _pointers.find(ptv);
-  bool valid = (pi != _pointers.end());
-  if (valid) {
-    _pointers.erase(pi);
-  }
+  _callbacks.erase(callback);
   _lock.release();
-
-  nassertv(valid);
 }
+
+/**
+ * Called only by the ReferenceCount pointer to indicate that it has been
+ * deleted.
+ */
+void WeakReferenceList::
+mark_deleted() {
+  _lock.acquire();
+  Callbacks::iterator ci;
+  for (ci = _callbacks.begin(); ci != _callbacks.end(); ++ci) {
+    (*ci).first->wp_callback((*ci).second);
+  }
+  _callbacks.clear();
+
+  // Decrement the special offset added to the weak pointer count to indicate
+  // that it can be deleted when all the weak references have gone.
+  AtomicAdjust::Integer result = AtomicAdjust::add(_count, -_alive_offset);
+  _lock.release();
+  if (result == 0) {
+    // There are no weak references remaining either, so delete this.
+    delete this;
+  }
+  nassertv(result >= 0);
+}
@@ -15,30 +15,48 @@
 #define WEAKREFERENCELIST_H

 #include "pandabase.h"
-#include "pset.h"
+#include "pmap.h"
+#include "mutexImpl.h"

-class WeakPointerToVoid;
+class WeakPointerCallback;

 /**
- * This is a list of WeakPointerTo's that share a reference to a given
- * ReferenceCount object. It is stored in a separate class since it is
- * assumed that most ReferenceCount objects do not need to store this list at
- * all; this avoids bloating every ReferenceCount object in the world with the
- * size of this object.
+ * This is an object shared by all the weak pointers that point to the same
+ * ReferenceCount object. It is created whenever a weak reference to an
+ * object is created, and can outlive the object until all weak references
+ * have disappeared.
  */
 class EXPCL_PANDAEXPRESS WeakReferenceList {
 public:
   WeakReferenceList();
   ~WeakReferenceList();

-  void add_reference(WeakPointerToVoid *ptv);
-  void clear_reference(WeakPointerToVoid *ptv);
+  INLINE void ref() const;
+  INLINE bool unref() const;
+  INLINE bool was_deleted() const;
+
+  void add_callback(WeakPointerCallback *callback, void *data);
+  void remove_callback(WeakPointerCallback *callback);

 private:
-  typedef pset<WeakPointerToVoid *> Pointers;
-  Pointers _pointers;
+  void mark_deleted();
+
+public:
+  // This lock protects the callbacks below, but it also protects the object
+  // from being deleted during a call to WeakPointerTo::lock().
+  MutexImpl _lock;
+
+private:
+  typedef pmap<WeakPointerCallback *, void *> Callbacks;
+  Callbacks _callbacks;
+
+  // This has a very large number added to it if the object is still alive.
+  // It could be 1, but having it be a large number makes it easy to check
+  // whether the object has been deleted or not.
+  static const AtomicAdjust::Integer _alive_offset = (1 << 30);
+  mutable AtomicAdjust::Integer _count;
+
+  friend class ReferenceCount;
 };

 #include "weakReferenceList.I"
@@ -436,42 +436,43 @@ set_state_and_transform(const RenderState *target_rs,
     altered |= Shader::SSD_projection;
   }

-  if (_state_rs.was_deleted() || _state_rs == (const RenderState *)NULL) {
+  CPT(RenderState) state_rs = _state_rs.lock();
+  if (state_rs == nullptr) {
     // Reset all of the state.
     altered |= Shader::SSD_general;
     _state_rs = target_rs;

-  } else if (_state_rs != target_rs) {
+  } else if (state_rs != target_rs) {
     // The state has changed since last time.
-    if (_state_rs->get_attrib(ColorAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(ColorAttrib::get_class_slot()) !=
        target_rs->get_attrib(ColorAttrib::get_class_slot())) {
      altered |= Shader::SSD_color;
    }
-    if (_state_rs->get_attrib(ColorScaleAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(ColorScaleAttrib::get_class_slot()) !=
        target_rs->get_attrib(ColorScaleAttrib::get_class_slot())) {
      altered |= Shader::SSD_colorscale;
    }
-    if (_state_rs->get_attrib(MaterialAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(MaterialAttrib::get_class_slot()) !=
        target_rs->get_attrib(MaterialAttrib::get_class_slot())) {
      altered |= Shader::SSD_material;
    }
-    if (_state_rs->get_attrib(ShaderAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(ShaderAttrib::get_class_slot()) !=
        target_rs->get_attrib(ShaderAttrib::get_class_slot())) {
      altered |= Shader::SSD_shaderinputs;
    }
-    if (_state_rs->get_attrib(FogAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(FogAttrib::get_class_slot()) !=
        target_rs->get_attrib(FogAttrib::get_class_slot())) {
      altered |= Shader::SSD_fog;
    }
-    if (_state_rs->get_attrib(LightAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(LightAttrib::get_class_slot()) !=
        target_rs->get_attrib(LightAttrib::get_class_slot())) {
      altered |= Shader::SSD_light;
    }
-    if (_state_rs->get_attrib(ClipPlaneAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(ClipPlaneAttrib::get_class_slot()) !=
        target_rs->get_attrib(ClipPlaneAttrib::get_class_slot())) {
      altered |= Shader::SSD_clip_planes;
    }
-    if (_state_rs->get_attrib(TexMatrixAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(TexMatrixAttrib::get_class_slot()) !=
        target_rs->get_attrib(TexMatrixAttrib::get_class_slot())) {
      altered |= Shader::SSD_tex_matrix;
    }
@@ -39,8 +39,8 @@ CLP(GeomMunger)(GraphicsStateGuardian *gsg, const RenderState *state) :
     // TexGen object gets deleted.
     _texture = (const TextureAttrib *)state->get_attrib(TextureAttrib::get_class_slot());
     _tex_gen = (const TexGenAttrib *)state->get_attrib(TexGenAttrib::get_class_slot());
-    _texture.set_callback(this);
-    _tex_gen.set_callback(this);
+    _texture.add_callback(this);
+    _tex_gen.add_callback(this);
   }
 }
@@ -56,6 +56,11 @@ CLP(GeomMunger)::
     (*gci)->remove_munger(this);
   }
   _geom_contexts.clear();
+
+  if ((_flags & F_parallel_arrays) == 0) {
+    _texture.remove_callback(this);
+    _tex_gen.remove_callback(this);
+  }
 }

 /**
@@ -220,15 +225,15 @@ munge_format_impl(const GeomVertexFormat *orig,
   }

   // Put only the used texture coordinates into the interleaved array.
-  if (_texture != (TextureAttrib *)NULL) {
+  if (auto texture = _texture.lock()) {
     typedef pset<const InternalName *> UsedStages;
     UsedStages used_stages;

-    int num_stages = _texture->get_num_on_stages();
+    int num_stages = texture->get_num_on_stages();
     for (int i = 0; i < num_stages; ++i) {
-      TextureStage *stage = _texture->get_on_stage(i);
-      if (_tex_gen == (TexGenAttrib *)NULL ||
-          !_tex_gen->has_stage(stage)) {
+      TextureStage *stage = texture->get_on_stage(i);
+      CPT(TexGenAttrib) tex_gen = _tex_gen.lock();
+      if (tex_gen == nullptr || !tex_gen->has_stage(stage)) {
         InternalName *name = stage->get_texcoord_name();
         if (used_stages.insert(name).second) {
           // This is the first time we've encountered this texcoord name.
|
||||
|
||||
// Put only the used texture coordinates into the interleaved array. The
|
||||
// others will be kept around, but in a parallel array.
|
||||
if (_texture != (TextureAttrib *)NULL) {
|
||||
if (auto texture = _texture.lock()) {
|
||||
typedef pset<const InternalName *> UsedStages;
|
||||
UsedStages used_stages;
|
||||
|
||||
int num_stages = _texture->get_num_on_stages();
|
||||
int num_stages = texture->get_num_on_stages();
|
||||
for (int i = 0; i < num_stages; ++i) {
|
||||
TextureStage *stage = _texture->get_on_stage(i);
|
||||
if (_tex_gen == (TexGenAttrib *)NULL ||
|
||||
!_tex_gen->has_stage(stage)) {
|
||||
TextureStage *stage = texture->get_on_stage(i);
|
||||
CPT(TexGenAttrib) tex_gen = _tex_gen.lock();
|
||||
if (tex_gen == nullptr || !tex_gen->has_stage(stage)) {
|
||||
InternalName *name = stage->get_texcoord_name();
|
||||
if (used_stages.insert(name).second) {
|
||||
// This is the first time we've encountered this texcoord name.
|
||||
|
@@ -1899,46 +1899,47 @@ set_state_and_transform(const RenderState *target_rs,
     altered |= Shader::SSD_projection;
   }

-  if (_state_rs.was_deleted() || _state_rs == (const RenderState *)NULL) {
+  CPT(RenderState) state_rs = _state_rs.lock();
+  if (state_rs == nullptr) {
     // Reset all of the state.
     altered |= Shader::SSD_general;
     _state_rs = target_rs;

-  } else if (_state_rs != target_rs) {
+  } else if (state_rs != target_rs) {
     // The state has changed since last time.
-    if (_state_rs->get_attrib(ColorAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(ColorAttrib::get_class_slot()) !=
        target_rs->get_attrib(ColorAttrib::get_class_slot())) {
      altered |= Shader::SSD_color;
    }
-    if (_state_rs->get_attrib(ColorScaleAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(ColorScaleAttrib::get_class_slot()) !=
        target_rs->get_attrib(ColorScaleAttrib::get_class_slot())) {
      altered |= Shader::SSD_colorscale;
    }
-    if (_state_rs->get_attrib(MaterialAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(MaterialAttrib::get_class_slot()) !=
        target_rs->get_attrib(MaterialAttrib::get_class_slot())) {
      altered |= Shader::SSD_material;
    }
-    if (_state_rs->get_attrib(ShaderAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(ShaderAttrib::get_class_slot()) !=
        target_rs->get_attrib(ShaderAttrib::get_class_slot())) {
      altered |= Shader::SSD_shaderinputs;
    }
-    if (_state_rs->get_attrib(FogAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(FogAttrib::get_class_slot()) !=
        target_rs->get_attrib(FogAttrib::get_class_slot())) {
      altered |= Shader::SSD_fog;
    }
-    if (_state_rs->get_attrib(LightAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(LightAttrib::get_class_slot()) !=
        target_rs->get_attrib(LightAttrib::get_class_slot())) {
      altered |= Shader::SSD_light;
    }
-    if (_state_rs->get_attrib(ClipPlaneAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(ClipPlaneAttrib::get_class_slot()) !=
        target_rs->get_attrib(ClipPlaneAttrib::get_class_slot())) {
      altered |= Shader::SSD_clip_planes;
    }
-    if (_state_rs->get_attrib(TexMatrixAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(TexMatrixAttrib::get_class_slot()) !=
        target_rs->get_attrib(TexMatrixAttrib::get_class_slot())) {
      altered |= Shader::SSD_tex_matrix;
    }
-    if (_state_rs->get_attrib(TextureAttrib::get_class_slot()) !=
+    if (state_rs->get_attrib(TextureAttrib::get_class_slot()) !=
        target_rs->get_attrib(TextureAttrib::get_class_slot())) {
      altered |= Shader::SSD_texture;
    }
@@ -30,9 +30,9 @@ CLP(TimerQueryContext)::
   // has already shut down, though, too bad. This means we never get to
   // free this index, but presumably the app is already shutting down
   // anyway.
-  if (!_glgsg.was_deleted()) {
-    LightMutexHolder holder(_glgsg->_lock);
-    _glgsg->_deleted_queries.push_back(_index);
+  if (auto glgsg = _glgsg.lock()) {
+    LightMutexHolder holder(glgsg->_lock);
+    glgsg->_deleted_queries.push_back(_index);
     _index = 0;
   }
 }
@@ -32,8 +32,8 @@ munge_state(const RenderState *state) {
   int id = get_gsg()->_id;
   int mi = munged_states.find(id);
   if (mi != -1) {
-    if (!munged_states.get_data(mi).was_deleted()) {
-      return munged_states.get_data(mi).p();
+    if (auto munged_state = munged_states.get_data(mi).lock()) {
+      return munged_state;
     } else {
       munged_states.remove_element(mi);
     }
@@ -74,23 +74,30 @@ was_deleted() const {
 }

 /**
- * Returns the NodePath held within this object.
+ * Returns the NodePath held within this object, or an empty NodePath with the
+ * error flag set if the object was deleted.
  */
 INLINE NodePath WeakNodePath::
 get_node_path() const {
-  nassertr_always(!was_deleted(), NodePath::fail());
   NodePath result;
-  result._head = _head;
+  result._head = _head.lock();
+  if (!_head.is_null() && result._head == nullptr) {
+    result._error_type = NodePath::ET_fail;
+  }
   return result;
 }

 /**
- * Returns the PandaNode held within this object.
+ * Returns the PandaNode held within this object, or nullptr if the object was
+ * deleted.
  */
-INLINE PandaNode *WeakNodePath::
+INLINE PT(PandaNode) WeakNodePath::
 node() const {
-  nassertr_always(!is_empty(), (PandaNode *)NULL);
-  return _head->get_node();
+  if (auto head = _head.lock()) {
+    return head->get_node();
+  } else {
+    return nullptr;
+  }
 }

 /**
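A brief usage sketch of the revised accessors (np is an assumed existing
NodePath):

    WeakNodePath weak_np(np);

    // node() now returns a strong PT(PandaNode) rather than a raw pointer,
    // so the node cannot be destroyed while it is being used; it is null if
    // the path has already been deleted.
    PT(PandaNode) node = weak_np.node();
    if (node != nullptr) {
      nout << node->get_name() << "\n";  // node holds a strong reference
    }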
@@ -190,10 +197,9 @@ compare_to(const WeakNodePath &other) const {
  */
 INLINE int WeakNodePath::
 get_key() const {
-  if (is_empty() || was_deleted()) {
-    return _backup_key;
+  if (auto head = _head.lock()) {
+    _backup_key = head->get_key();
   }
-  ((WeakNodePath *)this)->_backup_key = _head->get_key();
   return _backup_key;
 }
@@ -42,7 +42,7 @@ public:
   INLINE bool was_deleted() const;

   INLINE NodePath get_node_path() const;
-  INLINE PandaNode *node() const;
+  INLINE PT(PandaNode) node() const;

   INLINE bool operator == (const NodePath &other) const;
   INLINE bool operator != (const NodePath &other) const;
@@ -60,7 +60,7 @@ public:

 private:
   WPT(NodePathComponent) _head;
-  int _backup_key;
+  mutable int _backup_key;
 };

 INLINE ostream &operator << (ostream &out, const WeakNodePath &node_path);
@@ -60,14 +60,11 @@ get_thread_sync_name(int index) const {
 /**
  * Returns the Panda Thread object associated with the indicated PStatThread.
  */
-INLINE Thread *PStatClient::
+INLINE PT(Thread) PStatClient::
 get_thread_object(int index) const {
   nassertr(index >= 0 && index < AtomicAdjust::get(_num_threads), NULL);
   InternalThread *thread = get_thread_ptr(index);
-  if (thread->_thread.was_deleted()) {
-    return NULL;
-  }
-  return thread->_thread;
+  return thread->_thread.lock();
 }

 /**
@@ -73,7 +73,7 @@ PUBLISHED:
   MAKE_SEQ(get_threads, get_num_threads, get_thread);
   INLINE string get_thread_name(int index) const;
   INLINE string get_thread_sync_name(int index) const;
-  INLINE Thread *get_thread_object(int index) const;
+  INLINE PT(Thread) get_thread_object(int index) const;

   PStatThread get_main_thread() const;
   PStatThread get_current_thread() const;
@@ -337,7 +337,7 @@ remove_element(size_t n) {
     clear_element(i);
     --_num_entries;
   } else {
-    size_t wants_index = get_hash(_table[i]._key);
+    size_t wants_index = get_hash(_table[i]._key.get_orig());
     if (wants_index != i) {
       // This one was a hash conflict; try to put it where it belongs. We
       // can't just put it in n, since maybe it belongs somewhere after n.