Mirror of https://github.com/panda3d/panda3d.git
Commit cf60284ca9: Merge branch 'release/1.10.x'

@@ -548,7 +548,7 @@ class SelectionRay(SelectionQueue):
         if xy:
             mx = xy[0]
             my = xy[1]
-        elif direct:
+        elif base.direct:
             mx = SEditor.dr.mouseX
             my = SEditor.dr.mouseY
         else:

@@ -4,7 +4,7 @@ import time


 class RotatingLog:
     """
-    A file() (or open()) replacement that will automatically open and write
+    An `open()` replacement that will automatically open and write
     to a new file if the prior file is too large or after a time interval.
     """

@@ -669,6 +669,8 @@ class DirectSession(DirectObject):
         if not taskMgr.hasTaskNamed('resizeObjectHandles'):
             dnp = self.selected.last
             if dnp:
+                direct = base.direct
+
                 if self.manipulationControl.fMultiView:
                     for i in range(3):
                         sf = 30.0 * direct.drList[i].orthoFactor

@@ -56,7 +56,7 @@ class DistributedLargeBlobSender(DistributedObject.DistributedObject):
             except OSError:
                 DistributedLargeBlobSender.notify.error(
                     'could not access %s' % bPath)
-        f = file(filename, 'rb')
+        f = open(filename, 'rb')
         self.blob = f.read()
         f.close()
         os.unlink(filename)

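Note on the file() to open() changes in this merge: Python 3 removed the file builtin, so any remaining file(...) call fails with a NameError there. A minimal sketch (not part of the diff) of the equivalent usage:

    # open() exists on both Python 2 and 3; the file() builtin is Python 2 only.
    with open('blob.bin', 'wb') as f:    # the context manager also guarantees f.close()
        f.write(b'payload')
    # file('blob.bin', 'rb')             # NameError on Python 3: name 'file' is not defined
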
@@ -42,7 +42,7 @@ class DistributedLargeBlobSenderAI(DistributedObjectAI.DistributedObjectAI):
                 break
             # NOTE: there's a small chance of a race condition here, if
             # the file is created by another AI just after the stat fails
-        f = file(filename, 'wb')
+        f = open(filename, 'wb')
         f.write(s)
         f.close()
         os.chdir(origDir)

direct/src/dist/FreezeTool.py (vendored, 2 lines changed)

@@ -77,6 +77,8 @@ defaultHiddenImports = {
         'numpy.core._dtype_ctypes',
         'numpy.core._methods',
     ],
+    'pandas.compat': ['lzma', 'cmath'],
+    'pandas._libs.tslibs.conversion': ['pandas._libs.tslibs.base'],
 }

 # These are overrides for specific modules.

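For context, defaultHiddenImports lists extra modules that must be bundled even though no import statement names them, typically because the package loads them dynamically. A minimal sketch (hypothetical helper, not the tool's code) of why a scan of import statements misses such modules:

    import importlib

    def load_plugin(name):
        # The module name is only a runtime string here, so a tool that scans
        # the source for import statements cannot know which module is needed.
        return importlib.import_module(name)

    load_plugin('json')   # any module whose name is only known at run time
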
direct/src/dist/commands.py (vendored, 7 lines changed)

@@ -4,7 +4,6 @@ See the :ref:`distribution` section of the programming manual for information
 on how to use these commands.
 """

-import collections
 import os
 import plistlib
 import pkg_resources

@@ -211,8 +210,10 @@ class build_apps(setuptools.Command):
             # manylinux1/linux
             'libdl.so.*', 'libstdc++.so.*', 'libm.so.*', 'libgcc_s.so.*',
             'libpthread.so.*', 'libc.so.*', 'ld-linux-x86-64.so.*',
-            'libgl.so.*', 'libx11.so.*', 'libreadline.so.*', 'libncursesw.so.*',
-            'libbz2.so.*', 'libz.so.*', 'liblzma.so.*', 'librt.so.*', 'libutil.so.*',
+            'libgl.so.*', 'libx11.so.*', 'libncursesw.so.*', 'libz.so.*',
+            'librt.so.*', 'libutil.so.*', 'libnsl.so.1', 'libXext.so.6',
+            'libXrender.so.1', 'libICE.so.6', 'libSM.so.6',
+            'libgobject-2.0.so.0', 'libgthread-2.0.so.0', 'libglib-2.0.so.0',

             # macOS
             '/usr/lib/libc++.1.dylib',

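Entries such as 'libgl.so.*' are glob-style patterns that cover any versioned shared-library name. A small illustration (an assumption about the matching mechanism; the diff only shows the pattern list itself):

    from fnmatch import fnmatch

    patterns = ['libgl.so.*', 'libx11.so.*', 'libz.so.*']
    name = 'libGL.so.1'
    print(any(fnmatch(name.lower(), pat) for pat in patterns))   # True
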
@@ -24,7 +24,7 @@ class DistributedCameraOV(DistributedObjectOV):
         self.fixtures = fixtures

     def storeToFile(self, name):
-        f = file('cameras-%s.txt' % name, 'w')
+        f = open('cameras-%s.txt' % name, 'w')
         f.writelines(self.getObject().pack())
         f.close()

@@ -35,7 +35,7 @@ class DistributedCameraOV(DistributedObjectOV):

     def loadFromFile(self, name):
         self.b_setFixtures([])
-        f = file('cameras-%s.txt' % name, 'r');
+        f = open('cameras-%s.txt' % name, 'r')
         for line in f.readlines():
             pos,hpr,fov = self.unpackFixture(line)
             self.addFixture([pos[0],pos[1],pos[2],

@@ -97,7 +97,7 @@ class DistributedCartesianGridAI(DistributedNodeAI, CartesianGridBase):
     def updateGridTask(self, task=None):
         # Run through all grid objects and update their parents if needed
         missingObjs = []
-        for avId in self.gridObjects.keys():
+        for avId in list(self.gridObjects.keys()):
             av = self.gridObjects[avId]
             # handle a missing object after it is already gone?
             if (av.isEmpty()):

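Wrapping .keys() in list() matters here, presumably because the task removes missing objects from gridObjects while looping: in Python 3, mutating a dict while iterating over its live view raises a RuntimeError. A minimal sketch (not from the diff):

    objects = {1: 'a', 2: 'b', 3: 'c'}

    # for key in objects.keys():
    #     del objects[key]       # RuntimeError: dictionary changed size during iteration

    for key in list(objects.keys()):    # snapshot the keys first
        del objects[key]

    print(objects)                      # {}
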
@@ -26,7 +26,7 @@ class DistributedNodeUD(DistributedObjectUD):

     def setParentStr(self, parentToken):
         self.notify.debugCall()
-        if len(parentTokenStr) > 0:
+        if len(parentToken) > 0:
             self.do_setParent(parentToken)

     def setParent(self, parentToken):

@@ -270,9 +270,7 @@ class DoCollectionManager:

     def deleteDistributedObjects(self):
         # Get rid of all the distributed objects
-        for doId in self.doId2do.keys():
-            # Look up the object
-            do = self.doId2do[doId]
+        for doId, do in list(self.doId2do.items()):
             self.deleteDistObject(do)

         # Get rid of everything that manages distributed objects

@@ -125,7 +125,7 @@ class ForceGroup(DirectObject):
         elif isinstance(f, AngularForce):
             if isinstance(f, AngularVectorForce):
                 vec = f.getQuat()
-                file.write(fname + ' = AngularVectorForce(Quat(%.4f, %.4f, %.4f))\n' % (vec[0], vec[1], vec[2], vec[3]))
+                file.write(fname + ' = AngularVectorForce(Quat(%.4f, %.4f, %.4f, %.4f))\n' % (vec[0], vec[1], vec[2], vec[3]))
         file.write(fname + '.setActive(%d)\n' % f.getActive())
         file.write(targ + '.addForce(%s)\n' % fname)

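The fix adds the fourth %.4f placeholder: a quaternion has four components, and the old format string paired three placeholders with four arguments, which fails at run time. A minimal sketch (not from the diff):

    vec = (0.7071, 0.0, 0.7071, 0.0)          # four quaternion components

    # '%.4f, %.4f, %.4f' % vec
    # TypeError: not all arguments converted during string formatting

    print('%.4f, %.4f, %.4f, %.4f' % vec)     # 0.7071, 0.0000, 0.7071, 0.0000
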
@@ -1,4 +1,6 @@
 from . import ForceGroup
+from direct.showbase.PhysicsManagerGlobal import physicsMgr


 class GlobalForceGroup(ForceGroup.ForceGroup):
+

@@ -6,7 +8,7 @@ class GlobalForceGroup(ForceGroup.ForceGroup):
         ForceGroup.ForceGroup.__init__(self, name)

     def addForce(self, force):
-        ForceGroup.ForceGroup.addForce(force)
+        ForceGroup.ForceGroup.addForce(self, force)
         if (force.isLinear() == 0):
             # Physics manager will need an angular integrator
             base.addAngularIntegrator()

@@ -16,7 +18,7 @@ class GlobalForceGroup(ForceGroup.ForceGroup):
             physicsMgr.addAngularForce(force)

     def removeForce(self, force):
-        ForceGroup.ForceGroup.removeForce(force)
+        ForceGroup.ForceGroup.removeForce(self, force)
         if (force.isLinear() == 1):
             physicsMgr.removeLinearForce(force)
         else:

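Both fixes pass self explicitly when calling the base-class method through the class object: ForceGroup.ForceGroup.addForce(force) binds force to the self parameter, leaving the real argument missing and raising a TypeError. A minimal sketch (not from the diff) of the pattern:

    class Base:
        def addForce(self, force):
            print('added', force)

    class Child(Base):
        def addForce(self, force):
            # Base.addForce(force) would raise:
            #   TypeError: addForce() missing 1 required positional argument: 'force'
            Base.addForce(self, force)   # explicit self is required for an unbound call

    Child().addForce('gravity')
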
@@ -30,6 +30,7 @@ from copyreg import dispatch_table
 # with the local pickle.py.
 pickle = __import__('pickle')

+PicklingError = pickle.PicklingError
 BasePickler = pickle._Pickler
 BaseUnpickler = pickle._Unpickler

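Re-exporting PicklingError lets callers of the drop-in direct.stdpy.pickle module catch pickling failures without also importing the standard pickle module. A brief usage sketch (assuming the wrapper propagates the same exceptions as the standard library, which the new test further down also checks):

    from direct.stdpy.pickle import dumps, PicklingError

    try:
        dumps(lambda x: x)          # lambdas cannot be pickled
    except PicklingError:
        print('object could not be pickled')
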
@@ -34,6 +34,7 @@ __all__ = [
     'Semaphore', 'BoundedSemaphore',
     'Event',
     'Timer',
+    'ThreadError',
     'local',
     'current_thread',
     'main_thread',

@@ -46,6 +47,7 @@ TIMEOUT_MAX = _thread.TIMEOUT_MAX

 local = _thread._local
 _newname = _thread._newname
+ThreadError = _thread.error

 class ThreadBase:
     """ A base class for both Thread and ExternalThread in this

tests/physics/test_GlobalForceGroup.py (new file, 19 lines)

@@ -0,0 +1,19 @@
+from direct.particles.GlobalForceGroup import GlobalForceGroup
+from panda3d import physics
+
+
+def test_GlobalForceGroup():
+    gfg = GlobalForceGroup()
+
+    force1 = physics.LinearVectorForce((1, 0, 0))
+    force2 = physics.LinearVectorForce((0, 1, 0))
+    gfg.addForce(force1)
+    assert tuple(gfg) == (force1,)
+    gfg.addForce(force2)
+    assert tuple(gfg) == (force1, force2)
+    gfg.removeForce(force1)
+    assert tuple(gfg) == (force2,)
+    gfg.removeForce(force1)
+    assert tuple(gfg) == (force2,)
+    gfg.removeForce(force2)
+    assert tuple(gfg) == ()

@@ -1,4 +1,5 @@
-from direct.stdpy.pickle import dumps, loads
+from direct.stdpy.pickle import dumps, loads, PicklingError
+import pytest


 def test_reduce_persist():
@@ -9,3 +10,12 @@ def test_reduce_persist():

     parent2, child2 = loads(dumps([parent, child]))
     assert tuple(parent2.children) == (child2,)
+
+
+def test_pickle_error():
+    class ErroneousPickleable(object):
+        def __reduce__(self):
+            return 12345
+
+    with pytest.raises(PicklingError):
+        dumps(ErroneousPickleable())

tests/stdpy/test_threading.py (new file, 7 lines)

@@ -0,0 +1,7 @@
+from direct.stdpy import threading
+import pytest
+
+
+def test_threading_error():
+    with pytest.raises(threading.ThreadError):
+        threading.stack_size()