Mirror of https://github.com/panda3d/panda3d.git
Merge branch 'release/1.10.x'
commit 613441060b
@@ -1370,7 +1370,7 @@ class ObjectHandles(NodePath, DirectObject):
         self.setScalingFactor(1)
 
     def setScalingFactor(self, scaleFactor):
-        self.ohScalingFactor = self.ohScalingFactor * scaleFactor
+        self.ohScalingFactor = scaleFactor
         self.scalingNode.setScale(self.ohScalingFactor * self.directScalingFactor)
 
     def getScalingFactor(self):
direct/src/dist/FreezeTool.py (30 changed lines)
@@ -81,6 +81,25 @@ defaultHiddenImports = {
     'pandas._libs.tslibs.conversion': ['pandas._libs.tslibs.base'],
 }
+
+
+# These are modules that import other modules but shouldn't pick them up as
+# dependencies (usually because they are optional).  This prevents picking up
+# unwanted dependencies.
+ignoreImports = {
+    'direct.showbase.PythonUtil': ['pstats', 'profile'],
+
+    'toml.encoder': ['numpy'],
+}
+
+if sys.version_info >= (3, 8):
+    # importlib.metadata is a "provisional" module introduced in Python 3.8 that
+    # conditionally pulls in dependency-rich packages like "email" and "pep517"
+    # (the latter of which is a thirdparty package!)  But it's only imported in
+    # one obscure corner, so we don't want to pull it in by default.
+    ignoreImports['importlib._bootstrap_external'] = ['importlib.metadata']
+    ignoreImports['importlib.metadata'] = ['pep517']
+
 
 # These are overrides for specific modules.
 overrideModules = {
     # Used by the warnings module, among others, to get line numbers.  Since
@@ -997,7 +1016,7 @@ class Freezer:
 
         # Scan the directory, looking for .py files.
         modules = []
-        for basename in os.listdir(pathname):
+        for basename in sorted(os.listdir(pathname)):
             if basename.endswith('.py') and basename != '__init__.py':
                 modules.append(basename[:-3])
 
@@ -1031,7 +1050,7 @@ class Freezer:
         modulePath = self.getModulePath(topName)
         if modulePath:
             for dirname in modulePath:
-                for basename in os.listdir(dirname):
+                for basename in sorted(os.listdir(dirname)):
                     if os.path.exists(os.path.join(dirname, basename, '__init__.py')):
                         parentName = '%s.%s' % (topName, basename)
                         newParentName = '%s.%s' % (newTopName, basename)
@@ -2499,6 +2518,11 @@ class PandaModuleFinder(modulefinder.ModuleFinder):
         if name in self.badmodules:
             self._add_badmodule(name, caller)
             return
+
+        if level <= 0 and caller and caller.__name__ in ignoreImports:
+            if name in ignoreImports[caller.__name__]:
+                return
+
         try:
             self.import_hook(name, caller, level=level)
         except ImportError as msg:
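
For context on how the table above is consumed: each key in ignoreImports names the importing module, and the associated list names imports that should not be treated as dependencies. A minimal, hypothetical sketch of registering an extra entry before freezing; the 'myapp.config' and 'simplejson' names are made up for illustration, and this assumes ignoreImports stays a plain module-level dict in FreezeTool:

    from direct.dist import FreezeTool

    # Imports of the optional 'simplejson' module made from 'myapp.config'
    # would then be skipped by PandaModuleFinder rather than frozen in.
    FreezeTool.ignoreImports.setdefault('myapp.config', []).append('simplejson')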
@@ -2679,7 +2703,7 @@ class PandaModuleFinder(modulefinder.ModuleFinder):
             except OSError:
                 self.msg(2, "can't list directory", dir)
                 continue
-            for name in names:
+            for name in sorted(names):
                 mod = None
                 for suff in self.suffixes:
                     n = len(suff)
direct/src/dist/commands.py (1 changed line)
@@ -978,6 +978,7 @@ class build_apps(setuptools.Command):
 
         rootdir = os.getcwd()
         for dirname, subdirlist, filelist in os.walk(rootdir):
+            subdirlist.sort()
             dirpath = os.path.relpath(dirname, rootdir)
             if skip_directory(dirpath):
                 self.announce('skipping directory {}'.format(dirpath))
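
The in-place sort matters because os.walk() yields directory entries in arbitrary, filesystem-dependent order, and with the default topdown=True a sorted dirnames list also fixes the order in which subdirectories are visited. A minimal sketch of the pattern:

    import os

    # Walk a tree in a stable, platform-independent order: sorting the
    # dirnames list in place controls which subdirectories os.walk()
    # descends into next.
    for dirname, dirnames, filenames in os.walk('.'):
        dirnames.sort()
        for filename in sorted(filenames):
            print(os.path.join(dirname, filename))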
direct/src/dist/installers.py (30 changed lines)
@@ -4,6 +4,7 @@ import subprocess
 import sys
 import tarfile
 import zipfile
+import struct
 
 import panda3d.core as p3d
 
@@ -14,7 +15,8 @@ def create_zip(command, basename, build_dir):
         zf.write(build_dir, base_dir)
 
         for dirpath, dirnames, filenames in os.walk(build_dir):
-            for name in sorted(dirnames):
+            dirnames.sort()
+            for name in dirnames:
                 path = os.path.normpath(os.path.join(dirpath, name))
                 zf.write(path, path.replace(build_dir, base_dir, 1))
             for name in filenames:
@@ -28,16 +30,39 @@ def create_tarball(command, basename, build_dir, tar_compression):
     build_cmd = command.get_finalized_command('build_apps')
     binary_names = list(build_cmd.console_apps.keys()) + list(build_cmd.gui_apps.keys())
 
+    source_date = os.environ.get('SOURCE_DATE_EPOCH', '').strip()
+    if source_date:
+        max_mtime = int(source_date)
+    else:
+        max_mtime = None
+
     def tarfilter(tarinfo):
         if tarinfo.isdir() or os.path.basename(tarinfo.name) in binary_names:
             tarinfo.mode = 0o755
         else:
             tarinfo.mode = 0o644
+
+        # This isn't interesting information to retain for distribution.
+        tarinfo.uid = 0
+        tarinfo.gid = 0
+        tarinfo.uname = ""
+        tarinfo.gname = ""
+
+        if max_mtime is not None and tarinfo.mtime >= max_mtime:
+            tarinfo.mtime = max_mtime
+
         return tarinfo
 
-    with tarfile.open('{}.tar.{}'.format(basename, tar_compression), 'w|{}'.format(tar_compression)) as tf:
+    filename = '{}.tar.{}'.format(basename, tar_compression)
+    with tarfile.open(filename, 'w|{}'.format(tar_compression)) as tf:
         tf.add(build_dir, base_dir, filter=tarfilter)
 
+    if tar_compression == 'gz' and max_mtime is not None:
+        # Python provides no elegant way to overwrite the gzip timestamp.
+        with open(filename, 'r+b') as fp:
+            fp.seek(4)
+            fp.write(struct.pack("<L", max_mtime))
+
 
 def create_gztar(command, basename, build_dir):
     return create_tarball(command, basename, build_dir, 'gz')
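
The seek(4) trick works because the gzip header stores a 4-byte little-endian modification time at byte offset 4 (RFC 1952), which Python's gzip writer fills with the current time; rewriting that field with the clamped value helps keep .tar.gz output byte-identical across builds. A small verification sketch, where 'dist/app.tar.gz' is a placeholder path:

    import struct

    # Read back the MTIME field (bytes 4-7 of the gzip header) to confirm
    # the override took effect.
    with open('dist/app.tar.gz', 'rb') as fp:
        fp.seek(4)
        mtime, = struct.unpack('<L', fp.read(4))
    print('embedded gzip timestamp:', mtime)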
@@ -128,6 +153,7 @@ def create_nsis(command, basename, build_dir):
     nsi_dir = p3d.Filename.fromOsSpecific(build_cmd.build_base)
     build_root_dir = p3d.Filename.fromOsSpecific(build_dir)
     for root, dirs, files in os.walk(build_dir):
+        dirs.sort()
         for name in files:
             basefile = p3d.Filename.fromOsSpecific(os.path.join(root, name))
             file = p3d.Filename(basefile)
@@ -329,7 +329,7 @@ class TaskManager:
 
     def add(self, funcOrTask, name = None, sort = None, extraArgs = None,
             priority = None, uponDeath = None, appendTask = False,
-            taskChain = None, owner = None):
+            taskChain = None, owner = None, delay = None):
         """
         Add a new task to the taskMgr.  The task will begin executing
         immediately, or next frame if its sort value has already
@@ -382,12 +382,17 @@ class TaskManager:
                 is called when the task terminates.  This is all the
                 owner means.
 
+            delay: an optional amount of seconds to wait before starting
+                the task (equivalent to doMethodLater).
+
         Returns:
             The new Task object that has been added, or the original
             Task object that was passed in.
         """
 
         task = self.__setupTask(funcOrTask, name, priority, sort, extraArgs, taskChain, appendTask, owner, uponDeath)
+        if delay is not None:
+            task.setDelay(delay)
         self.mgr.add(task)
         return task
 
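
A rough usage sketch of the new keyword, assuming a running ShowBase instance that exposes the global taskMgr; the task name and function are made up:

    def flashHighlight(task):
        # ... do the one-shot work here ...
        return task.done

    # Starts after a 2-second delay; per the docstring above this is
    # roughly equivalent to
    # taskMgr.doMethodLater(2.0, flashHighlight, 'flashHighlight').
    taskMgr.add(flashHighlight, 'flashHighlight', delay=2.0)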
@@ -197,7 +197,11 @@ get_compiler() {
  */
 string PandaSystem::
 get_build_date() {
+#ifdef PANDA_BUILD_DATE_STR
+  return PANDA_BUILD_DATE_STR;
+#else
   return __DATE__ " " __TIME__;
+#endif
 }
 
 /**
@@ -544,7 +544,15 @@ main(int argc, char **argv) {
   // Make up a file identifier.  This is just some bogus number that should be
   // the same in both the compiled-in code and in the database, so we can
   // check synchronicity at load time.
-  int file_identifier = time(nullptr);
+  // We allow overriding this value by setting SOURCE_DATE_EPOCH to support
+  // reproducible builds.
+  int file_identifier;
+  const char *source_date_epoch = getenv("SOURCE_DATE_EPOCH");
+  if (source_date_epoch != nullptr && source_date_epoch[0] != 0) {
+    file_identifier = atoi(source_date_epoch);
+  } else {
+    file_identifier = time(nullptr);
+  }
   InterrogateModuleDef *def = builder.make_module_def(file_identifier);
 
   pofstream * the_output_include = nullptr;
@@ -2598,6 +2598,14 @@ def CreatePandaVersionFiles():
     if GIT_COMMIT:
         pandaversion_h += "\n#define PANDA_GIT_COMMIT_STR \"%s\"\n" % (GIT_COMMIT)
 
+    # Allow creating a deterministic build by setting this.
+    source_date = os.environ.get("SOURCE_DATE_EPOCH")
+    if source_date:
+        # This matches the GCC / Clang format for __DATE__ __TIME__
+        source_date = time.gmtime(int(source_date))
+        source_date = time.strftime('%b %e %Y %H:%M:%S', source_date)
+        pandaversion_h += "\n#define PANDA_BUILD_DATE_STR \"%s\"\n" % (source_date)
+
     checkpandaversion_cxx = CHECKPANDAVERSION_CXX.replace("$VERSION1",str(version1))
     checkpandaversion_cxx = checkpandaversion_cxx.replace("$VERSION2",str(version2))
     checkpandaversion_cxx = checkpandaversion_cxx.replace("$VERSION3",str(version3))
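
With SOURCE_DATE_EPOCH exported, the generated PANDA_BUILD_DATE_STR becomes a fixed UTC string in the same "Mmm dd yyyy HH:MM:SS" shape as __DATE__ " " __TIME__. A quick sketch of the formatting used above, with 1577836800 as an example value:

    import time

    epoch = 1577836800  # e.g. export SOURCE_DATE_EPOCH=1577836800
    print(time.strftime('%b %e %Y %H:%M:%S', time.gmtime(epoch)))
    # -> "Jan  1 2020 00:00:00" for this example value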
@@ -57,6 +57,7 @@ string dont_compress_str = "jpg,png,mp3,ogg";
 // Default text extensions.  May be overridden with -X.
 string text_ext_str = "txt";
 
+time_t source_date_epoch = (time_t)-1;
 bool got_record_timestamp_flag = false;
 bool record_timestamp_flag = true;
 
@@ -430,6 +431,12 @@ add_files(const vector_string &params) {
     needs_repack = true;
   }
 
+  if (multifile->get_record_timestamp() && source_date_epoch != (time_t)-1) {
+    if (multifile->get_timestamp() > source_date_epoch) {
+      multifile->set_timestamp(source_date_epoch);
+    }
+  }
+
   if (needs_repack) {
     if (!multifile->repack()) {
       cerr << "Failed to write " << multifile_name << ".\n";
@@ -533,6 +540,12 @@ kill_files(const vector_string &params) {
     }
   }
 
+  if (multifile->get_record_timestamp() && source_date_epoch != (time_t)-1) {
+    if (multifile->get_timestamp() > source_date_epoch) {
+      multifile->set_timestamp(source_date_epoch);
+    }
+  }
+
   bool okflag = true;
 
   if (multifile->needs_repack()) {
@@ -779,6 +792,11 @@ main(int argc, char **argv) {
     }
   }
 
+  const char *source_date_epoch_str = getenv("SOURCE_DATE_EPOCH");
+  if (source_date_epoch_str != nullptr && source_date_epoch_str[0] != 0) {
+    source_date_epoch = (time_t)strtoll(source_date_epoch_str, nullptr, 10);
+  }
+
   extern char *optarg;
   extern int optind;
   static const char *optflags = "crutxkvz123456789Z:T:X:S:f:OC:ep:P:F:h";
@@ -67,6 +67,17 @@ get_timestamp() const {
   return _timestamp;
 }
 
+/**
+ * Changes the overall modification timestamp of the multifile.  Note that this
+ * will be reset to the current time every time you modify a subfile.
+ * Only set this if you know what you are doing!
+ */
+INLINE void Multifile::
+set_timestamp(time_t timestamp) {
+  _timestamp = timestamp;
+  _timestamp_dirty = true;
+}
+
 /**
  * Sets the flag indicating whether timestamps should be recorded within the
  * Multifile or not.  The default is true, indicating the Multifile will
@@ -59,6 +59,7 @@ PUBLISHED:
   INLINE bool needs_repack() const;
 
   INLINE time_t get_timestamp() const;
+  INLINE void set_timestamp(time_t timestamp);
 
   INLINE void set_record_timestamp(bool record_timestamp);
   INLINE bool get_record_timestamp() const;
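
Since set_timestamp() is added under PUBLISHED:, it should be reachable from Python as well. A hedged sketch of clamping a multifile's timestamp the same way multify does above, assuming the usual camel-case Python bindings are generated for the new method and using a placeholder 'scenes.mf' filename:

    import os, time
    from panda3d.core import Multifile, Filename

    epoch = int(os.environ.get('SOURCE_DATE_EPOCH', time.time()))
    mf = Multifile()
    mf.openReadWrite(Filename('scenes.mf'))
    # Mirror the clamping logic from multify: never let the recorded
    # timestamp exceed SOURCE_DATE_EPOCH.
    if mf.getRecordTimestamp() and mf.getTimestamp() > epoch:
        mf.setTimestamp(epoch)
        mf.repack()
    mf.close()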
@@ -112,6 +112,10 @@ xform(const LMatrix4 &mat) {
 
     // Transform the center
     _center = _center * mat;
+
+    if (cinf(_radius)) {
+      set_infinite();
+    }
   }
 }
 
@@ -201,14 +201,26 @@ write_man_page(std::ostream &out) {
   // Generate a date string for inclusion into the footer.
   char date_str[256];
   date_str[0] = 0;
-  time_t current_time = time(nullptr);
+  time_t current_time;
+  tm *today = nullptr;
+
+  // This variable overrides the time we write to the footer.
+  const char *source_date_epoch = getenv("SOURCE_DATE_EPOCH");
+  if (source_date_epoch == nullptr || source_date_epoch[0] == 0 ||
+      (current_time = (time_t)strtoll(source_date_epoch, nullptr, 10)) <= 0) {
+    current_time = time(nullptr);
+    if (current_time != (time_t)-1) {
+      today = localtime(&current_time);
+    }
+  }
+  else {
+    // Format as UTC to avoid inconsistency being introduced due to timezones.
+    today = gmtime(&current_time);
+  }
 
-  if (current_time != (time_t) -1) {
-    tm *today = localtime(&current_time);
   if (today == nullptr || 0 == strftime(date_str, 256, "%d %B %Y", today)) {
     date_str[0] = 0;
   }
-  }
 
   out << " 1 \"" << date_str << "\" \""
       << PandaSystem::get_version_string() << "\" Panda3D\n";
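
All of these SOURCE_DATE_EPOCH hooks follow the reproducible-builds convention of deriving the embedded timestamp from the source revision rather than the wall clock. A typical way to set the variable, shown here as a hedged sketch that is not part of this commit, is to use the last commit time:

    import os, subprocess

    # Derive SOURCE_DATE_EPOCH from the committer timestamp of HEAD so
    # that repeated builds of the same revision embed identical dates.
    stamp = subprocess.check_output(
        ['git', 'log', '-1', '--pretty=%ct']).decode().strip()
    os.environ['SOURCE_DATE_EPOCH'] = stamp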