Merge branch 'release/1.10.x'

This commit is contained in:
rdb 2021-01-18 23:47:36 +01:00
commit 613441060b
13 changed files with 135 additions and 13 deletions

View File

@ -1370,7 +1370,7 @@ class ObjectHandles(NodePath, DirectObject):
self.setScalingFactor(1) self.setScalingFactor(1)
def setScalingFactor(self, scaleFactor): def setScalingFactor(self, scaleFactor):
self.ohScalingFactor = self.ohScalingFactor * scaleFactor self.ohScalingFactor = scaleFactor
self.scalingNode.setScale(self.ohScalingFactor * self.directScalingFactor) self.scalingNode.setScale(self.ohScalingFactor * self.directScalingFactor)
def getScalingFactor(self): def getScalingFactor(self):

View File

@ -81,6 +81,25 @@ defaultHiddenImports = {
'pandas._libs.tslibs.conversion': ['pandas._libs.tslibs.base'], 'pandas._libs.tslibs.conversion': ['pandas._libs.tslibs.base'],
} }
# These are modules that import other modules but shouldn't pick them up as
# dependencies (usually because they are optional). This prevents picking up
# unwanted dependencies.
ignoreImports = {
'direct.showbase.PythonUtil': ['pstats', 'profile'],
'toml.encoder': ['numpy'],
}
if sys.version_info >= (3, 8):
# importlib.metadata is a "provisional" module introduced in Python 3.8 that
# conditionally pulls in dependency-rich packages like "email" and "pep517"
# (the latter of which is a third-party package!) But it's only imported in
# one obscure corner, so we don't want to pull it in by default.
ignoreImports['importlib._bootstrap_external'] = ['importlib.metadata']
ignoreImports['importlib.metadata'] = ['pep517']
# These are overrides for specific modules. # These are overrides for specific modules.
overrideModules = { overrideModules = {
# Used by the warnings module, among others, to get line numbers. Since # Used by the warnings module, among others, to get line numbers. Since
@ -997,7 +1016,7 @@ class Freezer:
# Scan the directory, looking for .py files. # Scan the directory, looking for .py files.
modules = [] modules = []
for basename in os.listdir(pathname): for basename in sorted(os.listdir(pathname)):
if basename.endswith('.py') and basename != '__init__.py': if basename.endswith('.py') and basename != '__init__.py':
modules.append(basename[:-3]) modules.append(basename[:-3])
@ -1031,7 +1050,7 @@ class Freezer:
modulePath = self.getModulePath(topName) modulePath = self.getModulePath(topName)
if modulePath: if modulePath:
for dirname in modulePath: for dirname in modulePath:
for basename in os.listdir(dirname): for basename in sorted(os.listdir(dirname)):
if os.path.exists(os.path.join(dirname, basename, '__init__.py')): if os.path.exists(os.path.join(dirname, basename, '__init__.py')):
parentName = '%s.%s' % (topName, basename) parentName = '%s.%s' % (topName, basename)
newParentName = '%s.%s' % (newTopName, basename) newParentName = '%s.%s' % (newTopName, basename)
@ -2499,6 +2518,11 @@ class PandaModuleFinder(modulefinder.ModuleFinder):
if name in self.badmodules: if name in self.badmodules:
self._add_badmodule(name, caller) self._add_badmodule(name, caller)
return return
if level <= 0 and caller and caller.__name__ in ignoreImports:
if name in ignoreImports[caller.__name__]:
return
try: try:
self.import_hook(name, caller, level=level) self.import_hook(name, caller, level=level)
except ImportError as msg: except ImportError as msg:
@ -2679,7 +2703,7 @@ class PandaModuleFinder(modulefinder.ModuleFinder):
except OSError: except OSError:
self.msg(2, "can't list directory", dir) self.msg(2, "can't list directory", dir)
continue continue
for name in names: for name in sorted(names):
mod = None mod = None
for suff in self.suffixes: for suff in self.suffixes:
n = len(suff) n = len(suff)

View File

@ -978,6 +978,7 @@ class build_apps(setuptools.Command):
rootdir = os.getcwd() rootdir = os.getcwd()
for dirname, subdirlist, filelist in os.walk(rootdir): for dirname, subdirlist, filelist in os.walk(rootdir):
subdirlist.sort()
dirpath = os.path.relpath(dirname, rootdir) dirpath = os.path.relpath(dirname, rootdir)
if skip_directory(dirpath): if skip_directory(dirpath):
self.announce('skipping directory {}'.format(dirpath)) self.announce('skipping directory {}'.format(dirpath))

View File

@ -4,6 +4,7 @@ import subprocess
import sys import sys
import tarfile import tarfile
import zipfile import zipfile
import struct
import panda3d.core as p3d import panda3d.core as p3d
@ -14,7 +15,8 @@ def create_zip(command, basename, build_dir):
zf.write(build_dir, base_dir) zf.write(build_dir, base_dir)
for dirpath, dirnames, filenames in os.walk(build_dir): for dirpath, dirnames, filenames in os.walk(build_dir):
for name in sorted(dirnames): dirnames.sort()
for name in dirnames:
path = os.path.normpath(os.path.join(dirpath, name)) path = os.path.normpath(os.path.join(dirpath, name))
zf.write(path, path.replace(build_dir, base_dir, 1)) zf.write(path, path.replace(build_dir, base_dir, 1))
for name in filenames: for name in filenames:
@ -28,16 +30,39 @@ def create_tarball(command, basename, build_dir, tar_compression):
build_cmd = command.get_finalized_command('build_apps') build_cmd = command.get_finalized_command('build_apps')
binary_names = list(build_cmd.console_apps.keys()) + list(build_cmd.gui_apps.keys()) binary_names = list(build_cmd.console_apps.keys()) + list(build_cmd.gui_apps.keys())
source_date = os.environ.get('SOURCE_DATE_EPOCH', '').strip()
if source_date:
max_mtime = int(source_date)
else:
max_mtime = None
def tarfilter(tarinfo): def tarfilter(tarinfo):
if tarinfo.isdir() or os.path.basename(tarinfo.name) in binary_names: if tarinfo.isdir() or os.path.basename(tarinfo.name) in binary_names:
tarinfo.mode = 0o755 tarinfo.mode = 0o755
else: else:
tarinfo.mode = 0o644 tarinfo.mode = 0o644
# This isn't interesting information to retain for distribution.
tarinfo.uid = 0
tarinfo.gid = 0
tarinfo.uname = ""
tarinfo.gname = ""
if max_mtime is not None and tarinfo.mtime >= max_mtime:
tarinfo.mtime = max_mtime
return tarinfo return tarinfo
with tarfile.open('{}.tar.{}'.format(basename, tar_compression), 'w|{}'.format(tar_compression)) as tf: filename = '{}.tar.{}'.format(basename, tar_compression)
with tarfile.open(filename, 'w|{}'.format(tar_compression)) as tf:
tf.add(build_dir, base_dir, filter=tarfilter) tf.add(build_dir, base_dir, filter=tarfilter)
if tar_compression == 'gz' and max_mtime is not None:
# Python provides no elegant way to overwrite the gzip timestamp.
with open(filename, 'r+b') as fp:
fp.seek(4)
fp.write(struct.pack("<L", max_mtime))
def create_gztar(command, basename, build_dir): def create_gztar(command, basename, build_dir):
return create_tarball(command, basename, build_dir, 'gz') return create_tarball(command, basename, build_dir, 'gz')
@ -128,6 +153,7 @@ def create_nsis(command, basename, build_dir):
nsi_dir = p3d.Filename.fromOsSpecific(build_cmd.build_base) nsi_dir = p3d.Filename.fromOsSpecific(build_cmd.build_base)
build_root_dir = p3d.Filename.fromOsSpecific(build_dir) build_root_dir = p3d.Filename.fromOsSpecific(build_dir)
for root, dirs, files in os.walk(build_dir): for root, dirs, files in os.walk(build_dir):
dirs.sort()
for name in files: for name in files:
basefile = p3d.Filename.fromOsSpecific(os.path.join(root, name)) basefile = p3d.Filename.fromOsSpecific(os.path.join(root, name))
file = p3d.Filename(basefile) file = p3d.Filename(basefile)

View File

@ -329,7 +329,7 @@ class TaskManager:
def add(self, funcOrTask, name = None, sort = None, extraArgs = None, def add(self, funcOrTask, name = None, sort = None, extraArgs = None,
priority = None, uponDeath = None, appendTask = False, priority = None, uponDeath = None, appendTask = False,
taskChain = None, owner = None): taskChain = None, owner = None, delay = None):
""" """
Add a new task to the taskMgr. The task will begin executing Add a new task to the taskMgr. The task will begin executing
immediately, or next frame if its sort value has already immediately, or next frame if its sort value has already
@ -382,12 +382,17 @@ class TaskManager:
is called when the task terminates. This is all the is called when the task terminates. This is all the
owner means. owner means.
delay: an optional amount of seconds to wait before starting
the task (equivalent to doMethodLater).
Returns: Returns:
The new Task object that has been added, or the original The new Task object that has been added, or the original
Task object that was passed in. Task object that was passed in.
""" """
task = self.__setupTask(funcOrTask, name, priority, sort, extraArgs, taskChain, appendTask, owner, uponDeath) task = self.__setupTask(funcOrTask, name, priority, sort, extraArgs, taskChain, appendTask, owner, uponDeath)
if delay is not None:
task.setDelay(delay)
self.mgr.add(task) self.mgr.add(task)
return task return task

View File

@ -197,7 +197,11 @@ get_compiler() {
*/ */
string PandaSystem:: string PandaSystem::
get_build_date() { get_build_date() {
#ifdef PANDA_BUILD_DATE_STR
return PANDA_BUILD_DATE_STR;
#else
return __DATE__ " " __TIME__; return __DATE__ " " __TIME__;
#endif
} }
/** /**

View File

@ -544,7 +544,15 @@ main(int argc, char **argv) {
// Make up a file identifier. This is just some bogus number that should be // Make up a file identifier. This is just some bogus number that should be
// the same in both the compiled-in code and in the database, so we can // the same in both the compiled-in code and in the database, so we can
// check synchronicity at load time. // check synchronicity at load time.
int file_identifier = time(nullptr); // We allow overriding this value by setting SOURCE_DATE_EPOCH to support
// reproducible builds.
int file_identifier;
const char *source_date_epoch = getenv("SOURCE_DATE_EPOCH");
if (source_date_epoch != nullptr && source_date_epoch[0] != 0) {
file_identifier = atoi(source_date_epoch);
} else {
file_identifier = time(nullptr);
}
InterrogateModuleDef *def = builder.make_module_def(file_identifier); InterrogateModuleDef *def = builder.make_module_def(file_identifier);
pofstream * the_output_include = nullptr; pofstream * the_output_include = nullptr;

View File

@ -2598,6 +2598,14 @@ def CreatePandaVersionFiles():
if GIT_COMMIT: if GIT_COMMIT:
pandaversion_h += "\n#define PANDA_GIT_COMMIT_STR \"%s\"\n" % (GIT_COMMIT) pandaversion_h += "\n#define PANDA_GIT_COMMIT_STR \"%s\"\n" % (GIT_COMMIT)
# Allow creating a deterministic build by setting this.
source_date = os.environ.get("SOURCE_DATE_EPOCH")
if source_date:
# This matches the GCC / Clang format for __DATE__ __TIME__
source_date = time.gmtime(int(source_date))
source_date = time.strftime('%b %e %Y %H:%M:%S', source_date)
pandaversion_h += "\n#define PANDA_BUILD_DATE_STR \"%s\"\n" % (source_date)
checkpandaversion_cxx = CHECKPANDAVERSION_CXX.replace("$VERSION1",str(version1)) checkpandaversion_cxx = CHECKPANDAVERSION_CXX.replace("$VERSION1",str(version1))
checkpandaversion_cxx = checkpandaversion_cxx.replace("$VERSION2",str(version2)) checkpandaversion_cxx = checkpandaversion_cxx.replace("$VERSION2",str(version2))
checkpandaversion_cxx = checkpandaversion_cxx.replace("$VERSION3",str(version3)) checkpandaversion_cxx = checkpandaversion_cxx.replace("$VERSION3",str(version3))

View File

@ -57,6 +57,7 @@ string dont_compress_str = "jpg,png,mp3,ogg";
// Default text extensions. May be overridden with -X. // Default text extensions. May be overridden with -X.
string text_ext_str = "txt"; string text_ext_str = "txt";
time_t source_date_epoch = (time_t)-1;
bool got_record_timestamp_flag = false; bool got_record_timestamp_flag = false;
bool record_timestamp_flag = true; bool record_timestamp_flag = true;
@ -430,6 +431,12 @@ add_files(const vector_string &params) {
needs_repack = true; needs_repack = true;
} }
if (multifile->get_record_timestamp() && source_date_epoch != (time_t)-1) {
if (multifile->get_timestamp() > source_date_epoch) {
multifile->set_timestamp(source_date_epoch);
}
}
if (needs_repack) { if (needs_repack) {
if (!multifile->repack()) { if (!multifile->repack()) {
cerr << "Failed to write " << multifile_name << ".\n"; cerr << "Failed to write " << multifile_name << ".\n";
@ -533,6 +540,12 @@ kill_files(const vector_string &params) {
} }
} }
if (multifile->get_record_timestamp() && source_date_epoch != (time_t)-1) {
if (multifile->get_timestamp() > source_date_epoch) {
multifile->set_timestamp(source_date_epoch);
}
}
bool okflag = true; bool okflag = true;
if (multifile->needs_repack()) { if (multifile->needs_repack()) {
@ -779,6 +792,11 @@ main(int argc, char **argv) {
} }
} }
const char *source_date_epoch_str = getenv("SOURCE_DATE_EPOCH");
if (source_date_epoch_str != nullptr && source_date_epoch_str[0] != 0) {
source_date_epoch = (time_t)strtoll(source_date_epoch_str, nullptr, 10);
}
extern char *optarg; extern char *optarg;
extern int optind; extern int optind;
static const char *optflags = "crutxkvz123456789Z:T:X:S:f:OC:ep:P:F:h"; static const char *optflags = "crutxkvz123456789Z:T:X:S:f:OC:ep:P:F:h";

View File

@ -67,6 +67,17 @@ get_timestamp() const {
return _timestamp; return _timestamp;
} }
/**
* Changes the overall modification timestamp of the multifile. Note that this
* will be reset to the current time every time you modify a subfile.
* Only set this if you know what you are doing!
*/
INLINE void Multifile::
set_timestamp(time_t timestamp) {
_timestamp = timestamp;
_timestamp_dirty = true;
}
/** /**
* Sets the flag indicating whether timestamps should be recorded within the * Sets the flag indicating whether timestamps should be recorded within the
* Multifile or not. The default is true, indicating the Multifile will * Multifile or not. The default is true, indicating the Multifile will

View File

@ -59,6 +59,7 @@ PUBLISHED:
INLINE bool needs_repack() const; INLINE bool needs_repack() const;
INLINE time_t get_timestamp() const; INLINE time_t get_timestamp() const;
INLINE void set_timestamp(time_t timestamp);
INLINE void set_record_timestamp(bool record_timestamp); INLINE void set_record_timestamp(bool record_timestamp);
INLINE bool get_record_timestamp() const; INLINE bool get_record_timestamp() const;

View File

@ -112,6 +112,10 @@ xform(const LMatrix4 &mat) {
// Transform the center // Transform the center
_center = _center * mat; _center = _center * mat;
if (cinf(_radius)) {
set_infinite();
}
} }
} }

View File

@ -201,14 +201,26 @@ write_man_page(std::ostream &out) {
// Generate a date string for inclusion into the footer. // Generate a date string for inclusion into the footer.
char date_str[256]; char date_str[256];
date_str[0] = 0; date_str[0] = 0;
time_t current_time = time(nullptr); time_t current_time;
tm *today = nullptr;
if (current_time != (time_t) -1) { // This variable overrides the time we write to the footer.
tm *today = localtime(&current_time); const char *source_date_epoch = getenv("SOURCE_DATE_EPOCH");
if (today == nullptr || 0 == strftime(date_str, 256, "%d %B %Y", today)) { if (source_date_epoch == nullptr || source_date_epoch[0] == 0 ||
date_str[0] = 0; (current_time = (time_t)strtoll(source_date_epoch, nullptr, 10)) <= 0) {
current_time = time(nullptr);
if (current_time != (time_t)-1) {
today = localtime(&current_time);
} }
} }
else {
// Format as UTC to avoid inconsistency being introduced due to timezones.
today = gmtime(&current_time);
}
if (today == nullptr || 0 == strftime(date_str, 256, "%d %B %Y", today)) {
date_str[0] = 0;
}
out << " 1 \"" << date_str << "\" \"" out << " 1 \"" << date_str << "\" \""
<< PandaSystem::get_version_string() << "\" Panda3D\n"; << PandaSystem::get_version_string() << "\" Panda3D\n";