use global task manager

David Rose 2008-09-29 17:29:49 +00:00
parent 75c51589e8
commit c8913d4b66
18 changed files with 221 additions and 51 deletions

View File

@ -20,9 +20,10 @@
////////////////////////////////////////////////////////////////////
INLINE BindAnimRequest::
BindAnimRequest(const Filename &filename, const LoaderOptions &options,
Loader *loader,
AnimControl *control, int hierarchy_match_flags,
const PartSubset &subset) :
ModelLoadRequest(filename, options),
ModelLoadRequest(filename, options, loader),
_control(control),
_hierarchy_match_flags(hierarchy_match_flags),
_subset(subset)

View File

@ -35,6 +35,7 @@ public:
PUBLISHED:
INLINE BindAnimRequest(const Filename &filename,
const LoaderOptions &options,
Loader *loader,
AnimControl *control,
int hierarchy_match_flags,
const PartSubset &subset);

View File

@ -363,7 +363,7 @@ load_bind_anim(Loader *loader, const Filename &filename,
}
PT(BindAnimRequest) request =
new BindAnimRequest(filename, anim_options, control,
new BindAnimRequest(filename, anim_options, loader, control,
hierarchy_match_flags, subset);
loader->load_async(request);

View File

@ -503,6 +503,8 @@ remove_all_windows() {
// And, hey, let's stop the vertex paging threads, if any.
VertexDataPage::stop_threads();
AsyncTaskManager::get_global_ptr()->stop_threads();
#ifdef DO_PSTATS
PStatClient::get_global_pstats()->disconnect();
#endif
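
Since the Loader's worker threads now belong to the global AsyncTaskManager, shutting down all windows also stops that manager's threads, alongside the vertex paging threads. A minimal sketch of the same cleanup for code that bypasses the framework; the helper name and the header paths are assumptions, not part of this commit.

#include "vertexDataPage.h"
#include "asyncTaskManager.h"

// Hypothetical shutdown helper mirroring the order used in remove_all_windows().
static void stop_background_threads() {
  VertexDataPage::stop_threads();                      // vertex paging threads, if any
  AsyncTaskManager::get_global_ptr()->stop_threads();  // global task manager workers
}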

View File

@ -1345,26 +1345,35 @@ write_task_line(ostream &out, int indent_level, AsyncTask *task, double now) con
// For sleeping tasks, include the wake time, as an elapsed time
// in seconds.
string name = task->get_name().substr(0, 32);
sprintf(buffer, "%c%-32s %8.1f %8.1f %8.1f %8.1f %6d",
sprintf(buffer, "%c%-32s %8.1f",
servicing_flag, name.c_str(),
task->_wake_time - now,
task->_dt * 1000.0, task->get_average_dt() * 1000.0,
task->_max_dt * 1000.0,
task->_sort);
task->_wake_time - now);
} else {
// For active tasks, don't include a wake time. This means we
// have more space for the name.
string name = task->get_name().substr(0, 41);
sprintf(buffer, "%c%-41s %8.1f %8.1f %8.1f %6d",
servicing_flag, name.c_str(),
task->_dt * 1000.0, task->get_average_dt() * 1000.0,
task->_max_dt * 1000.0,
task->_sort);
sprintf(buffer, "%c%-41s",
servicing_flag, name.c_str());
}
nassertv(strlen(buffer) < buffer_size);
indent(out, indent_level)
<< buffer << "\n";
<< buffer;
if (task->_num_frames > 0) {
sprintf(buffer, " %8.1f %8.1f %8.1f %6d",
task->_dt * 1000.0, task->get_average_dt() * 1000.0,
task->_max_dt * 1000.0,
task->_sort);
} else {
// No statistics for a task that hasn't run yet.
sprintf(buffer, " %8s %8s %8s %6d",
"", "", "",
task->_sort);
}
nassertv(strlen(buffer) < buffer_size);
out << buffer << "\n";
}
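
The task line is now printed in two parts: the timing columns are filled in only for tasks that have run at least once, otherwise they are left blank and only the sort value is shown. A hedged sketch of producing this report through the manager's published write() method (declared in the header hunk further down); the helper name and the header paths are assumptions.

#include <iostream>
#include "asyncTaskManager.h"

// Dump the per-task report; each task line comes from write_task_line() above:
// name, wake time (sleeping tasks only), dt, average dt, max dt, and sort.
void dump_task_report() {
  AsyncTaskManager::get_global_ptr()->write(std::cout);
}
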
////////////////////////////////////////////////////////////////////

View File

@ -52,6 +52,21 @@ get_num_tasks() const {
return _num_tasks;
}
////////////////////////////////////////////////////////////////////
// Function: AsyncTaskManager::get_global_ptr
// Access: Published
// Description: Returns a pointer to the global AsyncTaskManager.
// This is the AsyncTaskManager that most code should
// use for queueing tasks and suchlike.
////////////////////////////////////////////////////////////////////
INLINE AsyncTaskManager *AsyncTaskManager::
get_global_ptr() {
if (_global_ptr == (AsyncTaskManager *)NULL) {
make_global_ptr();
}
return _global_ptr;
}
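
get_global_ptr() creates the singleton lazily on first use (see make_global_ptr() in the .cxx hunk below), so any code can queue work without arranging for a manager of its own. A short usage sketch; ModelFlattenRequest is taken from elsewhere in this commit, while the helper name, the node argument, and the header paths are illustrative assumptions.

#include "asyncTaskManager.h"
#include "modelFlattenRequest.h"

// Queue an existing AsyncTask subclass on the global manager.
void flatten_later(PandaNode *node) {
  PT(ModelFlattenRequest) request = new ModelFlattenRequest(node);
  AsyncTaskManager::get_global_ptr()->add(request);
}
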
////////////////////////////////////////////////////////////////////
// Function: AsyncTaskManager::add_task_by_name
// Access: Protected

View File

@ -25,6 +25,8 @@
#include "config_event.h"
#include <algorithm>
PT(AsyncTaskManager) AsyncTaskManager::_global_ptr;
TypeHandle AsyncTaskManager::_type_handle;
////////////////////////////////////////////////////////////////////
@ -673,3 +675,16 @@ do_output(ostream &out) const {
out << get_type() << " " << get_name()
<< "; " << _num_tasks << " tasks";
}
////////////////////////////////////////////////////////////////////
// Function: AsyncTaskManager::make_global_ptr
// Access: Private, Static
// Description: Called once per application to create the global
// task manager object.
////////////////////////////////////////////////////////////////////
void AsyncTaskManager::
make_global_ptr() {
nassertv(_global_ptr == (AsyncTaskManager *)NULL);
_global_ptr = new AsyncTaskManager("taskMgr");
}
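
The global manager is created on demand and named "taskMgr". Its existing published output() (see do_output() above) gives a one-line summary; a small sketch of calling it, with the printed text only indicative and the header names assumed.

#include <iostream>
#include "asyncTaskManager.h"

void print_task_manager_summary() {
  // Prints something like: AsyncTaskManager taskMgr; 2 tasks
  AsyncTaskManager::get_global_ptr()->output(std::cout);
  std::cout << std::endl;
}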

View File

@ -95,6 +95,8 @@ PUBLISHED:
virtual void output(ostream &out) const;
virtual void write(ostream &out, int indent_level = 0) const;
INLINE static AsyncTaskManager *get_global_ptr();
protected:
AsyncTaskChain *do_make_task_chain(const string &name);
AsyncTaskChain *do_find_task_chain(const string &name);
@ -106,6 +108,9 @@ protected:
virtual void do_output(ostream &out) const;
private:
static void make_global_ptr();
protected:
class AsyncTaskSortName {
public:
@ -129,6 +134,8 @@ protected:
ConditionVarFull _frame_cvar; // Signalled when the clock ticks.
static PT(AsyncTaskManager) _global_ptr;
public:
static TypeHandle get_class_type() {
return _type_handle;

View File

@ -22,6 +22,7 @@
INLINE TextureReloadRequest::
TextureReloadRequest(PreparedGraphicsObjects *pgo, Texture *texture,
bool allow_compressed) :
AsyncTask(texture->get_name()),
_pgo(pgo),
_texture(texture),
_allow_compressed(allow_compressed),

View File

@ -108,6 +108,76 @@ add_file(const Filename &file, LoaderFileType *type) {
_files.push_back(cf);
}
////////////////////////////////////////////////////////////////////
// Function: Loader::set_task_manager
// Access: Published
// Description: Specifies the task manager that is used for
// asynchronous loads. The default is the global task
// manager.
////////////////////////////////////////////////////////////////////
INLINE void Loader::
set_task_manager(AsyncTaskManager *task_manager) {
_task_manager = task_manager;
}
////////////////////////////////////////////////////////////////////
// Function: Loader::get_task_manager
// Access: Published
// Description: Returns the task manager that is used for
// asynchronous loads.
////////////////////////////////////////////////////////////////////
INLINE AsyncTaskManager *Loader::
get_task_manager() const {
return _task_manager;
}
////////////////////////////////////////////////////////////////////
// Function: Loader::set_task_chain
// Access: Published
// Description: Specifies the task chain that is used for
// asynchronous loads. The default is the initial name
// of the Loader object.
////////////////////////////////////////////////////////////////////
INLINE void Loader::
set_task_chain(const string &task_chain) {
_task_chain = task_chain;
}
////////////////////////////////////////////////////////////////////
// Function: Loader::get_task_chain
// Access: Published
// Description: Returns the task chain that is used for
// asynchronous loads.
////////////////////////////////////////////////////////////////////
INLINE const string &Loader::
get_task_chain() const {
return _task_chain;
}
////////////////////////////////////////////////////////////////////
// Function: Loader::stop_threads
// Access: Published
// Description: Stop any threads used for asynchronous loads.
////////////////////////////////////////////////////////////////////
INLINE void Loader::
stop_threads() {
PT(AsyncTaskChain) chain = _task_manager->find_task_chain(_task_chain);
if (chain != (AsyncTaskChain *)NULL) {
chain->stop_threads();
}
}
////////////////////////////////////////////////////////////////////
// Function: Loader::remove
// Access: Published
// Description: Removes a pending asynchronous load request. Returns
// true if successful, false otherwise.
////////////////////////////////////////////////////////////////////
INLINE bool Loader::
remove(AsyncTask *task) {
return _task_manager->remove(task);
}
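
With this commit the Loader no longer manages tasks itself; it hands requests to an AsyncTaskManager (the global one by default) and tags them with a named task chain. A sketch of pointing a Loader at a specific chain and resizing its thread pool using only the accessors introduced above; the function name, the chain name "model-io", and the header paths are made up for illustration.

#include "loader.h"
#include "asyncTaskManager.h"
#include "asyncTaskChain.h"

void configure_loader(Loader *loader) {
  AsyncTaskManager *mgr = AsyncTaskManager::get_global_ptr();
  loader->set_task_manager(mgr);
  loader->set_task_chain("model-io");

  // make_task_chain() returns the named chain, creating it if necessary;
  // Loader::output() in the .cxx hunk below fetches its chain the same way.
  PT(AsyncTaskChain) chain = mgr->make_task_chain(loader->get_task_chain());
  chain->set_num_threads(2);
}
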
////////////////////////////////////////////////////////////////////
// Function: Loader::load_sync
// Access: Published
@ -141,16 +211,23 @@ load_sync(const Filename &filename, const LoaderOptions &options) const {
// or set the done_event on the request object and
// listen for that event. When the model is ready, you
// may retrieve it via request->get_model().
//
// If threading support is not enabled, or the Loader
// was created with 0 threads (that is,
// get_num_threads() returns 0), then this will be the
// same as a load_sync() call: the model will be loaded
// within the current thread, and this method will not
// return until the model has fully loaded.
////////////////////////////////////////////////////////////////////
INLINE void Loader::
load_async(AsyncTask *request) {
add(request);
poll();
request->set_task_chain(_task_chain);
_task_manager->add(request);
}
////////////////////////////////////////////////////////////////////
// Function: Loader::get_global_ptr
// Access: Published
// Description: Returns a pointer to the global Loader. This is the
// Loader that most code should use for loading models.
////////////////////////////////////////////////////////////////////
INLINE Loader *Loader::
get_global_ptr() {
if (_global_ptr == (Loader *)NULL) {
make_global_ptr();
}
return _global_ptr;
}
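
load_async() now just stamps the request with the Loader's task chain and adds it to the task manager, so callers poll the request itself (or wait for its done_event) rather than the Loader. A minimal sketch of the polling pattern described in the comment above, using the global Loader; the function names and the filename are illustrative only.

#include "loader.h"
#include "modelLoadRequest.h"

PT(ModelLoadRequest) begin_async_load() {
  Loader *loader = Loader::get_global_ptr();
  PT(ModelLoadRequest) request =
    new ModelLoadRequest(Filename("models/environment.egg"),
                         LoaderOptions(), loader);
  loader->load_async(request);
  return request;
}

// Poll from time to time, e.g. once per frame.
PT(PandaNode) check_async_load(ModelLoadRequest *request) {
  if (request->is_ready()) {
    return request->get_model();
  }
  return NULL;   // still loading
}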

View File

@ -30,6 +30,7 @@
#include "bamFile.h"
bool Loader::_file_types_loaded = false;
PT(Loader) Loader::_global_ptr;
TypeHandle Loader::_type_handle;
////////////////////////////////////////////////////////////////////
@ -38,12 +39,14 @@ TypeHandle Loader::_type_handle;
// Description:
////////////////////////////////////////////////////////////////////
Loader::
Loader(const string &name, int num_threads) :
AsyncTaskManager(name)
Loader(const string &name) :
Namable(name)
{
PT(AsyncTaskChain) chain = make_task_chain("default");
if (num_threads < 0) {
// -1 means the default number of threads.
_task_manager = AsyncTaskManager::get_global_ptr();
_task_chain = name;
if (_task_manager->find_task_chain(_task_chain) == NULL) {
PT(AsyncTaskChain) chain = _task_manager->make_task_chain(_task_chain);
ConfigVariableInt loader_num_threads
("loader-num-threads", 1,
@ -55,10 +58,8 @@ Loader(const string &name, int num_threads) :
"asychronous thread. You can set this higher, particularly if "
"you have many CPU's available, to allow loading multiple models "
"simultaneously."));
chain->set_num_threads(loader_num_threads);
} else {
chain->set_num_threads(num_threads);
}
}
@ -87,7 +88,7 @@ void Loader::
output(ostream &out) const {
out << get_type() << " " << get_name();
int num_tasks = get_num_tasks();
int num_tasks = _task_manager->make_task_chain(_task_chain)->get_num_tasks();
if (num_tasks != 0) {
out << " (" << num_tasks << " models pending)";
}
@ -356,3 +357,16 @@ load_file_types() {
}
}
////////////////////////////////////////////////////////////////////
// Function: Loader::make_global_ptr
// Access: Private, Static
// Description: Called once per application to create the global
// loader object.
////////////////////////////////////////////////////////////////////
void Loader::
make_global_ptr() {
nassertv(_global_ptr == (Loader *)NULL);
_global_ptr = new Loader("taskMgr");
}
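
The constructor now routes everything through the global task manager: it looks up (or creates) a chain named after the Loader and sizes it from the loader-num-threads config variable. A short sketch of querying that chain for pending loads, mirroring the updated Loader::output() above; the function name and header paths are hypothetical.

#include "loader.h"
#include "asyncTaskChain.h"

int count_pending_loads(Loader *loader) {
  AsyncTaskChain *chain =
    loader->get_task_manager()->make_task_chain(loader->get_task_chain());
  return chain->get_num_tasks();
}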

View File

@ -44,7 +44,7 @@ class LoaderFileType;
// loading interface may be used, but it loads
// synchronously.
////////////////////////////////////////////////////////////////////
class EXPCL_PANDA_PGRAPH Loader : public AsyncTaskManager {
class EXPCL_PANDA_PGRAPH Loader : public TypedReferenceCount, public Namable {
private:
class ConsiderFile {
public:
@ -73,7 +73,15 @@ PUBLISHED:
Files _files;
};
Loader(const string &name = "loader", int num_threads = -1);
Loader(const string &name = "loader");
INLINE void set_task_manager(AsyncTaskManager *task_manager);
INLINE AsyncTaskManager *get_task_manager() const;
INLINE void set_task_chain(const string &task_chain);
INLINE const string &get_task_chain() const;
BLOCKING INLINE void stop_threads();
INLINE bool remove(AsyncTask *task);
BLOCKING INLINE PT(PandaNode) load_sync(const Filename &filename,
const LoaderOptions &options = LoaderOptions()) const;
@ -84,21 +92,33 @@ PUBLISHED:
virtual void output(ostream &out) const;
INLINE static Loader *get_global_ptr();
private:
PT(PandaNode) load_file(const Filename &filename, const LoaderOptions &options) const;
PT(PandaNode) try_load_file(const Filename &pathname, const LoaderOptions &options,
LoaderFileType *requested_type) const;
static void make_global_ptr();
PT(AsyncTaskManager) _task_manager;
string _task_chain;
static void load_file_types();
static bool _file_types_loaded;
static PT(Loader) _global_ptr;
public:
static TypeHandle get_class_type() {
return _type_handle;
}
static void init_type() {
AsyncTaskManager::init_type();
TypedReferenceCount::init_type();
Namable::init_type();
register_type(_type_handle, "Loader",
AsyncTaskManager::get_class_type());
TypedReferenceCount::get_class_type(),
Namable::get_class_type());
}
virtual TypeHandle get_type() const {
return get_class_type();

View File

@ -21,6 +21,7 @@
////////////////////////////////////////////////////////////////////
INLINE ModelFlattenRequest::
ModelFlattenRequest(PandaNode *orig) :
AsyncTask(orig->get_name()),
_orig(orig),
_is_ready(false)
{

View File

@ -20,9 +20,12 @@
// via load_async(), to begin an asynchronous load.
////////////////////////////////////////////////////////////////////
INLINE ModelLoadRequest::
ModelLoadRequest(const Filename &filename, const LoaderOptions &options) :
ModelLoadRequest(const Filename &filename, const LoaderOptions &options,
Loader *loader) :
AsyncTask(filename.get_basename()),
_filename(filename),
_options(options),
_loader(loader),
_is_ready(false)
{
}
@ -49,6 +52,17 @@ get_options() const {
return _options;
}
////////////////////////////////////////////////////////////////////
// Function: ModelLoadRequest::get_loader
// Access: Published
// Description: Returns the Loader object associated with this
// asynchronous ModelLoadRequest.
////////////////////////////////////////////////////////////////////
INLINE Loader *ModelLoadRequest::
get_loader() const {
return _loader;
}
////////////////////////////////////////////////////////////////////
// Function: ModelLoadRequest::is_ready
// Access: Published

View File

@ -30,10 +30,7 @@ do_task() {
Thread::sleep(delay);
}
Loader *loader;
DCAST_INTO_R(loader, _manager, DS_done);
_model = loader->load_sync(_filename, _options);
_model = _loader->load_sync(_filename, _options);
_is_ready = true;
// Don't continue the task; we're done.
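
do_task() now uses the Loader pointer stored on the request instead of DCASTing _manager, which is what lets the request run on the shared global manager rather than requiring its manager to be a Loader. A hedged sketch of the same pattern for a custom request type; the class, its members, and the headers are illustrative, and the do_task() signature is assumed to match the one ModelLoadRequest overrides.

#include "asyncTask.h"
#include "filename.h"
#include "loader.h"
#include "pandaNode.h"
#include "pointerTo.h"

// Illustrative only: a request that carries its own Loader, so it works no
// matter which AsyncTaskManager it is queued on.
class SceneryLoadRequest : public AsyncTask {
public:
  SceneryLoadRequest(const Filename &filename, Loader *loader) :
    AsyncTask(filename.get_basename()),
    _filename(filename),
    _loader(loader)
  {
  }

protected:
  virtual DoneStatus do_task() {
    _model = _loader->load_sync(_filename);
    // Don't continue the task; we're done.
    return DS_done;
  }

private:
  Filename _filename;
  PT(Loader) _loader;
  PT(PandaNode) _model;
};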

View File

@ -22,6 +22,7 @@
#include "loaderOptions.h"
#include "pandaNode.h"
#include "pointerTo.h"
#include "loader.h"
////////////////////////////////////////////////////////////////////
// Class : ModelLoadRequest
@ -36,10 +37,12 @@ public:
PUBLISHED:
INLINE ModelLoadRequest(const Filename &filename,
const LoaderOptions &options);
const LoaderOptions &options,
Loader *loader);
INLINE const Filename &get_filename() const;
INLINE const LoaderOptions &get_options() const;
INLINE Loader *get_loader() const;
INLINE bool is_ready() const;
INLINE PandaNode *get_model() const;
@ -50,6 +53,7 @@ protected:
private:
Filename _filename;
LoaderOptions _options;
PT(Loader) _loader;
bool _is_ready;
PT(PandaNode) _model;

View File

@ -20,8 +20,6 @@
ModelPool *ModelPool::_global_ptr = (ModelPool *)NULL;
static Loader *model_loader = NULL;
////////////////////////////////////////////////////////////////////
// Function: ModelPool::write
// Access: Published, Static
@ -73,9 +71,7 @@ ns_load_model(const string &filename, const LoaderOptions &options) {
new_options.set_flags((new_options.get_flags() | LoaderOptions::LF_no_ram_cache) &
~(LoaderOptions::LF_search | LoaderOptions::LF_report_errors));
if (model_loader == (Loader *)NULL) {
model_loader = new Loader("ModelPool", 0);
}
Loader *model_loader = Loader::get_global_ptr();
PT(PandaNode) panda_node = model_loader->load_sync(filename, new_options);
PT(ModelRoot) node;

View File

@ -24,8 +24,6 @@
FontPool *FontPool::_global_ptr = (FontPool *)NULL;
static Loader *model_loader = NULL;
////////////////////////////////////////////////////////////////////
// Function: FontPool::write
// Access: Published, Static
@ -94,9 +92,7 @@ ns_load_font(const string &str) {
string extension = filename.get_extension();
if (extension.empty() || extension == "egg" || extension == "bam") {
if (model_loader == (Loader *)NULL) {
model_loader = new Loader("FontPool", 0);
}
Loader *model_loader = Loader::get_global_ptr();
PT(PandaNode) node = model_loader->load_sync(filename);
if (node != (PandaNode *)NULL) {
// It is a model. Elevate all the priorities by 1, and make a