Add movies-sync-pages

This commit is contained in:
David Rose 2011-12-01 01:27:55 +00:00
parent d1c257fba9
commit 6eb257bb8a
10 changed files with 468 additions and 253 deletions

View File

@ -84,6 +84,17 @@ ConfigVariableInt pfm_vis_max_indices
"a single generated mesh. If the mesh would require more than that, "
"the mesh is subdivided into smaller pieces."));
ConfigVariableBool movies_sync_pages
("movies-sync-pages", true,
PRC_DESC("Set this true to force multi-page MovieTextures to hold pages "
"back if necessary until all pages are ready to render at once, "
"so that the multiple pages of a single movie are always in sync "
"with each other. Set this false to allow individual pages to be "
"visible as soon as they come available, which means pages might "
"sometimes be out of sync. This only affects multi-page MovieTextures "
"such as cube maps, 3-d textures, or stereo textures, or textures "
"with separate color and alpha channel movie sources."));
////////////////////////////////////////////////////////////////////
// Function: init_libgrutil
// Description: Initializes the library. This must be called at

View File

@ -40,6 +40,8 @@ extern ConfigVariableBool pfm_reverse_dimensions;
extern ConfigVariableInt pfm_vis_max_vertices;
extern ConfigVariableInt pfm_vis_max_indices;
extern ConfigVariableBool movies_sync_pages;
extern EXPCL_PANDA_GRUTIL void init_libgrutil();
#endif

View File

@ -68,7 +68,8 @@ CData() :
_clock(0.0),
_playing(false),
_loop_count(1),
_play_rate(1.0)
_play_rate(1.0),
_has_offset(false)
{
}
@ -86,7 +87,8 @@ CData(const CData &copy) :
_clock(0.0),
_playing(false),
_loop_count(1),
_play_rate(1.0)
_play_rate(1.0),
_has_offset(false)
{
}
@ -342,38 +344,60 @@ bool MovieTexture::
cull_callback(CullTraverser *, const CullTraverserData &) const {
Texture::CDReader cdata_tex(Texture::_cycler);
CDReader cdata(_cycler);
double offset;
int true_loop_count = 1;
if (cdata->_synchronize != 0) {
offset = cdata->_synchronize->get_time();
} else {
// Calculate the cursor position modulo the length of the movie.
double now = ClockObject::get_global_clock()->get_frame_time();
offset = cdata->_clock;
if (cdata->_playing) {
offset += now * cdata->_play_rate;
if (!cdata->_has_offset) {
// If we don't have a previously-computed timestamp (offset)
// cached, then compute a new one.
double offset;
int true_loop_count = 1;
if (cdata->_synchronize != 0) {
offset = cdata->_synchronize->get_time();
} else {
// Calculate the cursor position modulo the length of the movie.
double now = ClockObject::get_global_clock()->get_frame_time();
offset = cdata->_clock;
if (cdata->_playing) {
offset += now * cdata->_play_rate;
}
true_loop_count = cdata->_loop_count;
}
true_loop_count = cdata->_loop_count;
((CData *)cdata.p())->_offset = offset;
((CData *)cdata.p())->_true_loop_count = true_loop_count;
((CData *)cdata.p())->_has_offset = true;
}
for (int i=0; i<((int)(cdata->_pages.size())); i++) {
MovieVideoCursor *color = cdata->_pages[i]._color;
MovieVideoCursor *alpha = cdata->_pages[i]._alpha;
if (color && alpha) {
if (color->set_time(offset, true_loop_count)) {
color->fetch_into_texture_rgb((MovieTexture*)this, i);
}
if (alpha->set_time(offset, true_loop_count)) {
alpha->fetch_into_texture_alpha((MovieTexture*)this, i, cdata_tex->_alpha_file_channel);
}
} else if (color) {
bool result = color->set_time(offset, true_loop_count);
if (result) {
color->fetch_into_texture((MovieTexture*)this, i);
bool in_sync = do_update_frames(cdata);
if (!in_sync) {
// If it didn't successfully sync, try again--once. The second
// time it might be able to fill in some more recent frames.
in_sync = do_update_frames(cdata);
}
if (in_sync) {
// Now go back through and apply all the frames to the texture.
Pages::const_iterator pi;
for (pi = cdata->_pages.begin(); pi != cdata->_pages.end(); ++pi) {
const VideoPage &page = (*pi);
MovieVideoCursor *color = page._color;
MovieVideoCursor *alpha = page._alpha;
size_t i = pi - cdata->_pages.begin();
if (color != NULL && alpha != NULL) {
color->apply_to_texture_rgb(page._cbuffer, (MovieTexture*)this, i);
alpha->apply_to_texture_alpha(page._abuffer, (MovieTexture*)this, i, cdata_tex->_alpha_file_channel);
} else if (color != NULL) {
color->apply_to_texture(page._cbuffer, (MovieTexture*)this, i);
}
((VideoPage &)page)._cbuffer.clear();
((VideoPage &)page)._abuffer.clear();
}
// Clear the cached offset so we can update the frame next time.
((CData *)cdata.p())->_has_offset = false;
}
return true;
}
@ -693,6 +717,134 @@ unsynchronize() {
cdata->_synchronize = 0;
}
////////////////////////////////////////////////////////////////////
// Function: MovieTexture::do_update_frames
// Access: Private
// Description: Called internally to sync all of the frames to the
// current time. Returns true if successful, or false
// if some of the frames are out of date with each
// other.
////////////////////////////////////////////////////////////////////
bool MovieTexture::
do_update_frames(const CData *cdata) const {
// Throughout this method, we cast the VideoPage to non-const to
// update the _cbuffer or _abuffer member. We can do this safely
// because this is only a transparent cache value.
nassertr(cdata->_has_offset, false);
// First, go through and get all of the current frames.
Pages::const_iterator pi;
for (pi = cdata->_pages.begin(); pi != cdata->_pages.end(); ++pi) {
const VideoPage &page = (*pi);
MovieVideoCursor *color = page._color;
MovieVideoCursor *alpha = page._alpha;
if (color != NULL && page._cbuffer == NULL) {
if (color->set_time(cdata->_offset, cdata->_true_loop_count)) {
((VideoPage &)page)._cbuffer = color->fetch_buffer();
}
}
if (alpha != NULL && page._abuffer == NULL) {
if (alpha->set_time(cdata->_offset, cdata->_true_loop_count)) {
((VideoPage &)page)._abuffer = alpha->fetch_buffer();
}
}
}
if (!movies_sync_pages) {
// If movies-sync-pages is configured off, we don't care about
// syncing the pages, and we always return true here to render the
// pages we've got.
return true;
}
// Now make sure all of the frames are in sync with each other.
bool in_sync = true;
bool any_frames = false;
bool any_dropped = false;
PT(MovieVideoCursor::Buffer) newest;
for (pi = cdata->_pages.begin(); pi != cdata->_pages.end(); ++pi) {
const VideoPage &page = (*pi);
if (page._cbuffer == NULL) {
if (page._color != NULL) {
// This page isn't ready at all.
in_sync = false;
}
} else {
nassertr(page._color != NULL, true);
any_frames = true;
if (newest == NULL) {
newest = page._cbuffer;
} else {
int ref = newest->compare_timestamp(page._cbuffer);
if (ref != 0) {
// This page is ready, but out-of-date.
in_sync = false;
any_dropped = true;
if (ref < 0) {
newest = page._cbuffer;
}
}
}
}
if (page._abuffer == NULL) {
if (page._alpha != NULL) {
in_sync = false;
}
} else {
nassertr(page._alpha != NULL, true);
any_frames = true;
if (newest == NULL) {
newest = page._abuffer;
} else {
int ref = newest->compare_timestamp(page._abuffer);
if (ref != 0) {
in_sync = false;
any_dropped = true;
if (ref < 0) {
newest = page._abuffer;
}
}
}
}
}
if (!any_frames) {
// If no frames at all are ready yet, just carry on.
return true;
}
if (!in_sync) {
// If we're not in sync, throw away pages that are older than the
// newest available frame.
if (newest != NULL) {
Pages::const_iterator pi;
for (pi = cdata->_pages.begin(); pi != cdata->_pages.end(); ++pi) {
const VideoPage &page = (*pi);
if (page._cbuffer != NULL && newest->compare_timestamp(page._cbuffer) > 0) {
((VideoPage &)page)._cbuffer.clear();
any_dropped = true;
}
if (page._abuffer != NULL && newest->compare_timestamp(page._abuffer) > 0) {
((VideoPage &)page)._abuffer.clear();
any_dropped = true;
}
}
if (any_dropped) {
// If we dropped one or more frames for being out-of-sync,
// implying that compare_timestamp() is implemented, then we
// also want to update our internal offset value so that
// future frames will get the same value.
((CData *)cdata)->_offset = newest->get_timestamp();
}
}
}
return in_sync;
}
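To make the dropping rule concrete, here is a small hypothetical helper (not part of this change) that applies the same newest-wins comparison to a set of fetched page buffers; the pages counted as stale here are the ones do_update_frames() clears so they can be refetched on the next pass:

#include "movieVideoCursor.h"
#include "pointerTo.h"
#include "pvector.h"

int count_stale_pages(const pvector< PT(MovieVideoCursor::Buffer) > &buffers) {
  // First pass: find the newest buffer among the pages that have one.
  MovieVideoCursor::Buffer *newest = NULL;
  for (size_t i = 0; i < buffers.size(); ++i) {
    if (buffers[i] != NULL &&
        (newest == NULL || newest->compare_timestamp(buffers[i]) < 0)) {
      newest = buffers[i];
    }
  }
  // Second pass: any buffer strictly older than the newest one is stale.
  int stale = 0;
  for (size_t i = 0; i < buffers.size(); ++i) {
    if (buffers[i] != NULL && newest->compare_timestamp(buffers[i]) > 0) {
      ++stale;
    }
  }
  return stale;
}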
////////////////////////////////////////////////////////////////////
// Function: MovieTexture::register_with_read_factory
// Access: Public, Static

View File

@ -101,6 +101,10 @@ protected:
public:
PT(MovieVideoCursor) _color;
PT(MovieVideoCursor) _alpha;
// The current (but not yet applied) frame for each video.
PT(MovieVideoCursor::Buffer) _cbuffer;
PT(MovieVideoCursor::Buffer) _abuffer;
};
typedef pvector<VideoPage> Pages;
@ -124,6 +128,12 @@ protected:
int _loop_count;
double _play_rate;
PT(AudioSound) _synchronize;
// The remaining values represent a local cache only; they are not
// preserved through the pipeline.
bool _has_offset;
double _offset;
int _true_loop_count;
};
PipelineCycler<CData> _cycler;
@ -133,6 +143,9 @@ protected:
void do_recalculate_image_properties(CData *cdata, Texture::CData *cdata_tex,
const LoaderOptions &options);
private:
bool do_update_frames(const CData *cdata) const;
public:
static void register_with_read_factory();

View File

@ -19,9 +19,10 @@
// Description:
////////////////////////////////////////////////////////////////////
INLINE FfmpegVideoCursor::FfmpegBuffer::
FfmpegBuffer(size_t block_size) :
FfmpegBuffer(size_t block_size, double video_timebase) :
Buffer(block_size),
_begin_frame(-1),
_end_frame(0)
_end_frame(0),
_video_timebase(video_timebase)
{
}

View File

@ -30,6 +30,7 @@ extern "C" {
ReMutex FfmpegVideoCursor::_av_lock;
TypeHandle FfmpegVideoCursor::_type_handle;
TypeHandle FfmpegVideoCursor::FfmpegBuffer::_type_handle;
PStatCollector FfmpegVideoCursor::_fetch_buffer_pcollector("*:FFMPEG Video Decoding:Fetch");
PStatCollector FfmpegVideoCursor::_seek_pcollector("*:FFMPEG Video Decoding:Seek");
@ -308,7 +309,6 @@ stop_thread() {
MutexHolder holder(_lock);
_readahead_frames.clear();
_recycled_frames.clear();
}
////////////////////////////////////////////////////////////////////
@ -329,8 +329,8 @@ is_thread_started() const {
// Description: See MovieVideoCursor::set_time().
////////////////////////////////////////////////////////////////////
bool FfmpegVideoCursor::
set_time(double time, int loop_count) {
int frame = (int)(time / _video_timebase + 0.5);
set_time(double timestamp, int loop_count) {
int frame = (int)(timestamp / _video_timebase + 0.5);
if (_eof_known) {
if (loop_count == 0) {
@ -403,7 +403,6 @@ fetch_buffer() {
<< " at frame " << _current_frame << ", discarding frame at "
<< frame->_begin_frame << "\n";
}
do_recycle_frame(frame);
frame = _readahead_frames.front();
_readahead_frames.pop_front();
}
@ -416,7 +415,7 @@ fetch_buffer() {
<< " at frame " << _current_frame << ", encountered too-new frame at "
<< frame->_begin_frame << "\n";
}
do_recycle_all_frames();
do_clear_all_frames();
if (_thread_status == TS_wait || _thread_status == TS_seek || _thread_status == TS_readahead) {
_thread_status = TS_seek;
_seek_frame = _current_frame;
@ -439,15 +438,11 @@ fetch_buffer() {
bool too_new = frame->_begin_frame > _current_frame;
if (too_old || too_new) {
// The frame is too old or too new. Just recycle it.
do_recycle_frame(frame);
frame = NULL;
}
}
if (frame != NULL) {
if (_current_frame_buffer != NULL) {
do_recycle_frame(_current_frame_buffer);
}
_current_frame_buffer = frame;
if (ffmpeg_cat.is_debug()) {
ffmpeg_cat.debug()
@ -465,17 +460,6 @@ fetch_buffer() {
return frame.p();
}
////////////////////////////////////////////////////////////////////
// Function: FfmpegVideoCursor::release_buffer
// Access: Public, Virtual
// Description: Should be called after processing the Buffer object
// returned by fetch_buffer(), this releases the Buffer
// for future use again.
////////////////////////////////////////////////////////////////////
void FfmpegVideoCursor::
release_buffer(Buffer *buffer) {
}
////////////////////////////////////////////////////////////////////
// Function: FfmpegVideoCursor::make_new_buffer
// Access: Protected, Virtual
@ -484,7 +468,7 @@ release_buffer(Buffer *buffer) {
////////////////////////////////////////////////////////////////////
PT(MovieVideoCursor::Buffer) FfmpegVideoCursor::
make_new_buffer() {
PT(FfmpegBuffer) frame = new FfmpegBuffer(size_x() * size_y() * get_num_components());
PT(FfmpegBuffer) frame = new FfmpegBuffer(size_x() * size_y() * get_num_components(), _video_timebase);
return frame.p();
}
@ -683,8 +667,10 @@ thread_main() {
while (do_poll()) {
// Keep doing stuff as long as there's something to do.
_lock.release();
PStatClient::thread_tick(_sync_name);
Thread::consider_yield();
_lock.acquire();
}
}
@ -730,7 +716,6 @@ do_poll() {
} else {
// No frame.
_lock.acquire();
do_recycle_frame(frame);
}
return true;
}
@ -750,12 +735,11 @@ do_poll() {
if (_frame_ready) {
export_frame(frame);
_lock.acquire();
do_recycle_all_frames();
do_clear_all_frames();
_readahead_frames.push_back(frame);
} else {
_lock.acquire();
do_recycle_all_frames();
do_recycle_frame(frame);
do_clear_all_frames();
}
if (_thread_status == TS_seeking) {
@ -776,50 +760,24 @@ do_poll() {
////////////////////////////////////////////////////////////////////
// Function: FfmpegVideoCursor::do_alloc_frame
// Access: Private
// Description: Allocates a new Buffer object, or returns a
// previously-recycled object. Assumes the lock is
// Description: Allocates a new Buffer object. Assumes the lock is
// held.
////////////////////////////////////////////////////////////////////
PT(FfmpegVideoCursor::FfmpegBuffer) FfmpegVideoCursor::
do_alloc_frame() {
if (!_recycled_frames.empty()) {
PT(FfmpegBuffer) frame = _recycled_frames.front();
_recycled_frames.pop_front();
return frame;
}
PT(Buffer) buffer = make_new_buffer();
return (FfmpegBuffer *)buffer.p();
}
////////////////////////////////////////////////////////////////////
// Function: FfmpegVideoCursor::do_recycle_frame
// Function: FfmpegVideoCursor::do_clear_all_frames
// Access: Private
// Description: Recycles a previously-allocated Buffer object for
// future reuse. Assumes the lock is held.
// Description: Empties the entire readahead_frames queue.
// Assumes the lock is held.
////////////////////////////////////////////////////////////////////
void FfmpegVideoCursor::
do_recycle_frame(FfmpegBuffer *frame) {
_recycled_frames.push_back(frame);
}
////////////////////////////////////////////////////////////////////
// Function: FfmpegVideoCursor::do_recycle_all_frames
// Access: Private
// Description: Empties the entire readahead_frames queue into the
// recycle bin. Assumes the lock is held.
////////////////////////////////////////////////////////////////////
void FfmpegVideoCursor::
do_recycle_all_frames() {
while (!_readahead_frames.empty()) {
PT(FfmpegBuffer) frame = _readahead_frames.front();
_readahead_frames.pop_front();
if (ffmpeg_cat.is_spam()) {
ffmpeg_cat.spam()
<< "ffmpeg for " << _filename.get_basename()
<< " recycling frame at " << frame->_begin_frame << "\n";
}
_recycled_frames.push_back(frame);
}
do_clear_all_frames() {
_readahead_frames.clear();
}
////////////////////////////////////////////////////////////////////
@ -1331,4 +1289,43 @@ fillin(DatagramIterator &scan, BamReader *manager) {
manager->register_finalize(this);
}
////////////////////////////////////////////////////////////////////
// Function: FfmpegVideoCursor::FfmpegBuffer::compare_timestamp
// Access: Published, Virtual
// Description: Used to sort different buffers to ensure they
// correspond to the same source frame, particularly
// important when synchronizing the different pages of a
// multi-page texture.
//
// Returns 0 if the two buffers are of the same frame,
// <0 if this one comes earlier than the other one, and
// >0 if the other one comes earlier.
////////////////////////////////////////////////////////////////////
int FfmpegVideoCursor::FfmpegBuffer::
compare_timestamp(const Buffer *other) const {
const FfmpegBuffer *fother;
DCAST_INTO_R(fother, other, 0);
if (_end_frame * _video_timebase <= fother->_begin_frame * fother->_video_timebase) {
return -1;
} else if (_begin_frame * _video_timebase >= fother->_end_frame * fother->_video_timebase) {
return 1;
}
return 0;
}
////////////////////////////////////////////////////////////////////
// Function: FfmpegVideoCursor::FfmpegBuffer::get_timestamp
// Access: Published, Virtual
// Description: Returns the nearest timestamp value of this
// particular buffer. Ideally,
// MovieVideoCursor::set_time() for this timestamp would
// return this buffer again. This need be defined only
// if compare_timestamp() is also defined.
////////////////////////////////////////////////////////////////////
double FfmpegVideoCursor::FfmpegBuffer::
get_timestamp() const {
int mid_frame = (_begin_frame + _end_frame - 1) / 2;
return mid_frame * _video_timebase;
}
#endif // HAVE_FFMPEG
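A worked example of the arithmetic above, using hypothetical values:

// Suppose a buffer covers source frames [30, 32) (that is, frames 30 and 31)
// at a timebase of 1/30 s:
//
//   get_timestamp():  mid_frame = (30 + 32 - 1) / 2 = 30,
//                     so the reported timestamp is 30 * (1/30) = 1.0 s.
//
//   compare_timestamp() against a later buffer covering [32, 34) with the
//   same timebase:  _end_frame * tb = 32/30 <= other->_begin_frame * tb = 32/30,
//   so this buffer sorts earlier and the method returns -1.
//
// Two buffers whose frame ranges overlap in time compare as equal (return 0),
// which is what allows pages decoded from separate color and alpha movie
// sources, possibly with different timebases, to be treated as the same frame.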

View File

@ -61,16 +61,37 @@ PUBLISHED:
bool is_thread_started() const;
public:
virtual bool set_time(double time, int loop_count);
virtual bool set_time(double timestamp, int loop_count);
virtual PT(Buffer) fetch_buffer();
virtual void release_buffer(Buffer *buffer);
protected:
class FfmpegBuffer : public Buffer {
public:
INLINE FfmpegBuffer(size_t block_size);
ALLOC_DELETED_CHAIN(FfmpegBuffer);
INLINE FfmpegBuffer(size_t block_size, double video_timebase);
virtual int compare_timestamp(const Buffer *other) const;
virtual double get_timestamp() const;
int _begin_frame;
int _end_frame;
double _video_timebase;
public:
static TypeHandle get_class_type() {
return _type_handle;
}
static void init_type() {
Buffer::init_type();
register_type(_type_handle, "FfmpegVideoCursor::FfmpegBuffer",
Buffer::get_class_type());
}
virtual TypeHandle get_type() const {
return get_class_type();
}
virtual TypeHandle force_init_type() {init_type(); return get_class_type();}
private:
static TypeHandle _type_handle;
};
virtual PT(Buffer) make_new_buffer();
@ -89,8 +110,8 @@ private:
// This global Mutex protects calls to avcodec_open/close/etc.
static ReMutex _av_lock;
// Protects _readahead_frames, _recycled_buffers, and all the
// immediately following members.
// Protects _readahead_frames and all the immediately following
// members.
Mutex _lock;
// Condition: the thread has something to do.
@ -98,7 +119,6 @@ private:
typedef pdeque<PT(FfmpegBuffer) > Buffers;
Buffers _readahead_frames;
Buffers _recycled_frames;
enum ThreadStatus {
TS_stopped,
TS_wait,
@ -120,8 +140,7 @@ private:
bool do_poll();
PT(FfmpegBuffer) do_alloc_frame();
void do_recycle_frame(FfmpegBuffer *frame);
void do_recycle_all_frames();
void do_clear_all_frames();
bool fetch_packet(int default_frame);
bool do_fetch_packet(int default_frame);
@ -179,6 +198,7 @@ public:
MovieVideoCursor::init_type();
register_type(_type_handle, "FfmpegVideoCursor",
MovieVideoCursor::get_class_type());
FfmpegBuffer::init_type();
}
virtual TypeHandle get_type() const {
return get_class_type();

View File

@ -163,25 +163,3 @@ ready() const {
return _ready;
}
////////////////////////////////////////////////////////////////////
// Function: MovieVideoCursor::Buffer::Constructor
// Access: Public
// Description:
////////////////////////////////////////////////////////////////////
INLINE MovieVideoCursor::Buffer::
Buffer(size_t block_size) :
_block_size(block_size)
{
_block = (unsigned char *)PANDA_MALLOC_ARRAY(_block_size);
}
////////////////////////////////////////////////////////////////////
// Function: MovieVideoCursor::Buffer::Destructor
// Access: Public, Virtual
// Description:
////////////////////////////////////////////////////////////////////
INLINE MovieVideoCursor::Buffer::
~Buffer() {
PANDA_FREE_ARRAY(_block);
}

View File

@ -19,7 +19,12 @@
#include "bamReader.h"
#include "bamWriter.h"
PStatCollector MovieVideoCursor::_copy_pcollector("*:Copy Video into Texture");
PStatCollector MovieVideoCursor::_copy_pcollector_ram("*:Copy Video into Texture:modify_ram_image");
PStatCollector MovieVideoCursor::_copy_pcollector_copy("*:Copy Video into Texture:copy");
TypeHandle MovieVideoCursor::_type_handle;
TypeHandle MovieVideoCursor::Buffer::_type_handle;
////////////////////////////////////////////////////////////////////
// Function: MovieVideoCursor::Default Constructor
@ -76,57 +81,58 @@ setup_texture(Texture *tex) const {
// loop_count >= 1, the time is clamped to the movie's
// length * loop_count. If loop_count <= 0, the time is
// understood to be modulo the movie's length.
//
// Returns true if a new frame is now available, false
// otherwise. If this returns true, you should
// immediately follow this with exactly *one* call to
// one of the fetch_*() methods.
// fetch_buffer().
//
// If the movie reports that it can_seek, you may also
// specify a time value less than the previous value you
// passed to set_time(). Otherwise, you may only
// specify a time value greater than or equal to
// the previous value.
//
// If the movie reports that it can_seek, it doesn't
// mean that it can do so quickly. It may have to
// rewind the movie and then fast forward to the
// desired location. Only if can_seek_fast returns
// true can it seek rapidly.
////////////////////////////////////////////////////////////////////
bool MovieVideoCursor::
set_time(double time, int loop_count) {
set_time(double timestamp, int loop_count) {
return true;
}
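As an illustration of the loop_count semantics described above (hypothetical numbers, assuming a 10-second movie):

//   set_time(3.0, 1)   -> within the first loop; the cursor seeks to 3.0 s.
//   set_time(25.0, 2)  -> loop_count >= 1, so the time is clamped to
//                         length * loop_count = 20.0 s (the end of loop 2).
//   set_time(25.0, 0)  -> loop_count <= 0, so the time is taken modulo the
//                         length; the cursor shows the frame at 5.0 s.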
////////////////////////////////////////////////////////////////////
// Function: MovieVideoCursor::fetch_into_bitbucket
// Function: MovieVideoCursor::fetch_buffer
// Access: Published, Virtual
// Description: Discards the next video frame.
// Description: Gets the current video frame (as specified by
// set_time()) from the movie and returns it in a
// pre-allocated buffer. The Buffer is reference
// counted, so you may simply drop your reference and
// let it delete itself when you are done with it.
//
// See fetch_buffer for more details.
// This may return NULL (even if set_time() returned
// true) if the frame is not available for some reason.
////////////////////////////////////////////////////////////////////
void MovieVideoCursor::
fetch_into_bitbucket() {
// This generic implementation is layered on fetch_buffer.
// It will work for any derived class, so it is never necessary to
// redefine this. It is probably possible to make a faster
// implementation, but since this function is rarely used, it
// probably isn't worth the trouble.
PT(Buffer) buffer = fetch_buffer();
if (buffer != NULL) {
release_buffer(buffer);
}
PT(MovieVideoCursor::Buffer) MovieVideoCursor::
fetch_buffer() {
return NULL;
}
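A minimal sketch of the new two-step flow from a hypothetical caller (the helper name and arguments are illustrative only); MovieTexture::cull_callback() above follows the same pattern across all of its pages:

#include "movieVideoCursor.h"
#include "texture.h"

void update_page(MovieVideoCursor *cursor, Texture *tex, int page, double time) {
  if (cursor->set_time(time, 1)) {
    // A new frame is available; fetch it and copy it into the texture page.
    PT(MovieVideoCursor::Buffer) buffer = cursor->fetch_buffer();
    if (buffer != NULL) {
      cursor->apply_to_texture(buffer, tex, page);
    }
    // The Buffer deletes itself when the last PT() reference goes away.
  }
}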
////////////////////////////////////////////////////////////////////
// Function: MovieVideoCursor::fetch_into_texture
// Function: MovieVideoCursor::apply_to_texture
// Access: Published, Virtual
// Description: Reads the specified video frame into
// the specified texture.
//
// See fetch_buffer for more details.
// Description: Stores this buffer's contents in the indicated texture.
////////////////////////////////////////////////////////////////////
void MovieVideoCursor::
fetch_into_texture(Texture *t, int page) {
static PStatCollector fetch_into_texture_collector("*:Decode Video into Texture");
PStatTimer timer(fetch_into_texture_collector);
apply_to_texture(const Buffer *buffer, Texture *t, int page) {
if (buffer == NULL) {
return;
}
// This generic implementation is layered on fetch_buffer.
// It will work for any derived class, so it is never necessary to
// redefine this. However, it may be possible to make a faster
// implementation that uses fewer intermediate copies, depending
// on the capabilities of the underlying codec software.
PStatTimer timer(_copy_pcollector);
nassertv(t->get_x_size() >= size_x());
nassertv(t->get_y_size() >= size_y());
@ -134,20 +140,19 @@ fetch_into_texture(Texture *t, int page) {
nassertv(t->get_component_width() == 1);
nassertv(page < t->get_num_pages());
t->set_keep_ram_image(true);
PTA_uchar img = t->modify_ram_image();
PTA_uchar img;
{
PStatTimer timer2(_copy_pcollector_ram);
t->set_keep_ram_image(true);
img = t->modify_ram_image();
}
unsigned char *data = img.p() + page * t->get_expected_ram_page_size();
PT(Buffer) buffer = fetch_buffer();
if (buffer == NULL) {
// No image available.
return;
}
PStatTimer timer2(_copy_pcollector_copy);
if (t->get_x_size() == size_x() && t->get_num_components() == get_num_components()) {
memcpy(data, buffer->_block, size_x() * size_y() * get_num_components());
} else {
unsigned char *p = buffer->_block;
if (t->get_num_components() == get_num_components()) {
@ -172,26 +177,22 @@ fetch_into_texture(Texture *t, int page) {
}
}
}
release_buffer(buffer);
}
////////////////////////////////////////////////////////////////////
// Function: MovieVideoCursor::fetch_into_texture_alpha
// Function: MovieVideoCursor::apply_to_texture_alpha
// Access: Published, Virtual
// Description: Reads the specified video frame into
// the alpha channel of the supplied texture. The
// RGB channels of the texture are not touched.
//
// See fetch_buffer for more details.
// Description: Copies this buffer's contents into the alpha channel
// of the supplied texture. The RGB channels of the
// texture are not touched.
////////////////////////////////////////////////////////////////////
void MovieVideoCursor::
fetch_into_texture_alpha(Texture *t, int page, int alpha_src) {
apply_to_texture_alpha(const Buffer *buffer, Texture *t, int page, int alpha_src) {
if (buffer == NULL) {
return;
}
// This generic implementation is layered on fetch_buffer.
// It will work for any derived class, so it is never necessary to
// redefine this. However, it may be possible to make a faster
// implementation that uses fewer intermediate copies, depending
// on the capabilities of the underlying codec software.
PStatTimer timer(_copy_pcollector);
nassertv(t->get_x_size() >= size_x());
nassertv(t->get_y_size() >= size_y());
@ -200,17 +201,16 @@ fetch_into_texture_alpha(Texture *t, int page, int alpha_src) {
nassertv(page < t->get_z_size());
nassertv((alpha_src >= 0) && (alpha_src <= get_num_components()));
PT(Buffer) buffer = fetch_buffer();
if (buffer == NULL) {
// No image available.
return;
PTA_uchar img;
{
PStatTimer timer2(_copy_pcollector_ram);
t->set_keep_ram_image(true);
img = t->modify_ram_image();
}
t->set_keep_ram_image(true);
PTA_uchar img = t->modify_ram_image();
unsigned char *data = img.p() + page * t->get_expected_ram_page_size();
PStatTimer timer2(_copy_pcollector_copy);
int src_stride = size_x() * get_num_components();
int dst_stride = t->get_x_size() * 4;
if (alpha_src == 0) {
@ -234,45 +234,39 @@ fetch_into_texture_alpha(Texture *t, int page, int alpha_src) {
p += src_stride;
}
}
release_buffer(buffer);
}
////////////////////////////////////////////////////////////////////
// Function: MovieVideoCursor::fetch_into_texture_rgb
// Function: MovieVideoCursor::apply_to_texture_rgb
// Access: Published, Virtual
// Description: Reads the specified video frame into
// the RGB channels of the supplied texture. The alpha
// channel of the texture is not touched.
//
// See fetch_buffer for more details.
// Description: Copies this buffer's contents into the RGB channels
// of the supplied texture. The alpha channel of the
// texture is not touched.
////////////////////////////////////////////////////////////////////
void MovieVideoCursor::
fetch_into_texture_rgb(Texture *t, int page) {
apply_to_texture_rgb(const Buffer *buffer, Texture *t, int page) {
if (buffer == NULL) {
return;
}
// This generic implementation is layered on fetch_buffer.
// It will work for any derived class, so it is never necessary to
// redefine this. However, it may be possible to make a faster
// implementation that uses fewer intermediate copies, depending
// on the capabilities of the underlying codec software.
PStatTimer timer(_copy_pcollector);
nassertv(t->get_x_size() >= size_x());
nassertv(t->get_y_size() >= size_y());
nassertv(t->get_num_components() == 4);
nassertv(t->get_component_width() == 1);
nassertv(page < t->get_z_size());
PT(Buffer) buffer = fetch_buffer();
if (buffer == NULL) {
// No image available.
return;
PTA_uchar img;
{
PStatTimer timer2(_copy_pcollector_ram);
t->set_keep_ram_image(true);
img = t->modify_ram_image();
}
t->set_keep_ram_image(true);
PTA_uchar img = t->modify_ram_image();
unsigned char *data = img.p() + page * t->get_expected_ram_page_size();
PStatTimer timer2(_copy_pcollector_copy);
int src_stride = size_x() * get_num_components();
int src_width = get_num_components();
int dst_stride = t->get_x_size() * 4;
@ -286,46 +280,6 @@ fetch_into_texture_rgb(Texture *t, int page) {
data += dst_stride;
p += src_stride;
}
release_buffer(buffer);
}
////////////////////////////////////////////////////////////////////
// Function: MovieVideoCursor::fetch_buffer
// Access: Published, Virtual
// Description: Reads the specified video frame and returns it in a
// pre-allocated buffer. After you have copied the data
// from the buffer, you should call release_buffer() to
// make the space available again to populate the next
// frame. You may not call fetch_buffer() again until
// you have called release_buffer().
//
// If the movie reports that it can_seek, you may
// also specify a timestamp less than next_start.
// Otherwise, you may only specify a timestamp
// greater than or equal to next_start.
//
// If the movie reports that it can_seek, it doesn't
// mean that it can do so quickly. It may have to
// rewind the movie and then fast forward to the
// desired location. Only if can_seek_fast returns
// true can it seek rapidly.
////////////////////////////////////////////////////////////////////
PT(MovieVideoCursor::Buffer) MovieVideoCursor::
fetch_buffer() {
return NULL;
}
////////////////////////////////////////////////////////////////////
// Function: MovieVideoCursor::release_buffer
// Access: Public, Virtual
// Description: Should be called after processing the Buffer object
// returned by fetch_buffer(), this releases the Buffer
// for future use again.
////////////////////////////////////////////////////////////////////
void MovieVideoCursor::
release_buffer(Buffer *buffer) {
nassertv(buffer == _standard_buffer);
}
////////////////////////////////////////////////////////////////////
@ -333,8 +287,7 @@ release_buffer(Buffer *buffer) {
// Access: Protected
// Description: May be called by a derived class to return a single
// standard Buffer object to easily implement
// fetch_buffer(). The default release_buffer()
// implementation assumes this method is used.
// fetch_buffer().
////////////////////////////////////////////////////////////////////
MovieVideoCursor::Buffer *MovieVideoCursor::
get_standard_buffer() {
@ -397,3 +350,57 @@ fillin(DatagramIterator &scan, BamReader *manager) {
manager->read_pointer(scan); // _source
}
////////////////////////////////////////////////////////////////////
// Function: MovieVideoCursor::Buffer::Constructor
// Access: Public
// Description:
////////////////////////////////////////////////////////////////////
MovieVideoCursor::Buffer::
Buffer(size_t block_size) :
_block_size(block_size)
{
_deleted_chain = memory_hook->get_deleted_chain(_block_size);
_block = (unsigned char *)_deleted_chain->allocate(_block_size, get_class_type());
}
////////////////////////////////////////////////////////////////////
// Function: MovieVideoCursor::Buffer::Destructor
// Access: Published, Virtual
// Description:
////////////////////////////////////////////////////////////////////
MovieVideoCursor::Buffer::
~Buffer() {
_deleted_chain->deallocate(_block, get_class_type());
}
////////////////////////////////////////////////////////////////////
// Function: MovieVideoCursor::Buffer::compare_timestamp
// Access: Published, Virtual
// Description: Used to sort different buffers to ensure they
// correspond to the same source frame, particularly
// important when synchronizing the different pages of a
// multi-page texture.
//
// Returns 0 if the two buffers are of the same frame,
// <0 if this one comes earlier than the other one, and
// >0 if the other one comes earlier.
////////////////////////////////////////////////////////////////////
int MovieVideoCursor::Buffer::
compare_timestamp(const Buffer *other) const {
return 0;
}
////////////////////////////////////////////////////////////////////
// Function: MovieVideoCursor::Buffer::get_timestamp
// Access: Published, Virtual
// Description: Returns the nearest timestamp value of this
// particular buffer. Ideally,
// MovieVideoCursor::set_time() for this timestamp would
// return this buffer again. This need be defined only
// if compare_timestamp() is also defined.
////////////////////////////////////////////////////////////////////
double MovieVideoCursor::Buffer::
get_timestamp() const {
return 0.0;
}

View File

@ -19,6 +19,9 @@
#include "texture.h"
#include "pointerTo.h"
#include "memoryBase.h"
#include "pStatCollector.h"
#include "deletedChain.h"
#include "typedReferenceCount.h"
class MovieVideo;
class FactoryParams;
@ -58,22 +61,48 @@ PUBLISHED:
INLINE bool streaming() const;
void setup_texture(Texture *tex) const;
virtual bool set_time(double time, int loop_count);
virtual void fetch_into_bitbucket();
virtual void fetch_into_texture(Texture *t, int page);
virtual void fetch_into_texture_rgb(Texture *t, int page);
virtual void fetch_into_texture_alpha(Texture *t, int page, int alpha_src);
virtual bool set_time(double timestamp, int loop_count);
class Buffer : public TypedReferenceCount {
public:
ALLOC_DELETED_CHAIN(Buffer);
Buffer(size_t block_size);
PUBLISHED:
virtual ~Buffer();
virtual int compare_timestamp(const Buffer *other) const;
virtual double get_timestamp() const;
public:
class Buffer : public ReferenceCount {
public:
INLINE Buffer(size_t block_size);
INLINE virtual ~Buffer();
unsigned char *_block;
size_t _block_size;
private:
DeletedBufferChain *_deleted_chain;
public:
static TypeHandle get_class_type() {
return _type_handle;
}
static void init_type() {
TypedReferenceCount::init_type();
register_type(_type_handle, "MovieVideoCursor::Buffer",
TypedReferenceCount::get_class_type());
}
virtual TypeHandle get_type() const {
return get_class_type();
}
virtual TypeHandle force_init_type() {init_type(); return get_class_type();}
private:
static TypeHandle _type_handle;
};
virtual PT(Buffer) fetch_buffer();
virtual void release_buffer(Buffer *buffer);
virtual void apply_to_texture(const Buffer *buffer, Texture *t, int page);
virtual void apply_to_texture_rgb(const Buffer *buffer, Texture *t, int page);
virtual void apply_to_texture_alpha(const Buffer *buffer, Texture *t, int page, int alpha_src);
protected:
Buffer *get_standard_buffer();
@ -93,6 +122,10 @@ protected:
PT(Buffer) _standard_buffer;
static PStatCollector _copy_pcollector;
static PStatCollector _copy_pcollector_ram;
static PStatCollector _copy_pcollector_copy;
public:
virtual void write_datagram(BamWriter *manager, Datagram &dg);
virtual int complete_pointers(TypedWritable **plist, BamReader *manager);
@ -108,6 +141,7 @@ public:
TypedWritableReferenceCount::init_type();
register_type(_type_handle, "MovieVideoCursor",
TypedWritableReferenceCount::get_class_type());
Buffer::init_type();
}
virtual TypeHandle get_type() const {
return get_class_type();