GRAPHICS: Add back video playing
This time, the video player can easily be adapted to play
several videos simultaneously. This will be necessary
for Sonic.

Right now, we still de-YUV the videos onto a surface held
in CPU memory, and then copy that surface (with pixel
format conversion) into video memory. This is not optimal,
and we should probably change it to de-YUV directly into
video memory.
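
As a rough illustration (not part of this commit), the suggested change could look like the sketch below: lock the Ogre texture's pixel buffer and let the decoder write BGRA pixels straight into it, skipping the intermediate CPU-side surface. The deYUVInto() method here is purely hypothetical; only the Ogre buffer calls mirror the ones used in the diff below.

#include <OgreTexture.h>
#include <OgreHardwarePixelBuffer.h>

#include "video/decoder.h"

// Hypothetical direct path: decode the next frame straight into the texture's
// pixel buffer, avoiding the intermediate CPU-side surface and the extra
// copy/convert step.
static void renderFrameDirect(Video::VideoDecoder &video, Ogre::TexturePtr &texture) {
	Ogre::HardwarePixelBufferSharedPtr buffer = texture->getBuffer();

	// Map the texture memory; HBL_DISCARD because the whole frame is overwritten
	void *pixels = buffer->lock(Ogre::HardwareBuffer::HBL_DISCARD);

	// deYUVInto() is a hypothetical decoder method that would convert the
	// current frame to BGRA directly into the mapped buffer
	video.deYUVInto(pixels, buffer->getWidth(), buffer->getHeight());

	buffer->unlock();
}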
DrMcCoy committed Jan 29, 2014
1 parent 55af39e commit bd83f4d
Showing 9 changed files with 296 additions and 111 deletions.
13 changes: 12 additions & 1 deletion src/engines/aurora/util.cpp
@@ -64,7 +64,18 @@ void playVideo(const Common::UString &video) {
try {
Video::Aurora::VideoPlayer videoPlayer(video);

videoPlayer.play();
videoPlayer.start();
while (videoPlayer.isPlaying() && !EventMan.quitRequested()) {
Events::Event event;
while (EventMan.pollEvent(event)) {
if (((event.type == Events::kEventKeyDown) && (event.key.keysym.sym == SDLK_ESCAPE)) ||
(event.type == Events::kEventMouseUp))
videoPlayer.abort();
}

EventMan.delay(10);
}

} catch (Common::Exception &e) {
Common::printException(e, "WARNING: ");
}
247 changes: 218 additions & 29 deletions src/video/aurora/videoplayer.cpp
@@ -27,10 +27,27 @@
* A video player.
*/

#include <OgreFrameListener.h>
#include <OgreMaterialManager.h>
#include <OgreTexture.h>
#include <OgreTextureUnitState.h>
#include <OgrePass.h>
#include <OgreTechnique.h>
#include <OgreOverlaySystem.h>
#include <OgreHardwarePixelBuffer.h>

#include "common/error.h"
#include "common/util.h"
#include "common/ustring.h"
#include "common/stream.h"
#include "common/uuid.h"
#include "common/threads.h"

#include "graphics/graphics.h"
#include "graphics/textureman.h"
#include "graphics/materialman.h"

#include "graphics/aurora/sceneman.h"

#include "video/decoder.h"
#include "video/actimagine.h"
@@ -49,11 +66,204 @@ namespace Video {

namespace Aurora {

VideoPlayer::VideoPlayer(const Common::UString &video) : _video(0) {
class VideoFrameRenderer : public Ogre::FrameListener {
private:
VideoDecoder *_video;

int _width;
int _height;
int _x;
int _y;

Ogre::Overlay *_overlay;

Ogre::PanelOverlayElement *_panelBack;
Ogre::PanelOverlayElement *_panelVideo;

Ogre::MaterialPtr _videoMaterial;
Ogre::TexturePtr _videoTexture;

public:
VideoFrameRenderer(VideoDecoder &video, int width, int height, int x, int y) :
_video(&video), _width(width), _height(height), _x(x), _y(y),
_overlay(0), _panelBack(0), _panelVideo(0) {

}

~VideoFrameRenderer() {
if (!_overlay)
return;

// Destroy the video player overlay

_panelVideo->hide();
_panelBack->hide();
_overlay->hide();

_panelBack->removeChild(_panelVideo->getName());
_overlay->remove2D(_panelBack);

Ogre::OverlayManager::getSingletonPtr()->destroyOverlayElement(_panelVideo);
Ogre::OverlayManager::getSingletonPtr()->destroyOverlayElement(_panelBack);
Ogre::OverlayManager::getSingletonPtr()->destroy(_overlay);
}

// Return the video size scaled to the requested dimensions restricted by video aspect ratio
void getScaledSize(int &scaledWidth, int &scaledHeight) {
int srcWidth, srcHeight;
_video->getVideoSize(srcWidth, srcHeight);

float ratio = (float)srcWidth / (float)srcHeight;

scaledWidth = _width;
scaledHeight = _width / ratio;
if (scaledHeight <= _height)
return;

scaledHeight = _height;
scaledWidth = _height * ratio;
}

// Return the texture coordinates for the video content within the surface
void getTextureCoords(float &textureX2, float &textureY2) {
int videoWidth, videoHeight, surfaceWidth, surfaceHeight;
_video->getVideoSize(videoWidth, videoHeight);
_video->getSurfaceSize(surfaceWidth, surfaceHeight);

textureX2 = (float) videoWidth / (float) surfaceWidth;
textureY2 = (float) videoHeight / (float) surfaceHeight;
}

void createOverlay() {
LOCK_FRAME();

// Dimensions and coordinates

int scaledWidth, scaledHeight;
getScaledSize(scaledWidth, scaledHeight);

int textureWidth, textureHeight;
_video->getSurfaceSize(textureWidth, textureHeight);

float textureX2, textureY2;
getTextureCoords(textureX2, textureY2);


Ogre::OverlayManager &overlayMan = *Ogre::OverlayManager::getSingletonPtr();

Common::UString nameOverlay = Common::generateIDRandomString();
Common::UString nameVideo = Common::generateIDRandomString();
Common::UString nameBack = Common::generateIDRandomString();


// Get the black background material

Ogre::MaterialPtr matBack = MaterialMan.getSolidColor(0.0, 0.0, 0.0);

// Create the material and texture for the video content

_videoTexture = Ogre::TextureManager::getSingleton().createManual(nameVideo.c_str(), "General",
Ogre::TEX_TYPE_2D, textureWidth, textureHeight, 1, Ogre::PF_BYTE_BGRA, Ogre::TU_DYNAMIC_WRITE_ONLY_DISCARDABLE | Ogre::TU_AUTOMIPMAP);

Ogre::HardwarePixelBufferSharedPtr buffer = _videoTexture->getBuffer();
memset(buffer->lock(Ogre::HardwareBuffer::HBL_DISCARD), 0, buffer->getSizeInBytes());
buffer->unlock();

_videoMaterial = Ogre::MaterialManager::getSingleton().create(nameVideo.c_str(), "General");

Ogre::TextureUnitState *texState = _videoMaterial->getTechnique(0)->getPass(0)->createTextureUnitState();
texState->setTexture(_videoTexture);

texState->setTextureAddressingMode(Ogre::TextureUnitState::TAM_WRAP);

// Create the background panel

_panelBack = (Ogre::PanelOverlayElement *) overlayMan.createOverlayElement("Panel", nameBack.c_str());
_panelBack->setMaterialName(matBack->getName());
_panelBack->setMetricsMode(Ogre::GMM_PIXELS);
_panelBack->setPosition(_x, _y);
_panelBack->setDimensions(_width, _height);
_panelBack->show();

// Create the video panel on top of the background panel

_panelVideo = (Ogre::PanelOverlayElement *) overlayMan.createOverlayElement("Panel", nameVideo.c_str());
_panelVideo->setMaterialName(_videoMaterial->getName());
_panelVideo->setMetricsMode(Ogre::GMM_PIXELS);
_panelVideo->setPosition(- (scaledWidth / 2), - (scaledHeight / 2));
_panelVideo->setDimensions(scaledWidth, scaledHeight);
_panelVideo->setHorizontalAlignment(Ogre::GHA_CENTER);
_panelVideo->setVerticalAlignment(Ogre::GVA_CENTER);
_panelVideo->setUV(0.0, textureY2, textureX2, 0.0);
_panelVideo->show();

_panelBack->addChild(_panelVideo);

// Create the overlay

_overlay = overlayMan.create(nameOverlay.c_str());
_overlay->setZOrder(645);
_overlay->add2D(_panelBack);
_overlay->show();
}

bool frameRenderingQueued(const Ogre::FrameEvent &event) {
LOCK_FRAME();

if (!_overlay)
createOverlay();

if (_video->getTimeToNextFrame() > 10)
return true;

// This renders the frame onto a surface and then copies/converts
// the pixels onto the texture. This is quite slow...
// We should probably de-YUV directly onto the texture instead.

_video->renderFrame();
_video->copyIntoTexture(_videoTexture);
return true;
}
};


VideoPlayer::VideoPlayer(const Common::UString &video, int width, int height, int x, int y) :
_video(0), _renderer(0) {

load(video);

if (width <= 0)
width = (GfxMan.getScreenWidth() - x);
if (height <= 0)
height = (GfxMan.getScreenHeight() - y);

_renderer = new VideoFrameRenderer(*_video, width, height, x, y);

LOCK_FRAME();
Ogre::Root::getSingleton().addFrameListener(_renderer);
}

VideoPlayer::~VideoPlayer() {
deinit();
}

void VideoPlayer::deinit() {
if (!Common::isMainThread()) {
Events::MainThreadFunctor<void> functor(boost::bind(&VideoPlayer::deinit, this));

return RequestMan.callInMainThread(functor);
}

LOCK_FRAME();

if (_renderer)
Ogre::Root::getSingleton().removeFrameListener(_renderer);

delete _renderer;

if (_video)
_video->abort();

delete _video;
}

@@ -84,39 +294,18 @@ void VideoPlayer::load(const Common::UString &name) {
delete video;
throw Common::Exception("Unsupported video resource type %d", (int) type);
}
}

_video->setScale(VideoDecoder::kScaleUpDown);
bool VideoPlayer::isPlaying() const {
return _video->isPlaying();
}

void VideoPlayer::play() {
return;
RequestMan.sync();
void VideoPlayer::abort() {
_video->abort();
}

void VideoPlayer::start() {
_video->start();

bool brk = false;

try {
Events::Event event;
while (!EventMan.quitRequested()) {

while (EventMan.pollEvent(event)) {
if ((event.type == Events::kEventKeyDown && event.key.keysym.sym == SDLK_ESCAPE) ||
(event.type == Events::kEventMouseUp))
brk = true;
}

if (brk || !_video->isPlaying())
break;

EventMan.delay(10);
}
} catch (...) {
_video->abort();
throw;
}

_video->abort();
}

} // End of namespace Aurora
13 changes: 11 additions & 2 deletions src/video/aurora/videoplayer.h
@@ -40,18 +40,27 @@ class VideoDecoder;

namespace Aurora {

class VideoFrameRenderer;

/** A video player. */
class VideoPlayer {
public:
VideoPlayer(const Common::UString &video);
VideoPlayer(const Common::UString &video, int width = -1, int height = -1, int x = 0, int y = 0);
~VideoPlayer();

void play();
bool isPlaying() const;

void start();
void abort();

private:
VideoDecoder *_video;

VideoFrameRenderer *_renderer;


void load(const Common::UString &name);
void deinit();
};

} // End of namespace Aurora
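
For illustration only, a minimal sketch of how this interface might drive two videos at once, the use case the commit message mentions for Sonic. The video names, sizes, positions, and include paths are assumptions; the event pump simply mirrors the loop added to playVideo() above.

#include "common/ustring.h"

#include "events/events.h"

#include "video/aurora/videoplayer.h"

// Sketch: two players, each rendering into its own region of the screen.
static void playTwoVideos() {
	Video::Aurora::VideoPlayer left ("some_video_a", 400, 300,   0, 0);
	Video::Aurora::VideoPlayer right("some_video_b", 400, 300, 400, 0);

	left.start();
	right.start();

	while ((left.isPlaying() || right.isPlaying()) && !EventMan.quitRequested()) {
		Events::Event event;
		while (EventMan.pollEvent(event)) {
			// Let the user skip both videos, as playVideo() does for a single one
			if (((event.type == Events::kEventKeyDown) && (event.key.keysym.sym == SDLK_ESCAPE)) ||
			    (event.type == Events::kEventMouseUp)) {
				left.abort();
				right.abort();
			}
		}

		EventMan.delay(10);
	}
}
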
2 changes: 0 additions & 2 deletions src/video/bink.cpp
@@ -229,8 +229,6 @@ void Bink::processData() {
delete frame.bits;
frame.bits = 0;

_needCopy = true;

_curFrame++;
}
