/* This file is part of RetroFE.
 *
 * RetroFE is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * RetroFE is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with RetroFE. If not, see <http://www.gnu.org/licenses/>.
 */

#include "GStreamerVideo.h"
|
|
#include "../Graphics/ViewInfo.h"
|
|
#include "../Graphics/Component/Image.h"
|
|
#include "../Database/Configuration.h"
|
|
#include "../Utility/Log.h"
|
|
#include "../Utility/Utils.h"
|
|
#include "../SDL.h"
|
|
#include <sstream>
|
|
#include <cstring>
|
|
#include <cstdlib>
|
|
#include <cstdio>
|
|
#include <SDL2/SDL.h>
|
|
#include <sys/stat.h>
|
|
#include <sys/types.h>
|
|
#include <gst/app/gstappsink.h>
|
|
|
|
bool GStreamerVideo::initialized_ = false;

//todo: this started out as sandbox code. This class needs to be refactored

// MUST match video size
gboolean GStreamerVideo::busCallback(GstBus * /* bus */, GstMessage * /* msg */, gpointer /* data */)
{
    // this callback only needs to be defined so we can loop the video once it completes
    return TRUE;
}

GStreamerVideo::GStreamerVideo()
    : playbin_(NULL)
    , videoBin_(NULL)
    , videoSink_(NULL)
    , videoConvert_(NULL)
    , videoConvertCaps_(NULL)
    , videoBus_(NULL)
    , texture_(NULL)
    , height_(0)
    , width_(0)
    , videoBuffer_(NULL)
    , videoBufferSize_(0)
    , maxVideoBufferSize_(0)
    , frameReady_(false)
    , isPlaying_(false)
    , playCount_(0)
    , numLoops_(0)
{
}

GStreamerVideo::~GStreamerVideo()
{
    stop();

    if(videoBuffer_)
    {
        delete[] videoBuffer_;
        videoBuffer_ = NULL;
        videoBufferSize_ = 0;
        maxVideoBufferSize_ = 0;
    }

    // stop() normally destroys the texture already; guard so we never destroy a NULL texture
    if(texture_)
    {
        SDL_DestroyTexture(texture_);
        texture_ = NULL;
    }

    freeElements();
}

void GStreamerVideo::setNumLoops(int n)
{
    numLoops_ = n;
}

SDL_Texture *GStreamerVideo::getTexture() const
{
    return texture_;
}

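// Handoff callback fired by the fakesink for every decoded buffer. It copies the
// raw YUY2 frame into videoBuffer_ under the shared SDL mutex so update() can
// upload it to the streaming texture on the render thread. The frame dimensions
// are read once from the pad's current caps when the first buffer arrives.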
void GStreamerVideo::processNewBuffer(GstElement * /* fakesink */, GstBuffer *buf, GstPad *new_pad, gpointer userdata)
{
    GStreamerVideo *video = (GStreamerVideo *)userdata;
    GstMapInfo map;
    SDL_LockMutex(SDL::getMutex());

    // check the userdata pointer before dereferencing it
    if (video && !video->frameReady_ && video->isPlaying_ && gst_buffer_map(buf, &map, GST_MAP_READ))
    {
        if(!video->width_ || !video->height_)
        {
            GstCaps *caps = gst_pad_get_current_caps(new_pad);

            if(caps)
            {
                GstStructure *s = gst_caps_get_structure(caps, 0);

                gst_structure_get_int(s, "width", &video->width_);
                gst_structure_get_int(s, "height", &video->height_);

                // caps returned by gst_pad_get_current_caps() are owned by the caller
                gst_caps_unref(caps);
            }
        }

        if(video->height_ && video->width_)
        {
            // keep the largest video buffer allocated to avoid the penalty of reallocating and deallocating
            if(!video->videoBuffer_ || video->maxVideoBufferSize_ < map.size)
            {
                if(video->videoBuffer_)
                {
                    delete[] video->videoBuffer_;
                }

                video->videoBuffer_ = new char[map.size];
                video->maxVideoBufferSize_ = map.size;
            }

            video->videoBufferSize_ = map.size;

            memcpy(video->videoBuffer_, map.data, map.size);
            gst_buffer_unmap(buf, &map);
            video->frameReady_ = true;
        }
        else
        {
            // dimensions are still unknown; unmap so the mapping is always released
            gst_buffer_unmap(buf, &map);
        }
    }
    SDL_UnlockMutex(SDL::getMutex());
}

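// One-time GStreamer setup. gst_init() is global to the process; on Windows the
// bundled "Core" directory is added to the plugin registry search path so RetroFE
// can ship its own GStreamer plugins.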
bool GStreamerVideo::initialize()
{
    if(initialized_)
    {
        return true;
    }

    std::string path = Utils::combinePath(Configuration::absolutePath, "Core");
    gst_init(NULL, NULL);

#ifdef WIN32
    GstRegistry *registry = gst_registry_get();
    gst_registry_scan_path(registry, path.c_str());
#endif

    initialized_ = true;

    return true;
}

bool GStreamerVideo::deInitialize()
{
    gst_deinit();
    initialized_ = false;
    return true;
}

bool GStreamerVideo::stop()
{
    if(!initialized_)
    {
        return false;
    }

    if(videoSink_)
    {
        g_object_set(G_OBJECT(videoSink_), "signal-handoffs", FALSE, NULL);
    }

    if(playbin_)
    {
        (void)gst_element_set_state(playbin_, GST_STATE_NULL);
    }

    if(texture_)
    {
        SDL_DestroyTexture(texture_);
        texture_ = NULL;
    }

    // FreeElements();

    isPlaying_ = false;
    height_ = 0;
    width_ = 0;
    frameReady_ = false;

    return true;
}

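// Builds (on first use) and starts the playback pipeline. playbin handles demuxing
// and decoding; its "video-sink" property is pointed at a custom bin containing a
// capsfilter that forces raw YUY2 output, followed by a fakesink whose handoff
// signal delivers each frame to processNewBuffer().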
bool GStreamerVideo::play(std::string file)
{
    playCount_ = 0;

    if(!initialized_)
    {
        return false;
    }

    stop();

    currentFile_ = file;

    // gst_filename_to_uri() returns a newly allocated string that the caller must free
    gchar *uriFile = gst_filename_to_uri(file.c_str(), NULL);
    if(!uriFile)
    {
        return false;
    }
    else
    {
        Configuration::convertToAbsolutePath(Configuration::absolutePath, file);
        file = uriFile;
        g_free(uriFile);

        if(!playbin_)
        {
            playbin_ = gst_element_factory_make("playbin", "player");
            videoBin_ = gst_bin_new("SinkBin");
            videoSink_ = gst_element_factory_make("fakesink", "video_sink");
            videoConvert_ = gst_element_factory_make("capsfilter", "video_convert");
            videoConvertCaps_ = gst_caps_from_string("video/x-raw,format=(string)YUY2");
            height_ = 0;
            width_ = 0;

            if(!playbin_)
            {
                Logger::write(Logger::ZONE_DEBUG, "Video", "Could not create playbin");
                freeElements();
                return false;
            }
            if(!videoSink_)
            {
                Logger::write(Logger::ZONE_DEBUG, "Video", "Could not create video sink");
                freeElements();
                return false;
            }
            if(!videoConvert_)
            {
                Logger::write(Logger::ZONE_DEBUG, "Video", "Could not create video converter");
                freeElements();
                return false;
            }
            if(!videoConvertCaps_)
            {
                Logger::write(Logger::ZONE_DEBUG, "Video", "Could not create video caps");
                freeElements();
                return false;
            }

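            // Wire up the sink bin: capsfilter -> fakesink, then expose the capsfilter's
            // sink pad as a ghost pad so the bin as a whole can be used as playbin's
            // video sink.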
            gst_bin_add_many(GST_BIN(videoBin_), videoConvert_, videoSink_, NULL);
            gst_element_link_filtered(videoConvert_, videoSink_, videoConvertCaps_);
            GstPad *videoConvertSinkPad = gst_element_get_static_pad(videoConvert_, "sink");

            if(!videoConvertSinkPad)
            {
                Logger::write(Logger::ZONE_DEBUG, "Video", "Could not get video convert sink pad");
                freeElements();
                return false;
            }

            g_object_set(G_OBJECT(videoSink_), "sync", TRUE, "qos", FALSE, NULL);

            GstPad *videoSinkPad = gst_ghost_pad_new("sink", videoConvertSinkPad);
            if(!videoSinkPad)
            {
                Logger::write(Logger::ZONE_DEBUG, "Video", "Could not get video bin sink pad");
                freeElements();
                gst_object_unref(videoConvertSinkPad);
                videoConvertSinkPad = NULL;
                return false;
            }

            gst_element_add_pad(videoBin_, videoSinkPad);
            gst_object_unref(videoConvertSinkPad);
            videoConvertSinkPad = NULL;
        }

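        // Point playbin at the file and the custom sink bin, enable the fakesink's
        // handoff signal, and watch the pipeline bus. Setting a playbin to PLAYING
        // normally returns GST_STATE_CHANGE_ASYNC, so any other return value is
        // treated as a failure here.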
        g_object_set(G_OBJECT(playbin_), "uri", file.c_str(), "video-sink", videoBin_, NULL);

        isPlaying_ = true;

        g_object_set(G_OBJECT(videoSink_), "signal-handoffs", TRUE, NULL);
        g_signal_connect(videoSink_, "handoff", G_CALLBACK(processNewBuffer), this);

        videoBus_ = gst_pipeline_get_bus(GST_PIPELINE(playbin_));
        gst_bus_add_watch(videoBus_, &busCallback, this);

        /* Start playing */
        GstStateChangeReturn playState = gst_element_set_state(GST_ELEMENT(playbin_), GST_STATE_PLAYING);
        if (playState != GST_STATE_CHANGE_ASYNC)
        {
            isPlaying_ = false;
            std::stringstream ss;
            ss << "Unable to set the pipeline to the playing state: ";
            ss << playState;
            Logger::write(Logger::ZONE_ERROR, "Video", ss.str());
            freeElements();
            return false;
        }
    }

    return true;
}

void GStreamerVideo::freeElements()
{
    if(videoBin_)
    {
        gst_object_unref(videoBin_);
        videoBin_ = NULL;
    }
    if(videoSink_)
    {
        gst_object_unref(videoSink_);
        videoSink_ = NULL;
    }
    if(videoConvert_)
    {
        gst_object_unref(videoConvert_);
        videoConvert_ = NULL;
    }
    if(videoConvertCaps_)
    {
        gst_object_unref(videoConvertCaps_);
        videoConvertCaps_ = NULL;
    }
    if(playbin_)
    {
        gst_object_unref(playbin_);
        playbin_ = NULL;
    }
}

int GStreamerVideo::getHeight()
{
    return static_cast<int>(height_);
}

int GStreamerVideo::getWidth()
{
    return static_cast<int>(width_);
}

void GStreamerVideo::draw()
{
    frameReady_ = false;
}

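// Per-frame update: lazily creates the streaming YUY2 texture once the video
// dimensions are known, uploads the most recent frame captured by
// processNewBuffer() under the shared SDL mutex, and polls the pipeline bus so
// end-of-stream can either restart or stop playback.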
void GStreamerVideo::update(float /* dt */)
{
    SDL_LockMutex(SDL::getMutex());
    if(!texture_ && width_ != 0 && height_ != 0)
    {
        texture_ = SDL_CreateTexture(SDL::getRenderer(), SDL_PIXELFORMAT_YUY2,
                                     SDL_TEXTUREACCESS_STREAMING, width_, height_);
        SDL_SetTextureBlendMode(texture_, SDL_BLENDMODE_BLEND);
    }

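    // YUY2 packs two bytes per pixel, so a full frame is width_ * height_ * 2 bytes.
    // The straight memcpy below assumes the locked texture's pitch equals width_ * 2.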
    if(videoBuffer_ && frameReady_ && texture_ && width_ && height_)
    {
        //todo: change to width of cap
        void *pixels;
        int pitch;
        SDL_LockTexture(texture_, NULL, &pixels, &pitch);
        memcpy(pixels, videoBuffer_, width_*height_*2); //todo: magic number
        SDL_UnlockTexture(texture_);
    }
    SDL_UnlockMutex(SDL::getMutex());

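    // Drain the bus manually. On end-of-stream the video is looped by seeking back
    // to the start unless the requested loop count has been reached; numLoops_ == 0
    // means loop forever.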
    if(videoBus_)
    {
        GstMessage *msg = gst_bus_pop(videoBus_);
        if(msg)
        {
            if(GST_MESSAGE_TYPE(msg) == GST_MESSAGE_EOS)
            {
                playCount_++;

                //todo: nesting hazard
                // if number of loops is 0, set to infinite (todo: this is misleading, rename variable)
                if(!numLoops_ || numLoops_ > playCount_)
                {
                    gst_element_seek(playbin_,
                                     1.0,
                                     GST_FORMAT_TIME,
                                     GST_SEEK_FLAG_FLUSH,
                                     GST_SEEK_TYPE_SET,
                                     0,
                                     GST_SEEK_TYPE_NONE,
                                     GST_CLOCK_TIME_NONE);
                }
                else
                {
                    isPlaying_ = false;
                }
            }

            gst_message_unref(msg);
        }
    }
}

bool GStreamerVideo::isPlaying()
{
    return isPlaying_;
}