Add bsnes

This commit is contained in:
optixx
2009-04-08 21:29:36 +02:00
parent bcb4a055e9
commit 27b58a09f2
413 changed files with 71887 additions and 0 deletions

346
bsnes/lib/ruby/video/direct3d.cpp Executable file
View File

@@ -0,0 +1,346 @@
#include <windows.h>
#include <d3d9.h>
#define D3DVERTEX (D3DFVF_XYZRHW | D3DFVF_TEX1)
namespace ruby {
#include "direct3d.hpp"
class pVideoD3D {
public:
  VideoD3D &self;
  LPDIRECT3D9 lpd3d;
  LPDIRECT3DDEVICE9 device;
  LPDIRECT3DVERTEXBUFFER9 vertex_buffer, *vertex_ptr;
  D3DPRESENT_PARAMETERS presentation;
  D3DSURFACE_DESC d3dsd;
  D3DLOCKED_RECT d3dlr;
  D3DRASTER_STATUS d3drs;
  D3DCAPS9 d3dcaps;
  LPDIRECT3DTEXTURE9 texture;
  LPDIRECT3DSURFACE9 surface;

  //pre-transformed vertex (D3DFVF_XYZRHW) with one texture coordinate set (D3DFVF_TEX1)
  struct d3dvertex {
    float x, y, z, rhw;  //screen coords
    float u, v;          //texture coords
  };

  //usage / pool / lock flags, chosen in init() based on detected device capabilities
  struct {
    uint32_t t_usage, v_usage;  //texture / vertex-buffer usage flags
    uint32_t t_pool, v_pool;    //texture / vertex-buffer memory pools
    uint32_t lock;              //surface lock flags
    uint32_t filter;            //active D3DTEXF_* filter (see update_filter())
  } flags;

  struct {
    bool dynamic;      //device supports dynamic textures
    bool stretchrect;  //device supports StretchRect
  } caps;

  struct {
    HWND handle;       //output window
    bool synchronize;  //wait for vertical blank before presenting
    unsigned filter;   //requested Video::Filter* mode
  } settings;

  //last known client-area size; used by refresh() to detect window resizes
  struct {
    unsigned width;
    unsigned height;
  } state;

  //reports which settings this driver supports
  bool cap(Video::Setting setting) {
    if(setting == Video::Handle) return true;
    if(setting == Video::Synchronize) return true;
    if(setting == Video::Filter) return true;
    return false;
  }

  //returns the current value of a supported setting; false for unknown settings
  uintptr_t get(Video::Setting setting) {
    if(setting == Video::Handle) return (uintptr_t)settings.handle;
    if(setting == Video::Synchronize) return settings.synchronize;
    if(setting == Video::Filter) return settings.filter;
    return false;
  }

  //updates a supported setting; returns true on success
  bool set(Video::Setting setting, uintptr_t param) {
    if(setting == Video::Handle) {
      settings.handle = (HWND)param;
      return true;
    }

    if(setting == Video::Synchronize) {
      settings.synchronize = param;
      return true;
    }

    if(setting == Video::Filter) {
      settings.filter = param;
      if(lpd3d) update_filter();  //apply immediately if the driver is already initialized
      return true;
    }

    return false;
  }

  //maps the requested Video::Filter* mode onto a D3DTEXF_* filter and
  //applies it to the device's sampler state
  void update_filter() {
    if(!device) return;

    switch(settings.filter) { default:
      case Video::FilterPoint:  flags.filter = D3DTEXF_POINT;  break;
      case Video::FilterLinear: flags.filter = D3DTEXF_LINEAR; break;
    }

    device->SetSamplerState(0, D3DSAMP_MINFILTER, flags.filter);
    device->SetSamplerState(0, D3DSAMP_MAGFILTER, flags.filter);
  }

  /* Vertex format:
  0----------1
  |         /|
  |       /  |
  |     /    |
  |   /      |
  | /        |
  2----------3

  (x,y) screen coords, in pixels
  (u,v) texture coords, between 0.0 (top, left) to 1.0 (bottom, right)
  */
  //uploads a two-triangle strip covering the destination rectangle (x,y,w,h),
  //sampling the (px,py,pw,ph) region of a (tw,th)-sized texture
  void set_vertex(
    uint32_t px, uint32_t py, uint32_t pw, uint32_t ph,
    uint32_t tw, uint32_t th,
    uint32_t x, uint32_t y, uint32_t w, uint32_t h
  ) {
    d3dvertex vertex[4];
    vertex[0].x = vertex[2].x = (double)(x    );
    vertex[1].x = vertex[3].x = (double)(x + w);
    vertex[0].y = vertex[1].y = (double)(y    );
    vertex[2].y = vertex[3].y = (double)(y + h);

    //Z-buffer and RHW are unused for 2D blit, set to normal values
    vertex[0].z   = vertex[1].z   = vertex[2].z   = vertex[3].z   = 0.0;
    vertex[0].rhw = vertex[1].rhw = vertex[2].rhw = vertex[3].rhw = 1.0;

    double rw = (double)w / (double)pw * (double)tw;
    double rh = (double)h / (double)ph * (double)th;
    vertex[0].u = vertex[2].u = (double)(px    ) / rw;
    vertex[1].u = vertex[3].u = (double)(px + w) / rw;
    vertex[0].v = vertex[1].v = (double)(py    ) / rh;
    vertex[2].v = vertex[3].v = (double)(py + h) / rh;

    vertex_buffer->Lock(0, sizeof(d3dvertex) * 4, (void**)&vertex_ptr, 0);
    memcpy(vertex_ptr, vertex, sizeof(d3dvertex) * 4);
    vertex_buffer->Unlock();

    device->SetStreamSource(0, vertex_buffer, 0, sizeof(d3dvertex));
  }

  //fills the source surface and the primary display (plus backbuffers) with black
  void clear() {
    if(!device) return;
    if(caps.stretchrect == false && !texture) return;

    //in non-StretchRect mode the surface is obtained from the texture on demand
    if(caps.stretchrect == false) {
      texture->GetLevelDesc(0, &d3dsd);
      texture->GetSurfaceLevel(0, &surface);
    }

    if(surface) {
      device->ColorFill(surface, 0, D3DCOLOR_XRGB(0x00, 0x00, 0x00));
      if(caps.stretchrect == false) {
        surface->Release();
      }
    }

    //clear primary display and all backbuffers
    for(int i = 0; i < 3; i++) {
      device->Clear(0, 0, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0x00, 0x00, 0x00), 1.0f, 0);
      device->Present(0, 0, 0, 0);
    }
  }

  //locks the source surface and exposes its pixel data / pitch to the caller
  bool lock(uint32_t *&data, unsigned &pitch) {
    if(caps.stretchrect == false) {
      texture->GetLevelDesc(0, &d3dsd);
      texture->GetSurfaceLevel(0, &surface);
    }

    surface->LockRect(&d3dlr, 0, flags.lock);
    pitch = d3dlr.Pitch;
    return data = (uint32_t*)d3dlr.pBits;  //false (null) if the lock failed
  }

  void unlock() {
    surface->UnlockRect();
    if(caps.stretchrect == false) surface->Release();
  }

  //blits the top-left width*height region of the source surface to the window
  void refresh(unsigned width, unsigned height) {
    if(!device) return;

    RECT rd, rs;  //dest, source rectangles
    GetClientRect(settings.handle, &rd);
    SetRect(&rs, 0, 0, width, height);

    if(state.width != rd.right || state.height != rd.bottom) {
      //if window size changed, D3DPRESENT_PARAMETERS must be updated
      //failure to do so causes scaling issues on some ATI drivers
      init();
    }

    device->BeginScene();

    if(caps.stretchrect == true) {
      //fast path: scale directly into the backbuffer
      LPDIRECT3DSURFACE9 temp;
      device->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &temp);
      device->StretchRect(surface, &rs, temp, 0, static_cast<D3DTEXTUREFILTERTYPE>(flags.filter));
      temp->Release();
    } else {
      //fallback: draw a textured quad covering the client area
      set_vertex(0, 0, width, height, 1024, 1024, 0, 0, rd.right, rd.bottom);
      device->SetTexture(0, texture);
      device->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
    }

    device->EndScene();

    if(settings.synchronize) {
      //busy-wait for the vertical blanking interval before presenting
      while(true) {
        D3DRASTER_STATUS status;
        device->GetRasterStatus(0, &status);
        if(status.InVBlank == true) break;
      }
    }

    device->Present(0, 0, 0, 0);
  }

  //(re)creates the Direct3D device and all resources; returns false on failure
  bool init() {
    term();

    RECT rd;
    GetClientRect(settings.handle, &rd);
    state.width  = rd.right;
    state.height = rd.bottom;

    lpd3d = Direct3DCreate9(D3D_SDK_VERSION);
    if(!lpd3d) return false;

    memset(&presentation, 0, sizeof(presentation));
    presentation.Flags                  = D3DPRESENTFLAG_VIDEO;
    presentation.SwapEffect             = D3DSWAPEFFECT_FLIP;
    presentation.hDeviceWindow          = settings.handle;
    presentation.BackBufferCount        = 1;
    presentation.MultiSampleType        = D3DMULTISAMPLE_NONE;
    presentation.MultiSampleQuality     = 0;
    presentation.EnableAutoDepthStencil = false;
    presentation.AutoDepthStencilFormat = D3DFMT_UNKNOWN;
    presentation.PresentationInterval   = D3DPRESENT_INTERVAL_IMMEDIATE;
    presentation.Windowed               = true;
    presentation.BackBufferFormat       = D3DFMT_UNKNOWN;
    presentation.BackBufferWidth        = 0;
    presentation.BackBufferHeight       = 0;

    if(lpd3d->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, settings.handle,
      D3DCREATE_SOFTWARE_VERTEXPROCESSING, &presentation, &device) != D3D_OK) {
      return false;
    }

    //detect device capabilities
    device->GetDeviceCaps(&d3dcaps);
    //bugfix: original tested MaxTextureWidth twice, leaving height unchecked
    if(d3dcaps.MaxTextureWidth < 1024 || d3dcaps.MaxTextureHeight < 1024) return false;
    caps.dynamic = bool(d3dcaps.Caps2 & D3DCAPS2_DYNAMICTEXTURES);
    caps.stretchrect = (d3dcaps.DevCaps2 & D3DDEVCAPS2_CAN_STRETCHRECT_FROM_TEXTURES) &&
    (d3dcaps.StretchRectFilterCaps & D3DPTFILTERCAPS_MINFPOINT ) &&
    (d3dcaps.StretchRectFilterCaps & D3DPTFILTERCAPS_MAGFPOINT ) &&
    (d3dcaps.StretchRectFilterCaps & D3DPTFILTERCAPS_MINFLINEAR) &&
    (d3dcaps.StretchRectFilterCaps & D3DPTFILTERCAPS_MAGFLINEAR);

    if(caps.dynamic == true) {
      flags.t_usage = D3DUSAGE_DYNAMIC;
      flags.v_usage = D3DUSAGE_WRITEONLY | D3DUSAGE_DYNAMIC;
      flags.t_pool  = D3DPOOL_DEFAULT;
      flags.v_pool  = D3DPOOL_DEFAULT;
      flags.lock    = D3DLOCK_NOSYSLOCK | D3DLOCK_DISCARD;
    } else {
      flags.t_usage = 0;
      flags.v_usage = D3DUSAGE_WRITEONLY;
      flags.t_pool  = D3DPOOL_MANAGED;
      flags.v_pool  = D3DPOOL_MANAGED;
      flags.lock    = D3DLOCK_NOSYSLOCK | D3DLOCK_DISCARD;
    }

    device->SetDialogBoxMode(false);

    //single-texture modulate-free pipeline: output = texture color, fixed alpha
    device->SetTextureStageState(0, D3DTSS_COLOROP,   D3DTOP_SELECTARG1);
    device->SetTextureStageState(0, D3DTSS_COLORARG1, D3DTA_TEXTURE);
    device->SetTextureStageState(0, D3DTSS_COLORARG2, D3DTA_DIFFUSE);
    device->SetTextureStageState(0, D3DTSS_ALPHAOP,   D3DTOP_SELECTARG1);
    device->SetTextureStageState(0, D3DTSS_ALPHAARG1, D3DTA_TEXTURE);
    device->SetTextureStageState(0, D3DTSS_ALPHAARG2, D3DTA_DIFFUSE);

    //disable 3D-only features; this driver performs plain 2D blits
    device->SetRenderState(D3DRS_LIGHTING, false);
    device->SetRenderState(D3DRS_ZENABLE,  false);
    device->SetRenderState(D3DRS_CULLMODE, D3DCULL_NONE);

    device->SetRenderState(D3DRS_SRCBLEND,  D3DBLEND_SRCALPHA);
    device->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_INVSRCALPHA);
    device->SetRenderState(D3DRS_ALPHABLENDENABLE, false);

    device->SetVertexShader(NULL);
    device->SetFVF(D3DVERTEX);

    if(caps.stretchrect == true) {
      device->CreateOffscreenPlainSurface(1024, 1024, D3DFMT_X8R8G8B8,
        D3DPOOL_DEFAULT, &surface, NULL);
    } else {
      device->CreateTexture(1024, 1024, 1, flags.t_usage, D3DFMT_X8R8G8B8,
        static_cast<D3DPOOL>(flags.t_pool), &texture, NULL);
    }

    device->CreateVertexBuffer(sizeof(d3dvertex) * 4, flags.v_usage, D3DVERTEX,
      static_cast<D3DPOOL>(flags.v_pool), &vertex_buffer, NULL);

    update_filter();
    clear();
    return true;
  }

  //releases all Direct3D resources in reverse order of creation
  void term() {
    if(vertex_buffer) { vertex_buffer->Release(); vertex_buffer = 0; }
    if(surface) { surface->Release(); surface = 0; }
    if(texture) { texture->Release(); texture = 0; }
    if(device) { device->Release(); device = 0; }
    if(lpd3d) { lpd3d->Release(); lpd3d = 0; }
  }

  pVideoD3D(VideoD3D &self_) : self(self_) {
    vertex_buffer = 0;
    surface = 0;
    texture = 0;
    device = 0;
    lpd3d = 0;
    settings.handle = 0;
    settings.synchronize = false;
    settings.filter = Video::FilterLinear;
  }
};
//VideoD3D public interface: thin forwarders to the private pVideoD3D implementation (pimpl idiom)
bool VideoD3D::cap(Setting setting) { return p.cap(setting); }
uintptr_t VideoD3D::get(Setting setting) { return p.get(setting); }
bool VideoD3D::set(Setting setting, uintptr_t param) { return p.set(setting, param); }
bool VideoD3D::lock(uint32_t *&data, unsigned &pitch) { return p.lock(data, pitch); }
void VideoD3D::unlock() { p.unlock(); }
void VideoD3D::clear() { p.clear(); }
void VideoD3D::refresh(unsigned width, unsigned height) { p.refresh(width, height); }
bool VideoD3D::init() { return p.init(); }
void VideoD3D::term() { p.term(); }
VideoD3D::VideoD3D() : p(*new pVideoD3D(*this)) {}
VideoD3D::~VideoD3D() { delete &p; }
} //namespace ruby
#undef D3DVERTEX

View File

@@ -0,0 +1,22 @@
class pVideoD3D;

//ruby video driver: Direct3D 9 output (Windows).
//all methods forward to the private pVideoD3D implementation.
class VideoD3D : public Video {
public:
  bool cap(Setting);                             //query whether a setting is supported
  uintptr_t get(Setting);                        //read a setting's current value
  bool set(Setting, uintptr_t);                  //change a setting
  bool lock(uint32_t *&data, unsigned &pitch);   //acquire the pixel buffer for writing
  void unlock();                                 //release the pixel buffer
  void clear();                                  //blank the output to black
  void refresh(unsigned width, unsigned height); //present the locked frame data
  bool init();                                   //create device + resources
  void term();                                   //destroy device + resources
  VideoD3D();
  ~VideoD3D();

private:
  pVideoD3D &p;  //owned implementation object
};

View File

@@ -0,0 +1,178 @@
#include <windows.h>
#include <ddraw.h>
namespace ruby {
#include "directdraw.hpp"
class pVideoDD {
public:
  VideoDD &self;
  LPDIRECTDRAW lpdd;    //legacy interface; only used to obtain lpdd7, released immediately
  LPDIRECTDRAW7 lpdd7;
  LPDIRECTDRAWSURFACE7 screen, raster;  //primary display surface; offscreen source surface
  LPDIRECTDRAWCLIPPER clipper;
  DDSURFACEDESC2 ddsd;
  DDSCAPS2 ddscaps;

  struct {
    HWND handle;       //output window
    bool synchronize;  //wait for vertical blank before blitting
  } settings;

  //reports which settings this driver supports
  bool cap(Video::Setting setting) {
    if(setting == Video::Handle) return true;
    if(setting == Video::Synchronize) return true;
    return false;
  }

  //returns the current value of a supported setting; false for unknown settings
  uintptr_t get(Video::Setting setting) {
    if(setting == Video::Handle) return (uintptr_t)settings.handle;
    if(setting == Video::Synchronize) return settings.synchronize;
    return false;
  }

  //updates a supported setting; returns true on success
  bool set(Video::Setting setting, uintptr_t param) {
    if(setting == Video::Handle) {
      settings.handle = (HWND)param;
      return true;
    }

    if(setting == Video::Synchronize) {
      settings.synchronize = param;
      return true;
    }

    return false;
  }

  //fills both the primary surface and the offscreen raster with opaque black
  void clear() {
    DDBLTFX fx;
    //bugfix: zero the whole struct; the original passed uninitialized fields to Blt()
    memset(&fx, 0, sizeof(DDBLTFX));
    fx.dwSize = sizeof(DDBLTFX);
    fx.dwFillColor = 0x00000000;
    screen->Blt(0, 0, 0, DDBLT_WAIT | DDBLT_COLORFILL, &fx);
    raster->Blt(0, 0, 0, DDBLT_WAIT | DDBLT_COLORFILL, &fx);
  }

  //locks the offscreen raster surface and exposes its pixels / pitch
  bool lock(uint32_t *&data, unsigned &pitch) {
    if(raster->Lock(0, &ddsd, DDLOCK_WAIT, 0) != DD_OK) return false;
    pitch = ddsd.lPitch;
    return data = (uint32_t*)ddsd.lpSurface;
  }

  void unlock() {
    raster->Unlock(0);
  }

  //blits the top-left r_width*r_height region of the raster onto the window,
  //scaling to the current client area
  void refresh(unsigned r_width, unsigned r_height) {
    if(settings.synchronize) {
      //busy-wait for the vertical blanking interval
      while(true) {
        BOOL in_vblank;
        lpdd7->GetVerticalBlankStatus(&in_vblank);
        if(in_vblank == true) break;
      }
    }

    HRESULT hr;
    RECT rd, rs;
    SetRect(&rs, 0, 0, r_width, r_height);

    //destination rectangle must be in screen coordinates for the primary surface
    POINT p = { 0, 0 };
    ClientToScreen(settings.handle, &p);
    GetClientRect(settings.handle, &rd);
    OffsetRect(&rd, p.x, p.y);

    if(screen->Blt(&rd, raster, &rs, DDBLT_WAIT, 0) == DDERR_SURFACELOST) {
      //surfaces can be lost on mode switches; restore and retry next frame
      screen->Restore();
      raster->Restore();
    }
  }

  //(re)creates the DirectDraw interfaces and surfaces; returns false on failure
  bool init() {
    term();

    //bugfix: the original ignored creation failures and dereferenced null pointers
    if(DirectDrawCreate(0, &lpdd, 0) != DD_OK) return false;
    if(lpdd->QueryInterface(IID_IDirectDraw7, (void**)&lpdd7) != DD_OK) return false;
    if(lpdd) { lpdd->Release(); lpdd = 0; }

    lpdd7->SetCooperativeLevel(settings.handle, DDSCL_NORMAL);

    //primary (visible) surface
    memset(&ddsd, 0, sizeof(DDSURFACEDESC2));
    ddsd.dwSize = sizeof(DDSURFACEDESC2);
    ddsd.dwFlags = DDSD_CAPS;
    ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE;
    lpdd7->CreateSurface(&ddsd, &screen, 0);

    //clipper constrains blits to the window's client region
    lpdd7->CreateClipper(0, &clipper, 0);
    clipper->SetHWnd(0, settings.handle);
    screen->SetClipper(clipper);

    create_raster();

    clear();
    return true;
  }

  //creates the 1024x1024 offscreen source surface; prefers a forced 32-bit
  //pixel format, falling back to the display's native format on failure
  void create_raster() {
    screen->GetSurfaceDesc(&ddsd);
    int depth = ddsd.ddpfPixelFormat.dwRGBBitCount;
    if(depth == 32) goto try_native_surface;  //display is already 32-bit; native is fine

    memset(&ddsd, 0, sizeof(DDSURFACEDESC2));
    ddsd.dwSize = sizeof(DDSURFACEDESC2);
    ddsd.dwFlags = DDSD_CAPS | DDSD_WIDTH | DDSD_HEIGHT | DDSD_PIXELFORMAT;
    ddsd.ddsCaps.dwCaps = DDSCAPS_OFFSCREENPLAIN | DDSCAPS_VIDEOMEMORY; //DDSCAPS_SYSTEMMEMORY
    ddsd.dwWidth  = 1024;
    ddsd.dwHeight = 1024;

    //explicit X8R8G8B8 layout, matching the ruby pixel-input format
    ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
    ddsd.ddpfPixelFormat.dwFlags = DDPF_RGB;
    ddsd.ddpfPixelFormat.dwRGBBitCount = 32;
    ddsd.ddpfPixelFormat.dwRBitMask = 0xff0000;
    ddsd.ddpfPixelFormat.dwGBitMask = 0x00ff00;
    ddsd.ddpfPixelFormat.dwBBitMask = 0x0000ff;

    if(lpdd7->CreateSurface(&ddsd, &raster, 0) == DD_OK) return;

    try_native_surface:
    memset(&ddsd, 0, sizeof(DDSURFACEDESC2));
    ddsd.dwSize = sizeof(DDSURFACEDESC2);
    ddsd.dwFlags = DDSD_CAPS | DDSD_WIDTH | DDSD_HEIGHT;
    ddsd.ddsCaps.dwCaps = DDSCAPS_OFFSCREENPLAIN | DDSCAPS_VIDEOMEMORY; //DDSCAPS_SYSTEMMEMORY
    ddsd.dwWidth  = 1024;
    ddsd.dwHeight = 1024;

    if(lpdd7->CreateSurface(&ddsd, &raster, 0) == DD_OK) return;
  }

  //releases all DirectDraw resources in reverse order of creation
  void term() {
    if(clipper) { clipper->Release(); clipper = 0; }
    if(raster) { raster->Release(); raster = 0; }
    if(screen) { screen->Release(); screen = 0; }
    if(lpdd7) { lpdd7->Release(); lpdd7 = 0; }
    if(lpdd) { lpdd->Release(); lpdd = 0; }
  }

  pVideoDD(VideoDD &self_) : self(self_) {
    lpdd = 0;
    lpdd7 = 0;
    screen = 0;
    raster = 0;
    clipper = 0;
    settings.handle = 0;
  }
};
//VideoDD public interface: thin forwarders to the private pVideoDD implementation (pimpl idiom)
bool VideoDD::cap(Setting setting) { return p.cap(setting); }
uintptr_t VideoDD::get(Setting setting) { return p.get(setting); }
bool VideoDD::set(Setting setting, uintptr_t param) { return p.set(setting, param); }
bool VideoDD::lock(uint32_t *&data, unsigned &pitch) { return p.lock(data, pitch); }
void VideoDD::unlock() { p.unlock(); }
void VideoDD::clear() { p.clear(); }
void VideoDD::refresh(unsigned width, unsigned height) { p.refresh(width, height); }
bool VideoDD::init() { return p.init(); }
void VideoDD::term() { p.term(); }
VideoDD::VideoDD() : p(*new pVideoDD(*this)) {}
VideoDD::~VideoDD() { delete &p; }
} //namespace ruby

View File

@@ -0,0 +1,22 @@
class pVideoDD;

//ruby video driver: DirectDraw 7 output (Windows).
//all methods forward to the private pVideoDD implementation.
class VideoDD : public Video {
public:
  bool cap(Setting);                             //query whether a setting is supported
  uintptr_t get(Setting);                        //read a setting's current value
  bool set(Setting, uintptr_t);                  //change a setting
  bool lock(uint32_t *&data, unsigned &pitch);   //acquire the pixel buffer for writing
  void unlock();                                 //release the pixel buffer
  void clear();                                  //blank the output to black
  void refresh(unsigned width, unsigned height); //present the locked frame data
  bool init();                                   //create interfaces + surfaces
  void term();                                   //destroy interfaces + surfaces
  VideoDD();
  ~VideoDD();

private:
  pVideoDD &p;  //owned implementation object
};

103
bsnes/lib/ruby/video/gdi.cpp Executable file
View File

@@ -0,0 +1,103 @@
#include <assert.h>
#include <windows.h>
namespace ruby {
#include "gdi.hpp"
//ruby video driver: plain GDI output (Windows). Slow software fallback used
//when no accelerated driver is available.
class pVideoGDI {
public:
  VideoGDI &self;
  uint32_t *buffer;   //1024x1024 X8R8G8B8 pixel buffer exposed via lock()
  HBITMAP bitmap;     //device-compatible bitmap that buffer is copied into
  HDC bitmapdc;       //memory DC that bitmap is selected into
  BITMAPINFO bmi;     //describes the layout of buffer for SetDIBits()

  struct {
    HWND handle;  //output window
  } settings;

  //only the window handle setting is supported by this driver
  bool cap(Video::Setting setting) {
    if(setting == Video::Handle) return true;
    return false;
  }

  uintptr_t get(Video::Setting setting) {
    if(setting == Video::Handle) return (uintptr_t)settings.handle;
    return false;
  }

  bool set(Video::Setting setting, uintptr_t param) {
    if(setting == Video::Handle) {
      settings.handle = (HWND)param;
      return true;
    }
    return false;
  }

  //exposes the software buffer directly; pitch is a fixed 1024 pixels * 4 bytes
  bool lock(uint32_t *&data, unsigned &pitch) {
    pitch = 1024 * 4;
    return data = buffer;
  }

  void unlock() {}

  //copies buffer into the GDI bitmap, then stretch-blits it to the window.
  //NOTE(review): bmi uses a negative biHeight (top-down DIB) while StretchBlt
  //reads from source y = 1024 - r_height; this pairing looks intentional to
  //compensate for SetDIBits' bottom-up scanline indexing -- confirm before changing.
  void refresh(unsigned r_width, unsigned r_height) {
    RECT rc;
    GetClientRect(settings.handle, &rc);
    SetDIBits(bitmapdc, bitmap, 0, r_height, (void*)buffer, &bmi, DIB_RGB_COLORS);
    HDC hdc = GetDC(settings.handle);
    StretchBlt(hdc, rc.left, rc.top, rc.right, rc.bottom, bitmapdc, 0, 1024 - r_height, r_width, r_height, SRCCOPY);
    ReleaseDC(settings.handle, hdc);
  }

  //creates the memory DC + bitmap and describes the buffer layout; always succeeds
  //(bitmap/DC creation failures are only caught by assert in debug builds)
  bool init() {
    HDC hdc = GetDC(settings.handle);
    bitmapdc = CreateCompatibleDC(hdc);
    assert(bitmapdc);
    bitmap = CreateCompatibleBitmap(hdc, 1024, 1024);
    assert(bitmap);
    SelectObject(bitmapdc, bitmap);
    ReleaseDC(settings.handle, hdc);
    memset(&bmi, 0, sizeof(BITMAPINFO));
    bmi.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    bmi.bmiHeader.biWidth = 1024;
    bmi.bmiHeader.biHeight = -1024;  //negative height = top-down DIB
    bmi.bmiHeader.biPlanes = 1;
    bmi.bmiHeader.biBitCount = 32; //biBitCount of 15 is invalid, biBitCount of 16 is really RGB555
    bmi.bmiHeader.biCompression = BI_RGB;
    bmi.bmiHeader.biSizeImage = 1024 * 1024 * sizeof(uint32_t);
    return true;
  }

  void term() {
    DeleteObject(bitmap);
    DeleteDC(bitmapdc);
  }

  pVideoGDI(VideoGDI &self_) : self(self_) {
    buffer = (uint32_t*)malloc(1024 * 1024 * sizeof(uint32_t));
    settings.handle = 0;
  }

  ~pVideoGDI() {
    if(buffer) free(buffer);
  }
};
//VideoGDI public interface: thin forwarders to the private pVideoGDI implementation (pimpl idiom)
bool VideoGDI::cap(Setting setting) { return p.cap(setting); }
uintptr_t VideoGDI::get(Setting setting) { return p.get(setting); }
bool VideoGDI::set(Setting setting, uintptr_t param) { return p.set(setting, param); }
bool VideoGDI::lock(uint32_t *&data, unsigned &pitch) { return p.lock(data, pitch); }
void VideoGDI::unlock() { p.unlock(); }
void VideoGDI::refresh(unsigned width, unsigned height) { p.refresh(width, height); }
bool VideoGDI::init() { return p.init(); }
void VideoGDI::term() { p.term(); }
VideoGDI::VideoGDI() : p(*new pVideoGDI(*this)) {}
VideoGDI::~VideoGDI() { delete &p; }
} //namespace ruby

21
bsnes/lib/ruby/video/gdi.hpp Executable file
View File

@@ -0,0 +1,21 @@
class pVideoGDI;

//ruby video driver: GDI software output (Windows).
//note: this driver has no clear() method, unlike the other video drivers.
class VideoGDI : public Video {
public:
  bool cap(Setting);                             //query whether a setting is supported
  uintptr_t get(Setting);                        //read a setting's current value
  bool set(Setting, uintptr_t);                  //change a setting
  bool lock(uint32_t *&data, unsigned &pitch);   //acquire the pixel buffer for writing
  void unlock();                                 //release the pixel buffer
  void refresh(unsigned width, unsigned height); //present the locked frame data
  bool init();                                   //create GDI resources
  void term();                                   //destroy GDI resources
  VideoGDI();
  ~VideoGDI();

private:
  pVideoGDI &p;  //owned implementation object
};

292
bsnes/lib/ruby/video/glx.cpp Executable file
View File

@@ -0,0 +1,292 @@
/*
video.glx
author: byuu
license: public domain
last updated: 2008-08-20
Design notes:
SGI's GLX is the X11/Xlib interface to OpenGL.
At the time of this writing, there are three relevant versions of the API: versions 1.2, 1.3 and 1.4.
Version 1.2 was released on March 4th, 1997.
Version 1.3 was released on October 19th, 1998.
Version 1.4 was released on December 16th, 2005.
Despite version 1.3 being roughly ten years old at this time, there are still many modern X11 GLX drivers
that lack full support for the specification. Most notable would be the official video drivers from ATI.
Given this, 1.4 support is pretty much hopeless to target.
Luckily, each version has been designed to be backwards compatible with the previous version. As well,
version 1.2 is wholly sufficient, albeit less convenient, to implement this video module.
Therefore, for the purpose of compatibility, this driver only uses GLX 1.2 or earlier API commands.
As well, it only uses raw Xlib API commands, so that it is compatible with any toolkit.
*/
#include <GL/gl.h>
#include <GL/glx.h>
namespace ruby {
#include "glx.hpp"
//returns true once window is mapped (created and displayed onscreen)
//XIfEvent predicate: matches the MapNotify event for the window passed via arg,
//i.e. fires once the window has been created and displayed onscreen.
static Bool glx_wait_for_map_notify(Display *d, XEvent *e, char *arg) {
  if(e->type != MapNotify) return False;
  return e->xmap.window == (Window)arg;
}
class pVideoGLX {
public:
  VideoGLX &self;
  uint32_t *buffer;       //1024x1024 X8R8G8B8 pixel buffer exposed via lock()
  Display *display;
  int screen;
  Window xwindow;         //child window created with the GLX-compatible Visual
  Colormap colormap;
  GLXContext glxcontext;
  GLXWindow glxwindow;
  GLuint gltexture;

  struct {
    int version_major, version_minor;
    bool double_buffer;
    bool is_direct;
  } glx;

  struct {
    Window handle;     //parent window provided by the application
    bool synchronize;  //request a double-buffered visual (vsync via glXSwapBuffers)
    unsigned filter;   //requested Video::Filter* mode
  } settings;

  //reports which settings this driver supports
  bool cap(Video::Setting setting) {
    if(setting == Video::Handle) return true;
    if(setting == Video::Synchronize) return true;
    if(setting == Video::Filter) return true;
    return false;
  }

  //returns the current value of a supported setting; false for unknown settings
  uintptr_t get(Video::Setting setting) {
    if(setting == Video::Handle) return settings.handle;
    if(setting == Video::Synchronize) return settings.synchronize;
    if(setting == Video::Filter) return settings.filter;
    return false;
  }

  //updates a supported setting; changing Synchronize reinitializes the driver,
  //as the double-buffer attribute is fixed at context-creation time
  bool set(Video::Setting setting, uintptr_t param) {
    if(setting == Video::Handle) {
      settings.handle = param;
      return true;
    }

    if(setting == Video::Synchronize) {
      if(settings.synchronize != param) {
        settings.synchronize = param;
        if(glxcontext) {
          term();
          init();
        }
        return true;
      }
    }

    if(setting == Video::Filter) {
      settings.filter = param;
      return true;
    }

    return false;
  }

  //exposes the software buffer directly; pitch is a fixed 1024 pixels * 4 bytes
  bool lock(uint32_t *&data, unsigned &pitch) {
    pitch = 1024 * 4;
    return data = buffer;
  }

  void unlock() {
  }

  //blanks both the software buffer and the GL framebuffer
  void clear() {
    memset(buffer, 0, 1024 * 1024 * sizeof(uint32_t));
    glClearColor(0.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);
    glFlush();
    if(glx.double_buffer) glXSwapBuffers(display, glxwindow);
  }

  //uploads the top-left width*height region of buffer and draws it scaled to the window
  void refresh(unsigned width, unsigned height) {
    //we must ensure that the child window is the same size as the parent window.
    //unfortunately, we cannot hook the parent window resize event notification,
    //as we did not create the parent window, nor have any knowledge of the toolkit used.
    //therefore, inelegant as it may be, we query each window size and resize as needed.
    XWindowAttributes parent, child;
    XGetWindowAttributes(display, settings.handle, &parent);
    XGetWindowAttributes(display, xwindow, &child);
    if(child.width != parent.width || child.height != parent.height) {
      XResizeWindow(display, xwindow, parent.width, parent.height);
    }

    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,
      settings.filter == Video::FilterPoint ? GL_NEAREST : GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
      settings.filter == Video::FilterPoint ? GL_NEAREST : GL_LINEAR);

    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, parent.width, 0, parent.height, -1.0, 1.0);
    glViewport(0, 0, parent.width, parent.height);

    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();

    glPixelStorei(GL_UNPACK_ROW_LENGTH, 1024); //length of buffer in pixels
    glTexSubImage2D(GL_TEXTURE_2D,
      /* mip-map level = */ 0, /* x = */ 0, /* y = */ 0,
      width, height, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, buffer);

    //OpenGL projection sets 0,0 as *bottom-left* of screen.
    //therefore, below vertices flip image to support top-left source.
    //texture range = x1:0.0, y1:0.0, x2:1.0, y2:1.0
    //vertex range = x1:0, y1:0, x2:width, y2:height
    double w = double(width)  / 1024.0;
    double h = double(height) / 1024.0;
    int u = parent.width;
    int v = parent.height;

    glBegin(GL_TRIANGLE_STRIP);
    glTexCoord2f(0, 0); glVertex3i(0, v, 0);
    glTexCoord2f(w, 0); glVertex3i(u, v, 0);
    glTexCoord2f(0, h); glVertex3i(0, 0, 0);
    glTexCoord2f(w, h); glVertex3i(u, 0, 0);
    glEnd();

    glFlush();
    if(glx.double_buffer) glXSwapBuffers(display, glxwindow);
  }

  //opens the X display, creates a GLX-compatible child window inside the
  //application-provided parent, and sets up the GL context + texture
  bool init() {
    display = XOpenDisplay(0);
    if(!display) return false;  //bugfix: original dereferenced a null display on failure
    screen = DefaultScreen(display);

    glXQueryVersion(display, &glx.version_major, &glx.version_minor);
    //require GLX 1.2+ API
    if(glx.version_major < 1 || (glx.version_major == 1 && glx.version_minor < 2)) return false;

    buffer = new(zeromemory) uint32_t[1024 * 1024];

    XWindowAttributes window_attributes;
    XGetWindowAttributes(display, settings.handle, &window_attributes);

    //let GLX determine the best Visual to use for GL output; provide a few hints
    //note: some video drivers will override double buffering attribute
    int elements = 0;
    int attributelist[] = { GLX_RGBA, None };
    int attributelist_sync[] = { GLX_RGBA, GLX_DOUBLEBUFFER, None };
    XVisualInfo *vi = glXChooseVisual(display, screen,
      settings.synchronize ? attributelist_sync : attributelist);
    if(!vi) return false;  //bugfix: original dereferenced a null XVisualInfo on failure

    //Window settings.handle has already been realized, most likely with DefaultVisual.
    //GLX requires that the GL output window has the same Visual as the GLX context.
    //it is not possible to change the Visual of an already realized (created) window.
    //therefore a new child window, using the same GLX Visual, must be created and binded to settings.handle.
    colormap = XCreateColormap(display, RootWindow(display, vi->screen), vi->visual, AllocNone);
    XSetWindowAttributes attributes;
    attributes.colormap = colormap;
    attributes.border_pixel = 0;
    attributes.event_mask = StructureNotifyMask;
    xwindow = XCreateWindow(display, /* parent = */ settings.handle,
      /* x = */ 0, /* y = */ 0, window_attributes.width, window_attributes.height,
      /* border_width = */ 0, vi->depth, InputOutput, vi->visual,
      CWColormap | CWBorderPixel | CWEventMask, &attributes);
    XSetWindowBackground(display, xwindow, /* color = */ 0);
    XMapWindow(display, xwindow);
    XEvent event;
    //window must be realized (appear onscreen) before we make the context current
    XIfEvent(display, &event, glx_wait_for_map_notify, (char*)xwindow);

    glxcontext = glXCreateContext(display, vi, /* sharelist = */ 0, /* direct = */ GL_TRUE);
    glXMakeCurrent(display, glxwindow = xwindow, glxcontext);

    //read attributes of frame buffer for later use, as requested attributes from above are not always granted
    int value = 0;
    glXGetConfig(display, vi, GLX_DOUBLEBUFFER, &value);
    glx.double_buffer = value;
    glx.is_direct = glXIsDirect(display, glxcontext);

    //disable unused features
    glDisable(GL_ALPHA_TEST);
    glDisable(GL_BLEND);
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_POLYGON_SMOOTH);
    glDisable(GL_STENCIL_TEST);

    //enable useful and required features
    glEnable(GL_DITHER);
    glEnable(GL_TEXTURE_2D);

    //create GL texture to copy buffer to
    gltexture = 0;
    glGenTextures(1, &gltexture);
    glBindTexture(GL_TEXTURE_2D, gltexture);
    glPixelStorei(GL_UNPACK_ROW_LENGTH, 1024);
    glTexImage2D(GL_TEXTURE_2D,
      /* mip-map level = */ 0, /* internal format = */ GL_RGB,
      /* width = */ 1024, /* height = */ 1024, /* border = */ 0,
      /* format = */ GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, buffer);

    return true;
  }

  //releases all GL / X resources in reverse order of creation
  void term() {
    if(gltexture) {
      glDeleteTextures(1, &gltexture);
      gltexture = 0;
    }

    if(glxcontext) {
      glXDestroyContext(display, glxcontext);
      glxcontext = 0;
    }

    if(xwindow) {
      XUnmapWindow(display, xwindow);
      xwindow = 0;
    }

    if(colormap) {
      XFreeColormap(display, colormap);
      colormap = 0;
    }

    if(buffer) {
      delete[] buffer;
      buffer = 0;
    }

    //bugfix: original never closed the display, leaking the X connection on every
    //term()/init() cycle. Closed last, as the calls above still need it.
    if(display) {
      XCloseDisplay(display);
      display = 0;
    }
  }

  pVideoGLX(VideoGLX &self_) : self(self_) {
    settings.handle = 0;
    settings.synchronize = false;

    //bugfix: original left display and buffer uninitialized, so ~pVideoGLX()'s
    //term() call read garbage pointers if init() was never invoked
    display = 0;
    buffer = 0;
    xwindow = 0;
    colormap = 0;
    glxcontext = 0;
    glxwindow = 0;
    gltexture = 0;
  }

  ~pVideoGLX() { term(); }
};
//VideoGLX public interface: thin forwarders to the private pVideoGLX implementation (pimpl idiom)
bool VideoGLX::cap(Setting setting) { return p.cap(setting); }
uintptr_t VideoGLX::get(Setting setting) { return p.get(setting); }
bool VideoGLX::set(Setting setting, uintptr_t param) { return p.set(setting, param); }
bool VideoGLX::lock(uint32_t *&data, unsigned &pitch) { return p.lock(data, pitch); }
void VideoGLX::unlock() { p.unlock(); }
void VideoGLX::clear() { p.clear(); }
void VideoGLX::refresh(unsigned width, unsigned height) { p.refresh(width, height); }
bool VideoGLX::init() { return p.init(); }
void VideoGLX::term() { p.term(); }
VideoGLX::VideoGLX() : p(*new pVideoGLX(*this)) {}
VideoGLX::~VideoGLX() { delete &p; }
} //namespace ruby

22
bsnes/lib/ruby/video/glx.hpp Executable file
View File

@@ -0,0 +1,22 @@
class pVideoGLX;

//ruby video driver: OpenGL output via GLX 1.2 (X11).
//all methods forward to the private pVideoGLX implementation.
class VideoGLX : public Video {
public:
  bool cap(Setting);                             //query whether a setting is supported
  uintptr_t get(Setting);                        //read a setting's current value
  bool set(Setting, uintptr_t);                  //change a setting
  bool lock(uint32_t *&data, unsigned &pitch);   //acquire the pixel buffer for writing
  void unlock();                                 //release the pixel buffer
  void clear();                                  //blank the output to black
  void refresh(unsigned width, unsigned height); //present the locked frame data
  bool init();                                   //create GL context + resources
  void term();                                   //destroy GL context + resources
  VideoGLX();
  ~VideoGLX();

private:
  pVideoGLX &p;  //owned implementation object
};

137
bsnes/lib/ruby/video/sdl.cpp Executable file
View File

@@ -0,0 +1,137 @@
#include <sys/ipc.h>
#include <sys/shm.h>
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <X11/Xatom.h>
#include <X11/extensions/Xv.h>
#include <X11/extensions/Xvlib.h>
#include <X11/extensions/XShm.h>
#include <SDL/SDL.h>
namespace ruby {
#include "sdl.hpp"
class pVideoSDL {
public:
  VideoSDL &self;
  Display *display;
  SDL_Surface *screen, *buffer;  //SDL-owned video surface; driver-owned source surface

  struct {
    uintptr_t handle;  //X window id that SDL renders into (via SDL_WINDOWID)
  } settings;

  //only the window handle setting is supported by this driver
  bool cap(Video::Setting setting) {
    if(setting == Video::Handle) return true;
    return false;
  }

  uintptr_t get(Video::Setting setting) {
    if(setting == Video::Handle) return settings.handle;
    return false;
  }

  bool set(Video::Setting setting, uintptr_t param) {
    if(setting == Video::Handle) {
      settings.handle = param;
      return true;
    }
    return false;
  }

  //exposes the source surface's pixels / pitch to the caller
  bool lock(uint32_t *&data, unsigned &pitch) {
    if(SDL_MUSTLOCK(buffer)) SDL_LockSurface(buffer);
    pitch = buffer->pitch;
    return data = (uint32_t*)buffer->pixels;
  }

  void unlock() {
    if(SDL_MUSTLOCK(buffer)) SDL_UnlockSurface(buffer);
  }

  //forces full alpha on every pixel (see refresh() for why), then redraws
  void clear() {
    if(SDL_MUSTLOCK(buffer)) SDL_LockSurface(buffer);
    uint32_t *data = (uint32_t*)buffer->pixels;
    for(unsigned y = 0; y < 1024; y++) {
      for(unsigned x = 0; x < 1024; x++) {
        *data++ |= 0xff000000;
      }
      data += (buffer->pitch >> 2) - 1024;  //skip any padding at end of row
    }
    if(SDL_MUSTLOCK(buffer)) SDL_UnlockSurface(buffer);
    refresh(1024, 1024);
  }

  //stretches the top-left width*height region of buffer onto the window
  void refresh(unsigned width, unsigned height) {
    //ruby input is X8R8G8B8, top 8-bits are ignored.
    //as SDL forces us to use a 32-bit buffer, we must set alpha to 255 (full opacity)
    //to prevent blending against the window beneath when X window visual is 32-bits.
    if(SDL_MUSTLOCK(buffer)) SDL_LockSurface(buffer);
    uint32_t *data = (uint32_t*)buffer->pixels;
    for(unsigned y = 0; y < height; y++) {
      for(unsigned x = 0; x < width; x++) {
        *data++ |= 0xff000000;
      }
      data += (buffer->pitch >> 2) - width;
    }
    if(SDL_MUSTLOCK(buffer)) SDL_UnlockSurface(buffer);

    //scale to the current client area of the target window
    XWindowAttributes attributes;
    XGetWindowAttributes(display, settings.handle, &attributes);

    SDL_Rect src, dest;
    src.x = 0;
    src.y = 0;
    src.w = width;
    src.h = height;
    dest.x = 0;
    dest.y = 0;
    dest.w = attributes.width;
    dest.h = attributes.height;

    SDL_SoftStretch(buffer, &src, screen, &dest);
    SDL_UpdateRect(screen, dest.x, dest.y, dest.w, dest.h);
  }

  //initializes SDL video bound to the application-provided window
  bool init() {
    display = XOpenDisplay(0);

    //bugfix: putenv() retains a pointer to the string passed to it, so the
    //buffer must have static storage duration -- the original used a stack
    //buffer that dangled as soon as init() returned.
    static char env[512];
    sprintf(env, "SDL_WINDOWID=%ld", (long)settings.handle);
    putenv(env);

    SDL_InitSubSystem(SDL_INIT_VIDEO);
    //screen depth must be 32, as 24bpp with a 32-bit X window visual produces no output.
    screen = SDL_SetVideoMode(2560, 1600, 32, SDL_HWSURFACE);
    if(!screen) return false;
    //buffer depth must be 32, as this is the input format used by all ruby drivers.
    buffer = SDL_CreateRGBSurface(SDL_HWSURFACE,
      1024, 1024, 32, 0x00ff0000, 0x0000ff00, 0x000000ff, 0xff000000
    );
    return buffer != 0;
  }

  void term() {
    //bugfix: original leaked the source surface and X display connection.
    //screen is owned by SDL (SDL_SetVideoMode) and must not be freed here.
    if(buffer) { SDL_FreeSurface(buffer); buffer = 0; }
    screen = 0;
    SDL_QuitSubSystem(SDL_INIT_VIDEO);
    if(display) { XCloseDisplay(display); display = 0; }
  }

  pVideoSDL(VideoSDL &self_) : self(self_) {
    //bugfix: original left display / screen / buffer uninitialized
    display = 0;
    screen = 0;
    buffer = 0;
    settings.handle = 0;
  }
};
//VideoSDL public interface: thin forwarders to the private pVideoSDL implementation (pimpl idiom)
bool VideoSDL::cap(Setting setting) { return p.cap(setting); }
uintptr_t VideoSDL::get(Setting setting) { return p.get(setting); }
bool VideoSDL::set(Setting setting, uintptr_t param) { return p.set(setting, param); }
bool VideoSDL::lock(uint32_t *&data, unsigned &pitch) { return p.lock(data, pitch); }
void VideoSDL::unlock() { p.unlock(); }
void VideoSDL::clear() { p.clear(); }
void VideoSDL::refresh(unsigned width, unsigned height) { p.refresh(width, height); }
bool VideoSDL::init() { return p.init(); }
void VideoSDL::term() { p.term(); }
VideoSDL::VideoSDL() : p(*new pVideoSDL(*this)) {}
VideoSDL::~VideoSDL() { delete &p; }
} //namespace ruby

22
bsnes/lib/ruby/video/sdl.hpp Executable file
View File

@@ -0,0 +1,22 @@
class pVideoSDL;

//ruby video driver: SDL 1.2 output into an existing X11 window.
//all methods forward to the private pVideoSDL implementation.
class VideoSDL : public Video {
public:
  bool cap(Setting);                             //query whether a setting is supported
  uintptr_t get(Setting);                        //read a setting's current value
  bool set(Setting, uintptr_t);                  //change a setting
  bool lock(uint32_t *&data, unsigned &pitch);   //acquire the pixel buffer for writing
  void unlock();                                 //release the pixel buffer
  void clear();                                  //blank the output to black
  void refresh(unsigned width, unsigned height); //present the locked frame data
  bool init();                                   //initialize SDL video
  void term();                                   //shut down SDL video
  VideoSDL();
  ~VideoSDL();

private:
  pVideoSDL &p;  //owned implementation object
};

214
bsnes/lib/ruby/video/wgl.cpp Executable file
View File

@@ -0,0 +1,214 @@
/*
video.wgl
authors: byuu, krom
license: public domain
last updated: 2008-08-20
*/
#include <windows.h>
#include <GL/gl.h>
#include <GL/glext.h>
namespace ruby {
#include "wgl.hpp"
class pVideoWGL {
public:
VideoWGL &self;
uint32_t *buffer;
HDC display;
HGLRC wglcontext;
HWND window;
HINSTANCE glwindow;
GLuint gltexture;
struct {
HWND handle;
bool synchronize;
unsigned filter;
} settings;
bool cap(Video::Setting setting) {
if(setting == Video::Handle) return true;
if(setting == Video::Synchronize) return true;
if(setting == Video::Filter) return true;
return false;
}
uintptr_t get(Video::Setting setting) {
if(setting == Video::Handle) return (uintptr_t)settings.handle;
if(setting == Video::Synchronize) return settings.synchronize;
if(setting == Video::Filter) return settings.filter;
return false;
}
bool set(Video::Setting setting, uintptr_t param) {
if(setting == Video::Handle) {
settings.handle = (HWND)param;
return true;
}
if(setting == Video::Synchronize) {
if(settings.synchronize != param) {
settings.synchronize = param;
if(wglcontext) {
term();
init();
}
}
}
if(setting == Video::Filter) {
settings.filter = param;
return true;
}
return false;
}
bool lock(uint32_t *&data, unsigned &pitch) {
pitch = 1024 * 4;
return data = buffer;
}
void unlock() {
}
void clear() {
memset(buffer, 0, 1024 * 1024 * sizeof(uint32_t));
glClearColor(0.0, 0.0, 0.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT);
glFlush();
SwapBuffers(display);
}
void refresh(unsigned width, unsigned height) {
RECT rc;
GetClientRect(settings.handle, &rc);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,
settings.filter == Video::FilterPoint ? GL_NEAREST : GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
settings.filter == Video::FilterPoint ? GL_NEAREST : GL_LINEAR);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0, width, 0, height, -1.0, 1.0);
glViewport(0, 0, rc.right, rc.bottom);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glPixelStorei(GL_UNPACK_ROW_LENGTH, 1024); //length of buffer in pixels
glTexSubImage2D(GL_TEXTURE_2D,
/* mip-map level = */ 0, /* x = */ 0, /* y = */ 0,
width, height, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, buffer);
//OpenGL projection sets 0,0 as *bottom-left* of screen.
//therefore, below vertices flip image to support top-left source.
//texture range = x1:0.0, y1:0.0, x2:1.0, y2:1.0
//vertex range = x1:0, y1:0, x2:width, y2:height
double w = double(width) / 1024.0;
double h = double(height) / 1024.0;
int u = width;
int v = height;
glBegin(GL_TRIANGLE_STRIP);
glTexCoord2f(0, 0); glVertex3i(0, v, 0);
glTexCoord2f(w, 0); glVertex3i(u, v, 0);
glTexCoord2f(0, h); glVertex3i(0, 0, 0);
glTexCoord2f(w, h); glVertex3i(u, 0, 0);
glEnd();
glFlush();
SwapBuffers(display);
}
bool init() {
buffer = new(zeromemory) uint32_t[1024 * 1024];
GLuint pixel_format;
PIXELFORMATDESCRIPTOR pfd;
memset(&pfd, 0, sizeof(PIXELFORMATDESCRIPTOR));
pfd.nSize = sizeof(PIXELFORMATDESCRIPTOR);
pfd.nVersion = 1;
pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | (settings.synchronize ? PFD_DOUBLEBUFFER : 0);
pfd.iPixelType = PFD_TYPE_RGBA;
display = GetDC(settings.handle);
pixel_format = ChoosePixelFormat(display, &pfd);
SetPixelFormat(display, pixel_format, &pfd);
wglcontext = wglCreateContext(display);
wglMakeCurrent(display, wglcontext);
//disable unused features
glDisable(GL_ALPHA_TEST);
glDisable(GL_BLEND);
glDisable(GL_DEPTH_TEST);
glDisable(GL_POLYGON_SMOOTH);
glDisable(GL_STENCIL_TEST);
//enable useful and required features
glEnable(GL_DITHER);
glEnable(GL_TEXTURE_2D);
//create GL texture to copy buffer to
gltexture = 0;
glGenTextures(1, &gltexture);
glBindTexture(GL_TEXTURE_2D, gltexture);
glPixelStorei(GL_UNPACK_ROW_LENGTH, 1024);
glTexImage2D(GL_TEXTURE_2D,
/* mip-map level = */ 0, /* internal format = */ GL_RGB,
/* width = */ 1024, /* height = */ 1024, /* border = */ 0,
/* format = */ GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, buffer);
return true;
}
void term() {
if(gltexture) {
glDeleteTextures(1, &gltexture);
gltexture = 0;
}
if(wglcontext) {
wglDeleteContext(wglcontext);
wglcontext = 0;
}
if(buffer) {
delete[] buffer;
buffer = 0;
}
}
pVideoWGL(VideoWGL &self_) : self(self_) {
settings.handle = 0;
settings.synchronize = false;
settings.filter = 0;
window = 0;
wglcontext = 0;
glwindow = 0;
gltexture = 0;
}
~pVideoWGL() { term(); }
};
//VideoWGL public interface: every call is forwarded verbatim to the private
//pVideoWGL implementation (pimpl idiom), keeping WinAPI/GL types out of the header.
bool VideoWGL::cap(Setting setting) { return p.cap(setting); }
uintptr_t VideoWGL::get(Setting setting) { return p.get(setting); }
bool VideoWGL::set(Setting setting, uintptr_t param) { return p.set(setting, param); }
bool VideoWGL::lock(uint32_t *&data, unsigned &pitch) { return p.lock(data, pitch); }
void VideoWGL::unlock() { p.unlock(); }
void VideoWGL::clear() { p.clear(); }
void VideoWGL::refresh(unsigned width, unsigned height) { p.refresh(width, height); }
bool VideoWGL::init() { return p.init(); }
void VideoWGL::term() { p.term(); }
//the implementation object is heap-allocated here and destroyed via delete &p
VideoWGL::VideoWGL() : p(*new pVideoWGL(*this)) {}
VideoWGL::~VideoWGL() { delete &p; }
} //namespace ruby

22
bsnes/lib/ruby/video/wgl.hpp Executable file
View File

@@ -0,0 +1,22 @@
class pVideoWGL;

//ruby video driver: OpenGL output through WGL (Windows).
//all methods forward to the private pVideoWGL implementation (pimpl idiom).
class VideoWGL : public Video {
public:
  bool cap(Setting);                              //query whether a setting is supported
  uintptr_t get(Setting);                         //read the current value of a setting
  bool set(Setting, uintptr_t);                   //change a setting; returns success
  bool lock(uint32_t *&data, unsigned &pitch);    //acquire writable pixel buffer + row pitch (bytes)
  void unlock();                                  //release the pixel buffer
  void clear();                                   //blank the video output
  void refresh(unsigned width, unsigned height);  //present width x height pixels from the buffer
  bool init();                                    //create driver resources; requires Handle to be set
  void term();                                    //release all driver resources
  VideoWGL();
  ~VideoWGL();
private:
  pVideoWGL &p;                                   //private implementation, owned by this object
};

469
bsnes/lib/ruby/video/xv.cpp Executable file
View File

@@ -0,0 +1,469 @@
#include <sys/ipc.h>
#include <sys/shm.h>
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <X11/Xatom.h>
#include <X11/extensions/XShm.h>
#include <X11/extensions/Xv.h>
#include <X11/extensions/Xvlib.h>
extern "C" XvImage* XvShmCreateImage(Display*, XvPortID, int, char*, int, int, XShmSegmentInfo*);
namespace ruby {
#include "xv.hpp"
class pVideoXv {
public:
VideoXv &self;
uint32_t *buffer;
uint8_t *ytable, *utable, *vtable;
enum XvFormat {
XvFormatRGB32,
XvFormatRGB24,
XvFormatRGB16,
XvFormatRGB15,
XvFormatYUY2,
XvFormatUYVY,
XvFormatUnknown
};
struct {
Display *display;
GC gc;
Window window;
Colormap colormap;
XShmSegmentInfo shminfo;
int port;
int depth;
int visualid;
XvImage *image;
XvFormat format;
uint32_t fourcc;
} device;
struct {
Window handle;
bool synchronize;
} settings;
bool cap(Video::Setting setting) {
if(setting == Video::Handle) return true;
if(setting == Video::Synchronize) {
return XInternAtom(XOpenDisplay(0), "XV_SYNC_TO_VBLANK", true) != None;
}
return false;
}
uintptr_t get(Video::Setting setting) {
if(setting == Video::Handle) return settings.handle;
if(setting == Video::Synchronize) return settings.synchronize;
return false;
}
bool set(Video::Setting setting, uintptr_t param) {
if(setting == Video::Handle) {
settings.handle = param;
return true;
}
if(setting == Video::Synchronize) {
Display *display = XOpenDisplay(0);
Atom atom = XInternAtom(display, "XV_SYNC_TO_VBLANK", true);
if(atom != None && device.port >= 0) {
settings.synchronize = param;
XvSetPortAttribute(display, device.port, atom, settings.synchronize);
return true;
}
return false;
}
return false;
}
bool lock(uint32_t *&data, unsigned &pitch) {
pitch = 1024 * 4;
return data = buffer;
}
void unlock() {
}
void clear() {
memset(buffer, 0, 1024 * 1024 * sizeof(uint32_t));
//clear twice in case video is double buffered ...
refresh(1024, 1024);
refresh(1024, 1024);
}
void refresh(unsigned width, unsigned height) {
XWindowAttributes target;
XGetWindowAttributes(device.display, device.window, &target);
//we must ensure that the child window is the same size as the parent window.
//unfortunately, we cannot hook the parent window resize event notification,
//as we did not create the parent window, nor have any knowledge of the toolkit used.
//therefore, query each window size and resize as needed.
XWindowAttributes parent;
XGetWindowAttributes(device.display, settings.handle, &parent);
if(target.width != parent.width || target.height != parent.height) {
XResizeWindow(device.display, device.window, parent.width, parent.height);
}
//update target width and height attributes
XGetWindowAttributes(device.display, device.window, &target);
switch(device.format) {
case XvFormatRGB32: render_rgb32(width, height); break;
case XvFormatRGB24: render_rgb24(width, height); break;
case XvFormatRGB16: render_rgb16(width, height); break;
case XvFormatRGB15: render_rgb15(width, height); break;
case XvFormatYUY2: render_yuy2 (width, height); break;
case XvFormatUYVY: render_uyvy (width, height); break;
}
XvShmPutImage(device.display, device.port, device.window, device.gc, device.image,
0, 0, width, height,
0, 0, target.width, target.height,
true);
}
bool init() {
device.display = XOpenDisplay(0);
if(!XShmQueryExtension(device.display)) {
fprintf(stderr, "VideoXv: XShm extension not found.\n");
return false;
}
//find an appropriate Xv port
device.port = -1;
XvAdaptorInfo *adaptor_info;
unsigned adaptor_count;
XvQueryAdaptors(device.display, DefaultRootWindow(device.display), &adaptor_count, &adaptor_info);
for(unsigned i = 0; i < adaptor_count; i++) {
//find adaptor that supports both input (memory->drawable) and image (drawable->screen) masks
if(adaptor_info[i].num_formats < 1) continue;
if(!(adaptor_info[i].type & XvInputMask)) continue;
if(!(adaptor_info[i].type & XvImageMask)) continue;
device.port = adaptor_info[i].base_id;
device.depth = adaptor_info[i].formats->depth;
device.visualid = adaptor_info[i].formats->visual_id;
break;
}
XvFreeAdaptorInfo(adaptor_info);
if(device.port < 0) {
fprintf(stderr, "VideoXv: failed to find valid XvPort.\n");
return false;
}
//create child window to attach to parent window.
//this is so that even if parent window visual depth doesn't match Xv visual
//(common with composited windows), Xv can still render to child window.
XWindowAttributes window_attributes;
XGetWindowAttributes(device.display, settings.handle, &window_attributes);
XVisualInfo visualtemplate;
visualtemplate.visualid = device.visualid;
visualtemplate.screen = DefaultScreen(device.display);
visualtemplate.depth = device.depth;
visualtemplate.visual = 0;
int visualmatches = 0;
XVisualInfo *visualinfo = XGetVisualInfo(device.display, VisualIDMask | VisualScreenMask | VisualDepthMask, &visualtemplate, &visualmatches);
if(visualmatches < 1 || !visualinfo->visual) {
if(visualinfo) XFree(visualinfo);
fprintf(stderr, "VideoXv: unable to find Xv-compatible visual.\n");
return false;
}
device.colormap = XCreateColormap(device.display, settings.handle, visualinfo->visual, AllocNone);
XSetWindowAttributes attributes;
attributes.colormap = device.colormap;
attributes.border_pixel = 0;
attributes.event_mask = StructureNotifyMask;
device.window = XCreateWindow(device.display, /* parent = */ settings.handle,
/* x = */ 0, /* y = */ 0, window_attributes.width, window_attributes.height,
/* border_width = */ 0, device.depth, InputOutput, visualinfo->visual,
CWColormap | CWBorderPixel | CWEventMask, &attributes);
XFree(visualinfo);
XSetWindowBackground(device.display, device.window, /* color = */ 0);
XMapWindow(device.display, device.window);
device.gc = XCreateGC(device.display, device.window, 0, 0);
//set colorkey to auto paint, so that Xv video output is always visible
Atom atom = XInternAtom(device.display, "XV_AUTOPAINT_COLORKEY", true);
if(atom != None) XvSetPortAttribute(device.display, device.port, atom, 1);
//find optimal rendering format
device.format = XvFormatUnknown;
signed format_count;
XvImageFormatValues *format = XvListImageFormats(device.display, device.port, &format_count);
if(device.format == XvFormatUnknown) for(signed i = 0; i < format_count; i++) {
if(format[i].type == XvRGB && format[i].bits_per_pixel == 32) {
device.format = XvFormatRGB32;
device.fourcc = format[i].id;
break;
}
}
if(device.format == XvFormatUnknown) for(signed i = 0; i < format_count; i++) {
if(format[i].type == XvRGB && format[i].bits_per_pixel == 24) {
device.format = XvFormatRGB24;
device.fourcc = format[i].id;
break;
}
}
if(device.format == XvFormatUnknown) for(signed i = 0; i < format_count; i++) {
if(format[i].type == XvRGB && format[i].bits_per_pixel == 16) {
device.format = XvFormatRGB16;
device.fourcc = format[i].id;
break;
}
}
if(device.format == XvFormatUnknown) for(signed i = 0; i < format_count; i++) {
if(format[i].type == XvRGB && format[i].bits_per_pixel == 15) {
device.format = XvFormatRGB15;
device.fourcc = format[i].id;
break;
}
}
if(device.format == XvFormatUnknown) for(signed i = 0; i < format_count; i++) {
if(format[i].type == XvYUV && format[i].bits_per_pixel == 16 && format[i].format == XvPacked) {
if(format[i].component_order[0] == 'Y' && format[i].component_order[1] == 'U'
&& format[i].component_order[2] == 'Y' && format[i].component_order[3] == 'V'
) {
device.format = XvFormatYUY2;
device.fourcc = format[i].id;
break;
}
}
}
if(device.format == XvFormatUnknown) for(signed i = 0; i < format_count; i++) {
if(format[i].type == XvYUV && format[i].bits_per_pixel == 16 && format[i].format == XvPacked) {
if(format[i].component_order[0] == 'U' && format[i].component_order[1] == 'Y'
&& format[i].component_order[2] == 'V' && format[i].component_order[3] == 'Y'
) {
device.format = XvFormatUYVY;
device.fourcc = format[i].id;
break;
}
}
}
free(format);
if(device.format == XvFormatUnknown) {
fprintf(stderr, "VideoXv: unable to find a supported image format.\n");
return false;
}
device.image = XvShmCreateImage(device.display, device.port, device.fourcc, 0, 1024, 1024, &device.shminfo);
if(!device.image) {
fprintf(stderr, "VideoXv: XShmCreateImage failed.\n");
return false;
}
device.shminfo.shmid = shmget(IPC_PRIVATE, device.image->data_size, IPC_CREAT | 0777);
device.shminfo.shmaddr = device.image->data = (char*)shmat(device.shminfo.shmid, 0, 0);
device.shminfo.readOnly = false;
if(!XShmAttach(device.display, &device.shminfo)) {
fprintf(stderr, "VideoXv: XShmAttach failed.\n");
return false;
}
buffer = new uint32_t[1024 * 1024];
init_yuv_tables();
clear();
return true;
}
void term() {
XShmDetach(device.display, &device.shminfo);
if(device.window) {
XUnmapWindow(device.display, device.window);
device.window = 0;
}
if(device.colormap) {
XFreeColormap(device.display, device.colormap);
device.colormap = 0;
}
if(buffer) { delete[] buffer; buffer = 0; }
if(ytable) { delete[] ytable; ytable = 0; }
if(utable) { delete[] utable; utable = 0; }
if(vtable) { delete[] vtable; vtable = 0; }
}
void render_rgb32(unsigned width, unsigned height) {
uint32_t *input = (uint32_t*)buffer;
uint32_t *output = (uint32_t*)device.image->data;
for(unsigned y = 0; y < height; y++) {
memcpy(output, input, width * 4);
input += 1024 - width;
output += 1024 - width;
}
}
void render_rgb24(unsigned width, unsigned height) {
uint32_t *input = (uint32_t*)buffer;
uint8_t *output = (uint8_t*)device.image->data;
for(unsigned y = 0; y < height; y++) {
for(unsigned x = 0; x < width; x++) {
uint32_t p = *input++;
*output++ = p;
*output++ = p >> 8;
*output++ = p >> 16;
}
input += (1024 - width);
output += (1024 - width) * 3;
}
}
void render_rgb16(unsigned width, unsigned height) {
uint32_t *input = (uint32_t*)buffer;
uint16_t *output = (uint16_t*)device.image->data;
for(unsigned y = 0; y < height; y++) {
for(unsigned x = 0; x < width; x++) {
uint32_t p = *input++;
*output++ = ((p >> 8) & 0xf800) | ((p >> 5) & 0x07e0) | ((p >> 3) & 0x001f); //RGB32->RGB16
}
input += 1024 - width;
output += 1024 - width;
}
}
void render_rgb15(unsigned width, unsigned height) {
uint32_t *input = (uint32_t*)buffer;
uint16_t *output = (uint16_t*)device.image->data;
for(unsigned y = 0; y < height; y++) {
for(unsigned x = 0; x < width; x++) {
uint32_t p = *input++;
*output++ = ((p >> 9) & 0x7c00) | ((p >> 6) & 0x03e0) | ((p >> 3) & 0x001f); //RGB32->RGB15
}
input += 1024 - width;
output += 1024 - width;
}
}
void render_yuy2(unsigned width, unsigned height) {
uint32_t *input = (uint32_t*)buffer;
uint16_t *output = (uint16_t*)device.image->data;
for(unsigned y = 0; y < height; y++) {
for(unsigned x = 0; x < width >> 1; x++) {
uint32_t p0 = *input++;
uint32_t p1 = *input++;
p0 = ((p0 >> 8) & 0xf800) + ((p0 >> 5) & 0x07e0) + ((p0 >> 3) & 0x001f); //RGB32->RGB16
p1 = ((p1 >> 8) & 0xf800) + ((p1 >> 5) & 0x07e0) + ((p1 >> 3) & 0x001f); //RGB32->RGB16
uint8_t u = (utable[p0] + utable[p1]) >> 1;
uint8_t v = (vtable[p0] + vtable[p1]) >> 1;
*output++ = (u << 8) | ytable[p0];
*output++ = (v << 8) | ytable[p1];
}
input += 1024 - width;
output += 1024 - width;
}
}
void render_uyvy(unsigned width, unsigned height) {
uint32_t *input = (uint32_t*)buffer;
uint16_t *output = (uint16_t*)device.image->data;
for(unsigned y = 0; y < height; y++) {
for(unsigned x = 0; x < width >> 1; x++) {
uint32_t p0 = *input++;
uint32_t p1 = *input++;
p0 = ((p0 >> 8) & 0xf800) + ((p0 >> 5) & 0x07e0) + ((p0 >> 3) & 0x001f);
p1 = ((p1 >> 8) & 0xf800) + ((p1 >> 5) & 0x07e0) + ((p1 >> 3) & 0x001f);
uint8_t u = (utable[p0] + utable[p1]) >> 1;
uint8_t v = (vtable[p0] + vtable[p1]) >> 1;
*output++ = (ytable[p0] << 8) | u;
*output++ = (ytable[p1] << 8) | v;
}
input += 1024 - width;
output += 1024 - width;
}
}
void init_yuv_tables() {
ytable = new uint8_t[65536];
utable = new uint8_t[65536];
vtable = new uint8_t[65536];
for(unsigned i = 0; i < 65536; i++) {
//extract RGB565 color data from i
uint8_t r = (i >> 11) & 31, g = (i >> 5) & 63, b = (i) & 31;
r = (r << 3) | (r >> 2); //R5->R8
g = (g << 2) | (g >> 4); //G6->G8
b = (b << 3) | (b >> 2); //B5->B8
//ITU-R Recommendation BT.601
//double lr = 0.299, lg = 0.587, lb = 0.114;
int y = int( +(double(r) * 0.257) + (double(g) * 0.504) + (double(b) * 0.098) + 16.0 );
int u = int( -(double(r) * 0.148) - (double(g) * 0.291) + (double(b) * 0.439) + 128.0 );
int v = int( +(double(r) * 0.439) - (double(g) * 0.368) - (double(b) * 0.071) + 128.0 );
//ITU-R Recommendation BT.709
//double lr = 0.2126, lg = 0.7152, lb = 0.0722;
//int y = int( double(r) * lr + double(g) * lg + double(b) * lb );
//int u = int( (double(b) - y) / (2.0 - 2.0 * lb) + 128.0 );
//int v = int( (double(r) - y) / (2.0 - 2.0 * lr) + 128.0 );
ytable[i] = y < 0 ? 0 : y > 255 ? 255 : y;
utable[i] = u < 0 ? 0 : u > 255 ? 255 : u;
vtable[i] = v < 0 ? 0 : v > 255 ? 255 : v;
}
}
pVideoXv(VideoXv &self_) : self(self_) {
device.window = 0;
device.colormap = 0;
device.port = -1;
ytable = 0;
utable = 0;
vtable = 0;
settings.handle = 0;
settings.synchronize = false;
}
};
//VideoXv public interface: every call is forwarded verbatim to the private
//pVideoXv implementation (pimpl idiom), keeping Xlib/Xv types out of the header.
bool VideoXv::cap(Setting setting) { return p.cap(setting); }
uintptr_t VideoXv::get(Setting setting) { return p.get(setting); }
bool VideoXv::set(Setting setting, uintptr_t param) { return p.set(setting, param); }
bool VideoXv::lock(uint32_t *&data, unsigned &pitch) { return p.lock(data, pitch); }
void VideoXv::unlock() { p.unlock(); }
void VideoXv::clear() { p.clear(); }
void VideoXv::refresh(unsigned width, unsigned height) { p.refresh(width, height); }
bool VideoXv::init() { return p.init(); }
void VideoXv::term() { p.term(); }
//the implementation object is heap-allocated here and destroyed via delete &p
VideoXv::VideoXv() : p(*new pVideoXv(*this)) {}
VideoXv::~VideoXv() { delete &p; }
}

22
bsnes/lib/ruby/video/xv.hpp Executable file
View File

@@ -0,0 +1,22 @@
class pVideoXv;

//ruby video driver: XVideo (Xv) extension output (X11).
//all methods forward to the private pVideoXv implementation (pimpl idiom).
class VideoXv : public Video {
public:
  bool cap(Setting);                              //query whether a setting is supported
  uintptr_t get(Setting);                         //read the current value of a setting
  bool set(Setting, uintptr_t);                   //change a setting; returns success
  bool lock(uint32_t *&data, unsigned &pitch);    //acquire writable pixel buffer + row pitch (bytes)
  void unlock();                                  //release the pixel buffer
  void clear();                                   //blank the video output
  void refresh(unsigned width, unsigned height);  //present width x height pixels from the buffer
  bool init();                                    //create driver resources; requires Handle to be set
  void term();                                    //release all driver resources
  VideoXv();
  ~VideoXv();
private:
  pVideoXv &p;                                    //private implementation, owned by this object
};