Implement rendering pipeline and post-processing (#12465)
Co-authored-by: Lars Mueller <appgurulars@gmx.de>
Co-authored-by: sfan5 <sfan5@live.de>
Co-authored-by: lhofhansl <lhofhansl@yahoo.com>
parent 464043b8ab
commit ff6dcfea82
@@ -2,5 +2,5 @@ varying lowp vec4 varColor;
void main(void)
|
||||
{
|
||||
gl_FragColor = varColor;
|
||||
gl_FragData[0] = varColor;
|
||||
}
|
||||
|
@@ -45,6 +45,9 @@ centroid varying vec2 varTexCoord;
#endif
|
||||
varying vec3 eyeVec;
|
||||
varying float nightRatio;
|
||||
varying vec3 tsEyeVec;
|
||||
varying vec3 lightVec;
|
||||
varying vec3 tsLightVec;
|
||||
|
||||
const float fogStart = FOG_START;
|
||||
const float fogShadingParameter = 1.0 / ( 1.0 - fogStart);
|
||||
@@ -359,40 +362,6 @@ float getShadow(sampler2D shadowsampler, vec2 smTexCoord, float realDistance)
#endif
|
||||
#endif
|
||||
|
||||
#if ENABLE_TONE_MAPPING
|
||||
|
||||
/* Hable's UC2 Tone mapping parameters
|
||||
A = 0.22;
|
||||
B = 0.30;
|
||||
C = 0.10;
|
||||
D = 0.20;
|
||||
E = 0.01;
|
||||
F = 0.30;
|
||||
W = 11.2;
|
||||
equation used: ((x * (A * x + C * B) + D * E) / (x * (A * x + B) + D * F)) - E / F
|
||||
*/
|
||||
|
||||
vec3 uncharted2Tonemap(vec3 x)
|
||||
{
|
||||
return ((x * (0.22 * x + 0.03) + 0.002) / (x * (0.22 * x + 0.3) + 0.06)) - 0.03333;
|
||||
}
|
||||
|
||||
vec4 applyToneMapping(vec4 color)
|
||||
{
|
||||
color = vec4(pow(color.rgb, vec3(2.2)), color.a);
|
||||
const float gamma = 1.6;
|
||||
const float exposureBias = 5.5;
|
||||
color.rgb = uncharted2Tonemap(exposureBias * color.rgb);
|
||||
// Precalculated white_scale from
|
||||
//vec3 whiteScale = 1.0 / uncharted2Tonemap(vec3(W));
|
||||
vec3 whiteScale = vec3(1.036015346);
|
||||
color.rgb *= whiteScale;
|
||||
return vec4(pow(color.rgb, vec3(1.0 / gamma)), color.a);
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
|
||||
void main(void)
|
||||
{
|
||||
vec3 color;
|
||||
@@ -470,10 +439,6 @@ void main(void)
}
|
||||
#endif
|
||||
|
||||
#if ENABLE_TONE_MAPPING
|
||||
col = applyToneMapping(col);
|
||||
#endif
|
||||
|
||||
// Due to a bug in some (older ?) graphics stacks (possibly in the glsl compiler ?),
|
||||
// the fog will only be rendered correctly if the last operation before the
|
||||
// clamp() is an addition. Else, the clamp() seems to be ignored.
|
||||
@@ -488,5 +453,5 @@ void main(void)
col = mix(skyBgColor, col, clarity);
|
||||
col = vec4(col.rgb, base.a);
|
||||
|
||||
gl_FragColor = col;
|
||||
gl_FragData[0] = col;
|
||||
}
|
||||
|
@@ -42,6 +42,7 @@ centroid varying vec2 varTexCoord;
varying float perspective_factor;
|
||||
#endif
|
||||
|
||||
varying float area_enable_parallax;
|
||||
|
||||
varying vec3 eyeVec;
|
||||
varying float nightRatio;
|
||||
@@ -193,6 +194,9 @@ void main(void)
|
||||
vPosition = gl_Position.xyz;
|
||||
eyeVec = -(mWorldView * pos).xyz;
|
||||
#ifdef SECONDSTAGE
|
||||
normalPass = normalize((inVertexNormal+1)/2);
|
||||
#endif
|
||||
vNormal = inVertexNormal;
|
||||
|
||||
// Calculate color.
|
||||
|
@@ -361,39 +361,6 @@ float getShadow(sampler2D shadowsampler, vec2 smTexCoord, float realDistance)
#endif
|
||||
#endif
|
||||
|
||||
#if ENABLE_TONE_MAPPING
|
||||
|
||||
/* Hable's UC2 Tone mapping parameters
|
||||
A = 0.22;
|
||||
B = 0.30;
|
||||
C = 0.10;
|
||||
D = 0.20;
|
||||
E = 0.01;
|
||||
F = 0.30;
|
||||
W = 11.2;
|
||||
equation used: ((x * (A * x + C * B) + D * E) / (x * (A * x + B) + D * F)) - E / F
|
||||
*/
|
||||
|
||||
vec3 uncharted2Tonemap(vec3 x)
|
||||
{
|
||||
return ((x * (0.22 * x + 0.03) + 0.002) / (x * (0.22 * x + 0.3) + 0.06)) - 0.03333;
|
||||
}
|
||||
|
||||
vec4 applyToneMapping(vec4 color)
|
||||
{
|
||||
color = vec4(pow(color.rgb, vec3(2.2)), color.a);
|
||||
const float gamma = 1.6;
|
||||
const float exposureBias = 5.5;
|
||||
color.rgb = uncharted2Tonemap(exposureBias * color.rgb);
|
||||
// Precalculated white_scale from
|
||||
//vec3 whiteScale = 1.0 / uncharted2Tonemap(vec3(W));
|
||||
vec3 whiteScale = vec3(1.036015346);
|
||||
color.rgb *= whiteScale;
|
||||
return vec4(pow(color.rgb, vec3(1.0 / gamma)), color.a);
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
|
||||
void main(void)
|
||||
{
|
||||
@@ -473,10 +440,6 @@ void main(void)
}
|
||||
#endif
|
||||
|
||||
#if ENABLE_TONE_MAPPING
|
||||
col = applyToneMapping(col);
|
||||
#endif
|
||||
|
||||
// Due to a bug in some (older ?) graphics stacks (possibly in the glsl compiler ?),
|
||||
// the fog will only be rendered correctly if the last operation before the
|
||||
// clamp() is an addition. Else, the clamp() seems to be ignored.
|
||||
@@ -491,5 +454,5 @@ void main(void)
col = mix(skyBgColor, col, clarity);
|
||||
col = vec4(col.rgb, base.a);
|
||||
|
||||
gl_FragColor = col;
|
||||
gl_FragData[0] = col;
|
||||
}
|
||||
|
53 client/shaders/second_stage/opengl_fragment.glsl Normal file
@@ -0,0 +1,53 @@
uniform sampler2D baseTexture;
|
||||
|
||||
#define rendered baseTexture
|
||||
|
||||
#ifdef GL_ES
|
||||
varying mediump vec2 varTexCoord;
|
||||
#else
|
||||
centroid varying vec2 varTexCoord;
|
||||
#endif
|
||||
|
||||
#if ENABLE_TONE_MAPPING
|
||||
|
||||
/* Hable's UC2 Tone mapping parameters
|
||||
A = 0.22;
|
||||
B = 0.30;
|
||||
C = 0.10;
|
||||
D = 0.20;
|
||||
E = 0.01;
|
||||
F = 0.30;
|
||||
W = 11.2;
|
||||
equation used: ((x * (A * x + C * B) + D * E) / (x * (A * x + B) + D * F)) - E / F
|
||||
*/
|
||||
|
||||
vec3 uncharted2Tonemap(vec3 x)
|
||||
{
|
||||
return ((x * (0.22 * x + 0.03) + 0.002) / (x * (0.22 * x + 0.3) + 0.06)) - 0.03333;
|
||||
}
|
||||
|
||||
vec4 applyToneMapping(vec4 color)
|
||||
{
|
||||
color = vec4(pow(color.rgb, vec3(2.2)), color.a);
|
||||
const float gamma = 1.6;
|
||||
const float exposureBias = 5.5;
|
||||
color.rgb = uncharted2Tonemap(exposureBias * color.rgb);
|
||||
// Precalculated white_scale from
|
||||
//vec3 whiteScale = 1.0 / uncharted2Tonemap(vec3(W));
|
||||
vec3 whiteScale = vec3(1.036015346);
|
||||
color.rgb *= whiteScale;
|
||||
return vec4(pow(color.rgb, vec3(1.0 / gamma)), color.a);
|
||||
}
|
||||
#endif
|
||||
|
||||
void main(void)
|
||||
{
|
||||
vec2 uv = varTexCoord.st;
|
||||
vec4 color = texture2D(rendered, uv).rgba;
|
||||
|
||||
#if ENABLE_TONE_MAPPING
|
||||
color = applyToneMapping(color);
|
||||
#endif
|
||||
|
||||
gl_FragColor = vec4(color.rgb, 1.0); // force full alpha to avoid holes in the image.
|
||||
}
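For reference, the literal constants in uncharted2Tonemap() above are simply the commented Hable parameters folded into the equation: C*B = 0.10*0.30 = 0.03, D*E = 0.20*0.01 = 0.002, D*F = 0.20*0.30 = 0.06, and E/F = 0.01/0.30 ≈ 0.03333, giving ((x*(0.22*x + 0.03) + 0.002) / (x*(0.22*x + 0.3) + 0.06)) - 0.03333.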
11 client/shaders/second_stage/opengl_vertex.glsl Normal file
@@ -0,0 +1,11 @@
#ifdef GL_ES
|
||||
varying mediump vec2 varTexCoord;
|
||||
#else
|
||||
centroid varying vec2 varTexCoord;
|
||||
#endif
|
||||
|
||||
void main(void)
|
||||
{
|
||||
varTexCoord.st = inTexCoord0.st;
|
||||
gl_Position = inVertexPosition;
|
||||
}
|
@@ -8,5 +8,5 @@ void main(void)
vec2 uv = varTexCoord.st;
|
||||
vec4 color = texture2D(baseTexture, uv);
|
||||
color.rgb *= varColor.rgb;
|
||||
gl_FragColor = color;
|
||||
gl_FragData[0] = color;
|
||||
}
|
||||
|
@@ -21,10 +21,11 @@ set(client_SRCS
${CMAKE_CURRENT_SOURCE_DIR}/render/core.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/render/factory.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/render/interlaced.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/render/pageflip.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/render/plain.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/render/sidebyside.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/render/stereo.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/render/secondstage.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/render/pipeline.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/activeobjectmgr.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/camera.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/client.cpp
|
||||
|
@@ -627,14 +627,11 @@ void Camera::wield(const ItemStack &item)
|
||||
void Camera::drawWieldedTool(irr::core::matrix4* translation)
|
||||
{
|
||||
// Clear Z buffer so that the wielded tool stays in front of world geometry
|
||||
m_wieldmgr->getVideoDriver()->clearBuffers(video::ECBF_DEPTH);
|
||||
|
||||
// Draw the wielded node (in a separate scene manager)
|
||||
scene::ICameraSceneNode* cam = m_wieldmgr->getActiveCamera();
|
||||
cam->setAspectRatio(m_cameranode->getAspectRatio());
|
||||
cam->setFOV(72.0*M_PI/180.0);
|
||||
cam->setNearValue(10);
|
||||
cam->setNearValue(40); // give wield tool smaller z-depth than the world in most cases.
|
||||
cam->setFarValue(1000);
|
||||
if (translation != NULL)
|
||||
{
|
||||
|
@@ -150,7 +150,7 @@ void Hud::drawItem(const ItemStack &item, const core::rect<s32>& rect,
bool selected)
|
||||
{
|
||||
if (selected) {
|
||||
/* draw hihlighting around selected item */
|
||||
/* draw highlighting around selected item */
|
||||
if (use_hotbar_selected_image) {
|
||||
core::rect<s32> imgrect2 = rect;
|
||||
imgrect2.UpperLeftCorner.X -= (m_padding*2);
|
||||
|
@@ -19,17 +19,18 @@ with this program; if not, write to the Free Software Foundation, Inc.,
*/
|
||||
|
||||
#include "anaglyph.h"
|
||||
#include "client/camera.h"
|
||||
|
||||
void RenderingCoreAnaglyph::drawAll()
|
||||
{
|
||||
renderBothImages();
|
||||
drawPostFx();
|
||||
drawHUD();
|
||||
}
|
||||
|
||||
void RenderingCoreAnaglyph::setupMaterial(int color_mask)
|
||||
/// SetColorMaskStep step
|
||||
|
||||
SetColorMaskStep::SetColorMaskStep(int _color_mask)
|
||||
: color_mask(_color_mask)
|
||||
{}
|
||||
|
||||
void SetColorMaskStep::run(PipelineContext &context)
|
||||
{
|
||||
video::SOverrideMaterial &mat = driver->getOverrideMaterial();
|
||||
video::SOverrideMaterial &mat = context.device->getVideoDriver()->getOverrideMaterial();
|
||||
mat.reset();
|
||||
mat.Material.ColorMask = color_mask;
|
||||
mat.EnableFlags = video::EMF_COLOR_MASK;
|
||||
@@ -38,15 +39,53 @@ void RenderingCoreAnaglyph::setupMaterial(int color_mask)
scene::ESNRP_SHADOW;
|
||||
}
|
||||
|
||||
void RenderingCoreAnaglyph::useEye(bool right)
|
||||
/// ClearDepthBufferTarget
|
||||
|
||||
ClearDepthBufferTarget::ClearDepthBufferTarget(RenderTarget *_target) :
|
||||
target(_target)
|
||||
{}
|
||||
|
||||
void ClearDepthBufferTarget::activate(PipelineContext &context)
|
||||
{
|
||||
RenderingCoreStereo::useEye(right);
|
||||
driver->clearBuffers(video::ECBF_DEPTH);
|
||||
setupMaterial(right ? video::ECP_GREEN | video::ECP_BLUE : video::ECP_RED);
|
||||
target->activate(context);
|
||||
context.device->getVideoDriver()->clearBuffers(video::ECBF_DEPTH);
|
||||
}
|
||||
|
||||
void RenderingCoreAnaglyph::resetEye()
|
||||
ConfigureOverrideMaterialTarget::ConfigureOverrideMaterialTarget(RenderTarget *_upstream, bool _enable) :
|
||||
upstream(_upstream), enable(_enable)
|
||||
{
|
||||
setupMaterial(video::ECP_ALL);
|
||||
RenderingCoreStereo::resetEye();
|
||||
}
|
||||
|
||||
void ConfigureOverrideMaterialTarget::activate(PipelineContext &context)
|
||||
{
|
||||
upstream->activate(context);
|
||||
context.device->getVideoDriver()->getOverrideMaterial().Enabled = enable;
|
||||
}
|
||||
|
||||
|
||||
void populateAnaglyphPipeline(RenderPipeline *pipeline, Client *client)
|
||||
{
|
||||
// clear depth buffer every time 3D is rendered
|
||||
auto step3D = pipeline->own(create3DStage(client, v2f(1.0)));
|
||||
auto screen = pipeline->createOwned<ScreenTarget>();
|
||||
auto clear_depth = pipeline->createOwned<ClearDepthBufferTarget>(screen);
|
||||
auto enable_override_material = pipeline->createOwned<ConfigureOverrideMaterialTarget>(clear_depth, true);
|
||||
step3D->setRenderTarget(enable_override_material);
|
||||
|
||||
// left eye
|
||||
pipeline->addStep(pipeline->createOwned<OffsetCameraStep>(false));
|
||||
pipeline->addStep(pipeline->createOwned<SetColorMaskStep>(video::ECP_RED));
|
||||
pipeline->addStep(step3D);
|
||||
|
||||
// right eye
|
||||
pipeline->addStep(pipeline->createOwned<OffsetCameraStep>(true));
|
||||
pipeline->addStep(pipeline->createOwned<SetColorMaskStep>(video::ECP_GREEN | video::ECP_BLUE));
|
||||
pipeline->addStep(step3D);
|
||||
|
||||
// reset
|
||||
pipeline->addStep(pipeline->createOwned<OffsetCameraStep>(0.0f));
|
||||
pipeline->addStep(pipeline->createOwned<SetColorMaskStep>(video::ECP_ALL));
|
||||
|
||||
pipeline->addStep(pipeline->createOwned<MapPostFxStep>());
|
||||
pipeline->addStep(pipeline->createOwned<DrawHUD>());
|
||||
}
|
||||
|
@@ -20,15 +20,50 @@ with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
#pragma once
|
||||
#include "stereo.h"
|
||||
#include "pipeline.h"
|
||||
|
||||
class RenderingCoreAnaglyph : public RenderingCoreStereo
|
||||
/**
|
||||
* Set color mask when rendering the next steps
|
||||
*/
|
||||
class SetColorMaskStep : public TrivialRenderStep
|
||||
{
|
||||
protected:
|
||||
void setupMaterial(int color_mask);
|
||||
void useEye(bool right) override;
|
||||
void resetEye() override;
|
||||
|
||||
public:
|
||||
using RenderingCoreStereo::RenderingCoreStereo;
|
||||
void drawAll() override;
|
||||
SetColorMaskStep(int color_mask);
|
||||
|
||||
void run(PipelineContext &context) override;
|
||||
private:
|
||||
int color_mask;
|
||||
};
|
||||
|
||||
/**
|
||||
* Resets depth buffer of the current render target
|
||||
*
|
||||
*/
|
||||
class ClearDepthBufferTarget : public RenderTarget
|
||||
{
|
||||
public:
|
||||
ClearDepthBufferTarget(RenderTarget *target);
|
||||
|
||||
void reset(PipelineContext &context) override {}
|
||||
void activate(PipelineContext &context) override;
|
||||
private:
|
||||
RenderTarget *target;
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Enables or disables override material when activated
|
||||
*
|
||||
*/
|
||||
class ConfigureOverrideMaterialTarget : public RenderTarget
|
||||
{
|
||||
public:
|
||||
ConfigureOverrideMaterialTarget(RenderTarget *upstream, bool enable);
|
||||
|
||||
virtual void activate(PipelineContext &context) override;
|
||||
private:
|
||||
RenderTarget *upstream;
|
||||
bool enable;
|
||||
};
|
||||
|
||||
void populateAnaglyphPipeline(RenderPipeline *pipeline, Client *client);
|
||||
|
@@ -19,111 +19,48 @@ with this program; if not, write to the Free Software Foundation, Inc.,
*/
|
||||
|
||||
#include "core.h"
|
||||
#include "client/camera.h"
|
||||
#include "client/client.h"
|
||||
#include "client/clientmap.h"
|
||||
#include "client/hud.h"
|
||||
#include "client/minimap.h"
|
||||
#include "plain.h"
|
||||
#include "client/shadows/dynamicshadowsrender.h"
|
||||
#include "settings.h"
|
||||
|
||||
RenderingCore::RenderingCore(IrrlichtDevice *_device, Client *_client, Hud *_hud)
|
||||
: device(_device), driver(device->getVideoDriver()), smgr(device->getSceneManager()),
|
||||
guienv(device->getGUIEnvironment()), client(_client), camera(client->getCamera()),
|
||||
mapper(client->getMinimap()), hud(_hud),
|
||||
shadow_renderer(nullptr)
|
||||
RenderingCore::RenderingCore(IrrlichtDevice *_device, Client *_client, Hud *_hud,
|
||||
ShadowRenderer *_shadow_renderer, RenderPipeline *_pipeline, v2f _virtual_size_scale)
|
||||
: device(_device), client(_client), hud(_hud), shadow_renderer(_shadow_renderer),
|
||||
pipeline(_pipeline), virtual_size_scale(_virtual_size_scale)
|
||||
{
|
||||
screensize = driver->getScreenSize();
|
||||
virtual_size = screensize;
|
||||
|
||||
// disable if unsupported
|
||||
if (g_settings->getBool("enable_dynamic_shadows") && (
|
||||
g_settings->get("video_driver") != "opengl" ||
|
||||
!g_settings->getBool("enable_shaders"))) {
|
||||
g_settings->setBool("enable_dynamic_shadows", false);
|
||||
}
|
||||
|
||||
if (g_settings->getBool("enable_shaders") &&
|
||||
g_settings->getBool("enable_dynamic_shadows")) {
|
||||
shadow_renderer = new ShadowRenderer(device, client);
|
||||
}
|
||||
}
|
||||
|
||||
RenderingCore::~RenderingCore()
|
||||
{
|
||||
clearTextures();
|
||||
delete pipeline;
|
||||
delete shadow_renderer;
|
||||
}
|
||||
|
||||
void RenderingCore::initialize()
|
||||
{
|
||||
// have to be called late as the VMT is not ready in the constructor:
|
||||
initTextures();
|
||||
if (shadow_renderer)
|
||||
shadow_renderer->initialize();
|
||||
}
|
||||
pipeline->addStep<RenderShadowMapStep>();
|
||||
|
||||
void RenderingCore::updateScreenSize()
|
||||
{
|
||||
virtual_size = screensize;
|
||||
clearTextures();
|
||||
initTextures();
|
||||
createPipeline();
|
||||
}
|
||||
|
||||
void RenderingCore::draw(video::SColor _skycolor, bool _show_hud, bool _show_minimap,
|
||||
bool _draw_wield_tool, bool _draw_crosshair)
|
||||
{
|
||||
v2u32 ss = driver->getScreenSize();
|
||||
if (screensize != ss) {
|
||||
screensize = ss;
|
||||
updateScreenSize();
|
||||
}
|
||||
skycolor = _skycolor;
|
||||
show_hud = _show_hud;
|
||||
show_minimap = _show_minimap;
|
||||
draw_wield_tool = _draw_wield_tool;
|
||||
draw_crosshair = _draw_crosshair;
|
||||
v2u32 screensize = device->getVideoDriver()->getScreenSize();
|
||||
virtual_size = v2u32(screensize.X * virtual_size_scale.X, screensize.Y * virtual_size_scale.Y);
|
||||
|
||||
if (shadow_renderer) {
|
||||
// This is necessary to render shadows for animations correctly
|
||||
smgr->getRootSceneNode()->OnAnimate(device->getTimer()->getTime());
|
||||
shadow_renderer->update();
|
||||
}
|
||||
PipelineContext context(device, client, hud, shadow_renderer, _skycolor, screensize);
|
||||
context.draw_crosshair = _draw_crosshair;
|
||||
context.draw_wield_tool = _draw_wield_tool;
|
||||
context.show_hud = _show_hud;
|
||||
context.show_minimap = _show_minimap;
|
||||
|
||||
beforeDraw();
|
||||
drawAll();
|
||||
pipeline->reset(context);
|
||||
pipeline->run(context);
|
||||
}
|
||||
|
||||
void RenderingCore::draw3D()
|
||||
v2u32 RenderingCore::getVirtualSize() const
|
||||
{
|
||||
smgr->drawAll();
|
||||
if (shadow_renderer)
|
||||
shadow_renderer->drawDebug();
|
||||
|
||||
driver->setTransform(video::ETS_WORLD, core::IdentityMatrix);
|
||||
if (!show_hud)
|
||||
return;
|
||||
hud->drawBlockBounds();
|
||||
hud->drawSelectionMesh();
|
||||
if (draw_wield_tool)
|
||||
camera->drawWieldedTool();
|
||||
}
|
||||
|
||||
void RenderingCore::drawHUD()
|
||||
{
|
||||
if (show_hud) {
|
||||
if (draw_crosshair)
|
||||
hud->drawCrosshair();
|
||||
|
||||
hud->drawHotbar(client->getEnv().getLocalPlayer()->getWieldIndex());
|
||||
hud->drawLuaElements(camera->getOffset());
|
||||
camera->drawNametags();
|
||||
if (mapper && show_minimap)
|
||||
mapper->drawMinimap();
|
||||
}
|
||||
guienv->drawAll();
|
||||
}
|
||||
|
||||
void RenderingCore::drawPostFx()
|
||||
{
|
||||
client->getEnv().getClientMap().renderPostFx(camera->getCameraMode());
|
||||
}
|
||||
return virtual_size;
|
||||
}
|
@@ -26,43 +26,28 @@ class Camera;
class Client;
|
||||
class Hud;
|
||||
class Minimap;
|
||||
class RenderPipeline;
|
||||
class RenderTarget;
|
||||
|
||||
class RenderingCore
|
||||
{
|
||||
protected:
|
||||
v2u32 screensize;
|
||||
v2u32 virtual_size;
|
||||
video::SColor skycolor;
|
||||
bool show_hud;
|
||||
bool show_minimap;
|
||||
bool draw_wield_tool;
|
||||
bool draw_crosshair;
|
||||
|
||||
IrrlichtDevice *device;
|
||||
video::IVideoDriver *driver;
|
||||
scene::ISceneManager *smgr;
|
||||
gui::IGUIEnvironment *guienv;
|
||||
|
||||
Client *client;
|
||||
Camera *camera;
|
||||
Minimap *mapper;
|
||||
Hud *hud;
|
||||
|
||||
ShadowRenderer *shadow_renderer;
|
||||
|
||||
void updateScreenSize();
|
||||
virtual void initTextures() {}
|
||||
virtual void clearTextures() {}
|
||||
RenderPipeline *pipeline;
|
||||
|
||||
virtual void beforeDraw() {}
|
||||
virtual void drawAll() = 0;
|
||||
v2f virtual_size_scale;
|
||||
v2u32 virtual_size { 0, 0 };
|
||||
|
||||
void draw3D();
|
||||
void drawHUD();
|
||||
void drawPostFx();
|
||||
virtual void createPipeline() {}
|
||||
|
||||
public:
|
||||
RenderingCore(IrrlichtDevice *_device, Client *_client, Hud *_hud);
|
||||
RenderingCore(IrrlichtDevice *device, Client *client, Hud *hud,
|
||||
ShadowRenderer *shadow_renderer, RenderPipeline *pipeline,
|
||||
v2f virtual_size_scale);
|
||||
RenderingCore(const RenderingCore &) = delete;
|
||||
RenderingCore(RenderingCore &&) = delete;
|
||||
virtual ~RenderingCore();
|
||||
@@ -74,7 +59,7 @@ public:
void draw(video::SColor _skycolor, bool _show_hud, bool _show_minimap,
|
||||
bool _draw_wield_tool, bool _draw_crosshair);
|
||||
|
||||
inline v2u32 getVirtualSize() const { return virtual_size; }
|
||||
v2u32 getVirtualSize() const;
|
||||
|
||||
ShadowRenderer *get_shadow_renderer() { return shadow_renderer; };
|
||||
};
|
||||
|
@@ -23,30 +23,63 @@ with this program; if not, write to the Free Software Foundation, Inc.,
#include "plain.h"
|
||||
#include "anaglyph.h"
|
||||
#include "interlaced.h"
|
||||
#include "pageflip.h"
|
||||
#include "sidebyside.h"
|
||||
#include "secondstage.h"
|
||||
#include "client/shadows/dynamicshadowsrender.h"
|
||||
|
||||
struct CreatePipelineResult
|
||||
{
|
||||
v2f virtual_size_scale;
|
||||
ShadowRenderer *shadow_renderer { nullptr };
|
||||
RenderPipeline *pipeline { nullptr };
|
||||
};
|
||||
|
||||
void createPipeline(const std::string &stereo_mode, IrrlichtDevice *device, Client *client, Hud *hud, CreatePipelineResult &result);
|
||||
|
||||
RenderingCore *createRenderingCore(const std::string &stereo_mode, IrrlichtDevice *device,
|
||||
Client *client, Hud *hud)
|
||||
{
|
||||
if (stereo_mode == "none")
|
||||
return new RenderingCorePlain(device, client, hud);
|
||||
if (stereo_mode == "anaglyph")
|
||||
return new RenderingCoreAnaglyph(device, client, hud);
|
||||
if (stereo_mode == "interlaced")
|
||||
return new RenderingCoreInterlaced(device, client, hud);
|
||||
#ifdef STEREO_PAGEFLIP_SUPPORTED
|
||||
if (stereo_mode == "pageflip")
|
||||
return new RenderingCorePageflip(device, client, hud);
|
||||
#endif
|
||||
if (stereo_mode == "sidebyside")
|
||||
return new RenderingCoreSideBySide(device, client, hud);
|
||||
if (stereo_mode == "topbottom")
|
||||
return new RenderingCoreSideBySide(device, client, hud, true);
|
||||
if (stereo_mode == "crossview")
|
||||
return new RenderingCoreSideBySide(device, client, hud, false, true);
|
||||
CreatePipelineResult created_pipeline;
|
||||
createPipeline(stereo_mode, device, client, hud, created_pipeline);
|
||||
return new RenderingCore(device, client, hud,
|
||||
created_pipeline.shadow_renderer, created_pipeline.pipeline, created_pipeline.virtual_size_scale);
|
||||
}
|
||||
|
||||
void createPipeline(const std::string &stereo_mode, IrrlichtDevice *device, Client *client, Hud *hud, CreatePipelineResult &result)
|
||||
{
|
||||
result.shadow_renderer = createShadowRenderer(device, client);
|
||||
result.virtual_size_scale = v2f(1.0f);
|
||||
result.pipeline = new RenderPipeline();
|
||||
|
||||
if (result.shadow_renderer)
|
||||
result.pipeline->addStep<RenderShadowMapStep>();
|
||||
|
||||
if (stereo_mode == "none") {
|
||||
populatePlainPipeline(result.pipeline, client);
|
||||
return;
|
||||
}
|
||||
if (stereo_mode == "anaglyph") {
|
||||
populateAnaglyphPipeline(result.pipeline, client);
|
||||
return;
|
||||
}
|
||||
if (stereo_mode == "interlaced") {
|
||||
populateInterlacedPipeline(result.pipeline, client);
|
||||
return;
|
||||
}
|
||||
if (stereo_mode == "sidebyside") {
|
||||
populateSideBySidePipeline(result.pipeline, client, false, false, result.virtual_size_scale);
|
||||
return;
|
||||
}
|
||||
if (stereo_mode == "topbottom") {
|
||||
populateSideBySidePipeline(result.pipeline, client, true, false, result.virtual_size_scale);
|
||||
return;
|
||||
}
|
||||
if (stereo_mode == "crossview") {
|
||||
populateSideBySidePipeline(result.pipeline, client, false, true, result.virtual_size_scale);
|
||||
return;
|
||||
}
|
||||
|
||||
// fallback to plain renderer
|
||||
errorstream << "Invalid rendering mode: " << stereo_mode << std::endl;
|
||||
return new RenderingCorePlain(device, client, hud);
|
||||
}
|
||||
populatePlainPipeline(result.pipeline, client);
|
||||
}
|
@@ -19,102 +19,66 @@ with this program; if not, write to the Free Software Foundation, Inc.,
*/
|
||||
|
||||
#include "interlaced.h"
|
||||
#include "secondstage.h"
|
||||
#include "client/client.h"
|
||||
#include "client/shader.h"
|
||||
#include "client/tile.h"
|
||||
#include "client/camera.h"
|
||||
|
||||
RenderingCoreInterlaced::RenderingCoreInterlaced(
|
||||
IrrlichtDevice *_device, Client *_client, Hud *_hud)
|
||||
: RenderingCoreStereo(_device, _client, _hud)
|
||||
InitInterlacedMaskStep::InitInterlacedMaskStep(TextureBuffer *_buffer, u8 _index) :
|
||||
buffer(_buffer), index(_index)
|
||||
{
|
||||
initMaterial();
|
||||
}
|
||||
|
||||
void RenderingCoreInterlaced::initMaterial()
|
||||
void InitInterlacedMaskStep::run(PipelineContext &context)
|
||||
{
|
||||
IShaderSource *s = client->getShaderSource();
|
||||
mat.UseMipMaps = false;
|
||||
mat.ZBuffer = false;
|
||||
#if IRRLICHT_VERSION_MAJOR == 1 && IRRLICHT_VERSION_MINOR > 8
|
||||
mat.ZWriteEnable = video::EZW_OFF;
|
||||
#else
|
||||
mat.ZWriteEnable = false;
|
||||
#endif
|
||||
u32 shader = s->getShader("3d_interlaced_merge", TILE_MATERIAL_BASIC);
|
||||
mat.MaterialType = s->getShaderInfo(shader).material;
|
||||
for (int k = 0; k < 3; ++k) {
|
||||
mat.TextureLayer[k].AnisotropicFilter = false;
|
||||
mat.TextureLayer[k].BilinearFilter = false;
|
||||
mat.TextureLayer[k].TrilinearFilter = false;
|
||||
mat.TextureLayer[k].TextureWrapU = video::ETC_CLAMP_TO_EDGE;
|
||||
mat.TextureLayer[k].TextureWrapV = video::ETC_CLAMP_TO_EDGE;
|
||||
}
|
||||
}
|
||||
video::ITexture *mask = buffer->getTexture(index);
|
||||
if (!mask)
|
||||
return;
|
||||
if (mask == last_mask)
|
||||
return;
|
||||
last_mask = mask;
|
||||
|
||||
void RenderingCoreInterlaced::initTextures()
|
||||
{
|
||||
v2u32 image_size{screensize.X, screensize.Y / 2};
|
||||
left = driver->addRenderTargetTexture(
|
||||
image_size, "3d_render_left", video::ECF_A8R8G8B8);
|
||||
right = driver->addRenderTargetTexture(
|
||||
image_size, "3d_render_right", video::ECF_A8R8G8B8);
|
||||
mask = driver->addTexture(screensize, "3d_render_mask", video::ECF_A8R8G8B8);
|
||||
initMask();
|
||||
mat.TextureLayer[0].Texture = left;
|
||||
mat.TextureLayer[1].Texture = right;
|
||||
mat.TextureLayer[2].Texture = mask;
|
||||
}
|
||||
|
||||
void RenderingCoreInterlaced::clearTextures()
|
||||
{
|
||||
driver->removeTexture(left);
|
||||
driver->removeTexture(right);
|
||||
driver->removeTexture(mask);
|
||||
}
|
||||
|
||||
void RenderingCoreInterlaced::initMask()
|
||||
{
|
||||
auto size = mask->getSize();
|
||||
u8 *data = reinterpret_cast<u8 *>(mask->lock());
|
||||
for (u32 j = 0; j < screensize.Y; j++) {
|
||||
for (u32 j = 0; j < size.Height; j++) {
|
||||
u8 val = j % 2 ? 0xff : 0x00;
|
||||
memset(data, val, 4 * screensize.X);
|
||||
data += 4 * screensize.X;
|
||||
memset(data, val, 4 * size.Width);
|
||||
data += 4 * size.Width;
|
||||
}
|
||||
mask->unlock();
|
||||
}
|
||||
|
||||
void RenderingCoreInterlaced::drawAll()
|
||||
void populateInterlacedPipeline(RenderPipeline *pipeline, Client *client)
|
||||
{
|
||||
renderBothImages();
|
||||
merge();
|
||||
drawHUD();
|
||||
}
|
||||
static const u8 TEXTURE_LEFT = 0;
|
||||
static const u8 TEXTURE_RIGHT = 1;
|
||||
static const u8 TEXTURE_MASK = 2;
|
||||
|
||||
void RenderingCoreInterlaced::merge()
|
||||
{
|
||||
static const video::S3DVertex vertices[4] = {
|
||||
video::S3DVertex(1.0, -1.0, 0.0, 0.0, 0.0, -1.0,
|
||||
video::SColor(255, 0, 255, 255), 1.0, 0.0),
|
||||
video::S3DVertex(-1.0, -1.0, 0.0, 0.0, 0.0, -1.0,
|
||||
video::SColor(255, 255, 0, 255), 0.0, 0.0),
|
||||
video::S3DVertex(-1.0, 1.0, 0.0, 0.0, 0.0, -1.0,
|
||||
video::SColor(255, 255, 255, 0), 0.0, 1.0),
|
||||
video::S3DVertex(1.0, 1.0, 0.0, 0.0, 0.0, -1.0,
|
||||
video::SColor(255, 255, 255, 255), 1.0, 1.0),
|
||||
};
|
||||
static const u16 indices[6] = {0, 1, 2, 2, 3, 0};
|
||||
driver->setMaterial(mat);
|
||||
driver->drawVertexPrimitiveList(&vertices, 4, &indices, 2);
|
||||
}
|
||||
TextureBuffer *buffer = pipeline->createOwned<TextureBuffer>();
|
||||
buffer->setTexture(TEXTURE_LEFT, v2f(1.0f, 0.5f), "3d_render_left", video::ECF_A8R8G8B8);
|
||||
buffer->setTexture(TEXTURE_RIGHT, v2f(1.0f, 0.5f), "3d_render_right", video::ECF_A8R8G8B8);
|
||||
buffer->setTexture(TEXTURE_MASK, v2f(1.0f, 1.0f), "3d_render_mask", video::ECF_A8R8G8B8);
|
||||
|
||||
void RenderingCoreInterlaced::useEye(bool _right)
|
||||
{
|
||||
driver->setRenderTarget(_right ? right : left, true, true, skycolor);
|
||||
RenderingCoreStereo::useEye(_right);
|
||||
}
|
||||
pipeline->addStep<InitInterlacedMaskStep>(buffer, TEXTURE_MASK);
|
||||
|
||||
void RenderingCoreInterlaced::resetEye()
|
||||
{
|
||||
driver->setRenderTarget(nullptr, false, false, skycolor);
|
||||
RenderingCoreStereo::resetEye();
|
||||
}
|
||||
auto step3D = pipeline->own(create3DStage(client, v2f(1.0f, 0.5f)));
|
||||
|
||||
// eyes
|
||||
for (bool right : { false, true }) {
|
||||
pipeline->addStep<OffsetCameraStep>(right);
|
||||
auto output = pipeline->createOwned<TextureBufferOutput>(buffer, right ? TEXTURE_RIGHT : TEXTURE_LEFT);
|
||||
pipeline->addStep<SetRenderTargetStep>(step3D, output);
|
||||
pipeline->addStep(step3D);
|
||||
pipeline->addStep<MapPostFxStep>();
|
||||
}
|
||||
|
||||
pipeline->addStep<OffsetCameraStep>(0.0f);
|
||||
IShaderSource *s = client->getShaderSource();
|
||||
u32 shader = s->getShader("3d_interlaced_merge", TILE_MATERIAL_BASIC);
|
||||
video::E_MATERIAL_TYPE material = s->getShaderInfo(shader).material;
|
||||
auto texture_map = { TEXTURE_LEFT, TEXTURE_RIGHT, TEXTURE_MASK };
|
||||
auto merge = pipeline->addStep<PostProcessingStep>(material, texture_map);
|
||||
merge->setRenderSource(buffer);
|
||||
merge->setRenderTarget(pipeline->createOwned<ScreenTarget>());
|
||||
pipeline->addStep<DrawHUD>();
|
||||
}
|
@@ -21,23 +21,15 @@ with this program; if not, write to the Free Software Foundation, Inc.,
#pragma once
|
||||
#include "stereo.h"
|
||||
|
||||
class RenderingCoreInterlaced : public RenderingCoreStereo
|
||||
class InitInterlacedMaskStep : public TrivialRenderStep
|
||||
{
|
||||
protected:
|
||||
video::ITexture *left = nullptr;
|
||||
video::ITexture *right = nullptr;
|
||||
video::ITexture *mask = nullptr;
|
||||
video::SMaterial mat;
|
||||
|
||||
void initMaterial();
|
||||
void initTextures() override;
|
||||
void clearTextures() override;
|
||||
void initMask();
|
||||
void useEye(bool right) override;
|
||||
void resetEye() override;
|
||||
void merge();
|
||||
|
||||
public:
|
||||
RenderingCoreInterlaced(IrrlichtDevice *_device, Client *_client, Hud *_hud);
|
||||
void drawAll() override;
|
||||
InitInterlacedMaskStep(TextureBuffer *buffer, u8 index);
|
||||
void run(PipelineContext &context);
|
||||
private:
|
||||
TextureBuffer *buffer;
|
||||
video::ITexture *last_mask { nullptr };
|
||||
u8 index;
|
||||
};
|
||||
|
||||
void populateInterlacedPipeline(RenderPipeline *pipeline, Client *client);
|
||||
|
@@ -1,59 +0,0 @@
/*
|
||||
Minetest
|
||||
Copyright (C) 2010-2013 celeron55, Perttu Ahola <celeron55@gmail.com>
|
||||
Copyright (C) 2017 numzero, Lobachevskiy Vitaliy <numzer0@yandex.ru>
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU Lesser General Public License as published by
|
||||
the Free Software Foundation; either version 2.1 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU Lesser General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Lesser General Public License along
|
||||
with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*/
|
||||
|
||||
#include "pageflip.h"
|
||||
|
||||
#ifdef STEREO_PAGEFLIP_SUPPORTED
|
||||
|
||||
void RenderingCorePageflip::initTextures()
|
||||
{
|
||||
hud = driver->addRenderTargetTexture(
|
||||
screensize, "3d_render_hud", video::ECF_A8R8G8B8);
|
||||
}
|
||||
|
||||
void RenderingCorePageflip::clearTextures()
|
||||
{
|
||||
driver->removeTexture(hud);
|
||||
}
|
||||
|
||||
void RenderingCorePageflip::drawAll()
|
||||
{
|
||||
driver->setRenderTarget(hud, true, true, video::SColor(0, 0, 0, 0));
|
||||
drawHUD();
|
||||
driver->setRenderTarget(nullptr, false, false, skycolor);
|
||||
renderBothImages();
|
||||
}
|
||||
|
||||
void RenderingCorePageflip::useEye(bool _right)
|
||||
{
|
||||
driver->setRenderTarget(_right ? video::ERT_STEREO_RIGHT_BUFFER
|
||||
: video::ERT_STEREO_LEFT_BUFFER,
|
||||
true, true, skycolor);
|
||||
RenderingCoreStereo::useEye(_right);
|
||||
}
|
||||
|
||||
void RenderingCorePageflip::resetEye()
|
||||
{
|
||||
driver->draw2DImage(hud, v2s32(0, 0));
|
||||
driver->setRenderTarget(video::ERT_FRAME_BUFFER, false, false, skycolor);
|
||||
RenderingCoreStereo::resetEye();
|
||||
}
|
||||
|
||||
#endif // STEREO_PAGEFLIP_SUPPORTED
|
277 src/client/render/pipeline.cpp Normal file
@@ -0,0 +1,277 @@
/*
|
||||
Minetest
|
||||
Copyright (C) 2022 x2048, Dmitry Kostenko <codeforsmile@gmail.com>
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU Lesser General Public License as published by
|
||||
the Free Software Foundation; either version 2.1 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU Lesser General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Lesser General Public License along
|
||||
with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*/
|
||||
|
||||
#include "pipeline.h"
|
||||
#include "client/client.h"
|
||||
#include "client/hud.h"
|
||||
|
||||
#include <vector>
|
||||
#include <memory>
|
||||
|
||||
|
||||
TextureBuffer::~TextureBuffer()
|
||||
{
|
||||
if (m_render_target)
|
||||
m_driver->removeRenderTarget(m_render_target);
|
||||
m_render_target = nullptr;
|
||||
for (u32 index = 0; index < m_textures.size(); index++)
|
||||
m_driver->removeTexture(m_textures[index]);
|
||||
m_textures.clear();
|
||||
}
|
||||
|
||||
video::ITexture *TextureBuffer::getTexture(u8 index)
|
||||
{
|
||||
if (index == m_depth_texture_index)
|
||||
return m_depth_texture;
|
||||
if (index >= m_textures.size())
|
||||
return nullptr;
|
||||
return m_textures[index];
|
||||
}
|
||||
|
||||
|
||||
void TextureBuffer::setTexture(u8 index, core::dimension2du size, const std::string &name, video::ECOLOR_FORMAT format)
|
||||
{
|
||||
assert(index != NO_DEPTH_TEXTURE);
|
||||
|
||||
if (m_definitions.size() <= index)
|
||||
m_definitions.resize(index + 1);
|
||||
|
||||
if (m_depth_texture_index == index)
|
||||
m_depth_texture_index = NO_DEPTH_TEXTURE;
|
||||
|
||||
auto &definition = m_definitions[index];
|
||||
definition.valid = true;
|
||||
definition.dirty = true;
|
||||
definition.fixed_size = true;
|
||||
definition.size = size;
|
||||
definition.name = name;
|
||||
definition.format = format;
|
||||
}
|
||||
|
||||
void TextureBuffer::setTexture(u8 index, v2f scale_factor, const std::string &name, video::ECOLOR_FORMAT format)
|
||||
{
|
||||
assert(index != NO_DEPTH_TEXTURE);
|
||||
|
||||
if (m_definitions.size() <= index)
|
||||
m_definitions.resize(index + 1);
|
||||
|
||||
if (m_depth_texture_index == index)
|
||||
m_depth_texture_index = NO_DEPTH_TEXTURE;
|
||||
|
||||
auto &definition = m_definitions[index];
|
||||
definition.valid = true;
|
||||
definition.dirty = true;
|
||||
definition.fixed_size = false;
|
||||
definition.scale_factor = scale_factor;
|
||||
definition.name = name;
|
||||
definition.format = format;
|
||||
}
|
||||
|
||||
void TextureBuffer::setDepthTexture(u8 index, core::dimension2du size, const std::string &name, video::ECOLOR_FORMAT format)
|
||||
{
|
||||
assert(index != NO_DEPTH_TEXTURE);
|
||||
setTexture(index, size, name, format);
|
||||
m_depth_texture_index = index;
|
||||
}
|
||||
|
||||
void TextureBuffer::setDepthTexture(u8 index, v2f scale_factor, const std::string &name, video::ECOLOR_FORMAT format)
|
||||
{
|
||||
assert(index != NO_DEPTH_TEXTURE);
|
||||
setTexture(index, scale_factor, name, format);
|
||||
m_depth_texture_index = index;
|
||||
}
|
||||
|
||||
void TextureBuffer::reset(PipelineContext &context)
|
||||
{
|
||||
if (!m_driver)
|
||||
m_driver = context.device->getVideoDriver();
|
||||
|
||||
// remove extra textures
|
||||
if (m_textures.size() > m_definitions.size()) {
|
||||
for (unsigned i = m_definitions.size(); i < m_textures.size(); i++)
|
||||
if (m_textures[i])
|
||||
m_driver->removeTexture(m_textures[i]);
|
||||
|
||||
m_textures.set_used(m_definitions.size());
|
||||
}
|
||||
|
||||
// add placeholders for new definitions
|
||||
while (m_textures.size() < m_definitions.size())
|
||||
m_textures.push_back(nullptr);
|
||||
|
||||
// change textures to match definitions
|
||||
bool modified = false;
|
||||
for (u32 i = 0; i < m_definitions.size(); i++) {
|
||||
video::ITexture **ptr = &m_textures[i];
|
||||
if (i == m_depth_texture_index) {
|
||||
if (*ptr) {
|
||||
m_driver->removeTexture(*ptr);
|
||||
*ptr = nullptr;
|
||||
}
|
||||
ptr = &m_depth_texture;
|
||||
}
|
||||
|
||||
if (ensureTexture(ptr, m_definitions[i], context))
|
||||
modified = true;
|
||||
m_definitions[i].dirty = false;
|
||||
}
|
||||
|
||||
// make sure the depth texture is removed and reset
|
||||
if (m_depth_texture_index == NO_DEPTH_TEXTURE && m_depth_texture) {
|
||||
m_driver->removeTexture(m_depth_texture);
|
||||
m_depth_texture = nullptr;
|
||||
}
|
||||
|
||||
if (!m_render_target)
|
||||
m_render_target = m_driver->addRenderTarget();
|
||||
|
||||
if (modified)
|
||||
m_render_target->setTexture(m_textures, m_depth_texture);
|
||||
|
||||
RenderTarget::reset(context);
|
||||
}
|
||||
|
||||
void TextureBuffer::activate(PipelineContext &context)
|
||||
{
|
||||
m_driver->setRenderTargetEx(m_render_target, m_clear ? video::ECBF_DEPTH | video::ECBF_COLOR : 0, context.clear_color);
|
||||
RenderTarget::activate(context);
|
||||
}
|
||||
|
||||
bool TextureBuffer::ensureTexture(video::ITexture **texture, const TextureDefinition& definition, PipelineContext &context)
|
||||
{
|
||||
bool modify;
|
||||
core::dimension2du size;
|
||||
if (definition.valid) {
|
||||
if (definition.fixed_size)
|
||||
size = definition.size;
|
||||
else
|
||||
size = core::dimension2du(
|
||||
(u32)(context.target_size.X * definition.scale_factor.X),
|
||||
(u32)(context.target_size.Y * definition.scale_factor.Y));
|
||||
|
||||
modify = definition.dirty || (*texture == nullptr) || (*texture)->getSize() != size;
|
||||
}
|
||||
else {
|
||||
modify = (*texture != nullptr);
|
||||
}
|
||||
|
||||
if (!modify)
|
||||
return false;
|
||||
|
||||
if (*texture)
|
||||
m_driver->removeTexture(*texture);
|
||||
|
||||
if (definition.valid)
|
||||
*texture = m_driver->addRenderTargetTexture(size, definition.name.c_str(), definition.format);
|
||||
else
|
||||
*texture = nullptr;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
TextureBufferOutput::TextureBufferOutput(TextureBuffer *_buffer, u8 _texture_index)
|
||||
: buffer(_buffer), texture_index(_texture_index)
|
||||
{}
|
||||
|
||||
void TextureBufferOutput::activate(PipelineContext &context)
|
||||
{
|
||||
auto texture = buffer->getTexture(texture_index);
|
||||
auto driver = context.device->getVideoDriver();
|
||||
driver->setRenderTarget(texture, m_clear, m_clear, context.clear_color);
|
||||
driver->OnResize(texture->getSize());
|
||||
|
||||
RenderTarget::activate(context);
|
||||
}
|
||||
|
||||
u8 DynamicSource::getTextureCount()
|
||||
{
|
||||
assert(isConfigured());
|
||||
return upstream->getTextureCount();
|
||||
}
|
||||
|
||||
video::ITexture *DynamicSource::getTexture(u8 index)
|
||||
{
|
||||
assert(isConfigured());
|
||||
return upstream->getTexture(index);
|
||||
}
|
||||
|
||||
void ScreenTarget::activate(PipelineContext &context)
|
||||
{
|
||||
auto driver = context.device->getVideoDriver();
|
||||
driver->setRenderTarget(nullptr, m_clear, m_clear, context.clear_color);
|
||||
driver->OnResize(size);
|
||||
RenderTarget::activate(context);
|
||||
}
|
||||
|
||||
void DynamicTarget::activate(PipelineContext &context)
|
||||
{
|
||||
if (!isConfigured())
|
||||
throw std::logic_error("Dynamic render target is not configured before activation.");
|
||||
upstream->activate(context);
|
||||
}
|
||||
|
||||
void ScreenTarget::reset(PipelineContext &context)
|
||||
{
|
||||
RenderTarget::reset(context);
|
||||
size = context.device->getVideoDriver()->getScreenSize();
|
||||
}
|
||||
|
||||
SetRenderTargetStep::SetRenderTargetStep(RenderStep *_step, RenderTarget *_target)
|
||||
: step(_step), target(_target)
|
||||
{
|
||||
}
|
||||
|
||||
void SetRenderTargetStep::run(PipelineContext &context)
|
||||
{
|
||||
step->setRenderTarget(target);
|
||||
}
|
||||
|
||||
RenderSource *RenderPipeline::getInput()
|
||||
{
|
||||
return &m_input;
|
||||
}
|
||||
|
||||
RenderTarget *RenderPipeline::getOutput()
|
||||
{
|
||||
return &m_output;
|
||||
}
|
||||
|
||||
void RenderPipeline::run(PipelineContext &context)
|
||||
{
|
||||
v2u32 original_size = context.target_size;
|
||||
context.target_size = v2u32(original_size.X * scale.X, original_size.Y * scale.Y);
|
||||
|
||||
for (auto &object : m_objects)
|
||||
object->reset(context);
|
||||
|
||||
for (auto &step: m_pipeline)
|
||||
step->run(context);
|
||||
|
||||
context.target_size = original_size;
|
||||
}
|
||||
|
||||
void RenderPipeline::setRenderSource(RenderSource *source)
|
||||
{
|
||||
m_input.setRenderSource(source);
|
||||
}
|
||||
|
||||
void RenderPipeline::setRenderTarget(RenderTarget *target)
|
||||
{
|
||||
m_output.setRenderTarget(target);
|
||||
}
|
431 src/client/render/pipeline.h Normal file
@@ -0,0 +1,431 @@
/*
|
||||
Minetest
|
||||
Copyright (C) 2022 x2048, Dmitry Kostenko <codeforsmile@gmail.com>
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU Lesser General Public License as published by
|
||||
the Free Software Foundation; either version 2.1 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU Lesser General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Lesser General Public License along
|
||||
with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*/
|
||||
#pragma once
|
||||
|
||||
#include "irrlichttypes_extrabloated.h"
|
||||
|
||||
#include <vector>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
|
||||
class RenderSource;
|
||||
class RenderTarget;
|
||||
class RenderStep;
|
||||
class Client;
|
||||
class Hud;
|
||||
class ShadowRenderer;
|
||||
|
||||
struct PipelineContext
|
||||
{
|
||||
PipelineContext(IrrlichtDevice *_device, Client *_client, Hud *_hud, ShadowRenderer *_shadow_renderer, video::SColor _color, v2u32 _target_size)
|
||||
: device(_device), client(_client), hud(_hud), shadow_renderer(_shadow_renderer), clear_color(_color), target_size(_target_size)
|
||||
{
|
||||
}
|
||||
|
||||
IrrlichtDevice *device;
|
||||
Client *client;
|
||||
Hud *hud;
|
||||
ShadowRenderer *shadow_renderer;
|
||||
video::SColor clear_color;
|
||||
v2u32 target_size;
|
||||
|
||||
bool show_hud {true};
|
||||
bool show_minimap {true};
|
||||
bool draw_wield_tool {true};
|
||||
bool draw_crosshair {true};
|
||||
};
|
||||
|
||||
/**
|
||||
* Base object that can be owned by RenderPipeline
|
||||
*
|
||||
*/
|
||||
class RenderPipelineObject
|
||||
{
|
||||
public:
|
||||
virtual ~RenderPipelineObject() = default;
|
||||
virtual void reset(PipelineContext &context) {}
|
||||
};
|
||||
|
||||
/**
|
||||
* Represents a source of rendering information such as textures
|
||||
*/
|
||||
class RenderSource : virtual public RenderPipelineObject
|
||||
{
|
||||
public:
|
||||
/**
|
||||
* Return the number of textures in the source.
|
||||
*/
|
||||
virtual u8 getTextureCount() = 0;
|
||||
|
||||
/**
|
||||
* Get a texture by index.
|
||||
* Returns nullptr if the texture does not exist.
|
||||
*/
|
||||
virtual video::ITexture *getTexture(u8 index) = 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* Represents a render target (screen or framebuffer)
|
||||
*/
|
||||
class RenderTarget : virtual public RenderPipelineObject
|
||||
{
|
||||
public:
|
||||
/**
|
||||
* Activate the render target and configure OpenGL state for the output.
|
||||
* This is usually done by @see RenderStep implementations.
|
||||
*/
|
||||
virtual void activate(PipelineContext &context)
|
||||
{
|
||||
m_clear = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resets the state of the object for the next pipeline iteration
|
||||
*/
|
||||
virtual void reset(PipelineContext &context) override
|
||||
{
|
||||
m_clear = true;
|
||||
}
|
||||
|
||||
protected:
|
||||
bool m_clear {true};
|
||||
};
|
||||
|
||||
/**
|
||||
* Texture buffer represents a framebuffer with multiple attached textures.
|
||||
*
|
||||
* @note Use of TextureBuffer requires use of gl_FragData[] in the shader
|
||||
*/
|
||||
class TextureBuffer : public RenderSource, public RenderTarget
|
||||
{
|
||||
public:
|
||||
virtual ~TextureBuffer() override;
|
||||
|
||||
/**
|
||||
* Configure fixed-size texture for the specific index
|
||||
*
|
||||
* @param index index of the texture
|
||||
* @param size width and height of the texture in pixels
|
||||
* @param name unique name of the texture
|
||||
* @param format color format
|
||||
*/
|
||||
void setTexture(u8 index, core::dimension2du size, const std::string& name, video::ECOLOR_FORMAT format);
|
||||
|
||||
/**
|
||||
* Configure relative-size texture for the specific index
|
||||
*
|
||||
* @param index index of the texture
|
||||
* @param scale_factor relation of the texture dimensions to the screen dimensions
|
||||
* @param name unique name of the texture
|
||||
* @param format color format
|
||||
*/
|
||||
void setTexture(u8 index, v2f scale_factor, const std::string& name, video::ECOLOR_FORMAT format);
|
||||
|
||||
/**
|
||||
* Configure depth texture and assign index
|
||||
*
|
||||
* @param index index to use for the depth texture
|
||||
* @param size width and height of the texture in pixels
|
||||
* @param name unique name for the texture
|
||||
* @param format color format
|
||||
*/
|
||||
void setDepthTexture(u8 index, core::dimension2du size, const std::string& name, video::ECOLOR_FORMAT format);
|
||||
|
||||
/**
|
||||
* Configure depth texture and assign index
|
||||
*
|
||||
* @param index index to use for the depth texture
|
||||
* @param scale_factor relation of the texture dimensions to the screen dimensions
|
||||
* @param name unique name for the texture
|
||||
* @param format color format
|
||||
*/
|
||||
void setDepthTexture(u8 index, v2f scale_factor, const std::string& name, video::ECOLOR_FORMAT format);
|
||||
|
||||
virtual u8 getTextureCount() override { return m_textures.size(); }
|
||||
virtual video::ITexture *getTexture(u8 index) override;
|
||||
virtual void activate(PipelineContext &context) override;
|
||||
virtual void reset(PipelineContext &context) override;
|
||||
private:
|
||||
static const u8 NO_DEPTH_TEXTURE = 255;
|
||||
|
||||
struct TextureDefinition
|
||||
{
|
||||
bool valid { false };
|
||||
bool fixed_size { false };
|
||||
bool dirty { false };
|
||||
v2f scale_factor;
|
||||
core::dimension2du size;
|
||||
std::string name;
|
||||
video::ECOLOR_FORMAT format;
|
||||
};
|
||||
|
||||
/**
|
||||
* Make sure the texture in the given slot matches the texture definition given the current context.
|
||||
* @param textureSlot address of the texture pointer to verify and populate.
|
||||
* @param definition logical definition of the texture
|
||||
* @param context current context of the rendering pipeline
|
||||
* @return true if a new texture was created and put into the slot
|
||||
* @return false if the slot was not modified
|
||||
*/
|
||||
bool ensureTexture(video::ITexture **textureSlot, const TextureDefinition& definition, PipelineContext &context);
|
||||
|
||||
video::IVideoDriver *m_driver { nullptr };
|
||||
std::vector<TextureDefinition> m_definitions;
|
||||
core::array<video::ITexture *> m_textures;
|
||||
video::ITexture *m_depth_texture { nullptr };
|
||||
u8 m_depth_texture_index { NO_DEPTH_TEXTURE };
|
||||
video::IRenderTarget *m_render_target { nullptr };
|
||||
};
|
||||
|
||||
/**
|
||||
* Targets output to designated texture in texture buffer
|
||||
*/
|
||||
class TextureBufferOutput : public RenderTarget
|
||||
{
|
||||
public:
|
||||
TextureBufferOutput(TextureBuffer *buffer, u8 texture_index);
|
||||
void activate(PipelineContext &context) override;
|
||||
private:
|
||||
TextureBuffer *buffer;
|
||||
u8 texture_index;
|
||||
};
|
||||
|
||||
/**
|
||||
* Allows remapping texture indices in another RenderSource.
|
||||
*
|
||||
* @note all unmapped indexes are passed through to the underlying render source.
|
||||
*/
|
||||
class RemappingSource : RenderSource
|
||||
{
|
||||
public:
|
||||
RemappingSource(RenderSource *source)
|
||||
: m_source(source)
|
||||
{}
|
||||
|
||||
/**
|
||||
* Maps texture index to a different index in the dependent source.
|
||||
*
|
||||
* @param index texture index as requested by the @see RenderStep.
|
||||
* @param target_index matching texture index in the underlying @see RenderSource.
|
||||
*/
|
||||
void setMapping(u8 index, u8 target_index)
|
||||
{
|
||||
if (index >= m_mappings.size()) {
|
||||
u8 start = m_mappings.size();
|
||||
m_mappings.resize(index + 1);
|
||||
for (u8 i = start; i < m_mappings.size(); ++i)
|
||||
m_mappings[i] = i;
|
||||
}
|
||||
|
||||
m_mappings[index] = target_index;
|
||||
}
|
||||
|
||||
virtual u8 getTextureCount() override
|
||||
{
|
||||
return m_mappings.size();
|
||||
}
|
||||
|
||||
virtual video::ITexture *getTexture(u8 index) override
|
||||
{
|
||||
if (index < m_mappings.size())
|
||||
index = m_mappings[index];
|
||||
|
||||
return m_source->getTexture(index);
|
||||
}
|
||||
public:
|
||||
RenderSource *m_source;
|
||||
std::vector<u8> m_mappings;
|
||||
};
|
||||
|
||||
class DynamicSource : public RenderSource
|
||||
{
|
||||
public:
|
||||
bool isConfigured() { return upstream != nullptr; }
|
||||
void setRenderSource(RenderSource *value) { upstream = value; }
|
||||
|
||||
/**
|
||||
* Return the number of textures in the source.
|
||||
*/
|
||||
virtual u8 getTextureCount() override;
|
||||
|
||||
/**
|
||||
* Get a texture by index.
|
||||
* Returns nullptr if the texture does not exist.
|
||||
*/
|
||||
virtual video::ITexture *getTexture(u8 index) override;
|
||||
private:
|
||||
RenderSource *upstream { nullptr };
|
||||
};
|
||||
|
||||
/**
|
||||
* Implements direct output to screen framebuffer.
|
||||
*/
|
||||
class ScreenTarget : public RenderTarget
|
||||
{
|
||||
public:
|
||||
virtual void activate(PipelineContext &context) override;
|
||||
virtual void reset(PipelineContext &context) override;
|
||||
private:
|
||||
core::dimension2du size;
|
||||
};
|
||||
|
||||
class DynamicTarget : public RenderTarget
|
||||
{
|
||||
public:
|
||||
bool isConfigured() { return upstream != nullptr; }
|
||||
void setRenderTarget(RenderTarget *value) { upstream = value; }
|
||||
virtual void activate(PipelineContext &context) override;
|
||||
private:
|
||||
RenderTarget *upstream { nullptr };
|
||||
};
|
||||
|
||||
/**
|
||||
* Base class for rendering steps in the pipeline
|
||||
*/
|
||||
class RenderStep : virtual public RenderPipelineObject
|
||||
{
|
||||
public:
|
||||
/**
|
||||
* Assigns render source to this step.
|
||||
*
|
||||
* @param source source of rendering information
|
||||
*/
|
||||
virtual void setRenderSource(RenderSource *source) = 0;
|
||||
|
||||
/**
|
||||
* Assigns render target to this step.
|
||||
*
|
||||
* @param target render target to send output to.
|
||||
*/
|
||||
virtual void setRenderTarget(RenderTarget *target) = 0;
|
||||
|
||||
/**
|
||||
* Runs the step. This method is invoked by the pipeline.
|
||||
*/
|
||||
virtual void run(PipelineContext &context) = 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* Provides default empty implementation of supporting methods in a rendering step.
|
||||
*/
|
||||
class TrivialRenderStep : public RenderStep
|
||||
{
|
||||
public:
|
||||
virtual void setRenderSource(RenderSource *source) override {}
|
||||
virtual void setRenderTarget(RenderTarget *target) override {}
|
||||
virtual void reset(PipelineContext &) override {}
|
||||
};
|
||||
|
||||
/**
|
||||
* Dynamically changes render target of another step.
|
||||
*
|
||||
* This allows re-running parts of the pipeline with different outputs
|
||||
*/
|
||||
class SetRenderTargetStep : public TrivialRenderStep
|
||||
{
|
||||
public:
|
||||
SetRenderTargetStep(RenderStep *step, RenderTarget *target);
|
||||
virtual void run(PipelineContext &context) override;
|
||||
private:
|
||||
RenderStep *step;
|
||||
RenderTarget *target;
|
||||
};
|
||||
|
||||
/**
|
||||
* Render Pipeline provides a flexible way to execute rendering steps in the engine.
|
||||
*
|
||||
* RenderPipeline also implements @see RenderStep, allowing for nesting of the pipelines.
|
||||
*/
|
||||
class RenderPipeline : public RenderStep
|
||||
{
|
||||
public:
|
||||
/**
|
||||
* Add a step to the end of the pipeline
|
||||
*
|
||||
* @param step reference to a @see RenderStep implementation.
|
||||
*/
|
||||
RenderStep *addStep(RenderStep *step)
|
||||
{
|
||||
m_pipeline.push_back(step);
|
||||
return step;
|
||||
}
|
||||
|
||||
/**
|
||||
* Capture ownership of a dynamically created @see RenderStep instance.
|
||||
*
|
||||
* RenderPipeline will delete the instance when the pipeline is destroyed.
|
||||
*
|
||||
* @param object unique pointer to the instance.
* @return T* raw pointer to the owned object.
|
||||
*/
|
||||
template<typename T>
|
||||
T *own(std::unique_ptr<T> &&object)
|
||||
{
|
||||
T* result = object.release();
|
||||
m_objects.push_back(std::unique_ptr<RenderPipelineObject>(result));
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new object that will be managed by the pipeline
|
||||
*
|
||||
* @tparam T type of the object to be created
|
||||
* @tparam Args types of constructor arguments
|
||||
* @param args constructor arguments
|
||||
* @return T* pointer to the newly created object
|
||||
*/
|
||||
template<typename T, typename... Args>
|
||||
T *createOwned(Args&&... args) {
|
||||
return own(std::make_unique<T>(std::forward<Args>(args)...));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create and add a step managed by the pipeline and return a pointer
|
||||
* to the step for further configuration.
|
||||
*
|
||||
* @tparam T Type of the step to be added.
|
||||
* @tparam Args Types of the constructor parameters
|
||||
* @param args Constructor parameters
|
||||
* @return RenderStep* Pointer to the created step for further configuration.
|
||||
*/
|
||||
template<typename T, typename... Args>
|
||||
RenderStep *addStep(Args&&... args) {
|
||||
T* result = own(std::make_unique<T>(std::forward<Args>(args)...));
|
||||
return addStep(result);
|
||||
}
|
||||
|
||||
RenderSource *getInput();
|
||||
RenderTarget *getOutput();
|
||||
|
||||
v2f getScale() { return scale; }
|
||||
void setScale(v2f value) { scale = value; }
|
||||
|
||||
virtual void reset(PipelineContext &context) override {}
|
||||
virtual void run(PipelineContext &context) override;
|
||||
|
||||
virtual void setRenderSource(RenderSource *source) override;
|
||||
virtual void setRenderTarget(RenderTarget *target) override;
|
||||
private:
|
||||
std::vector<RenderStep *> m_pipeline;
|
||||
std::vector< std::unique_ptr<RenderPipelineObject> > m_objects;
|
||||
DynamicSource m_input;
|
||||
DynamicTarget m_output;
|
||||
v2f scale { 1.0f, 1.0f };
|
||||
};
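The doc comments above describe the pieces individually; below is a minimal sketch of how they compose. ExampleDrawStep, the texture names, the ECF_D32 depth format and the device/client/hud/shadow_renderer/skycolor/screensize variables are illustrative assumptions only; the actual assembly is done by the populate*Pipeline() functions and driven from RenderingCore::draw().

RenderPipeline *pipeline = new RenderPipeline();

// Off-screen buffer at half resolution: one color texture plus a depth texture.
TextureBuffer *buffer = pipeline->createOwned<TextureBuffer>();
buffer->setTexture(0, v2f(0.5f, 0.5f), "example_color", video::ECF_A8R8G8B8);
buffer->setDepthTexture(1, v2f(0.5f, 0.5f), "example_depth", video::ECF_D32); // depth format assumed

// First step renders into the buffer, second step reads it and writes to the screen.
RenderStep *draw = pipeline->addStep<ExampleDrawStep>();      // hypothetical RenderStep
draw->setRenderTarget(pipeline->createOwned<TextureBufferOutput>(buffer, 0));

RenderStep *present = pipeline->addStep<ExampleDrawStep>();   // hypothetical RenderStep
present->setRenderSource(buffer);
present->setRenderTarget(pipeline->createOwned<ScreenTarget>());

// Per frame (as RenderingCore::draw() does): run() first resets every owned
// object, which recreates textures to match the current target size, and then
// executes the steps in order.
PipelineContext context(device, client, hud, shadow_renderer, skycolor, screensize);
pipeline->reset(context);
pipeline->run(context);

Ownership stays with the pipeline: everything passed through own() or createOwned() is deleted when the pipeline itself is destroyed, which is why RenderingCore only needs to delete the pipeline.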
@@ -19,58 +19,134 @@ with this program; if not, write to the Free Software Foundation, Inc.,
*/
|
||||
|
||||
#include "plain.h"
|
||||
#include "settings.h"
|
||||
#include "secondstage.h"
|
||||
#include "client/camera.h"
|
||||
#include "client/client.h"
|
||||
#include "client/clientmap.h"
|
||||
#include "client/hud.h"
|
||||
#include "client/minimap.h"
|
||||
#include "client/shadows/dynamicshadowsrender.h"
|
||||
|
||||
inline u32 scaledown(u32 coef, u32 size)
|
||||
/// Draw3D pipeline step
|
||||
void Draw3D::run(PipelineContext &context)
|
||||
{
|
||||
return (size + coef - 1) / coef;
|
||||
}
|
||||
if (m_target)
|
||||
m_target->activate(context);
|
||||
|
||||
RenderingCorePlain::RenderingCorePlain(
|
||||
IrrlichtDevice *_device, Client *_client, Hud *_hud)
|
||||
: RenderingCore(_device, _client, _hud)
|
||||
{
|
||||
scale = g_settings->getU16("undersampling");
|
||||
}
|
||||
|
||||
void RenderingCorePlain::initTextures()
|
||||
{
|
||||
if (scale <= 1)
|
||||
context.device->getSceneManager()->drawAll();
|
||||
context.device->getVideoDriver()->setTransform(video::ETS_WORLD, core::IdentityMatrix);
|
||||
if (!context.show_hud)
|
||||
return;
|
||||
v2u32 size{scaledown(scale, screensize.X), scaledown(scale, screensize.Y)};
|
||||
lowres = driver->addRenderTargetTexture(
|
||||
size, "render_lowres", video::ECF_A8R8G8B8);
|
||||
context.hud->drawBlockBounds();
|
||||
context.hud->drawSelectionMesh();
|
||||
if (context.draw_wield_tool)
|
||||
context.client->getCamera()->drawWieldedTool();
|
||||
}
|
||||
|
||||
void RenderingCorePlain::clearTextures()
|
||||
void DrawHUD::run(PipelineContext &context)
|
||||
{
|
||||
if (scale <= 1)
|
||||
return;
|
||||
driver->removeTexture(lowres);
|
||||
if (context.show_hud) {
|
||||
if (context.shadow_renderer)
|
||||
context.shadow_renderer->drawDebug();
|
||||
|
||||
if (context.draw_crosshair)
|
||||
context.hud->drawCrosshair();
|
||||
|
||||
context.hud->drawHotbar(context.client->getEnv().getLocalPlayer()->getWieldIndex());
|
||||
context.hud->drawLuaElements(context.client->getCamera()->getOffset());
|
||||
context.client->getCamera()->drawNametags();
|
||||
auto mapper = context.client->getMinimap();
|
||||
if (mapper && context.show_minimap)
|
||||
mapper->drawMinimap();
|
||||
}
|
||||
context.device->getGUIEnvironment()->drawAll();
|
||||
}
|
||||
|
||||
void RenderingCorePlain::beforeDraw()
|
||||
|
||||
void MapPostFxStep::setRenderTarget(RenderTarget * _target)
|
||||
{
|
||||
if (scale <= 1)
|
||||
return;
|
||||
driver->setRenderTarget(lowres, true, true, skycolor);
|
||||
target = _target;
|
||||
}
|
||||
|
||||
void RenderingCorePlain::upscale()
|
||||
void MapPostFxStep::run(PipelineContext &context)
|
||||
{
|
||||
if (scale <= 1)
|
||||
return;
|
||||
driver->setRenderTarget(0, true, true);
|
||||
v2u32 size{scaledown(scale, screensize.X), scaledown(scale, screensize.Y)};
|
||||
v2u32 dest_size{scale * size.X, scale * size.Y};
|
||||
driver->draw2DImage(lowres, core::rect<s32>(0, 0, dest_size.X, dest_size.Y),
|
||||
core::rect<s32>(0, 0, size.X, size.Y));
|
||||
if (target)
|
||||
target->activate(context);
|
||||
|
||||
context.client->getEnv().getClientMap().renderPostFx(context.client->getCamera()->getCameraMode());
|
||||
}
|
||||
|
||||
void RenderingCorePlain::drawAll()
|
||||
void RenderShadowMapStep::run(PipelineContext &context)
|
||||
{
|
||||
draw3D();
|
||||
drawPostFx();
|
||||
upscale();
|
||||
drawHUD();
|
||||
// This is necessary to render shadows for animations correctly
|
||||
context.device->getSceneManager()->getRootSceneNode()->OnAnimate(context.device->getTimer()->getTime());
|
||||
context.shadow_renderer->update();
|
||||
}
|
||||
|
||||
// class UpscaleStep
|
||||
|
||||
void UpscaleStep::run(PipelineContext &context)
|
||||
{
|
||||
video::ITexture *lowres = m_source->getTexture(0);
|
||||
m_target->activate(context);
|
||||
context.device->getVideoDriver()->draw2DImage(lowres,
|
||||
core::rect<s32>(0, 0, context.target_size.X, context.target_size.Y),
|
||||
core::rect<s32>(0, 0, lowres->getSize().Width, lowres->getSize().Height));
|
||||
}
|
||||
|
||||
std::unique_ptr<RenderStep> create3DStage(Client *client, v2f scale)
|
||||
{
|
||||
RenderStep *step = new Draw3D();
|
||||
if (g_settings->getBool("enable_shaders")) {
|
||||
RenderPipeline *pipeline = new RenderPipeline();
|
||||
pipeline->addStep(pipeline->own(std::unique_ptr<RenderStep>(step)));
|
||||
|
||||
auto effect = addPostProcessing(pipeline, step, scale, client);
|
||||
effect->setRenderTarget(pipeline->getOutput());
|
||||
step = pipeline;
|
||||
}
|
||||
return std::unique_ptr<RenderStep>(step);
|
||||
}
|
||||
|
||||
static v2f getDownscaleFactor()
|
||||
{
|
||||
u16 undersampling = MYMAX(g_settings->getU16("undersampling"), 1);
|
||||
return v2f(1.0f / undersampling);
|
||||
}
|
||||
|
||||
RenderStep* addUpscaling(RenderPipeline *pipeline, RenderStep *previousStep, v2f downscale_factor)
|
||||
{
|
||||
const int TEXTURE_UPSCALE = 0;
|
||||
|
||||
if (downscale_factor.X == 1.0f && downscale_factor.Y == 1.0f)
|
||||
return previousStep;
|
||||
|
||||
// Initialize buffer
|
||||
TextureBuffer *buffer = pipeline->createOwned<TextureBuffer>();
|
||||
buffer->setTexture(TEXTURE_UPSCALE, downscale_factor, "upscale", video::ECF_A8R8G8B8);
|
||||
|
||||
// Attach previous step to the buffer
|
||||
TextureBufferOutput *buffer_output = pipeline->createOwned<TextureBufferOutput>(buffer, TEXTURE_UPSCALE);
|
||||
previousStep->setRenderTarget(buffer_output);
|
||||
|
||||
// Add upscaling step
|
||||
RenderStep *upscale = pipeline->createOwned<UpscaleStep>();
|
||||
upscale->setRenderSource(buffer);
|
||||
pipeline->addStep(upscale);
|
||||
|
||||
return upscale;
|
||||
}
|
||||
|
||||
void populatePlainPipeline(RenderPipeline *pipeline, Client *client)
|
||||
{
|
||||
auto downscale_factor = getDownscaleFactor();
|
||||
auto step3D = pipeline->own(create3DStage(client, downscale_factor));
|
||||
pipeline->addStep(step3D);
|
||||
pipeline->addStep<MapPostFxStep>();
|
||||
|
||||
step3D = addUpscaling(pipeline, step3D, downscale_factor);
|
||||
|
||||
step3D->setRenderTarget(pipeline->createOwned<ScreenTarget>());
|
||||
|
||||
pipeline->addStep<DrawHUD>();
|
||||
}
|
||||
|
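To illustrate the undersampling path above: with undersampling = 2, getDownscaleFactor() returns (0.5, 0.5), Draw3D renders into the half-resolution "upscale" texture, and UpscaleStep stretches that texture back to the full target size. A standalone sketch of the size arithmetic (not engine code; names are illustrative):

#include <algorithm>
#include <cstdint>

struct Size { uint32_t w, h; };

// Mirrors getDownscaleFactor()/addUpscaling(): an undersampling factor of N
// renders the scene at 1/N resolution and lets the upscale step fill the screen.
Size undersampledSize(Size screen, uint16_t undersampling)
{
	uint16_t n = std::max<uint16_t>(undersampling, 1);
	return { screen.w / n, screen.h / n }; // e.g. 1920x1080 with n = 2 -> 960x540
}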
@ -20,19 +20,70 @@ with this program; if not, write to the Free Software Foundation, Inc.,
#pragma once
#include "core.h"
#include "pipeline.h"

/**
 * Implements a pipeline step that renders the 3D scene
 */
class Draw3D : public RenderStep
{
public:
	virtual void setRenderSource(RenderSource *) override {}
	virtual void setRenderTarget(RenderTarget *target) override { m_target = target; }

	virtual void reset(PipelineContext &context) override {}
	virtual void run(PipelineContext &context) override;

private:
	RenderTarget *m_target {nullptr};
};

/**
 * Implements a pipeline step that renders the game HUD
 */
class DrawHUD : public RenderStep
{
public:
	virtual void setRenderSource(RenderSource *) override {}
	virtual void setRenderTarget(RenderTarget *) override {}

	virtual void reset(PipelineContext &context) override {}
	virtual void run(PipelineContext &context) override;
};

class MapPostFxStep : public TrivialRenderStep
{
public:
	virtual void setRenderTarget(RenderTarget *) override;
	virtual void run(PipelineContext &context) override;
private:
	RenderTarget *target;
};

class RenderShadowMapStep : public TrivialRenderStep
{
public:
	virtual void run(PipelineContext &context) override;
};

/**
 * UpscaleStep performs rescaling of the image
 * in the source texture 0 to the size of the target.
 */
class UpscaleStep : public RenderStep
{
public:
	virtual void setRenderSource(RenderSource *source) override { m_source = source; }
	virtual void setRenderTarget(RenderTarget *target) override { m_target = target; }
	virtual void reset(PipelineContext &context) override {}
	virtual void run(PipelineContext &context) override;
private:
	RenderSource *m_source;
	RenderTarget *m_target;
};

std::unique_ptr<RenderStep> create3DStage(Client *client, v2f scale);
RenderStep* addUpscaling(RenderPipeline *pipeline, RenderStep *previousStep, v2f downscale_factor);

void populatePlainPipeline(RenderPipeline *pipeline, Client *client);
src/client/render/secondstage.cpp (new file, 118 lines)
@ -0,0 +1,118 @@
/*
Minetest
Copyright (C) 2010-2013 celeron55, Perttu Ahola <celeron55@gmail.com>
Copyright (C) 2017 numzero, Lobachevskiy Vitaliy <numzer0@yandex.ru>
Copyright (C) 2020 appgurueu, Lars Mueller <appgurulars@gmx.de>

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/

#include "secondstage.h"
#include "client/client.h"
#include "client/shader.h"
#include "client/tile.h"

PostProcessingStep::PostProcessingStep(u32 _shader_id, const std::vector<u8> &_texture_map) :
	shader_id(_shader_id), texture_map(_texture_map)
{
	assert(texture_map.size() <= video::MATERIAL_MAX_TEXTURES);
	configureMaterial();
}

void PostProcessingStep::configureMaterial()
{
	material.UseMipMaps = false;
	material.ZBuffer = true;
	material.ZWriteEnable = video::EZW_ON;
	for (u32 k = 0; k < texture_map.size(); ++k) {
		material.TextureLayer[k].AnisotropicFilter = false;
		material.TextureLayer[k].BilinearFilter = false;
		material.TextureLayer[k].TrilinearFilter = false;
		material.TextureLayer[k].TextureWrapU = video::ETC_CLAMP_TO_EDGE;
		material.TextureLayer[k].TextureWrapV = video::ETC_CLAMP_TO_EDGE;
	}
}

void PostProcessingStep::setRenderSource(RenderSource *_source)
{
	source = _source;
}

void PostProcessingStep::setRenderTarget(RenderTarget *_target)
{
	target = _target;
}

void PostProcessingStep::reset(PipelineContext &context)
{
}

void PostProcessingStep::run(PipelineContext &context)
{
	if (target)
		target->activate(context);

	// attach the shader
	material.MaterialType = context.client->getShaderSource()->getShaderInfo(shader_id).material;

	auto driver = context.device->getVideoDriver();

	for (u32 i = 0; i < texture_map.size(); i++)
		material.TextureLayer[i].Texture = source->getTexture(texture_map[i]);

	static const video::SColor color = video::SColor(0, 0, 0, 255);
	static const video::S3DVertex vertices[4] = {
			video::S3DVertex(1.0, -1.0, 0.0, 0.0, 0.0, -1.0,
					color, 1.0, 0.0),
			video::S3DVertex(-1.0, -1.0, 0.0, 0.0, 0.0, -1.0,
					color, 0.0, 0.0),
			video::S3DVertex(-1.0, 1.0, 0.0, 0.0, 0.0, -1.0,
					color, 0.0, 1.0),
			video::S3DVertex(1.0, 1.0, 0.0, 0.0, 0.0, -1.0,
					color, 1.0, 1.0),
	};
	static const u16 indices[6] = {0, 1, 2, 2, 3, 0};
	driver->setMaterial(material);
	driver->drawVertexPrimitiveList(&vertices, 4, &indices, 2);
}

RenderStep *addPostProcessing(RenderPipeline *pipeline, RenderStep *previousStep, v2f scale, Client *client)
{
	auto buffer = pipeline->createOwned<TextureBuffer>();
	static const u8 TEXTURE_COLOR = 0;
	static const u8 TEXTURE_DEPTH = 3;

	// init post-processing buffer
	buffer->setTexture(TEXTURE_COLOR, scale, "3d_render", video::ECF_A8R8G8B8);

	video::ECOLOR_FORMAT depth_format = video::ECF_D16; // fallback depth format
	auto driver = client->getSceneManager()->getVideoDriver();
	if (driver->queryTextureFormat(video::ECF_D32))
		depth_format = video::ECF_D32;
	else if (driver->queryTextureFormat(video::ECF_D24S8))
		depth_format = video::ECF_D24S8;
	buffer->setDepthTexture(TEXTURE_DEPTH, scale, "3d_depthmap", depth_format);

	// attach buffer to the previous step
	previousStep->setRenderTarget(buffer);

	// post-processing stage
	// set up shader
	u32 shader_id = client->getShaderSource()->getShader("second_stage", TILE_MATERIAL_PLAIN, NDT_MESH);

	RenderStep *effect = pipeline->addStep<PostProcessingStep>(shader_id, std::vector<u8> { TEXTURE_COLOR });
	effect->setRenderSource(buffer);
	return effect;
}
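The texture_map argument decides which textures of the source TextureBuffer feed which material layers of the full-screen quad. A hedged sketch of chaining a second, hypothetical effect the same way (the shader name "my_effect" and the helper addExtraEffect are illustrative, not part of this commit):

// Hypothetical: feed both the color and depth textures created in
// addPostProcessing() into one extra full-screen pass.
RenderStep *addExtraEffect(RenderPipeline *pipeline, TextureBuffer *buffer, Client *client)
{
	static const u8 TEXTURE_COLOR = 0;
	static const u8 TEXTURE_DEPTH = 3;

	u32 shader_id = client->getShaderSource()->getShader("my_effect", TILE_MATERIAL_PLAIN, NDT_MESH);

	// texture_map {0, 3}: material layer 0 samples the color buffer,
	// layer 1 samples the depth buffer.
	RenderStep *effect = pipeline->addStep<PostProcessingStep>(shader_id,
			std::vector<u8> { TEXTURE_COLOR, TEXTURE_DEPTH });
	effect->setRenderSource(buffer);
	return effect;
}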
@ -20,24 +20,27 @@ with this program; if not, write to the Free Software Foundation, Inc.,
#pragma once
#include "stereo.h"
#include "pipeline.h"

class PostProcessingStep : public RenderStep
{
public:
	PostProcessingStep(u32 shader_id, const std::vector<u8> &texture_map);

	void setRenderSource(RenderSource *source) override;
	void setRenderTarget(RenderTarget *target) override;
	void reset(PipelineContext &context) override;
	void run(PipelineContext &context) override;

private:
	u32 shader_id;
	std::vector<u8> texture_map;
	RenderSource *source { nullptr };
	RenderTarget *target { nullptr };
	video::SMaterial material;

	void configureMaterial();
};

RenderStep *addPostProcessing(RenderPipeline *pipeline, RenderStep *previousStep, v2f scale, Client *client);
@ -19,56 +19,73 @@ with this program; if not, write to the Free Software Foundation, Inc.,
*/

#include "sidebyside.h"
#include <ICameraSceneNode.h>
#include "client/hud.h"
#include "client/camera.h"

DrawImageStep::DrawImageStep(u8 texture_index, v2f _offset) :
	texture_index(texture_index), offset(_offset)
{}

void DrawImageStep::setRenderSource(RenderSource *_source)
{
	source = _source;
}
void DrawImageStep::setRenderTarget(RenderTarget *_target)
{
	target = _target;
}

void DrawImageStep::run(PipelineContext &context)
{
	if (target)
		target->activate(context);

	auto texture = source->getTexture(texture_index);
	core::dimension2du output_size = context.device->getVideoDriver()->getScreenSize();
	v2s32 pos(offset.X * output_size.Width, offset.Y * output_size.Height);
	context.device->getVideoDriver()->draw2DImage(texture, pos);
}

void populateSideBySidePipeline(RenderPipeline *pipeline, Client *client, bool horizontal, bool flipped, v2f &virtual_size_scale)
{
	static const u8 TEXTURE_LEFT = 0;
	static const u8 TEXTURE_RIGHT = 1;

	v2f offset;
	if (horizontal) {
		virtual_size_scale = v2f(1.0f, 0.5f);
		offset = v2f(0.0f, 0.5f);
	}
	else {
		virtual_size_scale = v2f(0.5f, 1.0f);
		offset = v2f(0.5f, 0.0f);
	}

	TextureBuffer *buffer = pipeline->createOwned<TextureBuffer>();
	buffer->setTexture(TEXTURE_LEFT, virtual_size_scale, "3d_render_left", video::ECF_A8R8G8B8);
	buffer->setTexture(TEXTURE_RIGHT, virtual_size_scale, "3d_render_right", video::ECF_A8R8G8B8);

	auto step3D = pipeline->own(create3DStage(client, virtual_size_scale));

	// eyes
	for (bool right : { false, true }) {
		pipeline->addStep<OffsetCameraStep>(flipped ? !right : right);
		auto output = pipeline->createOwned<TextureBufferOutput>(buffer, right ? TEXTURE_RIGHT : TEXTURE_LEFT);
		pipeline->addStep<SetRenderTargetStep>(step3D, output);
		pipeline->addStep(step3D);
		pipeline->addStep<MapPostFxStep>();
		pipeline->addStep<DrawHUD>();
	}

	pipeline->addStep<OffsetCameraStep>(0.0f);

	auto screen = pipeline->createOwned<ScreenTarget>();

	for (bool right : { false, true }) {
		auto step = pipeline->addStep<DrawImageStep>(
			right ? TEXTURE_RIGHT : TEXTURE_LEFT,
			right ? offset : v2f());
		step->setRenderSource(buffer);
		step->setRenderTarget(screen);
	}
}
|
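The same step3D instance is scheduled once per eye; the SetRenderTargetStep queued just before it re-points its output to that eye's texture. Its body is not part of this excerpt, but a sketch consistent with the declaration in pipeline.h would be:

// Sketch only: the actual definition lives in pipeline.cpp, which is not shown here.
SetRenderTargetStep::SetRenderTargetStep(RenderStep *step, RenderTarget *target) :
	step(step), target(target)
{
}

void SetRenderTargetStep::run(PipelineContext &context)
{
	step->setRenderTarget(target);
}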
@ -21,23 +21,21 @@ with this program; if not, write to the Free Software Foundation, Inc.,
#pragma once
#include "stereo.h"

class DrawImageStep : public RenderStep
{
public:
	DrawImageStep(u8 texture_index, v2f offset);

	void setRenderSource(RenderSource *source) override;
	void setRenderTarget(RenderTarget *target) override;

	void reset(PipelineContext &context) override {}
	void run(PipelineContext &context) override;
private:
	u8 texture_index;
	v2f offset;
	RenderSource *source;
	RenderTarget *target;
};

void populateSideBySidePipeline(RenderPipeline *pipeline, Client *client, bool horizontal, bool flipped, v2f &virtual_size_scale);
|
@ -19,42 +19,29 @@ with this program; if not, write to the Free Software Foundation, Inc.,
*/

#include "stereo.h"
#include "client/client.h"
#include "client/camera.h"
#include "constants.h"
#include "settings.h"

OffsetCameraStep::OffsetCameraStep(float eye_offset)
{
	move.setTranslation(core::vector3df(eye_offset, 0.0f, 0.0f));
}

OffsetCameraStep::OffsetCameraStep(bool right_eye)
{
	float eye_offset = BS * g_settings->getFloat("3d_paralax_strength", -0.087f, 0.087f) * (right_eye ? 1 : -1);
	move.setTranslation(core::vector3df(eye_offset, 0.0f, 0.0f));
}

void OffsetCameraStep::reset(PipelineContext &context)
{
	base_transform = context.client->getCamera()->getCameraNode()->getRelativeTransformation();
}

void OffsetCameraStep::run(PipelineContext &context)
{
	context.client->getCamera()->getCameraNode()->setPosition((base_transform * move).getTranslation());
}
|
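The per-eye shift is plus or minus BS * 3d_paralax_strength along the camera's local X axis. A standalone sketch of that arithmetic (assumes BS = 10.0f, the engine's block-size constant, and an example setting value):

#include <cstdio>

// Standalone illustration of OffsetCameraStep(bool right_eye).
int main()
{
	const float BS = 10.0f;               // assumed engine constant
	const float paralax_strength = 0.02f; // example value of "3d_paralax_strength"
	for (bool right : { false, true }) {
		float eye_offset = BS * paralax_strength * (right ? 1 : -1);
		std::printf("%s eye offset: %+0.2f\n", right ? "right" : "left", eye_offset);
	}
	return 0;
}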
@ -20,19 +20,22 @@ with this program; if not, write to the Free Software Foundation, Inc.,
#pragma once
#include "core.h"
#include "plain.h"
#include "pipeline.h"

/**
 * Offset camera for a specific eye in stereo rendering mode
 */
class OffsetCameraStep : public TrivialRenderStep
{
public:
	OffsetCameraStep(float eye_offset);
	OffsetCameraStep(bool right_eye);

	void run(PipelineContext &context) override;
	void reset(PipelineContext &context) override;
private:
	core::matrix4 base_transform;
	core::matrix4 move;
};
|
@ -214,13 +214,13 @@ class MainShaderConstantSetter : public IShaderConstantSetter
	CachedVertexShaderSetting<f32, 16> m_world;

	// Shadow-related
	CachedPixelShaderSetting<f32, 16> m_shadow_view_proj;
	CachedPixelShaderSetting<f32, 16, false> m_shadow_view_proj;
	CachedPixelShaderSetting<f32, 3> m_light_direction;
	CachedPixelShaderSetting<f32> m_texture_res;
	CachedPixelShaderSetting<f32> m_shadow_strength;
	CachedPixelShaderSetting<f32> m_time_of_day;
	CachedPixelShaderSetting<f32> m_shadowfar;
	CachedPixelShaderSetting<f32, 4> m_camera_pos;
	CachedPixelShaderSetting<f32, 4, false> m_camera_pos;
	CachedPixelShaderSetting<s32> m_shadow_texture;
	CachedVertexShaderSetting<f32> m_perspective_bias0_vertex;
	CachedPixelShaderSetting<f32> m_perspective_bias0_pixel;

@ -80,7 +80,7 @@ public:
};


template <typename T, std::size_t count=1>
template <typename T, std::size_t count, bool cache>
class CachedShaderSetting {
	const char *m_name;
	T m_sent[count];

@ -93,30 +93,32 @@ protected:
public:
	void set(const T value[count], video::IMaterialRendererServices *services)
	{
		if (has_been_set && std::equal(m_sent, m_sent + count, value))
		if (cache && has_been_set && std::equal(m_sent, m_sent + count, value))
			return;
		if (is_pixel)
			services->setPixelShaderConstant(services->getPixelShaderConstantID(m_name), value, count);
		else
			services->setVertexShaderConstant(services->getVertexShaderConstantID(m_name), value, count);

		std::copy(value, value + count, m_sent);
		has_been_set = true;
		if (cache) {
			std::copy(value, value + count, m_sent);
			has_been_set = true;
		}
	}
};

template <typename T, std::size_t count = 1>
class CachedPixelShaderSetting : public CachedShaderSetting<T, count> {
template <typename T, std::size_t count = 1, bool cache=true>
class CachedPixelShaderSetting : public CachedShaderSetting<T, count, cache> {
public:
	CachedPixelShaderSetting(const char *name) :
		CachedShaderSetting<T, count>(name, true){}
		CachedShaderSetting<T, count, cache>(name, true){}
};

template <typename T, std::size_t count = 1>
class CachedVertexShaderSetting : public CachedShaderSetting<T, count> {
template <typename T, std::size_t count = 1, bool cache=true>
class CachedVertexShaderSetting : public CachedShaderSetting<T, count, cache> {
public:
	CachedVertexShaderSetting(const char *name) :
		CachedShaderSetting<T, count>(name, false){}
		CachedShaderSetting<T, count, cache>(name, false){}
};
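The new third template parameter disables the value cache for uniforms that change every frame, such as m_shadow_view_proj and m_camera_pos above. A hedged sketch of the two declaration styles (the uniform names are illustrative):

	// Cached (default): set() uploads only when the value differs from the last one sent.
	CachedPixelShaderSetting<f32> m_shadow_strength{"f_shadow_strength"};

	// Uncached: set() uploads unconditionally and skips the std::equal comparison.
	CachedPixelShaderSetting<f32, 16, false> m_shadow_view_proj{"m_ShadowViewProj"};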
@ -707,3 +707,22 @@ std::string ShadowRenderer::readShaderFile(const std::string &path)
	return prefix + content;
}

ShadowRenderer *createShadowRenderer(IrrlichtDevice *device, Client *client)
{
	// disable if unsupported
	if (g_settings->getBool("enable_dynamic_shadows") && (
		g_settings->get("video_driver") != "opengl" ||
		!g_settings->getBool("enable_shaders"))) {
		g_settings->setBool("enable_dynamic_shadows", false);
	}

	if (g_settings->getBool("enable_shaders") &&
			g_settings->getBool("enable_dynamic_shadows")) {
		ShadowRenderer *shadow_renderer = new ShadowRenderer(device, client);
		shadow_renderer->initialize();
		return shadow_renderer;
	}

	return nullptr;
}
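A hedged sketch of how a caller would use this factory (illustrative; the surrounding render-core wiring is not part of this excerpt):

	// Callers must handle nullptr, as DrawHUD and RenderShadowMapStep do via
	// the context.shadow_renderer check.
	ShadowRenderer *shadow_renderer = createShadowRenderer(device, client);
	if (shadow_renderer)
		shadow_renderer->update(); // refresh the shadow map for the current frame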
@ -160,3 +160,12 @@ private:
	shadowScreenQuad *m_screen_quad{nullptr};
	shadowScreenQuadCB *m_shadow_mix_cb{nullptr};
};

/**
 * @brief Create a shadow renderer if settings allow this.
 *
 * @param device Device to be used to render shadows.
 * @param client Reference to the client context.
 * @return A new ShadowRenderer instance or nullptr if shadows are disabled or not supported.
 */
ShadowRenderer *createShadowRenderer(IrrlichtDevice *device, Client *client);