Simplify example 27 slightly.

Also use tabs instead of spaces (as usual in Irrlicht)


git-svn-id: svn://svn.code.sf.net/p/irrlicht/code/trunk@6408 dfc29bdd-3216-0410-991c-e03cc46cb475
This commit is contained in:
cutealien 2022-06-20 19:57:35 +00:00
parent d70d96031b
commit 338af5c0ea

View File

@@ -1,16 +1,16 @@
/** Example 027 Post Processing /** Example 027 Post Processing
This tutorial shows how to implement post processing for D3D9 and OpenGL with This tutorial shows how to implement post processing for D3D9 and OpenGL with
the engine. In order to do post processing, scene objects are firstly rendered the engine. In order to do post processing, scene objects are firstly rendered
to render target. With the help of screen quad, the render target texture to render target. With the help of screen quad, the render target texture
is then drawn on the quad with shader-defined effects applied. is then drawn on the quad with shader-defined effects applied.
This tutorial shows how to create a screen quad. It also shows how to create a This tutorial shows how to create a screen quad. It also shows how to create a
render target texture and associate it with the quad. Effects are defined as render target texture and associate it with the quad. Effects are defined as
shaders which are applied during rendering the quad with the render target shaders which are applied during rendering the quad with the render target
texture attached to it. texture attached to it.
A simple color inverse example is presented in this tutorial. The effect is A simple color inverse example is presented in this tutorial. The effect is
written in HLSL and GLSL. written in HLSL and GLSL.
@author Boshen Guan @author Boshen Guan
@@ -29,46 +29,42 @@ using namespace irr;
/* /*
We write a class derived from IShaderConstantSetCallBack class and implement We write a class derived from IShaderConstantSetCallBack class and implement
OnSetConstants callback interface. In this callback, we will set constants OnSetConstants callback interface. In this callback, we will set constants
used by the shader. used by the shader.
In this example, our HLSL shader needs texture size as input in its vertex In this example, our HLSL shader needs texture size as input in its vertex
shader. Therefore, we set texture size in OnSetConstants callback using shader. Therefore, we set texture size in OnSetConstants callback using
setVertexShaderConstant function. setVertexShaderConstant function.
*/ */
IrrlichtDevice* device = 0;
video::ITexture* rt = 0;
class QuadShaderCallBack : public video::IShaderConstantSetCallBack class QuadShaderCallBack : public video::IShaderConstantSetCallBack
{ {
public: public:
QuadShaderCallBack() : FirstUpdate(true), TextureSizeID(-1), TextureSamplerID(-1) QuadShaderCallBack() : FirstUpdate(true), TextureSizeID(-1), TextureSamplerID(-1)
{ } { }
virtual void OnSetConstants(video::IMaterialRendererServices* services, virtual void OnSetConstants(video::IMaterialRendererServices* services,
s32 userData) s32 userData)
{ {
core::dimension2d<u32> size = rt->getSize();
// get texture size array
f32 textureSize[] =
{
(f32)size.Width, (f32)size.Height
};
if ( FirstUpdate ) if ( FirstUpdate )
{ {
FirstUpdate = false;
TextureSizeID = services->getVertexShaderConstantID("TextureSize"); TextureSizeID = services->getVertexShaderConstantID("TextureSize");
TextureSamplerID = services->getPixelShaderConstantID("TextureSampler"); TextureSamplerID = services->getPixelShaderConstantID("TextureSampler");
} }
// set texture size to vertex shader // get texture size array (for our simple example HLSL just needs that to calculate pixel centers)
services->setVertexShaderConstant(TextureSizeID, reinterpret_cast<f32*>(textureSize), 2); core::dimension2d<u32> size = services->getVideoDriver()->getCurrentRenderTargetSize();
f32 textureSize[2];
// set texture for an OpenGL driver textureSize[0] = (f32)size.Width;
s32 textureLayer = 0; textureSize[1] = (f32)size.Height;
services->setPixelShaderConstant(TextureSamplerID, &textureLayer, 1);
} // set texture size to vertex shader
services->setVertexShaderConstant(TextureSizeID, textureSize, 2);
// set texture for an OpenGL driver
s32 textureLayer = 0;
services->setPixelShaderConstant(TextureSamplerID, &textureLayer, 1);
}
private: private:
bool FirstUpdate; bool FirstUpdate;
@@ -80,102 +76,88 @@ class ScreenQuad : public IReferenceCounted
{ {
public: public:
ScreenQuad(video::IVideoDriver* driver) ScreenQuad(video::IVideoDriver* driver)
: Driver(driver) : Driver(driver)
{ {
// --------------------------------> u // --------------------------------> u
// |[1](-1, 1)----------[2](1, 1) // |[1](-1, 1)----------[2](1, 1)
// | | ( 0, 0) / | (1, 0) // | | ( 0, 0) / | (1, 0)
// | | / | // | | / |
// | | / | // | | / |
// | | / | // | | / |
// | | / | // | | / |
// | | / | // | | / |
// | | / | // | | / |
// | | / | // | | / |
// | | / | // | | / |
// |[0](-1, -1)---------[3](1, -1) // |[0](-1, -1)---------[3](1, -1)
// | ( 0, 1) (1, 1) // | ( 0, 1) (1, 1)
// V // V
// v // v
/* /*
A screen quad is composed of two adjacent triangles with 4 vertices. A screen quad is composed of two adjacent triangles with 4 vertices.
Vertex [0], [1] and [2] create the first triangle and Vertex [0], Vertex [0], [1] and [2] create the first triangle and Vertex [0],
[2] and [3] create the second one. To map texture on the quad, UV [2] and [3] create the second one. To map texture on the quad, UV
coordinates are assigned to the vertices. The origin of UV coordinate coordinates are assigned to the vertices. The origin of UV coordinate
locates on the top-left corner. And the value of UVs range from 0 to 1. locates on the top-left corner. And the value of UVs range from 0 to 1.
*/ */
// define vertices array // define vertices array
Vertices[0] = irr::video::S3DVertex(-1.0f, -1.0f, 0.0f, 1, 1, 0, irr::video::SColor(0,255,255,255), 0.0f, 1.0f); Vertices[0] = irr::video::S3DVertex(-1.0f, -1.0f, 0.0f, 1, 1, 0, irr::video::SColor(0,255,255,255), 0.0f, 1.0f);
Vertices[1] = irr::video::S3DVertex(-1.0f, 1.0f, 0.0f, 1, 1, 0, irr::video::SColor(0,255,255,255), 0.0f, 0.0f); Vertices[1] = irr::video::S3DVertex(-1.0f, 1.0f, 0.0f, 1, 1, 0, irr::video::SColor(0,255,255,255), 0.0f, 0.0f);
Vertices[2] = irr::video::S3DVertex( 1.0f, 1.0f, 0.0f, 1, 1, 0, irr::video::SColor(0,255,255,255), 1.0f, 0.0f); Vertices[2] = irr::video::S3DVertex( 1.0f, 1.0f, 0.0f, 1, 1, 0, irr::video::SColor(0,255,255,255), 1.0f, 0.0f);
Vertices[3] = irr::video::S3DVertex( 1.0f, -1.0f, 0.0f, 1, 1, 0, irr::video::SColor(0,255,255,255), 1.0f, 1.0f); Vertices[3] = irr::video::S3DVertex( 1.0f, -1.0f, 0.0f, 1, 1, 0, irr::video::SColor(0,255,255,255), 1.0f, 1.0f);
// define indices for triangles // define indices for triangles
Indices[0] = 0; Indices[0] = 0;
Indices[1] = 1; Indices[1] = 1;
Indices[2] = 2; Indices[2] = 2;
Indices[3] = 0; Indices[3] = 0;
Indices[4] = 2; Indices[4] = 2;
Indices[5] = 3; Indices[5] = 3;
// turn off lighting as default // turn off lighting as default
Material.setFlag(video::EMF_LIGHTING, false); Material.setFlag(video::EMF_LIGHTING, false);
// set texture warp settings to clamp to edge pixel // set texture warp settings to clamp to edge pixel
for (u32 i = 0; i < video::MATERIAL_MAX_TEXTURES; i++) for (u32 i = 0; i < video::MATERIAL_MAX_TEXTURES; i++)
{ {
Material.TextureLayer[i].TextureWrapU = video::ETC_CLAMP_TO_EDGE; Material.TextureLayer[i].TextureWrapU = video::ETC_CLAMP_TO_EDGE;
Material.TextureLayer[i].TextureWrapV = video::ETC_CLAMP_TO_EDGE; Material.TextureLayer[i].TextureWrapV = video::ETC_CLAMP_TO_EDGE;
} }
} }
virtual ~ScreenQuad() {} virtual ~ScreenQuad() {}
//! render the screen quad //! render the screen quad
virtual void render() virtual void render()
{ {
// set the material of screen quad // set the material of screen quad
Driver->setMaterial(Material); Driver->setMaterial(Material);
// set matrices to fit the quad to full viewport // set world matrix to fit the quad to full viewport (we only use ETS_WORLD in the shader, so view, projection currently don't matter)
Driver->setTransform(video::ETS_WORLD, core::IdentityMatrix); Driver->setTransform(video::ETS_WORLD, core::IdentityMatrix);
Driver->setTransform(video::ETS_VIEW, core::IdentityMatrix);
Driver->setTransform(video::ETS_PROJECTION, core::IdentityMatrix);
// draw screen quad // draw screen quad
Driver->drawVertexPrimitiveList(Vertices, 4, Indices, 2); Driver->drawVertexPrimitiveList(Vertices, 4, Indices, 2);
} }
//! sets a flag of material to a new value //! Access the material
virtual void setMaterialFlag(video::E_MATERIAL_FLAG flag, bool newvalue) virtual video::SMaterial& getMaterial()
{ {
Material.setFlag(flag, newvalue); return Material;
} }
//! sets the texture of the specified layer in material to the new texture.
void setMaterialTexture(u32 textureLayer, video::ITexture* texture)
{
Material.setTexture(textureLayer, texture);
}
//! sets the material type to a new material type.
virtual void setMaterialType(video::E_MATERIAL_TYPE newType)
{
Material.MaterialType = newType;
}
private: private:
video::IVideoDriver *Driver; video::IVideoDriver *Driver;
video::S3DVertex Vertices[4]; video::S3DVertex Vertices[4];
u16 Indices[6]; u16 Indices[6];
video::SMaterial Material; video::SMaterial Material;
}; };
/* /*
@@ -184,233 +166,234 @@ according to the driver type.
*/ */
int main() int main()
{ {
// ask user for driver // ask user for driver
video::E_DRIVER_TYPE driverType=driverChoiceConsole(); video::E_DRIVER_TYPE driverType=driverChoiceConsole();
if (driverType==video::EDT_COUNT) if (driverType==video::EDT_COUNT)
return 1; return 1;
// create device // create device
device = createDevice(driverType, core::dimension2d<u32>(640, 480)); IrrlichtDevice* device = createDevice(driverType, core::dimension2d<u32>(640, 480));
if (device == 0) if (device == 0)
return 1; // could not create selected driver. return 1; // could not create selected driver.
video::IVideoDriver* driver = device->getVideoDriver(); video::IVideoDriver* driver = device->getVideoDriver();
scene::ISceneManager* smgr = device->getSceneManager(); scene::ISceneManager* smgr = device->getSceneManager();
/* /*
In this example, high level post processing shaders are loaded for both In this example, high level post processing shaders are loaded for both
Direct3D and OpenGL drivers. Direct3D and OpenGL drivers.
File pp_d3d9.hlsl is for Direct3D 9, and pp_opengl.frag/pp_opengl.vert File pp_d3d9.hlsl is for Direct3D 9, and pp_opengl.frag/pp_opengl.vert
are for OpenGL. are for OpenGL.
*/ */
const io::path mediaPath = getExampleMediaPath(); const io::path mediaPath = getExampleMediaPath();
io::path vsFileName; // filename for the vertex shader io::path vsFileName; // filename for the vertex shader
io::path psFileName; // filename for the pixel shader io::path psFileName; // filename for the pixel shader
switch(driverType) switch(driverType)
{ {
case video::EDT_DIRECT3D9: case video::EDT_DIRECT3D9:
psFileName = mediaPath + "pp_d3d9.hlsl"; psFileName = mediaPath + "pp_d3d9.hlsl";
vsFileName = psFileName; // both shaders are in the same file vsFileName = psFileName; // both shaders are in the same file
break; break;
case video::EDT_OPENGL: case video::EDT_OPENGL:
case video::EDT_BURNINGSVIDEO: case video::EDT_BURNINGSVIDEO:
psFileName = mediaPath + "pp_opengl.frag"; psFileName = mediaPath + "pp_opengl.frag";
vsFileName = mediaPath + "pp_opengl.vert"; vsFileName = mediaPath + "pp_opengl.vert";
break; break;
} }
/* /*
Check for hardware capability of executing the corresponding shaders Check for hardware capability of executing the corresponding shaders
on selected renderer. This is not necessary though. on selected renderer. This is not necessary though.
*/ */
if (!driver->queryFeature(video::EVDF_PIXEL_SHADER_1_1) && if (!driver->queryFeature(video::EVDF_PIXEL_SHADER_1_1) &&
!driver->queryFeature(video::EVDF_ARB_FRAGMENT_PROGRAM_1)) !driver->queryFeature(video::EVDF_ARB_FRAGMENT_PROGRAM_1))
{ {
device->getLogger()->log("WARNING: Pixel shaders disabled "\ device->getLogger()->log("WARNING: Pixel shaders disabled "\
"because of missing driver/hardware support."); "because of missing driver/hardware support.");
psFileName = ""; psFileName = "";
} }
if (!driver->queryFeature(video::EVDF_VERTEX_SHADER_1_1) && if (!driver->queryFeature(video::EVDF_VERTEX_SHADER_1_1) &&
!driver->queryFeature(video::EVDF_ARB_VERTEX_PROGRAM_1)) !driver->queryFeature(video::EVDF_ARB_VERTEX_PROGRAM_1))
{ {
device->getLogger()->log("WARNING: Vertex shaders disabled "\ device->getLogger()->log("WARNING: Vertex shaders disabled "\
"because of missing driver/hardware support."); "because of missing driver/hardware support.");
vsFileName = ""; vsFileName = "";
} }
/* /*
An animated mesh is loaded to be displayed. As in most examples, An animated mesh is loaded to be displayed. As in most examples,
we'll take the fairy md2 model. we'll take the fairy md2 model.
*/ */
// load and display animated fairy mesh // load and display animated fairy mesh
scene::IAnimatedMeshSceneNode* fairy = smgr->addAnimatedMeshSceneNode( scene::IAnimatedMeshSceneNode* fairy = smgr->addAnimatedMeshSceneNode(
smgr->getMesh(mediaPath + "faerie.md2")); smgr->getMesh(mediaPath + "faerie.md2"));
if (fairy) if (fairy)
{ {
fairy->setMaterialTexture(0, fairy->setMaterialTexture(0,
driver->getTexture(mediaPath + "faerie2.bmp")); // set diffuse texture driver->getTexture(mediaPath + "faerie2.bmp")); // set diffuse texture
fairy->setMaterialFlag(video::EMF_LIGHTING, false); // disable dynamic lighting fairy->setMaterialFlag(video::EMF_LIGHTING, false); // disable dynamic lighting
fairy->setPosition(core::vector3df(-10,0,-100)); fairy->setPosition(core::vector3df(-10,0,-100));
fairy->setMD2Animation ( scene::EMAT_STAND ); fairy->setMD2Animation ( scene::EMAT_STAND );
} }
// add scene camera // add scene camera
smgr->addCameraSceneNode(0, core::vector3df(10,10,-80), smgr->addCameraSceneNode(0, core::vector3df(10,10,-80),
core::vector3df(-10,10,-100)); core::vector3df(-10,10,-100));
/* /*
We create a render target texture (RTT) with the same size as frame buffer. We create a render target texture (RTT) with the same size as frame buffer.
Instead of rendering the scene directly to the frame buffer, we firstly Instead of rendering the scene directly to the frame buffer, we firstly
render it to this RTT. Post processing is then applied based on this RTT. render it to this RTT. Post processing is then applied based on this RTT.
RTT size needs not to be the same with frame buffer though. However in this RTT size needs not to be the same with frame buffer though. However in this
example, we expect the result of rendering to RTT to be consistent with the example, we expect the result of rendering to RTT to be consistent with the
result of rendering directly to the frame buffer. Therefore, the size of result of rendering directly to the frame buffer. Therefore, the size of
RTT keeps the same with frame buffer. RTT keeps the same with frame buffer.
*/ */
// create render target // create render target
video::ITexture* rt = 0;
if (driver->queryFeature(video::EVDF_RENDER_TO_TARGET))
{
rt = driver->addRenderTargetTexture(core::dimension2d<u32>(640, 480), "RTT1");
}
else
{
device->getLogger()->log("Your hardware or this renderer is not able to use the "\
"render to texture feature. RTT Disabled.");
}
if (driver->queryFeature(video::EVDF_RENDER_TO_TARGET)) /*
{ Post processing is achieved by rendering a screen quad with this RTT (with
rt = driver->addRenderTargetTexture(core::dimension2d<u32>(640, 480), "RTT1"); previously rendered result) as a texture on the quad. A screen quad is
} geometry of flat plane composed of two adjacent triangles covering the
else entire area of viewport. In this pass of rendering, RTT works just like
{ a normal texture and is drawn on the quad during rendering. We can then
device->getLogger()->log("Your hardware or this renderer is not able to use the "\ take control of this rendering process by applying various shader-defined
"render to texture feature. RTT Disabled."); materials to the quad. In other words, we can achieve different effect by
} writing different shaders.
This process is called post processing because it normally does not rely
on scene geometry. The inputs of this process are just textures, or in
other words, just images. With the help of screen quad, we can draw these
images on the screen with different effects. For example, we can adjust
contrast, make grayscale, add noise, do more fancy effect such as blur,
bloom, ghost, or just like in this example, we invert the color to produce
negative image.
Note that post processing is not limited to use only one texture. It can
take multiple textures as shader inputs to provide desired result. In
addition, post processing can also be chained to produce compound result.
*/
/* // we create a screen quad
Post processing is achieved by rendering a screen quad with this RTT (with ScreenQuad *screenQuad = new ScreenQuad(driver);
previously rendered result) as a texture on the quad. A screen quad is video::SMaterial& screenQuadMaterial = screenQuad->getMaterial();
geometry of flat plane composed of two adjacent triangles covering the
entire area of viewport. In this pass of rendering, RTT works just like
a normal texture and is drawn on the quad during rendering. We can then
take control of this rendering process by applying various shader-defined
materials to the quad. In other words, we can achieve different effect by
writing different shaders.
This process is called post processing because it normally does not rely
on scene geometry. The inputs of this process are just textures, or in
other words, just images. With the help of screen quad, we can draw these
images on the screen with different effects. For example, we can adjust
contrast, make grayscale, add noise, do more fancy effect such as blur,
bloom, ghost, or just like in this example, we invert the color to produce
negative image.
Note that post processing is not limited to use only one texture. It can
take multiple textures as shader inputs to provide desired result. In
addition, post processing can also be chained to produce compound result.
*/
// we create a screen quad // turn off mip maps and bilinear filter since we do not want interpolated results
ScreenQuad *screenQuad = new ScreenQuad(driver); screenQuadMaterial.setFlag(video::EMF_USE_MIP_MAPS, false);
screenQuadMaterial.setFlag(video::EMF_BILINEAR_FILTER, false);
// turn off mip maps and bilinear filter since we do not want interpolated result // set quad texture to RTT we just create
screenQuad->setMaterialFlag(video::EMF_USE_MIP_MAPS, false); screenQuadMaterial.setTexture(0, rt);
screenQuad->setMaterialFlag(video::EMF_BILINEAR_FILTER, false);
// set quad texture to RTT we just create /*
screenQuad->setMaterialTexture(0, rt); Let's create material for the quad. Like in other example, we create material
using IGPUProgrammingServices and call addShaderMaterialFromFiles, which
returns a material type identifier.
*/
/* // create materials
Let's create material for the quad. Like in other example, we create material
using IGPUProgrammingServices and call addShaderMaterialFromFiles, which
returns a material type identifier.
*/
// create materials video::IGPUProgrammingServices* gpu = driver->getGPUProgrammingServices();
s32 ppMaterialType = 0;
video::IGPUProgrammingServices* gpu = driver->getGPUProgrammingServices(); if (gpu)
s32 ppMaterialType = 0; {
// We write a QuadShaderCallBack class that implements OnSetConstants
// callback of IShaderConstantSetCallBack class at the beginning of
// this tutorial. We set shader constants in this callback.
if (gpu) // create an instance of callback class
{
// We write a QuadShaderCallBack class that implements OnSetConstants
// callback of IShaderConstantSetCallBack class at the beginning of
// this tutorial. We set shader constants in this callback.
// create an instance of callback class
QuadShaderCallBack* mc = new QuadShaderCallBack(); QuadShaderCallBack* mc = new QuadShaderCallBack();
// create material from post processing shaders // create material from post processing shaders
ppMaterialType = gpu->addHighLevelShaderMaterialFromFiles( ppMaterialType = gpu->addHighLevelShaderMaterialFromFiles(
vsFileName, "vertexMain", video::EVST_VS_1_1, vsFileName, "vertexMain", video::EVST_VS_1_1,
psFileName, "pixelMain", video::EPST_PS_1_1, mc); psFileName, "pixelMain", video::EPST_PS_1_1, mc);
mc->drop(); mc->drop();
} }
// set post processing material type to the quad // set post processing material type to the quad
screenQuad->setMaterialType((video::E_MATERIAL_TYPE)ppMaterialType); screenQuadMaterial.MaterialType = (video::E_MATERIAL_TYPE)ppMaterialType;
/* /*
Now draw everything. That's all. Now draw everything. That's all.
*/ */
int lastFPS = -1; int lastFPS = -1;
while(device->run()) while(device->run())
{ {
if (device->isWindowActive()) if (device->isWindowActive())
{ {
driver->beginScene(true, true, video::SColor(255,0,0,0)); driver->beginScene(true, true, video::SColor(255,0,0,0));
if (rt) if (rt)
{ {
// draw scene into render target // draw scene into render target
// set render target to RTT // set render target to RTT
driver->setRenderTarget(rt, true, true, video::SColor(255,0,0,0)); driver->setRenderTarget(rt, true, true, video::SColor(255,0,0,0));
// draw scene to RTT just like normal rendering // draw scene to RTT just like normal rendering
smgr->drawAll(); smgr->drawAll();
// after rendering to RTT, we change render target back // after rendering to RTT, we change render target back
driver->setRenderTarget(0, true, true, video::SColor(255,0,0,0)); driver->setRenderTarget(0, true, true, video::SColor(255,0,0,0));
// render screen quad to apply post processing // render screen quad to apply post processing
screenQuad->render(); screenQuad->render();
} }
else else
{ {
// draw scene normally // draw scene normally
smgr->drawAll(); smgr->drawAll();
} }
driver->endScene(); driver->endScene();
int fps = driver->getFPS(); int fps = driver->getFPS();
if (lastFPS != fps) if (lastFPS != fps)
{ {
core::stringw str = L"Irrlicht Engine - Post processing example ["; core::stringw str = L"Irrlicht Engine - Post processing example [";
str += driver->getName(); str += driver->getName();
str += "] FPS:"; str += "] FPS:";
str += fps; str += fps;
device->setWindowCaption(str.c_str()); device->setWindowCaption(str.c_str());
lastFPS = fps; lastFPS = fps;
} }
} }
} }
// do not forget to manually drop the screen quad // do not forget to manually drop the screen quad
screenQuad->drop(); screenQuad->drop();
device->drop(); device->drop();
return 0; return 0;
} }
/* /*