@jasonbeverage
Created November 18, 2016 15:38
Test of queued RTT rendering in osgearth.
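/*
 * Queued render-to-texture (RTT) test. makeCell() builds a 50x50 grid of unit
 * quads, each with an empty 256x256 texture that is pushed onto a global queue.
 * Every frame, the main loop removes the previous frame's pre-render cameras,
 * then pops up to numCaptureCameras textures from the queue and attaches each
 * one to a fresh FBO PRE_RENDER camera that renders the loaded model into it.
 * The PreDrawCallback/PostDrawCallback pair sketches an alternative
 * single-camera approach and is not installed.
 */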
#include <osgDB/ReadFile>
#include <osgUtil/Optimizer>
#include <osg/CoordinateSystemNode>
#include <osg/Switch>
#include <osg/Types>
#include <osg/Geode>
#include <osg/Geometry>
#include <osg/Texture2D>
#include <osgText/Text>
#include <osgViewer/Viewer>
#include <osgViewer/ViewerEventHandlers>
#include <osgGA/TrackballManipulator>
#include <osgGA/FlightManipulator>
#include <osgGA/DriveManipulator>
#include <osgGA/KeySwitchMatrixManipulator>
#include <osgGA/StateSetManipulator>
#include <osgGA/AnimationPathManipulator>
#include <osgGA/TerrainManipulator>
#include <osgGA/SphericalManipulator>
#include <osgGA/OrbitManipulator>
#include <osgGA/FirstPersonManipulator>
#include <osgGA/Device>
#include <iostream>
#include <queue>
#include <sstream>
/*
osg::Camera* createRTTCamera(osg::Node* node, osg::Texture2D* texture)
{
    osg::Camera* camera = new osg::Camera;
    camera->setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // set view
    camera->setReferenceFrame(osg::Transform::ABSOLUTE_RF);

    // set viewport
    camera->setViewport(0, 0, texture->getTextureWidth(), texture->getTextureHeight());

    // set the camera to render before the main camera.
    camera->setRenderOrder(osg::Camera::PRE_RENDER);

    // tell the camera to use OpenGL frame buffer object where supported.
    camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);

    // attach the texture and use it as the color buffer. This is just for testing and won't do read back into an image.
    camera->attach( osg::Camera::COLOR_BUFFER, texture, 0, 0, false );

    const osg::BoundingSphere& bs = node->getBound();
    float znear = 1.0f * bs.radius();
    float zfar  = 3.0f * bs.radius();

    // 2:1 aspect ratio as per flag geometry below.
    float proj_top   = 0.25f * znear;
    float proj_right = 0.5f  * znear;

    znear *= 0.9f;
    zfar  *= 1.1f;

    // set up projection.
    camera->setProjectionMatrixAsFrustum(-proj_right, proj_right, -proj_top, proj_top, znear, zfar);

    // set view
    camera->setReferenceFrame(osg::Transform::ABSOLUTE_RF);
    camera->setViewMatrixAsLookAt(bs.center() - osg::Vec3(0.0f, 2.0f, 0.0f) * bs.radius(), bs.center(), osg::Vec3(0.0f, 0.0f, 1.0f));

    camera->addChild( node );
    return camera;
}
*/
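// Builds a PRE_RENDER FBO camera framing the node's bounding sphere. Unlike the
// commented-out variant above, the viewport and color-buffer texture are not set
// here; they are attached later, once a texture has been pulled off the queue.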
osg::Camera* createRTTCamera(osg::Node* node)
{
    osg::Camera* camera = new osg::Camera;
    camera->setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // render before the main camera, into an OpenGL frame buffer object where supported.
    camera->setRenderOrder(osg::Camera::PRE_RENDER);
    camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);

    // viewport and color-buffer attachment are deferred until a queued texture is available:
    //camera->setViewport(0, 0, texture->getTextureWidth(), texture->getTextureHeight());
    //camera->attach( osg::Camera::COLOR_BUFFER, texture, 0, 0, false );

    const osg::BoundingSphere& bs = node->getBound();
    float znear = 1.0f * bs.radius();
    float zfar  = 3.0f * bs.radius();

    // 2:1 aspect ratio.
    float proj_top   = 0.25f * znear;
    float proj_right = 0.5f  * znear;

    znear *= 0.9f;
    zfar  *= 1.1f;

    // set up the projection and view to look at the node's bounding sphere.
    camera->setProjectionMatrixAsFrustum(-proj_right, proj_right, -proj_top, proj_top, znear, zfar);
    camera->setReferenceFrame(osg::Transform::ABSOLUTE_RF);
    camera->setViewMatrixAsLookAt(bs.center() - osg::Vec3(0.0f, 2.0f, 0.0f) * bs.radius(), bs.center(), osg::Vec3(0.0f, 0.0f, 1.0f));

    camera->addChild( node );
    return camera;
}
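// Textures waiting for an RTT pass. makeCell() pushes one empty texture per
// grid cell; the main loop pops them and attaches each to a pre-render camera.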
std::queue< osg::ref_ptr< osg::Texture2D > > textureQueue;
osg::Node* makeCell(int x, int y)
{
    // A unit quad in the XZ plane at grid position (x, y).
    osg::Geometry* geometry = new osg::Geometry;

    osg::Vec3Array* verts = new osg::Vec3Array;
    verts->push_back(osg::Vec3(x, 0, y));
    verts->push_back(osg::Vec3(x + 1.0, 0, y));
    verts->push_back(osg::Vec3(x + 1.0, 0, y + 1.0));
    verts->push_back(osg::Vec3(x, 0, y + 1.0));
    geometry->setVertexArray(verts);

    osg::Vec4ubArray* colors = new osg::Vec4ubArray;
    colors->push_back(osg::Vec4ub(255, 255, 255, 255));
    geometry->setColorArray(colors, osg::Array::BIND_OVERALL);

    osg::Vec2Array* texCoords = new osg::Vec2Array;
    texCoords->push_back(osg::Vec2f(0.0f, 0.0f));
    texCoords->push_back(osg::Vec2f(1.0f, 0.0f));
    texCoords->push_back(osg::Vec2f(1.0f, 1.0f));
    texCoords->push_back(osg::Vec2f(0.0f, 1.0f));
    geometry->setTexCoordArray(0, texCoords);

    // An empty 256x256 RGBA texture; it is filled later by a pre-render camera.
    osg::Texture2D* texture = new osg::Texture2D();
    texture->setInternalFormat(GL_RGBA);
    texture->setSourceFormat( GL_RGBA );
    texture->setSourceType( GL_UNSIGNED_BYTE );
    texture->setResizeNonPowerOfTwoHint( false );
    texture->setTextureSize( 256, 256 );
    texture->setFilter(osg::Texture2D::MIN_FILTER, osg::Texture::LINEAR);
    texture->setFilter(osg::Texture2D::MAG_FILTER, osg::Texture::LINEAR);

    std::stringstream buf;
    buf << "texture " << x << "x" << y;
    texture->setName(buf.str());

    // queue the texture for a render-to-texture pass.
    textureQueue.push( texture );

    geometry->getOrCreateStateSet()->setTextureAttributeAndModes(0, texture, osg::StateAttribute::ON);
    geometry->setUseVertexBufferObjects(true);
    geometry->setUseDisplayList(false);
    geometry->addPrimitiveSet(new osg::DrawArrays(GL_QUADS, 0, verts->size()));

    osg::Geode* geode = new osg::Geode;
    geode->addDrawable( geometry );
    return geode;
}
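// Alternative approach: attach queued textures from a pre-draw callback on a
// single reusable RTT camera instead of rebuilding cameras every frame. These
// callbacks are kept for reference but are not installed in main() below.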
struct PreDrawCallback : public osg::Camera::DrawCallback
{
    PreDrawCallback()
    {
    }

    virtual void operator () (osg::RenderInfo& renderInfo) const
    {
        if (!textureQueue.empty())
        {
            osg::ref_ptr< osg::Texture2D > texture = textureQueue.front();
            if (texture.valid())
            {
                OSG_NOTICE << "Processing " << texture->getName() << std::endl;

                // set viewport
                renderInfo.getCurrentCamera()->setViewport(0, 0, texture->getTextureWidth(), texture->getTextureHeight());

                // attach the texture and use it as the color buffer. This is just for testing and won't do read back into an image.
                renderInfo.getCurrentCamera()->attach( osg::Camera::COLOR_BUFFER, texture, 0, 0, false );

                OSG_NOTICE << textureQueue.size() << " remaining..." << std::endl;
                textureQueue.pop();
            }
        }
    }
};
struct PostDrawCallback : public osg::Camera::DrawCallback
{
    PostDrawCallback()
    {
    }

    virtual void operator () (osg::RenderInfo& renderInfo) const
    {
        //renderInfo.getCurrentCamera()->detach(osg::Camera::COLOR_BUFFER);
    }
};
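// If the rendered result were needed on the CPU, the color buffer could also be
// attached to an osg::Image rather than only a texture. A minimal sketch, not
// used by this test ('capture' stands in for any of the RTT cameras below):
//
//   osg::ref_ptr<osg::Image> image = new osg::Image;
//   image->allocateImage(256, 256, 1, GL_RGBA, GL_UNSIGNED_BYTE);
//   capture->attach(osg::Camera::COLOR_BUFFER, image.get());
//   // after the frame completes, 'image' holds the rendered pixels.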
int main(int argc, char** argv)
{
    // use an ArgumentParser object to manage the program arguments.
    osg::ArgumentParser arguments(&argc, argv);

    osgViewer::Viewer viewer(arguments);

    // set up the camera manipulators.
    {
        osg::ref_ptr<osgGA::KeySwitchMatrixManipulator> keyswitchManipulator = new osgGA::KeySwitchMatrixManipulator;

        keyswitchManipulator->addMatrixManipulator( '1', "Trackball", new osgGA::TrackballManipulator() );
        keyswitchManipulator->addMatrixManipulator( '2', "Flight", new osgGA::FlightManipulator() );
        keyswitchManipulator->addMatrixManipulator( '3', "Drive", new osgGA::DriveManipulator() );
        keyswitchManipulator->addMatrixManipulator( '4', "Terrain", new osgGA::TerrainManipulator() );
        keyswitchManipulator->addMatrixManipulator( '5', "Orbit", new osgGA::OrbitManipulator() );
        keyswitchManipulator->addMatrixManipulator( '6', "FirstPerson", new osgGA::FirstPersonManipulator() );
        keyswitchManipulator->addMatrixManipulator( '7', "Spherical", new osgGA::SphericalManipulator() );

        std::string pathfile;
        double animationSpeed = 1.0;
        while (arguments.read("--speed", animationSpeed)) {}

        char keyForAnimationPath = '8';
        while (arguments.read("-p", pathfile))
        {
            osgGA::AnimationPathManipulator* apm = new osgGA::AnimationPathManipulator(pathfile);
            if (apm && apm->valid())
            {
                apm->setTimeScale(animationSpeed);

                unsigned int num = keyswitchManipulator->getNumMatrixManipulators();
                keyswitchManipulator->addMatrixManipulator( keyForAnimationPath, "Path", apm );
                keyswitchManipulator->selectMatrixManipulator(num);
                ++keyForAnimationPath;
            }
        }

        viewer.setCameraManipulator( keyswitchManipulator.get() );
    }
    // add the state manipulator
    viewer.addEventHandler( new osgGA::StateSetManipulator(viewer.getCamera()->getOrCreateStateSet()) );

    // add the thread model handler
    viewer.addEventHandler(new osgViewer::ThreadingHandler);

    // add the window size toggle handler
    viewer.addEventHandler(new osgViewer::WindowSizeHandler);

    // add the stats handler
    viewer.addEventHandler(new osgViewer::StatsHandler);

    // add the help handler
    viewer.addEventHandler(new osgViewer::HelpHandler(arguments.getApplicationUsage()));

    // add the record camera path handler
    viewer.addEventHandler(new osgViewer::RecordCameraPathHandler);

    // add the LOD Scale handler
    viewer.addEventHandler(new osgViewer::LODScaleHandler);

    // add the screen capture handler
    viewer.addEventHandler(new osgViewer::ScreenCaptureHandler);

    // load the data
    osg::ref_ptr<osg::Node> loadedModel = osgDB::readNodeFiles(arguments);
    if (!loadedModel)
    {
        std::cout << arguments.getApplicationName() << ": No data loaded" << std::endl;
        return 1;
    }

    // any options left unread are converted into errors to write out later.
    arguments.reportRemainingOptionsAsUnrecognized();

    // report any errors if they have occurred when parsing the program arguments.
    if (arguments.errors())
    {
        arguments.writeErrorMessages(std::cout);
        return 1;
    }
    osg::Group* root = new osg::Group;
    root->getOrCreateStateSet()->setMode(GL_LIGHTING, osg::StateAttribute::OFF);

    /*
    // Single fixed-texture RTT camera, kept for reference.
    texture = new osg::Texture2D();
    texture->setInternalFormat(GL_RGBA);
    texture->setSourceFormat( GL_RGBA );
    texture->setResizeNonPowerOfTwoHint( false );
    texture->setTextureSize( 256, 256 );
    texture->setFilter(osg::Texture2D::MIN_FILTER, osg::Texture::LINEAR);
    texture->setFilter(osg::Texture2D::MAG_FILTER, osg::Texture::LINEAR);
    texture->setSourceType( GL_UNSIGNED_BYTE );
    osg::Camera* capture = createRTTCamera(loadedModel.get(), texture );
    root->addChild(capture);
    */

    // Maximum number of pre-render cameras added per frame.
    unsigned int numCaptureCameras = 30;
    std::vector< osg::ref_ptr< osg::Camera > > captureCameras;

    // Single reusable RTT camera driven by the draw callbacks, kept for reference.
    //osg::ref_ptr< osg::Camera > capture = createRTTCamera(loadedModel.get());
    //root->addChild(capture);
    //capture->setPreDrawCallback( new PreDrawCallback() );
    //capture->setPostDrawCallback( new PostDrawCallback() );

    // Build a 50x50 grid of textured quads; each cell queues its texture for RTT.
    for (unsigned int x = 0; x < 50; x++)
    {
        for (unsigned int y = 0; y < 50; y++)
        {
            root->addChild(makeCell(x, y));
        }
    }

    //root->addChild(loadedModel);

    viewer.setSceneData( root );
    viewer.realize();
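    // Per-frame queued RTT: drop the previous frame's capture cameras, then pull
    // up to numCaptureCameras textures off the queue and attach each one to a
    // fresh pre-render camera that renders the loaded model into it.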
    while (!viewer.done())
    {
        // remove the capture cameras added on the previous frame.
        for (unsigned int i = 0; i < captureCameras.size(); i++)
        {
            root->removeChild( captureCameras[i].get() );
        }
        captureCameras.clear();

        while (!textureQueue.empty() && captureCameras.size() < numCaptureCameras)
        {
            osg::ref_ptr< osg::Camera > capture = createRTTCamera(loadedModel.get());
            root->addChild( capture.get() );
            captureCameras.push_back( capture.get() );

            osg::ref_ptr< osg::Texture2D > texture = textureQueue.front();
            if (texture.valid())
            {
                OSG_NOTICE << "Processing " << texture->getName() << std::endl;

                // size the viewport to the texture and attach it as the color buffer.
                capture->setViewport(0, 0, texture->getTextureWidth(), texture->getTextureHeight());
                capture->attach( osg::Camera::COLOR_BUFFER, texture, 0, 0, false );

                OSG_NOTICE << textureQueue.size() << " remaining..." << std::endl;
                textureQueue.pop();
            }
        }

        OSG_NOTICE << captureCameras.size() << " capture cameras" << std::endl;

        viewer.frame();
    }

    return 0;
}