Oh hey! Hi Copland,
glad to see you back around here and to hear you're diving back into the adventure!
I used OpenVR, which is a kind of framework for driving this type of device (Oculus, SteamVR),
but with OpenSceneGraph; I don't think it would be hard to port to Irrlicht.
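To give an idea of the scale of such a port: the engine-specific part is really just the render-target and camera plumbing, while the OpenVR side boils down to a handful of calls. Here is a minimal, engine-agnostic sketch of that core (mine, not part of the code below; error handling and the actual texture creation are omitted):

#include <openvr/openvr.h>
#include <cstdint>

// Minimal OpenVR lifecycle, independent of the rendering engine (OSG, Irrlicht, ...).
// Sketch only: the GL textures must be created by whatever engine you use.
void runOpenVrCore()
{
    vr::EVRInitError err = vr::VRInitError_None;
    vr::IVRSystem *hmd = vr::VR_Init(&err, vr::VRApplication_Scene);
    if(err != vr::VRInitError_None || !vr::VRCompositor())
        return;

    uint32_t w = 0, h = 0;
    hmd->GetRecommendedRenderTargetSize(&w, &h); // size of each per-eye render target

    // ... create two GL textures of size w x h with the engine of your choice ...
    uint32_t leftTex = 0, rightTex = 0;          // GL texture names filled in by the engine

    bool running = true;
    while(running) // render loop
    {
        vr::TrackedDevicePose_t poses[vr::k_unMaxTrackedDeviceCount];
        vr::VRCompositor()->WaitGetPoses(poses, vr::k_unMaxTrackedDeviceCount, NULL, 0);

        // ... render the scene twice (left/right eye) into leftTex / rightTex ...

        vr::Texture_t left  = { (void *)(uintptr_t)leftTex,  vr::TextureType_OpenGL, vr::ColorSpace_Gamma };
        vr::Texture_t right = { (void *)(uintptr_t)rightTex, vr::TextureType_OpenGL, vr::ColorSpace_Gamma };
        vr::VRCompositor()->Submit(vr::Eye_Left,  &left);
        vr::VRCompositor()->Submit(vr::Eye_Right, &right);
    }

    vr::VR_Shutdown();
}

Everything below is the OSG version of exactly that, plus the MSAA and mirror-window FBO plumbing.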
// Device.h
#ifndef _OSG_OPENVRDEVICE_H_
#define _OSG_OPENVRDEVICE_H_

#include <openvr/openvr.h>

#include <osg/Geode>
#include <osg/Texture2D>
#include <osg/Version>
#include <osg/FrameBufferObject>

#include <mutex>

#include "EventHandler.h"
#include "Texture.h"

namespace sacred { namespace openvr { namespace device {

class Device : public osg::Referenced
{
public:
    typedef enum Eye_
    {
        LEFT = 0,
        RIGHT = 1,
        COUNT = 2
    } Eye;

    Device(float nearClip, float farClip, const float worldUnitsPerMetre = 1.0f, const int samples = 0);

    void createRenderBuffers(osg::ref_ptr<osg::State> state);
    void init();
    void shutdown(osg::GraphicsContext *gc);

    static bool hmdPresent();
    bool hmdInitialized() const;

    osg::Matrix projectionMatrixCenter() const;
    osg::Matrix projectionMatrixLeft() const;
    osg::Matrix projectionMatrixRight() const;

    osg::Matrix projectionOffsetMatrixLeft() const;
    osg::Matrix projectionOffsetMatrixRight() const;

    osg::Matrix viewMatrixLeft() const;
    osg::Matrix viewMatrixRight() const;

    float nearClip() const { return m_nearClip; }
    float farClip() const { return m_farClip; }

    void resetSensorOrientation() const;
    void updatePose();

    osg::Vec3 position() const { return m_position; }
    osg::Quat orientation() const { return m_orientation; }

    osg::Camera *createRTTCamera(Device::Eye eye, osg::Transform::ReferenceFrame referenceFrame,
                                 const osg::Vec4 &clearColor, osg::GraphicsContext *gc = 0) const;

    bool submitFrame();
    void blitMirrorTexture(osg::GraphicsContext *gc);

    osg::GraphicsContext::Traits *graphicsContextTraits() const;

protected:
    ~Device(); // Since we inherit from osg::Referenced we must make destructor protected

    void calculateEyeAdjustment();
    void calculateProjectionMatrices();
    void trySetProcessAsHighPriority() const;

    vr::IVRSystem *m_vrSystem;
    vr::IVRRenderModels *m_vrRenderModels;
    const float m_worldUnitsPerMetre;

    //osg::ref_ptr<TextureBuffer> m_textureBuffer[2];
    //osg::ref_ptr<MirrorTexture> m_mirrorTexture;
    TextureBuffer *m_textureBuffer[2];
    MirrorTexture *m_mirrorTexture;

    osg::Matrixf m_leftEyeProjectionMatrix;
    osg::Matrixf m_rightEyeProjectionMatrix;
    osg::Vec3f m_leftEyeAdjust;
    osg::Vec3f m_rightEyeAdjust;

    osg::Vec3 m_position;
    osg::Quat m_orientation;

    float m_nearClip;
    float m_farClip;
    int m_samples;

private:
    std::string GetDeviceProperty(vr::TrackedDeviceProperty prop);

    Device(const Device &);            // Do not allow copy
    Device &operator=(const Device &); // Do not allow assignment operator.
};

class RealizeOperation : public osg::GraphicsOperation
{
public:
    explicit RealizeOperation(osg::ref_ptr<Device> device) :
        osg::GraphicsOperation("Main", false), m_device(device), m_realized(false) {}

    virtual void operator()(osg::GraphicsContext *gc);
    bool realized() const { return m_realized; }

protected:
    std::mutex _mutex;
    osg::observer_ptr<Device> m_device;
    bool m_realized;
};

} } }

#endif /* _OSG_OPENVRDEVICE_H_ */
// Device.cpp
#include "Device.h"
#include "../callback/PreDraw.h"
#include "../callback/PostDraw.h"
#include "Texture.h"

#include <osg/Geometry>
#include <osgViewer/Renderer>
#include <osgViewer/GraphicsWindow>

#include <thread>

#ifdef _WIN32
#   include <windows.h>
#endif

static osg::Matrix convertMatrix34(const vr::HmdMatrix34_t &mat34)
{
    osg::Matrix matrix(
        mat34.m[0][0], mat34.m[1][0], mat34.m[2][0], 0.0,
        mat34.m[0][1], mat34.m[1][1], mat34.m[2][1], 0.0,
        mat34.m[0][2], mat34.m[1][2], mat34.m[2][2], 0.0,
        mat34.m[0][3], mat34.m[1][3], mat34.m[2][3], 1.0f
    );
    return matrix;
}

static osg::Matrix convertMatrix44(const vr::HmdMatrix44_t &mat44)
{
    osg::Matrix matrix(
        mat44.m[0][0], mat44.m[1][0], mat44.m[2][0], mat44.m[3][0],
        mat44.m[0][1], mat44.m[1][1], mat44.m[2][1], mat44.m[3][1],
        mat44.m[0][2], mat44.m[1][2], mat44.m[2][2], mat44.m[3][2],
        mat44.m[0][3], mat44.m[1][3], mat44.m[2][3], mat44.m[3][3]
    );
    return matrix;
}

namespace sacred { namespace openvr { namespace device {

Device::Device(float nearClip, float farClip, const float worldUnitsPerMetre, const int samples) :
    m_vrSystem(nullptr),
    m_vrRenderModels(nullptr),
    m_worldUnitsPerMetre(worldUnitsPerMetre),
    m_mirrorTexture(nullptr),
    m_position(osg::Vec3(0.0f, 0.0f, 0.0f)),
    m_orientation(osg::Quat(0.0f, 0.0f, 0.0f, 1.0f)),
    m_nearClip(nearClip), m_farClip(farClip),
    m_samples(samples)
{
    for(int i = 0; i < 2; i++)
        m_textureBuffer[i] = nullptr;

    trySetProcessAsHighPriority();

    // Loading the SteamVR Runtime
    vr::EVRInitError eError = vr::VRInitError_None;
    m_vrSystem = vr::VR_Init(&eError, vr::VRApplication_Scene);

    if(eError != vr::VRInitError_None)
    {
        m_vrSystem = nullptr;
        osg::notify(osg::WARN) << "Error: Unable to initialize the OpenVR library.\n"
                               << "Reason: " << vr::VR_GetVRInitErrorAsEnglishDescription(eError) << std::endl;
        return;
    }

    if(!vr::VRCompositor())
    {
        m_vrSystem = nullptr;
        vr::VR_Shutdown();
        osg::notify(osg::WARN) << "Error: Compositor initialization failed" << std::endl;
        return;
    }

    m_vrRenderModels = (vr::IVRRenderModels *)vr::VR_GetGenericInterface(vr::IVRRenderModels_Version, &eError);

    if(m_vrRenderModels == nullptr)
    {
        m_vrSystem = nullptr;
        vr::VR_Shutdown();
        osg::notify(osg::WARN) << "Error: Unable to get render model interface!\n"
                               << "Reason: " << vr::VR_GetVRInitErrorAsEnglishDescription(eError) << std::endl;
        return;
    }

    std::string driverName = GetDeviceProperty(vr::Prop_TrackingSystemName_String);
    std::string deviceSerialNumber = GetDeviceProperty(vr::Prop_SerialNumber_String);

    osg::notify(osg::NOTICE) << "HMD driver name: " << driverName << std::endl;
    osg::notify(osg::NOTICE) << "HMD device serial number: " << deviceSerialNumber << std::endl;
    osg::notify(osg::NOTICE) << "OpenVR runtime path: " << vr::VR_RuntimePath() << std::endl;
}

std::string Device::GetDeviceProperty(vr::TrackedDeviceProperty prop)
{
    uint32_t bufferLen = m_vrSystem->GetStringTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, prop, NULL, 0);

    if(bufferLen == 0)
        return "";

    char *buffer = new char[bufferLen];
    bufferLen = m_vrSystem->GetStringTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, prop, buffer, bufferLen);
    std::string result = buffer;
    delete [] buffer;
    return result;
}

void Device::createRenderBuffers(osg::ref_ptr<osg::State> state)
{
    uint32_t renderWidth = 0;
    uint32_t renderHeight = 0;
    m_vrSystem->GetRecommendedRenderTargetSize(&renderWidth, &renderHeight);

    for(int i = 0; i < 2; i++)
        m_textureBuffer[i] = new TextureBuffer(state, renderWidth, renderHeight, m_samples);

    int mirrorWidth = 800;
    int mirrorHeight = 450;
    m_mirrorTexture = new MirrorTexture(state, mirrorWidth, mirrorHeight);
}

void Device::init()
{
    calculateEyeAdjustment();
    calculateProjectionMatrices();
}

bool Device::hmdPresent()
{
    return vr::VR_IsHmdPresent();
}

bool Device::hmdInitialized() const
{
    return m_vrSystem != nullptr && m_vrRenderModels != nullptr;
}

osg::Matrix Device::projectionMatrixCenter() const
{
    osg::Matrix projectionMatrixCenter;
    projectionMatrixCenter = m_leftEyeProjectionMatrix.operator*(0.5) + m_rightEyeProjectionMatrix.operator*(0.5);
    return projectionMatrixCenter;
}

osg::Matrix Device::projectionMatrixLeft() const
{
    return m_leftEyeProjectionMatrix;
}

osg::Matrix Device::projectionMatrixRight() const
{
    return m_rightEyeProjectionMatrix;
}

osg::Matrix Device::projectionOffsetMatrixLeft() const
{
    osg::Matrix projectionOffsetMatrix;
    float offset = m_leftEyeProjectionMatrix(2, 0);
    projectionOffsetMatrix.makeTranslate(osg::Vec3(-offset, 0.0, 0.0));
    return projectionOffsetMatrix;
}

osg::Matrix Device::projectionOffsetMatrixRight() const
{
    osg::Matrix projectionOffsetMatrix;
    float offset = m_rightEyeProjectionMatrix(2, 0);
    projectionOffsetMatrix.makeTranslate(osg::Vec3(-offset, 0.0, 0.0));
    return projectionOffsetMatrix;
}

osg::Matrix Device::viewMatrixLeft() const
{
    osg::Matrix viewMatrix;
    viewMatrix.makeTranslate(-m_leftEyeAdjust);
    return viewMatrix;
}

osg::Matrix Device::viewMatrixRight() const
{
    osg::Matrix viewMatrix;
    viewMatrix.makeTranslate(-m_rightEyeAdjust);
    return viewMatrix;
}

void Device::resetSensorOrientation() const
{
    m_vrSystem->ResetSeatedZeroPose();
}

void Device::updatePose()
{
    vr::VRCompositor()->SetTrackingSpace(vr::TrackingUniverseSeated);

    vr::TrackedDevicePose_t poses[vr::k_unMaxTrackedDeviceCount];

    for(int i = 0; i < vr::k_unMaxTrackedDeviceCount; ++i)
        poses[i].bPoseIsValid = false;

    vr::VRCompositor()->WaitGetPoses(poses, vr::k_unMaxTrackedDeviceCount, NULL, 0);

    // Not sure why, but the openvr hellovr_opengl example only seems interested in the
    // pose transform from the first pose tracking device in the array.
    // i.e. this seems to be the only one that is used to affect the view transform matrix.
    // So, here we do the same.
    const vr::TrackedDevicePose_t &pose = poses[vr::k_unTrackedDeviceIndex_Hmd];

    if(pose.bPoseIsValid)
    {
        osg::Matrix matrix = convertMatrix34(pose.mDeviceToAbsoluteTracking);
        osg::Matrix poseTransform = osg::Matrix::inverse(matrix);
        m_position = poseTransform.getTrans() * m_worldUnitsPerMetre;
        m_orientation = poseTransform.getRotate();
    }
}

class InitialDrawCallback : public osg::Camera::DrawCallback
{
public:
    virtual void operator()(osg::RenderInfo &renderInfo) const
    {
        osg::GraphicsOperation *graphicsOperation = renderInfo.getCurrentCamera()->getRenderer();
        osgViewer::Renderer *renderer = dynamic_cast<osgViewer::Renderer *>(graphicsOperation);

        if(renderer != nullptr)
        {
            // Disable normal OSG FBO camera setup because it will undo the MSAA FBO configuration.
            renderer->setCameraRequiresSetUp(false);
        }
    }
};

osg::Camera *Device::createRTTCamera(Device::Eye eye, osg::Transform::ReferenceFrame referenceFrame,
                                     const osg::Vec4 &clearColor, osg::GraphicsContext *gc) const
{
    TextureBuffer *buffer = m_textureBuffer[eye];

    osg::ref_ptr<osg::Camera> camera = new osg::Camera();
    camera->setClearColor(clearColor);
    camera->setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
    camera->setRenderOrder(osg::Camera::PRE_RENDER, eye);
    camera->setComputeNearFarMode(osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR);
    camera->setAllowEventFocus(false);
    camera->setReferenceFrame(referenceFrame);
    camera->setViewport(0, 0, buffer->textureWidth(), buffer->textureHeight());
    camera->setGraphicsContext(gc);

    // Here we avoid doing anything regarding OSG camera RTT attachment.
    // Ideally we would use automatic methods within OSG for handling RTT but in this
    // case it seemed simpler to handle FBO creation and selection within this class.

    // This initial draw callback is used to disable normal OSG camera setup which
    // would undo our RTT FBO configuration.
    camera->setInitialDrawCallback(new InitialDrawCallback());
    camera->setPreDrawCallback(new callback::PreDraw(camera.get(), buffer));
    camera->setFinalDrawCallback(new callback::PostDraw(camera.get(), buffer));

    return camera.release();
}

bool Device::submitFrame()
{
    vr::Texture_t leftEyeTexture  = {(void *)(intptr_t)m_textureBuffer[0]->getTexture(), vr::TextureType_OpenGL, vr::ColorSpace_Gamma};
    vr::Texture_t rightEyeTexture = {(void *)(intptr_t)m_textureBuffer[1]->getTexture(), vr::TextureType_OpenGL, vr::ColorSpace_Gamma};

    vr::EVRCompositorError lError = vr::VRCompositor()->Submit(vr::Eye_Left, &leftEyeTexture);
    vr::EVRCompositorError rError = vr::VRCompositor()->Submit(vr::Eye_Right, &rightEyeTexture);

    return lError == vr::VRCompositorError_None && rError == vr::VRCompositorError_None;
}

void Device::blitMirrorTexture(osg::GraphicsContext *gc)
{
    m_mirrorTexture->blitTexture(gc, m_textureBuffer[0], m_textureBuffer[1]);
}

osg::GraphicsContext::Traits *Device::graphicsContextTraits() const
{
    osg::GraphicsContext::WindowingSystemInterface *wsi = osg::GraphicsContext::getWindowingSystemInterface();

    if(!wsi)
    {
        osg::notify(osg::NOTICE) << "Error, no WindowSystemInterface available, cannot create windows." << std::endl;
        return 0;
    }

    // Get the screen identifiers set in environment variable DISPLAY
    osg::GraphicsContext::ScreenIdentifier si;
    si.readDISPLAY();

    // If displayNum has not been set, reset it to 0.
    if(si.displayNum < 0)
    {
        si.displayNum = 0;
        osg::notify(osg::INFO) << "Couldn't get display number, setting to 0" << std::endl;
    }

    // If screenNum has not been set, reset it to 0.
    if(si.screenNum < 0)
    {
        si.screenNum = 0;
        osg::notify(osg::INFO) << "Couldn't get screen number, setting to 0" << std::endl;
    }

    unsigned int width, height;
    wsi->getScreenResolution(si, width, height);

    osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits;
    traits->hostName = si.hostName;
    traits->screenNum = si.screenNum;
    traits->displayNum = si.displayNum;
    traits->windowDecoration = true;
    traits->x = 50;
    traits->y = 50;
    traits->width = 800;
    traits->height = 450;
    traits->doubleBuffer = true;
    traits->sharedContext = nullptr;
    traits->vsync = false; // VSync should always be disabled because the HMD submit handles the timing of the swap.

    return traits.release();
}

void Device::shutdown(osg::GraphicsContext *gc)
{
    // Delete mirror texture
    //if(m_mirrorTexture.valid())
    {
        m_mirrorTexture->destroy(gc);
        m_mirrorTexture = nullptr;
    }

    // Delete texture and depth buffers
    for(int i = 0; i < 2; i++)
    {
        //if(m_textureBuffer[i].valid())
        {
            m_textureBuffer[i]->destroy(gc);
            m_textureBuffer[i] = nullptr;
        }
    }

    if(m_vrSystem != nullptr)
    {
        vr::VR_Shutdown();
        m_vrSystem = nullptr;
    }
}

Device::~Device()
{
    // shutdown(gc);
}

void Device::calculateEyeAdjustment()
{
    vr::HmdMatrix34_t mat;

    mat = m_vrSystem->GetEyeToHeadTransform(vr::Eye_Left);
    m_leftEyeAdjust = convertMatrix34(mat).getTrans();
    mat = m_vrSystem->GetEyeToHeadTransform(vr::Eye_Right);
    m_rightEyeAdjust = convertMatrix34(mat).getTrans();

    // Display IPD
    float ipd = (m_leftEyeAdjust - m_rightEyeAdjust).length();
    osg::notify(osg::ALWAYS) << "Interpupillary distance (IPD): " << ipd * 1000.0f << " mm" << std::endl;

    // Scale to world units
    m_leftEyeAdjust *= m_worldUnitsPerMetre;
    m_rightEyeAdjust *= m_worldUnitsPerMetre;
}

void Device::calculateProjectionMatrices()
{
    vr::HmdMatrix44_t mat;

    mat = m_vrSystem->GetProjectionMatrix(vr::Eye_Left, m_nearClip, m_farClip);
    m_leftEyeProjectionMatrix = convertMatrix44(mat);

    mat = m_vrSystem->GetProjectionMatrix(vr::Eye_Right, m_nearClip, m_farClip);
    m_rightEyeProjectionMatrix = convertMatrix44(mat);
}

void Device::trySetProcessAsHighPriority() const
{
    // Require at least 4 processors, otherwise the process could occupy the machine.
    //if(OpenThreads::GetNumberOfProcessors() >= 4) // undefined in this build, use std::thread instead
    if(std::thread::hardware_concurrency() >= 4)
    {
#ifdef _WIN32
        SetPriorityClass(GetCurrentProcess(), HIGH_PRIORITY_CLASS);
#endif
    }
}

void RealizeOperation::operator()(osg::GraphicsContext *gc)
{
    if(!m_realized)
    {
        std::lock_guard<std::mutex> lock(_mutex);
        gc->makeCurrent();

        if(osgViewer::GraphicsWindow *window = dynamic_cast<osgViewer::GraphicsWindow *>(gc))
        {
            // Run wglSwapIntervalEXT(0) to force VSync Off
            window->setSyncToVBlank(false);
        }

        osg::ref_ptr<osg::State> state = gc->getState();
        m_device->createRenderBuffers(state);

        // Init the openvr system
        m_device->init();
    }

    m_realized = true;
}

} } }
// Texture.h
#ifndef __SACRED_OPENVR_GRAPHICS__
#define __SACRED_OPENVR_GRAPHICS__

#include <osg/Texture2D>
#include <osg/Version>
#include <osg/FrameBufferObject>

#if(OSG_VERSION_GREATER_OR_EQUAL(3, 4, 0))
typedef osg::GLExtensions OSG_GLExtensions;
typedef osg::GLExtensions OSG_Texture_Extensions;
#else
typedef osg::FBOExtensions OSG_GLExtensions;
typedef osg::Texture::Extensions OSG_Texture_Extensions;
#endif

inline const OSG_GLExtensions *getGLExtensions(const osg::State &state)
{
#if(OSG_VERSION_GREATER_OR_EQUAL(3, 4, 0))
    return state.get<osg::GLExtensions>();
#else
    return osg::FBOExtensions::instance(state.getContextID(), true);
#endif
}

inline const OSG_Texture_Extensions *getTextureExtensions(const osg::State &state)
{
#if(OSG_VERSION_GREATER_OR_EQUAL(3, 4, 0))
    return state.get<osg::GLExtensions>();
#else
    return osg::Texture::getExtensions(state.getContextID(), true);
#endif
}

namespace sacred { namespace openvr { namespace device {

class TextureBuffer : public osg::Referenced
{
public:
    TextureBuffer(osg::ref_ptr<osg::State> state, int width, int height, int msaaSamples);
    void destroy(osg::GraphicsContext *gc);

    GLuint getTexture() { return m_Resolve_ColorTex; }
    int textureWidth() const { return m_width; }
    int textureHeight() const { return m_height; }
    int samples() const { return m_samples; }

    void onPreRender(osg::RenderInfo &renderInfo);
    void onPostRender(osg::RenderInfo &renderInfo);

protected:
    ~TextureBuffer() {}

    friend class MirrorTexture;

    GLuint m_Resolve_FBO;      // MSAA FBO is copied to this FBO after render.
    GLuint m_Resolve_ColorTex; // color texture for above FBO.
    GLuint m_MSAA_FBO;         // framebuffer for MSAA RTT
    GLuint m_MSAA_ColorTex;    // color texture for MSAA RTT
    GLuint m_MSAA_DepthTex;    // depth texture for MSAA RTT
    GLint m_width;             // width of texture in pixels
    GLint m_height;            // height of texture in pixels
    int m_samples;             // sample width for MSAA
};

class MirrorTexture : public osg::Referenced
{
public:
    MirrorTexture(osg::ref_ptr<osg::State> state, GLint width, GLint height);
    void destroy(osg::GraphicsContext *gc);
    void blitTexture(osg::GraphicsContext *gc, TextureBuffer *leftEye, TextureBuffer *rightEye);

protected:
    ~MirrorTexture() {}

    GLuint m_mirrorFBO;
    GLuint m_mirrorTex;
    GLint m_width;
    GLint m_height;
};

} } }

#endif
// Texture.cpp
#include "Texture.h"

#include <osg/GLExtensions>
#include <osg/Multisample>
#include <osg/State>
#include <osg/Notify>
#include <osg/buffered_value>
#include <osg/RenderInfo>

#ifndef GL_TEXTURE_MAX_LEVEL
#define GL_TEXTURE_MAX_LEVEL 0x813D
#endif

namespace sacred { namespace openvr { namespace device {

TextureBuffer::TextureBuffer(osg::ref_ptr<osg::State> state, int width, int height, int samples) :
    m_Resolve_FBO(0),
    m_Resolve_ColorTex(0),
    m_MSAA_FBO(0),
    m_MSAA_ColorTex(0),
    m_MSAA_DepthTex(0),
    m_width(width),
    m_height(height),
    m_samples(samples)
{
    const auto *fbo_ext = getGLExtensions(*state);
    const int maxTextureLevel = 0;

    fbo_ext->glGenFramebuffers(1, &m_Resolve_FBO);

    glGenTextures(1, &m_Resolve_ColorTex);
    glBindTexture(GL_TEXTURE_2D, m_Resolve_ColorTex);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, maxTextureLevel);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, nullptr);

    // Create an FBO for primary render target.
    fbo_ext->glGenFramebuffers(1, &m_MSAA_FBO);

    const auto *extensions = getTextureExtensions(*state);

    // Create MSAA colour buffer
    glGenTextures(1, &m_MSAA_ColorTex);
    glBindTexture(GL_TEXTURE_2D_MULTISAMPLE, m_MSAA_ColorTex);
    extensions->glTexImage2DMultisample(GL_TEXTURE_2D_MULTISAMPLE, m_samples, GL_RGBA, m_width, m_height, false);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER_ARB);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER_ARB);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D_MULTISAMPLE, GL_TEXTURE_MAX_LEVEL, maxTextureLevel);

    // Create MSAA depth buffer
    glGenTextures(1, &m_MSAA_DepthTex);
    glBindTexture(GL_TEXTURE_2D_MULTISAMPLE, m_MSAA_DepthTex);
    extensions->glTexImage2DMultisample(GL_TEXTURE_2D_MULTISAMPLE, m_samples, GL_DEPTH_COMPONENT, m_width, m_height, false);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D_MULTISAMPLE, GL_TEXTURE_MAX_LEVEL, maxTextureLevel);

    // check FBO status
    GLenum status = fbo_ext->glCheckFramebufferStatus(GL_FRAMEBUFFER_EXT);

    if(status != GL_FRAMEBUFFER_COMPLETE_EXT)
        osg::notify(osg::WARN) << "Error setting up frame buffer object." << std::endl;
}

void TextureBuffer::onPreRender(osg::RenderInfo &renderInfo)
{
    osg::State &state = *renderInfo.getState();
    const auto *fbo_ext = getGLExtensions(state);

    fbo_ext->glBindFramebuffer(GL_FRAMEBUFFER_EXT, m_MSAA_FBO);
    fbo_ext->glFramebufferTexture2D(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D_MULTISAMPLE, m_MSAA_ColorTex, 0);
    fbo_ext->glFramebufferTexture2D(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_TEXTURE_2D_MULTISAMPLE, m_MSAA_DepthTex, 0);
}

void TextureBuffer::onPostRender(osg::RenderInfo &renderInfo)
{
    osg::State &state = *renderInfo.getState();
    const auto *fbo_ext = getGLExtensions(state);

    fbo_ext->glBindFramebuffer(GL_READ_FRAMEBUFFER_EXT, m_MSAA_FBO);
    fbo_ext->glFramebufferTexture2D(GL_READ_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D_MULTISAMPLE, m_MSAA_ColorTex, 0);
    fbo_ext->glFramebufferRenderbuffer(GL_READ_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, 0);

    fbo_ext->glBindFramebuffer(GL_DRAW_FRAMEBUFFER_EXT, m_Resolve_FBO);
    fbo_ext->glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, m_Resolve_ColorTex, 0);
    fbo_ext->glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, 0);

    // Copy MSAA_FBO texture to Resolve_FBO
    fbo_ext->glBlitFramebuffer(0, 0, m_width, m_height, 0, 0, m_width, m_height, GL_COLOR_BUFFER_BIT, GL_NEAREST);

    fbo_ext->glBindFramebuffer(GL_FRAMEBUFFER_EXT, 0);
}

void TextureBuffer::destroy(osg::GraphicsContext *gc)
{
    const auto *fbo_ext = getGLExtensions(*gc->getState());

    if(fbo_ext)
    {
        fbo_ext->glDeleteFramebuffers(1, &m_MSAA_FBO);
        fbo_ext->glDeleteFramebuffers(1, &m_Resolve_FBO);
    }
}

MirrorTexture::MirrorTexture(osg::ref_ptr<osg::State> state, GLint width, GLint height) :
    m_width(width),
    m_height(height)
{
    const auto *fbo_ext = getGLExtensions(*state);

    fbo_ext->glGenFramebuffers(1, &m_mirrorFBO);
    fbo_ext->glBindFramebuffer(GL_READ_FRAMEBUFFER_EXT, m_mirrorFBO);

    glGenTextures(1, &m_mirrorTex);
    glBindTexture(GL_TEXTURE_2D, m_mirrorTex);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, 0);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, m_width, m_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, nullptr);

    fbo_ext->glFramebufferTexture2D(GL_READ_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, m_mirrorTex, 0);
    fbo_ext->glFramebufferRenderbuffer(GL_READ_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, 0);
    fbo_ext->glBindFramebuffer(GL_READ_FRAMEBUFFER_EXT, 0);
}

void MirrorTexture::blitTexture(osg::GraphicsContext *gc, TextureBuffer *leftEye, TextureBuffer *rightEye)
{
    const auto *fbo_ext = getGLExtensions(*(gc->getState()));

    fbo_ext->glBindFramebuffer(GL_DRAW_FRAMEBUFFER_EXT, m_mirrorFBO);
    fbo_ext->glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, m_mirrorTex, 0);
    fbo_ext->glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, 0);

    glClearColor(1, 0, 0, 1);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Copy left eye image to mirror
    fbo_ext->glBindFramebuffer(GL_READ_FRAMEBUFFER_EXT, leftEye->m_Resolve_FBO);
    fbo_ext->glFramebufferTexture2D(GL_READ_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, leftEye->m_Resolve_ColorTex, 0);
    fbo_ext->glFramebufferRenderbuffer(GL_READ_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, 0);
    fbo_ext->glBlitFramebuffer(0, 0, leftEye->m_width, leftEye->m_height, 0, 0, m_width / 2, m_height, GL_COLOR_BUFFER_BIT, GL_NEAREST);

    // Copy right eye image to mirror
    fbo_ext->glBindFramebuffer(GL_READ_FRAMEBUFFER_EXT, rightEye->m_Resolve_FBO);
    fbo_ext->glFramebufferTexture2D(GL_READ_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, rightEye->m_Resolve_ColorTex, 0);
    fbo_ext->glFramebufferRenderbuffer(GL_READ_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, 0);
    fbo_ext->glBlitFramebuffer(0, 0, rightEye->m_width, rightEye->m_height, m_width / 2, 0, m_width, m_height, GL_COLOR_BUFFER_BIT, GL_NEAREST);

    fbo_ext->glBindFramebuffer(GL_FRAMEBUFFER_EXT, 0);

    // Blit mirror texture to back buffer
    fbo_ext->glBindFramebuffer(GL_READ_FRAMEBUFFER_EXT, m_mirrorFBO);
    fbo_ext->glBindFramebuffer(GL_DRAW_FRAMEBUFFER_EXT, 0);
    GLint w = m_width;
    GLint h = m_height;
    fbo_ext->glBlitFramebuffer(0, 0, w, h, 0, 0, w, h, GL_COLOR_BUFFER_BIT, GL_NEAREST);
    fbo_ext->glBindFramebuffer(GL_READ_FRAMEBUFFER_EXT, 0);
}

void MirrorTexture::destroy(osg::GraphicsContext *gc)
{
    const auto *fbo_ext = getGLExtensions(*gc->getState());

    if(fbo_ext)
        fbo_ext->glDeleteFramebuffers(1, &m_mirrorFBO);
}

} } }
And then there are a few operations to perform for the rendering itself, as sketched below:
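Roughly, the wiring and the per-frame sequence look like this. This is my sketch of how the pieces above fit together, not code from the project: in the real code the per-eye view/projection updates live in the callbacks and EventHandler, so the osgViewer setup and names here are illustrative only.

#include <osgViewer/Viewer>
#include "Device.h"

namespace ovr = sacred::openvr::device;

int main()
{
    if(!ovr::Device::hmdPresent())
        return 1;

    // Near/far clip and world scale are illustrative values.
    osg::ref_ptr<ovr::Device> device = new ovr::Device(0.1f, 1000.0f);

    osg::ref_ptr<osg::GraphicsContext> gc =
        osg::GraphicsContext::createGraphicsContext(device->graphicsContextTraits());

    osgViewer::Viewer viewer;
    viewer.getCamera()->setGraphicsContext(gc.get());
    viewer.getCamera()->setViewport(0, 0, 800, 450);

    // The realize operation creates the FBOs and initializes OpenVR on the GL thread.
    viewer.setRealizeOperation(new ovr::RealizeOperation(device));
    viewer.realize(); // must happen before createRTTCamera(), which needs the TextureBuffers

    // One RTT camera per eye, rendering into the TextureBuffers owned by the device.
    osg::Camera *leftCam  = device->createRTTCamera(ovr::Device::LEFT,
                                                    osg::Transform::RELATIVE_RF, osg::Vec4(0, 0, 0, 1), gc.get());
    osg::Camera *rightCam = device->createRTTCamera(ovr::Device::RIGHT,
                                                    osg::Transform::RELATIVE_RF, osg::Vec4(0, 0, 0, 1), gc.get());
    // ... attach your scene, add the eye cameras as slaves using
    //     device->projectionOffsetMatrixLeft()/Right() and viewMatrixLeft()/Right() ...

    while(!viewer.done())
    {
        device->updatePose();           // WaitGetPoses: blocks until the compositor wants a new frame
        // ... apply device->position()/orientation() plus the per-eye offsets to the cameras ...
        viewer.frame();                 // culls and draws both eye cameras into the MSAA FBOs
        device->submitFrame();          // hand the resolved eye textures to the OpenVR compositor
        device->blitMirrorTexture(gc.get()); // optional: mirror view in the desktop window
    }

    device->shutdown(gc.get());
    return 0;
}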
So there you go, the code I'm giving you is a bit raw; tell me what you manage to get out of it.
Personally I went through CMake + Cygwin + OpenVR (with SteamVR), which gave me a few headaches, and I had to modify the OpenVR sources to get it to compile properly...
Obviously the goal is to generate two offset images, which raises performance concerns for Irrlicht, since it tries to re-sort the scene and recompute dynamic meshes every time the camera position changes.
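To make that concrete, here is what the two passes would look like in Irrlicht terms. This is only my illustration of the stereo idea, not code from the project above; the OpenVR submit side and the proper per-eye projection matrices are left out, and the sign convention for the eye offset depends on your setup.

#include <irrlicht.h>
using namespace irr;

// Draw the same scene twice, once per eye, into two render-target textures.
// This double pass is exactly why Irrlicht re-culls and re-sorts everything twice per frame.
void renderBothEyes(video::IVideoDriver *driver, scene::ISceneManager *smgr,
                    scene::ICameraSceneNode *camera,
                    video::ITexture *leftRT, video::ITexture *rightRT,
                    f32 ipd /* metres, e.g. from GetEyeToHeadTransform */)
{
    const core::vector3df headPos    = camera->getPosition();
    const core::vector3df headTarget = camera->getTarget();

    // Lateral (right) vector of the camera, used to shift each eye sideways.
    core::vector3df right = (headTarget - headPos).crossProduct(camera->getUpVector());
    right.normalize();
    const core::vector3df offset = right * (ipd * 0.5f);

    // Left eye pass
    driver->setRenderTarget(leftRT, true, true, video::SColor(255, 0, 0, 0));
    camera->setPosition(headPos - offset);
    camera->setTarget(headTarget - offset);
    smgr->drawAll(); // full cull + sort + draw for this eye

    // Right eye pass
    driver->setRenderTarget(rightRT, true, true, video::SColor(255, 0, 0, 0));
    camera->setPosition(headPos + offset);
    camera->setTarget(headTarget + offset);
    smgr->drawAll(); // ...and again for the other eye

    // Back to the window and restore the camera.
    driver->setRenderTarget(0, false, false);
    camera->setPosition(headPos);
    camera->setTarget(headTarget);

    // The two RT textures are then what you would hand to vr::VRCompositor()->Submit();
    // getting their GL texture names out of Irrlicht is the engine-specific part.
}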
You'll most likely need a patch to optimize things a bit. That said, two Irrlicht devices could be an option, with resource sharing between the two OpenGL contexts,
and from there you could offer a "native" Irrlicht device.
I hope this will be of some use to you.