Forum Discussion
mrjazz
12 years ago · Honored Guest
Using the OculusSDK with Qt - a minimalist example
I have created a small Qt-project to demonstrate how to use the OculusSDK in conjunction with the platform independent Qt framework.
It basically consists of just one child-class of QGLWidget. Besides the platform dependent locations of the libraries and header-files the pro-file has to contain these settings:
The project contains three files:
main.cpp
oculusglwidget.h
oculusglwidget.cpp
It basically consists of just one child-class of QGLWidget. Besides the platform dependent locations of the libraries and header-files the pro-file has to contain these settings:
QT += core gui opengl network
CONFIG += c++11
LIBS += -lovr
The project contains three files:
main.cpp
#include "oculusglwidget.h"
#include <QApplication>
int main(int argc, char *argv[]) {
QApplication a(argc, argv);
OculusGLWidget w;
w.show();
return a.exec();
}
oculusglwidget.h
#ifndef OCULUSGLWIDGET_H
#define OCULUSGLWIDGET_H
#include <QGLWidget>
#include <QGLFramebufferObject>
#include <OVR.h>
#include <OVR_CAPI_GL.h>
#include <Kernel/OVR_Math.h>
// QGLWidget subclass that renders a simple scene (many rotating textured
// quads plus a ground plane) through the Oculus SDK's OpenGL distortion
// pipeline. One shared offscreen FBO holds both eye views side by side.
class OculusGLWidget : public QGLWidget {
Q_OBJECT
public:
OculusGLWidget();
~OculusGLWidget();
protected:
// Standard QGLWidget hooks; keyPressEvent handles 'R' (sensor reset).
virtual void initializeGL();
virtual void resizeGL(int width, int height);
virtual void paintGL();
virtual void keyPressEvent(QKeyEvent *event);
private:
QImage myImg; // image shown on every quad (generated placeholder until the download finishes)
GLuint myTex = 0; // GL texture for myImg; 0 means "needs (re)binding" — see drawScene()
static const int IMG_COUNT = 1000;
struct ImgInfo {
float yawVelocity; // rotation speed around the Y axis, degrees/second
OVR::Vector3f pos; // world-space position of the quad, meters
} imgInfos[IMG_COUNT];
// Draws the scene for the currently configured eye; dt is animation time
// in seconds since the first rendered frame.
void drawScene(const double &dt);
float bodyYaw = 0; // body orientation (yaw only); head orientation comes from the sensor
OVR::Vector3f bodyPos = OVR::Vector3f(0, 0, 0); // eye height is filled in from the HMD profile
ovrHmd Hmd = {}; // HMD handle (real device, or debug DK1 fallback)
ovrHmdDesc HmdDesc = {};
QGLFramebufferObject *offFB = NULL; // offscreen render target shared by both eyes
ovrGLTexture eyeTextures[2] = {}; // per-eye viewports into offFB's texture
ovrEyeRenderDesc EyeRenderDesc[2]; // filled by ovrHmd_ConfigureRendering in resizeGL
OVR::Matrix4f Projection[2]; // per-eye projection matrices
double tStart = 0; // timestamp of the first frame; 0 = not yet set
};
#endif // OCULUSGLWIDGET_H
oculusglwidget.cpp
#include "oculusglwidget.h"

#include <chrono>
#include <cmath>
#include <iostream>
#include <random>

#include <QKeyEvent>
#include <QLinearGradient>
#include <QNetworkAccessManager>
#include <QNetworkReply>
#include <QNetworkRequest>
#include <QPainter>
#include <QTimer>
#include <QUrl>
static const GLfloat SPACE_DEPTH = 25; // 25 meters
// Sets up the GL surface format, initializes LibOVR (with a debug-DK1
// fallback so the demo runs without hardware), randomly places the image
// quads, and kicks off an asynchronous download of the quad texture.
OculusGLWidget::OculusGLWidget() {
    // BUG FIX: QGLWidget::format() returns the QGLFormat BY VALUE, so calling
    // setDepth() on that temporary had no effect. Mutate a copy and apply it
    // with setFormat(). (The window framebuffer needs no depth buffer — the
    // offscreen FBO used for eye rendering has its own depth attachment.)
    QGLFormat fmt = format();
    fmt.setDepth(false);
    setFormat(fmt);

    setAutoBufferSwap(false); // the Oculus SDK performs the buffer swap itself
    setFocusPolicy(Qt::StrongFocus);

    ovr_Initialize();
    Hmd = ovrHmd_Create(0);
    if (!Hmd) {
        std::cerr << "No HMD detected." << std::endl;
        Hmd = ovrHmd_CreateDebug(ovrHmd_DK1); // virtual HMD so the demo still runs
        if (!Hmd) {
            std::cerr << "HMD-creation failed." << std::endl;
            exit(1);
        }
    }
    ovrHmd_GetDesc(Hmd, &HmdDesc);
    // Eye height from the user's profile (fallback 1.70 m).
    bodyPos.y = ovrHmd_GetFloat(Hmd, OVR_KEY_EYE_HEIGHT, 1.70);

    { // randomly distribute images, keeping each at least 1 m from the viewer
        std::mt19937 generator(std::chrono::system_clock::now().time_since_epoch().count());
        std::uniform_real_distribution<float> distriPos(-SPACE_DEPTH, SPACE_DEPTH);
        std::uniform_real_distribution<float> distriYawVelocity(-90, 90); // max: 90 degrees/second
        for (int i = 0; i < IMG_COUNT; ++i) {
            imgInfos[i].yawVelocity = distriYawVelocity(generator);
            do {
                imgInfos[i].pos = OVR::Vector3f(distriPos(generator), distriPos(generator), distriPos(generator));
            } while ((imgInfos[i].pos-bodyPos).Length() < 1);
        }
    }

    { // load image from web; show a generated placeholder until it arrives
        // Renders `text` centered on a 256x256 dark gradient background.
        auto createTextImage = [](const QString &text) {
            static const int w = 256, h = 256;
            QImage img(w, h, QImage::Format_ARGB32);
            QPainter p(&img);
            p.fillRect(0, 0, w, h, QColor(64, 0, 0));
            QLinearGradient lg(0, 0, 0, h);
            lg.setColorAt(0, QColor(64, 64, 64));
            lg.setColorAt(1, QColor(16, 16, 16));
            p.fillRect(QRectF(0, 0, w, h), lg);
            p.setPen(QColor(255, 255, 255));
            p.setFont(QFont("Helvetica", w/10));
            p.drawText(0, 0, w, h, Qt::AlignHCenter | Qt::AlignVCenter, text);
            return img;
        };
        myImg = createTextImage("Loading...");
        QNetworkAccessManager *nam = new QNetworkAccessManager(this);
        // BUG FIX: this lambda previously captured by reference ([&]), leaving
        // a dangling reference to the local `createTextImage` once the
        // constructor returned — and finished() fires much later. Capture by
        // value instead, and pass `this` as the connection context so the
        // slot is disconnected when the widget is destroyed.
        connect(nam, &QNetworkAccessManager::finished, this,
                [this, createTextImage](QNetworkReply *reply) {
            if (!myImg.loadFromData(reply->readAll())) {
                std::cerr << "Could not load image." << std::endl;
                myImg = createTextImage("Error!");
            }
            makeCurrent();
            deleteTexture(myTex); // drop the old texture; drawScene() rebinds lazily
            myTex = 0;
            reply->deleteLater();
        });
        nam->get(QNetworkRequest(QUrl("http://dbvc4uanumi2d.cloudfront.net/cdn/3.6.7/wp-content/themes/oculus/img/rift/lowLatencyHeadTracking.jpg")));
    }
}
// Releases GL resources with the context made current first (GL deletions
// require a current context), then tears down the Oculus SDK.
OculusGLWidget::~OculusGLWidget() {
makeCurrent();
deleteTexture(myTex); // myTex may still be 0; deleting texture id 0 is a no-op in GL
delete offFB;
if (Hmd) ovrHmd_Destroy(Hmd);
ovr_Shutdown();
}
void OculusGLWidget::initializeGL() {
ovrHmd_SetEnabledCaps(Hmd, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
ovrHmd_StartSensor(Hmd, ovrSensorCap_Orientation | ovrSensorCap_YawCorrection | ovrSensorCap_Position, 0);
OVR::Sizei recommenedTexSize[] = {
ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left, HmdDesc.DefaultEyeFov[0], 1),
ovrHmd_GetFovTextureSize(Hmd, ovrEye_Right, HmdDesc.DefaultEyeFov[1], 1)
};
QGLFramebufferObjectFormat ff;
ff.setAttachment(QGLFramebufferObject::Depth);
offFB = new QGLFramebufferObject(recommenedTexSize[0].w+recommenedTexSize[1].w, recommenedTexSize[0].h, ff);
glBindTexture(GL_TEXTURE_2D, offFB->texture());
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
for (int i = 0; i < 2; ++i) {
eyeTextures[i].OGL.Header.API = ovrRenderAPI_OpenGL;
eyeTextures[i].OGL.Header.TextureSize.h = offFB->height();
eyeTextures[i].OGL.Header.TextureSize.w = offFB->width();
eyeTextures[i].OGL.Header.RenderViewport.Pos.x = i==0?0:recommenedTexSize[0].w;
eyeTextures[i].OGL.Header.RenderViewport.Pos.y = 0;
eyeTextures[i].OGL.Header.RenderViewport.Size = recommenedTexSize[i];
eyeTextures[i].OGL.TexId = offFB->texture();
}
QTimer *t = new QTimer(this);
connect(t, &QTimer::timeout, this, &QGLWidget::updateGL);
t->start();
}
// (Re)configures the SDK's distortion rendering for the new window size and
// rebuilds the per-eye projection matrices from the eye-render descriptors
// the SDK returns.
void OculusGLWidget::resizeGL(int width, int height) {
    ovrRenderAPIConfig config = {};
    config.Header.API = ovrRenderAPI_OpenGL;
    config.Header.RTSize = OVR::Sizei(width, height);
    const unsigned int distortionCaps = ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp;
    if (!ovrHmd_ConfigureRendering(Hmd, &config, distortionCaps, HmdDesc.DefaultEyeFov, EyeRenderDesc)) {
        std::cerr << "ovrHmd_ConfigureRendering" << std::endl;
        exit(1);
    }
    // Near 0.1 m, far 1000 m, right-handed.
    for (int eye = 0; eye < 2; ++eye)
        Projection[eye] = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, .1f, 1000.0f, true);
}
// Renders one frame: samples the predicted head pose, draws the scene once
// per eye into the shared offscreen FBO, then lets the SDK apply distortion
// and present. The Begin/End pairs (Frame and EyeRender) must bracket the
// work in exactly this order — do not reorder these SDK calls.
void OculusGLWidget::paintGL() {
ovrFrameTiming HmdFrameTiming = ovrHmd_BeginFrame(Hmd, 0);
// Predict the pose at scanout midpoint to minimize perceived latency.
OVR::SensorState ss = ovrHmd_GetSensorState(Hmd, HmdFrameTiming.ScanoutMidpointSeconds);
if (tStart == 0) tStart = ss.Predicted.TimeInSeconds; // first frame defines t = 0
const double dt = ss.Predicted.TimeInSeconds-tStart; // animation time in seconds
offFB->bind();
glEnable(GL_DEPTH_TEST);
#if 1
glClearColor(0.1, 0.0, 0.1, 0.0);
#else // be aware of advanced data like this
// Alternative: tint the clear color by the head's angular velocity.
const auto &v = ss.Predicted.AngularVelocity;
glClearColor(fabs(v.x)*0.2, fabs(v.y)*0.2, fabs(v.z)*0.2, 0.0);
#endif
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnable(GL_TEXTURE_2D);
using namespace OVR;
const Quatf bodyQ(Vector3f(0, 1, 0), bodyYaw); // body orientation (yaw only); head pose comes from the SDK
// Render the eyes in the order the SDK recommends for this HMD.
for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++) {
ovrEyeType eye = HmdDesc.EyeRenderOrder[eyeIndex];
// Restrict drawing to this eye's half of the shared render target.
glViewport(eyeTextures[eye].OGL.Header.RenderViewport.Pos.x,
eyeTextures[eye].OGL.Header.RenderViewport.Pos.y,
eyeTextures[eye].OGL.Header.RenderViewport.Size.w,
eyeTextures[eye].OGL.Header.RenderViewport.Size.h);
ovrPosef eyeRenderPose = ovrHmd_BeginEyeRender(Hmd, eye);
// Compose body transform with the tracked eye pose to get the world pose.
const Transformf worldPose(bodyQ*eyeRenderPose.Orientation, bodyPos+bodyQ.Rotate(eyeRenderPose.Position));
const static Vector3f UpVector(0.0f, 1.0f, 0.0f);
const static Vector3f ForwardVector(0.0f, 0.0f, -1.0f);
const Matrix4f view = Matrix4f::LookAtRH(worldPose.Translation, worldPose.Translation + worldPose.Rotation.Rotate(ForwardVector), worldPose.Rotation.Rotate(UpVector));
// NOTE(review): the full view-projection (incl. the per-eye IPD offset) is
// loaded into GL_PROJECTION while GL_MODELVIEW stays identity. That is fine
// for this unlit demo, but breaks fixed-function lighting — see jherico's
// reply below for the split into GL_PROJECTION + GL_MODELVIEW.
const Matrix4f viewProj = Projection[eye] * Matrix4f::Translation(Vector3f(EyeRenderDesc[eye].ViewAdjust)) * view;
glUseProgram(0); // Fixed function pipeline
glMatrixMode(GL_PROJECTION);
glLoadMatrixf(&viewProj.Transposed().M[0][0]); // OVR matrices are row-major; GL wants column-major
glMatrixMode(GL_MODELVIEW);
drawScene(dt);
ovrHmd_EndEyeRender(Hmd, eye, eyeRenderPose, &eyeTextures[eye].Texture);
}
offFB->release();
ovrHmd_EndFrame(Hmd); // SDK applies distortion + timewarp and swaps buffers
}
// Draws the scene for the currently configured eye: IMG_COUNT rotating
// textured quads plus a large textured ground plane. dt is the animation
// time in seconds since the first rendered frame.
void OculusGLWidget::drawScene(const double &dt) {
    if (myImg.isNull()) return;

    // Lazily (re)create the GL texture; myTex is reset to 0 whenever the
    // downloaded image replaces the placeholder.
    if (!myTex)
        myTex = bindTexture(myImg, GL_TEXTURE_2D, GL_RGBA,
                            QGLContext::LinearFilteringBindOption | QGLContext::InvertedYBindOption);
    glBindTexture(GL_TEXTURE_2D, myTex);

    const GLfloat aspect = (GLfloat)myImg.width()/myImg.height();

    // Emits a unit quad centered on the origin in the XY plane.
    auto emitUnitQuad = []() {
        glBegin(GL_TRIANGLE_STRIP);
        glTexCoord2f(0, 0); glVertex3f(-0.5, -0.5, 0);
        glTexCoord2f(1, 0); glVertex3f(0.5, -0.5, 0);
        glTexCoord2f(0, 1); glVertex3f(-0.5, 0.5, 0);
        glTexCoord2f(1, 1); glVertex3f(0.5, 0.5, 0);
        glEnd();
    };

    // Rotating image quads.
    static const GLfloat imgWidth = 2; // 2 meters wide
    for (int i = 0; i < IMG_COUNT; ++i) {
        const ImgInfo &info = imgInfos[i];
        glLoadIdentity();
        glTranslatef(info.pos.x, info.pos.y, info.pos.z);
        glRotatef(dt*info.yawVelocity, 0, 1, 0);
        glScalef(imgWidth, imgWidth/aspect, 1);
        emitUnitQuad();
    }

    // Ground plane: same texture, laid flat and scaled to the space depth.
    glLoadIdentity();
    glTranslatef(0, 0, -SPACE_DEPTH/aspect*0.5);
    glRotatef(-90, 1, 0, 0);
    glScalef(SPACE_DEPTH, SPACE_DEPTH/aspect, 1);
    emitUnitQuad();
}
// Keyboard handling: 'R' recenters the head-tracking sensor. All other keys
// are forwarded to the base class so they propagate normally.
void OculusGLWidget::keyPressEvent(QKeyEvent *event) {
    switch (event->key()) {
    case Qt::Key_R:
        ovrHmd_ResetSensor(Hmd);
        break;
    default:
        // BUG FIX: unhandled keys were previously swallowed (accepted without
        // being handled); forward them so event->ignore()/shortcut
        // propagation works as Qt expects.
        QGLWidget::keyPressEvent(event);
        break;
    }
}
5 Replies
- cybereality (Grand Champion): Nice work.
- jherico (Adventurer):
const Matrix4f viewProj = Projection[eye] * Matrix4f::Translation(Vector3f(EyeRenderDesc[eye].ViewAdjust)) * view;
glUseProgram(0); // Fixed function pipeline
glMatrixMode(GL_PROJECTION);
glLoadMatrixf(&viewProj.Transposed().M[0][0]);
glMatrixMode(GL_MODELVIEW);
Stuffing both the projection and view matrices into GL_PROJECTION and leaving GL_MODELVIEW as an identity matrix will break as soon as you add any lighting effects to your scene (along with breaking a number of other things... see here).
Do this instead:
glUseProgram(0); // Fixed function pipeline
glMatrixMode(GL_PROJECTION);
glLoadMatrixf(&Projection[eye].Transposed().M[0][0]);
const Matrix4f modelview = Matrix4f::Translation(Vector3f(EyeRenderDesc[eye].ViewAdjust)) * view;
glMatrixMode(GL_MODELVIEW);
glLoadMatrixf(&modelview.Transposed().M[0][0]);
- shubham.2023 (Honored Guest):
Hey, do you have the above code example for today's Oculus SDK after it changed in 2014-2015?
- mrjazz (Honored Guest):
"jherico" wrote:
Stuffing both the projection and view matrices into GL_PROJECTION and leaving GL_MODELVIEW as an identity matrix will break as soon as you add any lighting effects to your scene (along with breaking a number of other things... see here).
Thank you! I didn't think about that. Usually I solely use custom shaders, so I don't have much experience with this fixed pipeline stuff. I just used it here, because I thought it would give a simpler example.
- shubham.2023 (Honored Guest):
Hey, do you have the code example for today's Oculus SDK after it changed in 2014-2015?
Quick Links
- Horizon Developer Support
- Quest User Forums
- Troubleshooting Forum for problems with a game or app
- Quest Support for problems with your device
Other Meta Support
Related Content
- 4 years ago
- 7 months ago