cancel
Showing results for 
Search instead for 
Did you mean: 

Integration of OpenGL Simulator application and Oculus Rift

CMLINDGREN
Honored Guest
I'm trying to integrate an existing Simulator application that uses OpenGL for our graphics engine with the Oculus Rift using SDK 1.12 ... I'm a newbie to both OpenGL and the SDK and have been trying to use what I have learned and researched in the past 3 weeks to make this work. Currently I'm just trying to get the Left eye image working as you will see. When the application is run I do get an image in the left eye, but it is flickering, and then after about 5 seconds the application freezes up and I get a Low Computer Resources warning. Any help would be much appreciated, as my expert in OpenGL left the company 3 weeks ago and I need to get this working by Thursday for a conference next week. Attached is the code I cobbled together ... there are probably multiple issues with it, so don't hold back ... I will not be offended, just very grateful! Lol.

void refreshVR()
{

// Query the HMD for ts current tracking state.
ovrTrackingState ts = ovr_GetTrackingState(m_session, ovr_GetTimeInSeconds(), ovrTrue);
if (ts.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked)) 
{
Posef pose = ts.HeadPose.ThePose;
float yaw; float eyePitch; float eyeRoll;
pose.Rotation.GetEulerAngles<Axis_Y, Axis_X, Axis_Z>(&yaw, &eyePitch, &eyeRoll);

Simulation::SimulationController::VIEW_3D.getModifiable().sv_orientation.set(Math::Vector<3, Math::FastAngle>(
 Math::FastAngle::getAsRadians(eyeRoll)
, Math::FastAngle::getAsRadians(eyePitch)
, Math::FastAngle::getAsRadians(yaw))
);

}

// Initialize VR structures, filling out description.
ovrEyeRenderDesc eyeRenderDesc[2];
ovrVector3f      hmdToEyeViewOffset[2];
ovrHmdDesc hmdDesc = ovr_GetHmdDesc(m_session);
eyeRenderDesc[0] = ovr_GetRenderDesc(m_session, ovrEye_Left, hmdDesc.DefaultEyeFov[0]);
eyeRenderDesc[1] = ovr_GetRenderDesc(m_session, ovrEye_Right, hmdDesc.DefaultEyeFov[1]);
hmdToEyeViewOffset[0] = eyeRenderDesc[0].HmdToEyeOffset;
hmdToEyeViewOffset[1] = eyeRenderDesc[1].HmdToEyeOffset;

ovrTextureSwapChainDesc desc = {};
desc.Type = ovrTexture_2D;
desc.ArraySize = 1;
desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
desc.Width = bufferSize.w;
desc.Height = bufferSize.h;
desc.MipLevels = 1;
desc.SampleCount = 1;
desc.StaticImage = ovrFalse;

ovrSessionStatus sessionStatus;
ovr_GetSessionStatus(m_session, &sessionStatus);

//ovrPosef EyeRenderPose[2];
//double sensorSampleTime;    // sensorSampleTime is fed into the layer later
//ovr_GetEyePoses(m_session, 0, ovrTrue, hmdToEyeViewOffset, EyeRenderPose, &sensorSampleTime);
if (sessionStatus.IsVisible)
{

//glGenFramebuffers(1, &fboId);
glBindFramebuffer(GL_FRAMEBUFFER, fboId);
if (ovr_CreateTextureSwapChainGL(m_session, &desc, &textureSwapChain) == ovrSuccess)
{

ovr_GetTextureSwapChainBufferGL(m_session, textureSwapChain, 0, &texId);

if (!m_is_init)
{

glBindTexture(GL_TEXTURE_2D, texId);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, l_TextureSizeLeft.w, l_TextureSizeLeft.h, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, 0);

glGenRenderbuffers(1, &m_rbo);
glBindRenderbuffer(GL_RENDERBUFFER, m_rbo);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH24_STENCIL8, bufferSize.w, bufferSize.h);
glBindRenderbuffer(GL_RENDERBUFFER, 0);

// Commit the changes to the texture swap chain
ovr_CommitTextureSwapChain(m_session, textureSwapChain);

}

}

glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texId, 0);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, m_rbo);
glBindFramebuffer(GL_FRAMEBUFFER, 0);

glBindFramebuffer(GL_FRAMEBUFFER, fboId);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
{
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_INVALID_FRAMEBUFFER_OPERATION) throw "GL_INVALID_FRAMEBUFFER_OPERATION";
else if (glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_UNDEFINED) throw "GL_FRAMEBUFFER_UNDEFINED";
else if (glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT) throw "GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT";
else if (glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT) throw "GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT";
else if (glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER) throw "GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER";
else if (glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER) throw "GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER";
else if (glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_UNSUPPORTED) throw "GL_FRAMEBUFFER_UNSUPPORTED";
else if (glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE) throw "GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE";
}

Math2IU::Dimension m_size = { (int_U4)l_TextureSizeLeft.w, (int_U4)l_TextureSizeLeft.h };
OGL::OGLController::VIEW_PORT_DIMENSION.set(m_size);
glViewport(0, 0, l_TextureSizeLeft.w, l_TextureSizeLeft.h);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);

}

// Initialize our single full screen Fov layer.
ovrLayerEyeFov layer;
layer.Header.Type = ovrLayerType_EyeFov;
layer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
layer.ColorTexture[0] = textureSwapChain;
layer.ColorTexture[1] = textureSwapChain;
layer.Fov[0] = eyeRenderDesc[0].Fov;
layer.Fov[1] = eyeRenderDesc[1].Fov;
layer.Viewport[0] = Recti(0, 0, bufferSize.w / 2, bufferSize.h);
layer.Viewport[1] = Recti(bufferSize.w / 2, 0, bufferSize.w / 2, bufferSize.h);
//layer.SensorSampleTime = sensorSampleTime;

// Get both eye poses simultaneously, with IPD offset already included.
double displayMidpointSeconds = ovr_GetPredictedDisplayTime(m_session, 0);
ovrTrackingState hmdState = ovr_GetTrackingState(m_session, displayMidpointSeconds, ovrTrue);
ovr_CalcEyePoses(hmdState.HeadPose.ThePose, hmdToEyeViewOffset, layer.RenderPose);

// Submit frame with one layer we have.
ovrLayerHeader* layers = &layer.Header;
ovrResult       result = ovr_SubmitFrame(m_session, 0, nullptr, &layers, 1);

sessionStatus.IsVisible = (result == ovrSuccess);

}
3 REPLIES 3

CMLINDGREN
Honored Guest
Ok, made some progress ... now I just have a jittery or stuttering image being displayed; the crash is gone. Anyone have an idea? It almost looks like there is a main image and another flickering on top of it. Also, my main window on the computer ends up frozen while wearing the headset, and in the end we don't want that. Thanks in advance.

void refreshVR()
{

// Query the HMD for ts current tracking state.
ovrTrackingState ts = ovr_GetTrackingState(m_session, ovr_GetTimeInSeconds(), ovrTrue);
if (ts.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked)) 
{
Posef pose = ts.HeadPose.ThePose;
float yaw; float eyePitch; float eyeRoll;
pose.Rotation.GetEulerAngles<Axis_Y, Axis_X, Axis_Z>(&yaw, &eyePitch, &eyeRoll);

Simulation::SimulationController::VIEW_3D.getModifiable().sv_orientation.set(Math::Vector<3, Math::FastAngle>(
 Math::FastAngle::getAsRadians(eyeRoll)
, Math::FastAngle::getAsRadians(eyePitch)
, Math::FastAngle::getAsRadians(yaw))
);

}

// Initialize VR structures, filling out description.
ovrEyeRenderDesc eyeRenderDesc[2];
ovrVector3f      hmdToEyeViewOffset[2];
ovrHmdDesc hmdDesc = ovr_GetHmdDesc(m_session);
eyeRenderDesc[0] = ovr_GetRenderDesc(m_session, ovrEye_Left, hmdDesc.DefaultEyeFov[0]);
eyeRenderDesc[1] = ovr_GetRenderDesc(m_session, ovrEye_Right, hmdDesc.DefaultEyeFov[1]);
hmdToEyeViewOffset[0] = eyeRenderDesc[0].HmdToEyeOffset;
hmdToEyeViewOffset[1] = eyeRenderDesc[1].HmdToEyeOffset;

ovrTextureSwapChainDesc desc = {};
desc.Type = ovrTexture_2D;
desc.ArraySize = 1;
desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
desc.Width = bufferSize.w;
desc.Height = bufferSize.h;
desc.MipLevels = 1;
desc.SampleCount = 1;
desc.StaticImage = ovrFalse;

ovrSessionStatus sessionStatus;
ovr_GetSessionStatus(m_session, &sessionStatus);

ovrPosef EyeRenderPose[2];
double sensorSampleTime;    // sensorSampleTime is fed into the layer later
ovr_GetEyePoses(m_session, 0, ovrTrue, hmdToEyeViewOffset, EyeRenderPose, &sensorSampleTime);
GLuint chainTexId;

if (sessionStatus.IsVisible)
{

if (!m_is_init)
{

if (ovr_CreateTextureSwapChainGL(m_session, &desc, &textureSwapChain) == ovrSuccess)
{

int length = 0;
ovr_GetTextureSwapChainLength(m_session, textureSwapChain, &length);

for (int i = 0; i < length; ++i)
{
ovr_GetTextureSwapChainBufferGL(m_session, textureSwapChain, 0, &chainTexId);

glBindTexture(GL_TEXTURE_2D, chainTexId);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, l_TextureSizeLeft.w, l_TextureSizeLeft.h, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, chainTexId, 0);
glBindTexture(GL_TEXTURE_2D, 0);

glGenRenderbuffers(1, &m_rbo);
glBindRenderbuffer(GL_RENDERBUFFER, m_rbo);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH24_STENCIL8, bufferSize.w, bufferSize.h);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, m_rbo);
glBindRenderbuffer(GL_RENDERBUFFER, 0);

}

m_is_init = true;

}

}
else
{
int currentIndex = 0;
ovr_GetTextureSwapChainCurrentIndex(m_session, textureSwapChain, &currentIndex);
ovr_GetTextureSwapChainBufferGL(m_session, textureSwapChain, currentIndex, &texId);
}

glBindFramebuffer(GL_FRAMEBUFFER, fboId);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texId, 0);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, m_rbo);

Math2IU::Dimension m_size = { (int_U4)l_TextureSizeLeft.w, (int_U4)l_TextureSizeLeft.h };
OGL::OGLController::VIEW_PORT_DIMENSION.set(m_size);
glViewport(0, 0, l_TextureSizeLeft.w, l_TextureSizeLeft.h);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);

ovr_CommitTextureSwapChain(m_session, textureSwapChain);

}

// Initialize our single full screen Fov layer.
ovrLayerEyeFov layer;
layer.Header.Type = ovrLayerType_EyeFov;
layer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
layer.ColorTexture[0] = textureSwapChain;
layer.ColorTexture[1] = textureSwapChain;
layer.Fov[0] = eyeRenderDesc[0].Fov;
layer.Fov[1] = eyeRenderDesc[1].Fov;
layer.Viewport[0] = Recti(0, 0, bufferSize.w / 2, bufferSize.h);
layer.Viewport[1] = Recti(bufferSize.w / 2, 0, bufferSize.w / 2, bufferSize.h);
layer.SensorSampleTime = sensorSampleTime;

// Get both eye poses simultaneously, with IPD offset already included.
double displayMidpointSeconds = ovr_GetPredictedDisplayTime(m_session, 0);
ovrTrackingState hmdState = ovr_GetTrackingState(m_session, displayMidpointSeconds, ovrTrue);
ovr_CalcEyePoses(hmdState.HeadPose.ThePose, hmdToEyeViewOffset, layer.RenderPose);

// Submit frame with one layer we have.
ovrLayerHeader* layers = &layer.Header;
ovrResult       result = ovr_SubmitFrame(m_session, 0, nullptr, &layers, 1);

sessionStatus.IsVisible = (result == ovrSuccess);

}

CMLINDGREN
Honored Guest
Thank you. I did solve the jittery issue and have the Left eye being rendered and displayed in the Rift. I will post that code in a minute. Now I'm trying to understand how to do the Right Eye as it looks like I need to do a Render in between the two eyes?

CMLINDGREN
Honored Guest
The latest ... I can get the left eye or right eye to work independently, but not together yet. Thus I can see the scene in the left or right eye depending on the Viewport settings, but I am not sure of the process to handle both at the same time.

void refreshVRLeft()
{

// Query the HMD for ts current tracking state.
ovrTrackingState ts = ovr_GetTrackingState(m_session, ovr_GetTimeInSeconds(), ovrTrue);
if (ts.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked)) 
{
Posef pose = ts.HeadPose.ThePose;
float yaw; float eyePitch; float eyeRoll;
pose.Rotation.GetEulerAngles<Axis_Y, Axis_X, Axis_Z>(&yaw, &eyePitch, &eyeRoll);

Simulation::SimulationController::VIEW_3D.getModifiable().sv_orientation.set(Math::Vector<3, Math::FastAngle>(
 Math::FastAngle::getAsRadians(eyeRoll)
, Math::FastAngle::getAsRadians(eyePitch)
, Math::FastAngle::getAsRadians(yaw))
);

}

// Initialize VR structures, filling out description.
ovrEyeRenderDesc eyeRenderDesc[2];
ovrVector3f      hmdToEyeViewOffset[2];
ovrHmdDesc hmdDesc = ovr_GetHmdDesc(m_session);
eyeRenderDesc[0] = ovr_GetRenderDesc(m_session, ovrEye_Left, hmdDesc.DefaultEyeFov[0]);
eyeRenderDesc[1] = ovr_GetRenderDesc(m_session, ovrEye_Right, hmdDesc.DefaultEyeFov[1]);
hmdToEyeViewOffset[0] = eyeRenderDesc[0].HmdToEyeOffset;
hmdToEyeViewOffset[1] = eyeRenderDesc[1].HmdToEyeOffset;

ovrTextureSwapChainDesc desc = {};
desc.Type = ovrTexture_2D;
desc.ArraySize = 1;
desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
desc.Width = bufferSize.w;
desc.Height = bufferSize.h;
desc.MipLevels = 1;
desc.SampleCount = 1;
desc.StaticImage = ovrFalse;

ovrSessionStatus sessionStatus;
ovr_GetSessionStatus(m_session, &sessionStatus);

ovrPosef EyeRenderPose[2];
double sensorSampleTime;    // sensorSampleTime is fed into the layer later
ovr_GetEyePoses(m_session, 0, ovrTrue, hmdToEyeViewOffset, EyeRenderPose, &sensorSampleTime);
GLuint chainTexId;

if (sessionStatus.IsVisible)
{

if (!m_is_init)
{

if (ovr_CreateTextureSwapChainGL(m_session, &desc, &textureSwapChain) == ovrSuccess)
{

int length = 0;
ovr_GetTextureSwapChainLength(m_session, textureSwapChain, &length);

for (int i = 0; i < length; ++i)
{
ovr_GetTextureSwapChainBufferGL(m_session, textureSwapChain, 0, &chainTexId);

glBindTexture(GL_TEXTURE_2D, chainTexId);
//glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, l_TextureSizeLeft.w, l_TextureSizeLeft.h, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, bufferSize.w, bufferSize.h, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, chainTexId, 0);
glBindTexture(GL_TEXTURE_2D, 0);

glGenRenderbuffers(1, &m_rbo);
glBindRenderbuffer(GL_RENDERBUFFER, m_rbo);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH24_STENCIL8, bufferSize.w, bufferSize.h);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, m_rbo);
glBindRenderbuffer(GL_RENDERBUFFER, 0);

}

m_is_init = true;

}

}
else
{
ovr_CommitTextureSwapChain(m_session, textureSwapChain);
int currentIndex = 0;
ovr_GetTextureSwapChainCurrentIndex(m_session, textureSwapChain, &currentIndex);
ovr_GetTextureSwapChainBufferGL(m_session, textureSwapChain, currentIndex, &texId);
}

glBindFramebuffer(GL_FRAMEBUFFER, fboId);

glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texId, 0);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, m_rbo);

Math2IU::Dimension m_size = { (int_U4)l_TextureSizeLeft.w, (int_U4)l_TextureSizeLeft.h };
OGL::OGLController::VIEW_PORT_DIMENSION.set(m_size);
glViewport(0, 0, l_TextureSizeLeft.w, l_TextureSizeLeft.h);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);

}

// Initialize our single full screen Fov layer.
ovrLayerEyeFov layer;
layer.Header.Type = ovrLayerType_EyeFov;
layer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
layer.ColorTexture[0] = textureSwapChain;
layer.ColorTexture[1] = textureSwapChain;
layer.Fov[0] = eyeRenderDesc[0].Fov;
layer.Fov[1] = eyeRenderDesc[1].Fov;
layer.Viewport[0] = Recti(0, 0, bufferSize.w / 2, bufferSize.h);
layer.Viewport[1] = Recti(bufferSize.w / 2, 0, bufferSize.w / 2, bufferSize.h);
layer.SensorSampleTime = sensorSampleTime;

// Get both eye poses simultaneously, with IPD offset already included.
double displayMidpointSeconds = ovr_GetPredictedDisplayTime(m_session, 0);
ovrTrackingState hmdState = ovr_GetTrackingState(m_session, displayMidpointSeconds, ovrTrue);
ovr_CalcEyePoses(hmdState.HeadPose.ThePose, hmdToEyeViewOffset, layer.RenderPose);

// Submit frame with one layer we have.
ovrLayerHeader* layers = &layer.Header;
ovrResult       result = ovr_SubmitFrame(m_session, 0, nullptr, &layers, 1);

sessionStatus.IsVisible = (result == ovrSuccess);

}