Skip to content

Instantly share code, notes, and snippets.

@ejulien
Created September 17, 2019 21:16
Show Gist options
  • Save ejulien/204d9910000e83754159a4e99084a16e to your computer and use it in GitHub Desktop.
Harfang 2 OpenVR API
// HARFANG(R) Copyright (C) 2019 Emmanuel Julien, Movida Production.
#include <bgfx/bgfx.h>
#include <bgfx/platform.h>
#include "engine/openvr_api.h"
#include "engine/scene.h"
#include "foundation/format.h"
#include "foundation/log.h"
#include "foundation/math.h"
#include "platform/input_system.h"
#include <array>
#include <vector>
#if HG_ENABLE_OPENVR_API
#include <openvr.h>
namespace hg {
// Active OpenVR system interface; nullptr until OpenVRInit() succeeds.
static vr::IVRSystem *vr_system = nullptr;
// Per-device snapshot refreshed each frame by OpenVRGetState().
struct OpenVRTrackedDeviceState {
bool connected; // IsTrackedDeviceConnected() result for this index
Mat4 mtx; // device pose in world space (body * tracking-space pose)
vr::VRControllerState_t state; // raw button/axis state from GetControllerState()
vr::ETrackedDeviceClass device_class; // controller, generic tracker, base station, ...
uint16_t haptic_pulse; // pending pulse duration in microseconds; fired then cleared by OpenVRGetState()
};
// One slot per possible tracked device index (slot 0 is the HMD).
static std::array<OpenVRTrackedDeviceState, vr::k_unMaxTrackedDeviceCount> openvr_tracked_device_states = {};
// Recommended per-eye render target size, filled in by OpenVRInit().
static uint32_t rt_width = 0, rt_height = 0;
// Convert an OpenVR 4x4 projection matrix to the engine's Mat44.
// The indices are swapped (m.m[col][row]) because OpenVR matrices are transposed
// relative to the engine's convention.
static Mat44 OVRToMat44(const vr::HmdMatrix44_t &m) {
// Basis-change matrix flipping the Z axis (OpenVR is right-handed, engine is left-handed
// judging by OVRToMat4 below — TODO confirm).
static Mat44 VR_to_gs(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1);
// NOTE(review): the left-hand VR_to_gs multiply is deliberately commented out —
// presumably only the post-multiply is needed for projection matrices; verify before re-enabling.
return /*VR_to_gs * */ Mat44(m.m[0][0], m.m[1][0], m.m[2][0], m.m[3][0], m.m[0][1], m.m[1][1], m.m[2][1], m.m[3][1], m.m[0][2], m.m[1][2], m.m[2][2],
m.m[3][2], m.m[0][3], m.m[1][3], m.m[2][3], m.m[3][3]) *
VR_to_gs;
}
// Convert an OpenVR 3x4 pose matrix to the engine's Mat4.
// The matrix is transposed during construction (m.m[col][row]) and conjugated by
// VR_to_gs (applied on both sides) to change handedness.
static Mat4 OVRToMat4(const vr::HmdMatrix34_t &m) {
// Z-axis flip; applying it on both sides converts rotation AND translation between conventions.
static Mat4 VR_to_gs(1, 0, 0, 0, 1, 0, 0, 0, -1, 0, 0, 0);
return VR_to_gs * Mat4(m.m[0][0], m.m[1][0], m.m[2][0], m.m[0][1], m.m[1][1], m.m[2][1], m.m[0][2], m.m[1][2], m.m[2][2], m.m[0][3], m.m[1][3], m.m[2][3]) *
VR_to_gs;
}
// Fetch a string property for a tracked device.
//
// system: valid IVRSystem interface (not null).
// unDevice: tracked device index to query.
// prop: which string property to read.
// peError: optional out-parameter receiving the OpenVR error code.
//
// Returns the property value, or an empty string if the property has no content.
static std::string GetStringTrackedDeviceProperty_(
	vr::IVRSystem *system, vr::TrackedDeviceIndex_t unDevice, vr::TrackedDeviceProperty prop, vr::TrackedPropertyError *peError = nullptr) {
	// First call with a null buffer returns the required size, INCLUDING the terminating NUL.
	const auto len = system->GetStringTrackedDeviceProperty(unDevice, prop, nullptr, 0, peError);
	if (!len)
		return {};
	std::string buffer(len, 'x');
	system->GetStringTrackedDeviceProperty(unDevice, prop, &buffer[0], len, peError);
	// The API wrote a terminating NUL into the last slot; drop it so the std::string
	// does not carry an embedded '\0' (which breaks comparisons and concatenation).
	buffer.pop_back();
	return buffer;
}
//
// Build the engine-facing controller state for the tracked device at compile-time index idx.
// Reads the snapshot filled in by OpenVRGetState(); returns a fully-populated VRControllerState.
template <int idx> VRControllerState OpenVRControllerReader() {
	const auto &vr_state = openvr_tracked_device_states[idx];

	VRControllerState state;
	// Only report connected when the device really is a controller; the same index range
	// also holds trackers and base stations.
	state.connected = vr_state.device_class == vr::TrackedDeviceClass::TrackedDeviceClass_Controller ? vr_state.connected : false;
	state.world = vr_state.mtx;

	// Engine button -> OpenVR button id mapping, kept in one table instead of
	// 28 hand-written assignment lines (pressed + touched share the same mapping).
	static const struct {
		int vrcb;
		vr::EVRButtonId id;
	} button_map[] = {
		{VRCB_DPad_Up, vr::k_EButton_DPad_Up},
		{VRCB_DPad_Down, vr::k_EButton_DPad_Down},
		{VRCB_DPad_Left, vr::k_EButton_DPad_Left},
		{VRCB_DPad_Right, vr::k_EButton_DPad_Right},
		{VRCB_System, vr::k_EButton_System},
		{VRCB_AppMenu, vr::k_EButton_ApplicationMenu},
		{VRCB_Grip, vr::k_EButton_Grip},
		{VRCB_A, vr::k_EButton_A},
		{VRCB_ProximitySensor, vr::k_EButton_ProximitySensor},
		{VRCB_Axis0, vr::k_EButton_Axis0},
		{VRCB_Axis1, vr::k_EButton_Axis1},
		{VRCB_Axis2, vr::k_EButton_Axis2},
		{VRCB_Axis3, vr::k_EButton_Axis3},
		{VRCB_Axis4, vr::k_EButton_Axis4},
	};

	for (const auto &button : button_map) {
		const auto mask = vr::ButtonMaskFromId(button.id);
		state.pressed[button.vrcb] = (vr_state.state.ulButtonPressed & mask) != 0;
		state.touched[button.vrcb] = (vr_state.state.ulButtonTouched & mask) != 0;
	}

	// Copy the five 2D axis surfaces (touchpad/thumbstick/trigger positions).
	for (int i = 0; i < 5; ++i)
		state.surface[i] = {vr_state.state.rAxis[i].x, vr_state.state.rAxis[i].y};

	return state;
}
// Queue a haptic pulse on the tracked device at compile-time index idx; the pulse is
// fired (and the request cleared) by the next OpenVRGetState() call.
// duration is clamped to 65535 µs, the maximum a single TriggerHapticPulse accepts.
template <int idx> void OpenVRControllerSendHapticPulse(time_ns duration) {
	// BUGFIX: was Max<int64_t>(...), which always produced >= 65535 instead of
	// clamping the duration to the 65535 µs upper bound.
	openvr_tracked_device_states[idx].haptic_pulse = numeric_cast<uint16_t, int64_t>(Min<int64_t>(time_to_us(duration), 65535));
}
// Build the engine-facing generic tracker state for the tracked device at
// compile-time index idx, from the snapshot filled in by OpenVRGetState().
template <int idx> VRGenericTrackerState OpenVRGenericTrackerReader() {
	const auto &device = openvr_tracked_device_states[idx];

	VRGenericTrackerState out;
	// Report connected only when this index actually holds a generic tracker.
	if (device.device_class == vr::TrackedDeviceClass::TrackedDeviceClass_GenericTracker)
		out.connected = device.connected;
	else
		out.connected = false;
	out.world = device.mtx;
	return out;
}
//
bool OpenVRInit() {
if (vr_system)
return true; // setup already
vr::EVRInitError eError = vr::VRInitError_None;
vr_system = vr::VR_Init(&eError, vr::VRApplication_Scene);
if (!vr_system) {
error(format("OpenVR initialization failed: %1").arg(vr::VR_GetVRInitErrorAsEnglishDescription(eError)));
return false; // initialization failure
}
const auto driver = GetStringTrackedDeviceProperty_(vr_system, vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_TrackingSystemName_String);
const auto display = GetStringTrackedDeviceProperty_(vr_system, vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SerialNumber_String);
debug(format("OpenVR driver %2 initialized on display %1").arg(display).arg(driver));
vr_system->GetRecommendedRenderTargetSize(&rt_width, &rt_height);
debug(format("OpenVR recommended render target size %1x%2").arg(rt_width).arg(rt_height));
return true;
}
// Shut down the OpenVR runtime and clear the cached system interface.
// Safe to call when OpenVR was never initialized (or was already shut down):
// the guard makes shutdown idempotent, mirroring the guard in OpenVRInit().
void OpenVRShutdown() {
	if (!vr_system)
		return; // not initialized, nothing to do
	debug("OpenVR shutting down");
	vr::VR_Shutdown();
	vr_system = nullptr;
}
//
// Sample the OpenVR runtime for this frame: waits on the compositor for fresh poses,
// fills the module-level per-device snapshots, and returns head/eye state.
//
// body: world transform of the user's body; all poses are pre-multiplied by it.
// znear/zfar: clip planes used to build the per-eye projection matrices.
//
// NOTE(review): vr_system is dereferenced without a null check — callers must only
// invoke this after OpenVRInit() succeeded.
OpenVRState OpenVRGetState(const Mat4 &body, float znear, float zfar) {
vr::TrackedDevicePose_t m_rTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
// Blocks until the compositor has poses for the next frame (this paces the app to the HMD).
vr::VRCompositor()->WaitGetPoses(m_rTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, nullptr, 0);
OpenVRState state;
state.body = body;
const auto &hmd_pose = m_rTrackedDevicePose[vr::k_unTrackedDeviceIndex_Hmd];
// Head/inv_head are left untouched when the pose is invalid (keeps the previous values in state's defaults).
if (hmd_pose.bPoseIsValid) {
state.head = body * OVRToMat4(hmd_pose.mDeviceToAbsoluteTracking);
state.inv_head = InverseFast(state.head);
}
state.width = rt_width;
state.height = rt_height;
// Per-eye offsets from head plus projection matrices for the requested clip planes.
state.left.offset = OVRToMat4(vr_system->GetEyeToHeadTransform(vr::Eye_Left));
state.right.offset = OVRToMat4(vr_system->GetEyeToHeadTransform(vr::Eye_Right));
state.left.projection = OVRToMat44(vr_system->GetProjectionMatrix(vr::Eye_Left, znear, zfar));
state.right.projection = OVRToMat44(vr_system->GetProjectionMatrix(vr::Eye_Right, znear, zfar));
// Refresh every non-HMD tracked device slot (controllers, trackers, base stations).
for (vr::TrackedDeviceIndex_t unTrackedDevice = vr::k_unTrackedDeviceIndex_Hmd + 1; unTrackedDevice < vr::k_unMaxTrackedDeviceCount; ++unTrackedDevice) {
auto &vr_state = openvr_tracked_device_states[unTrackedDevice];
vr_state.connected = vr_system->IsTrackedDeviceConnected(unTrackedDevice);
vr_state.device_class = vr_system->GetTrackedDeviceClass(unTrackedDevice);
vr_state.state = {};
vr_system->GetControllerState(unTrackedDevice, &vr_state.state, sizeof(vr::VRControllerState_t));
if (m_rTrackedDevicePose[unTrackedDevice].bPoseIsValid)
vr_state.mtx = body * OVRToMat4(m_rTrackedDevicePose[unTrackedDevice].mDeviceToAbsoluteTracking);
// Fire any pulse queued via OpenVRControllerSendHapticPulse, then clear the request.
if (vr_state.haptic_pulse > 0)
vr_system->TriggerHapticPulse(unTrackedDevice, 0, vr_state.haptic_pulse);
vr_state.haptic_pulse = 0;
}
return state;
}
void OpenVRSubmitFrame(void *left_eye_texture, void *right_eye_texture) {
if (!vr::VRCompositor())
return;
vr::Texture_t leftEyeTexture = {left_eye_texture, vr::TextureType_DirectX, vr::ColorSpace_Gamma};
vr::VRCompositor()->Submit(vr::Eye_Left, &leftEyeTexture);
vr::Texture_t rightEyeTexture = {right_eye_texture, vr::TextureType_DirectX, vr::ColorSpace_Gamma};
vr::VRCompositor()->Submit(vr::Eye_Right, &rightEyeTexture);
}
//
// Create a per-eye render target at the runtime's recommended size, with optional MSAA.
// When MSAA is on, a separate non-multisampled 'resolve' texture is created and its
// native handle is handed to the compositor; otherwise the color target is used directly.
//
// NOTE(review): bgfx::frame() mid-function forces texture creation on the render thread
// before bgfx::overrideInternal can grab a native handle — the call order here matters.
OpenVREyeFrameBuffer OpenVRCreateEyeFrameBuffer(OpenVRAA aa) {
OpenVREyeFrameBuffer eye;
// Index by OpenVRAA enum value: None, MSAA2x, 4x, 8x, 16x (assumed ordering — TODO confirm against OpenVRAA declaration).
static const uint64_t ovraa_flags[] = {0, BGFX_TEXTURE_RT_MSAA_X2, BGFX_TEXTURE_RT_MSAA_X4, BGFX_TEXTURE_RT_MSAA_X8, BGFX_TEXTURE_RT_MSAA_X16};
eye.color = bgfx::createTexture2D(rt_width, rt_height, false, 1, bgfx::TextureFormat::RGBA8, BGFX_TEXTURE_RT | ovraa_flags[aa]);
// Plain texture to resolve the MSAA color target into (only needed when MSAA is enabled).
if (aa != OVRAA_None)
eye.resolve = bgfx::createTexture2D(rt_width, rt_height, false, 1, bgfx::TextureFormat::RGBA8, BGFX_TEXTURE_NONE);
bgfx::frame(); // so that the texture gets created
// Grab the native (DirectX) handle the compositor will consume: the resolve texture
// with MSAA, the color target without.
if (aa != OVRAA_None)
eye.native = bgfx::overrideInternal(eye.resolve, rt_width, rt_height, 1, bgfx::TextureFormat::RGBA8, BGFX_TEXTURE_READ_BACK);
else
eye.native = bgfx::overrideInternal(eye.color, rt_width, rt_height, 1, bgfx::TextureFormat::RGBA8, BGFX_TEXTURE_RT);
eye.depth = bgfx::createTexture2D(rt_width, rt_height, false, 1, bgfx::TextureFormat::D24, BGFX_TEXTURE_RT_WRITE_ONLY | ovraa_flags[aa]);
bgfx::TextureHandle h[2] = {eye.color, eye.depth};
eye.fb = bgfx::createFrameBuffer(2, h, false);
return eye;
}
// Destroy all bgfx resources owned by an eye framebuffer and reset its handles
// so a double-destroy is detectable.
void OpenVRDestroyEyeFrameBuffer(OpenVREyeFrameBuffer &eye_fb) {
	bgfx::destroy(eye_fb.fb);
	eye_fb.fb = {bgfx::kInvalidHandle};
	bgfx::destroy(eye_fb.color);
	eye_fb.color = {bgfx::kInvalidHandle};
	bgfx::destroy(eye_fb.depth);
	eye_fb.depth = {bgfx::kInvalidHandle};
	// BUGFIX: the resolve texture only exists when the framebuffer was created with MSAA
	// (see OpenVRCreateEyeFrameBuffer); destroying an invalid handle trips bgfx's
	// handle validation, so guard it.
	if (bgfx::isValid(eye_fb.resolve))
		bgfx::destroy(eye_fb.resolve);
	eye_fb.resolve = {bgfx::kInvalidHandle};
	eye_fb.native = 0;
}
// Blit each eye's MSAA color target into its single-sample resolve texture.
// Eyes created without MSAA have no resolve texture and are skipped.
void OpenVRResolveMSAA(bgfx::ViewId view, const OpenVREyeFrameBuffer &left, const OpenVREyeFrameBuffer &right) {
	const OpenVREyeFrameBuffer *eyes[] = {&left, &right};
	for (const auto *eye : eyes)
		if (bgfx::isValid(eye->resolve))
			bgfx::blit(view, eye->resolve, 0, 0, eye->color, 0, 0, rt_width, rt_height);
}
void OpenVRSubmitFrame(const OpenVREyeFrameBuffer &left, const OpenVREyeFrameBuffer &right) { OpenVRSubmitFrame((void *)left.native, (void *)right.native); }
//
void OpenVRStateToViewState(const OpenVRState &state, ViewState &left, ViewState &right) {
left.view = InverseFast(state.head * state.left.offset);
left.proj = state.left.projection;
left.frustum = MakeFrustum(state.left.projection * left.view);
right.view = InverseFast(state.head * state.right.offset);
right.proj = state.right.projection;
right.frustum = MakeFrustum(state.right.projection * right.view);
}
} // namespace hg
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment