Skip to content

Instantly share code, notes, and snippets.

@JSandusky
Last active July 11, 2020 03:19
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save JSandusky/99502966989ceb6652d711bcc9124c75 to your computer and use it in GitHub Desktop.
Save JSandusky/99502966989ceb6652d711bcc9124c75 to your computer and use it in GitHub Desktop.
Urho3D OpenVR

Stuff you have to fix to use it

  • I only scan the first language entry when reading action names from the JSON action manifest
    • you probably want all the names keyed on language
  • You probably need to remove the singlePass (not the same as singleTexture) code from it
    • Read about single-pass stereo instancing if you want to try to implement
    • You definitely want to replace VertexBuffer element hashes with CRC32
    • replace VertexElement's instanced flag with an instanceStep integer (this is why you need a CRC32 instead)
    • Add SetElements to VertexBuffer, just set the elements and then call UpdateOffsets()
      • have to check whether running in VR and then regenerate the instancing elements based on isVR (for the instancing step rate)
    • Add left/right cameras to Viewport
    • Add isVR all over the place to batch drawing and in viewport checks
    • Add a View::SetVRCameraShaderParameters complement to View::SetCameraShaderParameters that uploads matrix and vector arrays when isVR
    • When isVR
      • All batch draws become instanced draws of at least 2, if it's already instanced then multiply by 2
      • Update shaders to look at instanceID & 1 for indexing cbuffer params
      • Vertex shader must clip and shift based on the eye
// BatchGroup::Draw, instanced case
// WARNING: if you don't do this then shadows won't work
// Rebuild the instancing vertex elements whenever the buffer's step rate
// disagrees with the current mode: non-VR wants a step != 2, VR wants step 2.
// NOTE(review): the "- 3" presumably excludes the three instancing
// transform rows before regenerating the element list — confirm against
// CreateInstancingBufferElements.
if (!isVR && instanceBuffer->GetElements()[0].instStep_ == 2)
    instanceBuffer->SetElements(CreateInstancingBufferElements(instanceBuffer->GetElements().Size() - 3, false));
else if (isVR && instanceBuffer->GetElements()[0].instStep_ != 2)
    instanceBuffer->SetElements(CreateInstancingBufferElements(instanceBuffer->GetElements().Size() - 3, true));
#include "VR.h"
#include "../Graphics/Camera.h"
#include "../IO/FileSystem.h"
#include "../Graphics/Graphics.h"
#include "../Graphics/Geometry.h"
#include "../IO/Log.h"
#include "../Graphics/Material.h"
#include "../Graphics/Model.h"
#include "../Scene/Node.h"
#include "../Graphics/RenderPath.h"
#include "../Resource/ResourceCache.h"
#include "../Scene/Scene.h"
#include "../Graphics/StaticModel.h"
#include "../Graphics/Texture.h"
#include "../Graphics/Texture2D.h"
#include "../Graphics/View.h"
#include "../Graphics/VertexBuffer.h"
#include "../Graphics/IndexBuffer.h"
#include "../Engine/Engine.h"
#include "../Core/CoreEvents.h"
#include "../Graphics/GraphicsEvents.h"
#include "../Scene/SceneEvents.h"
#include "../Graphics/Shader.h"
#include "../Resource/JSONFile.h"
#include "../Resource/JSONValue.h"
#include "../Resource/XMLFile.h"
#include "VREvents.h"
#include <thread>
#include <chrono>
#pragma optimize("", off)
namespace Urho3D
{
/// Queries a string property from a tracked device.
/// @param system active IVRSystem interface (must be non-null)
/// @param index  tracked device slot to query
/// @param prop   which string property to fetch
/// @return the property value, or an empty String on any error.
String GetTrackedDevicePropString(vr::IVRSystem* system, vr::TrackedDeviceIndex_t index, vr::TrackedDeviceProperty prop)
{
    char name[512];
    vr::ETrackedPropertyError err;
    // BUGFIX: previously hardcoded vr::Prop_RenderModelName_String here,
    // silently ignoring the 'prop' parameter callers passed in.
    auto len = system->GetStringTrackedDeviceProperty(index, prop, name, sizeof(name), &err);
    if (err == vr::TrackedProp_Success)
        return String(name, len);
    return String();
}
/// Maps an OpenVR compositor error code to a human-readable string.
/// Unrecognized codes (and VRCompositorError_None) map to "None".
String VR_CompositorError(vr::EVRCompositorError err)
{
    switch (err)
    {
    case vr::VRCompositorError_RequestFailed:                return "Request Failed";
    case vr::VRCompositorError_IncompatibleVersion:          return "Incompatible Version";
    case vr::VRCompositorError_DoNotHaveFocus:               return "Do Not Have Focus";
    case vr::VRCompositorError_InvalidTexture:               return "Invalid Texture";
    case vr::VRCompositorError_IsNotSceneApplication:        return "Is Not Scene Application";
    case vr::VRCompositorError_TextureIsOnWrongDevice:       return "Texture is on wrong device";
    case vr::VRCompositorError_TextureUsesUnsupportedFormat: return "Uses unsupported format";
    case vr::VRCompositorError_SharedTexturesNotSupported:   return "Shared textures not supported";
    case vr::VRCompositorError_IndexOutOfRange:              return "Index out of range";
    case vr::VRCompositorError_AlreadySubmitted:             return "Already submitted";
    case vr::VRCompositorError_InvalidBounds:                return "Invalid Bounds";
    case vr::VRCompositorError_AlreadySet:                   return "Already Set";
    case vr::VRCompositorError_None:
    default:
        return "None";
    }
}
/// Maps an OpenVR input-subsystem error code to a human-readable string.
/// Unrecognized codes (and VRInputError_None) map to "None".
String VR_InputError(vr::EVRInputError err)
{
    switch (err)
    {
    case vr::VRInputError_NameNotFound:        return "Name not found";
    case vr::VRInputError_WrongType:           return "Wrong type";
    case vr::VRInputError_InvalidHandle:       return "Invalid handle";
    case vr::VRInputError_InvalidParam:        return "Invalid param";
    case vr::VRInputError_NoSteam:             return "No Steam";
    case vr::VRInputError_MaxCapacityReached:  return "Max capacity reached";
    case vr::VRInputError_IPCError:            return "IPC Error";
    case vr::VRInputError_NoActiveActionSet:   return "No active action set";
    case vr::VRInputError_InvalidDevice:       return "Invalid device";
    case vr::VRInputError_InvalidSkeleton:     return "Invalid skeleton";
    case vr::VRInputError_InvalidBoneCount:    return "Invalid bone count";
    case vr::VRInputError_InvalidCompressedData: return "Invalid compressed data";
    case vr::VRInputError_NoData:              return "No data";
    case vr::VRInputError_BufferTooSmall:      return "Buffer too small";
    // BUGFIX: corrected typo "manfiest" -> "manifest" in the message.
    case vr::VRInputError_MismatchedActionManifest: return "Mismatched action manifest";
    case vr::VRInputError_MissingSkeletonData: return "Missing skeleton data";
    case vr::VRInputError_InvalidBoneIndex:    return "Invalid bone index";
    case vr::VRInputError_InvalidPriority:     return "Invalid priority";
    case vr::VRInputError_PermissionDenied:    return "Permission denied";
    case vr::VRInputError_InvalidRenderModel:  return "Invalid render model";
    }
    return "None";
}
/// Converts an OpenVR vector into Urho's convention by negating Z
/// (handedness flip).
Vector3 VRSystem::ToUrho(const vr::HmdVector3_t& v) const
{
    const float x = v.v[0];
    const float y = v.v[1];
    const float z = -v.v[2];
    return Vector3(x, y, z);
}
/// Converts an OpenVR 3x4 row-major pose matrix into Urho's convention:
/// negates the Z-related terms (handedness flip), lifts the Y translation
/// by heightCorrection_, and applies the uniform scaleCorrection_ via a
/// left-multiplied scale matrix.
Matrix4 VRSystem::ToUrho(const vr::HmdMatrix34_t &matPose) const
{
    const auto s = scaleCorrection_;
    return Matrix4(
        s, 0, 0, 0,
        0, s, 0, 0,
        0, 0, s, 0,
        0, 0, 0, 1
    )
    *
    Matrix4(
        matPose.m[0][0], matPose.m[0][1], -matPose.m[0][2], matPose.m[0][3],
        matPose.m[1][0], matPose.m[1][1], -matPose.m[1][2], matPose.m[1][3] + heightCorrection_,
        -matPose.m[2][0], -matPose.m[2][1], matPose.m[2][2], -matPose.m[2][3],
        // BUGFIX: vr::HmdMatrix34_t is float m[3][4] — it has only rows
        // 0..2, so the previous matPose.m[3][...] reads were out of bounds
        // (undefined behavior / garbage in the bottom row). The homogeneous
        // bottom row of an affine transform is always (0, 0, 0, 1).
        0.0f, 0.0f, 0.0f, 1.0f
    );
}
// Converts an OpenVR 4x4 matrix into Urho's Matrix4. Only the Z translation
// term (m[2][3]) is negated here; the commented-out variant below shows an
// earlier full axis-swap experiment that was abandoned.
// NOTE(review): handedness handling differs from the 3x4 overload — confirm
// this matches how GetProjection() consumes the result.
Matrix4 VRSystem::ToUrho(const vr::HmdMatrix44_t &matPose) const
{
//return Matrix4(
// mat.m[0][0], mat.m[1][0], -mat.m[2][0], mat.m[3][0],
// mat.m[0][1], mat.m[1][1], -mat.m[2][1], mat.m[3][1],
// -mat.m[0][2], -mat.m[1][2], mat.m[2][2], -mat.m[3][2],
// mat.m[0][3], mat.m[1][3], mat.m[2][3], mat.m[3][3]
//);
return Matrix4(
matPose.m[0][0], matPose.m[0][1], matPose.m[0][2], matPose.m[0][3],
matPose.m[1][0], matPose.m[1][1], matPose.m[1][2], matPose.m[1][3],
matPose.m[2][0], matPose.m[2][1], matPose.m[2][2], -matPose.m[2][3],
//0, 0, 0, 0
matPose.m[3][0], matPose.m[3][1], matPose.m[3][2], matPose.m[3][3]
);
}
/// Constructs the VR subsystem: hooks frame-begin (pose/event pump) and
/// post-present (compositor submit) events, and marks every tracked-device
/// slot as disconnected until WaitGetPoses fills them in.
VRSystem::VRSystem(Context* ctx) : Object(ctx)
{
    SubscribeToEvent(E_BEGINFRAME, URHO3D_HANDLER(VRSystem, HandlePreUpdate));
    SubscribeToEvent(E_POSTPRESENT, URHO3D_HANDLER(VRSystem, HandlePostRender));
    for (unsigned deviceIdx = 0; deviceIdx < vr::k_unMaxTrackedDeviceCount; ++deviceIdx)
        poses_[deviceIdx].bDeviceIsConnected = false;
}
/// Destructor; releases the OpenVR session and eye textures if still active.
VRSystem::~VRSystem()
{
Shutdown();
}
/// Initializes OpenVR as a scene application, sizes the per-eye render
/// targets (applying renderScale_), and — if the action manifest file
/// exists next to the program — registers input actions and haptics.
/// @param manifestPath path of the action manifest, relative to the program dir
/// @return false on any unrecoverable OpenVR initialization failure.
bool VRSystem::Initialize(const String& manifestPath)
{
    auto engine = GetSubsystem<Engine>();
    engine->SetMaxFps(90); // HMD refresh cap; lowered to 10 while in standby
    vr::EVRInitError error = vr::VRInitError_None;
    auto hmd = vr::VR_Init(&error, vr::VRApplication_Scene);
    if (error != vr::VRInitError_None)
    {
        URHO3D_LOGERROR(vr::VR_GetVRInitErrorAsEnglishDescription(error));
        return false;
    }
    if (!vr::VRCompositor())
    {
        URHO3D_LOGERROR("VR compositor initialization failed");
        return false;
    }
    if (hmd == nullptr)
    {
        URHO3D_LOGERROR("VR system interface initialization failed");
        return false;
    }
    vrSystem_ = hmd;
    // Query the runtime's recommended per-eye resolution, remember the
    // unscaled values, then apply the user render-scale.
    vrSystem_->GetRecommendedRenderTargetSize(&eyeWidth_, &eyeHeight_);
    trueEyeWidth_ = eyeWidth_;
    trueEyeHeight_ = eyeHeight_;
    eyeWidth_ *= renderScale_;
    eyeHeight_ *= renderScale_;
    CreateEyeTextures();
    auto fs = GetSubsystem<FileSystem>();
    auto progDir = AddTrailingSlash(fs->GetProgramDir());
    auto manifestFile = progDir + manifestPath;
    // Input source handles are valid regardless of the manifest.
    vr::VRInput()->GetInputSourceHandle("/user/head", &headInputHandle_);
    vr::VRInput()->GetInputSourceHandle("/user/hand/left", &handInputHandles_[0]);
    vr::VRInput()->GetInputSourceHandle("/user/hand/right", &handInputHandles_[1]);
    if (fs->FileExists(manifestFile))
    {
        vr::VRInput()->SetActionManifestPath(manifestFile.CString());
        ParseManifestFile(manifestFile);
        vr::VRInput()->GetActionSetHandle("/actions/Default", &defaultActionSet_);
        // First look for explicit per-hand haptic actions.
        auto leftGood = vr::VRInput()->GetActionHandle("/actions/Default/out/Haptic_Left", &hapticHandles_[0]);
        auto rightGood = vr::VRInput()->GetActionHandle("/actions/Default/out/Haptic_Right", &hapticHandles_[1]);
        hasHaptics_ = leftGood == vr::VRInputError_None && rightGood == vr::VRInputError_None;
        // Fall back to a single generic haptic action shared by both hands.
        if (!hasHaptics_)
        {
            if (vr::VRInput()->GetActionHandle("/actions/Default/out/Haptic", &hapticHandles_[0]) == vr::VRInputError_None)
            {
                hapticHandles_[1] = hapticHandles_[0];
                hasHaptics_ = true;
            }
        }
    }
    else
    {
        // BUGFIX: previously logged "No haptics found", which misreported
        // the actual condition — the action manifest file itself is missing,
        // so no input actions (not just haptics) were registered.
        URHO3D_LOGWARNING("VR action manifest not found: " + manifestFile);
    }
    LoadHiddenAreaMesh();
    return true;
}
// (Re)creates the eye color and depth-stencil render targets at the current
// eyeWidth_/eyeHeight_ and msaaLevel_. In single-texture mode one
// double-width texture holds both eyes side by side; otherwise each eye gets
// its own color + depth-stencil pair. Existing targets are dropped first so
// this is safe to call again (e.g. after a render-scale change).
void VRSystem::CreateEyeTextures()
{
sharedTexture_.Reset();
leftTexture_.Reset();
rightTexture_.Reset();
sharedDS_.Reset();
leftDS_.Reset();
rightDS_.Reset();
if (useSingleTexture_)
{
// One texture, both eyes: width is doubled, each eye renders to a half.
sharedTexture_ = new Texture2D(GetContext());
sharedTexture_->SetNumLevels(1);
sharedTexture_->SetSize(eyeWidth_ * 2, eyeHeight_, Graphics::GetRGBFormat(), TEXTURE_RENDERTARGET, msaaLevel_);
sharedTexture_->SetFilterMode(FILTER_BILINEAR);
sharedDS_ = new Texture2D(GetContext());
sharedDS_->SetNumLevels(1);
sharedDS_->SetSize(eyeWidth_ * 2, eyeHeight_, Graphics::GetDepthStencilFormat(), TEXTURE_DEPTHSTENCIL, msaaLevel_);
sharedTexture_->GetRenderSurface()->SetLinkedDepthStencil(sharedDS_->GetRenderSurface());
}
else
{
// Separate per-eye color targets, each with its own linked depth-stencil.
leftTexture_ = new Texture2D(GetContext());
leftTexture_->SetNumLevels(1);
leftTexture_->SetSize(eyeWidth_, eyeHeight_, Graphics::GetRGBFormat(), TEXTURE_RENDERTARGET, msaaLevel_);
leftTexture_->SetFilterMode(FILTER_BILINEAR);
rightTexture_ = new Texture2D(GetContext());
rightTexture_->SetNumLevels(1);
rightTexture_->SetSize(eyeWidth_, eyeHeight_, Graphics::GetRGBFormat(), TEXTURE_RENDERTARGET, msaaLevel_);
rightTexture_->SetFilterMode(FILTER_BILINEAR);
leftDS_ = new Texture2D(GetContext());
leftDS_->SetNumLevels(1);
leftDS_->SetSize(eyeWidth_, eyeHeight_, Graphics::GetDepthStencilFormat(), TEXTURE_DEPTHSTENCIL, msaaLevel_);
rightDS_ = new Texture2D(GetContext());
rightDS_->SetNumLevels(1);
rightDS_->SetSize(eyeWidth_, eyeHeight_, Graphics::GetDepthStencilFormat(), TEXTURE_DEPTHSTENCIL, msaaLevel_);
leftTexture_->GetRenderSurface()->SetLinkedDepthStencil(leftDS_->GetRenderSurface());
rightTexture_->GetRenderSurface()->SetLinkedDepthStencil(rightDS_->GetRenderSurface());
}
}
/// Builds one masking Geometry per eye from OpenVR's hidden-area mesh — the
/// region of each eye texture that the lens never shows. Vertices arrive as
/// 2D coordinates; Z is set to 0 for the position-only vertex layout.
void VRSystem::LoadHiddenAreaMesh()
{
    for (int eye = 0; eye < 2; ++eye)
    {
        // FIX: the inner loop previously reused 'i', shadowing the eye index;
        // renamed to 'tri' (and 'leftMesh' to 'mesh' — it serves both eyes).
        auto mesh = vrSystem_->GetHiddenAreaMesh(eye == 0 ? vr::Eye_Left : vr::Eye_Right);
        hiddenAreaMesh_[eye] = new Geometry(GetContext());
        VertexBuffer* vbo = new VertexBuffer(GetContext());
        vbo->SetSize(mesh.unTriangleCount * 3, { VertexElement(TYPE_VECTOR3, SEM_POSITION, 0, 0) });
        PODVector<Vector3> verts;
        for (unsigned tri = 0; tri < mesh.unTriangleCount; ++tri)
        {
            verts.Push({ mesh.pVertexData[tri * 3 + 0].v[0], mesh.pVertexData[tri * 3 + 0].v[1], 0.0f });
            verts.Push({ mesh.pVertexData[tri * 3 + 1].v[0], mesh.pVertexData[tri * 3 + 1].v[1], 0.0f });
            verts.Push({ mesh.pVertexData[tri * 3 + 2].v[0], mesh.pVertexData[tri * 3 + 2].v[1], 0.0f });
        }
        vbo->SetData(verts.Buffer());
        hiddenAreaMesh_[eye]->SetVertexBuffer(0, vbo);
        hiddenAreaMesh_[eye]->SetDrawRange(TRIANGLE_LIST, 0, 0, 0, verts.Size(), true);
    }
}
// Polls OpenVR's async render-model loader for the two controller ("wand")
// meshes and their diffuse textures. Called every frame from
// HandlePreUpdate; each call advances whatever is ready and early-outs for
// hands that are fully loaded (texture present == done).
// NOTE(review): handIndex can be k_unTrackedDeviceIndexInvalid when no
// controller is present; it is passed straight to GetTrackedDevicePropString
// — presumably that query then fails and returns empty. Confirm.
void VRSystem::LoadRenderModels()
{
// Load model controllers if possible
for (int i = 0; i < 2; ++i)
{
uint32_t handIndex = (uint32_t)(vrSystem_->GetTrackedDeviceIndexForControllerRole(i == 1 ? vr::TrackedControllerRole_RightHand : vr::TrackedControllerRole_LeftHand));
// if we have a texture then we're already long done
if (wandMeshes_[i].texture_)
continue;
String controllerName = GetTrackedDevicePropString(vrSystem_, handIndex, vr::Prop_RenderModelName_String);
if (controllerName.Length())
{
wandMeshes_[i].name_ = controllerName;
// if we have a geometry then we don't need to query again
if (wandMeshes_[i].geometry_.Null())
{
// Async load: returns _Loading until the runtime has the model ready.
auto r = vr::VRRenderModels()->LoadRenderModel_Async(controllerName.CString(), &wandMeshes_[i].model_);
if (r == vr::VRRenderModelError_None)
{
if (wandMeshes_[i].geometry_.Null())
{
URHO3D_LOGWARNING("Loaded wand model");
auto& model = wandMeshes_[i].model_;
unsigned indexCount = model->unTriangleCount * 3;
VertexBuffer* vbo = new VertexBuffer(GetContext());
IndexBuffer* ibo = new IndexBuffer(GetContext());
BoundingBox bnds;
// Copy vertices, flipping Z into Urho's convention and accumulating
// the bounding box as we go.
PODVector<vr::RenderModel_Vertex_t> vertices;
for (unsigned v = 0; v < model->unVertexCount; ++v)
{
vertices.Push(model->rVertexData[v]);
vertices[v].vPosition.v[2] *= -1; // our Z goes the other way
if (v == 0)
bnds.Define(Vector3(vertices[v].vPosition.v[0], vertices[v].vPosition.v[1], vertices[v].vPosition.v[2]));
else
bnds.Merge(Vector3(vertices[v].vPosition.v[0], vertices[v].vPosition.v[1], vertices[v].vPosition.v[2]));
}
// Layout matches vr::RenderModel_Vertex_t: position, normal, UV.
vbo->SetSize(model->unVertexCount, {
VertexElement(TYPE_VECTOR3, SEM_POSITION, 0),
VertexElement(TYPE_VECTOR3, SEM_NORMAL, 0),
VertexElement(TYPE_VECTOR2, SEM_TEXCOORD, 0) });
vbo->SetData(vertices.Buffer());
ibo->SetSize(indexCount, false);
ibo->SetData(model->rIndexData);
wandMeshes_[i].bounds_ = bnds;
wandMeshes_[i].geometry_ = new Geometry(GetContext());
wandMeshes_[i].geometry_->SetVertexBuffer(0, vbo);
wandMeshes_[i].geometry_->SetIndexBuffer(ibo);
wandMeshes_[i].geometry_->SetDrawRange(TRIANGLE_LIST, 0, indexCount, 0, model->unVertexCount);
}
}
}
// only try for texture if we have geometry but no texture and a valid texture ID
if (wandMeshes_[i].geometry_.NotNull() && wandMeshes_[i].model_->diffuseTextureId >= 0 && wandMeshes_[i].texture_.Null())
{
auto r = vr::VRRenderModels()->LoadTexture_Async(wandMeshes_[i].model_->diffuseTextureId, &wandMeshes_[i].colorTex_);
if (r == vr::VRRenderModelError_None)
{
URHO3D_LOGWARNING("Loaded wand texture");
if (wandMeshes_[i].texture_.Null())
{
// RGBA8 upload straight from the OpenVR texture map.
auto& tex = wandMeshes_[i].colorTex_;
wandMeshes_[i].texture_ = new Texture2D(GetContext());
wandMeshes_[i].texture_->SetSize(tex->unWidth, tex->unHeight, Graphics::GetRGBAFormat());
wandMeshes_[i].texture_->SetData(0, 0, 0, wandMeshes_[i].texture_->GetWidth(), wandMeshes_[i].texture_->GetHeight(), tex->rubTextureMapData);
}
}
}
}
}
}
void VRSystem::Shutdown()
{
if (vrSystem_)
{
vrSystem_ = nullptr;
for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; ++i)
poses_[i].bDeviceIsConnected = false;
sharedTexture_.Reset();
leftTexture_.Reset();
rightTexture_.Reset();
wandMeshes_[0].Free();
wandMeshes_[1].Free();
vr::VR_Shutdown();
}
}
// Per-frame (E_BEGINFRAME) handler: pumps the OpenVR event queue, blocks in
// WaitGetPoses for this frame's predicted device poses, refreshes input
// binding values, polls async render-model loading, and optionally
// pre-clears the per-eye hidden-area masks.
void VRSystem::HandlePreUpdate(StringHash, VariantMap&)
{
if (vrSystem_)
{
vr::VREvent_t event;
while (vrSystem_->PollNextEvent(&event, sizeof(event)))
{
switch (event.eventType)
{
// Headset idle: notify listeners and drop the frame cap to save power.
case vr::VREvent_EnterStandbyMode: {
auto& data = GetEventDataMap();
data[VRPause::P_STATE] = true;
SendEvent(E_VRPAUSE, data);
GetSubsystem<Engine>()->SetMaxFps(10);
} break;
// SteamVR dashboard open/close is surfaced as pause/unpause.
case vr::VREvent_DashboardActivated: {
auto& data = GetEventDataMap();
data[VRPause::P_STATE] = true;
SendEvent(E_VRPAUSE, data);
} break;
case vr::VREvent_DashboardDeactivated: {
auto& data = GetEventDataMap();
data[VRPause::P_STATE] = false;
SendEvent(E_VRPAUSE, data);
} break;
// Waking from standby: unpause and restore the 90 fps cap.
case vr::VREvent_LeaveStandbyMode: {
auto& data = GetEventDataMap();
data[VRPause::P_STATE] = false;
SendEvent(E_VRPAUSE, data);
GetSubsystem<Engine>()->SetMaxFps(90);
} break;
case vr::VREvent_Input_BindingsUpdated: {
//?? does this invalidate our action handles?
} break;
// Any quit request from the runtime shuts the application down.
case vr::VREvent_Quit:
case vr::VREvent_ProcessQuit:
case vr::VREvent_DriverRequestedQuit:
GetSubsystem<Engine>()->Exit();
break;
}
}
// Blocks until the compositor hands back poses for the upcoming frame.
vr::VRCompositor()->WaitGetPoses(poses_, vr::k_unMaxTrackedDeviceCount, NULL, 0);
// Update bindings if we have a valid action set
if (defaultActionSet_)
{
vr::VRActiveActionSet_t activeInputSets;
activeInputSets.ulActionSet = defaultActionSet_;
activeInputSets.nPriority = vr::k_nActionSetOverlayGlobalPriorityMax;
activeInputSets.ulSecondaryActionSet = vr::k_ulInvalidActionSetHandle;
activeInputSets.ulRestrictedToDevice = vr::k_ulInvalidInputValueHandle;
vr::VRInput()->UpdateActionState(&activeInputSets, sizeof(vr::VRActiveActionSet_t), 1);
UpdateBindingValues();
}
// Render models stream in asynchronously; poll until they are resident.
LoadRenderModels();
if (autoClearMasks_)
DrawEyeMasks();
}
}
/// Returns the HMD's current tracked pose in Urho coordinates, or identity
/// when no valid pose is available.
Matrix3x4 VRSystem::GetHeadTransform() const
{
    const auto& hmdPose = poses_[vr::k_unTrackedDeviceIndex_Hmd];
    if (!hmdPose.bPoseIsValid)
        return Matrix3x4();
    return Matrix3x4(ToUrho(hmdPose.mDeviceToAbsoluteTracking));
}
/// Returns the requested controller's tracked pose in Urho coordinates.
/// Identity is returned when VR is not running, the controller is not
/// tracked, or its pose is invalid.
Matrix3x4 VRSystem::GetHandTransform(bool isRight) const
{
    if (!vrSystem_)
        return Matrix3x4();
    const auto role = isRight ? vr::TrackedControllerRole_RightHand : vr::TrackedControllerRole_LeftHand;
    const auto deviceIndex = (uint32_t)vrSystem_->GetTrackedDeviceIndexForControllerRole(role);
    if (deviceIndex >= vr::k_unMaxTrackedDeviceCount || !poses_[deviceIndex].bPoseIsValid)
        return Matrix3x4();
    return Matrix3x4(ToUrho(poses_[deviceIndex].mDeviceToAbsoluteTracking));
}
/// Fetches the requested controller's current linear and/or angular
/// velocity (Urho coordinates). Either output pointer may be null; outputs
/// are left untouched when VR is unavailable or the pose is invalid.
void VRSystem::GetHandVelocity(bool isRight, Vector3* linear, Vector3* ang) const
{
    if (!vrSystem_)
        return;
    const auto role = isRight ? vr::TrackedControllerRole_RightHand : vr::TrackedControllerRole_LeftHand;
    const auto deviceIndex = (uint32_t)vrSystem_->GetTrackedDeviceIndexForControllerRole(role);
    if (deviceIndex >= vr::k_unMaxTrackedDeviceCount || !poses_[deviceIndex].bPoseIsValid)
        return;
    if (linear)
        *linear = ToUrho(poses_[deviceIndex].vVelocity);
    if (ang)
        *ang = ToUrho(poses_[deviceIndex].vAngularVelocity);
}
/// Returns the eye-to-head offset transform for the requested eye in Urho
/// coordinates, or identity when VR is not running.
Matrix3x4 VRSystem::GetEyeLocalTransform(bool isRight) const
{
    if (!vrSystem_)
        return Matrix3x4();
    const auto eye = isRight ? vr::Eye_Right : vr::Eye_Left;
    return Matrix3x4(ToUrho(vrSystem_->GetEyeToHeadTransform(eye)));
}
/// Returns the "tip" (aim) transform of the requested controller by
/// composing the hand pose with the render model's tip component offset.
/// Identity is returned when VR is down or the component lookup fails.
Matrix3x4 VRSystem::GetHandAimTransform(bool isRight) const
{
    if (!vrSystem_)
        return Matrix3x4();
    const int hand = isRight ? 1 : 0;
    vr::RenderModel_ControllerMode_State_t cState = { 0 };
    vr::RenderModel_ComponentState_t state = {};
    const bool haveTip = vr::VRRenderModels()->GetComponentStateForDevicePath(
        wandMeshes_[hand].name_.CString(), vr::k_pch_Controller_Component_Tip,
        handInputHandles_[hand], &cState, &state);
    if (!haveTip)
        return Matrix3x4();
    return GetHandTransform(isRight) * Matrix3x4(ToUrho(state.mTrackingToComponentLocal));
}
/// Builds a pointing ray from the controller's aim transform: origin at the
/// tip, direction along local +Z. Returns a default Ray when VR is down or
/// no aim transform is available (identity result from GetHandAimTransform).
Ray VRSystem::GetHandAimRay(bool isRight) const
{
    if (!vrSystem_)
        return Ray();
    const auto aim = GetHandAimTransform(isRight);
    if (aim == Matrix3x4::IDENTITY)
        return Ray();
    const Vector3 direction = (aim * Vector3(0, 0, 1)).Normalized();
    return Ray(aim.Translation(), direction);
}
/// Returns the projection matrix for the requested eye converted to Urho's
/// convention, or identity when VR is not running.
/// NOTE(review): the whole converted matrix is negated (* -1.0f) — presumably
/// to flip into Urho's clip-space convention; confirm against how
/// Camera::SetProjection consumes it.
Matrix4 VRSystem::GetProjection(bool isRight, float near, float far) const
{
    if (!vrSystem_)
        return Matrix4();
    const auto eye = isRight ? vr::Eye_Right : vr::Eye_Left;
    return ToUrho(vrSystem_->GetProjectionMatrix(eye, near, far)) * -1.0f;
}
/// Fires a haptic pulse on the requested controller. No-op when the action
/// manifest did not provide any haptic actions.
void VRSystem::TriggerHaptic(bool isRight, float duration, float frequency, float amp)
{
    if (!hasHaptics_)
        return;
    const int hand = isRight ? 1 : 0;
    vr::VRInput()->TriggerHapticVibrationAction(hapticHandles_[hand], 0.0f, duration, frequency, amp, handInputHandles_[hand]);
}
/// Builds the standard VR rig node hierarchy under headRoot:
/// Head (with Left_Eye/Right_Eye camera children) plus Left_Hand/Right_Hand
/// siblings. The root is reset to the world origin with identity rotation.
void VRSystem::PrepareRig(Node* headRoot)
{
    headRoot->SetWorldPosition(Vector3(0, 0, 0));
    headRoot->SetWorldRotation(Quaternion::IDENTITY);
    auto head = headRoot->CreateChild("Head", LOCAL);
    auto leftEye = head->CreateChild("Left_Eye", LOCAL);
    auto rightEye = head->CreateChild("Right_Eye", LOCAL);
    // CLEANUP: the calls below were previously assigned to unused locals;
    // they are kept for their side effects (component/node creation).
    leftEye->GetOrCreateComponent<Camera>();
    rightEye->GetOrCreateComponent<Camera>();
    headRoot->CreateChild("Left_Hand", LOCAL);
    headRoot->CreateChild("Right_Hand", LOCAL);
}
/// Convenience overload: resolves the Head/Left_Eye/Right_Eye nodes from a
/// rig created by PrepareRig() and forwards to the full ConfigureRig().
void VRSystem::ConfigureRig(Node* vrRig, float nearDist, float farDist, bool forSinglePass)
{
    Node* head = vrRig->GetChild("Head");
    Node* leftEye = head->GetChild("Left_Eye");
    Node* rightEye = head->GetChild("Right_Eye");
    ConfigureRig(head->GetScene(), head, leftEye, rightEye, nearDist, farDist, forSinglePass);
}
// Per-frame rig update: applies the current head pose and per-eye
// transforms/projections to the rig's camera nodes, and (first time through)
// wires the eye textures up with viewports. Missing nodes are created.
// forSinglePass selects the single-viewport stereo-instancing path (requires
// the modified Viewport with left/right eye cameras — see notes at top).
void VRSystem::ConfigureRig(Scene* scene, Node* head, Node* leftEye, Node* rightEye, float nearDist, float farDist, bool forSinglePass)
{
if (head == nullptr)
{
auto headRoot = scene->CreateChild("VRRig", LOCAL);
head = headRoot->CreateChild("Head", LOCAL);
}
// Head node follows the tracked HMD pose directly.
head->SetTransform(GetHeadTransform());
if (leftEye == nullptr)
leftEye = head->CreateChild("Left_Eye", LOCAL);
if (rightEye == nullptr)
rightEye = head->CreateChild("Right_Eye", LOCAL);
auto leftCam = leftEye->GetOrCreateComponent<Camera>();
auto rightCam = rightEye->GetOrCreateComponent<Camera>();
// FOV/clip values are placeholders; the real projection is set explicitly.
leftCam->SetFov(110.0f); // junk mostly
leftCam->SetNearClip(nearDist);
leftCam->SetFarClip(farDist);
rightCam->SetFov(110.0f); // junk mostly
rightCam->SetNearClip(nearDist);
rightCam->SetFarClip(farDist);
leftCam->SetProjection(GetProjection(false, nearDist, farDist));
rightCam->SetProjection(GetProjection(true, nearDist, farDist));
leftEye->SetTransform(GetEyeLocalTransform(false));
rightEye->SetTransform(GetEyeLocalTransform(true));
// NOTE(review): 180-degree roll applied to both eyes — presumably
// compensating the negated projection in GetProjection(); confirm.
leftEye->Rotate(Quaternion(0, 0, 180), TS_LOCAL);
rightEye->Rotate(Quaternion(0, 0, 180), TS_LOCAL);
// User IPD tweak, split evenly between the two eyes.
float ipdAdjust = ipdCorrection_ * 0.5f;
leftEye->Translate({ ipdAdjust, 0, 0 }, TS_LOCAL);
rightEye->Translate({ -ipdAdjust, 0, 0 }, TS_LOCAL);
if (sharedTexture_ && forSinglePass)
{
// Single-pass: one viewport covering the whole shared texture, using
// the VR render path and both eye cameras (custom Viewport API).
auto surface = sharedTexture_->GetRenderSurface();
if (surface->GetViewport(0) == nullptr)
{
XMLFile* rp = GetSubsystem<ResourceCache>()->GetResource<XMLFile>("RenderPaths/Forward_VR.xml");
SharedPtr<Viewport> view(new Viewport(GetContext(), scene, leftCam, nullptr));
view->SetLeftEye(leftCam);
view->SetRightEye(rightCam);
view->SetCullCamera(leftCam);
view->SetRect({ 0, 0, sharedTexture_->GetWidth(), sharedTexture_->GetHeight() });
view->SetRenderPath(rp);
surface->SetViewport(0, view);
}
else
{
auto view = surface->GetViewport(0);
view->SetScene(scene);
view->SetCullCamera(leftCam);
view->SetLeftEye(leftCam);
view->SetRightEye(rightCam);
}
surface->SetUpdateMode(SURFACE_UPDATEALWAYS);
}
else
{
// Two-viewport path: either halves of the shared texture or the two
// dedicated eye textures.
auto leftSurface = useSingleTexture_ ? sharedTexture_->GetRenderSurface() : leftTexture_->GetRenderSurface();
auto rightSurface = useSingleTexture_ ? sharedTexture_->GetRenderSurface() : rightTexture_->GetRenderSurface();
// NOTE(review): the right viewport always goes into slot 1, even when
// left/right are distinct surfaces (where slot 0 of the right surface
// then stays empty). Looks intentional only for the shared-texture
// case — confirm for the separate-texture case.
if (leftSurface->GetViewport(0) == nullptr)
{
SharedPtr<Viewport> leftView(new Viewport(GetContext(), scene, leftCam));
SharedPtr<Viewport> rightView(new Viewport(GetContext(), scene, rightCam));
leftView->SetRect(GetLeftEyeRect());
rightView->SetRect(GetRightEyeRect());
leftSurface->SetViewport(0, leftView);
rightSurface->SetViewport(1, rightView);
}
else
{
auto leftView = leftSurface->GetViewport(0);
leftView->SetScene(scene);
leftView->SetCamera(leftCam);
auto rightView = rightSurface->GetViewport(1);
rightView->SetScene(scene);
rightView->SetCamera(rightCam);
}
leftSurface->SetUpdateMode(SURFACE_UPDATEALWAYS);
rightSurface->SetUpdateMode(SURFACE_UPDATEALWAYS);
}
}
/// Per-frame hand update: ensures the hand nodes exist, attaches the wand
/// render models, applies the latest tracked poses, and enables each hand
/// only while its controller is tracked and connected.
void VRSystem::ConfigureHands(Scene* scene, Node* rigRoot, Node* leftHand, Node* rightHand)
{
    // Lazily create the hand nodes if the caller didn't supply them.
    if (!leftHand)
        leftHand = rigRoot->CreateChild("Left_Hand");
    if (!rightHand)
        rightHand = rigRoot->CreateChild("Right_Hand");
    // Attach (or refresh) the wand meshes/textures.
    SetupModel(leftHand->GetOrCreateComponent<StaticModel>(), false);
    SetupModel(rightHand->GetOrCreateComponent<StaticModel>(), true);
    // Pose both hands from the current tracked transforms.
    leftHand->SetTransform(GetHandTransform(false));
    rightHand->SetTransform(GetHandTransform(true));
    // Visibility: a hand is shown only with a valid, connected controller.
    const auto leftIdx = (uint32_t)vrSystem_->GetTrackedDeviceIndexForControllerRole(vr::TrackedControllerRole_LeftHand);
    const auto rightIdx = (uint32_t)vrSystem_->GetTrackedDeviceIndexForControllerRole(vr::TrackedControllerRole_RightHand);
    leftHand->SetEnabled(leftIdx != vr::k_unTrackedDeviceIndexInvalid && poses_[leftIdx].bPoseIsValid && poses_[leftIdx].bDeviceIsConnected);
    rightHand->SetEnabled(rightIdx != vr::k_unTrackedDeviceIndexInvalid && poses_[rightIdx].bPoseIsValid && poses_[rightIdx].bDeviceIsConnected);
}
/// Post-present (E_POSTPRESENT) handler: submits the rendered eye textures
/// to the OpenVR compositor. When MSAA is active the resolve texture is
/// submitted instead, since the compositor cannot consume multisampled
/// surfaces. Compositor errors are logged per eye.
void VRSystem::HandlePostRender(StringHash, VariantMap&)
{
    if (vrSystem_ == nullptr)
        return;
    vr::EVRCompositorError err = vr::VRCompositorError_None;
    if (useSingleTexture_)
    {
        // Both eyes share one double-width texture; submit it twice with
        // different UV bounds (left half, then right half).
        vr::Texture_t sharedTexture = { sharedTexture_->GetGPUObject(), vr::TextureType_DirectX, vr::ColorSpace_Gamma };
        vr::VRTextureBounds_t leftBounds;
        leftBounds.uMin = 0.0f; leftBounds.uMax = 0.5f;
        leftBounds.vMin = 0.0f; leftBounds.vMax = 1.0f;
        vr::VRTextureBounds_t rightBounds;
        rightBounds.uMin = 0.5f; rightBounds.uMax = 1.0f;
        rightBounds.vMin = 0.0f; rightBounds.vMax = 1.0f;
        // if MSAA then we need to send the resolve texture
        if (sharedTexture_->GetMultiSample() > 1)
        {
            GetSubsystem<Graphics>()->ResolveToTexture(sharedTexture_);
            sharedTexture.handle = sharedTexture_->GetResolveTexture();
        }
        err = vr::VRCompositor()->Submit(vr::Eye_Left, &sharedTexture, &leftBounds);
        if (err != vr::VRCompositorError_None)
        {
            URHO3D_LOGERROR("LeftEyeError: " + VR_CompositorError(err));
        }
        err = vr::VRCompositor()->Submit(vr::Eye_Right, &sharedTexture, &rightBounds);
        if (err != vr::VRCompositorError_None)
        {
            URHO3D_LOGERROR("RightEyeError: " + VR_CompositorError(err));
        }
    }
    else
    {
        vr::Texture_t leftEyeTexture = { leftTexture_->GetGPUObject(), vr::TextureType_DirectX, vr::ColorSpace_Gamma };
        vr::Texture_t rightEyeTexture = { rightTexture_->GetGPUObject(), vr::TextureType_DirectX, vr::ColorSpace_Gamma };
        // if MSAA then we need to send the resolve texture
        if (leftTexture_->GetMultiSample() > 1)
        {
            GetSubsystem<Graphics>()->ResolveToTexture(leftTexture_);
            leftEyeTexture.handle = leftTexture_->GetResolveTexture();
        }
        err = vr::VRCompositor()->Submit(vr::Eye_Left, &leftEyeTexture);
        if (err != vr::VRCompositorError_None)
        {
            URHO3D_LOGERROR("LeftEyeError: " + VR_CompositorError(err));
        }
        // The right-eye resolve is done after the left-eye submit so any
        // compositor-side work (e.g. deformation) can overlap the resolve.
        if (rightTexture_->GetMultiSample() > 1)
        {
            GetSubsystem<Graphics>()->ResolveToTexture(rightTexture_);
            // BUGFIX: this previously assigned the right eye's resolve
            // texture to leftEyeTexture.handle, so the right eye submitted
            // its unresolved multisampled texture.
            rightEyeTexture.handle = rightTexture_->GetResolveTexture();
        }
        err = vr::VRCompositor()->Submit(vr::Eye_Right, &rightEyeTexture);
        if (err != vr::VRCompositorError_None)
        {
            URHO3D_LOGERROR("RightEyeError: " + VR_CompositorError(err));
        }
    }
}
/// Attaches the cached wand geometry/texture for one hand to a StaticModel.
/// Only fills in pieces that are still missing, so calling it every frame
/// (as ConfigureHands does) is cheap once everything has loaded.
void VRSystem::SetupModel(StaticModel* target, bool isRight)
{
    auto& wand = wandMeshes_[isRight ? 1 : 0];
    // Wrap the cached geometry in a Model the first time it is available.
    if (wand.geometry_ && target->GetModel() == nullptr)
    {
        auto mdl = new Model(GetContext());
        mdl->SetNumGeometries(1);
        mdl->SetGeometry(0, 0, wand.geometry_);
        mdl->SetBoundingBox(wand.bounds_);
        target->SetModel(mdl);
    }
    // Provide a default material if the caller hasn't assigned one.
    if (target->GetMaterial() == nullptr)
        target->SetMaterial(GetSubsystem<ResourceCache>()->GetResource<Material>("Materials/DefaultGrey.xml")->Clone());
    // Apply the controller's diffuse texture once it has streamed in.
    if (wand.texture_)
    {
        auto mat = target->GetMaterial();
        if (mat && mat->GetTexture(TU_DIFFUSE) == nullptr)
            mat->SetTexture(TU_DIFFUSE, wand.texture_);
    }
}
/// Releases the Urho-side wand resources (texture and geometry).
/// NOTE: the OpenVR-side objects (model_, colorTex_) are deliberately not
/// returned via FreeRenderModel/FreeTexture — those calls are disabled in
/// the original implementation.
void VRSystem::ControlMesh::Free()
{
    geometry_.Reset();
    texture_.Reset();
}
// Clears each eye's render target and stamps the lens hidden-area mesh into
// color/depth ahead of scene rendering. Called from HandlePreUpdate when
// autoClearMasks_ is set.
// NOTE(review): depth is written with CMP_ALWAYS — presumably so the masked
// region occludes later scene passes via depth test; confirm against the
// VR_EyeMask shader's output depth.
void VRSystem::DrawEyeMasks()
{
auto gfx = GetSubsystem<Graphics>();
// Per-eye viewport rects; with the shared texture these are its two halves.
IntRect vpts[] = {
GetLeftEyeRect(),
GetRightEyeRect()
};
RenderSurface* surfaces[] = {
GetLeftEyeTexture()->GetRenderSurface(),
GetRightEyeTexture()->GetRenderSurface()
};
// Single-texture mode shares one depth-stencil between both eyes.
Texture2D* ds[] = {
useSingleTexture_ ? sharedDS_.Get() : leftDS_.Get(),
useSingleTexture_ ? sharedDS_.Get() : rightDS_.Get(),
};
ShaderVariation* vertexShader = gfx->GetShader(VS, "VR_EyeMask", nullptr);
ShaderVariation* pixelShader = gfx->GetShader(PS, "VR_EyeMask", nullptr);
gfx->ResetRenderTargets();
for (int i = 0; i < 2; ++i)
{
gfx->SetRenderTarget(0, surfaces[i]);
gfx->SetDepthStencil(ds[i]);
gfx->SetViewport(vpts[i]);
gfx->Clear(CLEAR_COLOR | CLEAR_DEPTH | CLEAR_STENCIL);
// Draw the hidden-area mesh with fixed, state-reset render settings.
gfx->SetVertexBuffer(hiddenAreaMesh_[i]->GetVertexBuffer(0));
gfx->SetShaders(vertexShader, pixelShader, nullptr, nullptr, nullptr);
gfx->SetDepthWrite(true);
gfx->SetDepthTest(CMP_ALWAYS);
gfx->SetScissorTest(false);
gfx->SetStencilTest(false);
gfx->SetCullMode(CULL_NONE);
gfx->SetBlendMode(BLEND_REPLACE);
gfx->SetColorWrite(true);
gfx->Draw(TRIANGLE_LIST, 0, hiddenAreaMesh_[i]->GetVertexCount());
}
gfx->ResetRenderTargets();
}
/// Reads the SteamVR action manifest JSON: registers each entry under
/// "actions" as a VRBinding (typed from the manifest's "type" field and
/// resolved to an OpenVR action handle), then fills in localized display
/// names from the "localization" table.
void VRSystem::ParseManifestFile(const String& manifestFile)
{
    JSONFile file(GetContext());
    file.LoadFile(manifestFile);
    auto actions = file.GetRoot().Get("actions");
    if (actions.NotNull())
    {
        auto actionArray = actions.GetArray();
        for (unsigned i = 0; i < actionArray.Size(); ++i)
        {
            auto action = actionArray[i];
            String name = action.Get("name").GetString();
            String type = action.Get("type").GetString();
            VRBinding binding;
            binding.path_ = name;
            // Map manifest type strings onto Variant types.
            if (type == "boolean")
                binding.dataType_ = VAR_BOOL;
            else if (type == "vector1" || type == "single")
                binding.dataType_ = VAR_FLOAT;
            else if (type == "vector2")
                binding.dataType_ = VAR_VECTOR2;
            else if (type == "vector3")
                binding.dataType_ = VAR_VECTOR3;
            else if (type == "pose")
                binding.dataType_ = VAR_MATRIX3X4;
            else
                binding.dataType_ = VAR_NONE; // unknown/skeleton types stay untyped
            auto err = vr::VRInput()->GetActionHandle(name.CString(), &binding.handle_);
            if (err == vr::VRInputError_None)
                bindings_.Push(binding);
            else
            {
                URHO3D_LOGERRORF("Failed to find VR input binding for %s", name.CString());
            }
        }
    }
    else
    {
        URHO3D_LOGERROR("No actions found for VR action manifest");
    }
    auto localization = file.GetRoot().Get("localization");
    if (localization.NotNull())
    {
        auto localizationArray = localization.GetArray();
        // BUGFIX: this loop previously indexed localizationArray[0] on every
        // iteration, so only the first language entry was ever read. Each
        // entry is now scanned; with multiple languages, later entries
        // overwrite earlier localized names (names are not yet keyed per
        // language — see the notes at the top of this gist).
        for (unsigned i = 0; i < localizationArray.Size(); ++i)
        {
            auto lang = localizationArray[i].GetObject();
            for (auto field : lang)
            {
                // "language_tag" names the locale itself, not an action.
                if (field.first_.Compare("language_tag", false) == 0)
                    continue;
                for (auto& b : bindings_)
                {
                    if (b.path_.Compare(field.first_, false) == 0)
                    {
                        b.localizedName_ = field.second_.GetString();
                        break;
                    }
                }
            }
        }
    }
}
// Refreshes every registered VRBinding from OpenVR's input state, once per
// frame after UpdateActionState. Each binding type maps to the matching
// OpenVR query (digital, analog 1/2/3-component, or pose); on a query error
// the binding is logged and skipped.
// NOTE(review): the VAR_BOOL error path also clears storedData_/delta_,
// while the other types leave stale values in place — confirm whether that
// asymmetry is intentional.
void VRSystem::UpdateBindingValues()
{
for (auto& binding : bindings_)
{
switch (binding.dataType_)
{
// Digital (button) action: stores state and changed-this-frame flag.
case VAR_BOOL: {
vr::InputDigitalActionData_t data;
auto err = vr::VRInput()->GetDigitalActionData(binding.handle_, &data, sizeof(data), 0);
if (err != vr::VRInputError_None)
{
URHO3D_LOGERRORF("VR input binding update error: %s", VR_InputError(err).CString());
binding.storedData_ = false;
binding.delta_ = false;
continue;
}
if (data.bActive)
{
binding.storedData_ = data.bState;
binding.changed_ = data.bChanged;
}
else
{
binding.storedData_ = false;
binding.changed_ = false;
}
} break;
// Analog 1D action (trigger-style): value in x, per-frame delta in deltaX.
case VAR_FLOAT: {
vr::InputAnalogActionData_t data;
auto err = vr::VRInput()->GetAnalogActionData(binding.handle_, &data, sizeof(data), 0);
if (err != vr::VRInputError_None)
{
URHO3D_LOGERRORF("VR input binding update error: %s", VR_InputError(err).CString());
continue;
}
binding.active_ = data.bActive;
if (data.bActive)
{
binding.storedData_ = data.x;
binding.delta_ = data.deltaX;
// "Changed" means the delta exceeds float epsilon.
binding.changed_ = fabsf(data.deltaX) > FLT_EPSILON;
}
else
binding.changed_ = false;
} break;
// Analog 2D action (joystick/trackpad).
case VAR_VECTOR2: {
vr::InputAnalogActionData_t data;
auto err = vr::VRInput()->GetAnalogActionData(binding.handle_, &data, sizeof(data), 0);
if (err != vr::VRInputError_None)
{
URHO3D_LOGERRORF("VR input binding update error: %s", VR_InputError(err).CString());
continue;
}
binding.active_ = data.bActive;
if (data.bActive)
{
binding.storedData_ = Vector2(data.x, data.y);
auto v = Vector2(data.deltaX, data.deltaY);
binding.delta_ = v;
binding.changed_ = v.Length() > FLT_EPSILON;
}
else
binding.changed_ = false;
break;
} break;
// Analog 3D action; Z is negated into Urho's convention.
case VAR_VECTOR3: {
vr::InputAnalogActionData_t data;
auto err = vr::VRInput()->GetAnalogActionData(binding.handle_, &data, sizeof(data), 0);
if (err != vr::VRInputError_None)
{
URHO3D_LOGERRORF("VR input binding update error: %s", VR_InputError(err).CString());
continue;
}
binding.active_ = data.bActive;
if (data.bActive)
{
binding.storedData_ = Vector3(data.x, data.y, -data.z);
auto v = Vector3(data.deltaX, data.deltaY, -data.deltaZ);
binding.delta_ = v;
binding.changed_ = v.Length() > FLT_EPSILON;
}
else
binding.changed_ = false;
break;
} break;
// Pose action: stores transform, linear velocity (delta_) and angular
// velocity (extraDelta_), relative to the standing tracking origin.
case VAR_MATRIX3X4: { // pose
vr::InputPoseActionData_t data;
auto err = vr::VRInput()->GetPoseActionDataForNextFrame(binding.handle_, vr::ETrackingUniverseOrigin::TrackingUniverseStanding, &data, sizeof(data), 0);
if (err != vr::VRInputError_None)
{
URHO3D_LOGERRORF("VR input binding update error: %s", VR_InputError(err).CString());
continue;
}
binding.active_ = data.bActive;
if (data.bActive)
{
// NOTE(review): 200 appears to correspond to
// vr::TrackingResult_Running_OK — replace the magic number after
// confirming against openvr.h.
if (data.pose.eTrackingResult >= 200)
{
binding.storedData_ = ToUrho(data.pose.mDeviceToAbsoluteTracking);
binding.delta_ = ToUrho(data.pose.vVelocity);
binding.extraDelta_ = ToUrho(data.pose.vAngularVelocity);
}
else
binding.active_ = false;
}
} break;
}
}
}
/// Find a binding by its manifest action path (case-sensitive); returns badBinding_ when absent.
VRBinding& VRSystem::GetInputBinding(const String& path)
{
    const unsigned count = bindings_.Size();
    for (unsigned idx = 0; idx < count; ++idx)
    {
        if (bindings_[idx].path_.Compare(path) == 0)
            return bindings_[idx];
    }
    return badBinding_;
}
/// Find a binding by its manifest action path (case-sensitive); returns badBinding_ when absent.
const VRBinding& VRSystem::GetInputBinding(const String& path) const
{
    const unsigned count = bindings_.Size();
    for (unsigned idx = 0; idx < count; ++idx)
    {
        if (bindings_[idx].path_.Compare(path) == 0)
            return bindings_[idx];
    }
    return badBinding_;
}
/// Find a binding by its localized title (case-sensitive); returns badBinding_ when absent.
VRBinding& VRSystem::GetInputBindingByTitle(const String& name)
{
    const unsigned count = bindings_.Size();
    for (unsigned idx = 0; idx < count; ++idx)
    {
        if (bindings_[idx].localizedName_.Compare(name) == 0)
            return bindings_[idx];
    }
    return badBinding_;
}
/// Find a binding by its localized title (case-sensitive); returns badBinding_ when absent.
const VRBinding& VRSystem::GetInputBindingByTitle(const String& name) const
{
    const unsigned count = bindings_.Size();
    for (unsigned idx = 0; idx < count; ++idx)
    {
        if (bindings_[idx].localizedName_.Compare(name) == 0)
            return bindings_[idx];
    }
    return badBinding_;
}
}
#pragma once
#include "../Core/Object.h"
#include "../Graphics/Drawable.h"
#include "../Scene/LogicComponent.h"
#include "../Math/Ray.h"
#include <openvr.h>
namespace Urho3D
{
class Camera;
class Geometry;
class Model;
class RenderPath;
class Texture2D;
class Shader;
class StaticModel;
class View;
/// Input manifest binding.
struct URHO3D_API VRBinding
{
/// From the actions portion of the JSON.
String path_;
/// As found in the JSON localization for the first language listed (assumed to be yours)
String localizedName_;
/// Action handle
vr::VRActionHandle_t handle_ = vr::k_ulInvalidActionHandle;
/// VAR_BOOL = boolean, VAR_FLOAT = vector1, VAR_VECTOR2 = vector2, VAR_MATRIX3X4 = pose
VariantType dataType_ = VAR_NONE;
/// Stored current value.
Variant storedData_;
/// Change since the last update.
Variant delta_;
/// Sometimes used for storing extra info, only Angular velocity on poses right now
Variant extraDelta_;
/// Value was changed this update.
bool changed_ = false;
/// Status is active, only applies to non-bools (usually means the delta is legit).
bool active_ = false;
bool GetBool() { return storedData_.GetBool(); }
float GetFloat() { return storedData_.GetFloatSafe(); }
Vector2 GetVec2() { return storedData_.GetVector2(); }
Vector3 GetVec3() { return storedData_.GetVector3(); }
Vector3 GetPos() { return storedData_.GetMatrix3x4().Translation(); }
Quaternion GetRot() { return storedData_.GetMatrix3x4().Rotation(); }
Matrix3x4 GetTransform() { return storedData_.GetMatrix3x4(); }
inline bool IsNull() const { return dataType_ != VAR_NONE && handle_ != vr::k_ulInvalidActionHandle; }
inline bool NotNull() const { return !IsNull(); }
};
/**
Register as a subsystem, Initialize sometime after GFX has been initialized but before Audio is initialized ...
otherwise it won't get the right audio target.
Expectations for the VR-Rig:
Scene
- "VRRig" NETWORKED
- "Head" LOCAL
- "Left_Eye" LOCAL
- Camera
- "Right_Eye" LOCAL
- Camera
- "Left_Hand" NETWORKED, will have enabled status set based on controller availability
- StaticModel[0] = controller model
- "Right_Hand" NETWORKED, will have enabled status set based on controller availability
- StaticModel[0] = controller model
Instead of networking those components network whatever state is relevant like (ie. IK targets),
tag hands against the client-id so server-side logic can deal with them.
Ideally, don't touch multiplayer in VR.
Expectations for Single-pass VR:
Single-pass does not work without modifications to Batch drawing to pass an isVR parameter, plus additional cameras for View.
It is not possible to compute a single frustum to enclose the view for PIMAX (>180 combined FOV).
Single-pass can save as much as 20%, save ~15%-20% fill-rate by also calling DrawEyeMasks() to force depths to 0 for hidden areas.
15% happens to be pretty close to the extra pixel cost of PBR over legacy.
Using depth-0 since Urho3D uses stencil for other purposes
VR Frame-times are hard. Better GPUs, 90 fps cap - sometimes you won't even go over 40% GPU utilization despite riding right at 10ms
Not sending enough work to get the GPU to throttle up in relation to vblanks it sees
Use a RenderPath with:
vsdefines="VR"
psdefines="VR"
Vertex Shader
#ifdef VR
uint iInstanceID : SV_InstanceID,
out uint oInstanceID : TEXCOORD8,
out float oClip : SV_ClipDistance0,
out float oCull : SV_CullDistance0,
#endif
#ifdef VR
oPos = GetClipPos(worldPos, iInstanceID);
float eyeOffsetScale[2] = {-0.5, 0.5};
float4 eyeClipEdge[2] = { {-1,0,0,1}, {1,0,0,1} };
uint eyeIndex = iInstanceID & 1;
oCull = oClip = dot(oPos, eyeClipEdge[eyeIndex]);
oPos.x *= 0.5;
oPos.x += eyeOffsetScale[eyeIndex] * oPos.w;
#else
oPos = GetClipPos(worldPos);
#endif
Perform clipping based on [SV_InstanceID & 1]
Uniform buffers (use SV_InstanceID & 1 to index):
cbuffer CameraVS : register(b1)
{
#ifdef VR
float3 cCameraPos[3];
#else
float3 cCameraPos;
#endif
float cNearClip;
float cFarClip;
float4 cDepthMode;
float3 cFrustumSize;
float4 cGBufferOffsets;
#ifdef VR
float4x3 cView[3]; // 0 is left, 1 is right, 2 is combined
float4x3 cViewInv[3];
float4x4 cViewProj[3];
#else
float4x3 cView;
float4x3 cViewInv;
float4x4 cViewProj;
#endif
float4 cClipPlane;
}
VR Specialized Rendering (single texture target):
Any specialized rendering (like DebugRenderer) that View calls needs to check for eyes
width is always target-width / 2. Just set it up to draw twice, settings the viewports and
camera parameters appropriately.
Optimization:
- Use one of the VR render-targets for the main view with a simple fullscreen blit render-path.
- Disable reuse shadowmaps if not using single-pass rendering, or using an additional scene render for PC desktop display
To-Do:
- Render-Scale is only expected to go down from 1.0 and not up ... there are no checks for failure to construct (i.e. the target is too large)
- That's why it's capped at 2x max, most VR capable GPUs can make a render-target 4x as wide as their max (single-texture is already 2x wide)
- Vive Trackers
- Specifically they probably mess with height-correction
- Typically mean limited things:
- foot trackers
- waist tracker
- tool tracker (seen with PVC guns, etc)
- Mesh loading needs some added changes to support it, always iterate in case of new meshes
- Utility components
- controller "fade in", use an inflated bounding box and fade in Controller models as they approach each other
- because mashing controllers into each other in half-life Alyx sucks
- Controller space management, bounds awareness and shifting based on collision risks
- ie. shift relative weapon pose up/down in hand space to avoid ring collisions.
- Grabber/tosser
- Support multiple Action-Sets
- Do suit/vest haptics have any standard?
- Extract eye-masked images, for easy blit
*/
class URHO3D_API VRSystem : public Object
{
    URHO3D_OBJECT(VRSystem, Object);
public:
    /// Construct. Register as a subsystem.
    VRSystem(Context*);
    virtual ~VRSystem();

    /// Start OpenVR and load the input action manifest. Call after graphics init,
    /// before audio init, so the headset audio target is picked up.
    bool Initialize(const String& manifestPath = "Data/vr_actions.json");
    /// Tear down the OpenVR session.
    void Shutdown();

    void HandlePreUpdate(StringHash, VariantMap&);
    void HandlePostRender(StringHash, VariantMap&);

    Matrix3x4 GetHeadTransform() const;
    Matrix3x4 GetHandTransform(bool isRight) const;
    Matrix3x4 GetEyeLocalTransform(bool isRight) const;
    /// Gets the pose/tip aiming.
    Matrix3x4 GetHandAimTransform(bool isRight) const;
    /// For the pose/tip aiming (like a gun).
    Ray GetHandAimRay(bool isRight) const;
    void GetHandVelocity(bool isRight, Vector3* linear, Vector3* angular) const;
    /// Eye projection matrix. Parameters renamed from near/far, which collide with
    /// legacy Windows `near`/`far` keyword macros when windows.h is in the include chain.
    Matrix4 GetProjection(bool isRight, float nearDist, float farDist) const;
    void TriggerHaptic(bool isRight, float durationSeconds, float frequency, float strength);
    void SetupModel(StaticModel* target, bool isRight);

    float GetIPDCorection() const { return ipdCorrection_; } // NOTE: misspelled name kept for source compatibility
    /// Correctly spelled alias for GetIPDCorection().
    float GetIPDCorrection() const { return ipdCorrection_; }
    float GetHeightCorrection() const { return heightCorrection_; }
    float GetScaleCorrection() const { return scaleCorrection_; }
    void SetIPDCorrection(float value) { ipdCorrection_ = value; }
    /// Height correction can also be done on the VRRig node.
    void SetHeightCorrection(float value) { heightCorrection_ = value; }
    /// Scale correction can also be done on the VRRig node.
    void SetScaleCorrection(float value) { scaleCorrection_ = value; }

    /// Returns the currently chosen MSAA level.
    int GetMSAALevel() const { return msaaLevel_; }
    /// Change MSAA level, have to call CreateEyeTextures() to update.
    void SetMSAALevel(int level) { msaaLevel_ = Clamp(level, 0, 16); }

    /// Can use render-scale to resize targets if FPS is too low.
    float GetRenderScale() const { return renderScale_; }
    /// Sets the scale factor for the render-targets, have to call CreateEyeTextures() to update.
    void SetRenderScale(float value) {
        // BUGFIX: the original clamped renderScale_ itself rather than the incoming
        // 'value', so the setter silently never changed anything.
        renderScale_ = Clamp(value, 0.25f, 2.0f);
        if (trueEyeWidth_ > 0)
        {
            eyeWidth_ = trueEyeWidth_ * renderScale_;
            eyeHeight_ = trueEyeHeight_ * renderScale_;
        }
    }

    bool IsSingleTexture() const { return useSingleTexture_; }
    void SetSingleTexture(bool state) { useSingleTexture_ = state; }

    /// Sets up the Head, Left_Eye, Right_Eye nodes.
    void PrepareRig(Node* vrRig);
    void ConfigureRig(Node* vrRig, float nearDist, float farDist, bool forSinglePass);
    void ConfigureRig(Scene* scene, Node* head, Node* leftEye, Node* rightEye, float nearDist, float farDist, bool forSinglePass);
    void ConfigureHands(Scene* scene, Node* rigRoot, Node* leftHand, Node* rightHand);

    SharedPtr<Texture2D> GetLeftEyeTexture() const { return useSingleTexture_ ? sharedTexture_ : leftTexture_; }
    SharedPtr<Texture2D> GetRightEyeTexture() const { return useSingleTexture_ ? sharedTexture_ : rightTexture_; }
    IntRect GetLeftEyeRect() const { return IntRect(0, 0, eyeWidth_, eyeHeight_); }
    /// In single-texture mode the right eye occupies the right half of the shared target.
    IntRect GetRightEyeRect() const { return useSingleTexture_ ? IntRect(eyeWidth_, 0, eyeWidth_ * 2, eyeHeight_) : IntRect(0, 0, eyeWidth_, eyeHeight_); }

    /// Renders the eye-masks to depth 0 (-1 in GL) so depth-test discards pixels. Also clears the render-targets in question.
    void DrawEyeMasks();
    bool IsAutoDrawEyeMasks() const { return autoClearMasks_; }
    void SetAutoDrawEyeMasks(bool state) { autoClearMasks_ = state; }

    PODVector<VRBinding>& GetInputBindings() { return bindings_; }
    const PODVector<VRBinding>& GetInputBindings() const { return bindings_; }
    /// Find a binding by path.
    VRBinding& GetInputBinding(const String& path);
    /// Find a binding by path.
    const VRBinding& GetInputBinding(const String& path) const;
    /// Find a binding by localization title. (Parameter renamed from 'path' to match the definition.)
    VRBinding& GetInputBindingByTitle(const String& name);
    /// Find a binding by localization title.
    const VRBinding& GetInputBindingByTitle(const String& name) const;

    /// Called internally during initialization. Call again whenever changing render-scale, single-texture, or MSAA.
    void CreateEyeTextures();

private:
    // Conversions from OpenVR's row-major right-handed math types to Urho3D's.
    Matrix4 ToUrho(const vr::HmdMatrix44_t&) const;
    Matrix4 ToUrho(const vr::HmdMatrix34_t&) const;
    Vector3 ToUrho(const vr::HmdVector3_t& v) const;

    void LoadHiddenAreaMesh();
    void LoadRenderModels();
    void ParseManifestFile(const String& manifestFile);
    void UpdateBindingValues();

    vr::IVRSystem* vrSystem_ = nullptr;
    float renderScale_ = 1.0f;
    float ipdCorrection_ = 0.0f;
    float heightCorrection_ = 0.0f;
    float scaleCorrection_ = 1.0f;
    uint32_t trueEyeWidth_ = 0, trueEyeHeight_ = 0;
    /// BUGFIX: previously uninitialized; zero until CreateEyeTextures() assigns real sizes.
    uint32_t eyeWidth_ = 0, eyeHeight_ = 0;
    SharedPtr<Texture2D> leftTexture_, rightTexture_, sharedTexture_, leftDS_, rightDS_, sharedDS_;
    SharedPtr<Geometry> hiddenAreaMesh_[2];
    vr::TrackedDevicePose_t poses_[vr::k_unMaxTrackedDeviceCount];
    // The only reliable haptics we'll have.
    vr::VRActionHandle_t hapticHandles_[2] = { vr::k_ulInvalidActionHandle, vr::k_ulInvalidActionHandle };
    vr::VRInputValueHandle_t headInputHandle_ = 0;
    vr::VRInputValueHandle_t handInputHandles_[2] = { 0, 0 };
    vr::VRActionSetHandle_t defaultActionSet_ = 0;
    // NOTE(review): VRBinding holds String/Variant members, which are not POD;
    // storing them in PODVector (memcpy semantics) looks unsafe — confirm and
    // consider Vector<VRBinding>, though that changes the GetInputBindings() type.
    PODVector<VRBinding> bindings_;
    /// Returned by reference from the binding lookups when no match is found.
    VRBinding badBinding_;
    int msaaLevel_ = 4; // default to the recommended 4x
    bool hasHaptics_ = false;
    bool useSingleTexture_ = true; // default to single texture (so we can use instancing optimization)
    bool autoClearMasks_ = false;

    /// CPU-side controller render-model data fetched from OpenVR, one per hand.
    struct ControlMesh {
        String name_;
        vr::RenderModel_t* model_ = nullptr;
        vr::RenderModel_TextureMap_t* colorTex_ = nullptr;
        SharedPtr<Geometry> geometry_;
        SharedPtr<Texture2D> texture_;
        BoundingBox bounds_;
        void Free();
    } wandMeshes_[2];
};
}
#pragma once
#include "../Core/Object.h"
namespace Urho3D
{
/// VR pause-state change; P_STATE carries the new paused flag.
/// NOTE(review): the exact trigger (dashboard open, headset idle, etc.) is not
/// visible from this header — confirm at the event's send site.
URHO3D_EVENT(E_VRPAUSE, VRPause)
{
    URHO3D_PARAM(P_STATE, State); // bool
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment