mirror of
https://github.com/baldurk/renderdoc.git
synced 2026-05-06 01:50:38 +00:00
Handle vertex picking in world space for post-projection data
* Previously the vertex picking was always done in NDC space; now, for perspective projections, we unproject into world space and raycast there, giving more reliable results with unusual projections.
This commit is contained in:
@@ -113,9 +113,10 @@ BINDING(0) uniform MeshPickUBOData
|
||||
|
||||
uint meshMode; // triangles, triangle strip, fan, etc...
|
||||
uint unproject;
|
||||
vec2 padding;
|
||||
uint flipY;
|
||||
uint ortho;
|
||||
|
||||
mat4 mvp;
|
||||
mat4 transformMat;
|
||||
}
|
||||
INST_NAME(meshpick);
|
||||
|
||||
|
||||
@@ -87,7 +87,7 @@ bool TriangleRayIntersect(vec3 A, vec3 B, vec3 C, vec3 RayPosition, vec3 RayDire
|
||||
if(u >= 0.0f && u <= 1.0f && v >= 0.0f && u + v <= 1.0f)
|
||||
{
|
||||
float t = dot(v0v2, qvec) * invDet;
|
||||
if(t > 0.0f)
|
||||
if(t >= 0.0f)
|
||||
{
|
||||
HitPosition = RayPosition + (RayDirection * t);
|
||||
Result = true;
|
||||
@@ -164,6 +164,13 @@ void trianglePath(uint threadID)
|
||||
bool hit;
|
||||
if(meshpick.unproject == 1u)
|
||||
{
|
||||
if(meshpick.ortho == 0u)
|
||||
{
|
||||
pos0 = meshpick.transformMat * pos0;
|
||||
pos1 = meshpick.transformMat * pos1;
|
||||
pos2 = meshpick.transformMat * pos2;
|
||||
}
|
||||
|
||||
pos0.xyz /= pos0.w;
|
||||
pos1.xyz /= pos1.w;
|
||||
pos2.xyz /= pos2.w;
|
||||
@@ -213,12 +220,13 @@ void defaultPath(uint threadID)
|
||||
pos = vec4(pos.x, -pos.y, pos.z, pos.w);
|
||||
#endif
|
||||
|
||||
vec4 wpos = meshpick.mvp * pos;
|
||||
vec4 wpos = meshpick.transformMat * pos;
|
||||
|
||||
if(meshpick.unproject == 1u)
|
||||
wpos.xyz /= wpos.www;
|
||||
|
||||
wpos.xy *= vec2(1.0f, -1.0f);
|
||||
if(meshpick.flipY == 0u)
|
||||
wpos.xy *= vec2(1.0f, -1.0f);
|
||||
|
||||
vec2 scr = (wpos.xy + 1.0f) * 0.5f * meshpick.viewport;
|
||||
|
||||
|
||||
@@ -130,9 +130,10 @@ cbuffer MeshPickData REG(b0)
|
||||
|
||||
uint PickMeshMode;
|
||||
uint PickUnproject;
|
||||
float2 Padding;
|
||||
uint PickFlipY;
|
||||
uint PickOrtho;
|
||||
|
||||
row_major float4x4 PickMVP;
|
||||
row_major float4x4 PickTransformMat;
|
||||
};
|
||||
|
||||
#define HEATMAP_DISABLED 0
|
||||
|
||||
@@ -173,7 +173,7 @@ bool TriangleRayIntersect(float3 A, float3 B, float3 C, float3 RayPosition, floa
|
||||
if(u >= 0 && u <= 1 && v >= 0 && u + v <= 1)
|
||||
{
|
||||
float t = dot(v0v2, qvec) * invDet;
|
||||
if(t > 0)
|
||||
if(t >= 0)
|
||||
{
|
||||
HitPosition = RayPosition + (RayDirection * t);
|
||||
Result = true;
|
||||
@@ -234,6 +234,13 @@ void trianglePath(uint threadID)
|
||||
bool hit;
|
||||
if(PickUnproject == 1)
|
||||
{
|
||||
if(PickOrtho == 0)
|
||||
{
|
||||
pos0 = mul(pos0, PickTransformMat);
|
||||
pos1 = mul(pos1, PickTransformMat);
|
||||
pos2 = mul(pos2, PickTransformMat);
|
||||
}
|
||||
|
||||
pos0.xyz /= pos0.w;
|
||||
pos1.xyz /= pos1.w;
|
||||
pos2.xyz /= pos2.w;
|
||||
@@ -275,12 +282,13 @@ void defaultPath(uint threadID)
|
||||
|
||||
float4 pos = vertex[idx];
|
||||
|
||||
float4 wpos = mul(pos, PickMVP);
|
||||
float4 wpos = mul(pos, PickTransformMat);
|
||||
|
||||
if(PickUnproject == 1)
|
||||
wpos.xyz /= wpos.www;
|
||||
|
||||
wpos.xy *= float2(1.0f, -1.0f);
|
||||
if(PickFlipY == 0)
|
||||
wpos.xy *= float2(1.0f, -1.0f);
|
||||
|
||||
float2 scr = (wpos.xy + 1.0f) * 0.5f * PickViewport;
|
||||
|
||||
|
||||
@@ -573,7 +573,10 @@ void D3D11Replay::InitPostVSBuffers(uint32_t eventId)
|
||||
float m = (B.y - A.y) / (B.x - A.x);
|
||||
float c = B.y - B.x * m;
|
||||
|
||||
if(m == 1.0f)
|
||||
if(m == 1.0f || c == 0.0f)
|
||||
continue;
|
||||
|
||||
if(-c / m <= 0.000001f)
|
||||
continue;
|
||||
|
||||
nearp = -c / m;
|
||||
@@ -951,7 +954,10 @@ void D3D11Replay::InitPostVSBuffers(uint32_t eventId)
|
||||
float m = (B.y - A.y) / (B.x - A.x);
|
||||
float c = B.y - B.x * m;
|
||||
|
||||
if(m == 1.0f)
|
||||
if(m == 1.0f || c == 0.0f)
|
||||
continue;
|
||||
|
||||
if(-c / m <= 0.000001f)
|
||||
continue;
|
||||
|
||||
nearp = -c / m;
|
||||
|
||||
@@ -365,6 +365,8 @@ void D3D11Replay::RenderMesh(uint32_t eventId, const rdcarray<MeshFormat> &secon
|
||||
|
||||
if(cfg.highlightVert != ~0U)
|
||||
{
|
||||
vertexData.homogenousInput = cfg.position.unproject;
|
||||
|
||||
m_HighlightCache.CacheHighlightingData(eventId, cfg);
|
||||
|
||||
D3D11_PRIMITIVE_TOPOLOGY meshtopo = topo;
|
||||
@@ -532,6 +534,8 @@ void D3D11Replay::RenderMesh(uint32_t eventId, const rdcarray<MeshFormat> &secon
|
||||
m_pImmediateContext->VSSetShader(m_MeshRender.MeshVS, NULL, 0);
|
||||
}
|
||||
|
||||
vertexData.homogenousInput = 0U;
|
||||
|
||||
// bounding box
|
||||
if(cfg.showBBox)
|
||||
{
|
||||
|
||||
@@ -2917,6 +2917,8 @@ uint32_t D3D11Replay::PickVertex(uint32_t eventId, int32_t width, int32_t height
|
||||
cbuf.PickIdx = cfg.position.indexByteStride ? 1 : 0;
|
||||
cbuf.PickNumVerts = cfg.position.numIndices;
|
||||
cbuf.PickUnproject = cfg.position.unproject ? 1 : 0;
|
||||
cbuf.PickFlipY = cfg.position.flipY;
|
||||
cbuf.PickOrtho = cfg.ortho;
|
||||
|
||||
Matrix4f projMat = Matrix4f::Perspective(90.0f, 0.1f, 100000.0f, float(width) / float(height));
|
||||
|
||||
@@ -2924,15 +2926,23 @@ uint32_t D3D11Replay::PickVertex(uint32_t eventId, int32_t width, int32_t height
|
||||
|
||||
Matrix4f pickMVP = projMat.Mul(camMat);
|
||||
|
||||
Matrix4f pickMVPProj;
|
||||
bool reverseProjection = false;
|
||||
Matrix4f guessProj;
|
||||
Matrix4f guessProjInverse;
|
||||
if(cfg.position.unproject)
|
||||
{
|
||||
// the derivation of the projection matrix might not be right (hell, it could be an
|
||||
// orthographic projection). But it'll be close enough likely.
|
||||
Matrix4f guessProj =
|
||||
cfg.position.farPlane != FLT_MAX
|
||||
? Matrix4f::Perspective(cfg.fov, cfg.position.nearPlane, cfg.position.farPlane, cfg.aspect)
|
||||
: Matrix4f::ReversePerspective(cfg.fov, cfg.position.nearPlane, cfg.aspect);
|
||||
if(cfg.position.farPlane != FLT_MAX)
|
||||
{
|
||||
guessProj =
|
||||
Matrix4f::Perspective(cfg.fov, cfg.position.nearPlane, cfg.position.farPlane, cfg.aspect);
|
||||
}
|
||||
else
|
||||
{
|
||||
reverseProjection = true;
|
||||
guessProj = Matrix4f::ReversePerspective(cfg.fov, cfg.position.nearPlane, cfg.aspect);
|
||||
}
|
||||
|
||||
if(cfg.ortho)
|
||||
guessProj = Matrix4f::Orthographic(cfg.position.nearPlane, cfg.position.farPlane);
|
||||
@@ -2940,63 +2950,110 @@ uint32_t D3D11Replay::PickVertex(uint32_t eventId, int32_t width, int32_t height
|
||||
if(cfg.position.flipY)
|
||||
guessProj[5] *= -1.0f;
|
||||
|
||||
pickMVPProj = projMat.Mul(camMat.Mul(guessProj.Inverse()));
|
||||
guessProjInverse = guessProj.Inverse();
|
||||
}
|
||||
|
||||
Vec3f rayPos;
|
||||
Vec3f rayDir;
|
||||
// convert mouse pos to world space ray
|
||||
{
|
||||
Matrix4f inversePickMVP = pickMVP.Inverse();
|
||||
|
||||
float pickX = ((float)x) / ((float)width);
|
||||
float pickXCanonical = RDCLERP(-1.0f, 1.0f, pickX);
|
||||
|
||||
float pickY = ((float)y) / ((float)height);
|
||||
// flip the Y axis
|
||||
// flip the Y axis by default for Y-up
|
||||
float pickYCanonical = RDCLERP(1.0f, -1.0f, pickY);
|
||||
|
||||
Vec3f cameraToWorldNearPosition =
|
||||
inversePickMVP.Transform(Vec3f(pickXCanonical, pickYCanonical, -1), 1);
|
||||
if(cfg.position.flipY && !cfg.ortho)
|
||||
pickYCanonical = -pickYCanonical;
|
||||
|
||||
Vec3f cameraToWorldFarPosition =
|
||||
inversePickMVP.Transform(Vec3f(pickXCanonical, pickYCanonical, 1), 1);
|
||||
// x/y is inside the window. Since we're not using the window projection we need to correct
|
||||
// for the aspect ratio here.
|
||||
if(cfg.position.unproject && !cfg.ortho)
|
||||
pickXCanonical *= (float(width) / float(height)) / cfg.aspect;
|
||||
|
||||
Vec3f testDir = (cameraToWorldFarPosition - cameraToWorldNearPosition);
|
||||
testDir.Normalise();
|
||||
// set up the NDC near/far pos
|
||||
Vec3f nearPosNDC = Vec3f(pickXCanonical, pickYCanonical, 0);
|
||||
Vec3f farPosNDC = Vec3f(pickXCanonical, pickYCanonical, 1);
|
||||
|
||||
// Calculate the ray direction first in the regular way (above), so we can use the
|
||||
// the output for testing if the ray we are picking is negative or not. This is similar
|
||||
// to checking against the forward direction of the camera, but more robust
|
||||
if(cfg.position.unproject)
|
||||
if(cfg.position.unproject && cfg.ortho)
|
||||
{
|
||||
Matrix4f inversePickMVPGuess = pickMVPProj.Inverse();
|
||||
// orthographic projections we raycast in NDC space
|
||||
Matrix4f inversePickMVP = pickMVP.Inverse();
|
||||
|
||||
Vec3f nearPosProj = inversePickMVPGuess.Transform(Vec3f(pickXCanonical, pickYCanonical, -1), 1);
|
||||
// transform from the desired NDC co-ordinates into camera space
|
||||
Vec3f nearPosCamera = inversePickMVP.Transform(nearPosNDC, 1);
|
||||
Vec3f farPosCamera = inversePickMVP.Transform(farPosNDC, 1);
|
||||
|
||||
Vec3f farPosProj = inversePickMVPGuess.Transform(Vec3f(pickXCanonical, pickYCanonical, 1), 1);
|
||||
Vec3f testDir = (farPosCamera - nearPosCamera);
|
||||
testDir.Normalise();
|
||||
|
||||
Matrix4f pickMVPguessProjInverse = guessProj.Mul(inversePickMVP);
|
||||
|
||||
Vec3f nearPosProj = pickMVPguessProjInverse.Transform(nearPosNDC, 1);
|
||||
Vec3f farPosProj = pickMVPguessProjInverse.Transform(farPosNDC, 1);
|
||||
|
||||
rayDir = (farPosProj - nearPosProj);
|
||||
rayDir.Normalise();
|
||||
|
||||
// Calculate the ray direction first in the regular way (above), so we can use the
|
||||
// the output for testing if the ray we are picking is negative or not. This is similar
|
||||
// to checking against the forward direction of the camera, but more robust
|
||||
if(testDir.z < 0)
|
||||
{
|
||||
rayDir = -rayDir;
|
||||
}
|
||||
rayPos = nearPosProj;
|
||||
}
|
||||
else if(cfg.position.unproject)
|
||||
{
|
||||
// projected data we pick in world-space to avoid problems with handling unusual transforms
|
||||
|
||||
if(reverseProjection)
|
||||
{
|
||||
farPosNDC.z = 1e-6f;
|
||||
nearPosNDC.z = 1e+6f;
|
||||
}
|
||||
|
||||
// invert the guessed projection matrix to get the near/far pos in camera space
|
||||
Vec3f nearPosCamera = guessProjInverse.Transform(nearPosNDC, 1.0f);
|
||||
Vec3f farPosCamera = guessProjInverse.Transform(farPosNDC, 1.0f);
|
||||
|
||||
// normalise and generate the ray
|
||||
rayDir = (farPosCamera - nearPosCamera);
|
||||
rayDir.Normalise();
|
||||
|
||||
farPosCamera = nearPosCamera + rayDir;
|
||||
|
||||
// invert the camera transform to transform the ray as camera-relative into world space
|
||||
Matrix4f inverseCamera = camMat.Inverse();
|
||||
|
||||
Vec3f nearPosWorld = inverseCamera.Transform(nearPosCamera, 1);
|
||||
Vec3f farPosWorld = inverseCamera.Transform(farPosCamera, 1);
|
||||
|
||||
// again normalise our final ray
|
||||
rayDir = (farPosWorld - nearPosWorld);
|
||||
rayDir.Normalise();
|
||||
|
||||
rayPos = nearPosWorld;
|
||||
}
|
||||
else
|
||||
{
|
||||
rayDir = testDir;
|
||||
rayPos = cameraToWorldNearPosition;
|
||||
Matrix4f inversePickMVP = pickMVP.Inverse();
|
||||
|
||||
// transform from the desired NDC co-ordinates into model space
|
||||
Vec3f nearPosCamera = inversePickMVP.Transform(nearPosNDC, 1);
|
||||
Vec3f farPosCamera = inversePickMVP.Transform(farPosNDC, 1);
|
||||
|
||||
rayDir = (farPosCamera - nearPosCamera);
|
||||
rayDir.Normalise();
|
||||
rayPos = nearPosCamera;
|
||||
}
|
||||
}
|
||||
|
||||
cbuf.PickRayPos = rayPos;
|
||||
cbuf.PickRayDir = rayDir;
|
||||
|
||||
cbuf.PickMVP = cfg.position.unproject ? pickMVPProj : pickMVP;
|
||||
|
||||
bool isTriangleMesh = true;
|
||||
switch(cfg.position.topology)
|
||||
{
|
||||
@@ -3027,6 +3084,31 @@ uint32_t D3D11Replay::PickVertex(uint32_t eventId, int32_t width, int32_t height
|
||||
}
|
||||
}
|
||||
|
||||
if(cfg.position.unproject && isTriangleMesh)
|
||||
{
|
||||
// projected triangle meshes we transform the vertices into world space, and ray-cast against
|
||||
// that
|
||||
//
|
||||
// NOTE: for ortho, this matrix is not used and we just do the perspective W division on model
|
||||
// vertices. The ray is cast in NDC
|
||||
if(cfg.ortho)
|
||||
cbuf.PickTransformMat = Matrix4f::Identity();
|
||||
else
|
||||
cbuf.PickTransformMat = guessProjInverse;
|
||||
}
|
||||
else if(cfg.position.unproject)
|
||||
{
|
||||
// projected non-triangles are just point clouds, so we transform the vertices into world space
|
||||
// then project them back onto the output and compare that against the picking 2D co-ordinates
|
||||
cbuf.PickTransformMat = pickMVP.Mul(guessProjInverse);
|
||||
}
|
||||
else
|
||||
{
|
||||
// plain meshes of either type, we just transform from model space to the output, and raycast or
|
||||
// co-ordinate check
|
||||
cbuf.PickTransformMat = pickMVP;
|
||||
}
|
||||
|
||||
ID3D11Buffer *vb = NULL, *ib = NULL;
|
||||
|
||||
{
|
||||
|
||||
@@ -700,7 +700,10 @@ void D3D12Replay::InitPostVSBuffers(uint32_t eventId)
|
||||
float m = (B.y - A.y) / (B.x - A.x);
|
||||
float c = B.y - B.x * m;
|
||||
|
||||
if(m == 1.0f)
|
||||
if(m == 1.0f || c == 0.0f)
|
||||
continue;
|
||||
|
||||
if(-c / m <= 0.000001f)
|
||||
continue;
|
||||
|
||||
nearp = -c / m;
|
||||
@@ -1269,7 +1272,10 @@ void D3D12Replay::InitPostVSBuffers(uint32_t eventId)
|
||||
float m = (B.y - A.y) / (B.x - A.x);
|
||||
float c = B.y - B.x * m;
|
||||
|
||||
if(m == 1.0f)
|
||||
if(m == 1.0f || c == 0.0f)
|
||||
continue;
|
||||
|
||||
if(-c / m <= 0.000001f)
|
||||
continue;
|
||||
|
||||
nearp = -c / m;
|
||||
|
||||
@@ -1923,6 +1923,8 @@ uint32_t D3D12Replay::PickVertex(uint32_t eventId, int32_t width, int32_t height
|
||||
cbuf.PickIdx = cfg.position.indexByteStride ? 1 : 0;
|
||||
cbuf.PickNumVerts = cfg.position.numIndices;
|
||||
cbuf.PickUnproject = cfg.position.unproject ? 1 : 0;
|
||||
cbuf.PickFlipY = cfg.position.flipY;
|
||||
cbuf.PickOrtho = cfg.ortho;
|
||||
|
||||
Matrix4f projMat = Matrix4f::Perspective(90.0f, 0.1f, 100000.0f, float(width) / float(height));
|
||||
|
||||
@@ -1930,15 +1932,23 @@ uint32_t D3D12Replay::PickVertex(uint32_t eventId, int32_t width, int32_t height
|
||||
|
||||
Matrix4f pickMVP = projMat.Mul(camMat);
|
||||
|
||||
Matrix4f pickMVPProj;
|
||||
bool reverseProjection = false;
|
||||
Matrix4f guessProj;
|
||||
Matrix4f guessProjInverse;
|
||||
if(cfg.position.unproject)
|
||||
{
|
||||
// the derivation of the projection matrix might not be right (hell, it could be an
|
||||
// orthographic projection). But it'll be close enough likely.
|
||||
Matrix4f guessProj =
|
||||
cfg.position.farPlane != FLT_MAX
|
||||
? Matrix4f::Perspective(cfg.fov, cfg.position.nearPlane, cfg.position.farPlane, cfg.aspect)
|
||||
: Matrix4f::ReversePerspective(cfg.fov, cfg.position.nearPlane, cfg.aspect);
|
||||
if(cfg.position.farPlane != FLT_MAX)
|
||||
{
|
||||
guessProj =
|
||||
Matrix4f::Perspective(cfg.fov, cfg.position.nearPlane, cfg.position.farPlane, cfg.aspect);
|
||||
}
|
||||
else
|
||||
{
|
||||
reverseProjection = true;
|
||||
guessProj = Matrix4f::ReversePerspective(cfg.fov, cfg.position.nearPlane, cfg.aspect);
|
||||
}
|
||||
|
||||
if(cfg.ortho)
|
||||
guessProj = Matrix4f::Orthographic(cfg.position.nearPlane, cfg.position.farPlane);
|
||||
@@ -1946,63 +1956,110 @@ uint32_t D3D12Replay::PickVertex(uint32_t eventId, int32_t width, int32_t height
|
||||
if(cfg.position.flipY)
|
||||
guessProj[5] *= -1.0f;
|
||||
|
||||
pickMVPProj = projMat.Mul(camMat.Mul(guessProj.Inverse()));
|
||||
guessProjInverse = guessProj.Inverse();
|
||||
}
|
||||
|
||||
Vec3f rayPos;
|
||||
Vec3f rayDir;
|
||||
// convert mouse pos to world space ray
|
||||
{
|
||||
Matrix4f inversePickMVP = pickMVP.Inverse();
|
||||
|
||||
float pickX = ((float)x) / ((float)width);
|
||||
float pickXCanonical = RDCLERP(-1.0f, 1.0f, pickX);
|
||||
|
||||
float pickY = ((float)y) / ((float)height);
|
||||
// flip the Y axis
|
||||
// flip the Y axis by default for Y-up
|
||||
float pickYCanonical = RDCLERP(1.0f, -1.0f, pickY);
|
||||
|
||||
Vec3f cameraToWorldNearPosition =
|
||||
inversePickMVP.Transform(Vec3f(pickXCanonical, pickYCanonical, -1), 1);
|
||||
if(cfg.position.flipY && !cfg.ortho)
|
||||
pickYCanonical = -pickYCanonical;
|
||||
|
||||
Vec3f cameraToWorldFarPosition =
|
||||
inversePickMVP.Transform(Vec3f(pickXCanonical, pickYCanonical, 1), 1);
|
||||
// x/y is inside the window. Since we're not using the window projection we need to correct
|
||||
// for the aspect ratio here.
|
||||
if(cfg.position.unproject && !cfg.ortho)
|
||||
pickXCanonical *= (float(width) / float(height)) / cfg.aspect;
|
||||
|
||||
Vec3f testDir = (cameraToWorldFarPosition - cameraToWorldNearPosition);
|
||||
testDir.Normalise();
|
||||
// set up the NDC near/far pos
|
||||
Vec3f nearPosNDC = Vec3f(pickXCanonical, pickYCanonical, 0);
|
||||
Vec3f farPosNDC = Vec3f(pickXCanonical, pickYCanonical, 1);
|
||||
|
||||
// Calculate the ray direction first in the regular way (above), so we can use the
|
||||
// the output for testing if the ray we are picking is negative or not. This is similar
|
||||
// to checking against the forward direction of the camera, but more robust
|
||||
if(cfg.position.unproject)
|
||||
if(cfg.position.unproject && cfg.ortho)
|
||||
{
|
||||
Matrix4f inversePickMVPGuess = pickMVPProj.Inverse();
|
||||
// orthographic projections we raycast in NDC space
|
||||
Matrix4f inversePickMVP = pickMVP.Inverse();
|
||||
|
||||
Vec3f nearPosProj = inversePickMVPGuess.Transform(Vec3f(pickXCanonical, pickYCanonical, -1), 1);
|
||||
// transform from the desired NDC co-ordinates into camera space
|
||||
Vec3f nearPosCamera = inversePickMVP.Transform(nearPosNDC, 1);
|
||||
Vec3f farPosCamera = inversePickMVP.Transform(farPosNDC, 1);
|
||||
|
||||
Vec3f farPosProj = inversePickMVPGuess.Transform(Vec3f(pickXCanonical, pickYCanonical, 1), 1);
|
||||
Vec3f testDir = (farPosCamera - nearPosCamera);
|
||||
testDir.Normalise();
|
||||
|
||||
Matrix4f pickMVPguessProjInverse = guessProj.Mul(inversePickMVP);
|
||||
|
||||
Vec3f nearPosProj = pickMVPguessProjInverse.Transform(nearPosNDC, 1);
|
||||
Vec3f farPosProj = pickMVPguessProjInverse.Transform(farPosNDC, 1);
|
||||
|
||||
rayDir = (farPosProj - nearPosProj);
|
||||
rayDir.Normalise();
|
||||
|
||||
// Calculate the ray direction first in the regular way (above), so we can use the
|
||||
// the output for testing if the ray we are picking is negative or not. This is similar
|
||||
// to checking against the forward direction of the camera, but more robust
|
||||
if(testDir.z < 0)
|
||||
{
|
||||
rayDir = -rayDir;
|
||||
}
|
||||
rayPos = nearPosProj;
|
||||
}
|
||||
else if(cfg.position.unproject)
|
||||
{
|
||||
// projected data we pick in world-space to avoid problems with handling unusual transforms
|
||||
|
||||
if(reverseProjection)
|
||||
{
|
||||
farPosNDC.z = 1e-6f;
|
||||
nearPosNDC.z = 1e+6f;
|
||||
}
|
||||
|
||||
// invert the guessed projection matrix to get the near/far pos in camera space
|
||||
Vec3f nearPosCamera = guessProjInverse.Transform(nearPosNDC, 1.0f);
|
||||
Vec3f farPosCamera = guessProjInverse.Transform(farPosNDC, 1.0f);
|
||||
|
||||
// normalise and generate the ray
|
||||
rayDir = (farPosCamera - nearPosCamera);
|
||||
rayDir.Normalise();
|
||||
|
||||
farPosCamera = nearPosCamera + rayDir;
|
||||
|
||||
// invert the camera transform to transform the ray as camera-relative into world space
|
||||
Matrix4f inverseCamera = camMat.Inverse();
|
||||
|
||||
Vec3f nearPosWorld = inverseCamera.Transform(nearPosCamera, 1);
|
||||
Vec3f farPosWorld = inverseCamera.Transform(farPosCamera, 1);
|
||||
|
||||
// again normalise our final ray
|
||||
rayDir = (farPosWorld - nearPosWorld);
|
||||
rayDir.Normalise();
|
||||
|
||||
rayPos = nearPosWorld;
|
||||
}
|
||||
else
|
||||
{
|
||||
rayDir = testDir;
|
||||
rayPos = cameraToWorldNearPosition;
|
||||
Matrix4f inversePickMVP = pickMVP.Inverse();
|
||||
|
||||
// transform from the desired NDC co-ordinates into model space
|
||||
Vec3f nearPosCamera = inversePickMVP.Transform(nearPosNDC, 1);
|
||||
Vec3f farPosCamera = inversePickMVP.Transform(farPosNDC, 1);
|
||||
|
||||
rayDir = (farPosCamera - nearPosCamera);
|
||||
rayDir.Normalise();
|
||||
rayPos = nearPosCamera;
|
||||
}
|
||||
}
|
||||
|
||||
cbuf.PickRayPos = rayPos;
|
||||
cbuf.PickRayDir = rayDir;
|
||||
|
||||
cbuf.PickMVP = cfg.position.unproject ? pickMVPProj : pickMVP;
|
||||
|
||||
bool isTriangleMesh = true;
|
||||
switch(cfg.position.topology)
|
||||
{
|
||||
@@ -2033,6 +2090,31 @@ uint32_t D3D12Replay::PickVertex(uint32_t eventId, int32_t width, int32_t height
|
||||
}
|
||||
}
|
||||
|
||||
if(cfg.position.unproject && isTriangleMesh)
|
||||
{
|
||||
// projected triangle meshes we transform the vertices into world space, and ray-cast against
|
||||
// that
|
||||
//
|
||||
// NOTE: for ortho, this matrix is not used and we just do the perspective W division on model
|
||||
// vertices. The ray is cast in NDC
|
||||
if(cfg.ortho)
|
||||
cbuf.PickTransformMat = Matrix4f::Identity();
|
||||
else
|
||||
cbuf.PickTransformMat = guessProjInverse;
|
||||
}
|
||||
else if(cfg.position.unproject)
|
||||
{
|
||||
// projected non-triangles are just point clouds, so we transform the vertices into world space
|
||||
// then project them back onto the output and compare that against the picking 2D co-ordinates
|
||||
cbuf.PickTransformMat = pickMVP.Mul(guessProjInverse);
|
||||
}
|
||||
else
|
||||
{
|
||||
// plain meshes of either type, we just transform from model space to the output, and raycast or
|
||||
// co-ordinate check
|
||||
cbuf.PickTransformMat = pickMVP;
|
||||
}
|
||||
|
||||
ID3D12Resource *vb = NULL, *ib = NULL;
|
||||
|
||||
if(cfg.position.vertexResourceId != ResourceId())
|
||||
|
||||
@@ -2255,15 +2255,23 @@ uint32_t GLReplay::PickVertex(uint32_t eventId, int32_t width, int32_t height,
|
||||
Matrix4f camMat = cfg.cam ? ((Camera *)cfg.cam)->GetMatrix() : Matrix4f::Identity();
|
||||
Matrix4f pickMVP = projMat.Mul(camMat);
|
||||
|
||||
Matrix4f pickMVPProj;
|
||||
bool reverseProjection = false;
|
||||
Matrix4f guessProj;
|
||||
Matrix4f guessProjInverse;
|
||||
if(cfg.position.unproject)
|
||||
{
|
||||
// the derivation of the projection matrix might not be right (hell, it could be an
|
||||
// orthographic projection). But it'll be close enough likely.
|
||||
Matrix4f guessProj =
|
||||
cfg.position.farPlane != FLT_MAX
|
||||
? Matrix4f::Perspective(cfg.fov, cfg.position.nearPlane, cfg.position.farPlane, cfg.aspect)
|
||||
: Matrix4f::ReversePerspective(cfg.fov, cfg.position.nearPlane, cfg.aspect);
|
||||
if(cfg.position.farPlane != FLT_MAX)
|
||||
{
|
||||
guessProj =
|
||||
Matrix4f::Perspective(cfg.fov, cfg.position.nearPlane, cfg.position.farPlane, cfg.aspect);
|
||||
}
|
||||
else
|
||||
{
|
||||
reverseProjection = true;
|
||||
guessProj = Matrix4f::ReversePerspective(cfg.fov, cfg.position.nearPlane, cfg.aspect);
|
||||
}
|
||||
|
||||
if(cfg.ortho)
|
||||
guessProj = Matrix4f::Orthographic(cfg.position.nearPlane, cfg.position.farPlane);
|
||||
@@ -2271,55 +2279,104 @@ uint32_t GLReplay::PickVertex(uint32_t eventId, int32_t width, int32_t height,
|
||||
if(cfg.position.flipY)
|
||||
guessProj[5] *= -1.0f;
|
||||
|
||||
pickMVPProj = projMat.Mul(camMat.Mul(guessProj.Inverse()));
|
||||
guessProjInverse = guessProj.Inverse();
|
||||
}
|
||||
|
||||
vec3 rayPos;
|
||||
vec3 rayDir;
|
||||
Vec3f rayPos;
|
||||
Vec3f rayDir;
|
||||
// convert mouse pos to world space ray
|
||||
{
|
||||
Matrix4f inversePickMVP = pickMVP.Inverse();
|
||||
|
||||
float pickX = ((float)x) / ((float)width);
|
||||
float pickXCanonical = RDCLERP(-1.0f, 1.0f, pickX);
|
||||
|
||||
float pickY = ((float)y) / ((float)height);
|
||||
// flip the Y axis
|
||||
// flip the Y axis by default for Y-up
|
||||
float pickYCanonical = RDCLERP(1.0f, -1.0f, pickY);
|
||||
|
||||
vec3 cameraToWorldNearPosition =
|
||||
inversePickMVP.Transform(Vec3f(pickXCanonical, pickYCanonical, -1), 1);
|
||||
if(cfg.position.flipY && !cfg.ortho)
|
||||
pickYCanonical = -pickYCanonical;
|
||||
|
||||
vec3 cameraToWorldFarPosition =
|
||||
inversePickMVP.Transform(Vec3f(pickXCanonical, pickYCanonical, 1), 1);
|
||||
// x/y is inside the window. Since we're not using the window projection we need to correct
|
||||
// for the aspect ratio here.
|
||||
if(cfg.position.unproject && !cfg.ortho)
|
||||
pickXCanonical *= (float(width) / float(height)) / cfg.aspect;
|
||||
|
||||
vec3 testDir = (cameraToWorldFarPosition - cameraToWorldNearPosition);
|
||||
testDir.Normalise();
|
||||
// set up the NDC near/far pos
|
||||
Vec3f nearPosNDC = Vec3f(pickXCanonical, pickYCanonical, 0);
|
||||
Vec3f farPosNDC = Vec3f(pickXCanonical, pickYCanonical, 1);
|
||||
|
||||
// Calculate the ray direction first in the regular way (above), so we can use the
|
||||
// the output for testing if the ray we are picking is negative or not. This is similar
|
||||
// to checking against the forward direction of the camera, but more robust
|
||||
if(cfg.position.unproject)
|
||||
if(cfg.position.unproject && cfg.ortho)
|
||||
{
|
||||
Matrix4f inversePickMVPGuess = pickMVPProj.Inverse();
|
||||
// orthographic projections we raycast in NDC space
|
||||
Matrix4f inversePickMVP = pickMVP.Inverse();
|
||||
|
||||
vec3 nearPosProj = inversePickMVPGuess.Transform(Vec3f(pickXCanonical, pickYCanonical, -1), 1);
|
||||
// transform from the desired NDC co-ordinates into camera space
|
||||
Vec3f nearPosCamera = inversePickMVP.Transform(nearPosNDC, 1);
|
||||
Vec3f farPosCamera = inversePickMVP.Transform(farPosNDC, 1);
|
||||
|
||||
vec3 farPosProj = inversePickMVPGuess.Transform(Vec3f(pickXCanonical, pickYCanonical, 1), 1);
|
||||
Vec3f testDir = (farPosCamera - nearPosCamera);
|
||||
testDir.Normalise();
|
||||
|
||||
Matrix4f pickMVPguessProjInverse = guessProj.Mul(inversePickMVP);
|
||||
|
||||
Vec3f nearPosProj = pickMVPguessProjInverse.Transform(nearPosNDC, 1);
|
||||
Vec3f farPosProj = pickMVPguessProjInverse.Transform(farPosNDC, 1);
|
||||
|
||||
rayDir = (farPosProj - nearPosProj);
|
||||
rayDir.Normalise();
|
||||
|
||||
// Calculate the ray direction first in the regular way (above), so we can use the
|
||||
// the output for testing if the ray we are picking is negative or not. This is similar
|
||||
// to checking against the forward direction of the camera, but more robust
|
||||
if(testDir.z < 0)
|
||||
{
|
||||
rayDir = -rayDir;
|
||||
}
|
||||
rayPos = nearPosProj;
|
||||
}
|
||||
else if(cfg.position.unproject)
|
||||
{
|
||||
// projected data we pick in world-space to avoid problems with handling unusual transforms
|
||||
|
||||
if(reverseProjection)
|
||||
{
|
||||
farPosNDC.z = 1e-6f;
|
||||
nearPosNDC.z = 1e+6f;
|
||||
}
|
||||
|
||||
// invert the guessed projection matrix to get the near/far pos in camera space
|
||||
Vec3f nearPosCamera = guessProjInverse.Transform(nearPosNDC, 1.0f);
|
||||
Vec3f farPosCamera = guessProjInverse.Transform(farPosNDC, 1.0f);
|
||||
|
||||
// normalise and generate the ray
|
||||
rayDir = (farPosCamera - nearPosCamera);
|
||||
rayDir.Normalise();
|
||||
|
||||
farPosCamera = nearPosCamera + rayDir;
|
||||
|
||||
// invert the camera transform to transform the ray as camera-relative into world space
|
||||
Matrix4f inverseCamera = camMat.Inverse();
|
||||
|
||||
Vec3f nearPosWorld = inverseCamera.Transform(nearPosCamera, 1);
|
||||
Vec3f farPosWorld = inverseCamera.Transform(farPosCamera, 1);
|
||||
|
||||
// again normalise our final ray
|
||||
rayDir = (farPosWorld - nearPosWorld);
|
||||
rayDir.Normalise();
|
||||
|
||||
rayPos = nearPosWorld;
|
||||
}
|
||||
else
|
||||
{
|
||||
rayDir = testDir;
|
||||
rayPos = cameraToWorldNearPosition;
|
||||
Matrix4f inversePickMVP = pickMVP.Inverse();
|
||||
|
||||
// transform from the desired NDC co-ordinates into model space
|
||||
Vec3f nearPosCamera = inversePickMVP.Transform(nearPosNDC, 1);
|
||||
Vec3f farPosCamera = inversePickMVP.Transform(farPosNDC, 1);
|
||||
|
||||
rayDir = (farPosCamera - nearPosCamera);
|
||||
rayDir.Normalise();
|
||||
rayPos = nearPosCamera;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2584,10 +2641,36 @@ uint32_t GLReplay::PickVertex(uint32_t eventId, int32_t width, int32_t height,
|
||||
|
||||
// line/point data
|
||||
cdata->unproject = cfg.position.unproject;
|
||||
cdata->mvp = cfg.position.unproject ? pickMVPProj : pickMVP;
|
||||
cdata->flipY = cfg.position.flipY;
|
||||
cdata->ortho = cfg.ortho;
|
||||
cdata->coords = Vec2f((float)x, (float)y);
|
||||
cdata->viewport = Vec2f((float)width, (float)height);
|
||||
|
||||
if(cfg.position.unproject && isTriangleMesh)
|
||||
{
|
||||
// projected triangle meshes we transform the vertices into world space, and ray-cast against
|
||||
// that
|
||||
//
|
||||
// NOTE: for ortho, this matrix is not used and we just do the perspective W division on model
|
||||
// vertices. The ray is cast in NDC
|
||||
if(cfg.ortho)
|
||||
cdata->transformMat = Matrix4f::Identity();
|
||||
else
|
||||
cdata->transformMat = guessProjInverse;
|
||||
}
|
||||
else if(cfg.position.unproject)
|
||||
{
|
||||
// projected non-triangles are just point clouds, so we transform the vertices into world space
|
||||
// then project them back onto the output and compare that against the picking 2D co-ordinates
|
||||
cdata->transformMat = pickMVP.Mul(guessProjInverse);
|
||||
}
|
||||
else
|
||||
{
|
||||
// plain meshes of either type, we just transform from model space to the output, and raycast or
|
||||
// co-ordinate check
|
||||
cdata->transformMat = pickMVP;
|
||||
}
|
||||
|
||||
drv.glUnmapBuffer(eGL_UNIFORM_BUFFER);
|
||||
|
||||
uint32_t reset[4] = {};
|
||||
|
||||
@@ -1103,7 +1103,10 @@ void GLReplay::InitPostVSBuffers(uint32_t eventId)
|
||||
float m = (B.y - A.y) / (B.x - A.x);
|
||||
float c = B.y - B.x * m;
|
||||
|
||||
if(m == 1.0f)
|
||||
if(m == 1.0f || c == 0.0f)
|
||||
continue;
|
||||
|
||||
if(-c / m <= 0.000001f)
|
||||
continue;
|
||||
|
||||
nearp = -c / m;
|
||||
@@ -1836,7 +1839,10 @@ void GLReplay::InitPostVSBuffers(uint32_t eventId)
|
||||
float m = (B.y - A.y) / (B.x - A.x);
|
||||
float c = B.y - B.x * m;
|
||||
|
||||
if(m == 1.0f)
|
||||
if(m == 1.0f || c == 0.0f)
|
||||
continue;
|
||||
|
||||
if(-c / m <= 0.000001f)
|
||||
continue;
|
||||
|
||||
nearp = -c / m;
|
||||
|
||||
@@ -1016,29 +1016,36 @@ void VulkanDebugManager::CreateCustomShaderPipeline(ResourceId shader, VkPipelin
|
||||
CREATE_OBJECT(m_Custom.TexPipeline, customPipe);
|
||||
}
|
||||
|
||||
// TODO: Point meshes don't pick correctly
|
||||
uint32_t VulkanReplay::PickVertex(uint32_t eventId, int32_t w, int32_t h, const MeshDisplay &cfg,
|
||||
uint32_t x, uint32_t y)
|
||||
uint32_t VulkanReplay::PickVertex(uint32_t eventId, int32_t width, int32_t height,
|
||||
const MeshDisplay &cfg, uint32_t x, uint32_t y)
|
||||
{
|
||||
VkDevice dev = m_pDriver->GetDev();
|
||||
const VkDevDispatchTable *vt = ObjDisp(dev);
|
||||
|
||||
VkMarkerRegion::Begin(StringFormat::Fmt("VulkanReplay::PickVertex(%u, %u)", x, y));
|
||||
|
||||
Matrix4f projMat = Matrix4f::Perspective(90.0f, 0.1f, 100000.0f, float(w) / float(h));
|
||||
Matrix4f projMat = Matrix4f::Perspective(90.0f, 0.1f, 100000.0f, float(width) / float(height));
|
||||
|
||||
Matrix4f camMat = cfg.cam ? ((Camera *)cfg.cam)->GetMatrix() : Matrix4f::Identity();
|
||||
Matrix4f pickMVP = projMat.Mul(camMat);
|
||||
|
||||
Matrix4f pickMVPProj;
|
||||
bool reverseProjection = false;
|
||||
Matrix4f guessProj;
|
||||
Matrix4f guessProjInverse;
|
||||
if(cfg.position.unproject)
|
||||
{
|
||||
// the derivation of the projection matrix might not be right (hell, it could be an
|
||||
// orthographic projection). But it'll be close enough likely.
|
||||
Matrix4f guessProj =
|
||||
cfg.position.farPlane != FLT_MAX
|
||||
? Matrix4f::Perspective(cfg.fov, cfg.position.nearPlane, cfg.position.farPlane, cfg.aspect)
|
||||
: Matrix4f::ReversePerspective(cfg.fov, cfg.position.nearPlane, cfg.aspect);
|
||||
if(cfg.position.farPlane != FLT_MAX)
|
||||
{
|
||||
guessProj =
|
||||
Matrix4f::Perspective(cfg.fov, cfg.position.nearPlane, cfg.position.farPlane, cfg.aspect);
|
||||
}
|
||||
else
|
||||
{
|
||||
reverseProjection = true;
|
||||
guessProj = Matrix4f::ReversePerspective(cfg.fov, cfg.position.nearPlane, cfg.aspect);
|
||||
}
|
||||
|
||||
if(cfg.ortho)
|
||||
guessProj = Matrix4f::Orthographic(cfg.position.nearPlane, cfg.position.farPlane);
|
||||
@@ -1046,56 +1053,104 @@ uint32_t VulkanReplay::PickVertex(uint32_t eventId, int32_t w, int32_t h, const
|
||||
if(cfg.position.flipY)
|
||||
guessProj[5] *= -1.0f;
|
||||
|
||||
pickMVPProj = projMat.Mul(camMat.Mul(guessProj.Inverse()));
|
||||
guessProjInverse = guessProj.Inverse();
|
||||
}
|
||||
|
||||
vec3 rayPos;
|
||||
vec3 rayDir;
|
||||
Vec3f rayPos;
|
||||
Vec3f rayDir;
|
||||
// convert mouse pos to world space ray
|
||||
{
|
||||
Matrix4f inversePickMVP = pickMVP.Inverse();
|
||||
|
||||
float pickX = ((float)x) / ((float)w);
|
||||
float pickX = ((float)x) / ((float)width);
|
||||
float pickXCanonical = RDCLERP(-1.0f, 1.0f, pickX);
|
||||
|
||||
float pickY = ((float)y) / ((float)h);
|
||||
// flip the Y axis
|
||||
float pickY = ((float)y) / ((float)height);
|
||||
// flip the Y axis by default for Y-up
|
||||
float pickYCanonical = RDCLERP(1.0f, -1.0f, pickY);
|
||||
|
||||
vec3 cameraToWorldNearPosition =
|
||||
inversePickMVP.Transform(Vec3f(pickXCanonical, pickYCanonical, -1), 1);
|
||||
if(cfg.position.flipY && !cfg.ortho)
|
||||
pickYCanonical = -pickYCanonical;
|
||||
|
||||
vec3 cameraToWorldFarPosition =
|
||||
inversePickMVP.Transform(Vec3f(pickXCanonical, pickYCanonical, 1), 1);
|
||||
// x/y is inside the window. Since we're not using the window projection we need to correct
|
||||
// for the aspect ratio here.
|
||||
if(cfg.position.unproject && !cfg.ortho)
|
||||
pickXCanonical *= (float(width) / float(height)) / cfg.aspect;
|
||||
|
||||
vec3 testDir = (cameraToWorldFarPosition - cameraToWorldNearPosition);
|
||||
testDir.Normalise();
|
||||
// set up the NDC near/far pos
|
||||
Vec3f nearPosNDC = Vec3f(pickXCanonical, pickYCanonical, 0);
|
||||
Vec3f farPosNDC = Vec3f(pickXCanonical, pickYCanonical, 1);
|
||||
|
||||
/* Calculate the ray direction first in the regular way (above), so we can use the
|
||||
the output for testing if the ray we are picking is negative or not. This is similar
|
||||
to checking against the forward direction of the camera, but more robust
|
||||
*/
|
||||
if(cfg.position.unproject)
|
||||
if(cfg.position.unproject && cfg.ortho)
|
||||
{
|
||||
Matrix4f inversePickMVPGuess = pickMVPProj.Inverse();
|
||||
// orthographic projections we raycast in NDC space
|
||||
Matrix4f inversePickMVP = pickMVP.Inverse();
|
||||
|
||||
vec3 nearPosProj = inversePickMVPGuess.Transform(Vec3f(pickXCanonical, pickYCanonical, -1), 1);
|
||||
// transform from the desired NDC co-ordinates into camera space
|
||||
Vec3f nearPosCamera = inversePickMVP.Transform(nearPosNDC, 1);
|
||||
Vec3f farPosCamera = inversePickMVP.Transform(farPosNDC, 1);
|
||||
|
||||
vec3 farPosProj = inversePickMVPGuess.Transform(Vec3f(pickXCanonical, pickYCanonical, 1), 1);
|
||||
Vec3f testDir = (farPosCamera - nearPosCamera);
|
||||
testDir.Normalise();
|
||||
|
||||
Matrix4f pickMVPguessProjInverse = guessProj.Mul(inversePickMVP);
|
||||
|
||||
Vec3f nearPosProj = pickMVPguessProjInverse.Transform(nearPosNDC, 1);
|
||||
Vec3f farPosProj = pickMVPguessProjInverse.Transform(farPosNDC, 1);
|
||||
|
||||
rayDir = (farPosProj - nearPosProj);
|
||||
rayDir.Normalise();
|
||||
|
||||
// Calculate the ray direction first in the regular way (above), so we can use the
|
||||
// the output for testing if the ray we are picking is negative or not. This is similar
|
||||
// to checking against the forward direction of the camera, but more robust
|
||||
if(testDir.z < 0)
|
||||
{
|
||||
rayDir = -rayDir;
|
||||
}
|
||||
rayPos = nearPosProj;
|
||||
}
|
||||
else if(cfg.position.unproject)
|
||||
{
|
||||
// projected data we pick in world-space to avoid problems with handling unusual transforms
|
||||
|
||||
if(reverseProjection)
|
||||
{
|
||||
farPosNDC.z = 1e-6f;
|
||||
nearPosNDC.z = 1e+6f;
|
||||
}
|
||||
|
||||
// invert the guessed projection matrix to get the near/far pos in camera space
|
||||
Vec3f nearPosCamera = guessProjInverse.Transform(nearPosNDC, 1.0f);
|
||||
Vec3f farPosCamera = guessProjInverse.Transform(farPosNDC, 1.0f);
|
||||
|
||||
// normalise and generate the ray
|
||||
rayDir = (farPosCamera - nearPosCamera);
|
||||
rayDir.Normalise();
|
||||
|
||||
farPosCamera = nearPosCamera + rayDir;
|
||||
|
||||
// invert the camera transform to transform the ray as camera-relative into world space
|
||||
Matrix4f inverseCamera = camMat.Inverse();
|
||||
|
||||
Vec3f nearPosWorld = inverseCamera.Transform(nearPosCamera, 1);
|
||||
Vec3f farPosWorld = inverseCamera.Transform(farPosCamera, 1);
|
||||
|
||||
// again normalise our final ray
|
||||
rayDir = (farPosWorld - nearPosWorld);
|
||||
rayDir.Normalise();
|
||||
|
||||
rayPos = nearPosWorld;
|
||||
}
|
||||
else
|
||||
{
|
||||
rayDir = testDir;
|
||||
rayPos = cameraToWorldNearPosition;
|
||||
Matrix4f inversePickMVP = pickMVP.Inverse();
|
||||
|
||||
// transform from the desired NDC co-ordinates into model space
|
||||
Vec3f nearPosCamera = inversePickMVP.Transform(nearPosNDC, 1);
|
||||
Vec3f farPosCamera = inversePickMVP.Transform(farPosNDC, 1);
|
||||
|
||||
rayDir = (farPosCamera - nearPosCamera);
|
||||
rayDir.Normalise();
|
||||
rayPos = nearPosCamera;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1329,9 +1384,35 @@ uint32_t VulkanReplay::PickVertex(uint32_t eventId, int32_t w, int32_t h, const
|
||||
|
||||
// line/point data
|
||||
ubo->unproject = cfg.position.unproject;
|
||||
ubo->mvp = cfg.position.unproject ? pickMVPProj : pickMVP;
|
||||
ubo->flipY = cfg.position.flipY;
|
||||
ubo->ortho = cfg.ortho;
|
||||
ubo->coords = Vec2f((float)x, (float)y);
|
||||
ubo->viewport = Vec2f((float)w, (float)h);
|
||||
ubo->viewport = Vec2f((float)width, (float)height);
|
||||
|
||||
if(cfg.position.unproject && isTriangleMesh)
|
||||
{
|
||||
// projected triangle meshes we transform the vertices into world space, and ray-cast against
|
||||
// that
|
||||
//
|
||||
// NOTE: for ortho, this matrix is not used and we just do the perspective W division on model
|
||||
// vertices. The ray is cast in NDC
|
||||
if(cfg.ortho)
|
||||
ubo->transformMat = Matrix4f::Identity();
|
||||
else
|
||||
ubo->transformMat = guessProjInverse;
|
||||
}
|
||||
else if(cfg.position.unproject)
|
||||
{
|
||||
// projected non-triangles are just point clouds, so we transform the vertices into world space
|
||||
// then project them back onto the output and compare that against the picking 2D co-ordinates
|
||||
ubo->transformMat = pickMVP.Mul(guessProjInverse);
|
||||
}
|
||||
else
|
||||
{
|
||||
// plain meshes of either type, we just transform from model space to the output, and raycast or
|
||||
// co-ordinate check
|
||||
ubo->transformMat = pickMVP;
|
||||
}
|
||||
|
||||
m_VertexPick.UBO.Unmap();
|
||||
|
||||
|
||||
@@ -2684,7 +2684,7 @@ void VulkanReplay::FetchVSOut(uint32_t eventId, VulkanRenderState &state)
|
||||
float m = (B.y - A.y) / (B.x - A.x);
|
||||
float c = B.y - B.x * m;
|
||||
|
||||
if(m == 1.0f)
|
||||
if(m == 1.0f || c == 0.0f)
|
||||
continue;
|
||||
|
||||
if(-c / m <= 0.000001f)
|
||||
@@ -3249,7 +3249,7 @@ void VulkanReplay::FetchTessGSOut(uint32_t eventId, VulkanRenderState &state)
|
||||
float m = (B.y - A.y) / (B.x - A.x);
|
||||
float c = B.y - B.x * m;
|
||||
|
||||
if(m == 1.0f)
|
||||
if(m == 1.0f || c == 0.0f)
|
||||
continue;
|
||||
|
||||
if(-c / m <= 0.000001f)
|
||||
|
||||
Reference in New Issue
Block a user