mirror of
https://github.com/baldurk/renderdoc.git
synced 2026-05-05 09:30:44 +00:00
Add support for VK_KHR_device_group & VK_KHR_device_group_creation
This commit is contained in:
@@ -954,6 +954,7 @@ struct DrawcallDescription
|
||||
|
||||
dispatchDimension[0] = dispatchDimension[1] = dispatchDimension[2] = 0;
|
||||
dispatchThreadsDimension[0] = dispatchThreadsDimension[1] = dispatchThreadsDimension[2] = 0;
|
||||
dispatchBase[0] = dispatchBase[1] = dispatchBase[2] = 0;
|
||||
|
||||
indexByteWidth = 0;
|
||||
topology = Topology::Unknown;
|
||||
@@ -1015,6 +1016,9 @@ struct DrawcallDescription
|
||||
DOCUMENT("The 3D size of each workgroup in threads if the call allows an override, or 0 if not.");
|
||||
uint32_t dispatchThreadsDimension[3];
|
||||
|
||||
DOCUMENT("The 3D base offset of the workgroup ID if the call allows an override, or 0 if not.");
|
||||
uint32_t dispatchBase[3];
|
||||
|
||||
DOCUMENT(R"(The width in bytes of each index.
|
||||
|
||||
Valid values are 1 (depending on API), 2 or 4, or 0 if the drawcall is not an indexed draw.
|
||||
|
||||
@@ -242,6 +242,261 @@ bool VkInitParams::IsSupportedVersion(uint64_t ver)
|
||||
return false;
|
||||
}
|
||||
|
||||
// utility function for when we're modifying one struct in a pNext chain, this
|
||||
// lets us just copy across a struct unmodified into some temporary memory and
|
||||
// append it onto a pNext chain we're building
|
||||
template <typename VkStruct>
|
||||
void CopyNextChainedStruct(byte *&tempMem, const VkGenericStruct *nextInput,
|
||||
VkGenericStruct *&nextChainTail)
|
||||
{
|
||||
const VkStruct *instruct = (const VkStruct *)nextInput;
|
||||
VkStruct *outstruct = (VkStruct *)tempMem;
|
||||
|
||||
tempMem = (byte *)(outstruct + 1);
|
||||
|
||||
// copy the struct, nothing to unwrap
|
||||
*outstruct = *instruct;
|
||||
|
||||
// default to NULL. It will be overwritten next time if there is a next object
|
||||
outstruct->pNext = NULL;
|
||||
|
||||
// append this onto the chain
|
||||
nextChainTail->pNext = (const VkGenericStruct *)outstruct;
|
||||
nextChainTail = (VkGenericStruct *)outstruct;
|
||||
}
|
||||
|
||||
// this is similar to the above function, but for use after we've modified a struct locally
|
||||
// e.g. to unwrap some members or patch flags, etc.
|
||||
template <typename VkStruct>
|
||||
void AppendModifiedChainedStruct(byte *&tempMem, VkStruct *outputStruct,
|
||||
VkGenericStruct *&nextChainTail)
|
||||
{
|
||||
tempMem = (byte *)(outputStruct + 1);
|
||||
|
||||
// default to NULL. It will be overwritten in the next step if there is a next object
|
||||
outputStruct->pNext = NULL;
|
||||
|
||||
// append this onto the chain
|
||||
nextChainTail->pNext = (const VkGenericStruct *)outputStruct;
|
||||
nextChainTail = (VkGenericStruct *)outputStruct;
|
||||
}
|
||||
|
||||
// Walks an input pNext chain and returns how many bytes of scratch memory are
// needed to hold locally-patched copies of every struct we recognise in it.
// The caller allocates this much temporary memory, then PatchNextChain() fills
// it in. Unrecognised sTypes contribute nothing (they are not copied).
size_t GetNextPatchSize(const void *pNext)
{
  const VkGenericStruct *next = (const VkGenericStruct *)pNext;
  size_t memSize = 0;

  while(next)
  {
    // VkMemoryAllocateInfo
    if(next->sType == VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV)
      memSize += sizeof(VkDedicatedAllocationMemoryAllocateInfoNV);
    else if(next->sType == VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR)
      memSize += sizeof(VkMemoryDedicatedAllocateInfoKHR);
    else if(next->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV)
      memSize += sizeof(VkExportMemoryAllocateInfoNV);
    else if(next->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR)
      memSize += sizeof(VkExportMemoryAllocateInfoKHR);
    else if(next->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR)
      memSize += sizeof(VkImportMemoryFdInfoKHR);

// win32 external-memory structs only reserve space when support is compiled
// in; otherwise we log an error here and no memory is counted for them
#ifdef VK_NV_external_memory_win32
    else if(next->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV)
      memSize += sizeof(VkExportMemoryWin32HandleInfoNV);
    else if(next->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV)
      memSize += sizeof(VkImportMemoryWin32HandleInfoNV);
#else
    else if(next->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV)
      RDCERR("Support for VK_NV_external_memory_win32 not compiled in");
    else if(next->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV)
      RDCERR("Support for VK_NV_external_memory_win32 not compiled in");
#endif

#ifdef VK_KHR_external_memory_win32
    else if(next->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR)
      memSize += sizeof(VkExportMemoryWin32HandleInfoKHR);
    else if(next->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR)
      memSize += sizeof(VkImportMemoryWin32HandleInfoKHR);
#else
    else if(next->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR)
      RDCERR("Support for VK_KHR_external_memory_win32 not compiled in");
    else if(next->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR)
      RDCERR("Support for VK_KHR_external_memory_win32 not compiled in");
#endif

    // vkSamplerCreateInfo
    // NOTE(review): PatchNextChain() matches the non-KHR alias
    // VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO for this struct - the
    // two enum names presumably share a value, but confirm they stay in sync.
    else if(next->sType == VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR)
      memSize += sizeof(VkSamplerYcbcrConversionInfoKHR);

    // VkImageCreateInfo
    else if(next->sType == VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR)
      memSize += sizeof(VkExternalMemoryImageCreateInfoKHR);
    else if(next->sType == VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV)
      memSize += sizeof(VkExternalMemoryImageCreateInfoNV);
    else if(next->sType == VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR)
      memSize += sizeof(VkImageSwapchainCreateInfoKHR);
    else if(next->sType == VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV)
      memSize += sizeof(VkDedicatedAllocationImageCreateInfoNV);

    // VkBindImageMemoryInfoKHR
    else if(next->sType == VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR)
      memSize += sizeof(VkBindImageMemorySwapchainInfoKHR);
    else if(next->sType == VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR)
      memSize += sizeof(VkBindImageMemoryDeviceGroupInfoKHR);
    else if(next->sType == VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR)
      memSize += sizeof(VkBindImagePlaneMemoryInfoKHR);

    next = next->pNext;
  }

  return memSize;
}
|
||||
|
||||
void PatchNextChain(const char *structName, byte *&tempMem, VkGenericStruct *infoStruct)
|
||||
{
|
||||
VkGenericStruct *nextChainTail = infoStruct;
|
||||
const VkGenericStruct *nextInput = (const VkGenericStruct *)infoStruct->pNext;
|
||||
while(nextInput)
|
||||
{
|
||||
// unwrap and replace the dedicated allocation struct in the chain
|
||||
if(nextInput->sType == VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV)
|
||||
{
|
||||
const VkDedicatedAllocationMemoryAllocateInfoNV *dedicatedIn =
|
||||
(const VkDedicatedAllocationMemoryAllocateInfoNV *)nextInput;
|
||||
VkDedicatedAllocationMemoryAllocateInfoNV *dedicatedOut =
|
||||
(VkDedicatedAllocationMemoryAllocateInfoNV *)tempMem;
|
||||
|
||||
// copy and unwrap the struct
|
||||
dedicatedOut->sType = dedicatedIn->sType;
|
||||
dedicatedOut->buffer = Unwrap(dedicatedIn->buffer);
|
||||
dedicatedOut->image = Unwrap(dedicatedIn->image);
|
||||
|
||||
AppendModifiedChainedStruct(tempMem, dedicatedOut, nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR)
|
||||
{
|
||||
const VkMemoryDedicatedAllocateInfoKHR *dedicatedIn =
|
||||
(const VkMemoryDedicatedAllocateInfoKHR *)nextInput;
|
||||
VkMemoryDedicatedAllocateInfoKHR *dedicatedOut = (VkMemoryDedicatedAllocateInfoKHR *)tempMem;
|
||||
|
||||
// copy and unwrap the struct
|
||||
dedicatedOut->sType = dedicatedIn->sType;
|
||||
dedicatedOut->buffer = Unwrap(dedicatedIn->buffer);
|
||||
dedicatedOut->image = Unwrap(dedicatedIn->image);
|
||||
|
||||
AppendModifiedChainedStruct(tempMem, dedicatedOut, nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV)
|
||||
{
|
||||
CopyNextChainedStruct<VkExportMemoryAllocateInfoNV>(tempMem, nextInput, nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR)
|
||||
{
|
||||
CopyNextChainedStruct<VkExportMemoryAllocateInfoKHR>(tempMem, nextInput, nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR)
|
||||
{
|
||||
CopyNextChainedStruct<VkImportMemoryFdInfoKHR>(tempMem, nextInput, nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR)
|
||||
{
|
||||
#ifdef VK_KHR_external_memory_win32
|
||||
CopyNextChainedStruct<VkExportMemoryWin32HandleInfoKHR>(tempMem, nextInput, nextChainTail);
|
||||
#else
|
||||
RDCERR("Support for VK_KHR_external_memory_win32 not compiled in");
|
||||
#endif
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR)
|
||||
{
|
||||
#ifdef VK_KHR_external_memory_win32
|
||||
CopyNextChainedStruct<VkImportMemoryWin32HandleInfoKHR>(tempMem, nextInput, nextChainTail);
|
||||
#else
|
||||
RDCERR("Support for VK_KHR_external_memory_win32 not compiled in");
|
||||
#endif
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV)
|
||||
{
|
||||
#ifdef VK_NV_external_memory_win32
|
||||
CopyNextChainedStruct<VkExportMemoryWin32HandleInfoNV>(tempMem, nextInput, nextChainTail);
|
||||
#else
|
||||
RDCERR("Support for VK_NV_external_memory_win32 not compiled in");
|
||||
#endif
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV)
|
||||
{
|
||||
#ifdef VK_NV_external_memory_win32
|
||||
CopyNextChainedStruct<VkImportMemoryWin32HandleInfoNV>(tempMem, nextInput, nextChainTail);
|
||||
#else
|
||||
RDCERR("Support for VK_NV_external_memory_win32 not compiled in");
|
||||
#endif
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO)
|
||||
{
|
||||
const VkSamplerYcbcrConversionInfoKHR *ycbcrIn =
|
||||
(const VkSamplerYcbcrConversionInfoKHR *)nextInput;
|
||||
VkSamplerYcbcrConversionInfoKHR *ycbcrOut = (VkSamplerYcbcrConversionInfoKHR *)tempMem;
|
||||
|
||||
// copy and unwrap the struct
|
||||
ycbcrOut->sType = ycbcrIn->sType;
|
||||
ycbcrOut->conversion = Unwrap(ycbcrIn->conversion);
|
||||
|
||||
AppendModifiedChainedStruct(tempMem, ycbcrOut, nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR)
|
||||
{
|
||||
CopyNextChainedStruct<VkExternalMemoryImageCreateInfoKHR>(tempMem, nextInput, nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV)
|
||||
{
|
||||
CopyNextChainedStruct<VkExternalMemoryImageCreateInfoNV>(tempMem, nextInput, nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR)
|
||||
{
|
||||
const VkImageSwapchainCreateInfoKHR *swapIn = (const VkImageSwapchainCreateInfoKHR *)nextInput;
|
||||
VkImageSwapchainCreateInfoKHR *swapOut = (VkImageSwapchainCreateInfoKHR *)tempMem;
|
||||
|
||||
// copy and unwrap the struct
|
||||
swapOut->sType = swapIn->sType;
|
||||
swapOut->swapchain = Unwrap(swapIn->swapchain);
|
||||
|
||||
AppendModifiedChainedStruct(tempMem, swapOut, nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV)
|
||||
{
|
||||
CopyNextChainedStruct<VkDedicatedAllocationImageCreateInfoNV>(tempMem, nextInput,
|
||||
nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR)
|
||||
{
|
||||
const VkBindImageMemorySwapchainInfoKHR *swapIn =
|
||||
(const VkBindImageMemorySwapchainInfoKHR *)nextInput;
|
||||
VkBindImageMemorySwapchainInfoKHR *swapOut = (VkBindImageMemorySwapchainInfoKHR *)tempMem;
|
||||
|
||||
// copy and unwrap the struct
|
||||
swapOut->sType = swapIn->sType;
|
||||
swapOut->swapchain = Unwrap(swapIn->swapchain);
|
||||
|
||||
AppendModifiedChainedStruct(tempMem, swapOut, nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR)
|
||||
{
|
||||
CopyNextChainedStruct<VkBindImageMemoryDeviceGroupInfoKHR>(tempMem, nextInput, nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR)
|
||||
{
|
||||
CopyNextChainedStruct<VkBindImagePlaneMemoryInfoKHR>(tempMem, nextInput, nextChainTail);
|
||||
}
|
||||
else
|
||||
{
|
||||
RDCERR("unrecognised struct %d in %s pNext chain", structName, nextInput->sType);
|
||||
// can't patch this struct, have to just copy it and hope it's the last in the chain
|
||||
nextChainTail->pNext = nextInput;
|
||||
}
|
||||
|
||||
nextInput = nextInput->pNext;
|
||||
}
|
||||
}
|
||||
|
||||
VkAccessFlags MakeAccessMask(VkImageLayout layout)
|
||||
{
|
||||
switch(layout)
|
||||
|
||||
@@ -234,44 +234,8 @@ enum VkFlagWithNoBits
|
||||
FlagWithNoBits_Dummy_Bit = 1,
|
||||
};
|
||||
|
||||
// utility function for when we're modifying one struct in a pNext chain, this
|
||||
// lets us just copy across a struct unmodified into some temporary memory and
|
||||
// append it onto a pNext chain we're building
|
||||
template <typename VkStruct>
|
||||
void CopyNextChainedStruct(byte *&tempMem, const VkGenericStruct *nextInput,
|
||||
VkGenericStruct *&nextChainTail)
|
||||
{
|
||||
const VkStruct *instruct = (const VkStruct *)nextInput;
|
||||
VkStruct *outstruct = (VkStruct *)tempMem;
|
||||
|
||||
tempMem = (byte *)(outstruct + 1);
|
||||
|
||||
// copy the struct, nothing to unwrap
|
||||
*outstruct = *instruct;
|
||||
|
||||
// default to NULL. It will be overwritten next time if there is a next object
|
||||
outstruct->pNext = NULL;
|
||||
|
||||
// append this onto the chain
|
||||
nextChainTail->pNext = (const VkGenericStruct *)outstruct;
|
||||
nextChainTail = (VkGenericStruct *)outstruct;
|
||||
}
|
||||
|
||||
// this is similar to the above function, but for use after we've modified a struct locally
|
||||
// e.g. to unwrap some members or patch flags, etc.
|
||||
template <typename VkStruct>
|
||||
void AppendModifiedChainedStruct(byte *&tempMem, VkStruct *outputStruct,
|
||||
VkGenericStruct *&nextChainTail)
|
||||
{
|
||||
tempMem = (byte *)(outputStruct + 1);
|
||||
|
||||
// default to NULL. It will be overwritten in the next step if there is a next object
|
||||
outputStruct->pNext = NULL;
|
||||
|
||||
// append this onto the chain
|
||||
nextChainTail->pNext = (const VkGenericStruct *)outputStruct;
|
||||
nextChainTail = (VkGenericStruct *)outputStruct;
|
||||
}
|
||||
size_t GetNextPatchSize(const void *next);
|
||||
void PatchNextChain(const char *structName, byte *&tempMem, VkGenericStruct *infoStruct);
|
||||
|
||||
template <typename VkStruct>
|
||||
const VkGenericStruct *FindNextStruct(const VkStruct *haystack, VkStructureType needle)
|
||||
@@ -501,6 +465,8 @@ enum class VulkanChunk : uint32_t
|
||||
vkCmdEndDebugUtilsLabelEXT,
|
||||
vkCmdInsertDebugUtilsLabelEXT,
|
||||
vkCreateSamplerYcbcrConversionKHR,
|
||||
vkCmdSetDeviceMaskKHR,
|
||||
vkCmdDispatchBaseKHR,
|
||||
Max,
|
||||
};
|
||||
|
||||
|
||||
@@ -2360,6 +2360,13 @@ bool WrappedVulkan::ProcessChunk(ReadSerialiser &ser, VulkanChunk chunk)
|
||||
return Serialise_vkCmdInsertDebugUtilsLabelEXT(ser, VK_NULL_HANDLE, NULL);
|
||||
break;
|
||||
|
||||
case VulkanChunk::vkCmdSetDeviceMaskKHR:
|
||||
return Serialise_vkCmdSetDeviceMaskKHR(ser, VK_NULL_HANDLE, 0);
|
||||
break;
|
||||
case VulkanChunk::vkCmdDispatchBaseKHR:
|
||||
return Serialise_vkCmdDispatchBaseKHR(ser, VK_NULL_HANDLE, 0, 0, 0, 0, 0, 0);
|
||||
break;
|
||||
|
||||
default:
|
||||
{
|
||||
SystemChunk system = (SystemChunk)chunk;
|
||||
|
||||
@@ -1746,4 +1746,29 @@ public:
|
||||
IMPLEMENT_FUNCTION_SERIALISED(void, vkDestroySamplerYcbcrConversionKHR, VkDevice device,
|
||||
VkSamplerYcbcrConversionKHR ycbcrConversion,
|
||||
const VkAllocationCallbacks *pAllocator);
|
||||
|
||||
// VK_KHR_device_group_creation
|
||||
VkResult vkEnumeratePhysicalDeviceGroupsKHR(
|
||||
VkInstance instance, uint32_t *pPhysicalDeviceGroupCount,
|
||||
VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties);
|
||||
|
||||
// VK_KHR_device_group
|
||||
void vkGetDeviceGroupPeerMemoryFeaturesKHR(VkDevice device, uint32_t heapIndex,
|
||||
uint32_t localDeviceIndex, uint32_t remoteDeviceIndex,
|
||||
VkPeerMemoryFeatureFlags *pPeerMemoryFeatures);
|
||||
VkResult vkGetDeviceGroupPresentCapabilitiesKHR(
|
||||
VkDevice device, VkDeviceGroupPresentCapabilitiesKHR *pDeviceGroupPresentCapabilities);
|
||||
VkResult vkGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface,
|
||||
VkDeviceGroupPresentModeFlagsKHR *pModes);
|
||||
VkResult vkGetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice,
|
||||
VkSurfaceKHR surface, uint32_t *pRectCount,
|
||||
VkRect2D *pRects);
|
||||
VkResult vkAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
|
||||
uint32_t *pImageIndex);
|
||||
|
||||
IMPLEMENT_FUNCTION_SERIALISED(void, vkCmdSetDeviceMaskKHR, VkCommandBuffer commandBuffer,
|
||||
uint32_t deviceMask);
|
||||
IMPLEMENT_FUNCTION_SERIALISED(void, vkCmdDispatchBaseKHR, VkCommandBuffer commandBuffer,
|
||||
uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ,
|
||||
uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
|
||||
};
|
||||
|
||||
@@ -296,7 +296,8 @@
|
||||
CheckExt(KHR_external_memory_capabilities, VK11); \
|
||||
CheckExt(KHR_external_semaphore_capabilities, VK11); \
|
||||
CheckExt(KHR_external_fence_capabilities, VK11); \
|
||||
CheckExt(EXT_debug_utils, VKXX);
|
||||
CheckExt(EXT_debug_utils, VKXX); \
|
||||
CheckExt(KHR_device_group_creation, VK11);
|
||||
|
||||
#define CheckDeviceExts() \
|
||||
CheckExt(EXT_debug_marker, VKXX); \
|
||||
@@ -328,7 +329,8 @@
|
||||
CheckExt(AMD_buffer_marker, VKXX); \
|
||||
CheckExt(EXT_vertex_attribute_divisor, VKXX); \
|
||||
CheckExt(EXT_sampler_filter_minmax, VKXX); \
|
||||
CheckExt(KHR_sampler_ycbcr_conversion, VK11);
|
||||
CheckExt(KHR_sampler_ycbcr_conversion, VK11); \
|
||||
CheckExt(KHR_device_group, VK11);
|
||||
|
||||
#define HookInitVulkanInstanceExts() \
|
||||
HookInitExtension(KHR_surface, DestroySurfaceKHR); \
|
||||
@@ -367,6 +369,11 @@
|
||||
HookInitExtension(EXT_debug_utils, CreateDebugUtilsMessengerEXT); \
|
||||
HookInitExtension(EXT_debug_utils, DestroyDebugUtilsMessengerEXT); \
|
||||
HookInitExtension(EXT_debug_utils, SubmitDebugUtilsMessageEXT); \
|
||||
HookInitExtension(KHR_device_group_creation, EnumeratePhysicalDeviceGroupsKHR); \
|
||||
/* Not technically accurate - part of KHR_device_group - but these extensions are linked and */ \
|
||||
/* should always be present/not present together. Keying from the instance extension ensures */ \
|
||||
/* we'll load this function correctly when populating dispatch tables. */ \
|
||||
HookInitExtension(KHR_device_group_creation, GetPhysicalDevicePresentRectanglesKHR); \
|
||||
HookInitInstance_PlatformSpecific()
|
||||
|
||||
#define HookInitVulkanDeviceExts() \
|
||||
@@ -416,6 +423,12 @@
|
||||
HookInitExtension(EXT_debug_utils, CmdInsertDebugUtilsLabelEXT); \
|
||||
HookInitExtension(KHR_sampler_ycbcr_conversion, CreateSamplerYcbcrConversionKHR); \
|
||||
HookInitExtension(KHR_sampler_ycbcr_conversion, DestroySamplerYcbcrConversionKHR); \
|
||||
HookInitExtension(KHR_device_group, GetDeviceGroupPeerMemoryFeaturesKHR); \
|
||||
HookInitExtension(KHR_device_group, CmdSetDeviceMaskKHR); \
|
||||
HookInitExtension(KHR_device_group, CmdDispatchBaseKHR); \
|
||||
HookInitExtension(KHR_device_group, GetDeviceGroupPresentCapabilitiesKHR); \
|
||||
HookInitExtension(KHR_device_group, GetDeviceGroupSurfacePresentModesKHR); \
|
||||
HookInitExtension(KHR_device_group, AcquireNextImage2KHR); \
|
||||
HookInitDevice_PlatformSpecific()
|
||||
|
||||
#define DefineHooks() \
|
||||
@@ -892,6 +905,24 @@
|
||||
HookDefine3(void, vkDestroySamplerYcbcrConversionKHR, VkDevice, device, \
|
||||
VkSamplerYcbcrConversionKHR, ycbcrConversion, const VkAllocationCallbacks *, \
|
||||
pAllocator); \
|
||||
HookDefine3(VkResult, vkEnumeratePhysicalDeviceGroupsKHR, VkInstance, instance, uint32_t *, \
|
||||
pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *, \
|
||||
pPhysicalDeviceGroupProperties); \
|
||||
HookDefine5(void, vkGetDeviceGroupPeerMemoryFeaturesKHR, VkDevice, device, uint32_t, heapIndex, \
|
||||
uint32_t, localDeviceIndex, uint32_t, remoteDeviceIndex, VkPeerMemoryFeatureFlags *, \
|
||||
pPeerMemoryFeatures); \
|
||||
HookDefine2(void, vkCmdSetDeviceMaskKHR, VkCommandBuffer, commandBuffer, uint32_t, deviceMask); \
|
||||
HookDefine7(void, vkCmdDispatchBaseKHR, VkCommandBuffer, commandBuffer, uint32_t, baseGroupX, \
|
||||
uint32_t, baseGroupY, uint32_t, baseGroupZ, uint32_t, groupCountX, uint32_t, \
|
||||
groupCountY, uint32_t, groupCountZ); \
|
||||
HookDefine2(VkResult, vkGetDeviceGroupPresentCapabilitiesKHR, VkDevice, device, \
|
||||
VkDeviceGroupPresentCapabilitiesKHR *, pDeviceGroupPresentCapabilities); \
|
||||
HookDefine3(VkResult, vkGetDeviceGroupSurfacePresentModesKHR, VkDevice, device, VkSurfaceKHR, \
|
||||
surface, VkDeviceGroupPresentModeFlagsKHR *, pModes); \
|
||||
HookDefine4(VkResult, vkGetPhysicalDevicePresentRectanglesKHR, VkPhysicalDevice, physicalDevice, \
|
||||
VkSurfaceKHR, surface, uint32_t *, pRectCount, VkRect2D *, pRects); \
|
||||
HookDefine3(VkResult, vkAcquireNextImage2KHR, VkDevice, device, \
|
||||
const VkAcquireNextImageInfoKHR *, pAcquireInfo, uint32_t *, pImageIndex); \
|
||||
HookDefine_PlatformSpecific()
|
||||
|
||||
struct VkLayerInstanceDispatchTableExtended : VkLayerInstanceDispatchTable
|
||||
|
||||
@@ -175,6 +175,20 @@ SERIALISE_VK_HANDLES();
|
||||
PNEXT_IGNORE(VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR) \
|
||||
PNEXT_IGNORE(VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO) \
|
||||
\
|
||||
/* for now we don't handle true device groups and report all physdevices in separate groups. */ \
|
||||
/* So we can safely ignore these structures as redundant/unneeded. */ \
|
||||
/* VK_KHR_sampler_ycbcr_conversion */ \
|
||||
PNEXT_IGNORE(VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR) \
|
||||
PNEXT_IGNORE(VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR) \
|
||||
PNEXT_IGNORE(VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR) \
|
||||
PNEXT_IGNORE(VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR) \
|
||||
PNEXT_IGNORE(VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR) \
|
||||
PNEXT_IGNORE(VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR) \
|
||||
PNEXT_IGNORE(VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR) \
|
||||
PNEXT_IGNORE(VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR) \
|
||||
PNEXT_IGNORE(VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR) \
|
||||
PNEXT_IGNORE(VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR) \
|
||||
\
|
||||
/* VK_EXT_conservative_rasterization */ \
|
||||
PNEXT_STRUCT(VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT, \
|
||||
VkPipelineRasterizationConservativeStateCreateInfoEXT) \
|
||||
|
||||
@@ -28,7 +28,7 @@
|
||||
template <>
|
||||
std::string DoStringise(const VulkanChunk &el)
|
||||
{
|
||||
RDCCOMPILE_ASSERT((uint32_t)VulkanChunk::Max == 1113, "Chunks changed without updating names");
|
||||
RDCCOMPILE_ASSERT((uint32_t)VulkanChunk::Max == 1115, "Chunks changed without updating names");
|
||||
|
||||
BEGIN_ENUM_STRINGISE(VulkanChunk)
|
||||
{
|
||||
@@ -145,6 +145,8 @@ std::string DoStringise(const VulkanChunk &el)
|
||||
STRINGISE_ENUM_CLASS(vkCmdEndDebugUtilsLabelEXT);
|
||||
STRINGISE_ENUM_CLASS(vkCmdInsertDebugUtilsLabelEXT);
|
||||
STRINGISE_ENUM_CLASS(vkCreateSamplerYcbcrConversionKHR);
|
||||
STRINGISE_ENUM_CLASS(vkCmdSetDeviceMaskKHR);
|
||||
STRINGISE_ENUM_CLASS(vkCmdDispatchBaseKHR);
|
||||
STRINGISE_ENUM_CLASS_NAMED(Max, "Max Chunk");
|
||||
}
|
||||
END_ENUM_STRINGISE()
|
||||
|
||||
@@ -3790,6 +3790,58 @@ void WrappedVulkan::vkCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
|
||||
}
|
||||
}
|
||||
|
||||
// Serialises (capture) or replays vkCmdSetDeviceMaskKHR (VK_KHR_device_group).
// On replay the command is re-executed into the appropriate command buffer.
template <typename SerialiserType>
bool WrappedVulkan::Serialise_vkCmdSetDeviceMaskKHR(SerialiserType &ser,
                                                    VkCommandBuffer commandBuffer,
                                                    uint32_t deviceMask)
{
  SERIALISE_ELEMENT(commandBuffer);
  SERIALISE_ELEMENT(deviceMask);

  Serialise_DebugMessages(ser);

  SERIALISE_CHECK_READ_ERRORS();

  if(IsReplayingAndReading())
  {
    m_LastCmdBufferID = GetResourceManager()->GetOriginalID(GetResID(commandBuffer));

    if(IsActiveReplaying(m_State))
    {
      // during active replay only re-record the command if it falls inside the
      // partial re-record range; otherwise skip it by nulling the handle
      if(InRerecordRange(m_LastCmdBufferID))
        commandBuffer = RerecordCmdBuf(m_LastCmdBufferID);
      else
        commandBuffer = VK_NULL_HANDLE;
    }

    if(commandBuffer != VK_NULL_HANDLE)
    {
      ObjDisp(commandBuffer)->CmdSetDeviceMaskKHR(Unwrap(commandBuffer), deviceMask);
    }
  }

  return true;
}
|
||||
|
||||
// Capture-side entry point for vkCmdSetDeviceMaskKHR (VK_KHR_device_group):
// forwards to the driver and, while capturing, records a chunk into the
// command buffer's record so it can be serialised and replayed later.
void WrappedVulkan::vkCmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask)
{
  SCOPED_DBG_SINK();

  // call the real driver function (timed for the serialised call)
  SERIALISE_TIME_CALL(ObjDisp(commandBuffer)->CmdSetDeviceMaskKHR(Unwrap(commandBuffer), deviceMask));

  if(IsCaptureMode(m_State))
  {
    VkResourceRecord *record = GetRecord(commandBuffer);

    CACHE_THREAD_SERIALISER();

    SCOPED_SERIALISE_CHUNK(VulkanChunk::vkCmdSetDeviceMaskKHR);
    Serialise_vkCmdSetDeviceMaskKHR(ser, commandBuffer, deviceMask);

    // attach the serialised chunk to this command buffer's record
    record->AddChunk(scope.Get());
  }
}
|
||||
|
||||
INSTANTIATE_FUNCTION_SERIALISED(VkResult, vkCreateCommandPool, VkDevice device,
|
||||
const VkCommandPoolCreateInfo *pCreateInfo,
|
||||
const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool);
|
||||
@@ -3899,3 +3951,6 @@ INSTANTIATE_FUNCTION_SERIALISED(void, vkCmdEndDebugUtilsLabelEXT, VkCommandBuffe
|
||||
|
||||
INSTANTIATE_FUNCTION_SERIALISED(void, vkCmdInsertDebugUtilsLabelEXT, VkCommandBuffer commandBuffer,
|
||||
const VkDebugUtilsLabelEXT *pLabelInfo);
|
||||
|
||||
INSTANTIATE_FUNCTION_SERIALISED(void, vkCmdSetDeviceMaskKHR, VkCommandBuffer commandBuffer,
|
||||
uint32_t deviceMask);
|
||||
@@ -2136,6 +2136,104 @@ void WrappedVulkan::vkCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_
|
||||
}
|
||||
}
|
||||
|
||||
// Serialises (capture) or replays vkCmdDispatchBaseKHR (VK_KHR_device_group):
// a compute dispatch with a non-zero base workgroup ID. On loading replay a
// drawcall entry is registered so the dispatch shows up in the event browser.
template <typename SerialiserType>
bool WrappedVulkan::Serialise_vkCmdDispatchBaseKHR(SerialiserType &ser, VkCommandBuffer commandBuffer,
                                                   uint32_t baseGroupX, uint32_t baseGroupY,
                                                   uint32_t baseGroupZ, uint32_t groupCountX,
                                                   uint32_t groupCountY, uint32_t groupCountZ)
{
  SERIALISE_ELEMENT(commandBuffer);
  SERIALISE_ELEMENT(baseGroupX);
  SERIALISE_ELEMENT(baseGroupY);
  SERIALISE_ELEMENT(baseGroupZ);
  SERIALISE_ELEMENT(groupCountX);
  SERIALISE_ELEMENT(groupCountY);
  SERIALISE_ELEMENT(groupCountZ);

  Serialise_DebugMessages(ser);

  SERIALISE_CHECK_READ_ERRORS();

  if(IsReplayingAndReading())
  {
    m_LastCmdBufferID = GetResourceManager()->GetOriginalID(GetResID(commandBuffer));

    if(IsActiveReplaying(m_State))
    {
      // active replay: only execute if this command is inside the partial
      // re-record range, wrapping it with the pre/post dispatch callbacks
      if(InRerecordRange(m_LastCmdBufferID))
      {
        commandBuffer = RerecordCmdBuf(m_LastCmdBufferID);

        uint32_t eventId = HandlePreCallback(commandBuffer, DrawFlags::Dispatch);

        ObjDisp(commandBuffer)
            ->CmdDispatchBaseKHR(Unwrap(commandBuffer), baseGroupX, baseGroupY, baseGroupZ,
                                 groupCountX, groupCountY, groupCountZ);

        // the callback may request the dispatch to be executed a second time
        if(eventId && m_DrawcallCallback->PostDispatch(eventId, commandBuffer))
        {
          ObjDisp(commandBuffer)
              ->CmdDispatchBaseKHR(Unwrap(commandBuffer), baseGroupX, baseGroupY, baseGroupZ,
                                   groupCountX, groupCountY, groupCountZ);
          m_DrawcallCallback->PostRedispatch(eventId, commandBuffer);
        }
      }
    }
    else
    {
      // initial loading: execute the dispatch and register a drawcall entry
      ObjDisp(commandBuffer)
          ->CmdDispatchBaseKHR(Unwrap(commandBuffer), baseGroupX, baseGroupY, baseGroupZ,
                               groupCountX, groupCountY, groupCountZ);

      {
        AddEvent();

        DrawcallDescription draw;
        draw.name = StringFormat::Fmt("vkCmdDispatchBaseKHR(%u, %u, %u)", groupCountX, groupCountY,
                                      groupCountZ);
        draw.dispatchDimension[0] = groupCountX;
        draw.dispatchDimension[1] = groupCountY;
        draw.dispatchDimension[2] = groupCountZ;
        // record the base workgroup offset, unique to the DispatchBase variant
        draw.dispatchBase[0] = baseGroupX;
        draw.dispatchBase[1] = baseGroupY;
        draw.dispatchBase[2] = baseGroupZ;

        draw.flags |= DrawFlags::Dispatch;

        AddDrawcall(draw, true);
      }
    }
  }

  return true;
}
|
||||
|
||||
// Capture-side entry point for vkCmdDispatchBaseKHR (VK_KHR_device_group):
// forwards to the driver and, while capturing, records a draw chunk into the
// command buffer's record for later serialisation/replay.
void WrappedVulkan::vkCmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX,
                                         uint32_t baseGroupY, uint32_t baseGroupZ,
                                         uint32_t groupCountX, uint32_t groupCountY,
                                         uint32_t groupCountZ)
{
  SCOPED_DBG_SINK();

  // call the real driver function (timed for the serialised call)
  SERIALISE_TIME_CALL(ObjDisp(commandBuffer)
                          ->CmdDispatchBaseKHR(Unwrap(commandBuffer), baseGroupX, baseGroupY,
                                               baseGroupZ, groupCountX, groupCountY, groupCountZ));

  if(IsCaptureMode(m_State))
  {
    VkResourceRecord *record = GetRecord(commandBuffer);

    CACHE_THREAD_SERIALISER();

    // mark this chunk as a draw/dispatch so it's counted as an action
    ser.SetDrawChunk();
    SCOPED_SERIALISE_CHUNK(VulkanChunk::vkCmdDispatchBaseKHR);
    Serialise_vkCmdDispatchBaseKHR(ser, commandBuffer, baseGroupX, baseGroupY, baseGroupZ,
                                   groupCountX, groupCountY, groupCountZ);

    record->AddChunk(scope.Get());
  }
}
|
||||
|
||||
INSTANTIATE_FUNCTION_SERIALISED(void, vkCmdDraw, VkCommandBuffer commandBuffer, uint32_t vertexCount,
|
||||
uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance);
|
||||
|
||||
@@ -2197,3 +2295,7 @@ INSTANTIATE_FUNCTION_SERIALISED(void, vkCmdResolveImage, VkCommandBuffer command
|
||||
VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
|
||||
VkImageLayout dstImageLayout, uint32_t regionCount,
|
||||
const VkImageResolve *pRegions);
|
||||
|
||||
INSTANTIATE_FUNCTION_SERIALISED(void, vkCmdDispatchBaseKHR, VkCommandBuffer commandBuffer,
|
||||
uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ,
|
||||
uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
|
||||
@@ -505,4 +505,52 @@ void WrappedVulkan::vkGetDescriptorSetLayoutSupportKHR(VkDevice device,
|
||||
{
|
||||
VkDescriptorSetLayoutCreateInfo unwrapped = UnwrapInfo(pCreateInfo);
|
||||
return ObjDisp(device)->GetDescriptorSetLayoutSupportKHR(Unwrap(device), &unwrapped, pSupport);
|
||||
}
|
||||
|
||||
// Implements vkEnumeratePhysicalDeviceGroupsKHR (VK_KHR_device_group_creation).
// We deliberately ignore the driver's real device groups and report one
// single-device group per physical device, using our internal enumerate
// function so the returned handles are properly wrapped.
VkResult WrappedVulkan::vkEnumeratePhysicalDeviceGroupsKHR(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount,
    VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
{
  // we ignore the 'real' physical device groups, and report one group per physical device. We use
  // our internal enumerate function to make sure we handle wrapping the objects.
  uint32_t numPhys = 0;
  vkEnumeratePhysicalDevices(Unwrap(instance), &numPhys, NULL);

  VkPhysicalDevice *phys = new VkPhysicalDevice[numPhys];
  vkEnumeratePhysicalDevices(Unwrap(instance), &numPhys, phys);

  uint32_t outputSpace = pPhysicalDeviceGroupCount ? *pPhysicalDeviceGroupCount : 0;

  // BUGFIX: never write (or read phys[]) past numPhys - the caller's capacity
  // may be larger than the number of devices, and the previous loop bound of
  // outputSpace read out of bounds in that case.
  uint32_t writeCount = outputSpace < numPhys ? outputSpace : numPhys;

  if(pPhysicalDeviceGroupCount)
  {
    // per the Vulkan spec: report the total count in the query call, but the
    // number of structures actually written when the output array is provided
    *pPhysicalDeviceGroupCount = pPhysicalDeviceGroupProperties ? writeCount : numPhys;
  }

  if(pPhysicalDeviceGroupProperties)
  {
    // list one group per device
    for(uint32_t i = 0; i < writeCount; i++)
    {
      RDCEraseEl(pPhysicalDeviceGroupProperties[i]);
      pPhysicalDeviceGroupProperties[i].sType =
          VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR;
      pPhysicalDeviceGroupProperties[i].physicalDeviceCount = 1;
      pPhysicalDeviceGroupProperties[i].physicalDevices[0] = phys[i];
      pPhysicalDeviceGroupProperties[i].subsetAllocation = VK_FALSE;
    }
  }

  delete[] phys;

  // VK_INCOMPLETE signals the caller's array was too small for all groups
  if(pPhysicalDeviceGroupProperties && outputSpace < numPhys)
    return VK_INCOMPLETE;

  return VK_SUCCESS;
}
|
||||
|
||||
void WrappedVulkan::vkGetDeviceGroupPeerMemoryFeaturesKHR(VkDevice device, uint32_t heapIndex,
|
||||
uint32_t localDeviceIndex,
|
||||
uint32_t remoteDeviceIndex,
|
||||
VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
|
||||
{
|
||||
return ObjDisp(device)->GetDeviceGroupPeerMemoryFeaturesKHR(
|
||||
Unwrap(device), heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
|
||||
}
|
||||
@@ -464,53 +464,9 @@ VkResult WrappedVulkan::vkCreateSampler(VkDevice device, const VkSamplerCreateIn
|
||||
{
|
||||
VkSamplerCreateInfo info = *pCreateInfo;
|
||||
|
||||
size_t memSize = 0;
|
||||
byte *tempMem = GetTempMemory(GetNextPatchSize(info.pNext));
|
||||
|
||||
// we don't have to unwrap every struct, but unwrapping a struct means we need to copy
|
||||
// the previous one in the chain locally to modify the pNext pointer. So we just copy
|
||||
// all of them locally
|
||||
{
|
||||
const VkGenericStruct *next = (const VkGenericStruct *)info.pNext;
|
||||
while(next)
|
||||
{
|
||||
// we need to unwrap these structs
|
||||
if(next->sType == VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR)
|
||||
memSize += sizeof(VkSamplerYcbcrConversionInfoKHR);
|
||||
|
||||
next = next->pNext;
|
||||
}
|
||||
}
|
||||
|
||||
byte *tempMem = GetTempMemory(memSize);
|
||||
|
||||
{
|
||||
VkGenericStruct *nextChainTail = (VkGenericStruct *)&info;
|
||||
const VkGenericStruct *nextInput = (const VkGenericStruct *)info.pNext;
|
||||
while(nextInput)
|
||||
{
|
||||
// unwrap and replace the dedicated allocation struct in the chain
|
||||
if(nextInput->sType == VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO)
|
||||
{
|
||||
const VkSamplerYcbcrConversionInfoKHR *ycbcrIn =
|
||||
(const VkSamplerYcbcrConversionInfoKHR *)nextInput;
|
||||
VkSamplerYcbcrConversionInfoKHR *ycbcrOut = (VkSamplerYcbcrConversionInfoKHR *)tempMem;
|
||||
|
||||
// copy and unwrap the struct
|
||||
ycbcrOut->sType = ycbcrIn->sType;
|
||||
ycbcrOut->conversion = Unwrap(ycbcrIn->conversion);
|
||||
|
||||
AppendModifiedChainedStruct(tempMem, ycbcrOut, nextChainTail);
|
||||
}
|
||||
else
|
||||
{
|
||||
RDCERR("unrecognised struct %d in vkAllocateMemoryInfo pNext chain", nextInput->sType);
|
||||
// can't patch this struct, have to just copy it and hope it's the last in the chain
|
||||
nextChainTail->pNext = nextInput;
|
||||
}
|
||||
|
||||
nextInput = nextInput->pNext;
|
||||
}
|
||||
}
|
||||
PatchNextChain("VkSamplerCreateInfo", tempMem, (VkGenericStruct *)&info);
|
||||
|
||||
VkResult ret;
|
||||
SERIALISE_TIME_CALL(
|
||||
|
||||
@@ -161,12 +161,22 @@ template <>
|
||||
VkBindImageMemoryInfoKHR *WrappedVulkan::UnwrapInfos(const VkBindImageMemoryInfoKHR *info,
|
||||
uint32_t count)
|
||||
{
|
||||
VkBindImageMemoryInfoKHR *ret = GetTempArray<VkBindImageMemoryInfoKHR>(count);
|
||||
size_t memSize = sizeof(VkBindImageMemoryInfoKHR) * count;
|
||||
|
||||
for(uint32_t i = 0; i < count; i++)
|
||||
memSize += GetNextPatchSize(info[i].pNext);
|
||||
|
||||
byte *tempMem = GetTempMemory(memSize);
|
||||
|
||||
VkBindImageMemoryInfoKHR *ret = (VkBindImageMemoryInfoKHR *)tempMem;
|
||||
|
||||
tempMem += sizeof(VkBindImageMemoryInfoKHR) * count;
|
||||
|
||||
memcpy(ret, info, count * sizeof(VkBindImageMemoryInfoKHR));
|
||||
|
||||
for(uint32_t i = 0; i < count; i++)
|
||||
{
|
||||
PatchNextChain("VkBindImageMemoryInfoKHR", tempMem, (VkGenericStruct *)&ret[i]);
|
||||
ret[i].image = Unwrap(ret[i].image);
|
||||
ret[i].memory = Unwrap(ret[i].memory);
|
||||
}
|
||||
@@ -367,145 +377,9 @@ VkResult WrappedVulkan::vkAllocateMemory(VkDevice device, const VkMemoryAllocate
|
||||
ObjDisp(device)->DestroyBuffer(Unwrap(device), buf, NULL);
|
||||
}
|
||||
|
||||
size_t memSize = 0;
|
||||
byte *tempMem = GetTempMemory(GetNextPatchSize(info.pNext));
|
||||
|
||||
// we don't have to unwrap every struct, but unwrapping a struct means we need to copy
|
||||
// the previous one in the chain locally to modify the pNext pointer. So we just copy
|
||||
// all of them locally
|
||||
{
|
||||
const VkGenericStruct *next = (const VkGenericStruct *)info.pNext;
|
||||
while(next)
|
||||
{
|
||||
// we need to unwrap these structs
|
||||
if(next->sType == VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV)
|
||||
memSize += sizeof(VkDedicatedAllocationMemoryAllocateInfoNV);
|
||||
else if(next->sType == VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR)
|
||||
memSize += sizeof(VkMemoryDedicatedAllocateInfoKHR);
|
||||
// the rest we don't need to unwrap, but we need to copy locally for chaining
|
||||
else if(next->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV)
|
||||
memSize += sizeof(VkExportMemoryAllocateInfoNV);
|
||||
else if(next->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR)
|
||||
memSize += sizeof(VkExportMemoryAllocateInfoKHR);
|
||||
else if(next->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR)
|
||||
memSize += sizeof(VkImportMemoryFdInfoKHR);
|
||||
|
||||
#ifdef VK_NV_external_memory_win32
|
||||
else if(next->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV)
|
||||
memSize += sizeof(VkExportMemoryWin32HandleInfoNV);
|
||||
else if(next->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV)
|
||||
memSize += sizeof(VkImportMemoryWin32HandleInfoNV);
|
||||
#else
|
||||
else if(next->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV)
|
||||
RDCERR("Support for VK_NV_external_memory_win32 not compiled in");
|
||||
else if(next->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV)
|
||||
RDCERR("Support for VK_NV_external_memory_win32 not compiled in");
|
||||
#endif
|
||||
|
||||
#ifdef VK_KHR_external_memory_win32
|
||||
else if(next->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR)
|
||||
memSize += sizeof(VkExportMemoryWin32HandleInfoKHR);
|
||||
else if(next->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR)
|
||||
memSize += sizeof(VkImportMemoryWin32HandleInfoKHR);
|
||||
#else
|
||||
else if(next->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR)
|
||||
RDCERR("Support for VK_KHR_external_memory_win32 not compiled in");
|
||||
else if(next->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR)
|
||||
RDCERR("Support for VK_KHR_external_memory_win32 not compiled in");
|
||||
#endif
|
||||
|
||||
next = next->pNext;
|
||||
}
|
||||
}
|
||||
|
||||
byte *tempMem = GetTempMemory(memSize);
|
||||
|
||||
{
|
||||
VkGenericStruct *nextChainTail = (VkGenericStruct *)&info;
|
||||
const VkGenericStruct *nextInput = (const VkGenericStruct *)info.pNext;
|
||||
while(nextInput)
|
||||
{
|
||||
// unwrap and replace the dedicated allocation struct in the chain
|
||||
if(nextInput->sType == VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV)
|
||||
{
|
||||
const VkDedicatedAllocationMemoryAllocateInfoNV *dedicatedIn =
|
||||
(const VkDedicatedAllocationMemoryAllocateInfoNV *)nextInput;
|
||||
VkDedicatedAllocationMemoryAllocateInfoNV *dedicatedOut =
|
||||
(VkDedicatedAllocationMemoryAllocateInfoNV *)tempMem;
|
||||
|
||||
// copy and unwrap the struct
|
||||
dedicatedOut->sType = dedicatedIn->sType;
|
||||
dedicatedOut->buffer = Unwrap(dedicatedIn->buffer);
|
||||
dedicatedOut->image = Unwrap(dedicatedIn->image);
|
||||
|
||||
AppendModifiedChainedStruct(tempMem, dedicatedOut, nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR)
|
||||
{
|
||||
const VkMemoryDedicatedAllocateInfoKHR *dedicatedIn =
|
||||
(const VkMemoryDedicatedAllocateInfoKHR *)nextInput;
|
||||
VkMemoryDedicatedAllocateInfoKHR *dedicatedOut = (VkMemoryDedicatedAllocateInfoKHR *)tempMem;
|
||||
|
||||
// copy and unwrap the struct
|
||||
dedicatedOut->sType = dedicatedIn->sType;
|
||||
dedicatedOut->buffer = Unwrap(dedicatedIn->buffer);
|
||||
dedicatedOut->image = Unwrap(dedicatedIn->image);
|
||||
|
||||
AppendModifiedChainedStruct(tempMem, dedicatedOut, nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV)
|
||||
{
|
||||
CopyNextChainedStruct<VkExportMemoryAllocateInfoNV>(tempMem, nextInput, nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR)
|
||||
{
|
||||
CopyNextChainedStruct<VkExportMemoryAllocateInfoKHR>(tempMem, nextInput, nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR)
|
||||
{
|
||||
CopyNextChainedStruct<VkImportMemoryFdInfoKHR>(tempMem, nextInput, nextChainTail);
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR)
|
||||
{
|
||||
#ifdef VK_KHR_external_memory_win32
|
||||
CopyNextChainedStruct<VkExportMemoryWin32HandleInfoKHR>(tempMem, nextInput, nextChainTail);
|
||||
#else
|
||||
RDCERR("Support for VK_KHR_external_memory_win32 not compiled in");
|
||||
#endif
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR)
|
||||
{
|
||||
#ifdef VK_KHR_external_memory_win32
|
||||
CopyNextChainedStruct<VkImportMemoryWin32HandleInfoKHR>(tempMem, nextInput, nextChainTail);
|
||||
#else
|
||||
RDCERR("Support for VK_KHR_external_memory_win32 not compiled in");
|
||||
#endif
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV)
|
||||
{
|
||||
#ifdef VK_NV_external_memory_win32
|
||||
CopyNextChainedStruct<VkExportMemoryWin32HandleInfoNV>(tempMem, nextInput, nextChainTail);
|
||||
#else
|
||||
RDCERR("Support for VK_NV_external_memory_win32 not compiled in");
|
||||
#endif
|
||||
}
|
||||
else if(nextInput->sType == VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV)
|
||||
{
|
||||
#ifdef VK_NV_external_memory_win32
|
||||
CopyNextChainedStruct<VkImportMemoryWin32HandleInfoNV>(tempMem, nextInput, nextChainTail);
|
||||
#else
|
||||
RDCERR("Support for VK_NV_external_memory_win32 not compiled in");
|
||||
#endif
|
||||
}
|
||||
else
|
||||
{
|
||||
RDCERR("unrecognised struct %d in vkAllocateMemoryInfo pNext chain", nextInput->sType);
|
||||
// can't patch this struct, have to just copy it and hope it's the last in the chain
|
||||
nextChainTail->pNext = nextInput;
|
||||
}
|
||||
|
||||
nextInput = nextInput->pNext;
|
||||
}
|
||||
}
|
||||
PatchNextChain("VkMemoryAllocateInfo", tempMem, (VkGenericStruct *)&info);
|
||||
|
||||
VkResult ret;
|
||||
SERIALISE_TIME_CALL(
|
||||
@@ -1508,6 +1382,10 @@ VkResult WrappedVulkan::vkCreateImage(VkDevice device, const VkImageCreateInfo *
|
||||
}
|
||||
}
|
||||
|
||||
byte *tempMem = GetTempMemory(GetNextPatchSize(createInfo_adjusted.pNext));
|
||||
|
||||
PatchNextChain("VkImageCreateInfo", tempMem, (VkGenericStruct *)&createInfo_adjusted);
|
||||
|
||||
VkResult ret;
|
||||
SERIALISE_TIME_CALL(
|
||||
ret = ObjDisp(device)->CreateImage(Unwrap(device), &createInfo_adjusted, pAllocator, pImage));
|
||||
|
||||
@@ -643,6 +643,18 @@ VkResult WrappedVulkan::vkQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR
|
||||
unwrappedInfo.pWaitSemaphores = unwrappedInfo.waitSemaphoreCount ? &unwrappedSems[0] : NULL;
|
||||
|
||||
// Don't support any extensions for present info
|
||||
const VkGenericStruct *next = (const VkGenericStruct *)pPresentInfo->pNext;
|
||||
while(next)
|
||||
{
|
||||
// allowed (and ignored) pNext structs
|
||||
if(next->sType != VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR &&
|
||||
next->sType != VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR)
|
||||
{
|
||||
RDCWARN("Unsupported pNext structure in pPresentInfo: %s", ToStr(next->sType).c_str());
|
||||
}
|
||||
|
||||
next = next->pNext;
|
||||
}
|
||||
RDCASSERT(pPresentInfo->pNext == NULL);
|
||||
|
||||
VkResourceRecord *swaprecord = GetRecord(pPresentInfo->pSwapchains[0]);
|
||||
@@ -888,6 +900,42 @@ VkResult WrappedVulkan::vkReleaseDisplayEXT(VkPhysicalDevice physicalDevice, VkD
|
||||
return ObjDisp(physicalDevice)->ReleaseDisplayEXT(Unwrap(physicalDevice), display);
|
||||
}
|
||||
|
||||
VkResult WrappedVulkan::vkGetDeviceGroupPresentCapabilitiesKHR(
|
||||
VkDevice device, VkDeviceGroupPresentCapabilitiesKHR *pDeviceGroupPresentCapabilities)
|
||||
{
|
||||
return ObjDisp(device)->GetDeviceGroupPresentCapabilitiesKHR(Unwrap(device),
|
||||
pDeviceGroupPresentCapabilities);
|
||||
}
|
||||
|
||||
VkResult WrappedVulkan::vkGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface,
|
||||
VkDeviceGroupPresentModeFlagsKHR *pModes)
|
||||
{
|
||||
return ObjDisp(device)->GetDeviceGroupSurfacePresentModesKHR(Unwrap(device), Unwrap(surface),
|
||||
pModes);
|
||||
}
|
||||
|
||||
VkResult WrappedVulkan::vkGetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice,
|
||||
VkSurfaceKHR surface,
|
||||
uint32_t *pRectCount,
|
||||
VkRect2D *pRects)
|
||||
{
|
||||
return ObjDisp(physicalDevice)
|
||||
->GetPhysicalDevicePresentRectanglesKHR(Unwrap(physicalDevice), Unwrap(surface), pRectCount,
|
||||
pRects);
|
||||
}
|
||||
|
||||
VkResult WrappedVulkan::vkAcquireNextImage2KHR(VkDevice device,
|
||||
const VkAcquireNextImageInfoKHR *pAcquireInfo,
|
||||
uint32_t *pImageIndex)
|
||||
{
|
||||
VkAcquireNextImageInfoKHR unwrapped = *pAcquireInfo;
|
||||
unwrapped.semaphore = Unwrap(unwrapped.semaphore);
|
||||
unwrapped.fence = Unwrap(unwrapped.fence);
|
||||
unwrapped.swapchain = Unwrap(unwrapped.swapchain);
|
||||
|
||||
return ObjDisp(device)->AcquireNextImage2KHR(Unwrap(device), &unwrapped, pImageIndex);
|
||||
}
|
||||
|
||||
INSTANTIATE_FUNCTION_SERIALISED(VkResult, vkCreateSwapchainKHR, VkDevice device,
|
||||
const VkSwapchainCreateInfoKHR *pCreateInfo,
|
||||
const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain);
|
||||
|
||||
@@ -483,6 +483,7 @@ void DoSerialise(SerialiserType &ser, DrawcallDescription &el)
|
||||
|
||||
SERIALISE_MEMBER(dispatchDimension);
|
||||
SERIALISE_MEMBER(dispatchThreadsDimension);
|
||||
SERIALISE_MEMBER(dispatchBase);
|
||||
|
||||
SERIALISE_MEMBER(indexByteWidth);
|
||||
SERIALISE_MEMBER(topology);
|
||||
@@ -500,7 +501,7 @@ void DoSerialise(SerialiserType &ser, DrawcallDescription &el)
|
||||
SERIALISE_MEMBER(events);
|
||||
SERIALISE_MEMBER(children);
|
||||
|
||||
SIZE_CHECK(248);
|
||||
SIZE_CHECK(256);
|
||||
}
|
||||
|
||||
template <typename SerialiserType>
|
||||
|
||||
Reference in New Issue
Block a user