Add support for VK_KHR_map_memory2

This commit is contained in:
baldurk
2025-08-12 16:47:21 +01:00
parent 5dcc1b147f
commit e809db8a40
9 changed files with 297 additions and 97 deletions
+1 -1
View File
@@ -185,6 +185,7 @@ Maintainers can update this file by updating vk.xml in this folder and running `
* `VK_KHR_maintenance3`
* `VK_KHR_maintenance4`
* `VK_KHR_maintenance5`
* `VK_KHR_map_memory2`
* `VK_KHR_multiview`
* `VK_KHR_performance_query`
* `VK_KHR_pipeline_executable_properties`
@@ -268,7 +269,6 @@ KHR extensions will definitely be implemented at some point, though KHR extensio
* `VK_KHR_maintenance7`
* `VK_KHR_maintenance8`
* `VK_KHR_maintenance9`
* `VK_KHR_map_memory2`
* `VK_KHR_pipeline_binary`
* `VK_KHR_present_id2`
* `VK_KHR_present_mode_fifo_latest_ready`
+8 -1
View File
@@ -1392,6 +1392,7 @@ enum class VulkanChunk : uint32_t
vkCopyImageToMemory,
vkCopyMemoryToImage,
vkTransitionImageLayout,
vkUnmapMemory2KHR,
Max,
};
@@ -1618,10 +1619,12 @@ DECLARE_REFLECTION_STRUCT(VkMemoryDedicatedAllocateInfo);
DECLARE_REFLECTION_STRUCT(VkMemoryDedicatedRequirements);
DECLARE_REFLECTION_STRUCT(VkMemoryFdPropertiesKHR);
DECLARE_REFLECTION_STRUCT(VkMemoryGetFdInfoKHR);
DECLARE_REFLECTION_STRUCT(VkMemoryMapInfo);
DECLARE_REFLECTION_STRUCT(VkMemoryOpaqueCaptureAddressAllocateInfo);
DECLARE_REFLECTION_STRUCT(VkMemoryPriorityAllocateInfoEXT);
DECLARE_REFLECTION_STRUCT(VkMemoryRequirements2);
DECLARE_REFLECTION_STRUCT(VkMemoryToImageCopy);
DECLARE_REFLECTION_STRUCT(VkMemoryUnmapInfo);
DECLARE_REFLECTION_STRUCT(VkMultisampledRenderToSingleSampledInfoEXT);
DECLARE_REFLECTION_STRUCT(VkMultisamplePropertiesEXT);
DECLARE_REFLECTION_STRUCT(VkMutableDescriptorTypeCreateInfoEXT);
@@ -2108,10 +2111,12 @@ DECLARE_DESERIALISE_TYPE(VkMemoryDedicatedAllocateInfo);
DECLARE_DESERIALISE_TYPE(VkMemoryDedicatedRequirements);
DECLARE_DESERIALISE_TYPE(VkMemoryFdPropertiesKHR);
DECLARE_DESERIALISE_TYPE(VkMemoryGetFdInfoKHR);
DECLARE_DESERIALISE_TYPE(VkMemoryMapInfo);
DECLARE_DESERIALISE_TYPE(VkMemoryOpaqueCaptureAddressAllocateInfo);
DECLARE_DESERIALISE_TYPE(VkMemoryPriorityAllocateInfoEXT);
DECLARE_DESERIALISE_TYPE(VkMemoryRequirements2);
DECLARE_DESERIALISE_TYPE(VkMemoryToImageCopy);
DECLARE_DESERIALISE_TYPE(VkMemoryUnmapInfo);
DECLARE_DESERIALISE_TYPE(VkMultisampledRenderToSingleSampledInfoEXT);
DECLARE_DESERIALISE_TYPE(VkMultisamplePropertiesEXT);
DECLARE_DESERIALISE_TYPE(VkMutableDescriptorTypeCreateInfoEXT);
@@ -2694,8 +2699,10 @@ DECLARE_REFLECTION_ENUM(VkLineRasterizationMode);
DECLARE_REFLECTION_ENUM(VkLogicOp);
DECLARE_REFLECTION_ENUM(VkMemoryAllocateFlagBits);
DECLARE_REFLECTION_ENUM(VkMemoryHeapFlagBits);
DECLARE_REFLECTION_ENUM(VkMemoryPropertyFlagBits);
DECLARE_REFLECTION_ENUM(VkMemoryMapFlagBits);
DECLARE_REFLECTION_ENUM(VkMemoryOverallocationBehaviorAMD);
DECLARE_REFLECTION_ENUM(VkMemoryPropertyFlagBits);
DECLARE_REFLECTION_ENUM(VkMemoryUnmapFlagBits);
DECLARE_REFLECTION_ENUM(VkObjectType);
DECLARE_REFLECTION_ENUM(VkPerformanceCounterDescriptionFlagBitsKHR);
DECLARE_REFLECTION_ENUM(VkPerformanceCounterScopeKHR);
+7
View File
@@ -1682,6 +1682,10 @@ static const VkExtensionProperties supportedExtensions[] = {
VK_KHR_MAINTENANCE_5_EXTENSION_NAME,
VK_KHR_MAINTENANCE_5_SPEC_VERSION,
},
{
VK_KHR_MAP_MEMORY_2_EXTENSION_NAME,
VK_KHR_MAP_MEMORY_2_SPEC_VERSION,
},
{
VK_KHR_MULTIVIEW_EXTENSION_NAME,
VK_KHR_MULTIVIEW_SPEC_VERSION,
@@ -4655,6 +4659,9 @@ bool WrappedVulkan::ProcessChunk(ReadSerialiser &ser, VulkanChunk chunk)
return Serialise_vkCmdBindIndexBuffer2KHR(ser, VK_NULL_HANDLE, VK_NULL_HANDLE, 0, 0,
VK_INDEX_TYPE_MAX_ENUM);
case VulkanChunk::vkUnmapMemory2KHR:
return Serialise_vkUnmapMemory2KHR(ser, VK_NULL_HANDLE, VK_NULL_HANDLE);
// chunks that are reserved but not yet serialised
case VulkanChunk::vkResetCommandPool:
case VulkanChunk::vkCreateDepthTargetView:
+14
View File
@@ -1739,10 +1739,18 @@ public:
IMPLEMENT_FUNCTION_SERIALISED(void, vkFreeMemory, VkDevice device, VkDeviceMemory memory,
const VkAllocationCallbacks *);
void ProcessMap(VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, void **ppData,
byte *realData, VkDeviceSize misalignedOffset);
IMPLEMENT_FUNCTION_SERIALISED(VkResult, vkMapMemory, VkDevice device, VkDeviceMemory memory,
VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags,
void **ppData);
template <typename SerialiserType>
bool SerialiseUnmap(SerialiserType &ser, VkDeviceMemory memory, uint64_t MapOffset,
uint64_t MapSize, byte *MapData);
void ProcessUnmap(VkDevice device, VkDeviceMemory mem, const VkMemoryUnmapInfo *info);
IMPLEMENT_FUNCTION_SERIALISED(void, vkUnmapMemory, VkDevice device, VkDeviceMemory memory);
IMPLEMENT_FUNCTION_SERIALISED(VkResult, vkFlushMappedMemoryRanges, VkDevice device,
@@ -3253,4 +3261,10 @@ public:
IMPLEMENT_FUNCTION_SERIALISED(void, vkCmdBindDescriptorBufferEmbeddedSamplersEXT,
VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
VkPipelineLayout layout, uint32_t set);
// VK_KHR_map_memory2
IMPLEMENT_FUNCTION_SERIALISED(VkResult, vkMapMemory2KHR, VkDevice device,
const VkMemoryMapInfo *pMemoryMapInfo, void **ppData);
IMPLEMENT_FUNCTION_SERIALISED(void, vkUnmapMemory2KHR, VkDevice device,
const VkMemoryUnmapInfo *pMemoryUnmapInfo);
};
+10 -2
View File
@@ -551,7 +551,8 @@
DeclExt(KHR_maintenance5); \
DeclExt(EXT_image_compression_control); \
DeclExt(EXT_image_compression_control_swapchain); \
DeclExt(EXT_descriptor_buffer);
DeclExt(EXT_descriptor_buffer); \
DeclExt(KHR_map_memory2);
// for simplicity and since the check itself is platform agnostic,
// these aren't protected in platform defines
@@ -692,7 +693,8 @@
CheckExt(KHR_maintenance5, VKXX); \
CheckExt(EXT_image_compression_control, VKXX); \
CheckExt(EXT_image_compression_control_swapchain, VKXX); \
CheckExt(EXT_descriptor_buffer, VKXX);
CheckExt(EXT_descriptor_buffer, VKXX); \
CheckExt(KHR_map_memory2, VKXX);
#define HookInitVulkanInstanceExts_PhysDev() \
HookInitExtension(KHR_surface, GetPhysicalDeviceSurfaceSupportKHR); \
@@ -1082,6 +1084,8 @@
HookInitExtension(EXT_descriptor_buffer, GetImageViewOpaqueCaptureDescriptorDataEXT); \
HookInitExtension(EXT_descriptor_buffer, GetSamplerOpaqueCaptureDescriptorDataEXT); \
HookInitExtension(EXT_descriptor_buffer, GetAccelerationStructureOpaqueCaptureDescriptorDataEXT); \
HookInitExtension(KHR_map_memory2, MapMemory2KHR); \
HookInitExtension(KHR_map_memory2, UnmapMemory2KHR); \
HookInitExtension_Device_Win32(); \
HookInitExtension_Device_Linux(); \
HookInitExtension_Device_Android(); \
@@ -2032,6 +2036,10 @@
const VkSamplerCaptureDescriptorDataInfoEXT *, pInfo, void *, pData); \
HookDefine3(VkResult, vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT, VkDevice, device, \
const VkAccelerationStructureCaptureDescriptorDataInfoEXT *, pInfo, void *, pData); \
HookDefine3(VkResult, vkMapMemory2KHR, VkDevice, device, const VkMemoryMapInfo *, \
pMemoryMapInfo, void **, ppData); \
HookDefine2(void, vkUnmapMemory2KHR, VkDevice, device, const VkMemoryUnmapInfo *, \
pMemoryUnmapInfo); \
HookDefine_Win32(); \
HookDefine_Linux(); \
HookDefine_Android(); \
+2 -2
View File
@@ -831,6 +831,8 @@ static void AppendModifiedChainedStruct(byte *&tempMem, VkStruct *outputStruct,
UnwrapInPlace(out->memory)); \
UNWRAP_STRUCT(VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, VkMemoryDedicatedAllocateInfo, \
UnwrapInPlace(out->buffer), UnwrapInPlace(out->image)); \
UNWRAP_STRUCT(VK_STRUCTURE_TYPE_MEMORY_MAP_INFO, VkMemoryMapInfo, UnwrapInPlace(out->memory)); \
UNWRAP_STRUCT(VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO, VkMemoryUnmapInfo, UnwrapInPlace(out->memory)); \
UNWRAP_STRUCT(VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, VkPipelineInfoKHR, \
UnwrapInPlace(out->pipeline)); \
UNWRAP_STRUCT(VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, VkPipelineExecutableInfoKHR, \
@@ -1063,10 +1065,8 @@ static void AppendModifiedChainedStruct(byte *&tempMem, VkStruct *outputStruct,
case VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV: \
case VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA: \
case VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT: \
case VK_STRUCTURE_TYPE_MEMORY_MAP_INFO: \
case VK_STRUCTURE_TYPE_MEMORY_MAP_PLACED_INFO_EXT: \
case VK_STRUCTURE_TYPE_MEMORY_METAL_HANDLE_PROPERTIES_EXT: \
case VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO: \
case VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA: \
case VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT: \
case VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT: \
+41 -4
View File
@@ -91,6 +91,7 @@ DECL_VKFLAG(VkFramebufferCreate);
DECL_VKFLAG(VkMemoryAllocate);
DECL_VKFLAG(VkMemoryHeap);
DECL_VKFLAG(VkMemoryMap);
DECL_VKFLAG(VkMemoryUnmap);
DECL_VKFLAG(VkMemoryProperty);
DECL_VKFLAG(VkPeerMemoryFeature);
DECL_VKFLAG(VkPipelineCacheCreate);
@@ -1283,6 +1284,10 @@ SERIALISE_VK_HANDLES();
PNEXT_STRUCT(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES, \
VkPhysicalDeviceMaintenance5Properties) \
\
/* VK_KHR_map_memory2 */ \
PNEXT_STRUCT(VK_STRUCTURE_TYPE_MEMORY_MAP_INFO, VkMemoryMapInfo) \
PNEXT_STRUCT(VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO, VkMemoryUnmapInfo) \
\
/* VK_EXT_multisampled_render_to_single_sampled */ \
PNEXT_STRUCT(VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT, \
VkMultisampledRenderToSingleSampledInfoEXT) \
@@ -1902,10 +1907,6 @@ SERIALISE_VK_HANDLES();
PNEXT_UNSUPPORTED(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_2_FEATURES_KHR) \
PNEXT_UNSUPPORTED(VK_STRUCTURE_TYPE_PRESENT_WAIT_2_INFO_KHR) \
\
/* VK_KHR_map_memory2 */ \
PNEXT_UNSUPPORTED(VK_STRUCTURE_TYPE_MEMORY_MAP_INFO) \
PNEXT_UNSUPPORTED(VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO) \
\
/* VK_KHR_maintenance6 */ \
PNEXT_UNSUPPORTED(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES) \
PNEXT_UNSUPPORTED(VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES) \
@@ -4581,6 +4582,40 @@ void Deserialise(const VkMappedMemoryRange &el)
DeserialiseNext(el.pNext);
}
// Serialises VkMemoryMapInfo (VK_KHR_map_memory2 / core 1.4): the pNext chain,
// map flags, the target memory handle, and the mapped offset/size range.
template <typename SerialiserType>
void DoSerialise(SerialiserType &ser, VkMemoryMapInfo &el)
{
// on write the caller must hand us the correct struct type; on read sType comes from the stream
RDCASSERT(ser.IsReading() || el.sType == VK_STRUCTURE_TYPE_MEMORY_MAP_INFO);
SerialiseNext(ser, el.sType, el.pNext);
SERIALISE_MEMBER_VKFLAGS(VkMemoryMapFlags, flags);
// the memory handle is the headline element shown in the UI
SERIALISE_MEMBER(memory).Important();
SERIALISE_MEMBER(offset).OffsetOrSize();
SERIALISE_MEMBER(size).OffsetOrSize();
}
// Frees any chained structs allocated while reading a VkMemoryMapInfo.
template <>
void Deserialise(const VkMemoryMapInfo &el)
{
DeserialiseNext(el.pNext);
}
// Serialises VkMemoryUnmapInfo (VK_KHR_map_memory2 / core 1.4): the pNext chain,
// unmap flags, and the memory handle being unmapped.
template <typename SerialiserType>
void DoSerialise(SerialiserType &ser, VkMemoryUnmapInfo &el)
{
// on write the caller must hand us the correct struct type; on read sType comes from the stream
RDCASSERT(ser.IsReading() || el.sType == VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO);
SerialiseNext(ser, el.sType, el.pNext);
SERIALISE_MEMBER_VKFLAGS(VkMemoryUnmapFlags, flags);
// the memory handle is the headline element shown in the UI
SERIALISE_MEMBER(memory).Important();
}
// Frees any chained structs allocated while reading a VkMemoryUnmapInfo.
template <>
void Deserialise(const VkMemoryUnmapInfo &el)
{
DeserialiseNext(el.pNext);
}
template <typename SerialiserType>
void DoSerialise(SerialiserType &ser, VkBufferImageCopy &el)
{
@@ -13938,10 +13973,12 @@ INSTANTIATE_SERIALISE_TYPE(VkMemoryDedicatedAllocateInfo);
INSTANTIATE_SERIALISE_TYPE(VkMemoryDedicatedRequirements);
INSTANTIATE_SERIALISE_TYPE(VkMemoryFdPropertiesKHR);
INSTANTIATE_SERIALISE_TYPE(VkMemoryGetFdInfoKHR);
INSTANTIATE_SERIALISE_TYPE(VkMemoryMapInfo);
INSTANTIATE_SERIALISE_TYPE(VkMemoryOpaqueCaptureAddressAllocateInfo);
INSTANTIATE_SERIALISE_TYPE(VkMemoryPriorityAllocateInfoEXT);
INSTANTIATE_SERIALISE_TYPE(VkMemoryRequirements2);
INSTANTIATE_SERIALISE_TYPE(VkMemoryToImageCopy);
INSTANTIATE_SERIALISE_TYPE(VkMemoryUnmapInfo);
INSTANTIATE_SERIALISE_TYPE(VkMultisampledRenderToSingleSampledInfoEXT);
INSTANTIATE_SERIALISE_TYPE(VkMultisamplePropertiesEXT);
INSTANTIATE_SERIALISE_TYPE(VkMutableDescriptorTypeCreateInfoEXT);
+2 -1
View File
@@ -28,7 +28,7 @@
template <>
rdcstr DoStringise(const VulkanChunk &el)
{
RDCCOMPILE_ASSERT((uint32_t)VulkanChunk::Max == 1226, "Chunks changed without updating names");
RDCCOMPILE_ASSERT((uint32_t)VulkanChunk::Max == 1227, "Chunks changed without updating names");
BEGIN_ENUM_STRINGISE(VulkanChunk)
{
@@ -258,6 +258,7 @@ rdcstr DoStringise(const VulkanChunk &el)
STRINGISE_ENUM_CLASS(vkCopyImageToMemory)
STRINGISE_ENUM_CLASS(vkCopyMemoryToImage)
STRINGISE_ENUM_CLASS(vkTransitionImageLayout)
STRINGISE_ENUM_CLASS(vkUnmapMemory2KHR)
STRINGISE_ENUM_CLASS_NAMED(Max, "Max Chunk");
}
END_ENUM_STRINGISE()
@@ -985,8 +985,7 @@ void WrappedVulkan::vkFreeMemory(VkDevice device, VkDeviceMemory memory, const V
ObjDisp(device)->FreeMemory(Unwrap(device), unwrappedMem, NULL);
}
VkResult WrappedVulkan::vkMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset,
VkDeviceSize size, VkMemoryMapFlags flags, void **ppData)
VkDeviceSize AlignMapBoundaries(VkDeviceSize &offset, VkDeviceSize &size)
{
// ensure we always map on a 16-byte boundary. This is for our own purposes so we can
// FindDiffRange against the mapped region. We adjust the pointer returned to the user but
@@ -998,97 +997,58 @@ VkResult WrappedVulkan::vkMapMemory(VkDevice device, VkDeviceMemory mem, VkDevic
// need to adjust the size so the end-point is still the same!
size += misalignedOffset;
byte *realData = NULL;
VkResult ret = ObjDisp(device)->MapMemory(Unwrap(device), Unwrap(mem), offset, size, flags,
(void **)&realData);
return misalignedOffset;
}
if(ret == VK_SUCCESS && realData)
void WrappedVulkan::ProcessMap(VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size,
void **ppData, byte *realData, VkDeviceSize misalignedOffset)
{
ResourceId id = GetResID(memory);
if(IsCaptureMode(m_State))
{
ResourceId id = GetResID(mem);
VkResourceRecord *memrecord = GetRecord(memory);
if(IsCaptureMode(m_State))
// must have map state, only non host visible memories have no map
// state, and they can't be mapped!
RDCASSERT(memrecord->memMapState);
MemMapState &state = *memrecord->memMapState;
// ensure size is valid
RDCASSERT(size == VK_WHOLE_SIZE || (size > 0 && offset + size <= memrecord->Length),
GetResID(memory), size, memrecord->Length);
// flush range offsets are relative to the start of the memory so keep mappedPtr at that
// basis. We'll only access within the mapped range
state.cpuReadPtr = state.mappedPtr = (byte *)realData - (size_t)offset;
state.refData = NULL;
state.mapOffset = offset;
state.mapSize = size == VK_WHOLE_SIZE ? (memrecord->Length - offset)
: RDCMIN(memrecord->Length - offset, size);
*ppData = realData + misalignedOffset;
if(state.mapCoherent)
{
VkResourceRecord *memrecord = GetRecord(mem);
// must have map state, only non host visible memories have no map
// state, and they can't be mapped!
RDCASSERT(memrecord->memMapState);
MemMapState &state = *memrecord->memMapState;
// ensure size is valid
RDCASSERT(size == VK_WHOLE_SIZE || (size > 0 && offset + size <= memrecord->Length),
GetResID(mem), size, memrecord->Length);
// flush range offsets are relative to the start of the memory so keep mappedPtr at that
// basis. We'll only access within the mapped range
state.cpuReadPtr = state.mappedPtr = (byte *)realData - (size_t)offset;
state.refData = NULL;
state.mapOffset = offset;
state.mapSize = size == VK_WHOLE_SIZE ? (memrecord->Length - offset)
: RDCMIN(memrecord->Length - offset, size);
*ppData = realData + misalignedOffset;
if(state.mapCoherent)
{
SCOPED_LOCK(m_CoherentMapsLock);
m_CoherentMaps.push_back(memrecord);
}
}
else
{
*ppData = realData + misalignedOffset;
SCOPED_LOCK(m_CoherentMapsLock);
m_CoherentMaps.push_back(memrecord);
}
}
else
{
*ppData = NULL;
*ppData = realData + misalignedOffset;
}
return ret;
}
template <typename SerialiserType>
bool WrappedVulkan::Serialise_vkUnmapMemory(SerialiserType &ser, VkDevice device,
VkDeviceMemory memory)
bool WrappedVulkan::SerialiseUnmap(SerialiserType &ser, VkDeviceMemory memory, uint64_t MapOffset,
uint64_t MapSize, byte *MapData)
{
SERIALISE_ELEMENT(device);
SERIALISE_ELEMENT(memory).Important();
uint64_t MapOffset = 0;
uint64_t MapSize = 0;
byte *MapData = NULL;
MemMapState *state = NULL;
if(IsCaptureMode(m_State))
{
state = GetRecord(memory)->memMapState;
MapOffset = state->mapOffset;
MapSize = state->mapSize;
MapData = (byte *)state->cpuReadPtr + MapOffset;
}
SERIALISE_ELEMENT(MapOffset).OffsetOrSize();
SERIALISE_ELEMENT(MapSize).OffsetOrSize();
bool directStream = true;
if(IsReplayingAndReading() && memory != VK_NULL_HANDLE)
{
if(IsLoading(m_State))
m_ResourceUses[GetResID(memory)].push_back(EventUsage(m_RootEventID, ResourceUsage::CPUWrite));
VkResult vkr = ObjDisp(device)->MapMemory(Unwrap(device), Unwrap(memory), MapOffset, MapSize, 0,
(void **)&MapData);
if(vkr != VK_SUCCESS)
{
SET_ERROR_RESULT(m_FailedReplayResult, ResultCode::APIReplayFailed,
"Error mapping memory on replay, VkResult: %s", ToStr(vkr).c_str());
return false;
}
if(!MapData)
{
SET_ERROR_RESULT(m_FailedReplayResult, ResultCode::APIReplayFailed,
@@ -1097,6 +1057,9 @@ bool WrappedVulkan::Serialise_vkUnmapMemory(SerialiserType &ser, VkDevice device
return false;
}
if(IsLoading(m_State))
m_ResourceUses[GetResID(memory)].push_back(EventUsage(m_RootEventID, ResourceUsage::CPUWrite));
const Intervals<VulkanCreationInfo::Memory::MemoryBinding> &bindings =
m_CreationInfo.m_Memory[GetResID(memory)].bindings;
@@ -1169,16 +1132,15 @@ bool WrappedVulkan::Serialise_vkUnmapMemory(SerialiserType &ser, VkDevice device
}
}
if(IsReplayingAndReading() && MapData && memory != VK_NULL_HANDLE)
ObjDisp(device)->UnmapMemory(Unwrap(device), Unwrap(memory));
SERIALISE_CHECK_READ_ERRORS();
return true;
}
void WrappedVulkan::vkUnmapMemory(VkDevice device, VkDeviceMemory mem)
void WrappedVulkan::ProcessUnmap(VkDevice device, VkDeviceMemory mem, const VkMemoryUnmapInfo *info)
{
// if this is an Unmap2 call, mem will be unset so pull it from the info struct
if(mem == VK_NULL_HANDLE && info)
mem = info->memory;
if(IsCaptureMode(m_State))
{
ResourceId id = GetResID(mem);
@@ -1222,16 +1184,18 @@ void WrappedVulkan::vkUnmapMemory(VkDevice device, VkDeviceMemory mem)
// coherent maps must always serialise all data on unmap, even if a flush was seen, because
// unflushed data is *also* visible. This is a bit redundant since data is serialised here
// and in any flushes, but that's the app's fault - the spec calls out flushing coherent
// maps
// as inefficient
// maps as inefficient
// if the memory is not coherent, we must have a flush for every region written while it is
// mapped, there is no implicit flush on unmap, so we follow the spec strictly on this.
if(state.mapCoherent)
{
CACHE_THREAD_SERIALISER();
SCOPED_SERIALISE_CHUNK(VulkanChunk::vkUnmapMemory);
Serialise_vkUnmapMemory(ser, device, mem);
SCOPED_SERIALISE_CHUNK(info ? VulkanChunk::vkUnmapMemory2KHR : VulkanChunk::vkUnmapMemory);
if(info)
Serialise_vkUnmapMemory2KHR(ser, device, info);
else
Serialise_vkUnmapMemory(ser, device, mem);
VkResourceRecord *record = GetRecord(mem);
@@ -1254,10 +1218,169 @@ void WrappedVulkan::vkUnmapMemory(VkDevice device, VkDeviceMemory mem)
FreeAlignedBuffer(state.refData);
state.refData = NULL;
}
}
// Wrapped vkMapMemory: widens the requested range to our internal alignment via
// AlignMapBoundaries (offset/size are adjusted in place), maps the real driver memory,
// then hands off to ProcessMap to record map state and give the caller back a pointer
// re-adjusted by the misaligned delta so they see exactly the offset they asked for.
VkResult WrappedVulkan::vkMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset,
VkDeviceSize size, VkMemoryMapFlags flags, void **ppData)
{
VkDeviceSize misalignedOffset = AlignMapBoundaries(offset, size);
byte *realData = NULL;
VkResult ret = ObjDisp(device)->MapMemory(Unwrap(device), Unwrap(memory), offset, size, flags,
(void **)&realData);
if(ret == VK_SUCCESS && realData)
{
ProcessMap(memory, offset, size, ppData, realData, misalignedOffset);
}
else
{
// failed or driver returned no pointer - make sure the caller doesn't see garbage
*ppData = NULL;
}
return ret;
}
// Wrapped vkMapMemory2KHR (VK_KHR_map_memory2): same behaviour as vkMapMemory but the
// parameters arrive in a VkMemoryMapInfo struct. We take a local copy to unwrap the
// memory handle and align the offset/size without mutating the app's struct.
VkResult WrappedVulkan::vkMapMemory2KHR(VkDevice device, const VkMemoryMapInfo *pMemoryMapInfo,
void **ppData)
{
VkMemoryMapInfo unwrapped = *pMemoryMapInfo;
unwrapped.memory = Unwrap(unwrapped.memory);
// widen the range for our own 16-byte mapping alignment; unwrapped.offset/size are
// adjusted in place, and the delta is re-applied to the pointer returned to the app
VkDeviceSize misalignedOffset = AlignMapBoundaries(unwrapped.offset, unwrapped.size);
byte *realData = NULL;
VkResult ret = ObjDisp(device)->MapMemory2KHR(Unwrap(device), &unwrapped, (void **)&realData);
if(ret == VK_SUCCESS && realData)
{
// note: record state against the original wrapped handle, with the aligned range
ProcessMap(pMemoryMapInfo->memory, unwrapped.offset, unwrapped.size, ppData, realData,
misalignedOffset);
}
else
{
// failed or driver returned no pointer - make sure the caller doesn't see garbage
*ppData = NULL;
}
return ret;
}
// Serialises a vkUnmapMemory call. On capture the mapped range and CPU-side pointer are
// pulled from the record's MemMapState; on replay the same range is mapped from the
// driver, the captured bytes are applied via the shared SerialiseUnmap helper, and the
// memory is unmapped again.
template <typename SerialiserType>
bool WrappedVulkan::Serialise_vkUnmapMemory(SerialiserType &ser, VkDevice device,
VkDeviceMemory memory)
{
SERIALISE_ELEMENT(device);
SERIALISE_ELEMENT(memory).Important();
uint64_t MapOffset = 0;
uint64_t MapSize = 0;
byte *MapData = NULL;
if(IsCaptureMode(m_State))
{
// capture side: fetch the live map range/pointer recorded at vkMapMemory time
MemMapState *state = GetRecord(memory)->memMapState;
MapOffset = state->mapOffset;
MapSize = state->mapSize;
MapData = (byte *)state->cpuReadPtr + MapOffset;
}
SERIALISE_ELEMENT(MapOffset).OffsetOrSize();
SERIALISE_ELEMENT(MapSize).OffsetOrSize();
if(IsReplayingAndReading() && memory != VK_NULL_HANDLE)
{
// replay side: map the equivalent range so SerialiseUnmap can write the data into it
VkResult vkr = ObjDisp(device)->MapMemory(Unwrap(device), Unwrap(memory), MapOffset, MapSize, 0,
(void **)&MapData);
if(vkr != VK_SUCCESS)
{
SET_ERROR_RESULT(m_FailedReplayResult, ResultCode::APIReplayFailed,
"Error mapping memory on replay, VkResult: %s", ToStr(vkr).c_str());
return false;
}
}
// common serialisation of the mapped bytes, shared with vkUnmapMemory2KHR
if(!SerialiseUnmap(ser, memory, MapOffset, MapSize, MapData))
return false;
if(IsReplayingAndReading() && MapData && memory != VK_NULL_HANDLE)
ObjDisp(device)->UnmapMemory(Unwrap(device), Unwrap(memory));
SERIALISE_CHECK_READ_ERRORS();
return true;
}
// Wrapped vkUnmapMemory: ProcessUnmap handles all capture-side bookkeeping (serialising
// the unmap chunk where needed), then we perform the real unmap on the driver.
void WrappedVulkan::vkUnmapMemory(VkDevice device, VkDeviceMemory mem)
{
ProcessUnmap(device, mem, NULL);
ObjDisp(device)->UnmapMemory(Unwrap(device), Unwrap(mem));
}
// Serialises a vkUnmapMemory2KHR call (VK_KHR_map_memory2). Mirrors
// Serialise_vkUnmapMemory, but the memory handle travels inside a serialised
// VkMemoryUnmapInfo and the replay map/unmap go through the map_memory2 entry points.
template <typename SerialiserType>
bool WrappedVulkan::Serialise_vkUnmapMemory2KHR(SerialiserType &ser, VkDevice device,
const VkMemoryUnmapInfo *pMemoryUnmapInfo)
{
SERIALISE_ELEMENT(device);
SERIALISE_ELEMENT_LOCAL(UnmapInfo, *pMemoryUnmapInfo).Important();
uint64_t MapOffset = 0;
uint64_t MapSize = 0;
byte *MapData = NULL;
if(IsCaptureMode(m_State))
{
// capture side: fetch the live map range/pointer recorded at map time
MemMapState *state = GetRecord(UnmapInfo.memory)->memMapState;
MapOffset = state->mapOffset;
MapSize = state->mapSize;
MapData = (byte *)state->cpuReadPtr + MapOffset;
}
SERIALISE_ELEMENT(MapOffset).OffsetOrSize();
SERIALISE_ELEMENT(MapSize).OffsetOrSize();
if(IsReplayingAndReading() && UnmapInfo.memory != VK_NULL_HANDLE)
{
// replay side: map the equivalent range with MapMemory2KHR so SerialiseUnmap can
// write the captured data into it. Flags are 0 here - this map is internal to us.
VkMemoryMapInfo mapInfo = {
VK_STRUCTURE_TYPE_MEMORY_MAP_INFO, NULL, 0, Unwrap(UnmapInfo.memory), MapOffset, MapSize,
};
VkResult vkr = ObjDisp(device)->MapMemory2KHR(Unwrap(device), &mapInfo, (void **)&MapData);
if(vkr != VK_SUCCESS)
{
SET_ERROR_RESULT(m_FailedReplayResult, ResultCode::APIReplayFailed,
"Error mapping memory on replay, VkResult: %s", ToStr(vkr).c_str());
return false;
}
}
// common serialisation of the mapped bytes, shared with vkUnmapMemory
if(!SerialiseUnmap(ser, UnmapInfo.memory, MapOffset, MapSize, MapData))
return false;
if(IsReplayingAndReading() && MapData && UnmapInfo.memory != VK_NULL_HANDLE)
{
// unmap via the 2KHR path, with the handle unwrapped for the driver
VkMemoryUnmapInfo unwrapped = UnmapInfo;
unwrapped.memory = Unwrap(unwrapped.memory);
ObjDisp(device)->UnmapMemory2KHR(Unwrap(device), &unwrapped);
}
SERIALISE_CHECK_READ_ERRORS();
return true;
}
// Wrapped vkUnmapMemory2KHR (VK_KHR_map_memory2): ProcessUnmap handles the capture-side
// bookkeeping (passing VK_NULL_HANDLE so the memory is pulled from the info struct),
// then the real unmap is forwarded with an unwrapped copy of the info.
void WrappedVulkan::vkUnmapMemory2KHR(VkDevice device, const VkMemoryUnmapInfo *pMemoryUnmapInfo)
{
ProcessUnmap(device, VK_NULL_HANDLE, pMemoryUnmapInfo);
VkMemoryUnmapInfo unwrapped = *pMemoryUnmapInfo;
unwrapped.memory = Unwrap(unwrapped.memory);
ObjDisp(device)->UnmapMemory2KHR(Unwrap(device), &unwrapped);
}
template <typename SerialiserType>
bool WrappedVulkan::Serialise_vkFlushMappedMemoryRanges(SerialiserType &ser, VkDevice device,
uint32_t memRangeCount,
@@ -3929,3 +4052,6 @@ INSTANTIATE_FUNCTION_SERIALISED(VkResult, vkCreateAccelerationStructureKHR, VkDe
const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
const VkAllocationCallbacks *,
VkAccelerationStructureKHR *pAccelerationStructure);
INSTANTIATE_FUNCTION_SERIALISED(void, vkUnmapMemory2KHR, VkDevice device,
const VkMemoryUnmapInfo *pMemoryUnmapInfo);